repo_name (stringlengths 5-100) | path (stringlengths 4-375) | copies (stringclasses, 991 values) | size (stringlengths 4-7) | content (stringlengths 666-1M) | license (stringclasses, 15 values)
---|---|---|---|---|---
eestay/edx-platform | lms/djangoapps/certificates/management/commands/gen_cert_report.py | 91 | 4512 |
"""
Generate a report of certificate statuses
"""
from __future__ import print_function
from django.core.management.base import BaseCommand, CommandError
from certificates.models import GeneratedCertificate
from django.contrib.auth.models import User
from optparse import make_option
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from django.db.models import Count
class Command(BaseCommand):
help = """
Generate a certificate status report for a given course.
This command does not do anything other than report the current
certificate status.
generating - A request has been made to generate a certificate,
but it has not been generated yet.
regenerating - A request has been made to regenerate a certificate,
but it has not been generated yet.
deleting - A request has been made to delete a certificate.
deleted - The certificate has been deleted.
downloadable - The certificate is available for download.
notpassing - The student was graded but is not passing.
"""
option_list = BaseCommand.option_list + (
make_option('-c', '--course',
metavar='COURSE_ID',
dest='course',
default=None,
help='Only generate for COURSE_ID'),
)
def handle(self, *args, **options):
# Find all courses that have ended
if options['course']:
try:
course_id = CourseKey.from_string(options['course'])
except InvalidKeyError:
print("Course id {} could not be parsed as a CourseKey; falling back to SSCK.from_dep_str".format(options['course']))
course_id = SlashSeparatedCourseKey.from_deprecated_string(options['course'])
else:
raise CommandError("You must specify a course")
cert_data = {}
# find students who are active
# number of enrolled students = downloadable + notpassing
print "Looking up certificate states for {0}".format(options['course'])
enrolled_current = User.objects.filter(
courseenrollment__course_id=course_id,
courseenrollment__is_active=True
)
enrolled_total = User.objects.filter(
courseenrollment__course_id=course_id
)
verified_enrolled = GeneratedCertificate.objects.filter(
course_id__exact=course_id, mode__exact='verified'
)
honor_enrolled = GeneratedCertificate.objects.filter(
course_id__exact=course_id, mode__exact='honor'
)
audit_enrolled = GeneratedCertificate.objects.filter(
course_id__exact=course_id, mode__exact='audit'
)
cert_data[course_id] = {
'enrolled_current': enrolled_current.count(),
'enrolled_total': enrolled_total.count(),
'verified_enrolled': verified_enrolled.count(),
'honor_enrolled': honor_enrolled.count(),
'audit_enrolled': audit_enrolled.count()
}
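# Tally certificates by status; yields rows like {'status': 'downloadable', 'dcount': 42}.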
status_tally = GeneratedCertificate.objects.filter(
course_id__exact=course_id
).values('status').annotate(
dcount=Count('status')
)
cert_data[course_id].update(
{status['status']: status['dcount']
for status in status_tally})
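# Tally downloadable certificates by mode (honor/verified/audit).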
mode_tally = GeneratedCertificate.objects.filter(
course_id__exact=course_id,
status__exact='downloadable'
).values('mode').annotate(
dcount=Count('mode')
)
cert_data[course_id].update(
{mode['mode']: mode['dcount']
for mode in mode_tally}
)
# all states we have seen for all courses
status_headings = sorted(set(
[status for course in cert_data
for status in cert_data[course]])
)
# print the heading for the report
print "{:>26}".format("course ID"),
print ' '.join(["{:>16}".format(heading)
for heading in status_headings]
)
# print the report
print "{0:>26}".format(course_id.to_deprecated_string()),
for heading in status_headings:
if heading in cert_data[course_id]:
print "{:>16}".format(cert_data[course_id][heading]),
else:
print " " * 16,
print
| agpl-3.0 |
yrizk/django-blog | blogvenv/lib/python3.4/site-packages/django/contrib/sessions/backends/cache.py | 61 | 2497 |
from django.conf import settings
from django.contrib.sessions.backends.base import CreateError, SessionBase
from django.core.cache import caches
from django.utils.six.moves import range
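# All session keys share this prefix so they cannot collide with other cache users.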
KEY_PREFIX = "django.contrib.sessions.cache"
class SessionStore(SessionBase):
"""
A cache-based session store.
"""
def __init__(self, session_key=None):
self._cache = caches[settings.SESSION_CACHE_ALIAS]
super(SessionStore, self).__init__(session_key)
@property
def cache_key(self):
return KEY_PREFIX + self._get_or_create_session_key()
def load(self):
try:
session_data = self._cache.get(self.cache_key, None)
except Exception:
# Some backends (e.g. memcache) raise an exception on invalid
# cache keys. If this happens, reset the session. See #17810.
session_data = None
if session_data is not None:
return session_data
self.create()
return {}
def create(self):
# Because a cache can fail silently (e.g. memcache), we don't know if
# we are failing to create a new session because of a key collision or
# because the cache is missing. So we try for a (large) number of times
# and then raise an exception. That's the risk you shoulder if using
# cache backing.
for i in range(10000):
self._session_key = self._get_new_session_key()
try:
self.save(must_create=True)
except CreateError:
continue
self.modified = True
return
raise RuntimeError(
"Unable to create a new session key. "
"It is likely that the cache is unavailable.")
def save(self, must_create=False):
if must_create:
func = self._cache.add
else:
func = self._cache.set
result = func(self.cache_key,
self._get_session(no_load=must_create),
self.get_expiry_age())
if must_create and not result:
raise CreateError
def exists(self, session_key):
return (KEY_PREFIX + session_key) in self._cache
def delete(self, session_key=None):
if session_key is None:
if self.session_key is None:
return
session_key = self.session_key
self._cache.delete(KEY_PREFIX + session_key)
@classmethod
def clear_expired(cls):
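# Cache backends evict expired entries on their own, so there is nothing to do here.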
pass
| apache-2.0 |
wanghaven/readthedocs.org | readthedocs/builds/filters.py | 33 | 1312 |
from django.utils.translation import ugettext_lazy as _
import django_filters
from readthedocs.builds import constants
from readthedocs.builds.models import Build, Version
ANY_REPO = (
('', _('Any')),
)
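# Prepend the blank "Any" choice so the type filter can also match every build.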
BUILD_TYPES = ANY_REPO + constants.BUILD_TYPES
class VersionSlugFilter(django_filters.FilterSet):
class Meta:
model = Version
fields = {
'identifier': ['icontains'],
'slug': ['icontains'],
}
class VersionFilter(django_filters.FilterSet):
project = django_filters.CharFilter(name='project__slug')
# Allow filtering on slug= or version=
slug = django_filters.CharFilter(label=_("Name"), name='slug',
lookup_type='exact')
version = django_filters.CharFilter(label=_("Version"), name='slug',
lookup_type='exact')
class Meta:
model = Version
fields = ['project', 'slug', 'version']
class BuildFilter(django_filters.FilterSet):
date = django_filters.DateRangeFilter(label=_("Build Date"), name="date", lookup_type='range')
type = django_filters.ChoiceFilter(label=_("Build Type"),
choices=BUILD_TYPES)
class Meta:
model = Build
fields = ['type', 'date', 'success']
| mit |
willworth/thermos | thermos/Lib/site-packages/setuptools/command/sdist.py | 130 | 6821 |
from distutils import log
import distutils.command.sdist as orig
import os
import sys
import io
import contextlib
import six
from .py36compat import sdist_add_defaults
import pkg_resources
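# Legacy hook kept for backward compatibility; actual finders come from the
# 'setuptools.file_finders' entry point (see walk_revctrl below).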
_default_revctrl = list
def walk_revctrl(dirname=''):
"""Find all files under revision control"""
for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
for item in ep.load()(dirname):
yield item
class sdist(sdist_add_defaults, orig.sdist):
"""Smart sdist that finds anything supported by revision control"""
user_options = [
('formats=', None,
"formats for source distribution (comma-separated list)"),
('keep-temp', 'k',
"keep the distribution tree around after creating " +
"archive file(s)"),
('dist-dir=', 'd',
"directory to put the source distribution archive(s) in "
"[default: dist]"),
]
negative_opt = {}
READMES = 'README', 'README.rst', 'README.txt'
def run(self):
self.run_command('egg_info')
ei_cmd = self.get_finalized_command('egg_info')
self.filelist = ei_cmd.filelist
self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt'))
self.check_readme()
# Run sub commands
for cmd_name in self.get_sub_commands():
self.run_command(cmd_name)
# Call check_metadata only if no 'check' command
# (distutils <= 2.6)
import distutils.command
if 'check' not in distutils.command.__all__:
self.check_metadata()
self.make_distribution()
dist_files = getattr(self.distribution, 'dist_files', [])
for file in self.archive_files:
data = ('sdist', '', file)
if data not in dist_files:
dist_files.append(data)
def initialize_options(self):
orig.sdist.initialize_options(self)
self._default_to_gztar()
def _default_to_gztar(self):
# only needed on Python prior to 3.6.
if sys.version_info >= (3, 6, 0, 'beta', 1):
return
self.formats = ['gztar']
def make_distribution(self):
"""
Workaround for #516
"""
with self._remove_os_link():
orig.sdist.make_distribution(self)
@staticmethod
@contextlib.contextmanager
def _remove_os_link():
"""
In a context, remove and restore os.link if it exists
"""
class NoValue:
pass
orig_val = getattr(os, 'link', NoValue)
try:
del os.link
except Exception:
pass
try:
yield
finally:
if orig_val is not NoValue:
setattr(os, 'link', orig_val)
def __read_template_hack(self):
# This grody hack closes the template file (MANIFEST.in) if an
# exception occurs during read_template.
# Doing so prevents an error when easy_install attempts to delete the
# file.
try:
orig.sdist.read_template(self)
except Exception:
_, _, tb = sys.exc_info()
tb.tb_next.tb_frame.f_locals['template'].close()
raise
# Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
# has been fixed, so only override the method if we're using an earlier
# Python.
has_leaky_handle = (
sys.version_info < (2, 7, 2)
or (3, 0) <= sys.version_info < (3, 1, 4)
or (3, 2) <= sys.version_info < (3, 2, 1)
)
if has_leaky_handle:
read_template = __read_template_hack
def _add_defaults_python(self):
"""getting python files"""
if self.distribution.has_pure_modules():
build_py = self.get_finalized_command('build_py')
self.filelist.extend(build_py.get_source_files())
# This functionality is incompatible with include_package_data, and
# will in fact create an infinite recursion if include_package_data
# is True. Use of include_package_data will imply that
# distutils-style automatic handling of package_data is disabled
if not self.distribution.include_package_data:
for _, src_dir, _, filenames in build_py.data_files:
self.filelist.extend([os.path.join(src_dir, filename)
for filename in filenames])
def _add_defaults_data_files(self):
try:
if six.PY2:
sdist_add_defaults._add_defaults_data_files(self)
else:
super()._add_defaults_data_files()
except TypeError:
log.warn("data_files contains unexpected objects")
def check_readme(self):
for f in self.READMES:
if os.path.exists(f):
return
else:
self.warn(
"standard file not found: should have one of " +
', '.join(self.READMES)
)
def make_release_tree(self, base_dir, files):
orig.sdist.make_release_tree(self, base_dir, files)
# Save any egg_info command line options used to create this sdist
dest = os.path.join(base_dir, 'setup.cfg')
if hasattr(os, 'link') and os.path.exists(dest):
# unlink and re-copy, since it might be hard-linked, and
# we don't want to change the source version
os.unlink(dest)
self.copy_file('setup.cfg', dest)
self.get_finalized_command('egg_info').save_version_info(dest)
def _manifest_is_not_generated(self):
# check for special comment used in 2.7.1 and higher
if not os.path.isfile(self.manifest):
return False
with io.open(self.manifest, 'rb') as fp:
first_line = fp.readline()
return (first_line !=
'# file GENERATED by distutils, do NOT edit\n'.encode())
def read_manifest(self):
"""Read the manifest file (named by 'self.manifest') and use it to
fill in 'self.filelist', the list of files to include in the source
distribution.
"""
log.info("reading manifest file '%s'", self.manifest)
manifest = open(self.manifest, 'rb')
for line in manifest:
# The manifest must contain UTF-8. See #303.
if six.PY3:
try:
line = line.decode('UTF-8')
except UnicodeDecodeError:
log.warn("%r not UTF-8 decodable -- skipping" % line)
continue
# ignore comments and blank lines
line = line.strip()
if line.startswith('#') or not line:
continue
self.filelist.append(line)
manifest.close()
| mit |
mcking49/apache-flask | Python/Lib/encodings/unicode_escape.py | 852 | 1184 |
""" Python 'unicode-escape' Codec
Written by Marc-Andre Lemburg ([email protected]).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
"""
import codecs
### Codec APIs
class Codec(codecs.Codec):
# Note: Binding these as C functions will result in the class not
# converting them to methods. This is intended.
encode = codecs.unicode_escape_encode
decode = codecs.unicode_escape_decode
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.unicode_escape_encode(input, self.errors)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.unicode_escape_decode(input, self.errors)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='unicode-escape',
encode=Codec.encode,
decode=Codec.decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamwriter=StreamWriter,
streamreader=StreamReader,
)
| mit |
JavaRabbit/CS496_capstone | appengine/flexible/sendgrid/main_test.py | 8 | 1447 |
# Copyright 2016 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
import pytest
@pytest.fixture
def app(monkeypatch):
monkeypatch.setenv('SENDGRID_API_KEY', 'apikey')
monkeypatch.setenv('SENDGRID_SENDER', '[email protected]')
import main
main.app.testing = True
return main.app.test_client()
def test_get(app):
r = app.get('/')
assert r.status_code == 200
@mock.patch('python_http_client.client.Client._make_request')
def test_post(make_request_mock, app):
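# Stub the HTTP transport patched above so the test never sends a real SendGrid request.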
response = mock.Mock()
response.getcode.return_value = 200
response.read.return_value = 'OK'
response.info.return_value = {}
make_request_mock.return_value = response
app.post('/send/email', data={
'to': '[email protected]'
})
assert make_request_mock.called
request = make_request_mock.call_args[0][1]
assert '[email protected]' in request.data.decode('utf-8')
| apache-2.0 |
gregdek/ansible | lib/ansible/modules/remote_management/ucs/ucs_san_connectivity.py | 64 | 9965 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: ucs_san_connectivity
short_description: Configures SAN Connectivity Policies on Cisco UCS Manager
description:
- Configures SAN Connectivity Policies on Cisco UCS Manager.
- Examples can be used with the UCS Platform Emulator U(https://communities.cisco.com/ucspe).
extends_documentation_fragment: ucs
options:
state:
description:
- If C(present), will verify SAN Connectivity Policies are present and will create if needed.
- If C(absent), will verify SAN Connectivity Policies are absent and will delete if needed.
choices: [present, absent]
default: present
name:
description:
- The name of the SAN Connectivity Policy.
- This name can be between 1 and 16 alphanumeric characters.
- "You cannot use spaces or any special characters other than - (hyphen), \"_\" (underscore), : (colon), and . (period)."
- You cannot change this name after the policy is created.
required: yes
description:
description:
- A description of the policy.
- Cisco recommends including information about where and when to use the policy.
- Enter up to 256 characters.
- "You can use any characters or spaces except the following:"
- "` (accent mark), \ (backslash), ^ (carat), \" (double quote), = (equal sign), > (greater than), < (less than), or ' (single quote)."
aliases: [ descr ]
wwnn_pool:
description:
- Name of the WWNN pool to use for WWNN assignment.
default: default
vhba_list:
description:
- List of vHBAs used by the SAN Connectivity Policy.
- vHBAs used by the SAN Connectivity Policy must be created from a vHBA template.
- "Each list element has the following suboptions:"
- "= name"
- " The name of the virtual HBA (required)."
- "= vhba_template"
- " The name of the virtual HBA template (required)."
- "- adapter_policy"
- " The name of the Fibre Channel adapter policy."
- " A user defined policy can be used, or one of the system defined policies (default, Linux, Solaris, VMware, Windows, WindowsBoot)"
- " [Default: default]"
- "- order"
- " String specifying the vHBA assignment order (e.g., '1', '2')."
- " [Default: unspecified]"
org_dn:
description:
- Org dn (distinguished name)
default: org-root
requirements:
- ucsmsdk
author:
- David Soper (@dsoper2)
- CiscoUcs (@CiscoUcs)
version_added: '2.5'
'''
EXAMPLES = r'''
- name: Configure SAN Connectivity Policy
ucs_san_connectivity:
hostname: 172.16.143.150
username: admin
password: password
name: Cntr-FC-Boot
wwnn_pool: WWNN-Pool
vhba_list:
- name: Fabric-A
vhba_template: vHBA-Template-A
adapter_policy: Linux
- name: Fabric-B
vhba_template: vHBA-Template-B
adapter_policy: Linux
- name: Remove SAN Connectivity Policy
ucs_san_connectivity:
hostname: 172.16.143.150
username: admin
password: password
name: Cntr-FC-Boot
state: absent
'''
RETURN = r'''
#
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.remote_management.ucs import UCSModule, ucs_argument_spec
def main():
argument_spec = ucs_argument_spec
argument_spec.update(
org_dn=dict(type='str', default='org-root'),
name=dict(type='str'),
descr=dict(type='str'),
wwnn_pool=dict(type='str', default='default'),
vhba_list=dict(type='list'),
state=dict(type='str', default='present', choices=['present', 'absent']),
san_connectivity_list=dict(type='list'),
)
# Note that use of san_connectivity_list is an experimental feature which allows multiple resource updates with a single UCSM connection.
# Support for san_connectivity_list may change or be removed once persistent UCS connections are supported.
# Either san_connectivity_list or name is required (user can specify either a list or single resource).
module = AnsibleModule(
argument_spec,
supports_check_mode=True,
required_one_of=[
['san_connectivity_list', 'name'],
],
mutually_exclusive=[
['san_connectivity_list', 'name'],
],
)
ucs = UCSModule(module)
err = False
from ucsmsdk.mometa.vnic.VnicSanConnPolicy import VnicSanConnPolicy
from ucsmsdk.mometa.vnic.VnicFcNode import VnicFcNode
from ucsmsdk.mometa.vnic.VnicFc import VnicFc
from ucsmsdk.mometa.vnic.VnicFcIf import VnicFcIf
changed = False
try:
# Only documented use is a single resource, but to also support the experimental
# feature allowing multiple updates, all params are converted to a san_connectivity_list below.
if module.params['san_connectivity_list']:
# directly use the list (single resource and list are mutually exclusive)
san_connectivity_list = module.params['san_connectivity_list']
else:
# single resource specified, create list from the current params
san_connectivity_list = [module.params]
for san_connectivity in san_connectivity_list:
mo_exists = False
props_match = False
# set default params. Done here to set values for lists which can't be done in the argument_spec
if not san_connectivity.get('descr'):
san_connectivity['descr'] = ''
if not san_connectivity.get('wwnn_pool'):
san_connectivity['wwnn_pool'] = 'default'
if san_connectivity.get('vhba_list'):
for vhba in san_connectivity['vhba_list']:
if not vhba.get('adapter_policy'):
vhba['adapter_policy'] = ''
if not vhba.get('order'):
vhba['order'] = 'unspecified'
# dn is <org_dn>/san-conn-pol-<name>
dn = module.params['org_dn'] + '/san-conn-pol-' + san_connectivity['name']
mo = ucs.login_handle.query_dn(dn)
if mo:
mo_exists = True
# check top-level mo props
kwargs = dict(descr=san_connectivity['descr'])
if (mo.check_prop_match(**kwargs)):
# top-level props match, check next level mo/props
# vnicFcNode object
child_dn = dn + '/fc-node'
mo_1 = ucs.login_handle.query_dn(child_dn)
if mo_1:
kwargs = dict(ident_pool_name=san_connectivity['wwnn_pool'])
if (mo_1.check_prop_match(**kwargs)):
if not san_connectivity.get('vhba_list'):
props_match = True
else:
# check vnicFc props
for vhba in san_connectivity['vhba_list']:
child_dn = dn + '/fc-' + vhba['name']
mo_2 = ucs.login_handle.query_dn(child_dn)
kwargs = {}
kwargs['adaptor_profile_name'] = vhba['adapter_policy']
kwargs['order'] = vhba['order']
kwargs['nw_templ_name'] = vhba['vhba_template']
if (mo_2.check_prop_match(**kwargs)):
props_match = True
if module.params['state'] == 'absent':
# mo must exist but all properties do not have to match
if mo_exists:
if not module.check_mode:
ucs.login_handle.remove_mo(mo)
ucs.login_handle.commit()
changed = True
else:
if not props_match:
if not module.check_mode:
# create if mo does not already exist
mo = VnicSanConnPolicy(
parent_mo_or_dn=module.params['org_dn'],
name=san_connectivity['name'],
descr=san_connectivity['descr'],
)
mo_1 = VnicFcNode(
parent_mo_or_dn=mo,
ident_pool_name=san_connectivity['wwnn_pool'],
addr='pool-derived',
)
if san_connectivity.get('vhba_list'):
for vhba in san_connectivity['vhba_list']:
mo_2 = VnicFc(
parent_mo_or_dn=mo,
name=vhba['name'],
adaptor_profile_name=vhba['adapter_policy'],
nw_templ_name=vhba['vhba_template'],
order=vhba['order'],
)
mo_2_1 = VnicFcIf(
parent_mo_or_dn=mo_2,
name='default',
)
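# add_mo's second argument (modify_present=True) updates the object in place if it already exists.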
ucs.login_handle.add_mo(mo, True)
ucs.login_handle.commit()
changed = True
except Exception as e:
err = True
ucs.result['msg'] = "setup error: %s " % str(e)
ucs.result['changed'] = changed
if err:
module.fail_json(**ucs.result)
module.exit_json(**ucs.result)
if __name__ == '__main__':
main()
| gpl-3.0 |
bravominski/PennApps2015-HeartMates | venv/lib/python2.7/site-packages/flask/helpers.py | 776 | 33793 |
# -*- coding: utf-8 -*-
"""
flask.helpers
~~~~~~~~~~~~~
Implements various helpers.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import os
import sys
import pkgutil
import posixpath
import mimetypes
from time import time
from zlib import adler32
from threading import RLock
from werkzeug.routing import BuildError
from functools import update_wrapper
try:
from werkzeug.urls import url_quote
except ImportError:
from urlparse import quote as url_quote
from werkzeug.datastructures import Headers
from werkzeug.exceptions import NotFound
# this was moved in 0.7
try:
from werkzeug.wsgi import wrap_file
except ImportError:
from werkzeug.utils import wrap_file
from jinja2 import FileSystemLoader
from .signals import message_flashed
from .globals import session, _request_ctx_stack, _app_ctx_stack, \
current_app, request
from ._compat import string_types, text_type
# sentinel
_missing = object()
# what separators does this operating system provide that are not a slash?
# this is used by the send_from_directory function to ensure that nobody is
# able to access files from outside the filesystem.
_os_alt_seps = list(sep for sep in [os.path.sep, os.path.altsep]
if sep not in (None, '/'))
def _endpoint_from_view_func(view_func):
"""Internal helper that returns the default endpoint for a given
function. This always is the function name.
"""
assert view_func is not None, 'expected view func if endpoint ' \
'is not provided.'
return view_func.__name__
def stream_with_context(generator_or_function):
"""Request contexts disappear when the response is started on the server.
This is done for efficiency reasons and to make it less likely to encounter
memory leaks with badly written WSGI middlewares. The downside is that if
you are using streamed responses, the generator cannot access request bound
information any more.
This function however can help you keep the context around for longer::
from flask import stream_with_context, request, Response
@app.route('/stream')
def streamed_response():
@stream_with_context
def generate():
yield 'Hello '
yield request.args['name']
yield '!'
return Response(generate())
Alternatively it can also be used around a specific generator::
from flask import stream_with_context, request, Response
@app.route('/stream')
def streamed_response():
def generate():
yield 'Hello '
yield request.args['name']
yield '!'
return Response(stream_with_context(generate()))
.. versionadded:: 0.9
"""
try:
gen = iter(generator_or_function)
except TypeError:
def decorator(*args, **kwargs):
gen = generator_or_function()
return stream_with_context(gen)
return update_wrapper(decorator, generator_or_function)
def generator():
ctx = _request_ctx_stack.top
if ctx is None:
raise RuntimeError('Attempted to stream with context but '
'there was no context in the first place to keep around.')
with ctx:
# Dummy sentinel. Has to be inside the context block or we're
# not actually keeping the context around.
yield None
# The try/finally is here so that if someone passes a WSGI level
# iterator in we're still running the cleanup logic. Generators
# don't need that because they are closed on their destruction
# automatically.
try:
for item in gen:
yield item
finally:
if hasattr(gen, 'close'):
gen.close()
# The trick is to start the generator. Then the code execution runs until
# the first dummy None is yielded at which point the context was already
# pushed. This item is discarded. Then when the iteration continues the
# real generator is executed.
wrapped_g = generator()
next(wrapped_g)
return wrapped_g
def make_response(*args):
"""Sometimes it is necessary to set additional headers in a view. Because
views do not have to return response objects but can return a value that
is converted into a response object by Flask itself, it becomes tricky to
add headers to it. This function can be called instead of using a return
and you will get a response object which you can use to attach headers.
If view looked like this and you want to add a new header::
def index():
return render_template('index.html', foo=42)
You can now do something like this::
def index():
response = make_response(render_template('index.html', foo=42))
response.headers['X-Parachutes'] = 'parachutes are cool'
return response
This function accepts the very same arguments you can return from a
view function. This for example creates a response with a 404 error
code::
response = make_response(render_template('not_found.html'), 404)
The other use case of this function is to force the return value of a
view function into a response which is helpful with view
decorators::
response = make_response(view_function())
response.headers['X-Parachutes'] = 'parachutes are cool'
Internally this function does the following things:
- if no arguments are passed, it creates a new response object
- if one argument is passed, :meth:`flask.Flask.make_response`
is invoked with it.
- if more than one argument is passed, the arguments are passed
to the :meth:`flask.Flask.make_response` function as tuple.
.. versionadded:: 0.6
"""
if not args:
return current_app.response_class()
if len(args) == 1:
args = args[0]
return current_app.make_response(args)
def url_for(endpoint, **values):
"""Generates a URL to the given endpoint with the method provided.
Variable arguments that are unknown to the target endpoint are appended
to the generated URL as query arguments. If the value of a query argument
is `None`, the whole pair is skipped. In case blueprints are active
you can shortcut references to the same blueprint by prefixing the
local endpoint with a dot (``.``).
This will reference the index function local to the current blueprint::
url_for('.index')
For more information, head over to the :ref:`Quickstart <url-building>`.
To integrate applications, :class:`Flask` has a hook to intercept URL build
errors through :attr:`Flask.build_error_handler`. The `url_for` function
results in a :exc:`~werkzeug.routing.BuildError` when the current app does
not have a URL for the given endpoint and values. When it does, the
:data:`~flask.current_app` calls its :attr:`~Flask.build_error_handler` if
it is not `None`, which can return a string to use as the result of
`url_for` (instead of `url_for`'s default to raise the
:exc:`~werkzeug.routing.BuildError` exception) or re-raise the exception.
An example::
def external_url_handler(error, endpoint, **values):
"Looks up an external URL when `url_for` cannot build a URL."
# This is an example of hooking the build_error_handler.
# Here, lookup_url is some utility function you've built
# which looks up the endpoint in some external URL registry.
url = lookup_url(endpoint, **values)
if url is None:
# External lookup did not have a URL.
# Re-raise the BuildError, in context of original traceback.
exc_type, exc_value, tb = sys.exc_info()
if exc_value is error:
raise exc_type, exc_value, tb
else:
raise error
# url_for will use this result, instead of raising BuildError.
return url
app.build_error_handler = external_url_handler
Here, `error` is the instance of :exc:`~werkzeug.routing.BuildError`, and
`endpoint` and `**values` are the arguments passed into `url_for`. Note
that this is for building URLs outside the current application, and not for
handling 404 NotFound errors.
.. versionadded:: 0.10
The `_scheme` parameter was added.
.. versionadded:: 0.9
The `_anchor` and `_method` parameters were added.
.. versionadded:: 0.9
Calls :meth:`Flask.handle_build_error` on
:exc:`~werkzeug.routing.BuildError`.
:param endpoint: the endpoint of the URL (name of the function)
:param values: the variable arguments of the URL rule
:param _external: if set to `True`, an absolute URL is generated. Server
address can be changed via `SERVER_NAME` configuration variable which
defaults to `localhost`.
:param _scheme: a string specifying the desired URL scheme. The `_external`
parameter must be set to `True` or a `ValueError` is raised.
:param _anchor: if provided this is added as anchor to the URL.
:param _method: if provided this explicitly specifies an HTTP method.
"""
appctx = _app_ctx_stack.top
reqctx = _request_ctx_stack.top
if appctx is None:
raise RuntimeError('Attempted to generate a URL without the '
'application context being pushed. This has to be '
'executed when application context is available.')
# If request specific information is available we have some extra
# features that support "relative" urls.
if reqctx is not None:
url_adapter = reqctx.url_adapter
blueprint_name = request.blueprint
if not reqctx.request._is_old_module:
if endpoint[:1] == '.':
if blueprint_name is not None:
endpoint = blueprint_name + endpoint
else:
endpoint = endpoint[1:]
else:
# TODO: get rid of this deprecated functionality in 1.0
if '.' not in endpoint:
if blueprint_name is not None:
endpoint = blueprint_name + '.' + endpoint
elif endpoint.startswith('.'):
endpoint = endpoint[1:]
external = values.pop('_external', False)
# Otherwise go with the url adapter from the appctx and make
# the urls external by default.
else:
url_adapter = appctx.url_adapter
if url_adapter is None:
raise RuntimeError('Application was not able to create a URL '
'adapter for request independent URL generation. '
'You might be able to fix this by setting '
'the SERVER_NAME config variable.')
external = values.pop('_external', True)
anchor = values.pop('_anchor', None)
method = values.pop('_method', None)
scheme = values.pop('_scheme', None)
appctx.app.inject_url_defaults(endpoint, values)
if scheme is not None:
if not external:
raise ValueError('When specifying _scheme, _external must be True')
url_adapter.url_scheme = scheme
try:
rv = url_adapter.build(endpoint, values, method=method,
force_external=external)
except BuildError as error:
# We need to inject the values again so that the app callback can
# deal with that sort of stuff.
values['_external'] = external
values['_anchor'] = anchor
values['_method'] = method
return appctx.app.handle_url_build_error(error, endpoint, values)
if anchor is not None:
rv += '#' + url_quote(anchor)
return rv
def get_template_attribute(template_name, attribute):
"""Loads a macro (or variable) a template exports. This can be used to
invoke a macro from within Python code. If you for example have a
template named `_cider.html` with the following contents:
.. sourcecode:: html+jinja
{% macro hello(name) %}Hello {{ name }}!{% endmacro %}
You can access this from Python code like this::
hello = get_template_attribute('_cider.html', 'hello')
return hello('World')
.. versionadded:: 0.2
:param template_name: the name of the template
:param attribute: the name of the variable or macro to access
"""
return getattr(current_app.jinja_env.get_template(template_name).module,
attribute)
def flash(message, category='message'):
"""Flashes a message to the next request. In order to remove the
flashed message from the session and to display it to the user,
the template has to call :func:`get_flashed_messages`.
.. versionchanged:: 0.3
`category` parameter added.
:param message: the message to be flashed.
:param category: the category for the message. The following values
are recommended: ``'message'`` for any kind of message,
``'error'`` for errors, ``'info'`` for information
messages and ``'warning'`` for warnings. However any
kind of string can be used as category.
"""
# Original implementation:
#
# session.setdefault('_flashes', []).append((category, message))
#
# This assumed that changes made to mutable structures in the session are
# always in sync with the session object, which is not true for session
# implementations that use external storage for keeping their keys/values.
flashes = session.get('_flashes', [])
flashes.append((category, message))
session['_flashes'] = flashes
message_flashed.send(current_app._get_current_object(),
message=message, category=category)
def get_flashed_messages(with_categories=False, category_filter=[]):
"""Pulls all flashed messages from the session and returns them.
Further calls in the same request to the function will return
the same messages. By default just the messages are returned,
but when `with_categories` is set to `True`, the return value will
be a list of tuples in the form ``(category, message)`` instead.
Filter the flashed messages to one or more categories by providing those
categories in `category_filter`. This allows rendering categories in
separate html blocks. The `with_categories` and `category_filter`
arguments are distinct:
* `with_categories` controls whether categories are returned with message
text (`True` gives a tuple, where `False` gives just the message text).
* `category_filter` filters the messages down to only those matching the
provided categories.
See :ref:`message-flashing-pattern` for examples.
.. versionchanged:: 0.3
`with_categories` parameter added.
.. versionchanged:: 0.9
`category_filter` parameter added.
:param with_categories: set to `True` to also receive categories.
:param category_filter: whitelist of categories to limit return values
"""
flashes = _request_ctx_stack.top.flashes
if flashes is None:
_request_ctx_stack.top.flashes = flashes = session.pop('_flashes') \
if '_flashes' in session else []
if category_filter:
flashes = list(filter(lambda f: f[0] in category_filter, flashes))
if not with_categories:
return [x[1] for x in flashes]
return flashes
def send_file(filename_or_fp, mimetype=None, as_attachment=False,
attachment_filename=None, add_etags=True,
cache_timeout=None, conditional=False):
"""Sends the contents of a file to the client. This will use the
most efficient method available and configured. By default it will
try to use the WSGI server's file_wrapper support. Alternatively
you can set the application's :attr:`~Flask.use_x_sendfile` attribute
to ``True`` to directly emit an `X-Sendfile` header. This however
requires support of the underlying webserver for `X-Sendfile`.
By default it will try to guess the mimetype for you, but you can
also explicitly provide one. For extra security you probably want
to send certain files as attachment (HTML for instance). The mimetype
guessing requires a `filename` or an `attachment_filename` to be
provided.
Please never pass filenames to this function from user sources without
checking them first. Something like this is usually sufficient to
avoid security problems::
if '..' in filename or filename.startswith('/'):
abort(404)
.. versionadded:: 0.2
.. versionadded:: 0.5
The `add_etags`, `cache_timeout` and `conditional` parameters were
added. The default behavior is now to attach etags.
.. versionchanged:: 0.7
mimetype guessing and etag support for file objects was
deprecated because it was unreliable. Pass a filename if you are
able to, otherwise attach an etag yourself. This functionality
will be removed in Flask 1.0
.. versionchanged:: 0.9
cache_timeout pulls its default from application config, when None.
:param filename_or_fp: the filename of the file to send. This is
relative to the :attr:`~Flask.root_path` if a
relative path is specified.
Alternatively a file object might be provided
in which case `X-Sendfile` might not work and
fall back to the traditional method. Make sure
that the file pointer is positioned at the start
of data to send before calling :func:`send_file`.
:param mimetype: the mimetype of the file if provided, otherwise
auto detection happens.
:param as_attachment: set to `True` if you want to send this file with
a ``Content-Disposition: attachment`` header.
:param attachment_filename: the filename for the attachment if it
differs from the file's filename.
:param add_etags: set to `False` to disable attaching of etags.
:param conditional: set to `True` to enable conditional responses.
:param cache_timeout: the timeout in seconds for the headers. When `None`
(default), this value is set by
:meth:`~Flask.get_send_file_max_age` of
:data:`~flask.current_app`.
"""
mtime = None
if isinstance(filename_or_fp, string_types):
filename = filename_or_fp
file = None
else:
from warnings import warn
file = filename_or_fp
filename = getattr(file, 'name', None)
# XXX: this behavior is now deprecated because it was unreliable.
# removed in Flask 1.0
if not attachment_filename and not mimetype \
and isinstance(filename, string_types):
warn(DeprecationWarning('The filename support for file objects '
'passed to send_file is now deprecated. Pass an '
'attachment_filename if you want mimetypes to be guessed.'),
stacklevel=2)
if add_etags:
warn(DeprecationWarning('In future flask releases etags will no '
'longer be generated for file objects passed to the send_file '
'function because this behavior was unreliable. Pass '
'filenames instead if possible, otherwise attach an etag '
'yourself based on another value'), stacklevel=2)
if filename is not None:
if not os.path.isabs(filename):
filename = os.path.join(current_app.root_path, filename)
if mimetype is None and (filename or attachment_filename):
mimetype = mimetypes.guess_type(filename or attachment_filename)[0]
if mimetype is None:
mimetype = 'application/octet-stream'
headers = Headers()
if as_attachment:
if attachment_filename is None:
if filename is None:
raise TypeError('filename unavailable, required for '
'sending as attachment')
attachment_filename = os.path.basename(filename)
headers.add('Content-Disposition', 'attachment',
filename=attachment_filename)
if current_app.use_x_sendfile and filename:
if file is not None:
file.close()
headers['X-Sendfile'] = filename
headers['Content-Length'] = os.path.getsize(filename)
data = None
else:
if file is None:
file = open(filename, 'rb')
mtime = os.path.getmtime(filename)
headers['Content-Length'] = os.path.getsize(filename)
data = wrap_file(request.environ, file)
rv = current_app.response_class(data, mimetype=mimetype, headers=headers,
direct_passthrough=True)
# if we know the file modification date, we can store it as
# the time of the last modification.
if mtime is not None:
rv.last_modified = int(mtime)
rv.cache_control.public = True
if cache_timeout is None:
cache_timeout = current_app.get_send_file_max_age(filename)
if cache_timeout is not None:
rv.cache_control.max_age = cache_timeout
rv.expires = int(time() + cache_timeout)
if add_etags and filename is not None:
rv.set_etag('flask-%s-%s-%s' % (
os.path.getmtime(filename),
os.path.getsize(filename),
adler32(
filename.encode('utf-8') if isinstance(filename, text_type)
else filename
) & 0xffffffff
))
if conditional:
rv = rv.make_conditional(request)
# make sure we don't send x-sendfile for servers that
# ignore the 304 status code for x-sendfile.
if rv.status_code == 304:
rv.headers.pop('x-sendfile', None)
return rv
def safe_join(directory, filename):
"""Safely join `directory` and `filename`.
Example usage::
@app.route('/wiki/<path:filename>')
def wiki_page(filename):
filename = safe_join(app.config['WIKI_FOLDER'], filename)
with open(filename, 'rb') as fd:
content = fd.read() # Read and process the file content...
:param directory: the base directory.
:param filename: the untrusted filename relative to that directory.
:raises: :class:`~werkzeug.exceptions.NotFound` if the resulting path
would fall out of `directory`.
"""
filename = posixpath.normpath(filename)
for sep in _os_alt_seps:
if sep in filename:
raise NotFound()
if os.path.isabs(filename) or \
filename == '..' or \
filename.startswith('../'):
raise NotFound()
return os.path.join(directory, filename)
def send_from_directory(directory, filename, **options):
"""Send a file from a given directory with :func:`send_file`. This
is a secure way to quickly expose static files from an upload folder
or something similar.
Example usage::
@app.route('/uploads/<path:filename>')
def download_file(filename):
return send_from_directory(app.config['UPLOAD_FOLDER'],
filename, as_attachment=True)
.. admonition:: Sending files and Performance
It is strongly recommended to activate either `X-Sendfile` support in
your webserver or (if no authentication happens) to tell the webserver
to serve files for the given path on its own without calling into the
web application for improved performance.
.. versionadded:: 0.5
:param directory: the directory where all the files are stored.
:param filename: the filename relative to that directory to
download.
:param options: optional keyword arguments that are directly
forwarded to :func:`send_file`.
"""
filename = safe_join(directory, filename)
if not os.path.isfile(filename):
raise NotFound()
options.setdefault('conditional', True)
return send_file(filename, **options)
def get_root_path(import_name):
"""Returns the path to a package or cwd if that cannot be found. This
returns the path of a package or the folder that contains a module.
Not to be confused with the package path returned by :func:`find_package`.
"""
# Module already imported and has a file attribute. Use that first.
mod = sys.modules.get(import_name)
if mod is not None and hasattr(mod, '__file__'):
return os.path.dirname(os.path.abspath(mod.__file__))
# Next attempt: check the loader.
loader = pkgutil.get_loader(import_name)
# Loader does not exist or we're referring to an unloaded main module
# or a main module without path (interactive sessions), go with the
# current working directory.
if loader is None or import_name == '__main__':
return os.getcwd()
# For .egg, zipimporter does not have get_filename until Python 2.7.
# Some other loaders might exhibit the same behavior.
if hasattr(loader, 'get_filename'):
filepath = loader.get_filename(import_name)
else:
# Fall back to imports.
__import__(import_name)
filepath = sys.modules[import_name].__file__
# filepath is import_name.py for a module, or __init__.py for a package.
return os.path.dirname(os.path.abspath(filepath))
def find_package(import_name):
"""Finds a package and returns the prefix (or None if the package is
not installed) as well as the folder that contains the package or
module as a tuple. The package path returned is the module that would
have to be added to the pythonpath in order to make it possible to
import the module. The prefix is the path below which a UNIX like
folder structure exists (lib, share etc.).
"""
root_mod_name = import_name.split('.')[0]
loader = pkgutil.get_loader(root_mod_name)
if loader is None or import_name == '__main__':
# import name is not found, or interactive/main module
package_path = os.getcwd()
else:
# For .egg, zipimporter does not have get_filename until Python 2.7.
if hasattr(loader, 'get_filename'):
filename = loader.get_filename(root_mod_name)
elif hasattr(loader, 'archive'):
# zipimporter's loader.archive points to the .egg or .zip file;
# the archive filename is dropped by the dirname call below.
filename = loader.archive
else:
# At least one loader is missing both get_filename and archive:
# Google App Engine's HardenedModulesHook
#
# Fall back to imports.
__import__(import_name)
filename = sys.modules[import_name].__file__
package_path = os.path.abspath(os.path.dirname(filename))
# package_path ends with __init__.py for a package
if loader.is_package(root_mod_name):
package_path = os.path.dirname(package_path)
site_parent, site_folder = os.path.split(package_path)
py_prefix = os.path.abspath(sys.prefix)
if package_path.startswith(py_prefix):
return py_prefix, package_path
elif site_folder.lower() == 'site-packages':
parent, folder = os.path.split(site_parent)
# Windows like installations
if folder.lower() == 'lib':
base_dir = parent
# UNIX like installations
elif os.path.basename(parent).lower() == 'lib':
base_dir = os.path.dirname(parent)
else:
base_dir = site_parent
return base_dir, package_path
return None, package_path
class locked_cached_property(object):
"""A decorator that converts a function into a lazy property. The
function wrapped is called the first time to retrieve the result
and then that calculated result is used the next time you access
the value. Works like the one in Werkzeug but has a lock for
thread safety.
"""
def __init__(self, func, name=None, doc=None):
self.__name__ = name or func.__name__
self.__module__ = func.__module__
self.__doc__ = doc or func.__doc__
self.func = func
self.lock = RLock()
def __get__(self, obj, type=None):
if obj is None:
return self
with self.lock:
value = obj.__dict__.get(self.__name__, _missing)
if value is _missing:
value = self.func(obj)
obj.__dict__[self.__name__] = value
return value
class _PackageBoundObject(object):
def __init__(self, import_name, template_folder=None):
#: The name of the package or module. Do not change this once
#: it was set by the constructor.
self.import_name = import_name
#: location of the templates. `None` if templates should not be
#: exposed.
self.template_folder = template_folder
#: Where is the app root located?
self.root_path = get_root_path(self.import_name)
self._static_folder = None
self._static_url_path = None
def _get_static_folder(self):
if self._static_folder is not None:
return os.path.join(self.root_path, self._static_folder)
def _set_static_folder(self, value):
self._static_folder = value
static_folder = property(_get_static_folder, _set_static_folder)
del _get_static_folder, _set_static_folder
def _get_static_url_path(self):
if self._static_url_path is None:
if self.static_folder is None:
return None
return '/' + os.path.basename(self.static_folder)
return self._static_url_path
def _set_static_url_path(self, value):
self._static_url_path = value
static_url_path = property(_get_static_url_path, _set_static_url_path)
del _get_static_url_path, _set_static_url_path
@property
def has_static_folder(self):
"""This is `True` if the package bound object's container has a
folder named ``'static'``.
.. versionadded:: 0.5
"""
return self.static_folder is not None
@locked_cached_property
def jinja_loader(self):
"""The Jinja loader for this package bound object.
.. versionadded:: 0.5
"""
if self.template_folder is not None:
return FileSystemLoader(os.path.join(self.root_path,
self.template_folder))
def get_send_file_max_age(self, filename):
"""Provides default cache_timeout for the :func:`send_file` functions.
By default, this function returns ``SEND_FILE_MAX_AGE_DEFAULT`` from
the configuration of :data:`~flask.current_app`.
Static file functions such as :func:`send_from_directory` use this
function, and :func:`send_file` calls this function on
:data:`~flask.current_app` when the given cache_timeout is `None`. If a
cache_timeout is given in :func:`send_file`, that timeout is used;
otherwise, this method is called.
This allows subclasses to change the behavior when sending files based
on the filename. For example, to set the cache timeout for .js files
to 60 seconds::
class MyFlask(flask.Flask):
def get_send_file_max_age(self, name):
if name.lower().endswith('.js'):
return 60
return flask.Flask.get_send_file_max_age(self, name)
.. versionadded:: 0.9
"""
return current_app.config['SEND_FILE_MAX_AGE_DEFAULT']
def send_static_file(self, filename):
"""Function used internally to send static files from the static
folder to the browser.
.. versionadded:: 0.5
"""
if not self.has_static_folder:
raise RuntimeError('No static folder for this object')
# Ensure get_send_file_max_age is called in all cases.
# Here, we ensure get_send_file_max_age is called for Blueprints.
cache_timeout = self.get_send_file_max_age(filename)
return send_from_directory(self.static_folder, filename,
cache_timeout=cache_timeout)
def open_resource(self, resource, mode='rb'):
"""Opens a resource from the application's resource folder. To see
how this works, consider the following folder structure::
/myapplication.py
/schema.sql
/static
/style.css
/templates
/layout.html
/index.html
If you want to open the `schema.sql` file you would do the
following::
with app.open_resource('schema.sql') as f:
contents = f.read()
do_something_with(contents)
:param resource: the name of the resource. To access resources within
subfolders use forward slashes as separator.
:param mode: resource file opening mode, default is 'rb'.
"""
if mode not in ('r', 'rb'):
raise ValueError('Resources can only be opened for reading')
return open(os.path.join(self.root_path, resource), mode)
| apache-2.0 |
rajalokan/nova | nova/notifications/objects/exception.py | 3 | 1991 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import inspect
import six
from nova.notifications.objects import base
from nova.objects import base as nova_base
from nova.objects import fields
@nova_base.NovaObjectRegistry.register_notification
class ExceptionPayload(base.NotificationPayloadBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'module_name': fields.StringField(),
'function_name': fields.StringField(),
'exception': fields.StringField(),
'exception_message': fields.StringField()
}
@classmethod
def from_exception(cls, fault):
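# inspect.trace() lists the frames of the traceback currently being
# handled; the last entry is the innermost frame, where the exception
# was raised.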
trace = inspect.trace()[-1]
# TODO(gibi): apply strutils.mask_password on exception_message and
# consider emitting the exception_message only if the safe flag is
# true in the exception like in the REST API
module = inspect.getmodule(trace[0])
module_name = module.__name__ if module else 'unknown'
return cls(
function_name=trace[3],
module_name=module_name,
exception=fault.__class__.__name__,
exception_message=six.text_type(fault))
@base.notification_sample('compute-exception.json')
@nova_base.NovaObjectRegistry.register_notification
class ExceptionNotification(base.NotificationBase):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'payload': fields.ObjectField('ExceptionPayload')
}
| apache-2.0 |
Cinntax/home-assistant | homeassistant/components/brunt/cover.py | 2 | 5307 |
"""Support for Brunt Blind Engine covers."""
import logging
import voluptuous as vol
from homeassistant.const import ATTR_ATTRIBUTION, CONF_PASSWORD, CONF_USERNAME
from homeassistant.components.cover import (
ATTR_POSITION,
CoverDevice,
PLATFORM_SCHEMA,
SUPPORT_CLOSE,
SUPPORT_OPEN,
SUPPORT_SET_POSITION,
)
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
COVER_FEATURES = SUPPORT_OPEN | SUPPORT_CLOSE | SUPPORT_SET_POSITION
DEVICE_CLASS = "window"
ATTR_REQUEST_POSITION = "request_position"
NOTIFICATION_ID = "brunt_notification"
NOTIFICATION_TITLE = "Brunt Cover Setup"
ATTRIBUTION = "Based on an unofficial Brunt SDK."
CLOSED_POSITION = 0
OPEN_POSITION = 100
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{vol.Required(CONF_USERNAME): cv.string, vol.Required(CONF_PASSWORD): cv.string}
)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the brunt platform."""
# pylint: disable=no-name-in-module
from brunt import BruntAPI
username = config[CONF_USERNAME]
password = config[CONF_PASSWORD]
bapi = BruntAPI(username=username, password=password)
try:
things = bapi.getThings()["things"]
if not things:
_LOGGER.error("No things present in account.")
else:
add_entities(
[
BruntDevice(bapi, thing["NAME"], thing["thingUri"])
for thing in things
],
True,
)
except (TypeError, KeyError, NameError, ValueError) as ex:
_LOGGER.error("%s", ex)
hass.components.persistent_notification.create(
"Error: {}<br />"
"You will need to restart hass after fixing."
"".format(ex),
title=NOTIFICATION_TITLE,
notification_id=NOTIFICATION_ID,
)
class BruntDevice(CoverDevice):
"""
Representation of a Brunt cover device.
Contains the common logic for all Brunt devices.
"""
def __init__(self, bapi, name, thing_uri):
"""Init the Brunt device."""
self._bapi = bapi
self._name = name
self._thing_uri = thing_uri
self._state = {}
self._available = None
@property
def name(self):
"""Return the name of the device as reported by tellcore."""
return self._name
@property
def available(self):
"""Could the device be accessed during the last update call."""
return self._available
@property
def current_cover_position(self):
"""
Return current position of cover.
None is unknown, 0 is closed, 100 is fully open.
"""
pos = self._state.get("currentPosition")
return int(pos) if pos else None
@property
def request_cover_position(self):
"""
Return the requested position of the cover.
The request position is the position of the last request
sent to Brunt; at times it differs from the current position by 1.
None is unknown, 0 is closed, 100 is fully open.
"""
pos = self._state.get("requestPosition")
return int(pos) if pos else None
@property
def move_state(self):
"""
Return current moving state of cover.
None is unknown, 0 when stopped, 1 when opening, 2 when closing
"""
mov = self._state.get("moveState")
return int(mov) if mov else None
@property
def is_opening(self):
"""Return if the cover is opening or not."""
return self.move_state == 1
@property
def is_closing(self):
"""Return if the cover is closing or not."""
return self.move_state == 2
@property
def device_state_attributes(self):
"""Return the detailed device state attributes."""
return {
ATTR_ATTRIBUTION: ATTRIBUTION,
ATTR_REQUEST_POSITION: self.request_cover_position,
}
@property
def device_class(self):
"""Return the class of this device, from component DEVICE_CLASSES."""
return DEVICE_CLASS
@property
def supported_features(self):
"""Flag supported features."""
return COVER_FEATURES
@property
def is_closed(self):
"""Return true if cover is closed, else False."""
return self.current_cover_position == CLOSED_POSITION
def update(self):
"""Poll the current state of the device."""
try:
self._state = self._bapi.getState(thingUri=self._thing_uri).get("thing")
self._available = True
except (TypeError, KeyError, NameError, ValueError) as ex:
_LOGGER.error("%s", ex)
self._available = False
def open_cover(self, **kwargs):
"""Set the cover to the open position."""
self._bapi.changeRequestPosition(OPEN_POSITION, thingUri=self._thing_uri)
def close_cover(self, **kwargs):
"""Set the cover to the closed position."""
self._bapi.changeRequestPosition(CLOSED_POSITION, thingUri=self._thing_uri)
def set_cover_position(self, **kwargs):
"""Set the cover to a specific position."""
self._bapi.changeRequestPosition(
kwargs[ATTR_POSITION], thingUri=self._thing_uri
)
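# Illustrative sketch (not part of the original module): the same BruntAPI
# calls used by the entity above can drive a shade directly, e.g. moving the
# first thing in the account to half open:
#
#   bapi = BruntAPI(username="you@example.com", password="secret")
#   uri = bapi.getThings()["things"][0]["thingUri"]
#   bapi.changeRequestPosition(50, thingUri=uri)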
|
apache-2.0
|
silentfuzzle/calibre
|
src/html5lib/constants.py
|
16
|
89683
|
from __future__ import absolute_import, division, unicode_literals
import string
import gettext
_ = gettext.gettext
EOF = None
E = {
"null-character":
_("Null character in input stream, replaced with U+FFFD."),
"invalid-codepoint":
_("Invalid codepoint in stream."),
"incorrectly-placed-solidus":
_("Solidus (/) incorrectly placed in tag."),
"incorrect-cr-newline-entity":
_("Incorrect CR newline entity, replaced with LF."),
"illegal-windows-1252-entity":
_("Entity used with illegal number (windows-1252 reference)."),
"cant-convert-numeric-entity":
_("Numeric entity couldn't be converted to character "
"(codepoint U+%(charAsInt)08x)."),
"illegal-codepoint-for-numeric-entity":
_("Numeric entity represents an illegal codepoint: "
"U+%(charAsInt)08x."),
"numeric-entity-without-semicolon":
_("Numeric entity didn't end with ';'."),
"expected-numeric-entity-but-got-eof":
_("Numeric entity expected. Got end of file instead."),
"expected-numeric-entity":
_("Numeric entity expected but none found."),
"named-entity-without-semicolon":
_("Named entity didn't end with ';'."),
"expected-named-entity":
_("Named entity expected. Got none."),
"attributes-in-end-tag":
_("End tag contains unexpected attributes."),
'self-closing-flag-on-end-tag':
_("End tag contains unexpected self-closing flag."),
"expected-tag-name-but-got-right-bracket":
_("Expected tag name. Got '>' instead."),
"expected-tag-name-but-got-question-mark":
_("Expected tag name. Got '?' instead. (HTML doesn't "
"support processing instructions.)"),
"expected-tag-name":
_("Expected tag name. Got something else instead"),
"expected-closing-tag-but-got-right-bracket":
_("Expected closing tag. Got '>' instead. Ignoring '</>'."),
"expected-closing-tag-but-got-eof":
_("Expected closing tag. Unexpected end of file."),
"expected-closing-tag-but-got-char":
_("Expected closing tag. Unexpected character '%(data)s' found."),
"eof-in-tag-name":
_("Unexpected end of file in the tag name."),
"expected-attribute-name-but-got-eof":
_("Unexpected end of file. Expected attribute name instead."),
"eof-in-attribute-name":
_("Unexpected end of file in attribute name."),
"invalid-character-in-attribute-name":
_("Invalid character in attribute name"),
"duplicate-attribute":
_("Dropped duplicate attribute on tag."),
"expected-end-of-tag-name-but-got-eof":
_("Unexpected end of file. Expected = or end of tag."),
"expected-attribute-value-but-got-eof":
_("Unexpected end of file. Expected attribute value."),
"expected-attribute-value-but-got-right-bracket":
_("Expected attribute value. Got '>' instead."),
'equals-in-unquoted-attribute-value':
_("Unexpected = in unquoted attribute"),
'unexpected-character-in-unquoted-attribute-value':
_("Unexpected character in unquoted attribute"),
"invalid-character-after-attribute-name":
_("Unexpected character after attribute name."),
"unexpected-character-after-attribute-value":
_("Unexpected character after attribute value."),
"eof-in-attribute-value-double-quote":
_("Unexpected end of file in attribute value (\")."),
"eof-in-attribute-value-single-quote":
_("Unexpected end of file in attribute value (')."),
"eof-in-attribute-value-no-quotes":
_("Unexpected end of file in attribute value."),
"unexpected-EOF-after-solidus-in-tag":
_("Unexpected end of file in tag. Expected >"),
"unexpected-character-after-solidus-in-tag":
_("Unexpected character after / in tag. Expected >"),
"expected-dashes-or-doctype":
_("Expected '--' or 'DOCTYPE'. Not found."),
"unexpected-bang-after-double-dash-in-comment":
_("Unexpected ! after -- in comment"),
"unexpected-space-after-double-dash-in-comment":
_("Unexpected space after -- in comment"),
"incorrect-comment":
_("Incorrect comment."),
"eof-in-comment":
_("Unexpected end of file in comment."),
"eof-in-comment-end-dash":
_("Unexpected end of file in comment (-)"),
"unexpected-dash-after-double-dash-in-comment":
_("Unexpected '-' after '--' found in comment."),
"eof-in-comment-double-dash":
_("Unexpected end of file in comment (--)."),
"eof-in-comment-end-space-state":
_("Unexpected end of file in comment."),
"eof-in-comment-end-bang-state":
_("Unexpected end of file in comment."),
"unexpected-char-in-comment":
_("Unexpected character in comment found."),
"need-space-after-doctype":
_("No space after literal string 'DOCTYPE'."),
"expected-doctype-name-but-got-right-bracket":
_("Unexpected > character. Expected DOCTYPE name."),
"expected-doctype-name-but-got-eof":
_("Unexpected end of file. Expected DOCTYPE name."),
"eof-in-doctype-name":
_("Unexpected end of file in DOCTYPE name."),
"eof-in-doctype":
_("Unexpected end of file in DOCTYPE."),
"expected-space-or-right-bracket-in-doctype":
_("Expected space or '>'. Got '%(data)s'"),
"unexpected-end-of-doctype":
_("Unexpected end of DOCTYPE."),
"unexpected-char-in-doctype":
_("Unexpected character in DOCTYPE."),
"eof-in-innerhtml":
_("XXX innerHTML EOF"),
"unexpected-doctype":
_("Unexpected DOCTYPE. Ignored."),
"non-html-root":
_("html needs to be the first start tag."),
"expected-doctype-but-got-eof":
_("Unexpected End of file. Expected DOCTYPE."),
"unknown-doctype":
_("Erroneous DOCTYPE."),
"expected-doctype-but-got-chars":
_("Unexpected non-space characters. Expected DOCTYPE."),
"expected-doctype-but-got-start-tag":
_("Unexpected start tag (%(name)s). Expected DOCTYPE."),
"expected-doctype-but-got-end-tag":
_("Unexpected end tag (%(name)s). Expected DOCTYPE."),
"end-tag-after-implied-root":
_("Unexpected end tag (%(name)s) after the (implied) root element."),
"expected-named-closing-tag-but-got-eof":
_("Unexpected end of file. Expected end tag (%(name)s)."),
"two-heads-are-not-better-than-one":
_("Unexpected start tag head in existing head. Ignored."),
"unexpected-end-tag":
_("Unexpected end tag (%(name)s). Ignored."),
"unexpected-start-tag-out-of-my-head":
_("Unexpected start tag (%(name)s) that can be in head. Moved."),
"unexpected-start-tag":
_("Unexpected start tag (%(name)s)."),
"missing-end-tag":
_("Missing end tag (%(name)s)."),
"missing-end-tags":
_("Missing end tags (%(name)s)."),
"unexpected-start-tag-implies-end-tag":
_("Unexpected start tag (%(startName)s) "
"implies end tag (%(endName)s)."),
"unexpected-start-tag-treated-as":
_("Unexpected start tag (%(originalName)s). Treated as %(newName)s."),
"deprecated-tag":
_("Unexpected start tag %(name)s. Don't use it!"),
"unexpected-start-tag-ignored":
_("Unexpected start tag %(name)s. Ignored."),
"expected-one-end-tag-but-got-another":
_("Unexpected end tag (%(gotName)s). "
"Missing end tag (%(expectedName)s)."),
"end-tag-too-early":
_("End tag (%(name)s) seen too early. Expected other end tag."),
"end-tag-too-early-named":
_("Unexpected end tag (%(gotName)s). Expected end tag (%(expectedName)s)."),
"end-tag-too-early-ignored":
_("End tag (%(name)s) seen too early. Ignored."),
"adoption-agency-1.1":
_("End tag (%(name)s) violates step 1, "
"paragraph 1 of the adoption agency algorithm."),
"adoption-agency-1.2":
_("End tag (%(name)s) violates step 1, "
"paragraph 2 of the adoption agency algorithm."),
"adoption-agency-1.3":
_("End tag (%(name)s) violates step 1, "
"paragraph 3 of the adoption agency algorithm."),
"adoption-agency-4.4":
_("End tag (%(name)s) violates step 4, "
"paragraph 4 of the adoption agency algorithm."),
"unexpected-end-tag-treated-as":
_("Unexpected end tag (%(originalName)s). Treated as %(newName)s."),
"no-end-tag":
_("This element (%(name)s) has no end tag."),
"unexpected-implied-end-tag-in-table":
_("Unexpected implied end tag (%(name)s) in the table phase."),
"unexpected-implied-end-tag-in-table-body":
_("Unexpected implied end tag (%(name)s) in the table body phase."),
"unexpected-char-implies-table-voodoo":
_("Unexpected non-space characters in "
"table context caused voodoo mode."),
"unexpected-hidden-input-in-table":
_("Unexpected input with type hidden in table context."),
"unexpected-form-in-table":
_("Unexpected form in table context."),
"unexpected-start-tag-implies-table-voodoo":
_("Unexpected start tag (%(name)s) in "
"table context caused voodoo mode."),
"unexpected-end-tag-implies-table-voodoo":
_("Unexpected end tag (%(name)s) in "
"table context caused voodoo mode."),
"unexpected-cell-in-table-body":
_("Unexpected table cell start tag (%(name)s) "
"in the table body phase."),
"unexpected-cell-end-tag":
_("Got table cell end tag (%(name)s) "
"while required end tags are missing."),
"unexpected-end-tag-in-table-body":
_("Unexpected end tag (%(name)s) in the table body phase. Ignored."),
"unexpected-implied-end-tag-in-table-row":
_("Unexpected implied end tag (%(name)s) in the table row phase."),
"unexpected-end-tag-in-table-row":
_("Unexpected end tag (%(name)s) in the table row phase. Ignored."),
"unexpected-select-in-select":
_("Unexpected select start tag in the select phase "
"treated as select end tag."),
"unexpected-input-in-select":
_("Unexpected input start tag in the select phase."),
"unexpected-start-tag-in-select":
_("Unexpected start tag token (%(name)s in the select phase. "
"Ignored."),
"unexpected-end-tag-in-select":
_("Unexpected end tag (%(name)s) in the select phase. Ignored."),
"unexpected-table-element-start-tag-in-select-in-table":
_("Unexpected table element start tag (%(name)s) in the select in table phase."),
"unexpected-table-element-end-tag-in-select-in-table":
_("Unexpected table element end tag (%(name)s) in the select in table phase."),
"unexpected-char-after-body":
_("Unexpected non-space characters in the after body phase."),
"unexpected-start-tag-after-body":
_("Unexpected start tag token (%(name)s)"
" in the after body phase."),
"unexpected-end-tag-after-body":
_("Unexpected end tag token (%(name)s)"
" in the after body phase."),
"unexpected-char-in-frameset":
_("Unexpected characters in the frameset phase. Characters ignored."),
"unexpected-start-tag-in-frameset":
_("Unexpected start tag token (%(name)s)"
" in the frameset phase. Ignored."),
"unexpected-frameset-in-frameset-innerhtml":
_("Unexpected end tag token (frameset) "
"in the frameset phase (innerHTML)."),
"unexpected-end-tag-in-frameset":
_("Unexpected end tag token (%(name)s)"
" in the frameset phase. Ignored."),
"unexpected-char-after-frameset":
_("Unexpected non-space characters in the "
"after frameset phase. Ignored."),
"unexpected-start-tag-after-frameset":
_("Unexpected start tag (%(name)s)"
" in the after frameset phase. Ignored."),
"unexpected-end-tag-after-frameset":
_("Unexpected end tag (%(name)s)"
" in the after frameset phase. Ignored."),
"unexpected-end-tag-after-body-innerhtml":
_("Unexpected end tag after body(innerHtml)"),
"expected-eof-but-got-char":
_("Unexpected non-space characters. Expected end of file."),
"expected-eof-but-got-start-tag":
_("Unexpected start tag (%(name)s)"
". Expected end of file."),
"expected-eof-but-got-end-tag":
_("Unexpected end tag (%(name)s)"
". Expected end of file."),
"eof-in-table":
_("Unexpected end of file. Expected table content."),
"eof-in-select":
_("Unexpected end of file. Expected select content."),
"eof-in-frameset":
_("Unexpected end of file. Expected frameset content."),
"eof-in-script-in-script":
_("Unexpected end of file. Expected script content."),
"eof-in-foreign-lands":
_("Unexpected end of file. Expected foreign content"),
"non-void-element-with-trailing-solidus":
_("Trailing solidus not allowed on element %(name)s"),
"unexpected-html-element-in-foreign-content":
_("Element %(name)s not allowed in a non-html context"),
"unexpected-end-tag-before-html":
_("Unexpected end tag (%(name)s) before html."),
"XXX-undefined-error":
_("Undefined error (this sucks and should be fixed)"),
}
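# Illustrative sketch (not part of the original module): each message is a
# %-style template whose named parameters are supplied when the error is
# reported, e.g.
#
#   E["expected-doctype-but-got-start-tag"] % {"name": "div"}
#   # -> "Unexpected start tag (div). Expected DOCTYPE."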
namespaces = {
"html": "http://www.w3.org/1999/xhtml",
"mathml": "http://www.w3.org/1998/Math/MathML",
"svg": "http://www.w3.org/2000/svg",
"xlink": "http://www.w3.org/1999/xlink",
"xml": "http://www.w3.org/XML/1998/namespace",
"xmlns": "http://www.w3.org/2000/xmlns/"
}
scopingElements = frozenset((
(namespaces["html"], "applet"),
(namespaces["html"], "caption"),
(namespaces["html"], "html"),
(namespaces["html"], "marquee"),
(namespaces["html"], "object"),
(namespaces["html"], "table"),
(namespaces["html"], "td"),
(namespaces["html"], "th"),
(namespaces["mathml"], "mi"),
(namespaces["mathml"], "mo"),
(namespaces["mathml"], "mn"),
(namespaces["mathml"], "ms"),
(namespaces["mathml"], "mtext"),
(namespaces["mathml"], "annotation-xml"),
(namespaces["svg"], "foreignObject"),
(namespaces["svg"], "desc"),
(namespaces["svg"], "title"),
))
formattingElements = frozenset((
(namespaces["html"], "a"),
(namespaces["html"], "b"),
(namespaces["html"], "big"),
(namespaces["html"], "code"),
(namespaces["html"], "em"),
(namespaces["html"], "font"),
(namespaces["html"], "i"),
(namespaces["html"], "nobr"),
(namespaces["html"], "s"),
(namespaces["html"], "small"),
(namespaces["html"], "strike"),
(namespaces["html"], "strong"),
(namespaces["html"], "tt"),
(namespaces["html"], "u")
))
specialElements = frozenset((
(namespaces["html"], "address"),
(namespaces["html"], "applet"),
(namespaces["html"], "area"),
(namespaces["html"], "article"),
(namespaces["html"], "aside"),
(namespaces["html"], "base"),
(namespaces["html"], "basefont"),
(namespaces["html"], "bgsound"),
(namespaces["html"], "blockquote"),
(namespaces["html"], "body"),
(namespaces["html"], "br"),
(namespaces["html"], "button"),
(namespaces["html"], "caption"),
(namespaces["html"], "center"),
(namespaces["html"], "col"),
(namespaces["html"], "colgroup"),
(namespaces["html"], "command"),
(namespaces["html"], "dd"),
(namespaces["html"], "details"),
(namespaces["html"], "dir"),
(namespaces["html"], "div"),
(namespaces["html"], "dl"),
(namespaces["html"], "dt"),
(namespaces["html"], "embed"),
(namespaces["html"], "fieldset"),
(namespaces["html"], "figure"),
(namespaces["html"], "footer"),
(namespaces["html"], "form"),
(namespaces["html"], "frame"),
(namespaces["html"], "frameset"),
(namespaces["html"], "h1"),
(namespaces["html"], "h2"),
(namespaces["html"], "h3"),
(namespaces["html"], "h4"),
(namespaces["html"], "h5"),
(namespaces["html"], "h6"),
(namespaces["html"], "head"),
(namespaces["html"], "header"),
(namespaces["html"], "hr"),
(namespaces["html"], "html"),
(namespaces["html"], "iframe"),
# Note that image is commented out in the spec as "this isn't an
# element that can end up on the stack, so it doesn't matter,"
(namespaces["html"], "image"),
(namespaces["html"], "img"),
(namespaces["html"], "input"),
(namespaces["html"], "isindex"),
(namespaces["html"], "li"),
(namespaces["html"], "link"),
(namespaces["html"], "listing"),
(namespaces["html"], "marquee"),
(namespaces["html"], "menu"),
(namespaces["html"], "meta"),
(namespaces["html"], "nav"),
(namespaces["html"], "noembed"),
(namespaces["html"], "noframes"),
(namespaces["html"], "noscript"),
(namespaces["html"], "object"),
(namespaces["html"], "ol"),
(namespaces["html"], "p"),
(namespaces["html"], "param"),
(namespaces["html"], "plaintext"),
(namespaces["html"], "pre"),
(namespaces["html"], "script"),
(namespaces["html"], "section"),
(namespaces["html"], "select"),
(namespaces["html"], "style"),
(namespaces["html"], "table"),
(namespaces["html"], "tbody"),
(namespaces["html"], "td"),
(namespaces["html"], "textarea"),
(namespaces["html"], "tfoot"),
(namespaces["html"], "th"),
(namespaces["html"], "thead"),
(namespaces["html"], "title"),
(namespaces["html"], "tr"),
(namespaces["html"], "ul"),
(namespaces["html"], "wbr"),
(namespaces["html"], "xmp"),
(namespaces["svg"], "foreignObject")
))
htmlIntegrationPointElements = frozenset((
(namespaces["mathml"], "annotaion-xml"),
(namespaces["svg"], "foreignObject"),
(namespaces["svg"], "desc"),
(namespaces["svg"], "title")
))
mathmlTextIntegrationPointElements = frozenset((
(namespaces["mathml"], "mi"),
(namespaces["mathml"], "mo"),
(namespaces["mathml"], "mn"),
(namespaces["mathml"], "ms"),
(namespaces["mathml"], "mtext")
))
adjustSVGAttributes = {
"attributename": "attributeName",
"attributetype": "attributeType",
"basefrequency": "baseFrequency",
"baseprofile": "baseProfile",
"calcmode": "calcMode",
"clippathunits": "clipPathUnits",
"contentscripttype": "contentScriptType",
"contentstyletype": "contentStyleType",
"diffuseconstant": "diffuseConstant",
"edgemode": "edgeMode",
"externalresourcesrequired": "externalResourcesRequired",
"filterres": "filterRes",
"filterunits": "filterUnits",
"glyphref": "glyphRef",
"gradienttransform": "gradientTransform",
"gradientunits": "gradientUnits",
"kernelmatrix": "kernelMatrix",
"kernelunitlength": "kernelUnitLength",
"keypoints": "keyPoints",
"keysplines": "keySplines",
"keytimes": "keyTimes",
"lengthadjust": "lengthAdjust",
"limitingconeangle": "limitingConeAngle",
"markerheight": "markerHeight",
"markerunits": "markerUnits",
"markerwidth": "markerWidth",
"maskcontentunits": "maskContentUnits",
"maskunits": "maskUnits",
"numoctaves": "numOctaves",
"pathlength": "pathLength",
"patterncontentunits": "patternContentUnits",
"patterntransform": "patternTransform",
"patternunits": "patternUnits",
"pointsatx": "pointsAtX",
"pointsaty": "pointsAtY",
"pointsatz": "pointsAtZ",
"preservealpha": "preserveAlpha",
"preserveaspectratio": "preserveAspectRatio",
"primitiveunits": "primitiveUnits",
"refx": "refX",
"refy": "refY",
"repeatcount": "repeatCount",
"repeatdur": "repeatDur",
"requiredextensions": "requiredExtensions",
"requiredfeatures": "requiredFeatures",
"specularconstant": "specularConstant",
"specularexponent": "specularExponent",
"spreadmethod": "spreadMethod",
"startoffset": "startOffset",
"stddeviation": "stdDeviation",
"stitchtiles": "stitchTiles",
"surfacescale": "surfaceScale",
"systemlanguage": "systemLanguage",
"tablevalues": "tableValues",
"targetx": "targetX",
"targety": "targetY",
"textlength": "textLength",
"viewbox": "viewBox",
"viewtarget": "viewTarget",
"xchannelselector": "xChannelSelector",
"ychannelselector": "yChannelSelector",
"zoomandpan": "zoomAndPan"
}
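# Illustrative sketch (not part of the original module): HTML tokenization
# lowercases attribute names, so a tree builder can use this table to restore
# the camelCase spellings SVG requires, e.g.
#
#   adjustSVGAttributes.get("viewbox", "viewbox")  # -> "viewBox"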
adjustMathMLAttributes = {"definitionurl": "definitionURL"}
adjustForeignAttributes = {
"xlink:actuate": ("xlink", "actuate", namespaces["xlink"]),
"xlink:arcrole": ("xlink", "arcrole", namespaces["xlink"]),
"xlink:href": ("xlink", "href", namespaces["xlink"]),
"xlink:role": ("xlink", "role", namespaces["xlink"]),
"xlink:show": ("xlink", "show", namespaces["xlink"]),
"xlink:title": ("xlink", "title", namespaces["xlink"]),
"xlink:type": ("xlink", "type", namespaces["xlink"]),
"xml:base": ("xml", "base", namespaces["xml"]),
"xml:lang": ("xml", "lang", namespaces["xml"]),
"xml:space": ("xml", "space", namespaces["xml"]),
"xmlns": (None, "xmlns", namespaces["xmlns"]),
"xmlns:xlink": ("xmlns", "xlink", namespaces["xmlns"])
}
unadjustForeignAttributes = dict([((ns, local), qname) for qname, (prefix, local, ns) in
adjustForeignAttributes.items()])
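# Illustrative sketch (not part of the original module): the two tables are
# exact inverses, mapping a qualified attribute name to its
# (prefix, local name, namespace) triple and back:
#
#   adjustForeignAttributes["xml:lang"]
#   # -> ("xml", "lang", namespaces["xml"])
#   unadjustForeignAttributes[(namespaces["xml"], "lang")]
#   # -> "xml:lang"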
spaceCharacters = frozenset((
"\t",
"\n",
"\u000C",
" ",
"\r"
))
tableInsertModeElements = frozenset((
"table",
"tbody",
"tfoot",
"thead",
"tr"
))
asciiLowercase = frozenset(string.ascii_lowercase)
asciiUppercase = frozenset(string.ascii_uppercase)
asciiLetters = frozenset(string.ascii_letters)
digits = frozenset(string.digits)
hexDigits = frozenset(string.hexdigits)
asciiUpper2Lower = dict([(ord(c), ord(c.lower()))
for c in string.ascii_uppercase])
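# Illustrative sketch (not part of the original module): asciiUpper2Lower is a
# str.translate() table that lowercases ASCII letters only, leaving every
# other character untouched (unlike str.lower()):
#
#   "VIEWBOX".translate(asciiUpper2Lower)  # -> "viewbox"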
# Heading elements need to be ordered
headingElements = (
"h1",
"h2",
"h3",
"h4",
"h5",
"h6"
)
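# Illustrative sketch (not part of the original module): because the tuple is
# ordered h1..h6, relative heading rank can be compared by index:
#
#   headingElements.index("h2") < headingElements.index("h4")  # -> True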
voidElements = frozenset((
"base",
"command",
"event-source",
"link",
"meta",
"hr",
"br",
"img",
"embed",
"param",
"area",
"col",
"input",
"source",
"track"
))
cdataElements = frozenset(('title', 'textarea'))
rcdataElements = frozenset((
'style',
'script',
'xmp',
'iframe',
'noembed',
'noframes',
'noscript'
))
booleanAttributes = {
"": frozenset(("irrelevant",)),
"style": frozenset(("scoped",)),
"img": frozenset(("ismap",)),
"audio": frozenset(("autoplay", "controls")),
"video": frozenset(("autoplay", "controls")),
"script": frozenset(("defer", "async")),
"details": frozenset(("open",)),
"datagrid": frozenset(("multiple", "disabled")),
"command": frozenset(("hidden", "disabled", "checked", "default")),
"hr": frozenset(("noshade")),
"menu": frozenset(("autosubmit",)),
"fieldset": frozenset(("disabled", "readonly")),
"option": frozenset(("disabled", "readonly", "selected")),
"optgroup": frozenset(("disabled", "readonly")),
"button": frozenset(("disabled", "autofocus")),
"input": frozenset(("disabled", "readonly", "required", "autofocus", "checked", "ismap")),
"select": frozenset(("disabled", "readonly", "autofocus", "multiple")),
"output": frozenset(("disabled", "readonly")),
}
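# Illustrative sketch (not part of the original module): a serializer can ask
# whether an attribute may be minimized by consulting the tag-specific set
# together with the global "" entry:
#
#   "open" in booleanAttributes.get("details", frozenset())  # -> True
#   "irrelevant" in booleanAttributes.get("", frozenset())   # -> True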
# entitiesWindows1252 has to be _ordered_ and needs to have an index. It
# therefore can't be a frozenset.
entitiesWindows1252 = (
8364, # 0x80 0x20AC EURO SIGN
65533, # 0x81 UNDEFINED
8218, # 0x82 0x201A SINGLE LOW-9 QUOTATION MARK
402, # 0x83 0x0192 LATIN SMALL LETTER F WITH HOOK
8222, # 0x84 0x201E DOUBLE LOW-9 QUOTATION MARK
8230, # 0x85 0x2026 HORIZONTAL ELLIPSIS
8224, # 0x86 0x2020 DAGGER
8225, # 0x87 0x2021 DOUBLE DAGGER
710, # 0x88 0x02C6 MODIFIER LETTER CIRCUMFLEX ACCENT
8240, # 0x89 0x2030 PER MILLE SIGN
352, # 0x8A 0x0160 LATIN CAPITAL LETTER S WITH CARON
8249, # 0x8B 0x2039 SINGLE LEFT-POINTING ANGLE QUOTATION MARK
338, # 0x8C 0x0152 LATIN CAPITAL LIGATURE OE
65533, # 0x8D UNDEFINED
381, # 0x8E 0x017D LATIN CAPITAL LETTER Z WITH CARON
65533, # 0x8F UNDEFINED
65533, # 0x90 UNDEFINED
8216, # 0x91 0x2018 LEFT SINGLE QUOTATION MARK
8217, # 0x92 0x2019 RIGHT SINGLE QUOTATION MARK
8220, # 0x93 0x201C LEFT DOUBLE QUOTATION MARK
8221, # 0x94 0x201D RIGHT DOUBLE QUOTATION MARK
8226, # 0x95 0x2022 BULLET
8211, # 0x96 0x2013 EN DASH
8212, # 0x97 0x2014 EM DASH
732, # 0x98 0x02DC SMALL TILDE
8482, # 0x99 0x2122 TRADE MARK SIGN
353, # 0x9A 0x0161 LATIN SMALL LETTER S WITH CARON
8250, # 0x9B 0x203A SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
339, # 0x9C 0x0153 LATIN SMALL LIGATURE OE
65533, # 0x9D UNDEFINED
382, # 0x9E 0x017E LATIN SMALL LETTER Z WITH CARON
376 # 0x9F 0x0178 LATIN CAPITAL LETTER Y WITH DIAERESIS
)
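# Illustrative sketch (not part of the original module): the tuple is indexed
# by (codepoint - 0x80) to remap C1-range numeric character references to
# their windows-1252 meanings, e.g. for &#x93;:
#
#   entitiesWindows1252[0x93 - 0x80]       # -> 8220
#   chr(entitiesWindows1252[0x93 - 0x80])  # -> "\u201c" (left double quote)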
xmlEntities = frozenset(('lt;', 'gt;', 'amp;', 'apos;', 'quot;'))
entities = {
"AElig": "\xc6",
"AElig;": "\xc6",
"AMP": "&",
"AMP;": "&",
"Aacute": "\xc1",
"Aacute;": "\xc1",
"Abreve;": "\u0102",
"Acirc": "\xc2",
"Acirc;": "\xc2",
"Acy;": "\u0410",
"Afr;": "\U0001d504",
"Agrave": "\xc0",
"Agrave;": "\xc0",
"Alpha;": "\u0391",
"Amacr;": "\u0100",
"And;": "\u2a53",
"Aogon;": "\u0104",
"Aopf;": "\U0001d538",
"ApplyFunction;": "\u2061",
"Aring": "\xc5",
"Aring;": "\xc5",
"Ascr;": "\U0001d49c",
"Assign;": "\u2254",
"Atilde": "\xc3",
"Atilde;": "\xc3",
"Auml": "\xc4",
"Auml;": "\xc4",
"Backslash;": "\u2216",
"Barv;": "\u2ae7",
"Barwed;": "\u2306",
"Bcy;": "\u0411",
"Because;": "\u2235",
"Bernoullis;": "\u212c",
"Beta;": "\u0392",
"Bfr;": "\U0001d505",
"Bopf;": "\U0001d539",
"Breve;": "\u02d8",
"Bscr;": "\u212c",
"Bumpeq;": "\u224e",
"CHcy;": "\u0427",
"COPY": "\xa9",
"COPY;": "\xa9",
"Cacute;": "\u0106",
"Cap;": "\u22d2",
"CapitalDifferentialD;": "\u2145",
"Cayleys;": "\u212d",
"Ccaron;": "\u010c",
"Ccedil": "\xc7",
"Ccedil;": "\xc7",
"Ccirc;": "\u0108",
"Cconint;": "\u2230",
"Cdot;": "\u010a",
"Cedilla;": "\xb8",
"CenterDot;": "\xb7",
"Cfr;": "\u212d",
"Chi;": "\u03a7",
"CircleDot;": "\u2299",
"CircleMinus;": "\u2296",
"CirclePlus;": "\u2295",
"CircleTimes;": "\u2297",
"ClockwiseContourIntegral;": "\u2232",
"CloseCurlyDoubleQuote;": "\u201d",
"CloseCurlyQuote;": "\u2019",
"Colon;": "\u2237",
"Colone;": "\u2a74",
"Congruent;": "\u2261",
"Conint;": "\u222f",
"ContourIntegral;": "\u222e",
"Copf;": "\u2102",
"Coproduct;": "\u2210",
"CounterClockwiseContourIntegral;": "\u2233",
"Cross;": "\u2a2f",
"Cscr;": "\U0001d49e",
"Cup;": "\u22d3",
"CupCap;": "\u224d",
"DD;": "\u2145",
"DDotrahd;": "\u2911",
"DJcy;": "\u0402",
"DScy;": "\u0405",
"DZcy;": "\u040f",
"Dagger;": "\u2021",
"Darr;": "\u21a1",
"Dashv;": "\u2ae4",
"Dcaron;": "\u010e",
"Dcy;": "\u0414",
"Del;": "\u2207",
"Delta;": "\u0394",
"Dfr;": "\U0001d507",
"DiacriticalAcute;": "\xb4",
"DiacriticalDot;": "\u02d9",
"DiacriticalDoubleAcute;": "\u02dd",
"DiacriticalGrave;": "`",
"DiacriticalTilde;": "\u02dc",
"Diamond;": "\u22c4",
"DifferentialD;": "\u2146",
"Dopf;": "\U0001d53b",
"Dot;": "\xa8",
"DotDot;": "\u20dc",
"DotEqual;": "\u2250",
"DoubleContourIntegral;": "\u222f",
"DoubleDot;": "\xa8",
"DoubleDownArrow;": "\u21d3",
"DoubleLeftArrow;": "\u21d0",
"DoubleLeftRightArrow;": "\u21d4",
"DoubleLeftTee;": "\u2ae4",
"DoubleLongLeftArrow;": "\u27f8",
"DoubleLongLeftRightArrow;": "\u27fa",
"DoubleLongRightArrow;": "\u27f9",
"DoubleRightArrow;": "\u21d2",
"DoubleRightTee;": "\u22a8",
"DoubleUpArrow;": "\u21d1",
"DoubleUpDownArrow;": "\u21d5",
"DoubleVerticalBar;": "\u2225",
"DownArrow;": "\u2193",
"DownArrowBar;": "\u2913",
"DownArrowUpArrow;": "\u21f5",
"DownBreve;": "\u0311",
"DownLeftRightVector;": "\u2950",
"DownLeftTeeVector;": "\u295e",
"DownLeftVector;": "\u21bd",
"DownLeftVectorBar;": "\u2956",
"DownRightTeeVector;": "\u295f",
"DownRightVector;": "\u21c1",
"DownRightVectorBar;": "\u2957",
"DownTee;": "\u22a4",
"DownTeeArrow;": "\u21a7",
"Downarrow;": "\u21d3",
"Dscr;": "\U0001d49f",
"Dstrok;": "\u0110",
"ENG;": "\u014a",
"ETH": "\xd0",
"ETH;": "\xd0",
"Eacute": "\xc9",
"Eacute;": "\xc9",
"Ecaron;": "\u011a",
"Ecirc": "\xca",
"Ecirc;": "\xca",
"Ecy;": "\u042d",
"Edot;": "\u0116",
"Efr;": "\U0001d508",
"Egrave": "\xc8",
"Egrave;": "\xc8",
"Element;": "\u2208",
"Emacr;": "\u0112",
"EmptySmallSquare;": "\u25fb",
"EmptyVerySmallSquare;": "\u25ab",
"Eogon;": "\u0118",
"Eopf;": "\U0001d53c",
"Epsilon;": "\u0395",
"Equal;": "\u2a75",
"EqualTilde;": "\u2242",
"Equilibrium;": "\u21cc",
"Escr;": "\u2130",
"Esim;": "\u2a73",
"Eta;": "\u0397",
"Euml": "\xcb",
"Euml;": "\xcb",
"Exists;": "\u2203",
"ExponentialE;": "\u2147",
"Fcy;": "\u0424",
"Ffr;": "\U0001d509",
"FilledSmallSquare;": "\u25fc",
"FilledVerySmallSquare;": "\u25aa",
"Fopf;": "\U0001d53d",
"ForAll;": "\u2200",
"Fouriertrf;": "\u2131",
"Fscr;": "\u2131",
"GJcy;": "\u0403",
"GT": ">",
"GT;": ">",
"Gamma;": "\u0393",
"Gammad;": "\u03dc",
"Gbreve;": "\u011e",
"Gcedil;": "\u0122",
"Gcirc;": "\u011c",
"Gcy;": "\u0413",
"Gdot;": "\u0120",
"Gfr;": "\U0001d50a",
"Gg;": "\u22d9",
"Gopf;": "\U0001d53e",
"GreaterEqual;": "\u2265",
"GreaterEqualLess;": "\u22db",
"GreaterFullEqual;": "\u2267",
"GreaterGreater;": "\u2aa2",
"GreaterLess;": "\u2277",
"GreaterSlantEqual;": "\u2a7e",
"GreaterTilde;": "\u2273",
"Gscr;": "\U0001d4a2",
"Gt;": "\u226b",
"HARDcy;": "\u042a",
"Hacek;": "\u02c7",
"Hat;": "^",
"Hcirc;": "\u0124",
"Hfr;": "\u210c",
"HilbertSpace;": "\u210b",
"Hopf;": "\u210d",
"HorizontalLine;": "\u2500",
"Hscr;": "\u210b",
"Hstrok;": "\u0126",
"HumpDownHump;": "\u224e",
"HumpEqual;": "\u224f",
"IEcy;": "\u0415",
"IJlig;": "\u0132",
"IOcy;": "\u0401",
"Iacute": "\xcd",
"Iacute;": "\xcd",
"Icirc": "\xce",
"Icirc;": "\xce",
"Icy;": "\u0418",
"Idot;": "\u0130",
"Ifr;": "\u2111",
"Igrave": "\xcc",
"Igrave;": "\xcc",
"Im;": "\u2111",
"Imacr;": "\u012a",
"ImaginaryI;": "\u2148",
"Implies;": "\u21d2",
"Int;": "\u222c",
"Integral;": "\u222b",
"Intersection;": "\u22c2",
"InvisibleComma;": "\u2063",
"InvisibleTimes;": "\u2062",
"Iogon;": "\u012e",
"Iopf;": "\U0001d540",
"Iota;": "\u0399",
"Iscr;": "\u2110",
"Itilde;": "\u0128",
"Iukcy;": "\u0406",
"Iuml": "\xcf",
"Iuml;": "\xcf",
"Jcirc;": "\u0134",
"Jcy;": "\u0419",
"Jfr;": "\U0001d50d",
"Jopf;": "\U0001d541",
"Jscr;": "\U0001d4a5",
"Jsercy;": "\u0408",
"Jukcy;": "\u0404",
"KHcy;": "\u0425",
"KJcy;": "\u040c",
"Kappa;": "\u039a",
"Kcedil;": "\u0136",
"Kcy;": "\u041a",
"Kfr;": "\U0001d50e",
"Kopf;": "\U0001d542",
"Kscr;": "\U0001d4a6",
"LJcy;": "\u0409",
"LT": "<",
"LT;": "<",
"Lacute;": "\u0139",
"Lambda;": "\u039b",
"Lang;": "\u27ea",
"Laplacetrf;": "\u2112",
"Larr;": "\u219e",
"Lcaron;": "\u013d",
"Lcedil;": "\u013b",
"Lcy;": "\u041b",
"LeftAngleBracket;": "\u27e8",
"LeftArrow;": "\u2190",
"LeftArrowBar;": "\u21e4",
"LeftArrowRightArrow;": "\u21c6",
"LeftCeiling;": "\u2308",
"LeftDoubleBracket;": "\u27e6",
"LeftDownTeeVector;": "\u2961",
"LeftDownVector;": "\u21c3",
"LeftDownVectorBar;": "\u2959",
"LeftFloor;": "\u230a",
"LeftRightArrow;": "\u2194",
"LeftRightVector;": "\u294e",
"LeftTee;": "\u22a3",
"LeftTeeArrow;": "\u21a4",
"LeftTeeVector;": "\u295a",
"LeftTriangle;": "\u22b2",
"LeftTriangleBar;": "\u29cf",
"LeftTriangleEqual;": "\u22b4",
"LeftUpDownVector;": "\u2951",
"LeftUpTeeVector;": "\u2960",
"LeftUpVector;": "\u21bf",
"LeftUpVectorBar;": "\u2958",
"LeftVector;": "\u21bc",
"LeftVectorBar;": "\u2952",
"Leftarrow;": "\u21d0",
"Leftrightarrow;": "\u21d4",
"LessEqualGreater;": "\u22da",
"LessFullEqual;": "\u2266",
"LessGreater;": "\u2276",
"LessLess;": "\u2aa1",
"LessSlantEqual;": "\u2a7d",
"LessTilde;": "\u2272",
"Lfr;": "\U0001d50f",
"Ll;": "\u22d8",
"Lleftarrow;": "\u21da",
"Lmidot;": "\u013f",
"LongLeftArrow;": "\u27f5",
"LongLeftRightArrow;": "\u27f7",
"LongRightArrow;": "\u27f6",
"Longleftarrow;": "\u27f8",
"Longleftrightarrow;": "\u27fa",
"Longrightarrow;": "\u27f9",
"Lopf;": "\U0001d543",
"LowerLeftArrow;": "\u2199",
"LowerRightArrow;": "\u2198",
"Lscr;": "\u2112",
"Lsh;": "\u21b0",
"Lstrok;": "\u0141",
"Lt;": "\u226a",
"Map;": "\u2905",
"Mcy;": "\u041c",
"MediumSpace;": "\u205f",
"Mellintrf;": "\u2133",
"Mfr;": "\U0001d510",
"MinusPlus;": "\u2213",
"Mopf;": "\U0001d544",
"Mscr;": "\u2133",
"Mu;": "\u039c",
"NJcy;": "\u040a",
"Nacute;": "\u0143",
"Ncaron;": "\u0147",
"Ncedil;": "\u0145",
"Ncy;": "\u041d",
"NegativeMediumSpace;": "\u200b",
"NegativeThickSpace;": "\u200b",
"NegativeThinSpace;": "\u200b",
"NegativeVeryThinSpace;": "\u200b",
"NestedGreaterGreater;": "\u226b",
"NestedLessLess;": "\u226a",
"NewLine;": "\n",
"Nfr;": "\U0001d511",
"NoBreak;": "\u2060",
"NonBreakingSpace;": "\xa0",
"Nopf;": "\u2115",
"Not;": "\u2aec",
"NotCongruent;": "\u2262",
"NotCupCap;": "\u226d",
"NotDoubleVerticalBar;": "\u2226",
"NotElement;": "\u2209",
"NotEqual;": "\u2260",
"NotEqualTilde;": "\u2242\u0338",
"NotExists;": "\u2204",
"NotGreater;": "\u226f",
"NotGreaterEqual;": "\u2271",
"NotGreaterFullEqual;": "\u2267\u0338",
"NotGreaterGreater;": "\u226b\u0338",
"NotGreaterLess;": "\u2279",
"NotGreaterSlantEqual;": "\u2a7e\u0338",
"NotGreaterTilde;": "\u2275",
"NotHumpDownHump;": "\u224e\u0338",
"NotHumpEqual;": "\u224f\u0338",
"NotLeftTriangle;": "\u22ea",
"NotLeftTriangleBar;": "\u29cf\u0338",
"NotLeftTriangleEqual;": "\u22ec",
"NotLess;": "\u226e",
"NotLessEqual;": "\u2270",
"NotLessGreater;": "\u2278",
"NotLessLess;": "\u226a\u0338",
"NotLessSlantEqual;": "\u2a7d\u0338",
"NotLessTilde;": "\u2274",
"NotNestedGreaterGreater;": "\u2aa2\u0338",
"NotNestedLessLess;": "\u2aa1\u0338",
"NotPrecedes;": "\u2280",
"NotPrecedesEqual;": "\u2aaf\u0338",
"NotPrecedesSlantEqual;": "\u22e0",
"NotReverseElement;": "\u220c",
"NotRightTriangle;": "\u22eb",
"NotRightTriangleBar;": "\u29d0\u0338",
"NotRightTriangleEqual;": "\u22ed",
"NotSquareSubset;": "\u228f\u0338",
"NotSquareSubsetEqual;": "\u22e2",
"NotSquareSuperset;": "\u2290\u0338",
"NotSquareSupersetEqual;": "\u22e3",
"NotSubset;": "\u2282\u20d2",
"NotSubsetEqual;": "\u2288",
"NotSucceeds;": "\u2281",
"NotSucceedsEqual;": "\u2ab0\u0338",
"NotSucceedsSlantEqual;": "\u22e1",
"NotSucceedsTilde;": "\u227f\u0338",
"NotSuperset;": "\u2283\u20d2",
"NotSupersetEqual;": "\u2289",
"NotTilde;": "\u2241",
"NotTildeEqual;": "\u2244",
"NotTildeFullEqual;": "\u2247",
"NotTildeTilde;": "\u2249",
"NotVerticalBar;": "\u2224",
"Nscr;": "\U0001d4a9",
"Ntilde": "\xd1",
"Ntilde;": "\xd1",
"Nu;": "\u039d",
"OElig;": "\u0152",
"Oacute": "\xd3",
"Oacute;": "\xd3",
"Ocirc": "\xd4",
"Ocirc;": "\xd4",
"Ocy;": "\u041e",
"Odblac;": "\u0150",
"Ofr;": "\U0001d512",
"Ograve": "\xd2",
"Ograve;": "\xd2",
"Omacr;": "\u014c",
"Omega;": "\u03a9",
"Omicron;": "\u039f",
"Oopf;": "\U0001d546",
"OpenCurlyDoubleQuote;": "\u201c",
"OpenCurlyQuote;": "\u2018",
"Or;": "\u2a54",
"Oscr;": "\U0001d4aa",
"Oslash": "\xd8",
"Oslash;": "\xd8",
"Otilde": "\xd5",
"Otilde;": "\xd5",
"Otimes;": "\u2a37",
"Ouml": "\xd6",
"Ouml;": "\xd6",
"OverBar;": "\u203e",
"OverBrace;": "\u23de",
"OverBracket;": "\u23b4",
"OverParenthesis;": "\u23dc",
"PartialD;": "\u2202",
"Pcy;": "\u041f",
"Pfr;": "\U0001d513",
"Phi;": "\u03a6",
"Pi;": "\u03a0",
"PlusMinus;": "\xb1",
"Poincareplane;": "\u210c",
"Popf;": "\u2119",
"Pr;": "\u2abb",
"Precedes;": "\u227a",
"PrecedesEqual;": "\u2aaf",
"PrecedesSlantEqual;": "\u227c",
"PrecedesTilde;": "\u227e",
"Prime;": "\u2033",
"Product;": "\u220f",
"Proportion;": "\u2237",
"Proportional;": "\u221d",
"Pscr;": "\U0001d4ab",
"Psi;": "\u03a8",
"QUOT": "\"",
"QUOT;": "\"",
"Qfr;": "\U0001d514",
"Qopf;": "\u211a",
"Qscr;": "\U0001d4ac",
"RBarr;": "\u2910",
"REG": "\xae",
"REG;": "\xae",
"Racute;": "\u0154",
"Rang;": "\u27eb",
"Rarr;": "\u21a0",
"Rarrtl;": "\u2916",
"Rcaron;": "\u0158",
"Rcedil;": "\u0156",
"Rcy;": "\u0420",
"Re;": "\u211c",
"ReverseElement;": "\u220b",
"ReverseEquilibrium;": "\u21cb",
"ReverseUpEquilibrium;": "\u296f",
"Rfr;": "\u211c",
"Rho;": "\u03a1",
"RightAngleBracket;": "\u27e9",
"RightArrow;": "\u2192",
"RightArrowBar;": "\u21e5",
"RightArrowLeftArrow;": "\u21c4",
"RightCeiling;": "\u2309",
"RightDoubleBracket;": "\u27e7",
"RightDownTeeVector;": "\u295d",
"RightDownVector;": "\u21c2",
"RightDownVectorBar;": "\u2955",
"RightFloor;": "\u230b",
"RightTee;": "\u22a2",
"RightTeeArrow;": "\u21a6",
"RightTeeVector;": "\u295b",
"RightTriangle;": "\u22b3",
"RightTriangleBar;": "\u29d0",
"RightTriangleEqual;": "\u22b5",
"RightUpDownVector;": "\u294f",
"RightUpTeeVector;": "\u295c",
"RightUpVector;": "\u21be",
"RightUpVectorBar;": "\u2954",
"RightVector;": "\u21c0",
"RightVectorBar;": "\u2953",
"Rightarrow;": "\u21d2",
"Ropf;": "\u211d",
"RoundImplies;": "\u2970",
"Rrightarrow;": "\u21db",
"Rscr;": "\u211b",
"Rsh;": "\u21b1",
"RuleDelayed;": "\u29f4",
"SHCHcy;": "\u0429",
"SHcy;": "\u0428",
"SOFTcy;": "\u042c",
"Sacute;": "\u015a",
"Sc;": "\u2abc",
"Scaron;": "\u0160",
"Scedil;": "\u015e",
"Scirc;": "\u015c",
"Scy;": "\u0421",
"Sfr;": "\U0001d516",
"ShortDownArrow;": "\u2193",
"ShortLeftArrow;": "\u2190",
"ShortRightArrow;": "\u2192",
"ShortUpArrow;": "\u2191",
"Sigma;": "\u03a3",
"SmallCircle;": "\u2218",
"Sopf;": "\U0001d54a",
"Sqrt;": "\u221a",
"Square;": "\u25a1",
"SquareIntersection;": "\u2293",
"SquareSubset;": "\u228f",
"SquareSubsetEqual;": "\u2291",
"SquareSuperset;": "\u2290",
"SquareSupersetEqual;": "\u2292",
"SquareUnion;": "\u2294",
"Sscr;": "\U0001d4ae",
"Star;": "\u22c6",
"Sub;": "\u22d0",
"Subset;": "\u22d0",
"SubsetEqual;": "\u2286",
"Succeeds;": "\u227b",
"SucceedsEqual;": "\u2ab0",
"SucceedsSlantEqual;": "\u227d",
"SucceedsTilde;": "\u227f",
"SuchThat;": "\u220b",
"Sum;": "\u2211",
"Sup;": "\u22d1",
"Superset;": "\u2283",
"SupersetEqual;": "\u2287",
"Supset;": "\u22d1",
"THORN": "\xde",
"THORN;": "\xde",
"TRADE;": "\u2122",
"TSHcy;": "\u040b",
"TScy;": "\u0426",
"Tab;": "\t",
"Tau;": "\u03a4",
"Tcaron;": "\u0164",
"Tcedil;": "\u0162",
"Tcy;": "\u0422",
"Tfr;": "\U0001d517",
"Therefore;": "\u2234",
"Theta;": "\u0398",
"ThickSpace;": "\u205f\u200a",
"ThinSpace;": "\u2009",
"Tilde;": "\u223c",
"TildeEqual;": "\u2243",
"TildeFullEqual;": "\u2245",
"TildeTilde;": "\u2248",
"Topf;": "\U0001d54b",
"TripleDot;": "\u20db",
"Tscr;": "\U0001d4af",
"Tstrok;": "\u0166",
"Uacute": "\xda",
"Uacute;": "\xda",
"Uarr;": "\u219f",
"Uarrocir;": "\u2949",
"Ubrcy;": "\u040e",
"Ubreve;": "\u016c",
"Ucirc": "\xdb",
"Ucirc;": "\xdb",
"Ucy;": "\u0423",
"Udblac;": "\u0170",
"Ufr;": "\U0001d518",
"Ugrave": "\xd9",
"Ugrave;": "\xd9",
"Umacr;": "\u016a",
"UnderBar;": "_",
"UnderBrace;": "\u23df",
"UnderBracket;": "\u23b5",
"UnderParenthesis;": "\u23dd",
"Union;": "\u22c3",
"UnionPlus;": "\u228e",
"Uogon;": "\u0172",
"Uopf;": "\U0001d54c",
"UpArrow;": "\u2191",
"UpArrowBar;": "\u2912",
"UpArrowDownArrow;": "\u21c5",
"UpDownArrow;": "\u2195",
"UpEquilibrium;": "\u296e",
"UpTee;": "\u22a5",
"UpTeeArrow;": "\u21a5",
"Uparrow;": "\u21d1",
"Updownarrow;": "\u21d5",
"UpperLeftArrow;": "\u2196",
"UpperRightArrow;": "\u2197",
"Upsi;": "\u03d2",
"Upsilon;": "\u03a5",
"Uring;": "\u016e",
"Uscr;": "\U0001d4b0",
"Utilde;": "\u0168",
"Uuml": "\xdc",
"Uuml;": "\xdc",
"VDash;": "\u22ab",
"Vbar;": "\u2aeb",
"Vcy;": "\u0412",
"Vdash;": "\u22a9",
"Vdashl;": "\u2ae6",
"Vee;": "\u22c1",
"Verbar;": "\u2016",
"Vert;": "\u2016",
"VerticalBar;": "\u2223",
"VerticalLine;": "|",
"VerticalSeparator;": "\u2758",
"VerticalTilde;": "\u2240",
"VeryThinSpace;": "\u200a",
"Vfr;": "\U0001d519",
"Vopf;": "\U0001d54d",
"Vscr;": "\U0001d4b1",
"Vvdash;": "\u22aa",
"Wcirc;": "\u0174",
"Wedge;": "\u22c0",
"Wfr;": "\U0001d51a",
"Wopf;": "\U0001d54e",
"Wscr;": "\U0001d4b2",
"Xfr;": "\U0001d51b",
"Xi;": "\u039e",
"Xopf;": "\U0001d54f",
"Xscr;": "\U0001d4b3",
"YAcy;": "\u042f",
"YIcy;": "\u0407",
"YUcy;": "\u042e",
"Yacute": "\xdd",
"Yacute;": "\xdd",
"Ycirc;": "\u0176",
"Ycy;": "\u042b",
"Yfr;": "\U0001d51c",
"Yopf;": "\U0001d550",
"Yscr;": "\U0001d4b4",
"Yuml;": "\u0178",
"ZHcy;": "\u0416",
"Zacute;": "\u0179",
"Zcaron;": "\u017d",
"Zcy;": "\u0417",
"Zdot;": "\u017b",
"ZeroWidthSpace;": "\u200b",
"Zeta;": "\u0396",
"Zfr;": "\u2128",
"Zopf;": "\u2124",
"Zscr;": "\U0001d4b5",
"aacute": "\xe1",
"aacute;": "\xe1",
"abreve;": "\u0103",
"ac;": "\u223e",
"acE;": "\u223e\u0333",
"acd;": "\u223f",
"acirc": "\xe2",
"acirc;": "\xe2",
"acute": "\xb4",
"acute;": "\xb4",
"acy;": "\u0430",
"aelig": "\xe6",
"aelig;": "\xe6",
"af;": "\u2061",
"afr;": "\U0001d51e",
"agrave": "\xe0",
"agrave;": "\xe0",
"alefsym;": "\u2135",
"aleph;": "\u2135",
"alpha;": "\u03b1",
"amacr;": "\u0101",
"amalg;": "\u2a3f",
"amp": "&",
"amp;": "&",
"and;": "\u2227",
"andand;": "\u2a55",
"andd;": "\u2a5c",
"andslope;": "\u2a58",
"andv;": "\u2a5a",
"ang;": "\u2220",
"ange;": "\u29a4",
"angle;": "\u2220",
"angmsd;": "\u2221",
"angmsdaa;": "\u29a8",
"angmsdab;": "\u29a9",
"angmsdac;": "\u29aa",
"angmsdad;": "\u29ab",
"angmsdae;": "\u29ac",
"angmsdaf;": "\u29ad",
"angmsdag;": "\u29ae",
"angmsdah;": "\u29af",
"angrt;": "\u221f",
"angrtvb;": "\u22be",
"angrtvbd;": "\u299d",
"angsph;": "\u2222",
"angst;": "\xc5",
"angzarr;": "\u237c",
"aogon;": "\u0105",
"aopf;": "\U0001d552",
"ap;": "\u2248",
"apE;": "\u2a70",
"apacir;": "\u2a6f",
"ape;": "\u224a",
"apid;": "\u224b",
"apos;": "'",
"approx;": "\u2248",
"approxeq;": "\u224a",
"aring": "\xe5",
"aring;": "\xe5",
"ascr;": "\U0001d4b6",
"ast;": "*",
"asymp;": "\u2248",
"asympeq;": "\u224d",
"atilde": "\xe3",
"atilde;": "\xe3",
"auml": "\xe4",
"auml;": "\xe4",
"awconint;": "\u2233",
"awint;": "\u2a11",
"bNot;": "\u2aed",
"backcong;": "\u224c",
"backepsilon;": "\u03f6",
"backprime;": "\u2035",
"backsim;": "\u223d",
"backsimeq;": "\u22cd",
"barvee;": "\u22bd",
"barwed;": "\u2305",
"barwedge;": "\u2305",
"bbrk;": "\u23b5",
"bbrktbrk;": "\u23b6",
"bcong;": "\u224c",
"bcy;": "\u0431",
"bdquo;": "\u201e",
"becaus;": "\u2235",
"because;": "\u2235",
"bemptyv;": "\u29b0",
"bepsi;": "\u03f6",
"bernou;": "\u212c",
"beta;": "\u03b2",
"beth;": "\u2136",
"between;": "\u226c",
"bfr;": "\U0001d51f",
"bigcap;": "\u22c2",
"bigcirc;": "\u25ef",
"bigcup;": "\u22c3",
"bigodot;": "\u2a00",
"bigoplus;": "\u2a01",
"bigotimes;": "\u2a02",
"bigsqcup;": "\u2a06",
"bigstar;": "\u2605",
"bigtriangledown;": "\u25bd",
"bigtriangleup;": "\u25b3",
"biguplus;": "\u2a04",
"bigvee;": "\u22c1",
"bigwedge;": "\u22c0",
"bkarow;": "\u290d",
"blacklozenge;": "\u29eb",
"blacksquare;": "\u25aa",
"blacktriangle;": "\u25b4",
"blacktriangledown;": "\u25be",
"blacktriangleleft;": "\u25c2",
"blacktriangleright;": "\u25b8",
"blank;": "\u2423",
"blk12;": "\u2592",
"blk14;": "\u2591",
"blk34;": "\u2593",
"block;": "\u2588",
"bne;": "=\u20e5",
"bnequiv;": "\u2261\u20e5",
"bnot;": "\u2310",
"bopf;": "\U0001d553",
"bot;": "\u22a5",
"bottom;": "\u22a5",
"bowtie;": "\u22c8",
"boxDL;": "\u2557",
"boxDR;": "\u2554",
"boxDl;": "\u2556",
"boxDr;": "\u2553",
"boxH;": "\u2550",
"boxHD;": "\u2566",
"boxHU;": "\u2569",
"boxHd;": "\u2564",
"boxHu;": "\u2567",
"boxUL;": "\u255d",
"boxUR;": "\u255a",
"boxUl;": "\u255c",
"boxUr;": "\u2559",
"boxV;": "\u2551",
"boxVH;": "\u256c",
"boxVL;": "\u2563",
"boxVR;": "\u2560",
"boxVh;": "\u256b",
"boxVl;": "\u2562",
"boxVr;": "\u255f",
"boxbox;": "\u29c9",
"boxdL;": "\u2555",
"boxdR;": "\u2552",
"boxdl;": "\u2510",
"boxdr;": "\u250c",
"boxh;": "\u2500",
"boxhD;": "\u2565",
"boxhU;": "\u2568",
"boxhd;": "\u252c",
"boxhu;": "\u2534",
"boxminus;": "\u229f",
"boxplus;": "\u229e",
"boxtimes;": "\u22a0",
"boxuL;": "\u255b",
"boxuR;": "\u2558",
"boxul;": "\u2518",
"boxur;": "\u2514",
"boxv;": "\u2502",
"boxvH;": "\u256a",
"boxvL;": "\u2561",
"boxvR;": "\u255e",
"boxvh;": "\u253c",
"boxvl;": "\u2524",
"boxvr;": "\u251c",
"bprime;": "\u2035",
"breve;": "\u02d8",
"brvbar": "\xa6",
"brvbar;": "\xa6",
"bscr;": "\U0001d4b7",
"bsemi;": "\u204f",
"bsim;": "\u223d",
"bsime;": "\u22cd",
"bsol;": "\\",
"bsolb;": "\u29c5",
"bsolhsub;": "\u27c8",
"bull;": "\u2022",
"bullet;": "\u2022",
"bump;": "\u224e",
"bumpE;": "\u2aae",
"bumpe;": "\u224f",
"bumpeq;": "\u224f",
"cacute;": "\u0107",
"cap;": "\u2229",
"capand;": "\u2a44",
"capbrcup;": "\u2a49",
"capcap;": "\u2a4b",
"capcup;": "\u2a47",
"capdot;": "\u2a40",
"caps;": "\u2229\ufe00",
"caret;": "\u2041",
"caron;": "\u02c7",
"ccaps;": "\u2a4d",
"ccaron;": "\u010d",
"ccedil": "\xe7",
"ccedil;": "\xe7",
"ccirc;": "\u0109",
"ccups;": "\u2a4c",
"ccupssm;": "\u2a50",
"cdot;": "\u010b",
"cedil": "\xb8",
"cedil;": "\xb8",
"cemptyv;": "\u29b2",
"cent": "\xa2",
"cent;": "\xa2",
"centerdot;": "\xb7",
"cfr;": "\U0001d520",
"chcy;": "\u0447",
"check;": "\u2713",
"checkmark;": "\u2713",
"chi;": "\u03c7",
"cir;": "\u25cb",
"cirE;": "\u29c3",
"circ;": "\u02c6",
"circeq;": "\u2257",
"circlearrowleft;": "\u21ba",
"circlearrowright;": "\u21bb",
"circledR;": "\xae",
"circledS;": "\u24c8",
"circledast;": "\u229b",
"circledcirc;": "\u229a",
"circleddash;": "\u229d",
"cire;": "\u2257",
"cirfnint;": "\u2a10",
"cirmid;": "\u2aef",
"cirscir;": "\u29c2",
"clubs;": "\u2663",
"clubsuit;": "\u2663",
"colon;": ":",
"colone;": "\u2254",
"coloneq;": "\u2254",
"comma;": ",",
"commat;": "@",
"comp;": "\u2201",
"compfn;": "\u2218",
"complement;": "\u2201",
"complexes;": "\u2102",
"cong;": "\u2245",
"congdot;": "\u2a6d",
"conint;": "\u222e",
"copf;": "\U0001d554",
"coprod;": "\u2210",
"copy": "\xa9",
"copy;": "\xa9",
"copysr;": "\u2117",
"crarr;": "\u21b5",
"cross;": "\u2717",
"cscr;": "\U0001d4b8",
"csub;": "\u2acf",
"csube;": "\u2ad1",
"csup;": "\u2ad0",
"csupe;": "\u2ad2",
"ctdot;": "\u22ef",
"cudarrl;": "\u2938",
"cudarrr;": "\u2935",
"cuepr;": "\u22de",
"cuesc;": "\u22df",
"cularr;": "\u21b6",
"cularrp;": "\u293d",
"cup;": "\u222a",
"cupbrcap;": "\u2a48",
"cupcap;": "\u2a46",
"cupcup;": "\u2a4a",
"cupdot;": "\u228d",
"cupor;": "\u2a45",
"cups;": "\u222a\ufe00",
"curarr;": "\u21b7",
"curarrm;": "\u293c",
"curlyeqprec;": "\u22de",
"curlyeqsucc;": "\u22df",
"curlyvee;": "\u22ce",
"curlywedge;": "\u22cf",
"curren": "\xa4",
"curren;": "\xa4",
"curvearrowleft;": "\u21b6",
"curvearrowright;": "\u21b7",
"cuvee;": "\u22ce",
"cuwed;": "\u22cf",
"cwconint;": "\u2232",
"cwint;": "\u2231",
"cylcty;": "\u232d",
"dArr;": "\u21d3",
"dHar;": "\u2965",
"dagger;": "\u2020",
"daleth;": "\u2138",
"darr;": "\u2193",
"dash;": "\u2010",
"dashv;": "\u22a3",
"dbkarow;": "\u290f",
"dblac;": "\u02dd",
"dcaron;": "\u010f",
"dcy;": "\u0434",
"dd;": "\u2146",
"ddagger;": "\u2021",
"ddarr;": "\u21ca",
"ddotseq;": "\u2a77",
"deg": "\xb0",
"deg;": "\xb0",
"delta;": "\u03b4",
"demptyv;": "\u29b1",
"dfisht;": "\u297f",
"dfr;": "\U0001d521",
"dharl;": "\u21c3",
"dharr;": "\u21c2",
"diam;": "\u22c4",
"diamond;": "\u22c4",
"diamondsuit;": "\u2666",
"diams;": "\u2666",
"die;": "\xa8",
"digamma;": "\u03dd",
"disin;": "\u22f2",
"div;": "\xf7",
"divide": "\xf7",
"divide;": "\xf7",
"divideontimes;": "\u22c7",
"divonx;": "\u22c7",
"djcy;": "\u0452",
"dlcorn;": "\u231e",
"dlcrop;": "\u230d",
"dollar;": "$",
"dopf;": "\U0001d555",
"dot;": "\u02d9",
"doteq;": "\u2250",
"doteqdot;": "\u2251",
"dotminus;": "\u2238",
"dotplus;": "\u2214",
"dotsquare;": "\u22a1",
"doublebarwedge;": "\u2306",
"downarrow;": "\u2193",
"downdownarrows;": "\u21ca",
"downharpoonleft;": "\u21c3",
"downharpoonright;": "\u21c2",
"drbkarow;": "\u2910",
"drcorn;": "\u231f",
"drcrop;": "\u230c",
"dscr;": "\U0001d4b9",
"dscy;": "\u0455",
"dsol;": "\u29f6",
"dstrok;": "\u0111",
"dtdot;": "\u22f1",
"dtri;": "\u25bf",
"dtrif;": "\u25be",
"duarr;": "\u21f5",
"duhar;": "\u296f",
"dwangle;": "\u29a6",
"dzcy;": "\u045f",
"dzigrarr;": "\u27ff",
"eDDot;": "\u2a77",
"eDot;": "\u2251",
"eacute": "\xe9",
"eacute;": "\xe9",
"easter;": "\u2a6e",
"ecaron;": "\u011b",
"ecir;": "\u2256",
"ecirc": "\xea",
"ecirc;": "\xea",
"ecolon;": "\u2255",
"ecy;": "\u044d",
"edot;": "\u0117",
"ee;": "\u2147",
"efDot;": "\u2252",
"efr;": "\U0001d522",
"eg;": "\u2a9a",
"egrave": "\xe8",
"egrave;": "\xe8",
"egs;": "\u2a96",
"egsdot;": "\u2a98",
"el;": "\u2a99",
"elinters;": "\u23e7",
"ell;": "\u2113",
"els;": "\u2a95",
"elsdot;": "\u2a97",
"emacr;": "\u0113",
"empty;": "\u2205",
"emptyset;": "\u2205",
"emptyv;": "\u2205",
"emsp13;": "\u2004",
"emsp14;": "\u2005",
"emsp;": "\u2003",
"eng;": "\u014b",
"ensp;": "\u2002",
"eogon;": "\u0119",
"eopf;": "\U0001d556",
"epar;": "\u22d5",
"eparsl;": "\u29e3",
"eplus;": "\u2a71",
"epsi;": "\u03b5",
"epsilon;": "\u03b5",
"epsiv;": "\u03f5",
"eqcirc;": "\u2256",
"eqcolon;": "\u2255",
"eqsim;": "\u2242",
"eqslantgtr;": "\u2a96",
"eqslantless;": "\u2a95",
"equals;": "=",
"equest;": "\u225f",
"equiv;": "\u2261",
"equivDD;": "\u2a78",
"eqvparsl;": "\u29e5",
"erDot;": "\u2253",
"erarr;": "\u2971",
"escr;": "\u212f",
"esdot;": "\u2250",
"esim;": "\u2242",
"eta;": "\u03b7",
"eth": "\xf0",
"eth;": "\xf0",
"euml": "\xeb",
"euml;": "\xeb",
"euro;": "\u20ac",
"excl;": "!",
"exist;": "\u2203",
"expectation;": "\u2130",
"exponentiale;": "\u2147",
"fallingdotseq;": "\u2252",
"fcy;": "\u0444",
"female;": "\u2640",
"ffilig;": "\ufb03",
"fflig;": "\ufb00",
"ffllig;": "\ufb04",
"ffr;": "\U0001d523",
"filig;": "\ufb01",
"fjlig;": "fj",
"flat;": "\u266d",
"fllig;": "\ufb02",
"fltns;": "\u25b1",
"fnof;": "\u0192",
"fopf;": "\U0001d557",
"forall;": "\u2200",
"fork;": "\u22d4",
"forkv;": "\u2ad9",
"fpartint;": "\u2a0d",
"frac12": "\xbd",
"frac12;": "\xbd",
"frac13;": "\u2153",
"frac14": "\xbc",
"frac14;": "\xbc",
"frac15;": "\u2155",
"frac16;": "\u2159",
"frac18;": "\u215b",
"frac23;": "\u2154",
"frac25;": "\u2156",
"frac34": "\xbe",
"frac34;": "\xbe",
"frac35;": "\u2157",
"frac38;": "\u215c",
"frac45;": "\u2158",
"frac56;": "\u215a",
"frac58;": "\u215d",
"frac78;": "\u215e",
"frasl;": "\u2044",
"frown;": "\u2322",
"fscr;": "\U0001d4bb",
"gE;": "\u2267",
"gEl;": "\u2a8c",
"gacute;": "\u01f5",
"gamma;": "\u03b3",
"gammad;": "\u03dd",
"gap;": "\u2a86",
"gbreve;": "\u011f",
"gcirc;": "\u011d",
"gcy;": "\u0433",
"gdot;": "\u0121",
"ge;": "\u2265",
"gel;": "\u22db",
"geq;": "\u2265",
"geqq;": "\u2267",
"geqslant;": "\u2a7e",
"ges;": "\u2a7e",
"gescc;": "\u2aa9",
"gesdot;": "\u2a80",
"gesdoto;": "\u2a82",
"gesdotol;": "\u2a84",
"gesl;": "\u22db\ufe00",
"gesles;": "\u2a94",
"gfr;": "\U0001d524",
"gg;": "\u226b",
"ggg;": "\u22d9",
"gimel;": "\u2137",
"gjcy;": "\u0453",
"gl;": "\u2277",
"glE;": "\u2a92",
"gla;": "\u2aa5",
"glj;": "\u2aa4",
"gnE;": "\u2269",
"gnap;": "\u2a8a",
"gnapprox;": "\u2a8a",
"gne;": "\u2a88",
"gneq;": "\u2a88",
"gneqq;": "\u2269",
"gnsim;": "\u22e7",
"gopf;": "\U0001d558",
"grave;": "`",
"gscr;": "\u210a",
"gsim;": "\u2273",
"gsime;": "\u2a8e",
"gsiml;": "\u2a90",
"gt": ">",
"gt;": ">",
"gtcc;": "\u2aa7",
"gtcir;": "\u2a7a",
"gtdot;": "\u22d7",
"gtlPar;": "\u2995",
"gtquest;": "\u2a7c",
"gtrapprox;": "\u2a86",
"gtrarr;": "\u2978",
"gtrdot;": "\u22d7",
"gtreqless;": "\u22db",
"gtreqqless;": "\u2a8c",
"gtrless;": "\u2277",
"gtrsim;": "\u2273",
"gvertneqq;": "\u2269\ufe00",
"gvnE;": "\u2269\ufe00",
"hArr;": "\u21d4",
"hairsp;": "\u200a",
"half;": "\xbd",
"hamilt;": "\u210b",
"hardcy;": "\u044a",
"harr;": "\u2194",
"harrcir;": "\u2948",
"harrw;": "\u21ad",
"hbar;": "\u210f",
"hcirc;": "\u0125",
"hearts;": "\u2665",
"heartsuit;": "\u2665",
"hellip;": "\u2026",
"hercon;": "\u22b9",
"hfr;": "\U0001d525",
"hksearow;": "\u2925",
"hkswarow;": "\u2926",
"hoarr;": "\u21ff",
"homtht;": "\u223b",
"hookleftarrow;": "\u21a9",
"hookrightarrow;": "\u21aa",
"hopf;": "\U0001d559",
"horbar;": "\u2015",
"hscr;": "\U0001d4bd",
"hslash;": "\u210f",
"hstrok;": "\u0127",
"hybull;": "\u2043",
"hyphen;": "\u2010",
"iacute": "\xed",
"iacute;": "\xed",
"ic;": "\u2063",
"icirc": "\xee",
"icirc;": "\xee",
"icy;": "\u0438",
"iecy;": "\u0435",
"iexcl": "\xa1",
"iexcl;": "\xa1",
"iff;": "\u21d4",
"ifr;": "\U0001d526",
"igrave": "\xec",
"igrave;": "\xec",
"ii;": "\u2148",
"iiiint;": "\u2a0c",
"iiint;": "\u222d",
"iinfin;": "\u29dc",
"iiota;": "\u2129",
"ijlig;": "\u0133",
"imacr;": "\u012b",
"image;": "\u2111",
"imagline;": "\u2110",
"imagpart;": "\u2111",
"imath;": "\u0131",
"imof;": "\u22b7",
"imped;": "\u01b5",
"in;": "\u2208",
"incare;": "\u2105",
"infin;": "\u221e",
"infintie;": "\u29dd",
"inodot;": "\u0131",
"int;": "\u222b",
"intcal;": "\u22ba",
"integers;": "\u2124",
"intercal;": "\u22ba",
"intlarhk;": "\u2a17",
"intprod;": "\u2a3c",
"iocy;": "\u0451",
"iogon;": "\u012f",
"iopf;": "\U0001d55a",
"iota;": "\u03b9",
"iprod;": "\u2a3c",
"iquest": "\xbf",
"iquest;": "\xbf",
"iscr;": "\U0001d4be",
"isin;": "\u2208",
"isinE;": "\u22f9",
"isindot;": "\u22f5",
"isins;": "\u22f4",
"isinsv;": "\u22f3",
"isinv;": "\u2208",
"it;": "\u2062",
"itilde;": "\u0129",
"iukcy;": "\u0456",
"iuml": "\xef",
"iuml;": "\xef",
"jcirc;": "\u0135",
"jcy;": "\u0439",
"jfr;": "\U0001d527",
"jmath;": "\u0237",
"jopf;": "\U0001d55b",
"jscr;": "\U0001d4bf",
"jsercy;": "\u0458",
"jukcy;": "\u0454",
"kappa;": "\u03ba",
"kappav;": "\u03f0",
"kcedil;": "\u0137",
"kcy;": "\u043a",
"kfr;": "\U0001d528",
"kgreen;": "\u0138",
"khcy;": "\u0445",
"kjcy;": "\u045c",
"kopf;": "\U0001d55c",
"kscr;": "\U0001d4c0",
"lAarr;": "\u21da",
"lArr;": "\u21d0",
"lAtail;": "\u291b",
"lBarr;": "\u290e",
"lE;": "\u2266",
"lEg;": "\u2a8b",
"lHar;": "\u2962",
"lacute;": "\u013a",
"laemptyv;": "\u29b4",
"lagran;": "\u2112",
"lambda;": "\u03bb",
"lang;": "\u27e8",
"langd;": "\u2991",
"langle;": "\u27e8",
"lap;": "\u2a85",
"laquo": "\xab",
"laquo;": "\xab",
"larr;": "\u2190",
"larrb;": "\u21e4",
"larrbfs;": "\u291f",
"larrfs;": "\u291d",
"larrhk;": "\u21a9",
"larrlp;": "\u21ab",
"larrpl;": "\u2939",
"larrsim;": "\u2973",
"larrtl;": "\u21a2",
"lat;": "\u2aab",
"latail;": "\u2919",
"late;": "\u2aad",
"lates;": "\u2aad\ufe00",
"lbarr;": "\u290c",
"lbbrk;": "\u2772",
"lbrace;": "{",
"lbrack;": "[",
"lbrke;": "\u298b",
"lbrksld;": "\u298f",
"lbrkslu;": "\u298d",
"lcaron;": "\u013e",
"lcedil;": "\u013c",
"lceil;": "\u2308",
"lcub;": "{",
"lcy;": "\u043b",
"ldca;": "\u2936",
"ldquo;": "\u201c",
"ldquor;": "\u201e",
"ldrdhar;": "\u2967",
"ldrushar;": "\u294b",
"ldsh;": "\u21b2",
"le;": "\u2264",
"leftarrow;": "\u2190",
"leftarrowtail;": "\u21a2",
"leftharpoondown;": "\u21bd",
"leftharpoonup;": "\u21bc",
"leftleftarrows;": "\u21c7",
"leftrightarrow;": "\u2194",
"leftrightarrows;": "\u21c6",
"leftrightharpoons;": "\u21cb",
"leftrightsquigarrow;": "\u21ad",
"leftthreetimes;": "\u22cb",
"leg;": "\u22da",
"leq;": "\u2264",
"leqq;": "\u2266",
"leqslant;": "\u2a7d",
"les;": "\u2a7d",
"lescc;": "\u2aa8",
"lesdot;": "\u2a7f",
"lesdoto;": "\u2a81",
"lesdotor;": "\u2a83",
"lesg;": "\u22da\ufe00",
"lesges;": "\u2a93",
"lessapprox;": "\u2a85",
"lessdot;": "\u22d6",
"lesseqgtr;": "\u22da",
"lesseqqgtr;": "\u2a8b",
"lessgtr;": "\u2276",
"lesssim;": "\u2272",
"lfisht;": "\u297c",
"lfloor;": "\u230a",
"lfr;": "\U0001d529",
"lg;": "\u2276",
"lgE;": "\u2a91",
"lhard;": "\u21bd",
"lharu;": "\u21bc",
"lharul;": "\u296a",
"lhblk;": "\u2584",
"ljcy;": "\u0459",
"ll;": "\u226a",
"llarr;": "\u21c7",
"llcorner;": "\u231e",
"llhard;": "\u296b",
"lltri;": "\u25fa",
"lmidot;": "\u0140",
"lmoust;": "\u23b0",
"lmoustache;": "\u23b0",
"lnE;": "\u2268",
"lnap;": "\u2a89",
"lnapprox;": "\u2a89",
"lne;": "\u2a87",
"lneq;": "\u2a87",
"lneqq;": "\u2268",
"lnsim;": "\u22e6",
"loang;": "\u27ec",
"loarr;": "\u21fd",
"lobrk;": "\u27e6",
"longleftarrow;": "\u27f5",
"longleftrightarrow;": "\u27f7",
"longmapsto;": "\u27fc",
"longrightarrow;": "\u27f6",
"looparrowleft;": "\u21ab",
"looparrowright;": "\u21ac",
"lopar;": "\u2985",
"lopf;": "\U0001d55d",
"loplus;": "\u2a2d",
"lotimes;": "\u2a34",
"lowast;": "\u2217",
"lowbar;": "_",
"loz;": "\u25ca",
"lozenge;": "\u25ca",
"lozf;": "\u29eb",
"lpar;": "(",
"lparlt;": "\u2993",
"lrarr;": "\u21c6",
"lrcorner;": "\u231f",
"lrhar;": "\u21cb",
"lrhard;": "\u296d",
"lrm;": "\u200e",
"lrtri;": "\u22bf",
"lsaquo;": "\u2039",
"lscr;": "\U0001d4c1",
"lsh;": "\u21b0",
"lsim;": "\u2272",
"lsime;": "\u2a8d",
"lsimg;": "\u2a8f",
"lsqb;": "[",
"lsquo;": "\u2018",
"lsquor;": "\u201a",
"lstrok;": "\u0142",
"lt": "<",
"lt;": "<",
"ltcc;": "\u2aa6",
"ltcir;": "\u2a79",
"ltdot;": "\u22d6",
"lthree;": "\u22cb",
"ltimes;": "\u22c9",
"ltlarr;": "\u2976",
"ltquest;": "\u2a7b",
"ltrPar;": "\u2996",
"ltri;": "\u25c3",
"ltrie;": "\u22b4",
"ltrif;": "\u25c2",
"lurdshar;": "\u294a",
"luruhar;": "\u2966",
"lvertneqq;": "\u2268\ufe00",
"lvnE;": "\u2268\ufe00",
"mDDot;": "\u223a",
"macr": "\xaf",
"macr;": "\xaf",
"male;": "\u2642",
"malt;": "\u2720",
"maltese;": "\u2720",
"map;": "\u21a6",
"mapsto;": "\u21a6",
"mapstodown;": "\u21a7",
"mapstoleft;": "\u21a4",
"mapstoup;": "\u21a5",
"marker;": "\u25ae",
"mcomma;": "\u2a29",
"mcy;": "\u043c",
"mdash;": "\u2014",
"measuredangle;": "\u2221",
"mfr;": "\U0001d52a",
"mho;": "\u2127",
"micro": "\xb5",
"micro;": "\xb5",
"mid;": "\u2223",
"midast;": "*",
"midcir;": "\u2af0",
"middot": "\xb7",
"middot;": "\xb7",
"minus;": "\u2212",
"minusb;": "\u229f",
"minusd;": "\u2238",
"minusdu;": "\u2a2a",
"mlcp;": "\u2adb",
"mldr;": "\u2026",
"mnplus;": "\u2213",
"models;": "\u22a7",
"mopf;": "\U0001d55e",
"mp;": "\u2213",
"mscr;": "\U0001d4c2",
"mstpos;": "\u223e",
"mu;": "\u03bc",
"multimap;": "\u22b8",
"mumap;": "\u22b8",
"nGg;": "\u22d9\u0338",
"nGt;": "\u226b\u20d2",
"nGtv;": "\u226b\u0338",
"nLeftarrow;": "\u21cd",
"nLeftrightarrow;": "\u21ce",
"nLl;": "\u22d8\u0338",
"nLt;": "\u226a\u20d2",
"nLtv;": "\u226a\u0338",
"nRightarrow;": "\u21cf",
"nVDash;": "\u22af",
"nVdash;": "\u22ae",
"nabla;": "\u2207",
"nacute;": "\u0144",
"nang;": "\u2220\u20d2",
"nap;": "\u2249",
"napE;": "\u2a70\u0338",
"napid;": "\u224b\u0338",
"napos;": "\u0149",
"napprox;": "\u2249",
"natur;": "\u266e",
"natural;": "\u266e",
"naturals;": "\u2115",
"nbsp": "\xa0",
"nbsp;": "\xa0",
"nbump;": "\u224e\u0338",
"nbumpe;": "\u224f\u0338",
"ncap;": "\u2a43",
"ncaron;": "\u0148",
"ncedil;": "\u0146",
"ncong;": "\u2247",
"ncongdot;": "\u2a6d\u0338",
"ncup;": "\u2a42",
"ncy;": "\u043d",
"ndash;": "\u2013",
"ne;": "\u2260",
"neArr;": "\u21d7",
"nearhk;": "\u2924",
"nearr;": "\u2197",
"nearrow;": "\u2197",
"nedot;": "\u2250\u0338",
"nequiv;": "\u2262",
"nesear;": "\u2928",
"nesim;": "\u2242\u0338",
"nexist;": "\u2204",
"nexists;": "\u2204",
"nfr;": "\U0001d52b",
"ngE;": "\u2267\u0338",
"nge;": "\u2271",
"ngeq;": "\u2271",
"ngeqq;": "\u2267\u0338",
"ngeqslant;": "\u2a7e\u0338",
"nges;": "\u2a7e\u0338",
"ngsim;": "\u2275",
"ngt;": "\u226f",
"ngtr;": "\u226f",
"nhArr;": "\u21ce",
"nharr;": "\u21ae",
"nhpar;": "\u2af2",
"ni;": "\u220b",
"nis;": "\u22fc",
"nisd;": "\u22fa",
"niv;": "\u220b",
"njcy;": "\u045a",
"nlArr;": "\u21cd",
"nlE;": "\u2266\u0338",
"nlarr;": "\u219a",
"nldr;": "\u2025",
"nle;": "\u2270",
"nleftarrow;": "\u219a",
"nleftrightarrow;": "\u21ae",
"nleq;": "\u2270",
"nleqq;": "\u2266\u0338",
"nleqslant;": "\u2a7d\u0338",
"nles;": "\u2a7d\u0338",
"nless;": "\u226e",
"nlsim;": "\u2274",
"nlt;": "\u226e",
"nltri;": "\u22ea",
"nltrie;": "\u22ec",
"nmid;": "\u2224",
"nopf;": "\U0001d55f",
"not": "\xac",
"not;": "\xac",
"notin;": "\u2209",
"notinE;": "\u22f9\u0338",
"notindot;": "\u22f5\u0338",
"notinva;": "\u2209",
"notinvb;": "\u22f7",
"notinvc;": "\u22f6",
"notni;": "\u220c",
"notniva;": "\u220c",
"notnivb;": "\u22fe",
"notnivc;": "\u22fd",
"npar;": "\u2226",
"nparallel;": "\u2226",
"nparsl;": "\u2afd\u20e5",
"npart;": "\u2202\u0338",
"npolint;": "\u2a14",
"npr;": "\u2280",
"nprcue;": "\u22e0",
"npre;": "\u2aaf\u0338",
"nprec;": "\u2280",
"npreceq;": "\u2aaf\u0338",
"nrArr;": "\u21cf",
"nrarr;": "\u219b",
"nrarrc;": "\u2933\u0338",
"nrarrw;": "\u219d\u0338",
"nrightarrow;": "\u219b",
"nrtri;": "\u22eb",
"nrtrie;": "\u22ed",
"nsc;": "\u2281",
"nsccue;": "\u22e1",
"nsce;": "\u2ab0\u0338",
"nscr;": "\U0001d4c3",
"nshortmid;": "\u2224",
"nshortparallel;": "\u2226",
"nsim;": "\u2241",
"nsime;": "\u2244",
"nsimeq;": "\u2244",
"nsmid;": "\u2224",
"nspar;": "\u2226",
"nsqsube;": "\u22e2",
"nsqsupe;": "\u22e3",
"nsub;": "\u2284",
"nsubE;": "\u2ac5\u0338",
"nsube;": "\u2288",
"nsubset;": "\u2282\u20d2",
"nsubseteq;": "\u2288",
"nsubseteqq;": "\u2ac5\u0338",
"nsucc;": "\u2281",
"nsucceq;": "\u2ab0\u0338",
"nsup;": "\u2285",
"nsupE;": "\u2ac6\u0338",
"nsupe;": "\u2289",
"nsupset;": "\u2283\u20d2",
"nsupseteq;": "\u2289",
"nsupseteqq;": "\u2ac6\u0338",
"ntgl;": "\u2279",
"ntilde": "\xf1",
"ntilde;": "\xf1",
"ntlg;": "\u2278",
"ntriangleleft;": "\u22ea",
"ntrianglelefteq;": "\u22ec",
"ntriangleright;": "\u22eb",
"ntrianglerighteq;": "\u22ed",
"nu;": "\u03bd",
"num;": "#",
"numero;": "\u2116",
"numsp;": "\u2007",
"nvDash;": "\u22ad",
"nvHarr;": "\u2904",
"nvap;": "\u224d\u20d2",
"nvdash;": "\u22ac",
"nvge;": "\u2265\u20d2",
"nvgt;": ">\u20d2",
"nvinfin;": "\u29de",
"nvlArr;": "\u2902",
"nvle;": "\u2264\u20d2",
"nvlt;": "<\u20d2",
"nvltrie;": "\u22b4\u20d2",
"nvrArr;": "\u2903",
"nvrtrie;": "\u22b5\u20d2",
"nvsim;": "\u223c\u20d2",
"nwArr;": "\u21d6",
"nwarhk;": "\u2923",
"nwarr;": "\u2196",
"nwarrow;": "\u2196",
"nwnear;": "\u2927",
"oS;": "\u24c8",
"oacute": "\xf3",
"oacute;": "\xf3",
"oast;": "\u229b",
"ocir;": "\u229a",
"ocirc": "\xf4",
"ocirc;": "\xf4",
"ocy;": "\u043e",
"odash;": "\u229d",
"odblac;": "\u0151",
"odiv;": "\u2a38",
"odot;": "\u2299",
"odsold;": "\u29bc",
"oelig;": "\u0153",
"ofcir;": "\u29bf",
"ofr;": "\U0001d52c",
"ogon;": "\u02db",
"ograve": "\xf2",
"ograve;": "\xf2",
"ogt;": "\u29c1",
"ohbar;": "\u29b5",
"ohm;": "\u03a9",
"oint;": "\u222e",
"olarr;": "\u21ba",
"olcir;": "\u29be",
"olcross;": "\u29bb",
"oline;": "\u203e",
"olt;": "\u29c0",
"omacr;": "\u014d",
"omega;": "\u03c9",
"omicron;": "\u03bf",
"omid;": "\u29b6",
"ominus;": "\u2296",
"oopf;": "\U0001d560",
"opar;": "\u29b7",
"operp;": "\u29b9",
"oplus;": "\u2295",
"or;": "\u2228",
"orarr;": "\u21bb",
"ord;": "\u2a5d",
"order;": "\u2134",
"orderof;": "\u2134",
"ordf": "\xaa",
"ordf;": "\xaa",
"ordm": "\xba",
"ordm;": "\xba",
"origof;": "\u22b6",
"oror;": "\u2a56",
"orslope;": "\u2a57",
"orv;": "\u2a5b",
"oscr;": "\u2134",
"oslash": "\xf8",
"oslash;": "\xf8",
"osol;": "\u2298",
"otilde": "\xf5",
"otilde;": "\xf5",
"otimes;": "\u2297",
"otimesas;": "\u2a36",
"ouml": "\xf6",
"ouml;": "\xf6",
"ovbar;": "\u233d",
"par;": "\u2225",
"para": "\xb6",
"para;": "\xb6",
"parallel;": "\u2225",
"parsim;": "\u2af3",
"parsl;": "\u2afd",
"part;": "\u2202",
"pcy;": "\u043f",
"percnt;": "%",
"period;": ".",
"permil;": "\u2030",
"perp;": "\u22a5",
"pertenk;": "\u2031",
"pfr;": "\U0001d52d",
"phi;": "\u03c6",
"phiv;": "\u03d5",
"phmmat;": "\u2133",
"phone;": "\u260e",
"pi;": "\u03c0",
"pitchfork;": "\u22d4",
"piv;": "\u03d6",
"planck;": "\u210f",
"planckh;": "\u210e",
"plankv;": "\u210f",
"plus;": "+",
"plusacir;": "\u2a23",
"plusb;": "\u229e",
"pluscir;": "\u2a22",
"plusdo;": "\u2214",
"plusdu;": "\u2a25",
"pluse;": "\u2a72",
"plusmn": "\xb1",
"plusmn;": "\xb1",
"plussim;": "\u2a26",
"plustwo;": "\u2a27",
"pm;": "\xb1",
"pointint;": "\u2a15",
"popf;": "\U0001d561",
"pound": "\xa3",
"pound;": "\xa3",
"pr;": "\u227a",
"prE;": "\u2ab3",
"prap;": "\u2ab7",
"prcue;": "\u227c",
"pre;": "\u2aaf",
"prec;": "\u227a",
"precapprox;": "\u2ab7",
"preccurlyeq;": "\u227c",
"preceq;": "\u2aaf",
"precnapprox;": "\u2ab9",
"precneqq;": "\u2ab5",
"precnsim;": "\u22e8",
"precsim;": "\u227e",
"prime;": "\u2032",
"primes;": "\u2119",
"prnE;": "\u2ab5",
"prnap;": "\u2ab9",
"prnsim;": "\u22e8",
"prod;": "\u220f",
"profalar;": "\u232e",
"profline;": "\u2312",
"profsurf;": "\u2313",
"prop;": "\u221d",
"propto;": "\u221d",
"prsim;": "\u227e",
"prurel;": "\u22b0",
"pscr;": "\U0001d4c5",
"psi;": "\u03c8",
"puncsp;": "\u2008",
"qfr;": "\U0001d52e",
"qint;": "\u2a0c",
"qopf;": "\U0001d562",
"qprime;": "\u2057",
"qscr;": "\U0001d4c6",
"quaternions;": "\u210d",
"quatint;": "\u2a16",
"quest;": "?",
"questeq;": "\u225f",
"quot": "\"",
"quot;": "\"",
"rAarr;": "\u21db",
"rArr;": "\u21d2",
"rAtail;": "\u291c",
"rBarr;": "\u290f",
"rHar;": "\u2964",
"race;": "\u223d\u0331",
"racute;": "\u0155",
"radic;": "\u221a",
"raemptyv;": "\u29b3",
"rang;": "\u27e9",
"rangd;": "\u2992",
"range;": "\u29a5",
"rangle;": "\u27e9",
"raquo": "\xbb",
"raquo;": "\xbb",
"rarr;": "\u2192",
"rarrap;": "\u2975",
"rarrb;": "\u21e5",
"rarrbfs;": "\u2920",
"rarrc;": "\u2933",
"rarrfs;": "\u291e",
"rarrhk;": "\u21aa",
"rarrlp;": "\u21ac",
"rarrpl;": "\u2945",
"rarrsim;": "\u2974",
"rarrtl;": "\u21a3",
"rarrw;": "\u219d",
"ratail;": "\u291a",
"ratio;": "\u2236",
"rationals;": "\u211a",
"rbarr;": "\u290d",
"rbbrk;": "\u2773",
"rbrace;": "}",
"rbrack;": "]",
"rbrke;": "\u298c",
"rbrksld;": "\u298e",
"rbrkslu;": "\u2990",
"rcaron;": "\u0159",
"rcedil;": "\u0157",
"rceil;": "\u2309",
"rcub;": "}",
"rcy;": "\u0440",
"rdca;": "\u2937",
"rdldhar;": "\u2969",
"rdquo;": "\u201d",
"rdquor;": "\u201d",
"rdsh;": "\u21b3",
"real;": "\u211c",
"realine;": "\u211b",
"realpart;": "\u211c",
"reals;": "\u211d",
"rect;": "\u25ad",
"reg": "\xae",
"reg;": "\xae",
"rfisht;": "\u297d",
"rfloor;": "\u230b",
"rfr;": "\U0001d52f",
"rhard;": "\u21c1",
"rharu;": "\u21c0",
"rharul;": "\u296c",
"rho;": "\u03c1",
"rhov;": "\u03f1",
"rightarrow;": "\u2192",
"rightarrowtail;": "\u21a3",
"rightharpoondown;": "\u21c1",
"rightharpoonup;": "\u21c0",
"rightleftarrows;": "\u21c4",
"rightleftharpoons;": "\u21cc",
"rightrightarrows;": "\u21c9",
"rightsquigarrow;": "\u219d",
"rightthreetimes;": "\u22cc",
"ring;": "\u02da",
"risingdotseq;": "\u2253",
"rlarr;": "\u21c4",
"rlhar;": "\u21cc",
"rlm;": "\u200f",
"rmoust;": "\u23b1",
"rmoustache;": "\u23b1",
"rnmid;": "\u2aee",
"roang;": "\u27ed",
"roarr;": "\u21fe",
"robrk;": "\u27e7",
"ropar;": "\u2986",
"ropf;": "\U0001d563",
"roplus;": "\u2a2e",
"rotimes;": "\u2a35",
"rpar;": ")",
"rpargt;": "\u2994",
"rppolint;": "\u2a12",
"rrarr;": "\u21c9",
"rsaquo;": "\u203a",
"rscr;": "\U0001d4c7",
"rsh;": "\u21b1",
"rsqb;": "]",
"rsquo;": "\u2019",
"rsquor;": "\u2019",
"rthree;": "\u22cc",
"rtimes;": "\u22ca",
"rtri;": "\u25b9",
"rtrie;": "\u22b5",
"rtrif;": "\u25b8",
"rtriltri;": "\u29ce",
"ruluhar;": "\u2968",
"rx;": "\u211e",
"sacute;": "\u015b",
"sbquo;": "\u201a",
"sc;": "\u227b",
"scE;": "\u2ab4",
"scap;": "\u2ab8",
"scaron;": "\u0161",
"sccue;": "\u227d",
"sce;": "\u2ab0",
"scedil;": "\u015f",
"scirc;": "\u015d",
"scnE;": "\u2ab6",
"scnap;": "\u2aba",
"scnsim;": "\u22e9",
"scpolint;": "\u2a13",
"scsim;": "\u227f",
"scy;": "\u0441",
"sdot;": "\u22c5",
"sdotb;": "\u22a1",
"sdote;": "\u2a66",
"seArr;": "\u21d8",
"searhk;": "\u2925",
"searr;": "\u2198",
"searrow;": "\u2198",
"sect": "\xa7",
"sect;": "\xa7",
"semi;": ";",
"seswar;": "\u2929",
"setminus;": "\u2216",
"setmn;": "\u2216",
"sext;": "\u2736",
"sfr;": "\U0001d530",
"sfrown;": "\u2322",
"sharp;": "\u266f",
"shchcy;": "\u0449",
"shcy;": "\u0448",
"shortmid;": "\u2223",
"shortparallel;": "\u2225",
"shy": "\xad",
"shy;": "\xad",
"sigma;": "\u03c3",
"sigmaf;": "\u03c2",
"sigmav;": "\u03c2",
"sim;": "\u223c",
"simdot;": "\u2a6a",
"sime;": "\u2243",
"simeq;": "\u2243",
"simg;": "\u2a9e",
"simgE;": "\u2aa0",
"siml;": "\u2a9d",
"simlE;": "\u2a9f",
"simne;": "\u2246",
"simplus;": "\u2a24",
"simrarr;": "\u2972",
"slarr;": "\u2190",
"smallsetminus;": "\u2216",
"smashp;": "\u2a33",
"smeparsl;": "\u29e4",
"smid;": "\u2223",
"smile;": "\u2323",
"smt;": "\u2aaa",
"smte;": "\u2aac",
"smtes;": "\u2aac\ufe00",
"softcy;": "\u044c",
"sol;": "/",
"solb;": "\u29c4",
"solbar;": "\u233f",
"sopf;": "\U0001d564",
"spades;": "\u2660",
"spadesuit;": "\u2660",
"spar;": "\u2225",
"sqcap;": "\u2293",
"sqcaps;": "\u2293\ufe00",
"sqcup;": "\u2294",
"sqcups;": "\u2294\ufe00",
"sqsub;": "\u228f",
"sqsube;": "\u2291",
"sqsubset;": "\u228f",
"sqsubseteq;": "\u2291",
"sqsup;": "\u2290",
"sqsupe;": "\u2292",
"sqsupset;": "\u2290",
"sqsupseteq;": "\u2292",
"squ;": "\u25a1",
"square;": "\u25a1",
"squarf;": "\u25aa",
"squf;": "\u25aa",
"srarr;": "\u2192",
"sscr;": "\U0001d4c8",
"ssetmn;": "\u2216",
"ssmile;": "\u2323",
"sstarf;": "\u22c6",
"star;": "\u2606",
"starf;": "\u2605",
"straightepsilon;": "\u03f5",
"straightphi;": "\u03d5",
"strns;": "\xaf",
"sub;": "\u2282",
"subE;": "\u2ac5",
"subdot;": "\u2abd",
"sube;": "\u2286",
"subedot;": "\u2ac3",
"submult;": "\u2ac1",
"subnE;": "\u2acb",
"subne;": "\u228a",
"subplus;": "\u2abf",
"subrarr;": "\u2979",
"subset;": "\u2282",
"subseteq;": "\u2286",
"subseteqq;": "\u2ac5",
"subsetneq;": "\u228a",
"subsetneqq;": "\u2acb",
"subsim;": "\u2ac7",
"subsub;": "\u2ad5",
"subsup;": "\u2ad3",
"succ;": "\u227b",
"succapprox;": "\u2ab8",
"succcurlyeq;": "\u227d",
"succeq;": "\u2ab0",
"succnapprox;": "\u2aba",
"succneqq;": "\u2ab6",
"succnsim;": "\u22e9",
"succsim;": "\u227f",
"sum;": "\u2211",
"sung;": "\u266a",
"sup1": "\xb9",
"sup1;": "\xb9",
"sup2": "\xb2",
"sup2;": "\xb2",
"sup3": "\xb3",
"sup3;": "\xb3",
"sup;": "\u2283",
"supE;": "\u2ac6",
"supdot;": "\u2abe",
"supdsub;": "\u2ad8",
"supe;": "\u2287",
"supedot;": "\u2ac4",
"suphsol;": "\u27c9",
"suphsub;": "\u2ad7",
"suplarr;": "\u297b",
"supmult;": "\u2ac2",
"supnE;": "\u2acc",
"supne;": "\u228b",
"supplus;": "\u2ac0",
"supset;": "\u2283",
"supseteq;": "\u2287",
"supseteqq;": "\u2ac6",
"supsetneq;": "\u228b",
"supsetneqq;": "\u2acc",
"supsim;": "\u2ac8",
"supsub;": "\u2ad4",
"supsup;": "\u2ad6",
"swArr;": "\u21d9",
"swarhk;": "\u2926",
"swarr;": "\u2199",
"swarrow;": "\u2199",
"swnwar;": "\u292a",
"szlig": "\xdf",
"szlig;": "\xdf",
"target;": "\u2316",
"tau;": "\u03c4",
"tbrk;": "\u23b4",
"tcaron;": "\u0165",
"tcedil;": "\u0163",
"tcy;": "\u0442",
"tdot;": "\u20db",
"telrec;": "\u2315",
"tfr;": "\U0001d531",
"there4;": "\u2234",
"therefore;": "\u2234",
"theta;": "\u03b8",
"thetasym;": "\u03d1",
"thetav;": "\u03d1",
"thickapprox;": "\u2248",
"thicksim;": "\u223c",
"thinsp;": "\u2009",
"thkap;": "\u2248",
"thksim;": "\u223c",
"thorn": "\xfe",
"thorn;": "\xfe",
"tilde;": "\u02dc",
"times": "\xd7",
"times;": "\xd7",
"timesb;": "\u22a0",
"timesbar;": "\u2a31",
"timesd;": "\u2a30",
"tint;": "\u222d",
"toea;": "\u2928",
"top;": "\u22a4",
"topbot;": "\u2336",
"topcir;": "\u2af1",
"topf;": "\U0001d565",
"topfork;": "\u2ada",
"tosa;": "\u2929",
"tprime;": "\u2034",
"trade;": "\u2122",
"triangle;": "\u25b5",
"triangledown;": "\u25bf",
"triangleleft;": "\u25c3",
"trianglelefteq;": "\u22b4",
"triangleq;": "\u225c",
"triangleright;": "\u25b9",
"trianglerighteq;": "\u22b5",
"tridot;": "\u25ec",
"trie;": "\u225c",
"triminus;": "\u2a3a",
"triplus;": "\u2a39",
"trisb;": "\u29cd",
"tritime;": "\u2a3b",
"trpezium;": "\u23e2",
"tscr;": "\U0001d4c9",
"tscy;": "\u0446",
"tshcy;": "\u045b",
"tstrok;": "\u0167",
"twixt;": "\u226c",
"twoheadleftarrow;": "\u219e",
"twoheadrightarrow;": "\u21a0",
"uArr;": "\u21d1",
"uHar;": "\u2963",
"uacute": "\xfa",
"uacute;": "\xfa",
"uarr;": "\u2191",
"ubrcy;": "\u045e",
"ubreve;": "\u016d",
"ucirc": "\xfb",
"ucirc;": "\xfb",
"ucy;": "\u0443",
"udarr;": "\u21c5",
"udblac;": "\u0171",
"udhar;": "\u296e",
"ufisht;": "\u297e",
"ufr;": "\U0001d532",
"ugrave": "\xf9",
"ugrave;": "\xf9",
"uharl;": "\u21bf",
"uharr;": "\u21be",
"uhblk;": "\u2580",
"ulcorn;": "\u231c",
"ulcorner;": "\u231c",
"ulcrop;": "\u230f",
"ultri;": "\u25f8",
"umacr;": "\u016b",
"uml": "\xa8",
"uml;": "\xa8",
"uogon;": "\u0173",
"uopf;": "\U0001d566",
"uparrow;": "\u2191",
"updownarrow;": "\u2195",
"upharpoonleft;": "\u21bf",
"upharpoonright;": "\u21be",
"uplus;": "\u228e",
"upsi;": "\u03c5",
"upsih;": "\u03d2",
"upsilon;": "\u03c5",
"upuparrows;": "\u21c8",
"urcorn;": "\u231d",
"urcorner;": "\u231d",
"urcrop;": "\u230e",
"uring;": "\u016f",
"urtri;": "\u25f9",
"uscr;": "\U0001d4ca",
"utdot;": "\u22f0",
"utilde;": "\u0169",
"utri;": "\u25b5",
"utrif;": "\u25b4",
"uuarr;": "\u21c8",
"uuml": "\xfc",
"uuml;": "\xfc",
"uwangle;": "\u29a7",
"vArr;": "\u21d5",
"vBar;": "\u2ae8",
"vBarv;": "\u2ae9",
"vDash;": "\u22a8",
"vangrt;": "\u299c",
"varepsilon;": "\u03f5",
"varkappa;": "\u03f0",
"varnothing;": "\u2205",
"varphi;": "\u03d5",
"varpi;": "\u03d6",
"varpropto;": "\u221d",
"varr;": "\u2195",
"varrho;": "\u03f1",
"varsigma;": "\u03c2",
"varsubsetneq;": "\u228a\ufe00",
"varsubsetneqq;": "\u2acb\ufe00",
"varsupsetneq;": "\u228b\ufe00",
"varsupsetneqq;": "\u2acc\ufe00",
"vartheta;": "\u03d1",
"vartriangleleft;": "\u22b2",
"vartriangleright;": "\u22b3",
"vcy;": "\u0432",
"vdash;": "\u22a2",
"vee;": "\u2228",
"veebar;": "\u22bb",
"veeeq;": "\u225a",
"vellip;": "\u22ee",
"verbar;": "|",
"vert;": "|",
"vfr;": "\U0001d533",
"vltri;": "\u22b2",
"vnsub;": "\u2282\u20d2",
"vnsup;": "\u2283\u20d2",
"vopf;": "\U0001d567",
"vprop;": "\u221d",
"vrtri;": "\u22b3",
"vscr;": "\U0001d4cb",
"vsubnE;": "\u2acb\ufe00",
"vsubne;": "\u228a\ufe00",
"vsupnE;": "\u2acc\ufe00",
"vsupne;": "\u228b\ufe00",
"vzigzag;": "\u299a",
"wcirc;": "\u0175",
"wedbar;": "\u2a5f",
"wedge;": "\u2227",
"wedgeq;": "\u2259",
"weierp;": "\u2118",
"wfr;": "\U0001d534",
"wopf;": "\U0001d568",
"wp;": "\u2118",
"wr;": "\u2240",
"wreath;": "\u2240",
"wscr;": "\U0001d4cc",
"xcap;": "\u22c2",
"xcirc;": "\u25ef",
"xcup;": "\u22c3",
"xdtri;": "\u25bd",
"xfr;": "\U0001d535",
"xhArr;": "\u27fa",
"xharr;": "\u27f7",
"xi;": "\u03be",
"xlArr;": "\u27f8",
"xlarr;": "\u27f5",
"xmap;": "\u27fc",
"xnis;": "\u22fb",
"xodot;": "\u2a00",
"xopf;": "\U0001d569",
"xoplus;": "\u2a01",
"xotime;": "\u2a02",
"xrArr;": "\u27f9",
"xrarr;": "\u27f6",
"xscr;": "\U0001d4cd",
"xsqcup;": "\u2a06",
"xuplus;": "\u2a04",
"xutri;": "\u25b3",
"xvee;": "\u22c1",
"xwedge;": "\u22c0",
"yacute": "\xfd",
"yacute;": "\xfd",
"yacy;": "\u044f",
"ycirc;": "\u0177",
"ycy;": "\u044b",
"yen": "\xa5",
"yen;": "\xa5",
"yfr;": "\U0001d536",
"yicy;": "\u0457",
"yopf;": "\U0001d56a",
"yscr;": "\U0001d4ce",
"yucy;": "\u044e",
"yuml": "\xff",
"yuml;": "\xff",
"zacute;": "\u017a",
"zcaron;": "\u017e",
"zcy;": "\u0437",
"zdot;": "\u017c",
"zeetrf;": "\u2128",
"zeta;": "\u03b6",
"zfr;": "\U0001d537",
"zhcy;": "\u0436",
"zigrarr;": "\u21dd",
"zopf;": "\U0001d56b",
"zscr;": "\U0001d4cf",
"zwj;": "\u200d",
"zwnj;": "\u200c",
}
replacementCharacters = {
0x0: "\uFFFD",
0x0d: "\u000D",
0x80: "\u20AC",
0x81: "\u0081",
0x82: "\u201A",
0x83: "\u0192",
0x84: "\u201E",
0x85: "\u2026",
0x86: "\u2020",
0x87: "\u2021",
0x88: "\u02C6",
0x89: "\u2030",
0x8A: "\u0160",
0x8B: "\u2039",
0x8C: "\u0152",
0x8D: "\u008D",
0x8E: "\u017D",
0x8F: "\u008F",
0x90: "\u0090",
0x91: "\u2018",
0x92: "\u2019",
0x93: "\u201C",
0x94: "\u201D",
0x95: "\u2022",
0x96: "\u2013",
0x97: "\u2014",
0x98: "\u02DC",
0x99: "\u2122",
0x9A: "\u0161",
0x9B: "\u203A",
0x9C: "\u0153",
0x9D: "\u009D",
0x9E: "\u017E",
0x9F: "\u0178",
}
encodings = {
'437': 'cp437',
'850': 'cp850',
'852': 'cp852',
'855': 'cp855',
'857': 'cp857',
'860': 'cp860',
'861': 'cp861',
'862': 'cp862',
'863': 'cp863',
'865': 'cp865',
'866': 'cp866',
'869': 'cp869',
'ansix341968': 'ascii',
'ansix341986': 'ascii',
'arabic': 'iso8859-6',
'ascii': 'ascii',
'asmo708': 'iso8859-6',
'big5': 'big5',
'big5hkscs': 'big5hkscs',
'chinese': 'gbk',
'cp037': 'cp037',
'cp1026': 'cp1026',
'cp154': 'ptcp154',
'cp367': 'ascii',
'cp424': 'cp424',
'cp437': 'cp437',
'cp500': 'cp500',
'cp775': 'cp775',
'cp819': 'windows-1252',
'cp850': 'cp850',
'cp852': 'cp852',
'cp855': 'cp855',
'cp857': 'cp857',
'cp860': 'cp860',
'cp861': 'cp861',
'cp862': 'cp862',
'cp863': 'cp863',
'cp864': 'cp864',
'cp865': 'cp865',
'cp866': 'cp866',
'cp869': 'cp869',
'cp936': 'gbk',
'cpgr': 'cp869',
'cpis': 'cp861',
'csascii': 'ascii',
'csbig5': 'big5',
'cseuckr': 'cp949',
'cseucpkdfmtjapanese': 'euc_jp',
'csgb2312': 'gbk',
'cshproman8': 'hp-roman8',
'csibm037': 'cp037',
'csibm1026': 'cp1026',
'csibm424': 'cp424',
'csibm500': 'cp500',
'csibm855': 'cp855',
'csibm857': 'cp857',
'csibm860': 'cp860',
'csibm861': 'cp861',
'csibm863': 'cp863',
'csibm864': 'cp864',
'csibm865': 'cp865',
'csibm866': 'cp866',
'csibm869': 'cp869',
'csiso2022jp': 'iso2022_jp',
'csiso2022jp2': 'iso2022_jp_2',
'csiso2022kr': 'iso2022_kr',
'csiso58gb231280': 'gbk',
'csisolatin1': 'windows-1252',
'csisolatin2': 'iso8859-2',
'csisolatin3': 'iso8859-3',
'csisolatin4': 'iso8859-4',
'csisolatin5': 'windows-1254',
'csisolatin6': 'iso8859-10',
'csisolatinarabic': 'iso8859-6',
'csisolatincyrillic': 'iso8859-5',
'csisolatingreek': 'iso8859-7',
'csisolatinhebrew': 'iso8859-8',
'cskoi8r': 'koi8-r',
'csksc56011987': 'cp949',
'cspc775baltic': 'cp775',
'cspc850multilingual': 'cp850',
'cspc862latinhebrew': 'cp862',
'cspc8codepage437': 'cp437',
'cspcp852': 'cp852',
'csptcp154': 'ptcp154',
'csshiftjis': 'shift_jis',
'csunicode11utf7': 'utf-7',
'cyrillic': 'iso8859-5',
'cyrillicasian': 'ptcp154',
'ebcdiccpbe': 'cp500',
'ebcdiccpca': 'cp037',
'ebcdiccpch': 'cp500',
'ebcdiccphe': 'cp424',
'ebcdiccpnl': 'cp037',
'ebcdiccpus': 'cp037',
'ebcdiccpwt': 'cp037',
'ecma114': 'iso8859-6',
'ecma118': 'iso8859-7',
'elot928': 'iso8859-7',
'eucjp': 'euc_jp',
'euckr': 'cp949',
'extendedunixcodepackedformatforjapanese': 'euc_jp',
'gb18030': 'gb18030',
'gb2312': 'gbk',
'gb231280': 'gbk',
'gbk': 'gbk',
'greek': 'iso8859-7',
'greek8': 'iso8859-7',
'hebrew': 'iso8859-8',
'hproman8': 'hp-roman8',
'hzgb2312': 'hz',
'ibm037': 'cp037',
'ibm1026': 'cp1026',
'ibm367': 'ascii',
'ibm424': 'cp424',
'ibm437': 'cp437',
'ibm500': 'cp500',
'ibm775': 'cp775',
'ibm819': 'windows-1252',
'ibm850': 'cp850',
'ibm852': 'cp852',
'ibm855': 'cp855',
'ibm857': 'cp857',
'ibm860': 'cp860',
'ibm861': 'cp861',
'ibm862': 'cp862',
'ibm863': 'cp863',
'ibm864': 'cp864',
'ibm865': 'cp865',
'ibm866': 'cp866',
'ibm869': 'cp869',
'iso2022jp': 'iso2022_jp',
'iso2022jp2': 'iso2022_jp_2',
'iso2022kr': 'iso2022_kr',
'iso646irv1991': 'ascii',
'iso646us': 'ascii',
'iso88591': 'windows-1252',
'iso885910': 'iso8859-10',
'iso8859101992': 'iso8859-10',
'iso885911987': 'windows-1252',
'iso885913': 'iso8859-13',
'iso885914': 'iso8859-14',
'iso8859141998': 'iso8859-14',
'iso885915': 'iso8859-15',
'iso885916': 'iso8859-16',
'iso8859162001': 'iso8859-16',
'iso88592': 'iso8859-2',
'iso885921987': 'iso8859-2',
'iso88593': 'iso8859-3',
'iso885931988': 'iso8859-3',
'iso88594': 'iso8859-4',
'iso885941988': 'iso8859-4',
'iso88595': 'iso8859-5',
'iso885951988': 'iso8859-5',
'iso88596': 'iso8859-6',
'iso885961987': 'iso8859-6',
'iso88597': 'iso8859-7',
'iso885971987': 'iso8859-7',
'iso88598': 'iso8859-8',
'iso885981988': 'iso8859-8',
'iso88599': 'windows-1254',
'iso885991989': 'windows-1254',
'isoceltic': 'iso8859-14',
'isoir100': 'windows-1252',
'isoir101': 'iso8859-2',
'isoir109': 'iso8859-3',
'isoir110': 'iso8859-4',
'isoir126': 'iso8859-7',
'isoir127': 'iso8859-6',
'isoir138': 'iso8859-8',
'isoir144': 'iso8859-5',
'isoir148': 'windows-1254',
'isoir149': 'cp949',
'isoir157': 'iso8859-10',
'isoir199': 'iso8859-14',
'isoir226': 'iso8859-16',
'isoir58': 'gbk',
'isoir6': 'ascii',
'koi8r': 'koi8-r',
'koi8u': 'koi8-u',
'korean': 'cp949',
'ksc5601': 'cp949',
'ksc56011987': 'cp949',
'ksc56011989': 'cp949',
'l1': 'windows-1252',
'l10': 'iso8859-16',
'l2': 'iso8859-2',
'l3': 'iso8859-3',
'l4': 'iso8859-4',
'l5': 'windows-1254',
'l6': 'iso8859-10',
'l8': 'iso8859-14',
'latin1': 'windows-1252',
'latin10': 'iso8859-16',
'latin2': 'iso8859-2',
'latin3': 'iso8859-3',
'latin4': 'iso8859-4',
'latin5': 'windows-1254',
'latin6': 'iso8859-10',
'latin8': 'iso8859-14',
'latin9': 'iso8859-15',
'ms936': 'gbk',
'mskanji': 'shift_jis',
'pt154': 'ptcp154',
'ptcp154': 'ptcp154',
'r8': 'hp-roman8',
'roman8': 'hp-roman8',
'shiftjis': 'shift_jis',
'tis620': 'cp874',
'unicode11utf7': 'utf-7',
'us': 'ascii',
'usascii': 'ascii',
'utf16': 'utf-16',
'utf16be': 'utf-16-be',
'utf16le': 'utf-16-le',
'utf8': 'utf-8',
'windows1250': 'cp1250',
'windows1251': 'cp1251',
'windows1252': 'cp1252',
'windows1253': 'cp1253',
'windows1254': 'cp1254',
'windows1255': 'cp1255',
'windows1256': 'cp1256',
'windows1257': 'cp1257',
'windows1258': 'cp1258',
'windows936': 'gbk',
'x-x-big5': 'big5'}
tokenTypes = {
"Doctype": 0,
"Characters": 1,
"SpaceCharacters": 2,
"StartTag": 3,
"EndTag": 4,
"EmptyTag": 5,
"Comment": 6,
"ParseError": 7
}
tagTokenTypes = frozenset((tokenTypes["StartTag"], tokenTypes["EndTag"],
tokenTypes["EmptyTag"]))
prefixes = dict([(v, k) for k, v in namespaces.items()])
prefixes["http://www.w3.org/1998/Math/MathML"] = "math"
class DataLossWarning(UserWarning):
pass
class ReparseException(Exception):
pass
|
gpl-3.0
|
skipisz/linux
|
Documentation/target/tcm_mod_builder.py
|
4981
|
41422
|
#!/usr/bin/python
# The TCM v4 multi-protocol fabric module generation script for drivers/target/$NEW_MOD
#
# Copyright (c) 2010 Rising Tide Systems
# Copyright (c) 2010 Linux-iSCSI.org
#
# Author: [email protected]
#
import os, sys
import subprocess as sub
import string
import re
import optparse
tcm_dir = ""
fabric_ops = []
fabric_mod_dir = ""
fabric_mod_port = ""
fabric_mod_init_port = ""
def tcm_mod_err(msg):
print msg
sys.exit(1)
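# Create the new fabric module directory; returns 1 if it already exists so main() can abort.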
def tcm_mod_create_module_subdir(fabric_mod_dir_var):
if os.path.isdir(fabric_mod_dir_var) == True:
return 1
print "Creating fabric_mod_dir: " + fabric_mod_dir_var
ret = os.mkdir(fabric_mod_dir_var)
if ret:
tcm_mod_err("Unable to mkdir " + fabric_mod_dir_var)
return
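# Emit <mod>_base.h for FC: nacl/tpg/lport structs wrapping se_node_acl, se_portal_group and se_wwn.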
def tcm_mod_build_FC_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
    p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for FC Initiator Nport */\n"
buf += " u64 nport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Initiator Nport */\n"
buf += " char nport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* FC lport target portal group tag for TCM */\n"
buf += " u16 lport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_lport */\n"
buf += " struct " + fabric_mod_name + "_lport *lport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_lport {\n"
buf += " /* SCSI protocol the lport is providing */\n"
buf += " u8 lport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for FC Target Lport */\n"
buf += " u64 lport_wwpn;\n"
buf += " /* ASCII formatted WWPN for FC Target Lport */\n"
buf += " char lport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_lport() */\n"
buf += " struct se_wwn lport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "lport"
fabric_mod_init_port = "nport"
return
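# Emit <mod>_base.h for SAS: nacl/tpg/tport structs wrapping the TCM core types.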
def tcm_mod_build_SAS_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
    p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* Binary World Wide unique Port Name for SAS Initiator port */\n"
buf += " u64 iport_wwpn;\n"
buf += " /* ASCII formatted WWPN for Sas Initiator port */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* SAS port target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* Binary World Wide unique Port Name for SAS Target port */\n"
buf += " u64 tport_wwpn;\n"
buf += " /* ASCII formatted WWPN for SAS Target port */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
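# Emit <mod>_base.h for iSCSI: nacl keyed by InitiatorName, tport keyed by TargetName IQN.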
def tcm_mod_build_iSCSI_include(fabric_mod_dir_var, fabric_mod_name):
global fabric_mod_port
global fabric_mod_init_port
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_base.h"
print "Writing file: " + f
    p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#define " + fabric_mod_name.upper() + "_VERSION \"v0.1\"\n"
buf += "#define " + fabric_mod_name.upper() + "_NAMELEN 32\n"
buf += "\n"
buf += "struct " + fabric_mod_name + "_nacl {\n"
buf += " /* ASCII formatted InitiatorName */\n"
buf += " char iport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_nodeacl() */\n"
buf += " struct se_node_acl se_node_acl;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tpg {\n"
buf += " /* iSCSI target portal group tag for TCM */\n"
buf += " u16 tport_tpgt;\n"
buf += " /* Pointer back to " + fabric_mod_name + "_tport */\n"
buf += " struct " + fabric_mod_name + "_tport *tport;\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tpg() */\n"
buf += " struct se_portal_group se_tpg;\n"
buf += "};\n\n"
buf += "struct " + fabric_mod_name + "_tport {\n"
buf += " /* SCSI protocol the tport is providing */\n"
buf += " u8 tport_proto_id;\n"
buf += " /* ASCII formatted TargetName for IQN */\n"
buf += " char tport_name[" + fabric_mod_name.upper() + "_NAMELEN];\n"
buf += " /* Returned by " + fabric_mod_name + "_make_tport() */\n"
buf += " struct se_wwn tport_wwn;\n"
buf += "};\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
fabric_mod_port = "tport"
fabric_mod_init_port = "iport"
return
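# Dispatch to the protocol-specific _base.h generator for FC, SAS or iSCSI.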
def tcm_mod_build_base_includes(proto_ident, fabric_mod_dir_val, fabric_mod_name):
if proto_ident == "FC":
tcm_mod_build_FC_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "SAS":
tcm_mod_build_SAS_include(fabric_mod_dir_val, fabric_mod_name)
elif proto_ident == "iSCSI":
tcm_mod_build_iSCSI_include(fabric_mod_dir_val, fabric_mod_name)
else:
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
return
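# Generate <mod>_configfs.c: configfs make/drop callbacks, the target_core_fabric_ops table and module init/exit.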
def tcm_mod_build_configfs(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_configfs.c"
print "Writing file: " + f
    p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "#include <linux/module.h>\n"
buf += "#include <linux/moduleparam.h>\n"
buf += "#include <linux/version.h>\n"
buf += "#include <generated/utsrelease.h>\n"
buf += "#include <linux/utsname.h>\n"
buf += "#include <linux/init.h>\n"
buf += "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/configfs.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_fabric_configfs.h>\n"
buf += "#include <target/target_core_configfs.h>\n"
buf += "#include <target/configfs_macros.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "/* Local pointer to allocated TCM configfs fabric module */\n"
buf += "struct target_fabric_configfs *" + fabric_mod_name + "_fabric_configfs;\n\n"
buf += "static struct se_node_acl *" + fabric_mod_name + "_make_nodeacl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct se_node_acl *se_nacl, *se_nacl_new;\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n"
buf += " u32 nexus_depth;\n\n"
buf += " /* " + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n"
buf += " se_nacl_new = " + fabric_mod_name + "_alloc_fabric_acl(se_tpg);\n"
buf += " if (!se_nacl_new)\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += "//#warning FIXME: Hardcoded nexus depth in " + fabric_mod_name + "_make_nodeacl()\n"
buf += " nexus_depth = 1;\n"
buf += " /*\n"
buf += " * se_nacl_new may be released by core_tpg_add_initiator_node_acl()\n"
buf += " * when converting a NodeACL from demo mode -> explict\n"
buf += " */\n"
buf += " se_nacl = core_tpg_add_initiator_node_acl(se_tpg, se_nacl_new,\n"
buf += " name, nexus_depth);\n"
buf += " if (IS_ERR(se_nacl)) {\n"
buf += " " + fabric_mod_name + "_release_fabric_acl(se_tpg, se_nacl_new);\n"
buf += " return se_nacl;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Locate our struct " + fabric_mod_name + "_nacl and set the FC Nport WWPN\n"
buf += " */\n"
buf += " nacl = container_of(se_nacl, struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " nacl->" + fabric_mod_init_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&nacl->" + fabric_mod_init_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return se_nacl;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_nodeacl(struct se_node_acl *se_acl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_acl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " core_tpg_del_initiator_node_acl(se_acl->se_tpg, se_acl, 1);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
buf += "static struct se_portal_group *" + fabric_mod_name + "_make_tpg(\n"
buf += " struct se_wwn *wwn,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + "*" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg;\n"
buf += " unsigned long tpgt;\n"
buf += " int ret;\n\n"
buf += " if (strstr(name, \"tpgt_\") != name)\n"
buf += " return ERR_PTR(-EINVAL);\n"
buf += " if (strict_strtoul(name + 5, 10, &tpgt) || tpgt > UINT_MAX)\n"
buf += " return ERR_PTR(-EINVAL);\n\n"
buf += " tpg = kzalloc(sizeof(struct " + fabric_mod_name + "_tpg), GFP_KERNEL);\n"
buf += " if (!tpg) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_tpg\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
buf += " tpg->" + fabric_mod_port + " = " + fabric_mod_port + ";\n"
buf += " tpg->" + fabric_mod_port + "_tpgt = tpgt;\n\n"
buf += " ret = core_tpg_register(&" + fabric_mod_name + "_fabric_configfs->tf_ops, wwn,\n"
buf += " &tpg->se_tpg, (void *)tpg,\n"
buf += " TRANSPORT_TPG_TYPE_NORMAL);\n"
buf += " if (ret < 0) {\n"
buf += " kfree(tpg);\n"
buf += " return NULL;\n"
buf += " }\n"
buf += " return &tpg->se_tpg;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_tpg(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n\n"
buf += " core_tpg_deregister(se_tpg);\n"
buf += " kfree(tpg);\n"
buf += "}\n\n"
buf += "static struct se_wwn *" + fabric_mod_name + "_make_" + fabric_mod_port + "(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " struct config_group *group,\n"
buf += " const char *name)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + ";\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " u64 wwpn = 0;\n\n"
buf += " /* if (" + fabric_mod_name + "_parse_wwn(name, &wwpn, 1) < 0)\n"
buf += " return ERR_PTR(-EINVAL); */\n\n"
buf += " " + fabric_mod_port + " = kzalloc(sizeof(struct " + fabric_mod_name + "_" + fabric_mod_port + "), GFP_KERNEL);\n"
buf += " if (!" + fabric_mod_port + ") {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_" + fabric_mod_port + "\");\n"
buf += " return ERR_PTR(-ENOMEM);\n"
buf += " }\n"
if proto_ident == "FC" or proto_ident == "SAS":
buf += " " + fabric_mod_port + "->" + fabric_mod_port + "_wwpn = wwpn;\n"
buf += " /* " + fabric_mod_name + "_format_wwn(&" + fabric_mod_port + "->" + fabric_mod_port + "_name[0], " + fabric_mod_name.upper() + "_NAMELEN, wwpn); */\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_wwn;\n"
buf += "}\n\n"
buf += "static void " + fabric_mod_name + "_drop_" + fabric_mod_port + "(struct se_wwn *wwn)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = container_of(wwn,\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + ", " + fabric_mod_port + "_wwn);\n"
buf += " kfree(" + fabric_mod_port + ");\n"
buf += "}\n\n"
buf += "static ssize_t " + fabric_mod_name + "_wwn_show_attr_version(\n"
buf += " struct target_fabric_configfs *tf,\n"
buf += " char *page)\n"
buf += "{\n"
buf += " return sprintf(page, \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \"on \"UTS_RELEASE\"\\n\", " + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += "}\n\n"
buf += "TF_WWN_ATTR_RO(" + fabric_mod_name + ", version);\n\n"
buf += "static struct configfs_attribute *" + fabric_mod_name + "_wwn_attrs[] = {\n"
buf += " &" + fabric_mod_name + "_wwn_version.attr,\n"
buf += " NULL,\n"
buf += "};\n\n"
buf += "static struct target_core_fabric_ops " + fabric_mod_name + "_ops = {\n"
buf += " .get_fabric_name = " + fabric_mod_name + "_get_fabric_name,\n"
buf += " .get_fabric_proto_ident = " + fabric_mod_name + "_get_fabric_proto_ident,\n"
buf += " .tpg_get_wwn = " + fabric_mod_name + "_get_fabric_wwn,\n"
buf += " .tpg_get_tag = " + fabric_mod_name + "_get_tag,\n"
buf += " .tpg_get_default_depth = " + fabric_mod_name + "_get_default_depth,\n"
buf += " .tpg_get_pr_transport_id = " + fabric_mod_name + "_get_pr_transport_id,\n"
buf += " .tpg_get_pr_transport_id_len = " + fabric_mod_name + "_get_pr_transport_id_len,\n"
buf += " .tpg_parse_pr_out_transport_id = " + fabric_mod_name + "_parse_pr_out_transport_id,\n"
buf += " .tpg_check_demo_mode = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_check_demo_mode_cache = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_demo_mode_write_protect = " + fabric_mod_name + "_check_true,\n"
buf += " .tpg_check_prod_mode_write_protect = " + fabric_mod_name + "_check_false,\n"
buf += " .tpg_alloc_fabric_acl = " + fabric_mod_name + "_alloc_fabric_acl,\n"
buf += " .tpg_release_fabric_acl = " + fabric_mod_name + "_release_fabric_acl,\n"
buf += " .tpg_get_inst_index = " + fabric_mod_name + "_tpg_get_inst_index,\n"
buf += " .release_cmd = " + fabric_mod_name + "_release_cmd,\n"
buf += " .shutdown_session = " + fabric_mod_name + "_shutdown_session,\n"
buf += " .close_session = " + fabric_mod_name + "_close_session,\n"
buf += " .stop_session = " + fabric_mod_name + "_stop_session,\n"
buf += " .fall_back_to_erl0 = " + fabric_mod_name + "_reset_nexus,\n"
buf += " .sess_logged_in = " + fabric_mod_name + "_sess_logged_in,\n"
buf += " .sess_get_index = " + fabric_mod_name + "_sess_get_index,\n"
buf += " .sess_get_initiator_sid = NULL,\n"
buf += " .write_pending = " + fabric_mod_name + "_write_pending,\n"
buf += " .write_pending_status = " + fabric_mod_name + "_write_pending_status,\n"
buf += " .set_default_node_attributes = " + fabric_mod_name + "_set_default_node_attrs,\n"
buf += " .get_task_tag = " + fabric_mod_name + "_get_task_tag,\n"
buf += " .get_cmd_state = " + fabric_mod_name + "_get_cmd_state,\n"
buf += " .queue_data_in = " + fabric_mod_name + "_queue_data_in,\n"
buf += " .queue_status = " + fabric_mod_name + "_queue_status,\n"
buf += " .queue_tm_rsp = " + fabric_mod_name + "_queue_tm_rsp,\n"
buf += " .get_fabric_sense_len = " + fabric_mod_name + "_get_fabric_sense_len,\n"
buf += " .set_fabric_sense_len = " + fabric_mod_name + "_set_fabric_sense_len,\n"
buf += " .is_state_remove = " + fabric_mod_name + "_is_state_remove,\n"
buf += " /*\n"
buf += " * Setup function pointers for generic logic in target_core_fabric_configfs.c\n"
buf += " */\n"
buf += " .fabric_make_wwn = " + fabric_mod_name + "_make_" + fabric_mod_port + ",\n"
buf += " .fabric_drop_wwn = " + fabric_mod_name + "_drop_" + fabric_mod_port + ",\n"
buf += " .fabric_make_tpg = " + fabric_mod_name + "_make_tpg,\n"
buf += " .fabric_drop_tpg = " + fabric_mod_name + "_drop_tpg,\n"
buf += " .fabric_post_link = NULL,\n"
buf += " .fabric_pre_unlink = NULL,\n"
buf += " .fabric_make_np = NULL,\n"
buf += " .fabric_drop_np = NULL,\n"
buf += " .fabric_make_nodeacl = " + fabric_mod_name + "_make_nodeacl,\n"
buf += " .fabric_drop_nodeacl = " + fabric_mod_name + "_drop_nodeacl,\n"
buf += "};\n\n"
buf += "static int " + fabric_mod_name + "_register_configfs(void)\n"
buf += "{\n"
buf += " struct target_fabric_configfs *fabric;\n"
buf += " int ret;\n\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + " fabric module %s on %s/%s\"\n"
buf += " \" on \"UTS_RELEASE\"\\n\"," + fabric_mod_name.upper() + "_VERSION, utsname()->sysname,\n"
buf += " utsname()->machine);\n"
buf += " /*\n"
buf += " * Register the top level struct config_item_type with TCM core\n"
buf += " */\n"
buf += " fabric = target_fabric_configfs_init(THIS_MODULE, \"" + fabric_mod_name[4:] + "\");\n"
buf += " if (IS_ERR(fabric)) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_init() failed\\n\");\n"
buf += " return PTR_ERR(fabric);\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup fabric->tf_ops from our local " + fabric_mod_name + "_ops\n"
buf += " */\n"
buf += " fabric->tf_ops = " + fabric_mod_name + "_ops;\n"
buf += " /*\n"
buf += " * Setup default attribute lists for various fabric->tf_cit_tmpl\n"
buf += " */\n"
buf += " TF_CIT_TMPL(fabric)->tfc_wwn_cit.ct_attrs = " + fabric_mod_name + "_wwn_attrs;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_param_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_np_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_base_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_attrib_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_auth_cit.ct_attrs = NULL;\n"
buf += " TF_CIT_TMPL(fabric)->tfc_tpg_nacl_param_cit.ct_attrs = NULL;\n"
buf += " /*\n"
buf += " * Register the fabric for use within TCM\n"
buf += " */\n"
buf += " ret = target_fabric_configfs_register(fabric);\n"
buf += " if (ret < 0) {\n"
buf += " printk(KERN_ERR \"target_fabric_configfs_register() failed\"\n"
buf += " \" for " + fabric_mod_name.upper() + "\\n\");\n"
buf += " return ret;\n"
buf += " }\n"
buf += " /*\n"
buf += " * Setup our local pointer to *fabric\n"
buf += " */\n"
buf += " " + fabric_mod_name + "_fabric_configfs = fabric;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Set fabric -> " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_deregister_configfs(void)\n"
buf += "{\n"
buf += " if (!" + fabric_mod_name + "_fabric_configfs)\n"
buf += " return;\n\n"
buf += " target_fabric_configfs_deregister(" + fabric_mod_name + "_fabric_configfs);\n"
buf += " " + fabric_mod_name + "_fabric_configfs = NULL;\n"
buf += " printk(KERN_INFO \"" + fabric_mod_name.upper() + "[0] - Cleared " + fabric_mod_name + "_fabric_configfs\\n\");\n"
buf += "};\n\n"
buf += "static int __init " + fabric_mod_name + "_init(void)\n"
buf += "{\n"
buf += " int ret;\n\n"
buf += " ret = " + fabric_mod_name + "_register_configfs();\n"
buf += " if (ret < 0)\n"
buf += " return ret;\n\n"
buf += " return 0;\n"
buf += "};\n\n"
buf += "static void __exit " + fabric_mod_name + "_exit(void)\n"
buf += "{\n"
buf += " " + fabric_mod_name + "_deregister_configfs();\n"
buf += "};\n\n"
buf += "MODULE_DESCRIPTION(\"" + fabric_mod_name.upper() + " series fabric driver\");\n"
buf += "MODULE_LICENSE(\"GPL\");\n"
buf += "module_init(" + fabric_mod_name + "_init);\n"
buf += "module_exit(" + fabric_mod_name + "_exit);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
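# Collect the function-pointer members of struct target_core_fabric_ops from target_core_fabric.h.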
def tcm_mod_scan_fabric_ops(tcm_dir):
fabric_ops_api = tcm_dir + "include/target/target_core_fabric.h"
print "Using tcm_mod_scan_fabric_ops: " + fabric_ops_api
    process_fo = 0
p = open(fabric_ops_api, 'r')
line = p.readline()
while line:
if process_fo == 0 and re.search('struct target_core_fabric_ops {', line):
line = p.readline()
continue
if process_fo == 0:
        process_fo = 1
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
continue
line = p.readline()
# Search for function pointer
if not re.search('\(\*', line):
continue
fabric_ops.append(line.rstrip())
p.close()
return
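# Generate stub implementations (<mod>_fabric.c) and prototypes (<mod>_fabric.h) for every fabric op found above.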
def tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir_var, fabric_mod_name):
buf = ""
bufi = ""
f = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.c"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
fi = fabric_mod_dir_var + "/" + fabric_mod_name + "_fabric.h"
print "Writing file: " + fi
pi = open(fi, 'w')
if not pi:
tcm_mod_err("Unable to open file: " + fi)
buf = "#include <linux/slab.h>\n"
buf += "#include <linux/kthread.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/list.h>\n"
buf += "#include <linux/types.h>\n"
buf += "#include <linux/string.h>\n"
buf += "#include <linux/ctype.h>\n"
buf += "#include <asm/unaligned.h>\n"
buf += "#include <scsi/scsi.h>\n"
buf += "#include <scsi/scsi_host.h>\n"
buf += "#include <scsi/scsi_device.h>\n"
buf += "#include <scsi/scsi_cmnd.h>\n"
buf += "#include <scsi/libfc.h>\n\n"
buf += "#include <target/target_core_base.h>\n"
buf += "#include <target/target_core_fabric.h>\n"
buf += "#include <target/target_core_configfs.h>\n\n"
buf += "#include \"" + fabric_mod_name + "_base.h\"\n"
buf += "#include \"" + fabric_mod_name + "_fabric.h\"\n\n"
buf += "int " + fabric_mod_name + "_check_true(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_true(struct se_portal_group *);\n"
buf += "int " + fabric_mod_name + "_check_false(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_check_false(struct se_portal_group *);\n"
total_fabric_ops = len(fabric_ops)
i = 0
while i < total_fabric_ops:
fo = fabric_ops[i]
i += 1
# print "fabric_ops: " + fo
if re.search('get_fabric_name', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_name(void)\n"
buf += "{\n"
buf += " return \"" + fabric_mod_name[4:] + "\";\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_name(void);\n"
continue
if re.search('get_fabric_proto_ident', fo):
buf += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " u8 proto_id;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " proto_id = fc_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " proto_id = sas_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " proto_id = iscsi_get_fabric_proto_ident(se_tpg);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return proto_id;\n"
buf += "}\n\n"
bufi += "u8 " + fabric_mod_name + "_get_fabric_proto_ident(struct se_portal_group *);\n"
if re.search('get_wwn', fo):
buf += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n\n"
buf += " return &" + fabric_mod_port + "->" + fabric_mod_port + "_name[0];\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_get_fabric_wwn(struct se_portal_group *);\n"
if re.search('get_tag', fo):
buf += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " return tpg->" + fabric_mod_port + "_tpgt;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_tag(struct se_portal_group *);\n"
if re.search('get_default_depth', fo):
buf += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_default_depth(struct se_portal_group *);\n"
if re.search('get_pr_transport_id\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code,\n"
buf += " unsigned char *buf)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code, buf);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *, unsigned char *);\n"
if re.search('get_pr_transport_id_len\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl,\n"
buf += " struct t10_pr_registration *pr_reg,\n"
buf += " int *format_code)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " int ret = 0;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " ret = fc_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " ret = sas_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " ret = iscsi_get_pr_transport_id_len(se_tpg, se_nacl, pr_reg,\n"
buf += " format_code);\n"
buf += " break;\n"
buf += " }\n\n"
buf += " return ret;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_pr_transport_id_len(struct se_portal_group *,\n"
bufi += " struct se_node_acl *, struct t10_pr_registration *,\n"
bufi += " int *);\n"
if re.search('parse_pr_out_transport_id\)\(', fo):
buf += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " const char *buf,\n"
buf += " u32 *out_tid_len,\n"
buf += " char **port_nexus_ptr)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_tpg *tpg = container_of(se_tpg,\n"
buf += " struct " + fabric_mod_name + "_tpg, se_tpg);\n"
buf += " struct " + fabric_mod_name + "_" + fabric_mod_port + " *" + fabric_mod_port + " = tpg->" + fabric_mod_port + ";\n"
buf += " char *tid = NULL;\n\n"
buf += " switch (" + fabric_mod_port + "->" + fabric_mod_port + "_proto_id) {\n"
if proto_ident == "FC":
buf += " case SCSI_PROTOCOL_FCP:\n"
buf += " default:\n"
buf += " tid = fc_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "SAS":
buf += " case SCSI_PROTOCOL_SAS:\n"
buf += " default:\n"
buf += " tid = sas_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
elif proto_ident == "iSCSI":
buf += " case SCSI_PROTOCOL_ISCSI:\n"
buf += " default:\n"
buf += " tid = iscsi_parse_pr_out_transport_id(se_tpg, buf, out_tid_len,\n"
buf += " port_nexus_ptr);\n"
buf += " }\n\n"
buf += " return tid;\n"
buf += "}\n\n"
bufi += "char *" + fabric_mod_name + "_parse_pr_out_transport_id(struct se_portal_group *,\n"
bufi += " const char *, u32 *, char **);\n"
if re.search('alloc_fabric_acl\)\(', fo):
buf += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl;\n\n"
buf += " nacl = kzalloc(sizeof(struct " + fabric_mod_name + "_nacl), GFP_KERNEL);\n"
buf += " if (!nacl) {\n"
buf += " printk(KERN_ERR \"Unable to allocate struct " + fabric_mod_name + "_nacl\\n\");\n"
buf += " return NULL;\n"
buf += " }\n\n"
buf += " return &nacl->se_node_acl;\n"
buf += "}\n\n"
bufi += "struct se_node_acl *" + fabric_mod_name + "_alloc_fabric_acl(struct se_portal_group *);\n"
if re.search('release_fabric_acl\)\(', fo):
buf += "void " + fabric_mod_name + "_release_fabric_acl(\n"
buf += " struct se_portal_group *se_tpg,\n"
buf += " struct se_node_acl *se_nacl)\n"
buf += "{\n"
buf += " struct " + fabric_mod_name + "_nacl *nacl = container_of(se_nacl,\n"
buf += " struct " + fabric_mod_name + "_nacl, se_node_acl);\n"
buf += " kfree(nacl);\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_fabric_acl(struct se_portal_group *,\n"
bufi += " struct se_node_acl *);\n"
if re.search('tpg_get_inst_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *se_tpg)\n"
buf += "{\n"
buf += " return 1;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_tpg_get_inst_index(struct se_portal_group *);\n"
if re.search('\*release_cmd\)\(', fo):
buf += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_release_cmd(struct se_cmd *);\n"
if re.search('shutdown_session\)\(', fo):
buf += "int " + fabric_mod_name + "_shutdown_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_shutdown_session(struct se_session *);\n"
if re.search('close_session\)\(', fo):
buf += "void " + fabric_mod_name + "_close_session(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_close_session(struct se_session *);\n"
if re.search('stop_session\)\(', fo):
buf += "void " + fabric_mod_name + "_stop_session(struct se_session *se_sess, int sess_sleep , int conn_sleep)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_stop_session(struct se_session *, int, int);\n"
if re.search('fall_back_to_erl0\)\(', fo):
buf += "void " + fabric_mod_name + "_reset_nexus(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_reset_nexus(struct se_session *);\n"
if re.search('sess_logged_in\)\(', fo):
buf += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_sess_logged_in(struct se_session *);\n"
if re.search('sess_get_index\)\(', fo):
buf += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *se_sess)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_sess_get_index(struct se_session *);\n"
if re.search('write_pending\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending(struct se_cmd *);\n"
if re.search('write_pending_status\)\(', fo):
buf += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_write_pending_status(struct se_cmd *);\n"
if re.search('set_default_node_attributes\)\(', fo):
buf += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *nacl)\n"
buf += "{\n"
buf += " return;\n"
buf += "}\n\n"
bufi += "void " + fabric_mod_name + "_set_default_node_attrs(struct se_node_acl *);\n"
if re.search('get_task_tag\)\(', fo):
buf += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u32 " + fabric_mod_name + "_get_task_tag(struct se_cmd *);\n"
if re.search('get_cmd_state\)\(', fo):
buf += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_get_cmd_state(struct se_cmd *);\n"
if re.search('queue_data_in\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_data_in(struct se_cmd *);\n"
if re.search('queue_status\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_status(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_status(struct se_cmd *);\n"
if re.search('queue_tm_rsp\)\(', fo):
buf += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_queue_tm_rsp(struct se_cmd *);\n"
if re.search('get_fabric_sense_len\)\(', fo):
buf += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_get_fabric_sense_len(void);\n"
if re.search('set_fabric_sense_len\)\(', fo):
buf += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *se_cmd, u32 sense_length)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "u16 " + fabric_mod_name + "_set_fabric_sense_len(struct se_cmd *, u32);\n"
if re.search('is_state_remove\)\(', fo):
buf += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *se_cmd)\n"
buf += "{\n"
buf += " return 0;\n"
buf += "}\n\n"
bufi += "int " + fabric_mod_name + "_is_state_remove(struct se_cmd *);\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
ret = pi.write(bufi)
if ret:
tcm_mod_err("Unable to write fi: " + fi)
pi.close()
return
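# Generate the per-module Makefile listing the generated object files.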
def tcm_mod_build_kbuild(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Makefile"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf += fabric_mod_name + "-objs := " + fabric_mod_name + "_fabric.o \\\n"
buf += " " + fabric_mod_name + "_configfs.o\n"
buf += "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name + ".o\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
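# Generate the per-module Kconfig entry (tristate, depends on TARGET_CORE && CONFIGFS_FS).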
def tcm_mod_build_kconfig(fabric_mod_dir_var, fabric_mod_name):
buf = ""
f = fabric_mod_dir_var + "/Kconfig"
print "Writing file: " + f
p = open(f, 'w')
if not p:
tcm_mod_err("Unable to open file: " + f)
buf = "config " + fabric_mod_name.upper() + "\n"
buf += " tristate \"" + fabric_mod_name.upper() + " fabric module\"\n"
buf += " depends on TARGET_CORE && CONFIGFS_FS\n"
buf += " default n\n"
buf += " ---help---\n"
buf += " Say Y here to enable the " + fabric_mod_name.upper() + " fabric module\n"
ret = p.write(buf)
if ret:
tcm_mod_err("Unable to write f: " + f)
p.close()
return
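# Append the new module's obj-$(CONFIG_...) line to drivers/target/Makefile.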
def tcm_mod_add_kbuild(tcm_dir, fabric_mod_name):
buf = "obj-$(CONFIG_" + fabric_mod_name.upper() + ") += " + fabric_mod_name.lower() + "/\n"
kbuild = tcm_dir + "/drivers/target/Makefile"
f = open(kbuild, 'a')
f.write(buf)
f.close()
return
def tcm_mod_add_kconfig(tcm_dir, fabric_mod_name):
buf = "source \"drivers/target/" + fabric_mod_name.lower() + "/Kconfig\"\n"
kconfig = tcm_dir + "/drivers/target/Kconfig"
f = open(kconfig, 'a')
f.write(buf)
f.close()
return
def main(modname, proto_ident):
# proto_ident = "FC"
# proto_ident = "SAS"
# proto_ident = "iSCSI"
tcm_dir = os.getcwd()
tcm_dir += "/../../"
print "tcm_dir: " + tcm_dir
fabric_mod_name = modname
fabric_mod_dir = tcm_dir + "drivers/target/" + fabric_mod_name
print "Set fabric_mod_name: " + fabric_mod_name
print "Set fabric_mod_dir: " + fabric_mod_dir
print "Using proto_ident: " + proto_ident
if proto_ident not in ("FC", "SAS", "iSCSI"):
print "Unsupported proto_ident: " + proto_ident
sys.exit(1)
ret = tcm_mod_create_module_subdir(fabric_mod_dir)
if ret:
print "tcm_mod_create_module_subdir() failed because module already exists!"
sys.exit(1)
tcm_mod_build_base_includes(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_scan_fabric_ops(tcm_dir)
tcm_mod_dump_fabric_ops(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_configfs(proto_ident, fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kbuild(fabric_mod_dir, fabric_mod_name)
tcm_mod_build_kconfig(fabric_mod_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Makefile..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kbuild(tcm_dir, fabric_mod_name)
input = raw_input("Would you like to add " + fabric_mod_name + "to drivers/target/Kconfig..? [yes,no]: ")
if input == "yes" or input == "y":
tcm_mod_add_kconfig(tcm_dir, fabric_mod_name)
return
parser = optparse.OptionParser()
parser.add_option('-m', '--modulename', help='Module name', dest='modname',
action='store', nargs=1, type='string')
parser.add_option('-p', '--protoident', help='Protocol Ident', dest='protoident',
action='store', nargs=1, type='string')
(opts, args) = parser.parse_args()
mandatories = ['modname', 'protoident']
for m in mandatories:
if not opts.__dict__[m]:
print "mandatory option is missing\n"
parser.print_help()
exit(-1)
if __name__ == "__main__":
main(str(opts.modname), opts.protoident)
|
gpl-2.0
|
BjoernT/python-openstackclient
|
openstackclient/tests/fakes.py
|
1
|
5201
|
# Copyright 2013 Nebula Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import json
import mock
import six
import sys
from keystoneauth1 import fixture
import requests
AUTH_TOKEN = "foobar"
AUTH_URL = "http://0.0.0.0"
USERNAME = "itchy"
PASSWORD = "scratchy"
PROJECT_NAME = "poochie"
REGION_NAME = "richie"
INTERFACE = "catchy"
VERSION = "3"
TEST_RESPONSE_DICT = fixture.V2Token(token_id=AUTH_TOKEN,
user_name=USERNAME)
_s = TEST_RESPONSE_DICT.add_service('identity', name='keystone')
_s.add_endpoint(AUTH_URL + '/v2.0')
TEST_RESPONSE_DICT_V3 = fixture.V3Token(user_name=USERNAME)
TEST_RESPONSE_DICT_V3.set_project_scope()
TEST_VERSIONS = fixture.DiscoveryList(href=AUTH_URL)
class FakeStdout(object):
def __init__(self):
self.content = []
def write(self, text):
self.content.append(text)
def make_string(self):
result = ''
for line in self.content:
result = result + line
return result
class FakeLog(object):
def __init__(self):
self.messages = {}
def debug(self, msg):
self.messages['debug'] = msg
def info(self, msg):
self.messages['info'] = msg
def warning(self, msg):
self.messages['warning'] = msg
def error(self, msg):
self.messages['error'] = msg
def critical(self, msg):
self.messages['critical'] = msg
class FakeApp(object):
def __init__(self, _stdout, _log):
self.stdout = _stdout
self.client_manager = None
self.stdin = sys.stdin
self.stdout = _stdout or sys.stdout
self.stderr = sys.stderr
self.log = _log
class FakeClient(object):
def __init__(self, **kwargs):
self.endpoint = kwargs['endpoint']
self.token = kwargs['token']
class FakeClientManager(object):
def __init__(self):
self.compute = None
self.identity = None
self.image = None
self.object_store = None
self.volume = None
self.network = None
self.session = None
self.auth_ref = None
self.auth_plugin_name = None
def get_configuration(self):
return {
'auth': {
'username': USERNAME,
'password': PASSWORD,
'token': AUTH_TOKEN,
},
'region': REGION_NAME,
'identity_api_version': VERSION,
}
class FakeModule(object):
def __init__(self, name, version):
self.name = name
self.__version__ = version
class FakeResource(object):
def __init__(self, manager=None, info={}, loaded=False, methods={}):
"""Set attributes and methods for a resource.
:param manager:
The resource manager
:param Dictionary info:
A dictionary with all attributes
:param bool loaded:
True if the resource is loaded in memory
:param Dictionary methods:
A dictionary with all methods
"""
self.__name__ = type(self).__name__
self.manager = manager
self._info = info
self._add_details(info)
self._add_methods(methods)
self._loaded = loaded
def _add_details(self, info):
for (k, v) in six.iteritems(info):
setattr(self, k, v)
def _add_methods(self, methods):
"""Fake methods with MagicMock objects.
For each <@key, @value> pairs in methods, add an callable MagicMock
object named @key as an attribute, and set the mock's return_value to
@value. When users access the attribute with (), @value will be
returned, which looks like a function call.
"""
for (name, ret) in six.iteritems(methods):
method = mock.MagicMock(return_value=ret)
setattr(self, name, method)
def __repr__(self):
reprkeys = sorted(k for k in self.__dict__.keys() if k[0] != '_' and
k != 'manager')
info = ", ".join("%s=%s" % (k, getattr(self, k)) for k in reprkeys)
return "<%s %s>" % (self.__class__.__name__, info)
class FakeResponse(requests.Response):
def __init__(self, headers={}, status_code=200, data=None, encoding=None):
super(FakeResponse, self).__init__()
self.status_code = status_code
self.headers.update(headers)
self._content = json.dumps(data)
if not isinstance(self._content, six.binary_type):
self._content = self._content.encode()
class FakeModel(dict):
def __getattr__(self, key):
try:
return self[key]
except KeyError:
raise AttributeError(key)
|
apache-2.0
|
piotrpawlaczek/suds-jurko
|
suds/xsd/__init__.py
|
18
|
2609
|
# This program is free software; you can redistribute it and/or modify
# it under the terms of the (LGPL) GNU Lesser General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library Lesser General Public License for more details at
# ( http://www.gnu.org/licenses/lgpl.html ).
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# written by: Jeff Ortel ( [email protected] )
from suds import *
from suds.sax import Namespace, splitPrefix
def qualify(ref, resolvers, defns=Namespace.default):
"""
Get a reference that is I{qualified} by namespace.
@param ref: A referenced schema type name.
@type ref: str
@param resolvers: A list of objects to be used to resolve types.
@type resolvers: [L{sax.element.Element},]
@param defns: An optional target namespace used to qualify references
when no prefix is specified.
@type defns: A default namespace I{tuple: (prefix, uri)} used when the ref is not prefixed.
@return: A qualified reference.
@rtype: (name, namespace-uri)
"""
ns = None
p, n = splitPrefix(ref)
if p is not None:
if not isinstance(resolvers, (list, tuple)):
resolvers = (resolvers,)
for r in resolvers:
resolved = r.resolvePrefix(p)
if resolved[1] is not None:
ns = resolved
break
if ns is None:
raise Exception('prefix (%s) not resolved' % p)
else:
ns = defns
return (n, ns[1])
def isqref(object):
"""
Get whether the object is a I{qualified reference}.
@param object: An object to be tested.
@type object: I{any}
@rtype: boolean
@see: L{qualify}
"""
return (\
isinstance(object, tuple) and \
len(object) == 2 and \
isinstance(object[0], basestring) and \
isinstance(object[1], basestring))
class Filter:
def __init__(self, inclusive=False, *items):
self.inclusive = inclusive
self.items = items
def __contains__(self, x):
if self.inclusive:
result = ( x in self.items )
else:
result = ( x not in self.items )
return result
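# Usage sketch for Filter: by default it is exclusive, so membership means
# "not one of the listed items"; pass inclusive=True to invert that.
#
#   f = Filter(False, 'import', 'include')
#   'import' in f    # False -- listed items are filtered out
#   'element' in f   # True
#
#   g = Filter(True, 'import')
#   'import' in g    # True -- only listed items pass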
|
lgpl-3.0
|
ahmadshahwan/cohorte-runtime
|
python/src/lib/python/unidecode/x08d.py
|
252
|
4636
|
data = (
'Wei ', # 0x00
'Bai ', # 0x01
'Chen ', # 0x02
'Zhuan ', # 0x03
'Zhi ', # 0x04
'Zhui ', # 0x05
'Biao ', # 0x06
'Yun ', # 0x07
'Zeng ', # 0x08
'Tan ', # 0x09
'Zan ', # 0x0a
'Yan ', # 0x0b
'[?] ', # 0x0c
'Shan ', # 0x0d
'Wan ', # 0x0e
'Ying ', # 0x0f
'Jin ', # 0x10
'Gan ', # 0x11
'Xian ', # 0x12
'Zang ', # 0x13
'Bi ', # 0x14
'Du ', # 0x15
'Shu ', # 0x16
'Yan ', # 0x17
'[?] ', # 0x18
'Xuan ', # 0x19
'Long ', # 0x1a
'Gan ', # 0x1b
'Zang ', # 0x1c
'Bei ', # 0x1d
'Zhen ', # 0x1e
'Fu ', # 0x1f
'Yuan ', # 0x20
'Gong ', # 0x21
'Cai ', # 0x22
'Ze ', # 0x23
'Xian ', # 0x24
'Bai ', # 0x25
'Zhang ', # 0x26
'Huo ', # 0x27
'Zhi ', # 0x28
'Fan ', # 0x29
'Tan ', # 0x2a
'Pin ', # 0x2b
'Bian ', # 0x2c
'Gou ', # 0x2d
'Zhu ', # 0x2e
'Guan ', # 0x2f
'Er ', # 0x30
'Jian ', # 0x31
'Bi ', # 0x32
'Shi ', # 0x33
'Tie ', # 0x34
'Gui ', # 0x35
'Kuang ', # 0x36
'Dai ', # 0x37
'Mao ', # 0x38
'Fei ', # 0x39
'He ', # 0x3a
'Yi ', # 0x3b
'Zei ', # 0x3c
'Zhi ', # 0x3d
'Jia ', # 0x3e
'Hui ', # 0x3f
'Zi ', # 0x40
'Ren ', # 0x41
'Lu ', # 0x42
'Zang ', # 0x43
'Zi ', # 0x44
'Gai ', # 0x45
'Jin ', # 0x46
'Qiu ', # 0x47
'Zhen ', # 0x48
'Lai ', # 0x49
'She ', # 0x4a
'Fu ', # 0x4b
'Du ', # 0x4c
'Ji ', # 0x4d
'Shu ', # 0x4e
'Shang ', # 0x4f
'Si ', # 0x50
'Bi ', # 0x51
'Zhou ', # 0x52
'Geng ', # 0x53
'Pei ', # 0x54
'Tan ', # 0x55
'Lai ', # 0x56
'Feng ', # 0x57
'Zhui ', # 0x58
'Fu ', # 0x59
'Zhuan ', # 0x5a
'Sai ', # 0x5b
'Ze ', # 0x5c
'Yan ', # 0x5d
'Zan ', # 0x5e
'Yun ', # 0x5f
'Zeng ', # 0x60
'Shan ', # 0x61
'Ying ', # 0x62
'Gan ', # 0x63
'Chi ', # 0x64
'Xi ', # 0x65
'She ', # 0x66
'Nan ', # 0x67
'Xiong ', # 0x68
'Xi ', # 0x69
'Cheng ', # 0x6a
'He ', # 0x6b
'Cheng ', # 0x6c
'Zhe ', # 0x6d
'Xia ', # 0x6e
'Tang ', # 0x6f
'Zou ', # 0x70
'Zou ', # 0x71
'Li ', # 0x72
'Jiu ', # 0x73
'Fu ', # 0x74
'Zhao ', # 0x75
'Gan ', # 0x76
'Qi ', # 0x77
'Shan ', # 0x78
'Qiong ', # 0x79
'Qin ', # 0x7a
'Xian ', # 0x7b
'Ci ', # 0x7c
'Jue ', # 0x7d
'Qin ', # 0x7e
'Chi ', # 0x7f
'Ci ', # 0x80
'Chen ', # 0x81
'Chen ', # 0x82
'Die ', # 0x83
'Ju ', # 0x84
'Chao ', # 0x85
'Di ', # 0x86
'Se ', # 0x87
'Zhan ', # 0x88
'Zhu ', # 0x89
'Yue ', # 0x8a
'Qu ', # 0x8b
'Jie ', # 0x8c
'Chi ', # 0x8d
'Chu ', # 0x8e
'Gua ', # 0x8f
'Xue ', # 0x90
'Ci ', # 0x91
'Tiao ', # 0x92
'Duo ', # 0x93
'Lie ', # 0x94
'Gan ', # 0x95
'Suo ', # 0x96
'Cu ', # 0x97
'Xi ', # 0x98
'Zhao ', # 0x99
'Su ', # 0x9a
'Yin ', # 0x9b
'Ju ', # 0x9c
'Jian ', # 0x9d
'Que ', # 0x9e
'Tang ', # 0x9f
'Chuo ', # 0xa0
'Cui ', # 0xa1
'Lu ', # 0xa2
'Qu ', # 0xa3
'Dang ', # 0xa4
'Qiu ', # 0xa5
'Zi ', # 0xa6
'Ti ', # 0xa7
'Qu ', # 0xa8
'Chi ', # 0xa9
'Huang ', # 0xaa
'Qiao ', # 0xab
'Qiao ', # 0xac
'Yao ', # 0xad
'Zao ', # 0xae
'Ti ', # 0xaf
'[?] ', # 0xb0
'Zan ', # 0xb1
'Zan ', # 0xb2
'Zu ', # 0xb3
'Pa ', # 0xb4
'Bao ', # 0xb5
'Ku ', # 0xb6
'Ke ', # 0xb7
'Dun ', # 0xb8
'Jue ', # 0xb9
'Fu ', # 0xba
'Chen ', # 0xbb
'Jian ', # 0xbc
'Fang ', # 0xbd
'Zhi ', # 0xbe
'Sa ', # 0xbf
'Yue ', # 0xc0
'Pa ', # 0xc1
'Qi ', # 0xc2
'Yue ', # 0xc3
'Qiang ', # 0xc4
'Tuo ', # 0xc5
'Tai ', # 0xc6
'Yi ', # 0xc7
'Nian ', # 0xc8
'Ling ', # 0xc9
'Mei ', # 0xca
'Ba ', # 0xcb
'Die ', # 0xcc
'Ku ', # 0xcd
'Tuo ', # 0xce
'Jia ', # 0xcf
'Ci ', # 0xd0
'Pao ', # 0xd1
'Qia ', # 0xd2
'Zhu ', # 0xd3
'Ju ', # 0xd4
'Die ', # 0xd5
'Zhi ', # 0xd6
'Fu ', # 0xd7
'Pan ', # 0xd8
'Ju ', # 0xd9
'Shan ', # 0xda
'Bo ', # 0xdb
'Ni ', # 0xdc
'Ju ', # 0xdd
'Li ', # 0xde
'Gen ', # 0xdf
'Yi ', # 0xe0
'Ji ', # 0xe1
'Dai ', # 0xe2
'Xian ', # 0xe3
'Jiao ', # 0xe4
'Duo ', # 0xe5
'Zhu ', # 0xe6
'Zhuan ', # 0xe7
'Kua ', # 0xe8
'Zhuai ', # 0xe9
'Gui ', # 0xea
'Qiong ', # 0xeb
'Kui ', # 0xec
'Xiang ', # 0xed
'Chi ', # 0xee
'Lu ', # 0xef
'Beng ', # 0xf0
'Zhi ', # 0xf1
'Jia ', # 0xf2
'Tiao ', # 0xf3
'Cai ', # 0xf4
'Jian ', # 0xf5
'Ta ', # 0xf6
'Qiao ', # 0xf7
'Bi ', # 0xf8
'Xian ', # 0xf9
'Duo ', # 0xfa
'Ji ', # 0xfb
'Ju ', # 0xfc
'Ji ', # 0xfd
'Shu ', # 0xfe
'Tu ', # 0xff
)
|
apache-2.0
|
mkmelin/bedrock
|
tests/pages/partnerships.py
|
11
|
1341
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from selenium.webdriver.common.by import By
from pages.base import BasePage
class PartnershipsPage(BasePage):
URL_TEMPLATE = '/{locale}/about/partnerships'
_first_name_locator = (By.ID, 'first_name')
_last_name_locator = (By.ID, 'last_name')
_company_locator = (By.ID, 'company')
_email_locator = (By.ID, 'email')
_submit_request_locator = (By.ID, 'sf-form-submit')
_thank_you_locator = (By.ID, 'partner-form-success')
def type_first_name(self, value):
self.find_element(*self._first_name_locator).send_keys(value)
def type_last_name(self, value):
self.find_element(*self._last_name_locator).send_keys(value)
def type_company(self, value):
self.find_element(*self._company_locator).send_keys(value)
def type_email(self, value):
self.find_element(*self._email_locator).send_keys(value)
def submit_request(self):
self.find_element(*self._submit_request_locator).click()
self.wait.until(lambda s: self.request_successful)
@property
def request_successful(self):
return self.is_element_displayed(*self._thank_you_locator)
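# Usage sketch (names are illustrative; BasePage is assumed to follow the
# usual pypom convention of taking a driver and a base URL):
#
#   page = PartnershipsPage(selenium, base_url, locale='en-US').open()
#   page.type_first_name('Jane')
#   page.type_last_name('Doe')
#   page.type_company('Example Corp')
#   page.type_email('jane@example.com')
#   page.submit_request()          # waits until request_successful is True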
|
mpl-2.0
|
scottlawsonbc/audio-reactive-led-strip
|
python/visualization.py
|
2
|
12877
|
from __future__ import print_function
from __future__ import division
import time
import numpy as np
from scipy.ndimage.filters import gaussian_filter1d
import config
import microphone
import dsp
import led
_time_prev = time.time() * 1000.0
"""The previous time that the frames_per_second() function was called"""
_fps = dsp.ExpFilter(val=config.FPS, alpha_decay=0.2, alpha_rise=0.2)
"""The low-pass filter used to estimate frames-per-second"""
def frames_per_second():
"""Return the estimated frames per second
Returns the current estimate for frames-per-second (FPS).
FPS is estimated by measuring the amount of time that has elapsed since
this function was previously called. The FPS estimate is low-pass filtered
to reduce noise.
This function is intended to be called one time for every iteration of
the program's main loop.
Returns
-------
fps : float
Estimated frames-per-second. This value is low-pass filtered
to reduce noise.
"""
global _time_prev, _fps
time_now = time.time() * 1000.0
dt = time_now - _time_prev
_time_prev = time_now
if dt == 0.0:
return _fps.value
return _fps.update(1000.0 / dt)
def memoize(function):
"""Provides a decorator for memoizing functions"""
from functools import wraps
memo = {}
@wraps(function)
def wrapper(*args):
if args in memo:
return memo[args]
else:
rv = function(*args)
memo[args] = rv
return rv
return wrapper
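# Usage sketch: calls that repeat the same positional arguments are served
# from the memo dict instead of being recomputed.
#
#   @memoize
#   def slow_square(x):
#       return x * x
#
#   slow_square(4)   # computed once and cached
#   slow_square(4)   # returned from the cache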
@memoize
def _normalized_linspace(size):
return np.linspace(0, 1, size)
def interpolate(y, new_length):
"""Intelligently resizes the array by linearly interpolating the values
Parameters
----------
y : np.array
Array that should be resized
new_length : int
The length of the new interpolated array
Returns
-------
z : np.array
New array with length of new_length that contains the interpolated
values of y.
"""
if len(y) == new_length:
return y
x_old = _normalized_linspace(len(y))
x_new = _normalized_linspace(new_length)
z = np.interp(x_new, x_old, y)
return z
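# Worked example: stretching three samples onto five points.
#
#   interpolate(np.array([0.0, 1.0, 0.0]), 5)
#   # -> array([0. , 0.5, 1. , 0.5, 0. ])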
r_filt = dsp.ExpFilter(np.tile(0.01, config.N_PIXELS // 2),
alpha_decay=0.2, alpha_rise=0.99)
g_filt = dsp.ExpFilter(np.tile(0.01, config.N_PIXELS // 2),
alpha_decay=0.05, alpha_rise=0.3)
b_filt = dsp.ExpFilter(np.tile(0.01, config.N_PIXELS // 2),
alpha_decay=0.1, alpha_rise=0.5)
common_mode = dsp.ExpFilter(np.tile(0.01, config.N_PIXELS // 2),
alpha_decay=0.99, alpha_rise=0.01)
p_filt = dsp.ExpFilter(np.tile(1, (3, config.N_PIXELS // 2)),
alpha_decay=0.1, alpha_rise=0.99)
p = np.tile(1.0, (3, config.N_PIXELS // 2))
gain = dsp.ExpFilter(np.tile(0.01, config.N_FFT_BINS),
alpha_decay=0.001, alpha_rise=0.99)
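# Note on the parameters above: dsp.ExpFilter is an asymmetric one-pole
# smoother, roughly value = alpha * x + (1 - alpha) * value, where alpha is
# alpha_rise while the input is increasing and alpha_decay while it is
# falling. A high alpha_rise with a low alpha_decay (as in r_filt) reacts
# almost instantly to onsets but releases slowly.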
def visualize_scroll(y):
"""Effect that originates in the center and scrolls outwards"""
global p
y = y**2.0
gain.update(y)
y /= gain.value
y *= 255.0
r = int(np.max(y[:len(y) // 3]))
g = int(np.max(y[len(y) // 3: 2 * len(y) // 3]))
b = int(np.max(y[2 * len(y) // 3:]))
# Scrolling effect window
p[:, 1:] = p[:, :-1]
p *= 0.98
p = gaussian_filter1d(p, sigma=0.2)
# Create new color originating at the center
p[0, 0] = r
p[1, 0] = g
p[2, 0] = b
# Update the LED strip
return np.concatenate((p[:, ::-1], p), axis=1)
def visualize_energy(y):
"""Effect that expands from the center with increasing sound energy"""
global p
y = np.copy(y)
gain.update(y)
y /= gain.value
# Scale by the width of the LED strip
y *= float((config.N_PIXELS // 2) - 1)
# Map color channels according to energy in the different freq bands
scale = 0.9
r = int(np.mean(y[:len(y) // 3]**scale))
g = int(np.mean(y[len(y) // 3: 2 * len(y) // 3]**scale))
b = int(np.mean(y[2 * len(y) // 3:]**scale))
# Assign color to different frequency regions
p[0, :r] = 255.0
p[0, r:] = 0.0
p[1, :g] = 255.0
p[1, g:] = 0.0
p[2, :b] = 255.0
p[2, b:] = 0.0
p_filt.update(p)
p = np.round(p_filt.value)
# Apply substantial blur to smooth the edges
p[0, :] = gaussian_filter1d(p[0, :], sigma=4.0)
p[1, :] = gaussian_filter1d(p[1, :], sigma=4.0)
p[2, :] = gaussian_filter1d(p[2, :], sigma=4.0)
# Set the new pixel value
return np.concatenate((p[:, ::-1], p), axis=1)
_prev_spectrum = np.tile(0.01, config.N_PIXELS // 2)
def visualize_spectrum(y):
"""Effect that maps the Mel filterbank frequencies onto the LED strip"""
global _prev_spectrum
y = np.copy(interpolate(y, config.N_PIXELS // 2))
common_mode.update(y)
diff = y - _prev_spectrum
_prev_spectrum = np.copy(y)
# Color channel mappings
r = r_filt.update(y - common_mode.value)
g = np.abs(diff)
b = b_filt.update(np.copy(y))
# Mirror the color channels for symmetric output
r = np.concatenate((r[::-1], r))
g = np.concatenate((g[::-1], g))
b = np.concatenate((b[::-1], b))
output = np.array([r, g, b]) * 255
return output
fft_plot_filter = dsp.ExpFilter(np.tile(1e-1, config.N_FFT_BINS),
alpha_decay=0.5, alpha_rise=0.99)
mel_gain = dsp.ExpFilter(np.tile(1e-1, config.N_FFT_BINS),
alpha_decay=0.01, alpha_rise=0.99)
mel_smoothing = dsp.ExpFilter(np.tile(1e-1, config.N_FFT_BINS),
alpha_decay=0.5, alpha_rise=0.99)
volume = dsp.ExpFilter(config.MIN_VOLUME_THRESHOLD,
alpha_decay=0.02, alpha_rise=0.02)
fft_window = np.hamming(int(config.MIC_RATE / config.FPS) * config.N_ROLLING_HISTORY)
prev_fps_update = time.time()
def microphone_update(audio_samples):
global y_roll, prev_rms, prev_exp, prev_fps_update
# Normalize samples between 0 and 1
y = audio_samples / 2.0**15
# Construct a rolling window of audio samples
y_roll[:-1] = y_roll[1:]
y_roll[-1, :] = np.copy(y)
y_data = np.concatenate(y_roll, axis=0).astype(np.float32)
vol = np.max(np.abs(y_data))
if vol < config.MIN_VOLUME_THRESHOLD:
print('No audio input. Volume below threshold. Volume:', vol)
led.pixels = np.tile(0, (3, config.N_PIXELS))
led.update()
else:
# Transform audio input into the frequency domain
N = len(y_data)
N_zeros = 2**int(np.ceil(np.log2(N))) - N
# Pad with zeros until the next power of two
y_data *= fft_window
y_padded = np.pad(y_data, (0, N_zeros), mode='constant')
YS = np.abs(np.fft.rfft(y_padded)[:N // 2])
# Construct a Mel filterbank from the FFT data
mel = np.atleast_2d(YS).T * dsp.mel_y.T
# Scale data to values more suitable for visualization
mel = np.sum(mel, axis=0)
mel = mel**2.0
# Gain normalization
mel_gain.update(np.max(gaussian_filter1d(mel, sigma=1.0)))
mel /= mel_gain.value
mel = mel_smoothing.update(mel)
# Map filterbank output onto LED strip
output = visualization_effect(mel)
led.pixels = output
led.update()
if config.USE_GUI:
# Plot filterbank output
x = np.linspace(config.MIN_FREQUENCY, config.MAX_FREQUENCY, len(mel))
mel_curve.setData(x=x, y=fft_plot_filter.update(mel))
# Plot the color channels
r_curve.setData(y=led.pixels[0])
g_curve.setData(y=led.pixels[1])
b_curve.setData(y=led.pixels[2])
if config.USE_GUI:
app.processEvents()
if config.DISPLAY_FPS:
fps = frames_per_second()
if time.time() - 0.5 > prev_fps_update:
prev_fps_update = time.time()
print('FPS {:.0f} / {:.0f}'.format(fps, config.FPS))
# Number of audio samples to read every time frame
samples_per_frame = int(config.MIC_RATE / config.FPS)
# Array containing the rolling audio sample window
y_roll = np.random.rand(config.N_ROLLING_HISTORY, samples_per_frame) / 1e16
visualization_effect = visualize_spectrum
"""Visualization effect to display on the LED strip"""
if __name__ == '__main__':
if config.USE_GUI:
import pyqtgraph as pg
from pyqtgraph.Qt import QtGui, QtCore
# Create GUI window
app = QtGui.QApplication([])
view = pg.GraphicsView()
layout = pg.GraphicsLayout(border=(100,100,100))
view.setCentralItem(layout)
view.show()
view.setWindowTitle('Visualization')
view.resize(800,600)
# Mel filterbank plot
fft_plot = layout.addPlot(title='Filterbank Output', colspan=3)
fft_plot.setRange(yRange=[-0.1, 1.2])
fft_plot.disableAutoRange(axis=pg.ViewBox.YAxis)
x_data = np.array(range(1, config.N_FFT_BINS + 1))
mel_curve = pg.PlotCurveItem()
mel_curve.setData(x=x_data, y=x_data*0)
fft_plot.addItem(mel_curve)
# Visualization plot
layout.nextRow()
led_plot = layout.addPlot(title='Visualization Output', colspan=3)
led_plot.setRange(yRange=[-5, 260])
led_plot.disableAutoRange(axis=pg.ViewBox.YAxis)
# Pen for each of the color channel curves
r_pen = pg.mkPen((255, 30, 30, 200), width=4)
g_pen = pg.mkPen((30, 255, 30, 200), width=4)
b_pen = pg.mkPen((30, 30, 255, 200), width=4)
# Color channel curves
r_curve = pg.PlotCurveItem(pen=r_pen)
g_curve = pg.PlotCurveItem(pen=g_pen)
b_curve = pg.PlotCurveItem(pen=b_pen)
# Define x data
x_data = np.array(range(1, config.N_PIXELS + 1))
r_curve.setData(x=x_data, y=x_data*0)
g_curve.setData(x=x_data, y=x_data*0)
b_curve.setData(x=x_data, y=x_data*0)
# Add curves to plot
led_plot.addItem(r_curve)
led_plot.addItem(g_curve)
led_plot.addItem(b_curve)
# Frequency range label
freq_label = pg.LabelItem('')
# Frequency slider
def freq_slider_change(tick):
minf = freq_slider.tickValue(0)**2.0 * (config.MIC_RATE / 2.0)
maxf = freq_slider.tickValue(1)**2.0 * (config.MIC_RATE / 2.0)
t = 'Frequency range: {:.0f} - {:.0f} Hz'.format(minf, maxf)
freq_label.setText(t)
config.MIN_FREQUENCY = minf
config.MAX_FREQUENCY = maxf
dsp.create_mel_bank()
freq_slider = pg.TickSliderItem(orientation='bottom', allowAdd=False)
freq_slider.addTick((config.MIN_FREQUENCY / (config.MIC_RATE / 2.0))**0.5)
freq_slider.addTick((config.MAX_FREQUENCY / (config.MIC_RATE / 2.0))**0.5)
freq_slider.tickMoveFinished = freq_slider_change
freq_label.setText('Frequency range: {} - {} Hz'.format(
config.MIN_FREQUENCY,
config.MAX_FREQUENCY))
# Effect selection
active_color = '#16dbeb'
inactive_color = '#FFFFFF'
def energy_click(x):
global visualization_effect
visualization_effect = visualize_energy
energy_label.setText('Energy', color=active_color)
scroll_label.setText('Scroll', color=inactive_color)
spectrum_label.setText('Spectrum', color=inactive_color)
def scroll_click(x):
global visualization_effect
visualization_effect = visualize_scroll
energy_label.setText('Energy', color=inactive_color)
scroll_label.setText('Scroll', color=active_color)
spectrum_label.setText('Spectrum', color=inactive_color)
def spectrum_click(x):
global visualization_effect
visualization_effect = visualize_spectrum
energy_label.setText('Energy', color=inactive_color)
scroll_label.setText('Scroll', color=inactive_color)
spectrum_label.setText('Spectrum', color=active_color)
# Create effect "buttons" (labels with click event)
energy_label = pg.LabelItem('Energy')
scroll_label = pg.LabelItem('Scroll')
spectrum_label = pg.LabelItem('Spectrum')
energy_label.mousePressEvent = energy_click
scroll_label.mousePressEvent = scroll_click
spectrum_label.mousePressEvent = spectrum_click
energy_click(0)
# Layout
layout.nextRow()
layout.addItem(freq_label, colspan=3)
layout.nextRow()
layout.addItem(freq_slider, colspan=3)
layout.nextRow()
layout.addItem(energy_label)
layout.addItem(scroll_label)
layout.addItem(spectrum_label)
# Initialize LEDs
led.update()
# Start listening to live audio stream
microphone.start_stream(microphone_update)
|
mit
|
bstell/TachyFont
|
run_time/src/gae_server/third_party/fonttools/Lib/fontTools/ttLib/tables/_p_o_s_t.py
|
8
|
8856
|
from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
from fontTools import ttLib
from fontTools.ttLib.standardGlyphOrder import standardGlyphOrder
from fontTools.misc import sstruct
from fontTools.misc.textTools import safeEval, readHex
from . import DefaultTable
import sys
import struct
import array
postFormat = """
>
formatType: 16.16F
italicAngle: 16.16F # italic angle in degrees
underlinePosition: h
underlineThickness: h
isFixedPitch: L
minMemType42: L # minimum memory if TrueType font is downloaded
maxMemType42: L # maximum memory if TrueType font is downloaded
minMemType1: L # minimum memory if Type1 font is downloaded
maxMemType1: L # maximum memory if Type1 font is downloaded
"""
postFormatSize = sstruct.calcsize(postFormat)
class table__p_o_s_t(DefaultTable.DefaultTable):
def decompile(self, data, ttFont):
sstruct.unpack(postFormat, data[:postFormatSize], self)
data = data[postFormatSize:]
if self.formatType == 1.0:
self.decode_format_1_0(data, ttFont)
elif self.formatType == 2.0:
self.decode_format_2_0(data, ttFont)
elif self.formatType == 3.0:
self.decode_format_3_0(data, ttFont)
elif self.formatType == 4.0:
self.decode_format_4_0(data, ttFont)
else:
# unsupported format
raise ttLib.TTLibError("'post' table format %f not supported" % self.formatType)
def compile(self, ttFont):
data = sstruct.pack(postFormat, self)
if self.formatType == 1.0:
pass # we're done
elif self.formatType == 2.0:
data = data + self.encode_format_2_0(ttFont)
elif self.formatType == 3.0:
pass # we're done
elif self.formatType == 4.0:
data = data + self.encode_format_4_0(ttFont)
else:
# unsupported format
raise ttLib.TTLibError("'post' table format %f not supported" % self.formatType)
return data
def getGlyphOrder(self):
"""This function will get called by a ttLib.TTFont instance.
Do not call this function yourself, use TTFont().getGlyphOrder()
or its relatives instead!
"""
if not hasattr(self, "glyphOrder"):
raise ttLib.TTLibError("illegal use of getGlyphOrder()")
glyphOrder = self.glyphOrder
del self.glyphOrder
return glyphOrder
def decode_format_1_0(self, data, ttFont):
self.glyphOrder = standardGlyphOrder[:ttFont["maxp"].numGlyphs]
def decode_format_2_0(self, data, ttFont):
numGlyphs, = struct.unpack(">H", data[:2])
numGlyphs = int(numGlyphs)
if numGlyphs > ttFont['maxp'].numGlyphs:
# Assume the numGlyphs field is bogus, so sync with maxp.
# I've seen this in one font, and if the assumption is
# wrong elsewhere, well, so be it: it's hard enough to
# work around _one_ non-conforming post format...
numGlyphs = ttFont['maxp'].numGlyphs
data = data[2:]
indices = array.array("H")
indices.fromstring(data[:2*numGlyphs])
if sys.byteorder != "big":
indices.byteswap()
data = data[2*numGlyphs:]
self.extraNames = extraNames = unpackPStrings(data)
self.glyphOrder = glyphOrder = [""] * int(ttFont['maxp'].numGlyphs)
for glyphID in range(numGlyphs):
index = indices[glyphID]
if index > 32767: # reserved for future use; ignore
name = ""
elif index > 257:
name = extraNames[index-258]
else:
# fetch names from standard list
name = standardGlyphOrder[index]
glyphOrder[glyphID] = name
self.build_psNameMapping(ttFont)
def build_psNameMapping(self, ttFont):
mapping = {}
allNames = {}
for i in range(ttFont['maxp'].numGlyphs):
glyphName = psName = self.glyphOrder[i]
if glyphName == "":
glyphName = "glyph%.5d" % i
if glyphName in allNames:
# make up a new glyphName that's unique
n = allNames[glyphName]
while (glyphName + "#" + str(n)) in allNames:
n += 1
allNames[glyphName] = n + 1
glyphName = glyphName + "#" + str(n)
self.glyphOrder[i] = glyphName
allNames[glyphName] = 1
if glyphName != psName:
mapping[glyphName] = psName
self.mapping = mapping
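# Example of the clash handling above: if two glyphs are both named "A",
# the second becomes "A#1" in glyphOrder and mapping records {"A#1": "A"},
# so encode_format_2_0 can emit the original PostScript name.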
def decode_format_3_0(self, data, ttFont):
# Setting self.glyphOrder to None will cause the TTFont object
# to try to construct glyph names from a Unicode cmap table.
self.glyphOrder = None
def decode_format_4_0(self, data, ttFont):
from fontTools import agl
numGlyphs = ttFont['maxp'].numGlyphs
indices = array.array("H")
indices.fromstring(data)
if sys.byteorder != "big":
indices.byteswap()
# In some older fonts, the size of the post table doesn't match
# the number of glyphs. Sometimes it's bigger, sometimes smaller.
self.glyphOrder = glyphOrder = [''] * int(numGlyphs)
for i in range(min(len(indices),numGlyphs)):
if indices[i] == 0xFFFF:
self.glyphOrder[i] = ''
elif indices[i] in agl.UV2AGL:
self.glyphOrder[i] = agl.UV2AGL[indices[i]]
else:
self.glyphOrder[i] = "uni%04X" % indices[i]
self.build_psNameMapping(ttFont)
def encode_format_2_0(self, ttFont):
numGlyphs = ttFont['maxp'].numGlyphs
glyphOrder = ttFont.getGlyphOrder()
assert len(glyphOrder) == numGlyphs
indices = array.array("H")
extraDict = {}
extraNames = self.extraNames
for i in range(len(extraNames)):
extraDict[extraNames[i]] = i
for glyphID in range(numGlyphs):
glyphName = glyphOrder[glyphID]
if glyphName in self.mapping:
psName = self.mapping[glyphName]
else:
psName = glyphName
if psName in extraDict:
index = 258 + extraDict[psName]
elif psName in standardGlyphOrder:
index = standardGlyphOrder.index(psName)
else:
index = 258 + len(extraNames)
assert index < 32768, "Too many glyph names for 'post' table format 2"
extraDict[psName] = len(extraNames)
extraNames.append(psName)
indices.append(index)
if sys.byteorder != "big":
indices.byteswap()
return struct.pack(">H", numGlyphs) + indices.tostring() + packPStrings(extraNames)
def encode_format_4_0(self, ttFont):
from fontTools import agl
numGlyphs = ttFont['maxp'].numGlyphs
glyphOrder = ttFont.getGlyphOrder()
assert len(glyphOrder) == numGlyphs
indices = array.array("H")
for glyphID in glyphOrder:
glyphID = glyphID.split('#')[0]
if glyphID in agl.AGL2UV:
indices.append(agl.AGL2UV[glyphID])
elif len(glyphID) == 7 and glyphID[:3] == 'uni':
indices.append(int(glyphID[3:],16))
else:
indices.append(0xFFFF)
if sys.byteorder != "big":
indices.byteswap()
return indices.tostring()
def toXML(self, writer, ttFont):
formatstring, names, fixes = sstruct.getformat(postFormat)
for name in names:
value = getattr(self, name)
writer.simpletag(name, value=value)
writer.newline()
if hasattr(self, "mapping"):
writer.begintag("psNames")
writer.newline()
writer.comment("This file uses unique glyph names based on the information\n"
"found in the 'post' table. Since these names might not be unique,\n"
"we have to invent artificial names in case of clashes. In order to\n"
"be able to retain the original information, we need a name to\n"
"ps name mapping for those cases where they differ. That's what\n"
"you see below.\n")
writer.newline()
items = sorted(self.mapping.items())
for name, psName in items:
writer.simpletag("psName", name=name, psName=psName)
writer.newline()
writer.endtag("psNames")
writer.newline()
if hasattr(self, "extraNames"):
writer.begintag("extraNames")
writer.newline()
writer.comment("following are the name that are not taken from the standard Mac glyph order")
writer.newline()
for name in self.extraNames:
writer.simpletag("psName", name=name)
writer.newline()
writer.endtag("extraNames")
writer.newline()
if hasattr(self, "data"):
writer.begintag("hexdata")
writer.newline()
writer.dumphex(self.data)
writer.endtag("hexdata")
writer.newline()
def fromXML(self, name, attrs, content, ttFont):
if name not in ("psNames", "extraNames", "hexdata"):
setattr(self, name, safeEval(attrs["value"]))
elif name == "psNames":
self.mapping = {}
for element in content:
if not isinstance(element, tuple):
continue
name, attrs, content = element
if name == "psName":
self.mapping[attrs["name"]] = attrs["psName"]
elif name == "extraNames":
self.extraNames = []
for element in content:
if not isinstance(element, tuple):
continue
name, attrs, content = element
if name == "psName":
self.extraNames.append(attrs["name"])
else:
self.data = readHex(content)
def unpackPStrings(data):
strings = []
index = 0
dataLen = len(data)
while index < dataLen:
length = byteord(data[index])
strings.append(tostr(data[index+1:index+1+length], encoding="latin1"))
index = index + 1 + length
return strings
def packPStrings(strings):
data = b""
for s in strings:
data = data + bytechr(len(s)) + tobytes(s, encoding="latin1")
return data
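# Round-trip sketch: each string is stored as a length byte followed by its
# latin-1 bytes, so for names shorter than 256 bytes:
#
#   unpackPStrings(packPStrings(["alpha", "beta"]))
#   # -> ['alpha', 'beta']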
|
apache-2.0
|
Ravenm/2143-OOP-NASH
|
python3env/Lib/site-packages/PIL/ImageStat.py
|
88
|
3847
|
#
# The Python Imaging Library.
# $Id$
#
# global image statistics
#
# History:
# 1996-04-05 fl Created
# 1997-05-21 fl Added mask; added rms, var, stddev attributes
# 1997-08-05 fl Added median
# 1998-07-05 hk Fixed integer overflow error
#
# Notes:
# This class shows how to implement delayed evaluation of attributes.
# To get a certain value, simply access the corresponding attribute.
# The __getattr__ dispatcher takes care of the rest.
#
# Copyright (c) Secret Labs AB 1997.
# Copyright (c) Fredrik Lundh 1996-97.
#
# See the README file for information on usage and redistribution.
#
import math
import operator
import functools
class Stat(object):
def __init__(self, image_or_list, mask=None):
try:
if mask:
self.h = image_or_list.histogram(mask)
else:
self.h = image_or_list.histogram()
except AttributeError:
self.h = image_or_list # assume it to be a histogram list
if not isinstance(self.h, list):
raise TypeError("first argument must be image or list")
self.bands = list(range(len(self.h) // 256))
def __getattr__(self, id):
"Calculate missing attribute"
if id[:4] == "_get":
raise AttributeError(id)
# calculate missing attribute
v = getattr(self, "_get" + id)()
setattr(self, id, v)
return v
def _getextrema(self):
"Get min/max values for each band in the image"
def minmax(histogram):
n = 255
x = 0
for i in range(256):
if histogram[i]:
n = min(n, i)
x = max(x, i)
return n, x # returns (255, 0) if there's no data in the histogram
v = []
for i in range(0, len(self.h), 256):
v.append(minmax(self.h[i:]))
return v
def _getcount(self):
"Get total number of pixels in each layer"
v = []
for i in range(0, len(self.h), 256):
v.append(functools.reduce(operator.add, self.h[i:i+256]))
return v
def _getsum(self):
"Get sum of all pixels in each layer"
v = []
for i in range(0, len(self.h), 256):
layerSum = 0.0
for j in range(256):
layerSum += j * self.h[i + j]
v.append(layerSum)
return v
def _getsum2(self):
"Get squared sum of all pixels in each layer"
v = []
for i in range(0, len(self.h), 256):
sum2 = 0.0
for j in range(256):
sum2 += (j ** 2) * float(self.h[i + j])
v.append(sum2)
return v
def _getmean(self):
"Get average pixel level for each layer"
v = []
for i in self.bands:
v.append(self.sum[i] / self.count[i])
return v
def _getmedian(self):
"Get median pixel level for each layer"
v = []
for i in self.bands:
s = 0
l = self.count[i]//2
b = i * 256
for j in range(256):
s = s + self.h[b+j]
if s > l:
break
v.append(j)
return v
def _getrms(self):
"Get RMS for each layer"
v = []
for i in self.bands:
v.append(math.sqrt(self.sum2[i] / self.count[i]))
return v
def _getvar(self):
"Get variance for each layer"
v = []
for i in self.bands:
n = self.count[i]
v.append((self.sum2[i]-(self.sum[i]**2.0)/n)/n)
return v
def _getstddev(self):
"Get standard deviation for each layer"
v = []
for i in self.bands:
v.append(math.sqrt(self.var[i]))
return v
Global = Stat # compatibility
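# Usage sketch of the delayed evaluation described in the header (the image
# file name is illustrative):
#
#   from PIL import Image
#   st = Stat(Image.open("photo.png"))
#   st.mean     # computed on first access via __getattr__, then cached
#   st.stddev   # derived from var, which derives from sum, sum2 and count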
|
cc0-1.0
|
PetePriority/home-assistant
|
homeassistant/components/abode/camera.py
|
4
|
2867
|
"""
This component provides HA camera support for Abode Security System.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/camera.abode/
"""
import logging
from datetime import timedelta
import requests
from homeassistant.components.abode import AbodeDevice, DOMAIN as ABODE_DOMAIN
from homeassistant.components.camera import Camera
from homeassistant.util import Throttle
DEPENDENCIES = ['abode']
MIN_TIME_BETWEEN_UPDATES = timedelta(seconds=90)
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up Abode camera devices."""
import abodepy.helpers.constants as CONST
import abodepy.helpers.timeline as TIMELINE
data = hass.data[ABODE_DOMAIN]
devices = []
for device in data.abode.get_devices(generic_type=CONST.TYPE_CAMERA):
if data.is_excluded(device):
continue
devices.append(AbodeCamera(data, device, TIMELINE.CAPTURE_IMAGE))
data.devices.extend(devices)
add_entities(devices)
class AbodeCamera(AbodeDevice, Camera):
"""Representation of an Abode camera."""
def __init__(self, data, device, event):
"""Initialize the Abode device."""
AbodeDevice.__init__(self, data, device)
Camera.__init__(self)
self._event = event
self._response = None
async def async_added_to_hass(self):
"""Subscribe Abode events."""
await super().async_added_to_hass()
self.hass.async_add_job(
self._data.abode.events.add_timeline_callback,
self._event, self._capture_callback
)
def capture(self):
"""Request a new image capture."""
return self._device.capture()
@Throttle(MIN_TIME_BETWEEN_UPDATES)
def refresh_image(self):
"""Find a new image on the timeline."""
if self._device.refresh_image():
self.get_image()
def get_image(self):
"""Attempt to download the most recent capture."""
if self._device.image_url:
try:
self._response = requests.get(
self._device.image_url, stream=True)
self._response.raise_for_status()
except requests.HTTPError as err:
_LOGGER.warning("Failed to get camera image: %s", err)
self._response = None
else:
self._response = None
def camera_image(self):
"""Get a camera image."""
self.refresh_image()
if self._response:
return self._response.content
return None
def _capture_callback(self, capture):
"""Update the image with the device then refresh device."""
self._device.update_image_location(capture)
self.get_image()
self.schedule_update_ha_state()
|
apache-2.0
|
thorwhalen/ut
|
ppi/binary_pairs_kv.py
|
1
|
8713
|
__author__ = 'thor'
import copy
from itertools import combinations
class EdgeStats(object):
def __init__(self):
self._count = CountVal(0.0)
self.a = KeyVal()
self.ab = KeyVal()
def count_data(self, item_iterator):
self.__init__()
for nodes in item_iterator:
self._count.increment()
for a in nodes:
self.a.add(KeyVal({a: Val(1.0)}))
for ab in combinations(nodes, 2):
self.ab.add(KeyVal({ab[0]: KeyVal({ab[1]: Val(1.0)})}))
self.ab.add(KeyVal({ab[1]: KeyVal({ab[0]: Val(1.0)})}))
def counts_to_probs(self, alpha=0, beta=0):
prior_num = Val(float(alpha))
prior_denom = Val(float(alpha + beta))
self.ab = (self.ab + prior_num) / (self.a + prior_denom)
self.a = (self.a + prior_num) / (self._count + prior_denom)
def __getitem__(self, item):
return self.ab.v.get(item, None)
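# Worked example for counts_to_probs: with alpha=1 and beta=1 (a Beta(1,1)
# prior), a pair seen together 3 times whose first node appeared 10 times
# gets probability (3 + 1) / (10 + 2) = 1/3; the smoothing keeps estimates
# for rarely seen pairs away from the extremes 0 and 1.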
# default functions
def get_a_list_from_item_default(pair_set):
return pair_set[0]
def get_b_list_from_item_default(pair_set):
return pair_set[1]
class BipartiteEdgeCounts(object):
"""
The class that manages the count data.
"""
# _count
# a_count
# b_count
# ab_count
# ba_count
def __init__(self, get_a_list_from_item=None, get_b_list_from_item=None):
self._count = CountVal(0.0)
self.a_count = KeyCount()
self.b_count = KeyCount()
self.ab_count = KeyCount()
self.ba_count = KeyCount()
self.get_a_list_from_item = get_a_list_from_item or get_a_list_from_item_default
self.get_b_list_from_item = get_b_list_from_item or get_b_list_from_item_default
def learn(self, item_iterator):
self.__init__()
for item in item_iterator:
self._count.increment()
a_list = self.get_a_list_from_item(item)
b_list = self.get_b_list_from_item(item)
for a in a_list:
self.a_count.increment(a)
for b in b_list:
self.b_count.increment(b)
for a in a_list:
for b in b_list:
self.ab_count.add(KeyVal({a: KeyVal({b: Val(1.0)})}))
self.ba_count.add(KeyVal({b: KeyVal({a: Val(1.0)})}))
class Val(object):
"""
The mother class of other Val classes.
A Val should hold a value and be able to add and subtract from it.
This mother class implements normal addition of floats, but should be overridden to
implement other types of values such as multiplication, addition of vectors,
merging of likelihoods etc.
Most of the time, you'll only need to override the add() and the sub() methods.
You may also want to override the default value. This value should act as the
'unit' or 'neutral' value of the add operation (therefore the sub operation as well).
For example, the unit value of multiplication (which will still be called "add") is 1.0.
"""
v = 0.0
def __init__(self, v):
if isinstance(v, Val):
self.v = copy.deepcopy(v.v)
else:
self.v = copy.deepcopy(v)
def add(self, y):
self.v = self.v + y.v
def sub(self, y):
self.v = self.v - y.v
def mul(self, y):
self.v = self.v * y.v
def div(self, y):
self.v = self.v / y.v
def unwrapped(self):
if hasattr(self.v, 'v'):
return self.v.unwrapped()
else:
return self.v
def map(self, fun):
if hasattr(self.v, 'v'):
self.v.map(fun)
else:
self.v = fun(self.v)
def __add__(self, y):
x = copy.deepcopy(self)
x.add(y)
return x
def __sub__(self, y):
x = copy.deepcopy(self)
x.sub(y)
return x
def __mul__(self, y):
x = copy.deepcopy(self)
x.mul(y)
return x
def __div__(self, y):
x = copy.deepcopy(self)
x.div(y)
return x
def __str__(self):
return str(self.v)
def __repr__(self):
return str(self.v)
class CountVal(Val):
v = 0.0
def __init__(self, v=0.0):
super(CountVal, self).__init__(v)
self.v = float(v)
def increment(self):
self.v += 1.0
class LHVal(Val):
"""
An LHVal manages a binary likelihood.
That is, it holds (as a single float) the binary likelihood distribution and allows one to
merge two such distributions.
"""
v = 0.5  # where the value will be stored
def __init__(self, v=.5):
super(LHVal, self).__init__(v)
self.v = float(v)
def mul(self, y):
self.v = (self.v * y.v) / (self.v * y.v + (1 - self.v) * (1 - y.v))
def div(self, y):
self.v = (self.v / y.v) / (self.v / y.v + (1 - self.v) / (1 - y.v))
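# Worked example for LHVal.mul: merging two independent binary evidences
# p = 0.8 and q = 0.6 gives
# (0.8 * 0.6) / (0.8 * 0.6 + 0.2 * 0.4) = 0.48 / 0.56 ~ 0.857,
# so two weak positives reinforce each other; 0.5 acts as the neutral value.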
class KeyVal(Val):
"""
Here the type of the value is a dict (to implement a map).
The addition of two dicts (therefore the add() method) v and w.
The add(val) method will here be defined to be a sum-update of the (key,value)
pairs of the
Extends a map so that one can add and subtract dict pairs by adding or subtracting
the (key-aligned) values
"""
def __init__(self, v=None):
if v is None:
self.v = dict()
else:
super(KeyVal, self).__init__(v)
def add(self, kv):
try:
if hasattr(kv.v, 'keys'):
for k in list(kv.v.keys()):
if k in list(self.v.keys()):
self.v[k].add(kv.v[k])
else:
self.v[k] = kv.v[k]
else:
try:
for k in list(self.v.keys()):
self.v[k].v = self.v[k].v + kv.v
except TypeError:
for k in list(self.v.keys()):
self.v[k].add(kv)
except AttributeError:
self.add(Val(kv))
def sub(self, kv):
try:
if hasattr(kv.v, 'keys'):
for k in list(kv.v.keys()):
if k in list(self.v.keys()):
self.v[k].sub(kv.v[k])
else:
try:
for k in list(self.v.keys()):
self.v[k].v = self.v[k].v - kv.v
except TypeError:
for k in list(self.v.keys()):
self.v[k].sub(kv)
except AttributeError:
self.sub(Val(kv))
def mul(self, kv):
try:
if hasattr(kv.v, 'keys'):
for k in list(kv.v.keys()):
if k in list(self.v.keys()):
self.v[k].mul(kv.v[k])
else:
self.v[k] = kv.v[k]
else:
try:
for k in list(self.v.keys()):
self.v[k].v = self.v[k].v * kv.v
except TypeError:
for k in list(self.v.keys()):
self.v[k].mul(kv)
except AttributeError:
self.mul(Val(kv))
def div(self, kv):
try:
if hasattr(kv.v, 'keys'):
for k in list(kv.v.keys()):
if k in list(self.v.keys()):
self.v[k].div(kv.v[k])
else:
try:
for k in list(self.v.keys()):
self.v[k].v = self.v[k].v / kv.v
except TypeError:
for k in list(self.v.keys()):
self.v[k].div(kv)
except AttributeError:
self.div(Val(kv))
def unwrapped(self):
return {k: v.unwrapped() for k, v in self.v.items()}
def map(self, fun):
for v in self.v.values():
v.map(fun)
def keys(self):
return list(self.v.keys())
def iteritems(self):
return iter(self.v.items())
def __getitem__(self, item):
return self.v.get(item, None)
class KeyCount(KeyVal):
# v = dict()
# init_val_constructor = None;
"""
Extends a map so that one can add and subtract dict pairs by adding or subtracting the (key-aligned) values
"""
def __init__(self, v=None):
if v is None:
self.v = dict()
else:
super(KeyCount, self).__init__(v)
def increment(self, k):
if k in self.v:
self.v[k].add(Val(1.0))
else:
self.v[k] = Val(1.0)
# if __name__ == "__main__":
# d = ut.daf.get.rand(nrows=9)
# s = d['A'].iloc[0:5]
# ss = d['B'].iloc[3:8]
# t = s + ss
# print t
|
mit
|
skosukhin/spack
|
lib/spack/external/jinja2/defaults.py
|
130
|
1323
|
# -*- coding: utf-8 -*-
"""
jinja2.defaults
~~~~~~~~~~~~~~~
Jinja default filters and tags.
:copyright: (c) 2017 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
from jinja2._compat import range_type
from jinja2.utils import generate_lorem_ipsum, Cycler, Joiner
# defaults for the parser / lexer
BLOCK_START_STRING = '{%'
BLOCK_END_STRING = '%}'
VARIABLE_START_STRING = '{{'
VARIABLE_END_STRING = '}}'
COMMENT_START_STRING = '{#'
COMMENT_END_STRING = '#}'
LINE_STATEMENT_PREFIX = None
LINE_COMMENT_PREFIX = None
TRIM_BLOCKS = False
LSTRIP_BLOCKS = False
NEWLINE_SEQUENCE = '\n'
KEEP_TRAILING_NEWLINE = False
# default filters, tests and namespace
from jinja2.filters import FILTERS as DEFAULT_FILTERS
from jinja2.tests import TESTS as DEFAULT_TESTS
DEFAULT_NAMESPACE = {
'range': range_type,
'dict': dict,
'lipsum': generate_lorem_ipsum,
'cycler': Cycler,
'joiner': Joiner
}
# default policies
DEFAULT_POLICIES = {
'compiler.ascii_str': True,
'urlize.rel': 'noopener',
'urlize.target': None,
'truncate.leeway': 5,
'json.dumps_function': None,
'json.dumps_kwargs': {'sort_keys': True},
}
# export all constants
__all__ = tuple(x for x in locals().keys() if x.isupper())
|
lgpl-2.1
|
leigh123linux/Cinnamon
|
files/usr/share/cinnamon/cinnamon-desktop-editor/cinnamon-desktop-editor.py
|
3
|
17290
|
#!/usr/bin/python3
import sys
import os
import gettext
import glob
from optparse import OptionParser
import shutil
import subprocess
from setproctitle import setproctitle
import gi
gi.require_version("Gtk", "3.0")
gi.require_version("CMenu", "3.0")
from gi.repository import GLib, Gtk, Gio, CMenu
sys.path.insert(0, '/usr/share/cinnamon/cinnamon-menu-editor')
from cme import util
sys.path.insert(0, '/usr/share/cinnamon/cinnamon-settings/bin')
import JsonSettingsWidgets
# i18n
gettext.install("cinnamon", "/usr/share/locale")
# i18n for menu item
#_ = gettext.gettext # bug !!! _ is already defined by gettext.install!
home = os.path.expanduser("~")
PANEL_LAUNCHER_PATH = os.path.join(home, ".cinnamon", "panel-launchers")
EXTENSIONS = (".png", ".xpm", ".svg")
DEFAULT_ICON_NAME = "cinnamon-panel-launcher"
def escape_space(string):
return string.replace(" ", "\\ ")
def ask(msg):
dialog = Gtk.MessageDialog(None,
Gtk.DialogFlags.DESTROY_WITH_PARENT | Gtk.DialogFlags.MODAL,
Gtk.MessageType.QUESTION,
Gtk.ButtonsType.YES_NO,
None)
dialog.set_markup(msg)
dialog.show_all()
response = dialog.run()
dialog.destroy()
return response == Gtk.ResponseType.YES
DESKTOP_GROUP = GLib.KEY_FILE_DESKTOP_GROUP
class ItemEditor(object):
ui_file = None
def __init__(self, item_path=None, callback=None, destdir=None):
self.builder = Gtk.Builder()
self.builder.set_translation_domain('cinnamon') # let it translate!
self.builder.add_from_file(self.ui_file)
self.callback = callback
self.destdir = destdir
self.dialog = self.builder.get_object('editor')
self.dialog.connect('response', self.on_response)
self.icon_chooser = self.builder.get_object('icon-chooser')
self.icon_chooser.get_dialog().set_property("allow-paths", True)
self.build_ui()
self.item_path = item_path
self.load()
self.check_custom_path()
self.resync_validity()
def build_ui(self):
raise NotImplementedError()
def check_custom_path(self):
raise NotImplementedError()
def sync_widgets(self, name_valid, exec_valid):
if name_valid:
self.builder.get_object('name-entry').set_icon_from_icon_name(Gtk.EntryIconPosition.SECONDARY, 'gtk-ok')
self.builder.get_object('name-entry').set_icon_tooltip_text(Gtk.EntryIconPosition.SECONDARY,
_("Valid name"))
else:
self.builder.get_object('name-entry').set_icon_from_icon_name(Gtk.EntryIconPosition.SECONDARY, 'process-stop')
self.builder.get_object('name-entry').set_icon_tooltip_text(Gtk.EntryIconPosition.SECONDARY,
_("The name cannot be empty."))
if exec_valid:
self.builder.get_object('exec-entry').set_icon_from_icon_name(Gtk.EntryIconPosition.SECONDARY, 'gtk-ok')
self.builder.get_object('exec-entry').set_icon_tooltip_text(Gtk.EntryIconPosition.SECONDARY,
_("Valid executable"))
else:
self.builder.get_object('exec-entry').set_icon_from_icon_name(Gtk.EntryIconPosition.SECONDARY, 'process-stop')
self.builder.get_object('exec-entry').set_icon_tooltip_text(Gtk.EntryIconPosition.SECONDARY,
_("The executable is not valid. It cannot be empty and spaces in the path must be escaped with backslash (\\)."))
self.builder.get_object('ok').set_sensitive(name_valid and exec_valid)
def validate_exec_line(self, string):
try:
success, parsed = GLib.shell_parse_argv(string)
if GLib.find_program_in_path(parsed[0]) or ((not os.path.isdir(parsed[0])) and os.access(parsed[0], os.X_OK)):
return True
except:
pass
return False
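# Illustrative results of the method above (paths are examples only):
#
#   validate_exec_line("gedit %U")           # True if gedit is on PATH
#   validate_exec_line("/usr/bin/env true")  # True: absolute executable
#   validate_exec_line("")                   # False: shell parsing fails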
def get_keyfile_edits(self):
raise NotImplementedError()
def set_text(self, ctl, name):
try:
val = self.keyfile.get_string(DESKTOP_GROUP, name)
except GLib.GError:
pass
else:
self.builder.get_object(ctl).set_text(val)
def set_check(self, ctl, name):
try:
val = self.keyfile.get_boolean(DESKTOP_GROUP, name)
except GLib.GError:
pass
else:
self.builder.get_object(ctl).set_active(val)
def set_icon(self, name):
try:
val = self.keyfile.get_string(DESKTOP_GROUP, name)
except GLib.GError:
pass
else:
print(val)
self.icon_chooser.set_icon(val)
print('icon:', self.icon_chooser.get_icon())
def load(self):
self.keyfile = GLib.KeyFile()
path = self.item_path or ""
try:
self.keyfile.load_from_file(path, util.KEY_FILE_FLAGS)
except GLib.GError:
pass
def save(self):
util.fillKeyFile(self.keyfile, self.get_keyfile_edits())
contents, length = self.keyfile.to_data()
need_exec = False
if self.destdir is not None:
self.item_path = os.path.join(self.destdir, self.builder.get_object('name-entry').get_text() + ".desktop")
need_exec = True
try:
with open(self.item_path, 'w') as f:
f.write(contents)
if need_exec:
os.chmod(self.item_path, 0o755)
subprocess.Popen(['update-desktop-database', util.getUserItemPath()], env=os.environ)
except IOError as e:
if ask(_("Cannot create the launcher at this location. Add to the desktop instead?")):
self.destdir = GLib.get_user_special_dir(GLib.UserDirectory.DIRECTORY_DESKTOP)
self.save()
def run(self):
self.dialog.present()
def on_response(self, dialog, response):
if response == Gtk.ResponseType.OK:
self.save()
self.callback(True, self.item_path)
else:
self.callback(False, self.item_path)
self.dialog.destroy()
class LauncherEditor(ItemEditor):
ui_file = '/usr/share/cinnamon/cinnamon-desktop-editor/launcher-editor.ui'
def build_ui(self):
self.builder.get_object('exec-browse').connect('clicked', self.pick_exec)
self.builder.get_object('name-entry').connect('changed', self.resync_validity)
self.builder.get_object('exec-entry').connect('changed', self.resync_validity)
def resync_validity(self, *args):
name_text = self.builder.get_object('name-entry').get_text().strip()
exec_text = self.builder.get_object('exec-entry').get_text().strip()
name_valid = name_text != ""
exec_valid = self.validate_exec_line(exec_text)
self.sync_widgets(name_valid, exec_valid)
def load(self):
super(LauncherEditor, self).load()
self.set_text('name-entry', "Name")
self.set_text('exec-entry', "Exec")
self.set_text('comment-entry', "Comment")
self.set_check('terminal-check', "Terminal")
self.set_icon("Icon")
def get_keyfile_edits(self):
return dict(Name=self.builder.get_object('name-entry').get_text(),
Exec=self.builder.get_object('exec-entry').get_text(),
Comment=self.builder.get_object('comment-entry').get_text(),
Terminal=self.builder.get_object('terminal-check').get_active(),
Icon=self.icon_chooser.get_icon(),
Type="Application")
def pick_exec(self, button):
chooser = Gtk.FileChooserDialog(title=_("Choose a command"),
parent=self.dialog,
buttons=(Gtk.STOCK_CANCEL, Gtk.ResponseType.REJECT,
Gtk.STOCK_OK, Gtk.ResponseType.ACCEPT))
response = chooser.run()
if response == Gtk.ResponseType.ACCEPT:
self.builder.get_object('exec-entry').set_text(escape_space(chooser.get_filename()))
chooser.destroy()
def check_custom_path(self):
if self.item_path:
self.item_path = os.path.join(util.getUserItemPath(), os.path.split(self.item_path)[1])
class DirectoryEditor(ItemEditor):
ui_file = '/usr/share/cinnamon/cinnamon-desktop-editor/directory-editor.ui'
def build_ui(self):
self.builder.get_object('name-entry').connect('changed', self.resync_validity)
def resync_validity(self, *args):
name_text = self.builder.get_object('name-entry').get_text().strip()
valid = (name_text != "")
self.builder.get_object('ok').set_sensitive(valid)
def load(self):
super(DirectoryEditor, self).load()
self.set_text('name-entry', "Name")
self.set_text('comment-entry', "Comment")
self.set_icon("Icon")
def get_keyfile_edits(self):
return dict(Name=self.builder.get_object('name-entry').get_text(),
Comment=self.builder.get_object('comment-entry').get_text(),
Icon=self.icon_chooser.get_icon(),
Type="Directory")
def check_custom_path(self):
self.item_path = os.path.join(util.getUserDirectoryPath(), os.path.split(self.item_path)[1])
class CinnamonLauncherEditor(ItemEditor):
ui_file = '/usr/share/cinnamon/cinnamon-desktop-editor/launcher-editor.ui'
def build_ui(self):
self.builder.get_object('exec-browse').connect('clicked', self.pick_exec)
self.builder.get_object('name-entry').connect('changed', self.resync_validity)
self.builder.get_object('exec-entry').connect('changed', self.resync_validity)
def check_custom_path(self):
dir = Gio.file_new_for_path(PANEL_LAUNCHER_PATH)
if not dir.query_exists(None):
dir.make_directory_with_parents(None)
if self.item_path is None or "cinnamon-custom-launcher" not in self.item_path:
i = 1
while True:
name = os.path.join(PANEL_LAUNCHER_PATH, 'cinnamon-custom-launcher-' + str(i) + '.desktop')
file = Gio.file_parse_name(name)
if not file.query_exists(None):
break
i += 1
self.item_path = name
def resync_validity(self, *args):
name_text = self.builder.get_object('name-entry').get_text().strip()
exec_text = self.builder.get_object('exec-entry').get_text().strip()
name_valid = name_text != ""
exec_valid = self.validate_exec_line(exec_text)
self.sync_widgets(name_valid, exec_valid)
def load(self):
super(CinnamonLauncherEditor, self).load()
self.set_text('name-entry', "Name")
self.set_text('exec-entry', "Exec")
self.set_text('comment-entry', "Comment")
self.set_check('terminal-check', "Terminal")
self.set_icon("Icon")
def get_keyfile_edits(self):
return dict(Name=self.builder.get_object('name-entry').get_text(),
Exec=self.builder.get_object('exec-entry').get_text(),
Comment=self.builder.get_object('comment-entry').get_text(),
Terminal=self.builder.get_object('terminal-check').get_active(),
Icon=self.icon_chooser.get_icon(),
Type="Application")
def pick_exec(self, button):
chooser = Gtk.FileChooserDialog(title=_("Choose a command"),
parent=self.dialog,
buttons=(Gtk.STOCK_CANCEL, Gtk.ResponseType.REJECT,
Gtk.STOCK_OK, Gtk.ResponseType.ACCEPT))
response = chooser.run()
if response == Gtk.ResponseType.ACCEPT:
self.builder.get_object('exec-entry').set_text(escape_space(chooser.get_filename()))
chooser.destroy()
class Main:
def __init__(self):
parser = OptionParser()
parser.add_option("-o", "--original", dest="original_desktop_file", help="Path of original .desktop file", metavar="ORIG_FILE")
parser.add_option("-d", "--directory", dest="destination_directory", help="Destination directory of the new launcher", metavar="DEST_DIR")
parser.add_option("-f", "--file", dest="desktop_file", help="Name of desktop file (i.e. gnome-terminal.desktop)", metavar="DESKTOP_NAME")
parser.add_option("-m", "--mode", dest="mode", default=None, help="Mode to run in: launcher, directory, panel-launcher or nemo-launcher")
(options, args) = parser.parse_args()
if not options.mode:
parser.error("You must select a mode to run in")
if options.mode in ("directory", "launcher") and not options.original_desktop_file:
parser.error("directory and launcher modes must be accompanied by the -o argument")
if options.mode == "nemo-launcher" and not options.destination_directory:
parser.error("nemo-launcher mode must be accompanied by the -d argument")
if options.mode == "cinnamon-launcher" and len(args) < 1:
parser.error("cinnamon-launcher mode must have the following syntax:\n"
"cinnamon-desktop-editor -mcinnamon-launcher [-ffoo.desktop] <json-path>")
self.tree = CMenu.Tree.new("cinnamon-applications.menu", CMenu.TreeFlags.INCLUDE_NODISPLAY)
if not self.tree.load_sync():
raise ValueError("can not load menu tree")
self.mode = options.mode
self.orig_file = options.original_desktop_file
self.desktop_file = options.desktop_file
self.dest_dir = options.destination_directory
if options.mode == "cinnamon-launcher":
self.json_path = args[0]
if self.desktop_file is not None:
self.get_desktop_path()
if self.mode == "directory":
editor = DirectoryEditor(self.orig_file, self.directory_cb)
editor.dialog.show_all()
elif self.mode == "launcher":
editor = LauncherEditor(self.orig_file, self.launcher_cb)
editor.dialog.show_all()
elif self.mode == "cinnamon-launcher":
editor = CinnamonLauncherEditor(self.orig_file, self.panel_launcher_cb)
editor.dialog.show_all()
elif self.mode == "nemo-launcher":
editor = LauncherEditor(self.orig_file, self.nemo_launcher_cb, self.dest_dir)
editor.dialog.show_all()
else:
print("Invalid args")
def directory_cb(self, success, dest_path):
self.end()
def launcher_cb(self, success, dest_path):
self.end()
def panel_launcher_cb(self, success, dest_path):
if success:
settings = JsonSettingsWidgets.JSONSettingsHandler(self.json_path)
launchers = settings.get_value("launcherList")
if self.desktop_file is None:
launchers.append(os.path.split(dest_path)[1])
            else:
                # list.index() raises ValueError when the entry is missing
                # (it never returns -1), so handle a miss explicitly.
                try:
                    i = launchers.index(self.desktop_file)
                    del launchers[i]
                    launchers.insert(i, os.path.split(dest_path)[1])
                except ValueError:
                    launchers.append(os.path.split(dest_path)[1])
settings.save_settings()
if self.desktop_file is None:
self.ask_menu_launcher(dest_path)
self.end()
def nemo_launcher_cb(self, success, dest_path):
if success:
self.ask_menu_launcher(dest_path)
self.end()
def ask_menu_launcher(self, dest_path):
if ask(_("Would you like to add this launcher to the menu also? It will be placed in the Other category initially.")):
new_file_path = os.path.join(util.getUserItemPath(), os.path.split(dest_path)[1])
shutil.copy(dest_path, new_file_path)
def get_desktop_path(self):
self.search_menu_sys()
if self.orig_file is None:
panel_launchers = glob.glob(os.path.join(PANEL_LAUNCHER_PATH, "*.desktop"))
for launcher in panel_launchers:
if os.path.split(launcher)[1] == self.desktop_file:
self.orig_file = launcher
def search_menu_sys(self, parent=None):
if parent is None:
parent = self.tree.get_root_directory()
item_iter = parent.iter()
item_type = item_iter.next()
while item_type != CMenu.TreeItemType.INVALID:
if item_type == CMenu.TreeItemType.DIRECTORY:
item = item_iter.get_directory()
self.search_menu_sys(item)
elif item_type == CMenu.TreeItemType.ENTRY:
item = item_iter.get_entry()
if item.get_desktop_file_id() == self.desktop_file:
self.orig_file = item.get_desktop_file_path()
item_type = item_iter.next()
def end(self):
Gtk.main_quit()
if __name__ == "__main__":
setproctitle("cinnamon-desktop-editor")
Gtk.Window.set_default_icon_name(DEFAULT_ICON_NAME)
Main()
Gtk.main()
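
# Hypothetical invocations, for illustration only (the .desktop paths and
# names below are made up; the syntax mirrors the parser errors above):
#
#   cinnamon-desktop-editor -mlauncher -o/usr/share/applications/foo.desktop
#   cinnamon-desktop-editor -mcinnamon-launcher -ffoo.desktop <json-path>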
|
gpl-2.0
|
nicholasserra/sentry
|
tests/sentry/api/endpoints/test_team_stats.py
|
10
|
1170
|
from __future__ import absolute_import
from django.core.urlresolvers import reverse
from sentry.app import tsdb
from sentry.testutils import APITestCase
class TeamStatsTest(APITestCase):
def test_simple(self):
self.login_as(user=self.user)
team = self.create_team(name='foo')
project_1 = self.create_project(team=team, name='a')
project_2 = self.create_project(team=team, name='b')
team_2 = self.create_team(name='bar')
project_3 = self.create_project(team=team_2, name='c')
tsdb.incr(tsdb.models.project, project_1.id, count=3)
tsdb.incr(tsdb.models.project, project_2.id, count=5)
tsdb.incr(tsdb.models.project, project_3.id, count=10)
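        # Only project_1 and project_2 belong to `team`, so the most recent
        # bucket should total 3 + 5 = 8 events; project_3's 10 events belong
        # to another team and are excluded.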
url = reverse('sentry-api-0-team-stats', kwargs={
'organization_slug': team.organization.slug,
'team_slug': team.slug,
})
response = self.client.get(url, format='json')
assert response.status_code == 200, response.content
assert response.data[-1][1] == 8, response.data
for point in response.data[:-1]:
assert point[1] == 0
assert len(response.data) == 24
|
bsd-3-clause
|
addition-it-solutions/project-all
|
addons/hr_timesheet_invoice/report/account_analytic_profit.py
|
8
|
5744
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.report import report_sxw
from openerp.osv import osv
class account_analytic_profit(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context):
super(account_analytic_profit, self).__init__(cr, uid, name, context=context)
self.localcontext.update({
'lines': self._lines,
'user_ids': self._user_ids,
'journal_ids': self._journal_ids,
'line': self._line,
})
def _user_ids(self, lines):
user_obj = self.pool['res.users']
ids=list(set([b.user_id.id for b in lines]))
return user_obj.browse(self.cr, self.uid, ids)
def _journal_ids(self, form, user_id):
if isinstance(user_id, (int, long)):
user_id = [user_id]
line_obj = self.pool['account.analytic.line']
journal_obj = self.pool['account.analytic.journal']
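        # form['journal_ids'] arrives from the wizard's many2many widget as a
        # command triple [(6, 0, ids)]; [0][2] extracts the plain id list.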
line_ids=line_obj.search(self.cr, self.uid, [
('date', '>=', form['date_from']),
('date', '<=', form['date_to']),
('journal_id', 'in', form['journal_ids'][0][2]),
('user_id', 'in', user_id),
])
ids=list(set([b.journal_id.id for b in line_obj.browse(self.cr, self.uid, line_ids)]))
return journal_obj.browse(self.cr, self.uid, ids)
def _line(self, form, journal_ids, user_ids):
line_obj = self.pool['account.analytic.line']
product_obj = self.pool['product.product']
price_obj = self.pool['product.pricelist']
ids=line_obj.search(self.cr, self.uid, [
('date', '>=', form['date_from']),
('date', '<=', form['date_to']),
('journal_id', 'in', journal_ids),
('user_id', 'in', user_ids),
])
res={}
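        # Group lines by invoicing rate: id -1 means no pricelist/rate ("/"),
        # id 0 means a fixed-price account, otherwise the id of the line's
        # to_invoice rate. 'amount_th' is the theoretical pricelist amount;
        # 'amount' is prorated against the real invoice total when one exists.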
for line in line_obj.browse(self.cr, self.uid, ids):
if line.account_id.pricelist_id:
if line.account_id.to_invoice:
if line.to_invoice:
id=line.to_invoice.id
name=line.to_invoice.name
discount=line.to_invoice.factor
else:
name="/"
discount=1.0
id = -1
else:
name="Fixed"
discount=0.0
id=0
pl=line.account_id.pricelist_id.id
price=price_obj.price_get(self.cr, self.uid, [pl], line.product_id.id, line.unit_amount or 1.0, line.account_id.partner_id.id)[pl]
else:
name="/"
discount=1.0
id = -1
price=0.0
if id not in res:
res[id]={'name': name, 'amount': 0, 'cost':0, 'unit_amount':0,'amount_th':0}
xxx = round(price * line.unit_amount * (1-(discount or 0.0)), 2)
res[id]['amount_th']+=xxx
if line.invoice_id:
self.cr.execute('select id from account_analytic_line where invoice_id=%s', (line.invoice_id.id,))
tot = 0
for lid in self.cr.fetchall():
lid2 = line_obj.browse(self.cr, self.uid, lid[0])
pl=lid2.account_id.pricelist_id.id
price=price_obj.price_get(self.cr, self.uid, [pl], lid2.product_id.id, lid2.unit_amount or 1.0, lid2.account_id.partner_id.id)[pl]
tot += price * lid2.unit_amount * (1-(discount or 0.0))
if tot:
procent = line.invoice_id.amount_untaxed / tot
res[id]['amount'] += xxx * procent
else:
res[id]['amount'] += xxx
else:
res[id]['amount'] += xxx
res[id]['cost']+=line.amount
res[id]['unit_amount']+=line.unit_amount
for id in res:
res[id]['profit']=res[id]['amount']+res[id]['cost']
res[id]['eff']=res[id]['cost'] and '%d' % (-res[id]['amount'] / res[id]['cost'] * 100,) or 0.0
return res.values()
def _lines(self, form):
line_obj = self.pool['account.analytic.line']
ids=line_obj.search(self.cr, self.uid, [
('date', '>=', form['date_from']),
('date', '<=', form['date_to']),
('journal_id', 'in', form['journal_ids'][0][2]),
('user_id', 'in', form['employee_ids'][0][2]),
])
return line_obj.browse(self.cr, self.uid, ids)
class report_account_analytic_profit(osv.AbstractModel):
_name = 'report.hr_timesheet_invoice.report_analyticprofit'
_inherit = 'report.abstract_report'
_template = 'hr_timesheet_invoice.report_analyticprofit'
_wrapped_report_class = account_analytic_profit
|
agpl-3.0
|
jamesgk/robofab
|
Lib/robofab/interface/all/dialogs_legacy.py
|
8
|
18412
|
"""
Dialogs.
Cross-platform and cross-application compatible. Some of them anyway.
(Not all dialogs work on PCs outside of FontLab. Some dialogs are for FontLab only. Sorry.)
Mac and FontLab implementation written by the RoboFab development team.
PC implementation by Eigi Eigendorf, (C)2002 Eigi Eigendorf.
"""
import os
import sys
from robofab import RoboFabError
from warnings import warn
MAC = False
PC = False
haveMacfs = False
if sys.platform in ('mac', 'darwin'):
MAC = True
elif os.name == 'nt':
PC = True
else:
warn("dialogs.py only supports Mac and PC platforms.")
pyVersion = sys.version_info[:3]
inFontLab = False
try:
from FL import *
inFontLab = True
except ImportError: pass
try:
import W
hasW = True
except ImportError:
hasW = False
try:
import dialogKit
hasDialogKit = True
except ImportError:
hasDialogKit = False
try:
import EasyDialogs
hasEasyDialogs = True
except:
hasEasyDialogs = False
if MAC:
if pyVersion < (2, 3, 0):
import macfs
haveMacfs = True
elif PC and not inFontLab:
from win32com.shell import shell
import win32ui
import win32con
def _raisePlatformError(dialog):
"""error raiser"""
if MAC:
p = 'Macintosh'
elif PC:
p = 'PC'
else:
p = sys.platform
raise RoboFabError("%s is not currently available on the %s platform"%(dialog, p))
class _FontLabDialogOneList:
"""A one list dialog for FontLab. This class should not be called directly. Use the OneList function."""
def __init__(self, list, message, title='RoboFab'):
self.message = message
self.selected = None
self.list = list
self.d = Dialog(self)
self.d.size = Point(250, 250)
self.d.title = title
self.d.Center()
self.d.AddControl(LISTCONTROL, Rect(12, 30, 238, 190), "list", STYLE_LIST, self.message)
self.list_index = 0
def Run(self):
return self.d.Run()
def on_cancel(self, code):
self.selected = None
def on_ok(self, code):
self.d.GetValue('list')
# Since FLS v5.2, the GetValue() method of the Dialog() class returns
# a 'wrong' index value from the specified LISTCONTROL.
# If the selected index is n, it will return n-1. For example, when
# the index is 1, it returns 0; when it's 2, it returns 1, and so on.
# If the selection is empty, FLS v5.2 returns -2, while the old v5.0
# returned None.
# See also:
# - http://forum.fontlab.com/index.php?topic=8807.0
# - http://forum.fontlab.com/index.php?topic=9003.0
#
# Edited based on feedback from Adam Twardoch
if fl.buildnumber > 4600 and sys.platform == 'win32':
if self.list_index == -2:
self.selected = None
else:
self.selected = self.list_index + 1
else:
self.selected = self.list_index
class _FontLabDialogSearchList:
"""A dialog for searching through a list. It contains a text field and a results list FontLab. This class should not be called directly. Use the SearchList function."""
def __init__(self, aList, message, title="RoboFab"):
self.d = Dialog(self)
self.d.size = Point(250, 290)
self.d.title = title
self.d.Center()
self.message = message
self._fullContent = aList
self.possibleHits = list(aList)
self.possibleHits.sort()
self.possibleHits_index = 0
self.entryField = ""
self.selected = None
self.d.AddControl(STATICCONTROL, Rect(10, 10, 240, 30), "message", STYLE_LABEL, message)
self.d.AddControl(EDITCONTROL, Rect(10, 30, 240, aAUTO), "entryField", STYLE_EDIT, "")
self.d.AddControl(LISTCONTROL, Rect(12, 60, 238, 230), "possibleHits", STYLE_LIST, "")
def run(self):
self.d.Run()
def on_entryField(self, code):
self.d.GetValue("entryField")
entry = self.entryField
count = len(entry)
possibleHits = [
i for i in self._fullContent
if len(i) >= count
and i[:count] == entry
]
possibleHits.sort()
self.possibleHits = possibleHits
self.possibleHits_index = 0
self.d.PutValue("possibleHits")
def on_ok(self, code):
self.d.GetValue("possibleHits")
sel = self.possibleHits_index
if sel == -1:
self.selected = None
else:
self.selected = self.possibleHits[sel]
def on_cancel(self, code):
self.selected = None
class _FontLabDialogTwoFields:
"""A two field dialog for FontLab. This class should not be called directly. Use the TwoFields function."""
def __init__(self, title_1, value_1, title_2, value_2, title='RoboFab'):
self.d = Dialog(self)
self.d.size = Point(200, 125)
self.d.title = title
self.d.Center()
self.d.AddControl(EDITCONTROL, Rect(120, 10, aIDENT2, aAUTO), "v1edit", STYLE_EDIT, title_1)
self.d.AddControl(EDITCONTROL, Rect(120, 40, aIDENT2, aAUTO), "v2edit", STYLE_EDIT, title_2)
self.v1edit = value_1
self.v2edit = value_2
def Run(self):
return self.d.Run()
def on_cancel(self, code):
self.v1edit = None
self.v2edit = None
def on_ok(self, code):
self.d.GetValue("v1edit")
self.d.GetValue("v2edit")
self.v1 = self.v1edit
self.v2 = self.v2edit
class _FontLabDialogTwoChecks:
"""A two check box dialog for FontLab. This class should not be called directly. Use the TwoChecks function."""
def __init__(self, title_1, title_2, value1=1, value2=1, title='RoboFab'):
self.d = Dialog(self)
self.d.size = Point(200, 105)
self.d.title = title
self.d.Center()
self.d.AddControl(CHECKBOXCONTROL, Rect(10, 10, aIDENT2, aAUTO), "check1", STYLE_CHECKBOX, title_1)
self.d.AddControl(CHECKBOXCONTROL, Rect(10, 30, aIDENT2, aAUTO), "check2", STYLE_CHECKBOX, title_2)
self.check1 = value1
self.check2 = value2
def Run(self):
return self.d.Run()
def on_cancel(self, code):
self.check1 = None
self.check2 = None
def on_ok(self, code):
self.d.GetValue("check1")
self.d.GetValue("check2")
class _FontLabDialogAskString:
"""A one simple string prompt dialog for FontLab. This class should not be called directly. Use the GetString function."""
def __init__(self, message, value, title='RoboFab'):
self.d = Dialog(self)
self.d.size = Point(350, 130)
self.d.title = title
self.d.Center()
self.d.AddControl(STATICCONTROL, Rect(aIDENT, aIDENT, aIDENT, aAUTO), "label", STYLE_LABEL, message)
self.d.AddControl(EDITCONTROL, Rect(aIDENT, 40, aIDENT, aAUTO), "value", STYLE_EDIT, '')
self.value=value
def Run(self):
return self.d.Run()
def on_cancel(self, code):
self.value = None
def on_ok(self, code):
self.d.GetValue("value")
class _FontLabDialogMessage:
"""A simple message dialog for FontLab. This class should not be called directly. Use the SimpleMessage function."""
def __init__(self, message, title='RoboFab'):
self.d = Dialog(self)
self.d.size = Point(350, 130)
self.d.title = title
self.d.Center()
self.d.AddControl(STATICCONTROL, Rect(aIDENT, aIDENT, aIDENT, 80), "label", STYLE_LABEL, message)
def Run(self):
return self.d.Run()
class _FontLabDialogGetYesNoCancel:
"""A yes no cancel message dialog for FontLab. This class should not be called directly. Use the YesNoCancel function."""
def __init__(self, message, title='RoboFab'):
self.d = Dialog(self)
self.d.size = Point(350, 130)
self.d.title = title
self.d.Center()
self.d.ok = 'Yes'
self.d.AddControl(STATICCONTROL, Rect(aIDENT, aIDENT, aIDENT, 80), "label", STYLE_LABEL, message)
self.d.AddControl(BUTTONCONTROL, Rect(100, 95, 172, 115), "button", STYLE_BUTTON, "No")
self.value = 0
def Run(self):
return self.d.Run()
def on_ok(self, code):
self.value = 1
def on_cancel(self, code):
self.value = -1
def on_button(self, code):
self.value = 0
self.d.End()
class _MacOneListW:
"""A one list dialog for Macintosh. This class should not be called directly. Use the OneList function."""
def __init__(self, list, message='Make a selection'):
import W
self.list = list
self.selected = None
self.w = W.ModalDialog((200, 240))
self.w.message = W.TextBox((10, 10, -10, 30), message)
self.w.list = W.List((10, 35, -10, -50), list)
self.w.l = W.HorizontalLine((10, -40, -10, 1), 1)
self.w.cancel = W.Button((10, -30, 87, -10), 'Cancel', self.cancel)
self.w.ok = W.Button((102, -30, 88, -10), 'OK', self.ok)
self.w.setdefaultbutton(self.w.ok)
self.w.bind('cmd.', self.w.cancel.push)
self.w.open()
def ok(self):
if len(self.w.list.getselection()) == 1:
self.selected = self.w.list.getselection()[0]
self.w.close()
def cancel(self):
self.selected = None
self.w.close()
class _MacTwoChecksW:
""" Version using W """
def __init__(self, title_1, title_2, value1=1, value2=1, title='RoboFab'):
import W
self.check1 = value1
self.check2 = value2
self.w = W.ModalDialog((200, 100))
self.w.check1 = W.CheckBox((10, 10, -10, 16), title_1, value=value1)
self.w.check2 = W.CheckBox((10, 35, -10, 16), title_2, value=value2)
self.w.l = W.HorizontalLine((10, 60, -10, 1), 1)
self.w.cancel = W.Button((10, 70, 85, 20), 'Cancel', self.cancel)
self.w.ok = W.Button((105, 70, 85, 20), 'OK', self.ok)
self.w.setdefaultbutton(self.w.ok)
self.w.bind('cmd.', self.w.cancel.push)
self.w.open()
def ok(self):
self.check1 = self.w.check1.get()
self.check2 = self.w.check2.get()
self.w.close()
def cancel(self):
self.check1 = None
self.check2 = None
self.w.close()
class ProgressBar:
def __init__(self, title='RoboFab...', ticks=0, label=''):
"""
A progress bar.
Availability: FontLab, Mac
"""
self._tickValue = 1
if inFontLab:
fl.BeginProgress(title, ticks)
elif MAC and hasEasyDialogs:
import EasyDialogs
self._bar = EasyDialogs.ProgressBar(title, maxval=ticks, label=label)
else:
_raisePlatformError('Progress')
def getCurrentTick(self):
return self._tickValue
def tick(self, tickValue=None):
"""
Tick the progress bar.
Availability: FontLab, Mac
"""
if not tickValue:
tickValue = self._tickValue
if inFontLab:
fl.TickProgress(tickValue)
elif MAC:
self._bar.set(tickValue)
else:
pass
self._tickValue = tickValue + 1
def label(self, label):
"""
Set the label on the progress bar.
Availability: Mac
"""
if inFontLab:
pass
elif MAC:
self._bar.label(label)
else:
pass
def close(self):
"""
Close the progressbar.
Availability: FontLab, Mac
"""
if inFontLab:
fl.EndProgress()
elif MAC:
del self._bar
else:
pass
def SelectFont(message="Select a font:", title='RoboFab'):
"""
Returns font instance if there is one, otherwise it returns None.
Availability: FontLab
"""
from robofab.world import RFont
if inFontLab:
list = []
for i in range(fl.count):
list.append(fl[i].full_name)
name = OneList(list, message, title)
if name is None:
return None
else:
return RFont(fl[list.index(name)])
else:
_raisePlatformError('SelectFont')
def SelectGlyph(font, message="Select a glyph:", title='RoboFab'):
"""
Returns glyph instance if there is one, otherwise it returns None.
Availability: FontLab
"""
from fontTools.misc.textTools import caselessSort
if inFontLab:
tl = font.keys()
list = caselessSort(tl)
glyphname = OneList(list, message, title)
if glyphname is None:
return None
else:
return font[glyphname]
else:
_raisePlatformError('SelectGlyph')
def FindGlyph(font, message="Search for a glyph:", title='RoboFab'):
"""
Returns glyph instance if there is one, otherwise it returns None.
Availability: FontLab
"""
if inFontLab:
glyphname = SearchList(font.keys(), message, title)
if glyphname is None:
return None
else:
return font[glyphname]
else:
        _raisePlatformError('FindGlyph')
def OneList(list, message="Select an item:", title='RoboFab'):
"""
Returns selected item, otherwise it returns None.
Availability: FontLab, Macintosh
"""
if inFontLab:
ol = _FontLabDialogOneList(list, message)
ol.Run()
selected = ol.selected
if selected is None:
return None
else:
try:
return list[selected]
except:
return None
elif MAC:
if hasW:
d = _MacOneListW(list, message)
sel = d.selected
if sel is None:
return None
else:
return list[sel]
else:
_raisePlatformError('OneList')
elif PC:
_raisePlatformError('OneList')
def SearchList(list, message="Select an item:", title='RoboFab'):
"""
Returns selected item, otherwise it returns None.
Availability: FontLab
"""
if inFontLab:
sl = _FontLabDialogSearchList(list, message, title)
sl.run()
selected = sl.selected
if selected is None:
return None
else:
return selected
else:
_raisePlatformError('SearchList')
def TwoFields(title_1="One:", value_1="0", title_2="Two:", value_2="0", title='RoboFab'):
"""
Returns (value 1, value 2).
Availability: FontLab
"""
if inFontLab:
tf = _FontLabDialogTwoFields(title_1, value_1, title_2, value_2, title)
tf.Run()
try:
v1 = tf.v1
v2 = tf.v2
return (v1, v2)
except:
return None
else:
_raisePlatformError('TwoFields')
def TwoChecks(title_1="One", title_2="Two", value1=1, value2=1, title='RoboFab'):
"""
Returns check value:
1 if check box 1 is checked
2 if check box 2 is checked
3 if both are checked
0 if neither are checked
None if cancel is clicked.
Availability: FontLab, Macintosh
"""
tc = None
if inFontLab:
tc = _FontLabDialogTwoChecks(title_1, title_2, value1, value2, title)
tc.Run()
elif MAC:
if hasW:
tc = _MacTwoChecksW(title_1, title_2, value1, value2, title)
else:
_raisePlatformError('TwoChecks')
else:
_raisePlatformError('TwoChecks')
c1 = tc.check1
c2 = tc.check2
if c1 == 1 and c2 == 0:
return 1
elif c1 == 0 and c2 == 1:
return 2
elif c1 == 1 and c2 == 1:
return 3
elif c1 == 0 and c2 == 0:
return 0
else:
return None
def Message(message, title='RoboFab'):
"""
A simple message dialog.
Availability: FontLab, Macintosh
"""
if inFontLab:
_FontLabDialogMessage(message, title).Run()
elif MAC:
import EasyDialogs
EasyDialogs.Message(message)
else:
_raisePlatformError('Message')
def AskString(message, value='', title='RoboFab'):
"""
Returns entered string.
Availability: FontLab, Macintosh
"""
if inFontLab:
askString = _FontLabDialogAskString(message, value, title)
askString.Run()
v = askString.value
if v is None:
return None
else:
return v
elif MAC:
import EasyDialogs
        askString = EasyDialogs.AskString(message, default=value)
if askString is None:
return None
if len(askString) == 0:
return None
else:
return askString
else:
        _raisePlatformError('AskString')
def AskYesNoCancel(message, title='RoboFab', default=0):
"""
Returns 1 for 'Yes', 0 for 'No' and -1 for 'Cancel'.
Availability: FontLab, Macintosh
("default" argument only available on Macintosh)
"""
if inFontLab:
gync = _FontLabDialogGetYesNoCancel(message, title)
gync.Run()
v = gync.value
return v
elif MAC:
import EasyDialogs
gync = EasyDialogs.AskYesNoCancel(message, default=default)
return gync
else:
        _raisePlatformError('AskYesNoCancel')
def GetFile(message=None):
"""
Select file dialog. Returns path if one is selected. Otherwise it returns None.
Availability: FontLab, Macintosh, PC
"""
path = None
if MAC:
if haveMacfs:
fss, ok = macfs.PromptGetFile(message)
if ok:
path = fss.as_pathname()
else:
from robofab.interface.mac.getFileOrFolder import GetFile
path = GetFile(message)
elif PC:
if inFontLab:
if not message:
message = ''
path = fl.GetFileName(1, message, '', '')
else:
openFlags = win32con.OFN_FILEMUSTEXIST|win32con.OFN_EXPLORER
mode_open = 1
myDialog = win32ui.CreateFileDialog(mode_open,None,None,openFlags)
myDialog.SetOFNTitle(message)
is_OK = myDialog.DoModal()
if is_OK == 1:
path = myDialog.GetPathName()
else:
_raisePlatformError('GetFile')
return path
def GetFolder(message=None):
"""
Select folder dialog. Returns path if one is selected. Otherwise it returns None.
Availability: FontLab, Macintosh, PC
"""
path = None
if MAC:
if haveMacfs:
fss, ok = macfs.GetDirectory(message)
if ok:
path = fss.as_pathname()
else:
from robofab.interface.mac.getFileOrFolder import GetFileOrFolder
# This _also_ allows the user to select _files_, but given the
# package/folder dichotomy, I think we have no other choice.
path = GetFileOrFolder(message)
elif PC:
if inFontLab:
if not message:
message = ''
path = fl.GetPathName('', message)
else:
myTuple = shell.SHBrowseForFolder(0, None, message, 64)
try:
path = shell.SHGetPathFromIDList(myTuple[0])
except:
pass
else:
        _raisePlatformError('GetFolder')
return path
GetDirectory = GetFolder
def PutFile(message=None, fileName=None):
"""
Save file dialog. Returns path if one is entered. Otherwise it returns None.
Availability: FontLab, Macintosh, PC
"""
path = None
if MAC:
if haveMacfs:
fss, ok = macfs.StandardPutFile(message, fileName)
if ok:
path = fss.as_pathname()
else:
import EasyDialogs
path = EasyDialogs.AskFileForSave(message, savedFileName=fileName)
elif PC:
if inFontLab:
if not message:
message = ''
if not fileName:
fileName = ''
path = fl.GetFileName(0, message, fileName, '')
else:
openFlags = win32con.OFN_OVERWRITEPROMPT|win32con.OFN_EXPLORER
mode_save = 0
myDialog = win32ui.CreateFileDialog(mode_save, None, fileName, openFlags)
myDialog.SetOFNTitle(message)
is_OK = myDialog.DoModal()
if is_OK == 1:
path = myDialog.GetPathName()
else:
        _raisePlatformError('PutFile')
return path
if __name__=='__main__':
import traceback
print "dialogs hasW", hasW
print "dialogs hasDialogKit", hasDialogKit
print "dialogs MAC", MAC
print "dialogs PC", PC
print "dialogs inFontLab", inFontLab
print "dialogs hasEasyDialogs", hasEasyDialogs
def tryDialog(dialogClass, args=None):
print
print "tryDialog:", dialogClass, "with args:", args
try:
if args is not None:
apply(dialogClass, args)
else:
apply(dialogClass)
except:
traceback.print_exc(limit=0)
tryDialog(TwoChecks, ('hello', 'world', 1, 0, 'ugh'))
tryDialog(TwoFields)
tryDialog(TwoChecks, ('hello', 'world', 1, 0, 'ugh'))
tryDialog(OneList, (['a', 'b', 'c'], 'hello world'))
tryDialog(Message, ('hello world',))
tryDialog(AskString, ('hello world',))
tryDialog(AskYesNoCancel, ('hello world',))
try:
b = ProgressBar('hello', 50, 'world')
for i in range(50):
if i == 25:
b.label('ugh.')
b.tick(i)
b.close()
except:
traceback.print_exc(limit=0)
|
bsd-3-clause
|
bioinformatics-ua/catalogue
|
emif/searchengine/models.py
|
2
|
1201
|
# -*- coding: utf-8 -*-
# Copyright (C) 2014 Universidade de Aveiro, DETI/IEETA, Bioinformatics Group - http://bioinformatics.ua.pt/
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
from django import forms
class Slugs(models.Model):
slug1 = models.CharField(max_length=1256, blank=False)
# TODO: delete
description = models.TextField()
#question = models.ForeignKey(Question, help_text = u"The question that this is an answer to")
def __unicode__(self):
return self.slug1
class Nomenclature(models.Model):
name = models.CharField(max_length=256)
|
gpl-3.0
|
tylerjereddy/scipy
|
scipy/integrate/_ivp/base.py
|
12
|
9550
|
import numpy as np
def check_arguments(fun, y0, support_complex):
"""Helper function for checking arguments common to all solvers."""
y0 = np.asarray(y0)
if np.issubdtype(y0.dtype, np.complexfloating):
if not support_complex:
raise ValueError("`y0` is complex, but the chosen solver does "
"not support integration in a complex domain.")
dtype = complex
else:
dtype = float
y0 = y0.astype(dtype, copy=False)
if y0.ndim != 1:
raise ValueError("`y0` must be 1-dimensional.")
def fun_wrapped(t, y):
return np.asarray(fun(t, y), dtype=dtype)
return fun_wrapped, y0
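# For example, check_arguments(lambda t, y: -y, [1.0, 2.0], False) returns a
# wrapped rhs (coercing results to float ndarrays) and y0 as a float array of
# shape (2,).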
class OdeSolver:
"""Base class for ODE solvers.
In order to implement a new solver you need to follow the guidelines:
1. A constructor must accept parameters presented in the base class
(listed below) along with any other parameters specific to a solver.
2. A constructor must accept arbitrary extraneous arguments
``**extraneous``, but warn that these arguments are irrelevant
using `common.warn_extraneous` function. Do not pass these
arguments to the base class.
3. A solver must implement a private method `_step_impl(self)` which
propagates a solver one step further. It must return tuple
``(success, message)``, where ``success`` is a boolean indicating
whether a step was successful, and ``message`` is a string
containing description of a failure if a step failed or None
otherwise.
4. A solver must implement a private method `_dense_output_impl(self)`,
which returns a `DenseOutput` object covering the last successful
step.
5. A solver must have attributes listed below in Attributes section.
Note that ``t_old`` and ``step_size`` are updated automatically.
    6. Use `fun(self, t, y)` method for the system rhs evaluation; this
way the number of function evaluations (`nfev`) will be tracked
automatically.
7. For convenience, a base class provides `fun_single(self, t, y)` and
`fun_vectorized(self, t, y)` for evaluating the rhs in
non-vectorized and vectorized fashions respectively (regardless of
how `fun` from the constructor is implemented). These calls don't
increment `nfev`.
8. If a solver uses a Jacobian matrix and LU decompositions, it should
track the number of Jacobian evaluations (`njev`) and the number of
LU decompositions (`nlu`).
9. By convention, the function evaluations used to compute a finite
difference approximation of the Jacobian should not be counted in
`nfev`, thus use `fun_single(self, t, y)` or
`fun_vectorized(self, t, y)` when computing a finite difference
approximation of the Jacobian.
Parameters
----------
fun : callable
Right-hand side of the system. The calling signature is ``fun(t, y)``.
Here ``t`` is a scalar and there are two options for ndarray ``y``.
It can either have shape (n,), then ``fun`` must return array_like with
shape (n,). Or, alternatively, it can have shape (n, n_points), then
``fun`` must return array_like with shape (n, n_points) (each column
corresponds to a single column in ``y``). The choice between the two
options is determined by `vectorized` argument (see below).
t0 : float
Initial time.
y0 : array_like, shape (n,)
Initial state.
t_bound : float
Boundary time --- the integration won't continue beyond it. It also
determines the direction of the integration.
vectorized : bool
Whether `fun` is implemented in a vectorized fashion.
support_complex : bool, optional
Whether integration in a complex domain should be supported.
Generally determined by a derived solver class capabilities.
Default is False.
Attributes
----------
n : int
Number of equations.
status : string
Current status of the solver: 'running', 'finished' or 'failed'.
t_bound : float
Boundary time.
direction : float
Integration direction: +1 or -1.
t : float
Current time.
y : ndarray
Current state.
t_old : float
Previous time. None if no steps were made yet.
step_size : float
Size of the last successful step. None if no steps were made yet.
nfev : int
Number of the system's rhs evaluations.
njev : int
Number of the Jacobian evaluations.
nlu : int
Number of LU decompositions.
"""
TOO_SMALL_STEP = "Required step size is less than spacing between numbers."
def __init__(self, fun, t0, y0, t_bound, vectorized,
support_complex=False):
self.t_old = None
self.t = t0
self._fun, self.y = check_arguments(fun, y0, support_complex)
self.t_bound = t_bound
self.vectorized = vectorized
if vectorized:
def fun_single(t, y):
return self._fun(t, y[:, None]).ravel()
fun_vectorized = self._fun
else:
fun_single = self._fun
def fun_vectorized(t, y):
f = np.empty_like(y)
for i, yi in enumerate(y.T):
f[:, i] = self._fun(t, yi)
return f
def fun(t, y):
self.nfev += 1
return self.fun_single(t, y)
self.fun = fun
self.fun_single = fun_single
self.fun_vectorized = fun_vectorized
self.direction = np.sign(t_bound - t0) if t_bound != t0 else 1
self.n = self.y.size
self.status = 'running'
self.nfev = 0
self.njev = 0
self.nlu = 0
@property
def step_size(self):
if self.t_old is None:
return None
else:
return np.abs(self.t - self.t_old)
def step(self):
"""Perform one integration step.
Returns
-------
message : string or None
Report from the solver. Typically a reason for a failure if
`self.status` is 'failed' after the step was taken or None
otherwise.
"""
if self.status != 'running':
raise RuntimeError("Attempt to step on a failed or finished "
"solver.")
if self.n == 0 or self.t == self.t_bound:
# Handle corner cases of empty solver or no integration.
self.t_old = self.t
self.t = self.t_bound
message = None
self.status = 'finished'
else:
t = self.t
success, message = self._step_impl()
if not success:
self.status = 'failed'
else:
self.t_old = t
if self.direction * (self.t - self.t_bound) >= 0:
self.status = 'finished'
return message
def dense_output(self):
"""Compute a local interpolant over the last successful step.
Returns
-------
sol : `DenseOutput`
Local interpolant over the last successful step.
"""
if self.t_old is None:
raise RuntimeError("Dense output is available after a successful "
"step was made.")
if self.n == 0 or self.t == self.t_old:
# Handle corner cases of empty solver and no integration.
return ConstantDenseOutput(self.t_old, self.t, self.y)
else:
return self._dense_output_impl()
def _step_impl(self):
raise NotImplementedError
def _dense_output_impl(self):
raise NotImplementedError
class DenseOutput:
"""Base class for local interpolant over step made by an ODE solver.
It interpolates between `t_min` and `t_max` (see Attributes below).
Evaluation outside this interval is not forbidden, but the accuracy is not
guaranteed.
Attributes
----------
t_min, t_max : float
Time range of the interpolation.
"""
def __init__(self, t_old, t):
self.t_old = t_old
self.t = t
self.t_min = min(t, t_old)
self.t_max = max(t, t_old)
def __call__(self, t):
"""Evaluate the interpolant.
Parameters
----------
t : float or array_like with shape (n_points,)
Points to evaluate the solution at.
Returns
-------
y : ndarray, shape (n,) or (n, n_points)
Computed values. Shape depends on whether `t` was a scalar or a
1-D array.
"""
t = np.asarray(t)
if t.ndim > 1:
raise ValueError("`t` must be a float or a 1-D array.")
return self._call_impl(t)
def _call_impl(self, t):
raise NotImplementedError
class ConstantDenseOutput(DenseOutput):
"""Constant value interpolator.
    This class is used for degenerate integration cases: equal integration
    limits or a system with 0 equations.
"""
def __init__(self, t_old, t, value):
super().__init__(t_old, t)
self.value = value
def _call_impl(self, t):
if t.ndim == 0:
return self.value
else:
ret = np.empty((self.value.shape[0], t.shape[0]))
ret[:] = self.value[:, None]
return ret
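
# ---------------------------------------------------------------------------
# A minimal sketch (not part of this module) showing how the subclassing
# guidelines in the OdeSolver docstring fit together, using a fixed-step
# forward-Euler method. `EulerSketch` and its `h` argument are invented for
# this example; a real solver would also accept ``**extraneous`` keyword
# arguments (warning about them) and implement a proper local interpolant.
class EulerSketch(OdeSolver):
    def __init__(self, fun, t0, y0, t_bound, h=1e-3, vectorized=False):
        super().__init__(fun, t0, y0, t_bound, vectorized)
        self.h = h  # fixed step size; adaptive solvers adjust this per step

    def _step_impl(self):
        t, y = self.t, self.y
        # Clip the step so we do not overshoot t_bound.
        h = min(self.h, abs(self.t_bound - t))
        # Use self.fun (not self._fun) so that nfev is tracked automatically.
        self.y = y + self.direction * h * self.fun(t, y)
        self.t = t + self.direction * h
        return True, None

    def _dense_output_impl(self):
        # Crude but valid: a constant interpolant over the last step.
        return ConstantDenseOutput(self.t_old, self.t, self.y)

# Hypothetical usage:
#   solver = EulerSketch(lambda t, y: -y, 0.0, [1.0], 1.0, h=0.25)
#   while solver.status == 'running':
#       solver.step()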
|
bsd-3-clause
|
algiopensource/server-tools
|
users_ldap_push/models/res_company_ldap.py
|
16
|
2645
|
# -*- coding: utf-8 -*-
##############################################################################
#
# This module copyright (C) 2015 Therp BV (<http://therp.nl>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import _, models, fields, api, exceptions
class ResCompanyLdap(models.Model):
_inherit = 'res.company.ldap'
@api.model
def _create_ldap_entry_field_mappings_default(self):
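        # Odoo one2many command triples: (0, 0, vals) creates a new row. The
        # default maps the res.users login field to the 'userid' LDAP
        # attribute and marks it as the DN component.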
return [
(0, 0, {
'field_id':
self.env.ref('base.field_res_users_login').id,
'attribute': 'userid',
'use_for_dn': True,
}),
]
create_ldap_entry = fields.Boolean('Create ldap entry', default=True)
create_ldap_entry_base = fields.Char(
'Create ldap entry in subtree',
help='Leave empty to use your LDAP base')
create_ldap_entry_objectclass = fields.Char(
'Object class', default='account',
help='Separate object classes by comma if you need more than one')
create_ldap_entry_field_mappings = fields.One2many(
'res.company.ldap.field_mapping', 'ldap_id', string='Field mappings',
default=_create_ldap_entry_field_mappings_default)
@api.model
def get_or_create_user(self, conf, login, ldap_entry):
user_id = super(ResCompanyLdap, self).get_or_create_user(
conf, login, ldap_entry)
if user_id:
self.env['res.users'].browse(user_id).write({
'ldap_entry_dn': ldap_entry[0],
})
return user_id
@api.constrains('create_ldap_entry_field_mappings')
def _constrain_create_ldap_entry_field_mappings(self):
for this in self:
if len(this.create_ldap_entry_field_mappings
.filtered('use_for_dn')) != 1:
raise exceptions.ValidationError(
_('You need to set exactly one mapping as DN'))
|
agpl-3.0
|
Rosy-S/twilio-python
|
twilio/rest/resources/notifications.py
|
51
|
1258
|
from .util import normalize_dates
from . import InstanceResource, ListResource
class Notification(InstanceResource):
def delete(self):
"""
Delete this notification
"""
return self.delete_instance()
class Notifications(ListResource):
name = "Notifications"
instance = Notification
@normalize_dates
def list(self, before=None, after=None, **kwargs):
"""
Returns a page of :class:`Notification` resources as a list.
For paging information see :class:`ListResource`.
**NOTE**: Due to the potentially voluminous amount of data in a
notification, the full HTTP request and response data is only returned
in the Notification instance resource representation.
:param date after: Only list notifications logged after this datetime
        :param date before: Only list notifications logged before this datetime
        :param log_level: If 1, only shows errors. If 0, only shows warnings
"""
kwargs["MessageDate<"] = before
kwargs["MessageDate>"] = after
return self.get_instances(kwargs)
def delete(self, sid):
"""
        Delete a given Notification
"""
return self.delete_instance(sid)
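
# A hypothetical usage sketch; `client` stands in for a configured REST
# client whose construction happens elsewhere in the library:
#
#   errors = client.notifications.list(log_level=1, after=date(2014, 1, 1))
#   for notification in errors:
#       client.notifications.delete(notification.sid)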
|
mit
|
purplecode/node-gyp
|
gyp/pylib/gyp/generator/xcode.py
|
1363
|
58344
|
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import filecmp
import gyp.common
import gyp.xcodeproj_file
import gyp.xcode_ninja
import errno
import os
import sys
import posixpath
import re
import shutil
import subprocess
import tempfile
# Project files generated by this module will use _intermediate_var as a
# custom Xcode setting whose value is a DerivedSources-like directory that's
# project-specific and configuration-specific. The normal choice,
# DERIVED_FILE_DIR, is target-specific, which is thought to be too restrictive
# as it is likely that multiple targets within a single project file will want
# to access the same set of generated files. The other option,
# PROJECT_DERIVED_FILE_DIR, is unsuitable because while it is project-specific,
# it is not configuration-specific. INTERMEDIATE_DIR is defined as
# $(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION).
_intermediate_var = 'INTERMEDIATE_DIR'
# SHARED_INTERMEDIATE_DIR is the same, except that it is shared among all
# targets that share the same BUILT_PRODUCTS_DIR.
_shared_intermediate_var = 'SHARED_INTERMEDIATE_DIR'
_library_search_paths_var = 'LIBRARY_SEARCH_PATHS'
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
'STATIC_LIB_PREFIX': 'lib',
'SHARED_LIB_PREFIX': 'lib',
'STATIC_LIB_SUFFIX': '.a',
'SHARED_LIB_SUFFIX': '.dylib',
# INTERMEDIATE_DIR is a place for targets to build up intermediate products.
# It is specific to each build environment. It is only guaranteed to exist
# and be constant within the context of a project, corresponding to a single
# input file. Some build environments may allow their intermediate directory
# to be shared on a wider scale, but this is not guaranteed.
'INTERMEDIATE_DIR': '$(%s)' % _intermediate_var,
'OS': 'mac',
'PRODUCT_DIR': '$(BUILT_PRODUCTS_DIR)',
'LIB_DIR': '$(BUILT_PRODUCTS_DIR)',
'RULE_INPUT_ROOT': '$(INPUT_FILE_BASE)',
'RULE_INPUT_EXT': '$(INPUT_FILE_SUFFIX)',
'RULE_INPUT_NAME': '$(INPUT_FILE_NAME)',
'RULE_INPUT_PATH': '$(INPUT_FILE_PATH)',
'RULE_INPUT_DIRNAME': '$(INPUT_FILE_DIRNAME)',
'SHARED_INTERMEDIATE_DIR': '$(%s)' % _shared_intermediate_var,
'CONFIGURATION_NAME': '$(CONFIGURATION)',
}
# The Xcode-specific sections that hold paths.
generator_additional_path_sections = [
'mac_bundle_resources',
'mac_framework_headers',
'mac_framework_private_headers',
# 'mac_framework_dirs', input already handles _dirs endings.
]
# The Xcode-specific keys that exist on targets and aren't moved down to
# configurations.
generator_additional_non_configuration_keys = [
'ios_app_extension',
'ios_watch_app',
'ios_watchkit_extension',
'mac_bundle',
'mac_bundle_resources',
'mac_framework_headers',
'mac_framework_private_headers',
'mac_xctest_bundle',
'xcode_create_dependents_test_runner',
]
# We want to let any rules apply to files that are resources also.
generator_extra_sources_for_rules = [
'mac_bundle_resources',
'mac_framework_headers',
'mac_framework_private_headers',
]
generator_filelist_paths = None
# Xcode's standard set of library directories, which don't need to be duplicated
# in LIBRARY_SEARCH_PATHS. This list is not exhaustive, but that's okay.
xcode_standard_library_dirs = frozenset([
'$(SDKROOT)/usr/lib',
'$(SDKROOT)/usr/local/lib',
])
def CreateXCConfigurationList(configuration_names):
xccl = gyp.xcodeproj_file.XCConfigurationList({'buildConfigurations': []})
if len(configuration_names) == 0:
configuration_names = ['Default']
for configuration_name in configuration_names:
xcbc = gyp.xcodeproj_file.XCBuildConfiguration({
'name': configuration_name})
xccl.AppendProperty('buildConfigurations', xcbc)
xccl.SetProperty('defaultConfigurationName', configuration_names[0])
return xccl
class XcodeProject(object):
def __init__(self, gyp_path, path, build_file_dict):
self.gyp_path = gyp_path
self.path = path
self.project = gyp.xcodeproj_file.PBXProject(path=path)
projectDirPath = gyp.common.RelativePath(
os.path.dirname(os.path.abspath(self.gyp_path)),
os.path.dirname(path) or '.')
self.project.SetProperty('projectDirPath', projectDirPath)
self.project_file = \
gyp.xcodeproj_file.XCProjectFile({'rootObject': self.project})
self.build_file_dict = build_file_dict
# TODO(mark): add destructor that cleans up self.path if created_dir is
# True and things didn't complete successfully. Or do something even
# better with "try"?
self.created_dir = False
try:
os.makedirs(self.path)
self.created_dir = True
except OSError, e:
if e.errno != errno.EEXIST:
raise
def Finalize1(self, xcode_targets, serialize_all_tests):
    # Collect a list of all of the build configuration names used by the
    # various targets in the file. It is strongly advised that all targets
    # in a project (even across multiple project files) use the same set of
    # configuration names.
configurations = []
for xct in self.project.GetProperty('targets'):
xccl = xct.GetProperty('buildConfigurationList')
xcbcs = xccl.GetProperty('buildConfigurations')
for xcbc in xcbcs:
name = xcbc.GetProperty('name')
if name not in configurations:
configurations.append(name)
# Replace the XCConfigurationList attached to the PBXProject object with
# a new one specifying all of the configuration names used by the various
# targets.
try:
xccl = CreateXCConfigurationList(configurations)
self.project.SetProperty('buildConfigurationList', xccl)
except:
sys.stderr.write("Problem with gyp file %s\n" % self.gyp_path)
raise
# The need for this setting is explained above where _intermediate_var is
# defined. The comments below about wanting to avoid project-wide build
# settings apply here too, but this needs to be set on a project-wide basis
# so that files relative to the _intermediate_var setting can be displayed
# properly in the Xcode UI.
#
# Note that for configuration-relative files such as anything relative to
# _intermediate_var, for the purposes of UI tree view display, Xcode will
# only resolve the configuration name once, when the project file is
# opened. If the active build configuration is changed, the project file
# must be closed and reopened if it is desired for the tree view to update.
# This is filed as Apple radar 6588391.
xccl.SetBuildSetting(_intermediate_var,
'$(PROJECT_DERIVED_FILE_DIR)/$(CONFIGURATION)')
xccl.SetBuildSetting(_shared_intermediate_var,
'$(SYMROOT)/DerivedSources/$(CONFIGURATION)')
# Set user-specified project-wide build settings and config files. This
# is intended to be used very sparingly. Really, almost everything should
# go into target-specific build settings sections. The project-wide
# settings are only intended to be used in cases where Xcode attempts to
# resolve variable references in a project context as opposed to a target
# context, such as when resolving sourceTree references while building up
    # the tree view for UI display.
# Any values set globally are applied to all configurations, then any
# per-configuration values are applied.
for xck, xcv in self.build_file_dict.get('xcode_settings', {}).iteritems():
xccl.SetBuildSetting(xck, xcv)
if 'xcode_config_file' in self.build_file_dict:
config_ref = self.project.AddOrGetFileInRootGroup(
self.build_file_dict['xcode_config_file'])
xccl.SetBaseConfiguration(config_ref)
build_file_configurations = self.build_file_dict.get('configurations', {})
if build_file_configurations:
for config_name in configurations:
build_file_configuration_named = \
build_file_configurations.get(config_name, {})
if build_file_configuration_named:
xcc = xccl.ConfigurationNamed(config_name)
for xck, xcv in build_file_configuration_named.get('xcode_settings',
{}).iteritems():
xcc.SetBuildSetting(xck, xcv)
if 'xcode_config_file' in build_file_configuration_named:
config_ref = self.project.AddOrGetFileInRootGroup(
build_file_configurations[config_name]['xcode_config_file'])
xcc.SetBaseConfiguration(config_ref)
# Sort the targets based on how they appeared in the input.
# TODO(mark): Like a lot of other things here, this assumes internal
# knowledge of PBXProject - in this case, of its "targets" property.
# ordinary_targets are ordinary targets that are already in the project
# file. run_test_targets are the targets that run unittests and should be
# used for the Run All Tests target. support_targets are the action/rule
# targets used by GYP file targets, just kept for the assert check.
ordinary_targets = []
run_test_targets = []
support_targets = []
# targets is full list of targets in the project.
targets = []
    # does it define its own "all"?
has_custom_all = False
# targets_for_all is the list of ordinary_targets that should be listed
# in this project's "All" target. It includes each non_runtest_target
# that does not have suppress_wildcard set.
targets_for_all = []
for target in self.build_file_dict['targets']:
target_name = target['target_name']
toolset = target['toolset']
qualified_target = gyp.common.QualifiedTarget(self.gyp_path, target_name,
toolset)
xcode_target = xcode_targets[qualified_target]
# Make sure that the target being added to the sorted list is already in
# the unsorted list.
assert xcode_target in self.project._properties['targets']
targets.append(xcode_target)
ordinary_targets.append(xcode_target)
if xcode_target.support_target:
support_targets.append(xcode_target.support_target)
targets.append(xcode_target.support_target)
if not int(target.get('suppress_wildcard', False)):
targets_for_all.append(xcode_target)
if target_name.lower() == 'all':
        has_custom_all = True
# If this target has a 'run_as' attribute, add its target to the
# targets, and add it to the test targets.
if target.get('run_as'):
# Make a target to run something. It should have one
# dependency, the parent xcode target.
xccl = CreateXCConfigurationList(configurations)
run_target = gyp.xcodeproj_file.PBXAggregateTarget({
'name': 'Run ' + target_name,
'productName': xcode_target.GetProperty('productName'),
'buildConfigurationList': xccl,
},
parent=self.project)
run_target.AddDependency(xcode_target)
command = target['run_as']
script = ''
if command.get('working_directory'):
script = script + 'cd "%s"\n' % \
gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
command.get('working_directory'))
if command.get('environment'):
script = script + "\n".join(
['export %s="%s"' %
(key, gyp.xcodeproj_file.ConvertVariablesToShellSyntax(val))
for (key, val) in command.get('environment').iteritems()]) + "\n"
        # Some tests end up using sockets, files on disk, etc. and can get
        # confused if more than one test runs at a time. The generator
# flag 'xcode_serialize_all_test_runs' controls the forcing of all
# tests serially. It defaults to True. To get serial runs this
# little bit of python does the same as the linux flock utility to
# make sure only one runs at a time.
command_prefix = ''
if serialize_all_tests:
command_prefix = \
"""python -c "import fcntl, subprocess, sys
file = open('$TMPDIR/GYP_serialize_test_runs', 'a')
fcntl.flock(file.fileno(), fcntl.LOCK_EX)
sys.exit(subprocess.call(sys.argv[1:]))" """
# If we were unable to exec for some reason, we want to exit
# with an error, and fixup variable references to be shell
# syntax instead of xcode syntax.
script = script + 'exec ' + command_prefix + '%s\nexit 1\n' % \
gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
gyp.common.EncodePOSIXShellList(command.get('action')))
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'shellScript': script,
'showEnvVarsInLog': 0,
})
run_target.AppendProperty('buildPhases', ssbp)
# Add the run target to the project file.
targets.append(run_target)
run_test_targets.append(run_target)
xcode_target.test_runner = run_target
# Make sure that the list of targets being replaced is the same length as
# the one replacing it, but allow for the added test runner targets.
assert len(self.project._properties['targets']) == \
len(ordinary_targets) + len(support_targets)
self.project._properties['targets'] = targets
# Get rid of unnecessary levels of depth in groups like the Source group.
self.project.RootGroupsTakeOverOnlyChildren(True)
# Sort the groups nicely. Do this after sorting the targets, because the
# Products group is sorted based on the order of the targets.
self.project.SortGroups()
# Create an "All" target if there's more than one target in this project
# file and the project didn't define its own "All" target. Put a generated
# "All" target first so that people opening up the project for the first
# time will build everything by default.
if len(targets_for_all) > 1 and not has_custom_all:
xccl = CreateXCConfigurationList(configurations)
all_target = gyp.xcodeproj_file.PBXAggregateTarget(
{
'buildConfigurationList': xccl,
'name': 'All',
},
parent=self.project)
for target in targets_for_all:
all_target.AddDependency(target)
# TODO(mark): This is evil because it relies on internal knowledge of
# PBXProject._properties. It's important to get the "All" target first,
# though.
self.project._properties['targets'].insert(0, all_target)
# The same, but for run_test_targets.
if len(run_test_targets) > 1:
xccl = CreateXCConfigurationList(configurations)
run_all_tests_target = gyp.xcodeproj_file.PBXAggregateTarget(
{
'buildConfigurationList': xccl,
'name': 'Run All Tests',
},
parent=self.project)
for run_test_target in run_test_targets:
run_all_tests_target.AddDependency(run_test_target)
# Insert after the "All" target, which must exist if there is more than
# one run_test_target.
self.project._properties['targets'].insert(1, run_all_tests_target)
def Finalize2(self, xcode_targets, xcode_target_to_target_dict):
# Finalize2 needs to happen in a separate step because the process of
# updating references to other projects depends on the ordering of targets
# within remote project files. Finalize1 is responsible for sorting duty,
# and once all project files are sorted, Finalize2 can come in and update
# these references.
# To support making a "test runner" target that will run all the tests
# that are direct dependents of any given target, we look for
# xcode_create_dependents_test_runner being set on an Aggregate target,
# and generate a second target that will run the tests runners found under
# the marked target.
for bf_tgt in self.build_file_dict['targets']:
if int(bf_tgt.get('xcode_create_dependents_test_runner', 0)):
tgt_name = bf_tgt['target_name']
toolset = bf_tgt['toolset']
qualified_target = gyp.common.QualifiedTarget(self.gyp_path,
tgt_name, toolset)
xcode_target = xcode_targets[qualified_target]
if isinstance(xcode_target, gyp.xcodeproj_file.PBXAggregateTarget):
# Collect all the run test targets.
all_run_tests = []
pbxtds = xcode_target.GetProperty('dependencies')
for pbxtd in pbxtds:
pbxcip = pbxtd.GetProperty('targetProxy')
dependency_xct = pbxcip.GetProperty('remoteGlobalIDString')
if hasattr(dependency_xct, 'test_runner'):
all_run_tests.append(dependency_xct.test_runner)
# Directly depend on all the runners as they depend on the target
# that builds them.
if len(all_run_tests) > 0:
run_all_target = gyp.xcodeproj_file.PBXAggregateTarget({
'name': 'Run %s Tests' % tgt_name,
'productName': tgt_name,
},
parent=self.project)
for run_test_target in all_run_tests:
run_all_target.AddDependency(run_test_target)
# Insert the test runner after the related target.
idx = self.project._properties['targets'].index(xcode_target)
self.project._properties['targets'].insert(idx + 1, run_all_target)
# Update all references to other projects, to make sure that the lists of
# remote products are complete. Otherwise, Xcode will fill them in when
# it opens the project file, which will result in unnecessary diffs.
# TODO(mark): This is evil because it relies on internal knowledge of
# PBXProject._other_pbxprojects.
for other_pbxproject in self.project._other_pbxprojects.keys():
self.project.AddOrGetProjectReference(other_pbxproject)
self.project.SortRemoteProductReferences()
# Give everything an ID.
self.project_file.ComputeIDs()
# Make sure that no two objects in the project file have the same ID. If
# multiple objects wind up with the same ID, upon loading the file, Xcode
# will only recognize one object (the last one in the file?) and the
# results are unpredictable.
self.project_file.EnsureNoIDCollisions()
def Write(self):
# Write the project file to a temporary location first. Xcode watches for
# changes to the project file and presents a UI sheet offering to reload
# the project when it does change. However, in some cases, especially when
# multiple projects are open or when Xcode is busy, things don't work so
# seamlessly. Sometimes, Xcode is able to detect that a project file has
# changed but can't unload it because something else is referencing it.
# To mitigate this problem, and to avoid even having Xcode present the UI
# sheet when an open project is rewritten for inconsequential changes, the
# project file is written to a temporary file in the xcodeproj directory
# first. The new temporary file is then compared to the existing project
# file, if any. If they differ, the new file replaces the old; otherwise,
# the new project file is simply deleted. Xcode properly detects a file
# being renamed over an open project file as a change and so it remains
# able to present the "project file changed" sheet under this system.
# Writing to a temporary file first also avoids the possible problem of
# Xcode rereading an incomplete project file.
(output_fd, new_pbxproj_path) = \
tempfile.mkstemp(suffix='.tmp', prefix='project.pbxproj.gyp.',
dir=self.path)
try:
output_file = os.fdopen(output_fd, 'wb')
self.project_file.Print(output_file)
output_file.close()
pbxproj_path = os.path.join(self.path, 'project.pbxproj')
same = False
try:
same = filecmp.cmp(pbxproj_path, new_pbxproj_path, False)
except OSError, e:
if e.errno != errno.ENOENT:
raise
if same:
# The new file is identical to the old one, just get rid of the new
# one.
os.unlink(new_pbxproj_path)
else:
# The new file is different from the old one, or there is no old one.
# Rename the new file to the permanent name.
#
# tempfile.mkstemp uses an overly restrictive mode, resulting in a
# file that can only be read by the owner, regardless of the umask.
# There's no reason to not respect the umask here, which means that
# an extra hoop is required to fetch it and reset the new file's mode.
#
# No way to get the umask without setting a new one? Set a safe one
# and then set it back to the old value.
umask = os.umask(077)
os.umask(umask)
os.chmod(new_pbxproj_path, 0666 & ~umask)
os.rename(new_pbxproj_path, pbxproj_path)
except Exception:
# Don't leave turds behind. In fact, if this code was responsible for
# creating the xcodeproj directory, get rid of that too.
os.unlink(new_pbxproj_path)
if self.created_dir:
shutil.rmtree(self.path, True)
raise
def AddSourceToTarget(source, type, pbxp, xct):
# TODO(mark): Perhaps source_extensions and library_extensions can be made a
# little bit fancier.
source_extensions = ['c', 'cc', 'cpp', 'cxx', 'm', 'mm', 's', 'swift']
# .o is conceptually more of a "source" than a "library," but Xcode thinks
# of "sources" as things to compile and "libraries" (or "frameworks") as
# things to link with. Adding an object file to an Xcode target's frameworks
# phase works properly.
library_extensions = ['a', 'dylib', 'framework', 'o']
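# e.g. "foo.cc" is routed to the sources phase and "libbar.a" to the
# frameworks phase; anything unrecognized is only added to the root group.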
basename = posixpath.basename(source)
(root, ext) = posixpath.splitext(basename)
if ext:
ext = ext[1:].lower()
if ext in source_extensions and type != 'none':
xct.SourcesPhase().AddFile(source)
elif ext in library_extensions and type != 'none':
xct.FrameworksPhase().AddFile(source)
else:
# Files that aren't added to a sources or frameworks build phase can still
# go into the project file, just not as part of a build phase.
pbxp.AddOrGetFileInRootGroup(source)
def AddResourceToTarget(resource, pbxp, xct):
# TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
# where it's used.
xct.ResourcesPhase().AddFile(resource)
def AddHeaderToTarget(header, pbxp, xct, is_public):
# TODO(mark): Combine with AddSourceToTarget above? Or just inline this call
# where it's used.
settings = '{ATTRIBUTES = (%s, ); }' % ('Private', 'Public')[is_public]
xct.HeadersPhase().AddFile(header, settings)
_xcode_variable_re = re.compile(r'(\$\((.*?)\))')
def ExpandXcodeVariables(string, expansions):
"""Expands Xcode-style $(VARIABLES) in string per the expansions dict.
In some rare cases, it is appropriate to expand Xcode variables when a
project file is generated. For any substring $(VAR) in string, if VAR is a
key in the expansions dict, $(VAR) will be replaced with expansions[VAR].
Any $(VAR) substring in string for which VAR is not a key in the expansions
dict will remain in the returned string.
"""
matches = _xcode_variable_re.findall(string)
if not matches:
return string
matches.reverse()
for match in matches:
(to_replace, variable) = match
if variable not in expansions:
continue
replacement = expansions[variable]
string = re.sub(re.escape(to_replace), replacement, string)
return string
_xcode_define_re = re.compile(r'([\\\"\' ])')
def EscapeXcodeDefine(s):
"""We must escape the defines that we give to XCode so that it knows not to
split on spaces and to respect backslash and quote literals. However, we
must not quote the define, or Xcode will incorrectly intepret variables
especially $(inherited)."""
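# e.g. EscapeXcodeDefine('NAME="a b"') returns 'NAME=\"a\ b\"'.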
return re.sub(_xcode_define_re, r'\\\1', s)
def PerformBuild(data, configurations, params):
options = params['options']
for build_file, build_file_dict in data.iteritems():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != '.gyp':
continue
xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
if options.generator_output:
xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
for config in configurations:
arguments = ['xcodebuild', '-project', xcodeproj_path]
arguments += ['-configuration', config]
print "Building [%s]: %s" % (config, arguments)
subprocess.check_call(arguments)
def CalculateGeneratorInputInfo(params):
toplevel = params['options'].toplevel_dir
if params.get('flavor') == 'ninja':
generator_dir = os.path.relpath(params['options'].generator_output or '.')
output_dir = params.get('generator_flags', {}).get('output_dir', 'out')
output_dir = os.path.normpath(os.path.join(generator_dir, output_dir))
qualified_out_dir = os.path.normpath(os.path.join(
toplevel, output_dir, 'gypfiles-xcode-ninja'))
else:
output_dir = os.path.normpath(os.path.join(toplevel, 'xcodebuild'))
qualified_out_dir = os.path.normpath(os.path.join(
toplevel, output_dir, 'gypfiles'))
global generator_filelist_paths
generator_filelist_paths = {
'toplevel': toplevel,
'qualified_out_dir': qualified_out_dir,
}
def GenerateOutput(target_list, target_dicts, data, params):
# Optionally configure each spec to use ninja as the external builder.
ninja_wrapper = params.get('flavor') == 'ninja'
if ninja_wrapper:
(target_list, target_dicts, data) = \
gyp.xcode_ninja.CreateWrapper(target_list, target_dicts, data, params)
options = params['options']
generator_flags = params.get('generator_flags', {})
parallel_builds = generator_flags.get('xcode_parallel_builds', True)
serialize_all_tests = \
generator_flags.get('xcode_serialize_all_test_runs', True)
upgrade_check_project_version = \
generator_flags.get('xcode_upgrade_check_project_version', None)
# Format upgrade_check_project_version with leading zeros as needed.
if upgrade_check_project_version:
upgrade_check_project_version = str(upgrade_check_project_version)
while len(upgrade_check_project_version) < 4:
upgrade_check_project_version = '0' + upgrade_check_project_version
skip_excluded_files = \
not generator_flags.get('xcode_list_excluded_files', True)
xcode_projects = {}
for build_file, build_file_dict in data.iteritems():
(build_file_root, build_file_ext) = os.path.splitext(build_file)
if build_file_ext != '.gyp':
continue
xcodeproj_path = build_file_root + options.suffix + '.xcodeproj'
if options.generator_output:
xcodeproj_path = os.path.join(options.generator_output, xcodeproj_path)
xcp = XcodeProject(build_file, xcodeproj_path, build_file_dict)
xcode_projects[build_file] = xcp
pbxp = xcp.project
# Set project-level attributes from multiple options
project_attributes = {}
if parallel_builds:
project_attributes['BuildIndependentTargetsInParallel'] = 'YES'
if upgrade_check_project_version:
project_attributes['LastUpgradeCheck'] = upgrade_check_project_version
project_attributes['LastTestingUpgradeCheck'] = \
upgrade_check_project_version
project_attributes['LastSwiftUpdateCheck'] = \
upgrade_check_project_version
pbxp.SetProperty('attributes', project_attributes)
# Add gyp/gypi files to project
if not generator_flags.get('standalone'):
main_group = pbxp.GetProperty('mainGroup')
build_group = gyp.xcodeproj_file.PBXGroup({'name': 'Build'})
main_group.AppendChild(build_group)
for included_file in build_file_dict['included_files']:
build_group.AddOrGetFileByPath(included_file, False)
xcode_targets = {}
xcode_target_to_target_dict = {}
for qualified_target in target_list:
[build_file, target_name, toolset] = \
gyp.common.ParseQualifiedTarget(qualified_target)
spec = target_dicts[qualified_target]
if spec['toolset'] != 'target':
raise Exception(
'Multiple toolsets not supported in xcode build (target %s)' %
qualified_target)
configuration_names = [spec['default_configuration']]
for configuration_name in sorted(spec['configurations'].keys()):
if configuration_name not in configuration_names:
configuration_names.append(configuration_name)
xcp = xcode_projects[build_file]
pbxp = xcp.project
# Set up the configurations for the target according to the list of names
# supplied.
xccl = CreateXCConfigurationList(configuration_names)
# Create an XCTarget subclass object for the target. The type with
# "+bundle" appended will be used if the target has "mac_bundle" set.
# loadable_modules not in a mac_bundle are mapped to
# com.googlecode.gyp.xcode.bundle, a pseudo-type that xcode.py interprets
# to create a single-file mh_bundle.
_types = {
'executable': 'com.apple.product-type.tool',
'loadable_module': 'com.googlecode.gyp.xcode.bundle',
'shared_library': 'com.apple.product-type.library.dynamic',
'static_library': 'com.apple.product-type.library.static',
'mac_kernel_extension': 'com.apple.product-type.kernel-extension',
'executable+bundle': 'com.apple.product-type.application',
'loadable_module+bundle': 'com.apple.product-type.bundle',
'loadable_module+xctest': 'com.apple.product-type.bundle.unit-test',
'shared_library+bundle': 'com.apple.product-type.framework',
'executable+extension+bundle': 'com.apple.product-type.app-extension',
'executable+watch+extension+bundle':
'com.apple.product-type.watchkit-extension',
'executable+watch+bundle':
'com.apple.product-type.application.watchapp',
'mac_kernel_extension+bundle': 'com.apple.product-type.kernel-extension',
}
target_properties = {
'buildConfigurationList': xccl,
'name': target_name,
}
type = spec['type']
is_xctest = int(spec.get('mac_xctest_bundle', 0))
is_bundle = int(spec.get('mac_bundle', 0)) or is_xctest
is_app_extension = int(spec.get('ios_app_extension', 0))
is_watchkit_extension = int(spec.get('ios_watchkit_extension', 0))
is_watch_app = int(spec.get('ios_watch_app', 0))
if type != 'none':
type_bundle_key = type
if is_xctest:
type_bundle_key += '+xctest'
assert type == 'loadable_module', (
'mac_xctest_bundle targets must have type loadable_module '
'(target %s)' % target_name)
elif is_app_extension:
assert is_bundle, ('ios_app_extension flag requires mac_bundle '
'(target %s)' % target_name)
type_bundle_key += '+extension+bundle'
elif is_watchkit_extension:
assert is_bundle, ('ios_watchkit_extension flag requires mac_bundle '
'(target %s)' % target_name)
type_bundle_key += '+watch+extension+bundle'
elif is_watch_app:
assert is_bundle, ('ios_watch_app flag requires mac_bundle '
'(target %s)' % target_name)
type_bundle_key += '+watch+bundle'
elif is_bundle:
type_bundle_key += '+bundle'
xctarget_type = gyp.xcodeproj_file.PBXNativeTarget
try:
target_properties['productType'] = _types[type_bundle_key]
except KeyError, e:
gyp.common.ExceptionAppend(e, "-- unknown product type while "
"writing target %s" % target_name)
raise
else:
xctarget_type = gyp.xcodeproj_file.PBXAggregateTarget
assert not is_bundle, (
'mac_bundle targets cannot have type none (target "%s")' %
target_name)
assert not is_xctest, (
'mac_xctest_bundle targets cannot have type none (target "%s")' %
target_name)
target_product_name = spec.get('product_name')
if target_product_name is not None:
target_properties['productName'] = target_product_name
xct = xctarget_type(target_properties, parent=pbxp,
force_outdir=spec.get('product_dir'),
force_prefix=spec.get('product_prefix'),
force_extension=spec.get('product_extension'))
pbxp.AppendProperty('targets', xct)
xcode_targets[qualified_target] = xct
xcode_target_to_target_dict[xct] = spec
spec_actions = spec.get('actions', [])
spec_rules = spec.get('rules', [])
# Xcode has some "issues" with checking dependencies for the "Compile
# sources" step with any source files/headers generated by actions/rules.
# To work around this, if a target is building anything directly (not
# type "none"), then a second target is used to run the GYP actions/rules
# and is made a dependency of this target. This way the work is done
# before the dependency checks for what should be recompiled.
support_xct = None
# The Xcode "issues" don't affect xcode-ninja builds, since the dependency
# logic all happens in ninja. Don't bother creating the extra targets in
# that case.
if type != 'none' and (spec_actions or spec_rules) and not ninja_wrapper:
support_xccl = CreateXCConfigurationList(configuration_names)
support_target_suffix = generator_flags.get(
'support_target_suffix', ' Support')
support_target_properties = {
'buildConfigurationList': support_xccl,
'name': target_name + support_target_suffix,
}
if target_product_name:
support_target_properties['productName'] = \
target_product_name + ' Support'
support_xct = \
gyp.xcodeproj_file.PBXAggregateTarget(support_target_properties,
parent=pbxp)
pbxp.AppendProperty('targets', support_xct)
xct.AddDependency(support_xct)
# Hang the support target off the main target so it can be tested/found
# by the generator during Finalize.
xct.support_target = support_xct
prebuild_index = 0
# Add custom shell script phases for "actions" sections.
for action in spec_actions:
# There's no need to write anything into the script to ensure that the
# output directories already exist, because Xcode will look at the
# declared outputs and automatically ensure that they exist for us.
# Do we have a message to print when this action runs?
message = action.get('message')
if message:
message = 'echo note: ' + gyp.common.EncodePOSIXShellArgument(message)
else:
message = ''
# Turn the list into a string that can be passed to a shell.
action_string = gyp.common.EncodePOSIXShellList(action['action'])
# Convert Xcode-type variable references to sh-compatible environment
# variable references.
message_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(message)
action_string_sh = gyp.xcodeproj_file.ConvertVariablesToShellSyntax(
action_string)
script = ''
# Include the optional message
if message_sh:
script += message_sh + '\n'
# Be sure the script runs in exec, and that if exec fails, the script
# exits signalling an error.
script += 'exec ' + action_string_sh + '\nexit 1\n'
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'inputPaths': action['inputs'],
'name': 'Action "' + action['action_name'] + '"',
'outputPaths': action['outputs'],
'shellScript': script,
'showEnvVarsInLog': 0,
})
if support_xct:
support_xct.AppendProperty('buildPhases', ssbp)
else:
# TODO(mark): this assumes too much knowledge of the internals of
# xcodeproj_file; some of these smarts should move into xcodeproj_file
# itself.
xct._properties['buildPhases'].insert(prebuild_index, ssbp)
prebuild_index = prebuild_index + 1
# TODO(mark): Should verify that at most one of these is specified.
if int(action.get('process_outputs_as_sources', False)):
for output in action['outputs']:
AddSourceToTarget(output, type, pbxp, xct)
if int(action.get('process_outputs_as_mac_bundle_resources', False)):
for output in action['outputs']:
AddResourceToTarget(output, pbxp, xct)
# tgt_mac_bundle_resources holds the list of bundle resources so
# the rule processing can check against it.
if is_bundle:
tgt_mac_bundle_resources = spec.get('mac_bundle_resources', [])
else:
tgt_mac_bundle_resources = []
# Add custom shell script phases driving "make" for "rules" sections.
#
# Xcode's built-in rule support is almost powerful enough to use directly,
# but there are a few significant deficiencies that render them unusable.
# There are workarounds for some of its inadequacies, but in aggregate,
# the workarounds added complexity to the generator, and some workarounds
# actually require input files to be crafted more carefully than I'd like.
# Consequently, until Xcode rules are made more capable, "rules" input
# sections will be handled in Xcode output by shell script build phases
# performed prior to the compilation phase.
#
# The following problems with Xcode rules were found. The numbers are
# Apple radar IDs. I hope that these shortcomings are addressed; I really
# liked having the rules handled directly in Xcode during the period that
# I was prototyping this.
#
# 6588600 Xcode compiles custom script rule outputs too soon, compilation
# fails. This occurs when rule outputs from distinct inputs are
# interdependent. The only workaround is to put rules and their
# inputs in a separate target from the one that compiles the rule
# outputs. This requires input file cooperation and it means that
# process_outputs_as_sources is unusable.
# 6584932 Need to declare that custom rule outputs should be excluded from
# compilation. A possible workaround is to lie to Xcode about a
# rule's output, giving it a dummy file it doesn't know how to
# compile. The rule action script would need to touch the dummy.
# 6584839 I need a way to declare additional inputs to a custom rule.
# A possible workaround is a shell script phase prior to
# compilation that touches a rule's primary input files if any
# would-be additional inputs are newer than the output. Modifying
# the source tree - even just modification times - feels dirty.
# 6564240 Xcode "custom script" build rules always dump all environment
# variables. This is a low-priority problem and is not a
# show-stopper.
rules_by_ext = {}
for rule in spec_rules:
rules_by_ext[rule['extension']] = rule
# First, some definitions:
#
# A "rule source" is a file that was listed in a target's "sources"
# list and will have a rule applied to it on the basis of matching the
# rule's "extensions" attribute. Rule sources are direct inputs to
# rules.
#
# Rule definitions may specify additional inputs in their "inputs"
# attribute. These additional inputs are used for dependency tracking
# purposes.
#
# A "concrete output" is a rule output with input-dependent variables
# resolved. For example, given a rule with:
# 'extension': 'ext', 'outputs': ['$(INPUT_FILE_BASE).cc'],
# if the target's "sources" list contained "one.ext" and "two.ext",
# the "concrete output" for rule input "two.ext" would be "two.cc". If
# a rule specifies multiple outputs, each input file that the rule is
# applied to will have the same number of concrete outputs.
#
# If any concrete outputs are outdated or missing relative to their
# corresponding rule_source or to any specified additional input, the
# rule action must be performed to generate the concrete outputs.
# concrete_outputs_by_rule_source will have an item at the same index
# as the rule['rule_sources'] that it corresponds to. Each item is a
# list of all of the concrete outputs for the rule_source.
concrete_outputs_by_rule_source = []
# concrete_outputs_all is a flat list of all concrete outputs that this
# rule is able to produce, given the known set of input files
# (rule_sources) that apply to it.
concrete_outputs_all = []
# messages & actions are keyed by the same indices as rule['rule_sources']
# and concrete_outputs_by_rule_source. They contain the message and
# action to perform after resolving input-dependent variables. The
# message is optional, in which case None is stored for each rule source.
messages = []
actions = []
for rule_source in rule.get('rule_sources', []):
rule_source_dirname, rule_source_basename = \
posixpath.split(rule_source)
(rule_source_root, rule_source_ext) = \
posixpath.splitext(rule_source_basename)
# These are the same variable names that Xcode uses for its own native
# rule support. Because Xcode's rule engine is not being used, they
# need to be expanded as they are written to the makefile.
rule_input_dict = {
'INPUT_FILE_BASE': rule_source_root,
'INPUT_FILE_SUFFIX': rule_source_ext,
'INPUT_FILE_NAME': rule_source_basename,
'INPUT_FILE_PATH': rule_source,
'INPUT_FILE_DIRNAME': rule_source_dirname,
}
concrete_outputs_for_this_rule_source = []
for output in rule.get('outputs', []):
# Fortunately, Xcode and make both use $(VAR) format for their
# variables, so the expansion is the only transformation necessary.
# Any remaining $(VAR)-type variables in the string can be given
# directly to make, which will pick up the correct settings from
# what Xcode puts into the environment.
concrete_output = ExpandXcodeVariables(output, rule_input_dict)
concrete_outputs_for_this_rule_source.append(concrete_output)
# Add all concrete outputs to the project.
pbxp.AddOrGetFileInRootGroup(concrete_output)
concrete_outputs_by_rule_source.append( \
concrete_outputs_for_this_rule_source)
concrete_outputs_all.extend(concrete_outputs_for_this_rule_source)
# TODO(mark): Should verify that at most one of these is specified.
if int(rule.get('process_outputs_as_sources', False)):
for output in concrete_outputs_for_this_rule_source:
AddSourceToTarget(output, type, pbxp, xct)
# If the file came from the mac_bundle_resources list or if the rule
# is marked to process outputs as bundle resource, do so.
was_mac_bundle_resource = rule_source in tgt_mac_bundle_resources
if was_mac_bundle_resource or \
int(rule.get('process_outputs_as_mac_bundle_resources', False)):
for output in concrete_outputs_for_this_rule_source:
AddResourceToTarget(output, pbxp, xct)
# Do we have a message to print when this rule runs?
message = rule.get('message')
if message:
message = gyp.common.EncodePOSIXShellArgument(message)
message = ExpandXcodeVariables(message, rule_input_dict)
messages.append(message)
# Turn the list into a string that can be passed to a shell.
action_string = gyp.common.EncodePOSIXShellList(rule['action'])
action = ExpandXcodeVariables(action_string, rule_input_dict)
actions.append(action)
if len(concrete_outputs_all) > 0:
# TODO(mark): There's a possibility for collision here. Consider
# target "t" rule "A_r" and target "t_A" rule "r".
makefile_name = '%s.make' % re.sub(
'[^a-zA-Z0-9_]', '_' , '%s_%s' % (target_name, rule['rule_name']))
makefile_path = os.path.join(xcode_projects[build_file].path,
makefile_name)
# TODO(mark): try/close? Write to a temporary file and swap it only
# if it's got changes?
makefile = open(makefile_path, 'wb')
# make will build the first target in the makefile by default. By
# convention, it's called "all". List all (or at least one)
# concrete output for each rule source as a prerequisite of the "all"
# target.
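# Illustration (hypothetical rule sources "one.ext" and "two.ext", each
# producing '$(INPUT_FILE_BASE).cc'): the generated prologue would read
#   all: \
#     one.cc \
#     two.cc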
makefile.write('all: \\\n')
for concrete_output_index in \
xrange(0, len(concrete_outputs_by_rule_source)):
# Only list the first (index [0]) concrete output of each input
# in the "all" target. Otherwise, a parallel make (-j > 1) would
# attempt to process each input multiple times simultaneously.
# Otherwise, "all" could just contain the entire list of
# concrete_outputs_all.
concrete_output = \
concrete_outputs_by_rule_source[concrete_output_index][0]
if concrete_output_index == len(concrete_outputs_by_rule_source) - 1:
eol = ''
else:
eol = ' \\'
makefile.write(' %s%s\n' % (concrete_output, eol))
for (rule_source, concrete_outputs, message, action) in \
zip(rule['rule_sources'], concrete_outputs_by_rule_source,
messages, actions):
makefile.write('\n')
# Add a rule that declares it can build each concrete output of a
# rule source. Collect the names of the directories that are
# required.
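# Illustration: for rule source "one.ext" with outputs
# '$(INPUT_FILE_BASE).cc' and '$(INPUT_FILE_BASE).h', the emitted rule has
# the shape "one.cc one.h : one.ext <additional inputs>" spread across
# continuation lines, followed by the mkdir and action commands below.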
concrete_output_dirs = []
for concrete_output_index in xrange(0, len(concrete_outputs)):
concrete_output = concrete_outputs[concrete_output_index]
if concrete_output_index == 0:
bol = ''
else:
bol = ' '
makefile.write('%s%s \\\n' % (bol, concrete_output))
concrete_output_dir = posixpath.dirname(concrete_output)
if (concrete_output_dir and
concrete_output_dir not in concrete_output_dirs):
concrete_output_dirs.append(concrete_output_dir)
makefile.write(' : \\\n')
# The prerequisites for this rule are the rule source itself and
# the set of additional rule inputs, if any.
prerequisites = [rule_source]
prerequisites.extend(rule.get('inputs', []))
for prerequisite_index in xrange(0, len(prerequisites)):
prerequisite = prerequisites[prerequisite_index]
if prerequisite_index == len(prerequisites) - 1:
eol = ''
else:
eol = ' \\'
makefile.write(' %s%s\n' % (prerequisite, eol))
# Make sure that output directories exist before executing the rule
# action.
if len(concrete_output_dirs) > 0:
makefile.write('\t@mkdir -p "%s"\n' %
'" "'.join(concrete_output_dirs))
# The rule message and action have already had the necessary variable
# substitutions performed.
if message:
# Mark it with note: so Xcode picks it up in build output.
makefile.write('\t@echo note: %s\n' % message)
makefile.write('\t%s\n' % action)
makefile.close()
# It might be nice to ensure that needed output directories exist
# here rather than in each target in the Makefile, but that wouldn't
# work if there ever was a concrete output that had an input-dependent
# variable anywhere other than in the leaf position.
# Don't declare any inputPaths or outputPaths. If they're present,
# Xcode will provide a slight optimization by only running the script
# phase if any output is missing or outdated relative to any input.
# Unfortunately, it will also assume that all outputs are touched by
# the script, and if the outputs serve as files in a compilation
# phase, they will be unconditionally rebuilt. Since make might not
# rebuild everything that could be declared here as an output, this
# extra compilation activity is unnecessary. With inputPaths and
# outputPaths not supplied, make will always be called, but it knows
# enough to not do anything when everything is up-to-date.
# To help speed things up, pass -j COUNT to make so it does some work
# in parallel. Don't use ncpus because Xcode will build ncpus targets
# in parallel and if each target happens to have a rules step, there
# would be ncpus^2 things going. With a machine that has 2 quad-core
# Xeons, a build can quickly run out of processes based on
# scheduling/other tasks, and randomly failing builds are no good.
script = \
"""JOB_COUNT="$(/usr/sbin/sysctl -n hw.ncpu)"
if [ "${JOB_COUNT}" -gt 4 ]; then
JOB_COUNT=4
fi
exec xcrun make -f "${PROJECT_FILE_PATH}/%s" -j "${JOB_COUNT}"
exit 1
""" % makefile_name
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'name': 'Rule "' + rule['rule_name'] + '"',
'shellScript': script,
'showEnvVarsInLog': 0,
})
if support_xct:
support_xct.AppendProperty('buildPhases', ssbp)
else:
# TODO(mark): this assumes too much knowledge of the internals of
# xcodeproj_file; some of these smarts should move into xcodeproj_file
# itself.
xct._properties['buildPhases'].insert(prebuild_index, ssbp)
prebuild_index = prebuild_index + 1
# Extra rule inputs also go into the project file. Concrete outputs were
# already added when they were computed.
groups = ['inputs', 'inputs_excluded']
if skip_excluded_files:
groups = [x for x in groups if not x.endswith('_excluded')]
for group in groups:
for item in rule.get(group, []):
pbxp.AddOrGetFileInRootGroup(item)
# Add "sources".
for source in spec.get('sources', []):
(source_root, source_extension) = posixpath.splitext(source)
if source_extension[1:] not in rules_by_ext:
# AddSourceToTarget will add the file to a root group if it's not
# already there.
AddSourceToTarget(source, type, pbxp, xct)
else:
pbxp.AddOrGetFileInRootGroup(source)
# Add "mac_bundle_resources" and "mac_framework_private_headers" if
# it's a bundle of any type.
if is_bundle:
for resource in tgt_mac_bundle_resources:
(resource_root, resource_extension) = posixpath.splitext(resource)
if resource_extension[1:] not in rules_by_ext:
AddResourceToTarget(resource, pbxp, xct)
else:
pbxp.AddOrGetFileInRootGroup(resource)
for header in spec.get('mac_framework_private_headers', []):
AddHeaderToTarget(header, pbxp, xct, False)
# Add "mac_framework_headers". These can be valid for both frameworks
# and static libraries.
if is_bundle or type == 'static_library':
for header in spec.get('mac_framework_headers', []):
AddHeaderToTarget(header, pbxp, xct, True)
# Add "copies".
pbxcp_dict = {}
for copy_group in spec.get('copies', []):
dest = copy_group['destination']
if dest[0] not in ('/', '$'):
# Relative paths are relative to $(SRCROOT).
dest = '$(SRCROOT)/' + dest
code_sign = int(copy_group.get('xcode_code_sign', 0))
settings = (None, '{ATTRIBUTES = (CodeSignOnCopy, ); }')[code_sign]
# Coalesce multiple "copies" sections in the same target with the same
# "destination" property into the same PBXCopyFilesBuildPhase, otherwise
# they'll wind up with ID collisions.
pbxcp = pbxcp_dict.get(dest, None)
if pbxcp is None:
pbxcp = gyp.xcodeproj_file.PBXCopyFilesBuildPhase({
'name': 'Copy to ' + copy_group['destination']
},
parent=xct)
pbxcp.SetDestination(dest)
# TODO(mark): The usual comment about this knowing too much about
# gyp.xcodeproj_file internals applies.
xct._properties['buildPhases'].insert(prebuild_index, pbxcp)
pbxcp_dict[dest] = pbxcp
for file in copy_group['files']:
pbxcp.AddFile(file, settings)
# Excluded files can also go into the project file.
if not skip_excluded_files:
for key in ['sources', 'mac_bundle_resources', 'mac_framework_headers',
'mac_framework_private_headers']:
excluded_key = key + '_excluded'
for item in spec.get(excluded_key, []):
pbxp.AddOrGetFileInRootGroup(item)
# So can "inputs" and "outputs" sections of "actions" groups.
groups = ['inputs', 'inputs_excluded', 'outputs', 'outputs_excluded']
if skip_excluded_files:
groups = [x for x in groups if not x.endswith('_excluded')]
for action in spec.get('actions', []):
for group in groups:
for item in action.get(group, []):
# Exclude anything in BUILT_PRODUCTS_DIR. They're products, not
# sources.
if not item.startswith('$(BUILT_PRODUCTS_DIR)/'):
pbxp.AddOrGetFileInRootGroup(item)
for postbuild in spec.get('postbuilds', []):
action_string_sh = gyp.common.EncodePOSIXShellList(postbuild['action'])
script = 'exec ' + action_string_sh + '\nexit 1\n'
# Make the postbuild step depend on the output of ld or ar from this
# target. Apparently putting the script step after the link step isn't
# sufficient to ensure proper ordering in all cases. With an input
# declared but no outputs, the script step should run every time, as
# desired.
ssbp = gyp.xcodeproj_file.PBXShellScriptBuildPhase({
'inputPaths': ['$(BUILT_PRODUCTS_DIR)/$(EXECUTABLE_PATH)'],
'name': 'Postbuild "' + postbuild['postbuild_name'] + '"',
'shellScript': script,
'showEnvVarsInLog': 0,
})
xct.AppendProperty('buildPhases', ssbp)
# Add dependencies before libraries, because adding a dependency may imply
# adding a library. It's preferable to keep dependencies listed first
# during a link phase so that they can override symbols that would
# otherwise be provided by libraries, which will usually include system
# libraries. On some systems, ld is finicky and even requires the
# libraries to be ordered in such a way that unresolved symbols in
# earlier-listed libraries may only be resolved by later-listed libraries.
# The Mac linker doesn't work that way, but other platforms do, and so
# their linker invocations need to be constructed in this way. There's
# no compelling reason for Xcode's linker invocations to differ.
if 'dependencies' in spec:
for dependency in spec['dependencies']:
xct.AddDependency(xcode_targets[dependency])
# The support project also gets the dependencies (in case they are
# needed for the actions/rules to work).
if support_xct:
support_xct.AddDependency(xcode_targets[dependency])
if 'libraries' in spec:
for library in spec['libraries']:
xct.FrameworksPhase().AddFile(library)
# Add the library's directory to LIBRARY_SEARCH_PATHS if necessary.
# I wish Xcode handled this automatically.
library_dir = posixpath.dirname(library)
if library_dir not in xcode_standard_library_dirs and (
not xct.HasBuildSetting(_library_search_paths_var) or
library_dir not in xct.GetBuildSetting(_library_search_paths_var)):
xct.AppendBuildSetting(_library_search_paths_var, library_dir)
for configuration_name in configuration_names:
configuration = spec['configurations'][configuration_name]
xcbc = xct.ConfigurationNamed(configuration_name)
for include_dir in configuration.get('mac_framework_dirs', []):
xcbc.AppendBuildSetting('FRAMEWORK_SEARCH_PATHS', include_dir)
for include_dir in configuration.get('include_dirs', []):
xcbc.AppendBuildSetting('HEADER_SEARCH_PATHS', include_dir)
for library_dir in configuration.get('library_dirs', []):
if library_dir not in xcode_standard_library_dirs and (
not xcbc.HasBuildSetting(_library_search_paths_var) or
library_dir not in xcbc.GetBuildSetting(_library_search_paths_var)):
xcbc.AppendBuildSetting(_library_search_paths_var, library_dir)
if 'defines' in configuration:
for define in configuration['defines']:
set_define = EscapeXcodeDefine(define)
xcbc.AppendBuildSetting('GCC_PREPROCESSOR_DEFINITIONS', set_define)
if 'xcode_settings' in configuration:
for xck, xcv in configuration['xcode_settings'].iteritems():
xcbc.SetBuildSetting(xck, xcv)
if 'xcode_config_file' in configuration:
config_ref = pbxp.AddOrGetFileInRootGroup(
configuration['xcode_config_file'])
xcbc.SetBaseConfiguration(config_ref)
build_files = []
for build_file, build_file_dict in data.iteritems():
if build_file.endswith('.gyp'):
build_files.append(build_file)
for build_file in build_files:
xcode_projects[build_file].Finalize1(xcode_targets, serialize_all_tests)
for build_file in build_files:
xcode_projects[build_file].Finalize2(xcode_targets,
xcode_target_to_target_dict)
for build_file in build_files:
xcode_projects[build_file].Write()
|
mit
|
danlrobertson/servo
|
tests/wpt/web-platform-tests/html/semantics/embedded-content/media-elements/track/track-element/cors/support/cors-tester.py
|
238
|
1454
|
from wptserve.handlers import HTTPException
import urllib
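# Support file for the track-element CORS tests: each request records whether
# an Origin header and the id-named cookie were present, appending one line
# per request to the server stash; a "read" request takes and returns the
# accumulated log.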
def main(request, response):
if request.method != "GET":
raise HTTPException(400, message="Method was not GET")
if not "id" in request.GET:
raise HTTPException(400, message="No id")
id = request.GET['id']
if "read" in request.GET:
data = request.server.stash.take(id)
if data is None:
response.set_error(404, "Tried to read data not yet set")
return
return [("Content-Type", "text/plain")], data
elif "cleanup" in request.GET:
request.server.stash.take(id)
return "OK"
elif "delete-cookie" in request.GET:
response.delete_cookie(id)
return [("Content-Type", "text/plain")], "OK"
if "origin" in request.GET:
response.headers.set('Access-Control-Allow-Origin', request.GET['origin'])
response.headers.set('Access-Control-Allow-Credentials', 'true')
cors = request.headers.get("origin", "no")
cookie = request.cookies.first(id, "no")
line = 'cors = ' + cors + ' | cookie = ' + cookie.value
data = request.server.stash.take(id)
if data is not None:
line = data + "\n" + line
request.server.stash.put(id, line)
if "redirect" in request.GET:
response.status = 302
response.headers.set('Location', request.GET['redirect'])
else:
return """WEBVTT
00:00:00.000 --> 00:00:10.000
Test"""
|
mpl-2.0
|
zzzombat/lucid-python-django
|
django/core/handlers/base.py
|
55
|
11479
|
import sys
from django import http
from django.core import signals
from django.utils.encoding import force_unicode
from django.utils.importlib import import_module
from django.utils.log import getLogger
logger = getLogger('django.request')
class BaseHandler(object):
# Changes that are always applied to a response (in this order).
response_fixes = [
http.fix_location_header,
http.conditional_content_removal,
http.fix_IE_for_attach,
http.fix_IE_for_vary,
]
def __init__(self):
self._request_middleware = self._view_middleware = self._response_middleware = self._exception_middleware = None
def load_middleware(self):
"""
Populate middleware lists from settings.MIDDLEWARE_CLASSES.
Must be called after the environment is fixed (see __call__).
"""
from django.conf import settings
from django.core import exceptions
self._view_middleware = []
self._template_response_middleware = []
self._response_middleware = []
self._exception_middleware = []
request_middleware = []
for middleware_path in settings.MIDDLEWARE_CLASSES:
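# Each entry is a dotted path to a middleware class, e.g.
# 'django.middleware.common.CommonMiddleware'.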
try:
mw_module, mw_classname = middleware_path.rsplit('.', 1)
except ValueError:
raise exceptions.ImproperlyConfigured('%s isn\'t a middleware module' % middleware_path)
try:
mod = import_module(mw_module)
except ImportError, e:
raise exceptions.ImproperlyConfigured('Error importing middleware %s: "%s"' % (mw_module, e))
try:
mw_class = getattr(mod, mw_classname)
except AttributeError:
raise exceptions.ImproperlyConfigured('Middleware module "%s" does not define a "%s" class' % (mw_module, mw_classname))
try:
mw_instance = mw_class()
except exceptions.MiddlewareNotUsed:
continue
if hasattr(mw_instance, 'process_request'):
request_middleware.append(mw_instance.process_request)
if hasattr(mw_instance, 'process_view'):
self._view_middleware.append(mw_instance.process_view)
if hasattr(mw_instance, 'process_template_response'):
self._template_response_middleware.insert(0, mw_instance.process_template_response)
if hasattr(mw_instance, 'process_response'):
self._response_middleware.insert(0, mw_instance.process_response)
if hasattr(mw_instance, 'process_exception'):
self._exception_middleware.insert(0, mw_instance.process_exception)
# We only assign to this when initialization is complete as it is used
# as a flag for initialization being complete.
self._request_middleware = request_middleware
def get_response(self, request):
"Returns an HttpResponse object for the given HttpRequest"
from django.core import exceptions, urlresolvers
from django.conf import settings
try:
# Setup default url resolver for this thread, this code is outside
# the try/except so we don't get a spurious "unbound local
# variable" exception in the event an exception is raised before
# resolver is set
urlconf = settings.ROOT_URLCONF
urlresolvers.set_urlconf(urlconf)
resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)
try:
response = None
# Apply request middleware
for middleware_method in self._request_middleware:
response = middleware_method(request)
if response:
break
if response is None:
if hasattr(request, "urlconf"):
# Reset url resolver with a custom urlconf.
urlconf = request.urlconf
urlresolvers.set_urlconf(urlconf)
resolver = urlresolvers.RegexURLResolver(r'^/', urlconf)
callback, callback_args, callback_kwargs = resolver.resolve(
request.path_info)
# Apply view middleware
for middleware_method in self._view_middleware:
response = middleware_method(request, callback, callback_args, callback_kwargs)
if response:
break
if response is None:
try:
response = callback(request, *callback_args, **callback_kwargs)
except Exception, e:
# If the view raised an exception, run it through exception
# middleware, and if the exception middleware returns a
# response, use that. Otherwise, reraise the exception.
for middleware_method in self._exception_middleware:
response = middleware_method(request, e)
if response:
break
if response is None:
raise
# Complain if the view returned None (a common error).
if response is None:
try:
view_name = callback.func_name # If it's a function
except AttributeError:
view_name = callback.__class__.__name__ + '.__call__' # If it's a class
raise ValueError("The view %s.%s didn't return an HttpResponse object." % (callback.__module__, view_name))
# If the response supports deferred rendering, apply template
# response middleware and then render the response
if hasattr(response, 'render') and callable(response.render):
for middleware_method in self._template_response_middleware:
response = middleware_method(request, response)
response = response.render()
except http.Http404, e:
logger.warning('Not Found: %s' % request.path,
extra={
'status_code': 404,
'request': request
})
if settings.DEBUG:
from django.views import debug
response = debug.technical_404_response(request, e)
else:
try:
callback, param_dict = resolver.resolve404()
response = callback(request, **param_dict)
except:
try:
response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
finally:
receivers = signals.got_request_exception.send(sender=self.__class__, request=request)
except exceptions.PermissionDenied:
logger.warning('Forbidden (Permission denied): %s' % request.path,
extra={
'status_code': 403,
'request': request
})
response = http.HttpResponseForbidden('<h1>Permission denied</h1>')
except SystemExit:
# Allow sys.exit() to actually exit. See tickets #1023 and #4701
raise
except: # Handle everything else, including SuspiciousOperation, etc.
# Get the exception info now, in case another exception is thrown later.
receivers = signals.got_request_exception.send(sender=self.__class__, request=request)
response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
finally:
# Reset URLconf for this thread on the way out for complete
# isolation of request.urlconf
urlresolvers.set_urlconf(None)
try:
# Apply response middleware, regardless of the response
for middleware_method in self._response_middleware:
response = middleware_method(request, response)
response = self.apply_response_fixes(request, response)
except: # Any exception should be gathered and handled
receivers = signals.got_request_exception.send(sender=self.__class__, request=request)
response = self.handle_uncaught_exception(request, resolver, sys.exc_info())
return response
def handle_uncaught_exception(self, request, resolver, exc_info):
"""
Processing for any otherwise uncaught exceptions (those that will
generate HTTP 500 responses). Can be overridden by subclasses who want
customised 500 handling.
Be *very* careful when overriding this because the error could be
caused by anything, so assuming something like the database is always
available would be an error.
"""
from django.conf import settings
if settings.DEBUG_PROPAGATE_EXCEPTIONS:
raise
if settings.DEBUG:
from django.views import debug
return debug.technical_500_response(request, *exc_info)
logger.error('Internal Server Error: %s' % request.path,
exc_info=exc_info,
extra={
'status_code': 500,
'request':request
}
)
# If Http500 handler is not installed, re-raise last exception
if resolver.urlconf_module is None:
raise exc_info[1], None, exc_info[2]
# Return an HttpResponse that displays a friendly error message.
callback, param_dict = resolver.resolve500()
return callback(request, **param_dict)
def apply_response_fixes(self, request, response):
"""
Applies each of the functions in self.response_fixes to the request and
response, modifying the response in the process. Returns the new
response.
"""
for func in self.response_fixes:
response = func(request, response)
return response
def get_script_name(environ):
"""
Returns the equivalent of the HTTP request's SCRIPT_NAME environment
variable. If Apache mod_rewrite has been used, returns what would have been
the script name prior to any rewriting (so it's the script name as seen
from the client's perspective), unless DJANGO_USE_POST_REWRITE is set (to
anything).
"""
from django.conf import settings
if settings.FORCE_SCRIPT_NAME is not None:
return force_unicode(settings.FORCE_SCRIPT_NAME)
# If Apache's mod_rewrite had a whack at the URL, Apache set either
# SCRIPT_URL or REDIRECT_URL to the full resource URL before applying any
# rewrites. Unfortunately not every Web server (lighttpd!) passes this
# information through all the time, so FORCE_SCRIPT_NAME, above, is still
# needed.
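# e.g. a SCRIPT_URL of '/app/page' with PATH_INFO '/page' yields a script
# name of '/app'.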
script_url = environ.get('SCRIPT_URL', u'')
if not script_url:
script_url = environ.get('REDIRECT_URL', u'')
if script_url:
return force_unicode(script_url[:-len(environ.get('PATH_INFO', ''))])
return force_unicode(environ.get('SCRIPT_NAME', u''))
|
bsd-3-clause
|
lordzuko/appengine-mapreduce
|
python/test/mapreduce/api/map_job/datastore_input_reader_base_test.py
|
15
|
9111
|
#!/usr/bin/env python
"""Datastore Input Reader Base Test for the map_job API."""
import unittest
from google.appengine.api import datastore_types
from google.appengine.api import namespace_manager
from google.appengine.datastore import datastore_stub_util
from google.appengine.ext import testbed
from testlib import testutil
from mapreduce.api import map_job
# pylint: disable=invalid-name
class SkipTestsMeta(type):
"""Enables skipping tests from the base class but not when sub-classed."""
def __init__(cls, name, bases, dct):
super(SkipTestsMeta, cls).__init__(name, bases, dct)
if cls.__name__ == "DatastoreInputReaderBaseTest":
unittest.skip("Skip tests when testing from the base class.")(cls)
else:
# Since there is no unittest.unskip(), do it manually.
cls.__unittest_skip__ = False
cls.__unittest_skip_why__ = None
class DatastoreInputReaderBaseTest(unittest.TestCase):
"""Base test class used by concrete DatastoreInputReaders."""
# Enable the meta class to skip all tests.
__metaclass__ = SkipTestsMeta
TEST_JOB_NAME = "TestJobHandlerName"
# Subclass should override with its own create entities function.
@property
def _create_entities(self):
return testutil._create_entities
# Subclass should override with its own entity kind or model class path
@property
def entity_kind(self):
return "TestEntity"
# Subclass should override with its own reader class.
@property
def reader_cls(self):
raise NotImplementedError("reader_cls() not implemented in %s"
% self.__class__)
def _get_keyname(self, entity):
"""Get keyname from an entity of certain type."""
return entity.key().name()
# Subclass should override with its own assert equals.
def _assertEquals_splitInput(self, itr, keys):
"""AssertEquals helper for splitInput tests.
Check the outputs from a single shard.
Args:
itr: input reader returned from splitInput.
keys: a set of expected key names from this iterator.
"""
results = []
while True:
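# Round-trip the reader through to_json/from_json after every item so that
# (de)serialization is exercised at each possible checkpoint.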
try:
results.append(self._get_keyname(iter(itr).next()))
itr = itr.__class__.from_json(itr.to_json())
except StopIteration:
break
results.sort()
keys.sort()
self.assertEquals(keys, results)
# Subclass should override with its own assert equals.
def _assertEqualsForAllShards_splitInput(self, keys, max_read, *itrs):
"""AssertEquals helper for splitInput tests.
Check the outputs from all shards. This is used when sharding
has random factor.
Args:
keys: a set of expected key names from this iterator.
max_read: limit number of results read from the iterators before failing
or None for no limit. Useful for preventing infinite loops or bounding
the execution of the test.
*itrs: input readers returned from splitInput.
"""
results = []
for itr in itrs:
while True:
try:
results.append(self._get_keyname(iter(itr).next()))
itr = itr.__class__.from_json(itr.to_json())
if max_read is not None and len(results) > max_read:
self.fail("Too many results found")
except StopIteration:
break
results.sort()
keys.sort()
self.assertEquals(keys, results)
def setUp(self):
self.testbed = testbed.Testbed()
unittest.TestCase.setUp(self)
self.testbed.activate()
self.testbed.init_datastore_v3_stub()
self.testbed.init_memcache_stub()
namespace_manager.set_namespace(None)
self._original_max = (
self.reader_cls.MAX_NAMESPACES_FOR_KEY_SHARD)
self.reader_cls.MAX_NAMESPACES_FOR_KEY_SHARD = 2
def tearDown(self):
# Restore the scatter property set to the original one.
datastore_stub_util._SPECIAL_PROPERTY_MAP[
datastore_types.SCATTER_SPECIAL_PROPERTY] = (
False, True, datastore_stub_util._GetScatterProperty)
# Restore max limit on ns sharding.
self.reader_cls.MAX_NAMESPACES_FOR_KEY_SHARD = (
self._original_max)
self.testbed.deactivate()
def testSplitInput_withNs(self):
self._create_entities(range(3), {"1": 1}, "f")
params = {
"entity_kind": self.entity_kind,
"namespace": "f",
}
conf = map_job.JobConfig(
job_name=self.TEST_JOB_NAME,
mapper=map_job.Mapper,
input_reader_cls=self.reader_cls,
input_reader_params=params,
shard_count=2)
results = self.reader_cls.split_input(conf)
self.assertEquals(2, len(results))
self._assertEqualsForAllShards_splitInput(["0", "1", "2"], None, *results)
def testSplitInput_withNs_moreShardThanScatter(self):
self._create_entities(range(3), {"1": 1}, "f")
params = {
"entity_kind": self.entity_kind,
"namespace": "f",
}
conf = map_job.JobConfig(
job_name=self.TEST_JOB_NAME,
mapper=map_job.Mapper,
input_reader_cls=self.reader_cls,
input_reader_params=params,
shard_count=4)
results = self.reader_cls.split_input(conf)
self.assertTrue(len(results) >= 2)
self._assertEqualsForAllShards_splitInput(["0", "1", "2"], None, *results)
def testSplitInput_noEntity(self):
params = {
"entity_kind": self.entity_kind,
}
conf = map_job.JobConfig(
job_name=self.TEST_JOB_NAME,
mapper=map_job.Mapper,
input_reader_cls=self.reader_cls,
input_reader_params=params,
shard_count=1)
results = self.reader_cls.split_input(conf)
self.assertEquals(None, results)
def testSplitInput_moreThanOneNS(self):
self._create_entities(range(3), {"1": 1}, "1")
self._create_entities(range(10, 13), {"11": 11}, "2")
params = {
"entity_kind": self.entity_kind,
}
conf = map_job.JobConfig(
job_name=self.TEST_JOB_NAME,
mapper=map_job.Mapper,
input_reader_cls=self.reader_cls,
input_reader_params=params,
shard_count=4)
results = self.reader_cls.split_input(conf)
self.assertTrue(len(results) >= 2)
self._assertEqualsForAllShards_splitInput(
["0", "1", "2", "10", "11", "12"], None, *results)
def testSplitInput_moreThanOneUnevenNS(self):
self._create_entities(range(5), {"1": 1, "3": 3}, "1")
self._create_entities(range(10, 13), {"11": 11}, "2")
params = {
"entity_kind": self.entity_kind,
}
conf = map_job.JobConfig(
job_name=self.TEST_JOB_NAME,
mapper=map_job.Mapper,
input_reader_cls=self.reader_cls,
input_reader_params=params,
shard_count=4)
results = self.reader_cls.split_input(conf)
self.assertTrue(len(results) >= 3)
self._assertEqualsForAllShards_splitInput(
["0", "1", "2", "3", "4", "10", "11", "12"], None, *results)
def testSplitInput_lotsOfNS(self):
self._create_entities(range(3), {"1": 1}, "9")
self._create_entities(range(3, 6), {"4": 4}, "_")
self._create_entities(range(6, 9), {"7": 7}, "a")
params = {
"entity_kind": self.entity_kind,
}
conf = map_job.JobConfig(
job_name=self.TEST_JOB_NAME,
mapper=map_job.Mapper,
input_reader_cls=self.reader_cls,
input_reader_params=params,
shard_count=3)
results = self.reader_cls.split_input(conf)
self.assertEquals(3, len(results))
self._assertEquals_splitInput(results[0], ["0", "1", "2"])
self._assertEquals_splitInput(results[1], ["3", "4", "5"])
self._assertEquals_splitInput(results[2], ["6", "7", "8"])
def testSplitInput_withNsAndDefaultNs(self):
shards = 2
# 10 entities in the default namespace
empty_ns_keys = [str(k) for k in range(10)]
self._create_entities(empty_ns_keys,
dict([(k, 1) for k in empty_ns_keys]),
None)
# 10 entities for each of N different non-default namespaces. The number
# of namespaces, N, is set to be twice the cutoff for switching to sharding
# by namespace instead of keys.
non_empty_ns_keys = []
for ns_num in range(self.reader_cls.MAX_NAMESPACES_FOR_KEY_SHARD * 2):
ns_keys = ["n-%02d-k-%02d" % (ns_num, k) for k in range(10)]
non_empty_ns_keys.extend(ns_keys)
self._create_entities(ns_keys,
dict([(k, 1) for k in ns_keys]),
"%02d" % ns_num)
# Test a query over all namespaces
params = {
"entity_kind": self.entity_kind,
"namespace": None}
conf = map_job.JobConfig(
job_name=self.TEST_JOB_NAME,
mapper=map_job.Mapper,
input_reader_cls=self.reader_cls,
input_reader_params=params,
shard_count=shards)
results = self.reader_cls.split_input(conf)
self.assertEqual(shards, len(results))
all_keys = empty_ns_keys + non_empty_ns_keys
self._assertEqualsForAllShards_splitInput(all_keys,
len(all_keys),
*results)
if __name__ == "__main__":
unittest.main()
|
apache-2.0
|
medspx/QGIS
|
tests/src/python/test_qgslayoutgridsettings.py
|
22
|
5022
|
# -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsLayoutGridSettings.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '05/07/2017'
__copyright__ = 'Copyright 2017, The QGIS Project'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import qgis # NOQA
from qgis.core import (QgsProject,
QgsLayout,
QgsLayoutGridSettings,
QgsLayoutMeasurement,
QgsUnitTypes,
QgsLayoutPoint,
QgsLayoutItemPage,
QgsReadWriteContext)
from qgis.PyQt.QtGui import (QPen,
QColor)
from qgis.PyQt.QtXml import QDomDocument
from qgis.testing import start_app, unittest
start_app()
class TestQgsLayoutGridSettings(unittest.TestCase):
def testGettersSetters(self):
p = QgsProject()
l = QgsLayout(p)
s = QgsLayoutGridSettings(l)
s.setResolution(QgsLayoutMeasurement(5, QgsUnitTypes.LayoutPoints))
self.assertEqual(s.resolution().length(), 5.0)
self.assertEqual(s.resolution().units(), QgsUnitTypes.LayoutPoints)
s.setOffset(QgsLayoutPoint(6, 7, QgsUnitTypes.LayoutPixels))
self.assertEqual(s.offset().x(), 6.0)
self.assertEqual(s.offset().y(), 7.0)
self.assertEqual(s.offset().units(), QgsUnitTypes.LayoutPixels)
s.setPen(QPen(QColor(255, 0, 255)))
self.assertEqual(s.pen().color().name(), QColor(255, 0, 255).name())
s.setStyle(QgsLayoutGridSettings.StyleDots)
self.assertEqual(s.style(), QgsLayoutGridSettings.StyleDots)
def testReadWriteXml(self):
p = QgsProject()
l = QgsLayout(p)
s = QgsLayoutGridSettings(l)
s.setResolution(QgsLayoutMeasurement(5, QgsUnitTypes.LayoutPoints))
s.setOffset(QgsLayoutPoint(6, 7, QgsUnitTypes.LayoutPixels))
doc = QDomDocument("testdoc")
elem = doc.createElement("test")
self.assertTrue(s.writeXml(elem, doc, QgsReadWriteContext()))
s2 = QgsLayoutGridSettings(l)
self.assertTrue(s2.readXml(elem.firstChildElement(), doc, QgsReadWriteContext()))
self.assertEqual(s2.resolution().length(), 5.0)
self.assertEqual(s2.resolution().units(), QgsUnitTypes.LayoutPoints)
self.assertEqual(s2.offset().x(), 6.0)
self.assertEqual(s2.offset().y(), 7.0)
self.assertEqual(s2.offset().units(), QgsUnitTypes.LayoutPixels)
def testUndoRedo(self):
p = QgsProject()
l = QgsLayout(p)
g = l.gridSettings()
g.setResolution(QgsLayoutMeasurement(15, QgsUnitTypes.LayoutPoints))
# these two commands should be 'collapsed'
g.setOffset(QgsLayoutPoint(555, 10, QgsUnitTypes.LayoutPoints))
g.setOffset(QgsLayoutPoint(5, 10, QgsUnitTypes.LayoutPoints))
# these two commands should be 'collapsed'
g.setResolution(QgsLayoutMeasurement(45, QgsUnitTypes.LayoutInches))
g.setResolution(QgsLayoutMeasurement(35, QgsUnitTypes.LayoutInches))
self.assertEqual(g.offset().x(), 5.0)
self.assertEqual(g.offset().y(), 10.0)
self.assertEqual(g.offset().units(), QgsUnitTypes.LayoutPoints)
self.assertEqual(g.resolution().length(), 35.0)
self.assertEqual(g.resolution().units(), QgsUnitTypes.LayoutInches)
l.undoStack().stack().undo()
self.assertEqual(g.offset().x(), 5.0)
self.assertEqual(g.offset().y(), 10.0)
self.assertEqual(g.offset().units(), QgsUnitTypes.LayoutPoints)
self.assertEqual(g.resolution().length(), 15.0)
self.assertEqual(g.resolution().units(), QgsUnitTypes.LayoutPoints)
l.undoStack().stack().undo()
self.assertEqual(g.offset().x(), 0.0)
self.assertEqual(g.offset().y(), 0.0)
self.assertEqual(g.offset().units(), QgsUnitTypes.LayoutMillimeters)
self.assertEqual(g.resolution().length(), 15.0)
self.assertEqual(g.resolution().units(), QgsUnitTypes.LayoutPoints)
l.undoStack().stack().redo()
self.assertEqual(g.offset().x(), 5.0)
self.assertEqual(g.offset().y(), 10.0)
self.assertEqual(g.offset().units(), QgsUnitTypes.LayoutPoints)
self.assertEqual(g.resolution().length(), 15.0)
self.assertEqual(g.resolution().units(), QgsUnitTypes.LayoutPoints)
l.undoStack().stack().redo()
self.assertEqual(g.offset().x(), 5.0)
self.assertEqual(g.offset().y(), 10.0)
self.assertEqual(g.offset().units(), QgsUnitTypes.LayoutPoints)
self.assertEqual(g.resolution().length(), 35.0)
self.assertEqual(g.resolution().units(), QgsUnitTypes.LayoutInches)
if __name__ == '__main__':
unittest.main()
|
gpl-2.0
|
kirca/odoo
|
addons/base_import/__openerp__.py
|
62
|
1229
|
{
'name': 'Base import',
'description': """
New extensible file import for OpenERP
======================================
Re-implements OpenERP's file import system:
* Server side, the previous system forces most of the logic into the
client which duplicates the effort (between clients), makes the
import system much harder to use without a client (direct RPC or
other forms of automation) and makes knowledge about the
import/export system much harder to gather as it is spread over
3+ different projects.
* In a more extensible manner, so users and partners can build their
own front-end to import from other file formats (e.g. OpenDocument
files) which may be simpler to handle in their work flow or from
their data production sources.
* In a module, so that administrators and users of OpenERP who do not
need or want an online import can avoid it being available to users.
""",
'category': 'Uncategorized',
'website': 'http://www.openerp.com',
'author': 'OpenERP SA',
'depends': ['web'],
'installable': True,
'auto_install': True,
'data': [
'security/ir.model.access.csv',
'views/base_import.xml',
],
'qweb': ['static/src/xml/import.xml'],
}
|
agpl-3.0
|
XXLRay/libreshot
|
build/lib.linux-x86_64-2.7/libreshot/uploads/youtube/gdata/client.py
|
9
|
47380
|
#!/usr/bin/env python
#
# Copyright (C) 2008, 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
"""Provides a client to interact with Google Data API servers.
This module is used for version 2 of the Google Data APIs. The primary class
in this module is GDClient.
GDClient: handles auth and CRUD operations when communicating with servers.
GDataClient: deprecated client for version one services. Will be removed.
"""
__author__ = '[email protected] (Jeff Scudder)'
import re
import atom.client
import atom.core
import atom.http_core
import gdata.gauth
import gdata.data
class Error(Exception):
pass
class RequestError(Error):
status = None
reason = None
body = None
headers = None
class RedirectError(RequestError):
pass
class CaptchaChallenge(RequestError):
captcha_url = None
captcha_token = None
class ClientLoginTokenMissing(Error):
pass
class MissingOAuthParameters(Error):
pass
class ClientLoginFailed(RequestError):
pass
class UnableToUpgradeToken(RequestError):
pass
class Unauthorized(Error):
pass
class BadAuthenticationServiceURL(RedirectError):
pass
class BadAuthentication(RequestError):
pass
class NotModified(RequestError):
pass
class NotImplemented(RequestError):
pass
def error_from_response(message, http_response, error_class,
response_body=None):
"""Creates a new exception and sets the HTTP information in the error.
Args:
message: str human readable message to be displayed if the exception is
not caught.
http_response: The response from the server, contains error information.
error_class: The exception to be instantiated and populated with
information from the http_response
response_body: str (optional) specify if the response has already been read
from the http_response object.
"""
if response_body is None:
body = http_response.read()
else:
body = response_body
error = error_class('%s: %i, %s' % (message, http_response.status, body))
error.status = http_response.status
error.reason = http_response.reason
error.body = body
error.headers = atom.http_core.get_headers(http_response)
return error
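# Hedged usage sketch (not part of the original module): callers normally let
# request() raise through error_from_response and then inspect the HTTP
# details copied onto the exception. `client` and `uri` are placeholders.
def _example_handle_request_error(client, uri):
    try:
        return client.get_feed(uri)
    except RequestError, error:
        # error_from_response populated these attributes from the response.
        return (error.status, error.reason, error.body)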
def get_xml_version(version):
"""Determines which XML schema to use based on the client API version.
Args:
version: string which is converted to an int. The version string is in
the form 'Major.Minor.x.y.z' and only the major version number
is considered. If None is provided, version 1 is assumed.
"""
if version is None:
return 1
return int(version.split('.')[0])
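# Illustrative sketch of the parsing rule above: only the major version
# number matters, so each call maps to the value shown in its comment.
def _example_get_xml_version():
    assert get_xml_version(None) == 1      # default when no version is set
    assert get_xml_version('2') == 2       # bare major version
    assert get_xml_version('2.0.1') == 2   # 'Major.Minor.x' form
    assert get_xml_version('1.5') == 1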
class GDClient(atom.client.AtomPubClient):
"""Communicates with Google Data servers to perform CRUD operations.
This class is currently experimental and may change in backwards
incompatible ways.
This class exists to simplify the following three areas involved in using
the Google Data APIs.
CRUD Operations:
The client provides a generic 'request' method for making HTTP requests.
There are a number of convenience methods which are built on top of
request, which include get_feed, get_entry, get_next, post, update, and
delete. These methods contact the Google Data servers.
Auth:
Reading user-specific private data requires authorization from the user as
do any changes to user data. An auth_token object can be passed into any
of the HTTP requests to set the Authorization header in the request.
You may also want to set the auth_token member to an object which can
use modify_request to set the Authorization header in the HTTP request.
If you are authenticating using the email address and password, you can
use the client_login method to obtain an auth token and set the
auth_token member.
If you are using browser redirects, specifically AuthSub, you will want
to use gdata.gauth.AuthSubToken.from_url to obtain the token after the
redirect, and you will probably want to upgrade this single-use token
to a multiple use (session) token using the upgrade_token method.
API Versions:
This client is multi-version capable and can be used with Google Data API
version 1 and version 2. The version should be specified by setting the
api_version member to a string, either '1' or '2'.
"""
# The gsessionid is used by Google Calendar to prevent redirects.
__gsessionid = None
api_version = None
# Name of the Google Data service when making a ClientLogin request.
auth_service = None
# URL prefixes which should be requested for AuthSub and OAuth.
auth_scopes = None
def request(self, method=None, uri=None, auth_token=None,
http_request=None, converter=None, desired_class=None,
redirects_remaining=4, **kwargs):
"""Make an HTTP request to the server.
See also documentation for atom.client.AtomPubClient.request.
If a 302 redirect is sent from the server to the client, this client
assumes that the redirect is in the form used by the Google Calendar API.
The same request URI and method will be used as in the original request,
but a gsessionid URL parameter will be added to the request URI with
the value provided in the server's 302 redirect response. If the 302
redirect is not in the format specified by the Google Calendar API, a
RedirectError will be raised containing the body of the server's
response.
The method calls the client's modify_request method to make any changes
required by the client before the request is made. For example, a
version 2 client could add a GData-Version: 2 header to the request in
its modify_request method.
Args:
method: str The HTTP verb for this request, usually 'GET', 'POST',
'PUT', or 'DELETE'
uri: atom.http_core.Uri, str, or unicode The URL being requested.
auth_token: An object which sets the Authorization HTTP header in its
modify_request method. Recommended classes include
gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
among others.
http_request: (optional) atom.http_core.HttpRequest
converter: function which takes the HTTP response as its only
argument and returns the desired object.
desired_class: class descended from atom.core.XmlElement to which a
successful response should be converted. If there is no
converter function specified (converter=None) then the
desired_class will be used in calling the
atom.core.parse function. If neither
the desired_class nor the converter is specified, an
HTTP response object will be returned.
redirects_remaining: (optional) int, if this number is 0 and the
server sends a 302 redirect, the request method
will raise an exception. This parameter is used in
recursive request calls to avoid an infinite loop.
Any additional arguments are passed through to
atom.client.AtomPubClient.request.
Returns:
An HTTP response object (see atom.http_core.HttpResponse for a
description of the object's interface) if no converter was
specified and no desired_class was specified. If a converter function
was provided, the results of calling the converter are returned. If no
converter was specified but a desired_class was provided, the response
body will be converted to the class using
atom.core.parse.
"""
if isinstance(uri, (str, unicode)):
uri = atom.http_core.Uri.parse_uri(uri)
# Add the gsession ID to the URL to prevent further redirects.
# TODO: If different sessions are using the same client, there will be a
# multitude of redirects and session ID shuffling.
# If the gsession ID is in the URL, adopt it as the standard location.
if uri is not None and uri.query is not None and 'gsessionid' in uri.query:
self.__gsessionid = uri.query['gsessionid']
# The gsession ID could also be in the HTTP request.
elif (http_request is not None and http_request.uri is not None
and http_request.uri.query is not None
and 'gsessionid' in http_request.uri.query):
self.__gsessionid = http_request.uri.query['gsessionid']
# If the gsession ID is stored in the client, and was not present in the
# URI then add it to the URI.
elif self.__gsessionid is not None:
uri.query['gsessionid'] = self.__gsessionid
# The AtomPubClient should call this class' modify_request before
# performing the HTTP request.
#http_request = self.modify_request(http_request)
response = atom.client.AtomPubClient.request(self, method=method,
uri=uri, auth_token=auth_token, http_request=http_request, **kwargs)
# On success, convert the response body using the desired converter
# function if present.
if response is None:
return None
if response.status == 200 or response.status == 201:
if converter is not None:
return converter(response)
elif desired_class is not None:
if self.api_version is not None:
return atom.core.parse(response.read(), desired_class,
version=get_xml_version(self.api_version))
else:
# No API version was specified, so allow parse to
# use the default version.
return atom.core.parse(response.read(), desired_class)
else:
return response
# TODO: move the redirect logic into the Google Calendar client once it
# exists since the redirects are only used in the calendar API.
elif response.status == 302:
if redirects_remaining > 0:
location = (response.getheader('Location')
or response.getheader('location'))
if location is not None:
m = re.compile('[\?\&]gsessionid=(\w*)').search(location)
if m is not None:
self.__gsessionid = m.group(1)
# Make a recursive call with the gsession ID in the URI to follow
# the redirect.
return self.request(method=method, uri=uri, auth_token=auth_token,
http_request=http_request, converter=converter,
desired_class=desired_class,
redirects_remaining=redirects_remaining-1,
**kwargs)
else:
raise error_from_response('302 received without Location header',
response, RedirectError)
else:
raise error_from_response('Too many redirects from server',
response, RedirectError)
elif response.status == 401:
raise error_from_response('Unauthorized - Server responded with',
response, Unauthorized)
elif response.status == 304:
raise error_from_response('Entry Not Modified - Server responded with',
response, NotModified)
elif response.status == 501:
raise error_from_response(
'This API operation is not implemented. - Server responded with',
response, NotImplemented)
# If the server's response was not a 200, 201, 302, 304, 401, or 501, raise
# an exception.
else:
raise error_from_response('Server responded with', response,
RequestError)
Request = request
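# Hedged sketch of the three response-handling modes described in the
# docstring above; the first argument plays the client role, and the URI is
# a placeholder for a real feed URL.
def _example_request_modes(client):
    uri = 'https://example.com/feeds/sample'
    # 1. No converter or desired_class: the raw HTTP response is returned.
    raw = client.request(method='GET', uri=uri)
    # 2. desired_class: the body is parsed with atom.core.parse.
    feed = client.request(method='GET', uri=uri,
                          desired_class=gdata.data.GDFeed)
    # 3. converter: called with the HTTP response; its return value is used.
    body = client.request(method='GET', uri=uri,
                          converter=lambda response: response.read())
    return raw, feed, body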
def request_client_login_token(
self, email, password, source, service=None,
account_type='HOSTED_OR_GOOGLE',
auth_url=atom.http_core.Uri.parse_uri(
'https://www.google.com/accounts/ClientLogin'),
captcha_token=None, captcha_response=None):
service = service or self.auth_service
# Set the target URL.
http_request = atom.http_core.HttpRequest(uri=auth_url, method='POST')
http_request.add_body_part(
gdata.gauth.generate_client_login_request_body(email=email,
password=password, service=service, source=source,
account_type=account_type, captcha_token=captcha_token,
captcha_response=captcha_response),
'application/x-www-form-urlencoded')
# Use the underlying http_client to make the request.
response = self.http_client.request(http_request)
response_body = response.read()
if response.status == 200:
token_string = gdata.gauth.get_client_login_token_string(response_body)
if token_string is not None:
return gdata.gauth.ClientLoginToken(token_string)
else:
raise ClientLoginTokenMissing(
'Received a 200 response to client login request,'
' but no token was present. %s' % (response_body,))
elif response.status == 403:
captcha_challenge = gdata.gauth.get_captcha_challenge(response_body)
if captcha_challenge:
challenge = CaptchaChallenge('CAPTCHA required')
challenge.captcha_url = captcha_challenge['url']
challenge.captcha_token = captcha_challenge['token']
raise challenge
elif response_body.splitlines()[0] == 'Error=BadAuthentication':
raise BadAuthentication('Incorrect username or password')
else:
raise error_from_response('Server responded with a 403 code',
response, RequestError, response_body)
elif response.status == 302:
# Google tries to redirect all bad URLs back to
# http://www.google.<locale>. If a redirect
# attempt is made, assume the user has supplied an incorrect
# authentication URL
raise error_from_response('Server responded with a redirect',
response, BadAuthenticationServiceURL,
response_body)
else:
raise error_from_response('Server responded to ClientLogin request',
response, ClientLoginFailed, response_body)
RequestClientLoginToken = request_client_login_token
def client_login(self, email, password, source, service=None,
account_type='HOSTED_OR_GOOGLE',
auth_url=atom.http_core.Uri.parse_uri(
'https://www.google.com/accounts/ClientLogin'),
captcha_token=None, captcha_response=None):
"""Performs an auth request using the user's email address and password.
In order to modify user specific data and read user private data, your
application must be authorized by the user. One way to demonstrate
authorization is by including a Client Login token in the Authorization
HTTP header of all requests. This method requests the Client Login token
by sending the user's email address, password, the name of the
application, and the service code for the service which will be accessed
by the application. If the username and password are correct, the server
will respond with the client login code and a new ClientLoginToken
object will be set in the client's auth_token member. With the auth_token
set, future requests from this client will include the Client Login
token.
For a list of service names, see
http://code.google.com/apis/gdata/faq.html#clientlogin
For more information on Client Login, see:
http://code.google.com/apis/accounts/docs/AuthForInstalledApps.html
Args:
email: str The user's email address or username.
password: str The password for the user's account.
source: str The name of your application. This can be anything you
like, but it should give some indication of which app is
making the request.
service: str The service code for the service you would like to access.
For example, 'cp' for contacts, 'cl' for calendar. For a full
list see
http://code.google.com/apis/gdata/faq.html#clientlogin
If you are using a subclass of the gdata.client.GDClient, the
service will usually be filled in for you so you do not need
to specify it. For example see BloggerClient,
SpreadsheetsClient, etc.
account_type: str (optional) The type of account which is being
authenticated. This can be either 'GOOGLE' for a Google
Account, 'HOSTED' for a Google Apps Account, or the
default 'HOSTED_OR_GOOGLE' which will select the Google
Apps Account if the same email address is used for both
a Google Account and a Google Apps Account.
auth_url: str (optional) The URL to which the login request should be
sent.
captcha_token: str (optional) If a previous login attempt was responded
to with a CAPTCHA challenge, this is the token which
identifies the challenge (from the CAPTCHA's URL).
captcha_response: str (optional) If a previous login attempt was
responded to with a CAPTCHA challenge, this is the
response text which was contained in the challenge.
Returns:
None
Raises:
A RequestError or one of its subclasses: BadAuthentication,
BadAuthenticationServiceURL, ClientLoginFailed,
ClientLoginTokenMissing, or CaptchaChallenge
"""
service = service or self.auth_service
self.auth_token = self.request_client_login_token(email, password,
source, service=service, account_type=account_type, auth_url=auth_url,
captcha_token=captcha_token, captcha_response=captcha_response)
ClientLogin = client_login
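# Hedged usage sketch: the credentials, application name, and the 'cl'
# (Calendar) service code below are placeholders. On success, auth_token is
# set and later requests carry the ClientLogin Authorization header.
def _example_client_login(client):
    client.client_login('user@example.com', 'password',
                        source='ExampleCo-ExampleApp-1.0',
                        service='cl')
    return client.auth_token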
def upgrade_token(self, token=None, url=atom.http_core.Uri.parse_uri(
'https://www.google.com/accounts/AuthSubSessionToken')):
"""Asks the Google auth server for a multi-use AuthSub token.
For details on AuthSub, see:
http://code.google.com/apis/accounts/docs/AuthSub.html
Args:
token: gdata.gauth.AuthSubToken or gdata.gauth.SecureAuthSubToken
(optional) If no token is passed in, the client's auth_token member
is used to request the new token. The token object will be modified
to contain the new session token string.
url: str or atom.http_core.Uri (optional) The URL to which the token
upgrade request should be sent. Defaults to:
https://www.google.com/accounts/AuthSubSessionToken
Returns:
The upgraded gdata.gauth.AuthSubToken object.
"""
# Default to using the auth_token member if no token is provided.
if token is None:
token = self.auth_token
# We cannot upgrade a None token.
if token is None:
raise UnableToUpgradeToken('No token was provided.')
if not isinstance(token, gdata.gauth.AuthSubToken):
raise UnableToUpgradeToken(
'Cannot upgrade the token because it is not an AuthSubToken object.')
http_request = atom.http_core.HttpRequest(uri=url, method='GET')
token.modify_request(http_request)
# Use the lower level HttpClient to make the request.
response = self.http_client.request(http_request)
if response.status == 200:
token._upgrade_token(response.read())
return token
else:
raise UnableToUpgradeToken(
'Server responded to token upgrade request with %s: %s' % (
response.status, response.read()))
UpgradeToken = upgrade_token
def revoke_token(self, token=None, url=atom.http_core.Uri.parse_uri(
'https://www.google.com/accounts/AuthSubRevokeToken')):
"""Requests that the token be invalidated.
This method can be used for both AuthSub and OAuth tokens (to invalidate
a ClientLogin token, the user must change their password).
Returns:
True if the server responded with a 200.
Raises:
A RequestError if the server responds with a non-200 status.
"""
# Default to using the auth_token member if no token is provided.
if token is None:
token = self.auth_token
http_request = atom.http_core.HttpRequest(uri=url, method='GET')
token.modify_request(http_request)
response = self.http_client.request(http_request)
if response.status != 200:
raise error_from_response('Server sent non-200 to revoke token',
response, RequestError, response.read())
return True
RevokeToken = revoke_token
def get_oauth_token(self, scopes, next, consumer_key, consumer_secret=None,
rsa_private_key=None,
url=gdata.gauth.REQUEST_TOKEN_URL):
"""Obtains an OAuth request token to allow the user to authorize this app.
Once this client has a request token, the user can authorize the request
token by visiting the authorization URL in their browser. After being
redirected back to this app at the 'next' URL, this app can then exchange
the authorized request token for an access token.
For more information see the documentation on Google Accounts with OAuth:
http://code.google.com/apis/accounts/docs/OAuth.html#AuthProcess
Args:
scopes: list of strings or atom.http_core.Uri objects which specify the
URL prefixes which this app will be accessing. For example, to access
the Google Calendar API, you would want to use scopes:
['https://www.google.com/calendar/feeds/',
'http://www.google.com/calendar/feeds/']
next: str or atom.http_core.Uri object, The URL which the user's browser
should be sent to after they authorize access to their data. This
should be a URL in your application which will read the token
information from the URL and upgrade the request token to an access
token.
consumer_key: str This is the identifier for this application which you
should have received when you registered your application with Google
to use OAuth.
consumer_secret: str (optional) The shared secret between your app and
Google which provides evidence that this request is coming from your
application and not another app. If present, this library assumes
you want to use an HMAC signature to verify requests. Keep this data
a secret.
rsa_private_key: str (optional) The RSA private key which is used to
generate a digital signature which is checked by Google's server. If
present, this library assumes that you want to use an RSA signature
to verify requests. Keep this data a secret.
url: The URL to which a request for a token should be made. The default
is Google's OAuth request token provider.
"""
http_request = None
if rsa_private_key is not None:
http_request = gdata.gauth.generate_request_for_request_token(
consumer_key, gdata.gauth.RSA_SHA1, scopes,
rsa_key=rsa_private_key, auth_server_url=url, next=next)
elif consumer_secret is not None:
http_request = gdata.gauth.generate_request_for_request_token(
consumer_key, gdata.gauth.HMAC_SHA1, scopes,
consumer_secret=consumer_secret, auth_server_url=url, next=next)
else:
raise MissingOAuthParameters(
'To request an OAuth token, you must provide your consumer secret'
' or your private RSA key.')
response = self.http_client.request(http_request)
response_body = response.read()
if response.status != 200:
raise error_from_response('Unable to obtain OAuth request token',
response, RequestError, response_body)
if rsa_private_key is not None:
return gdata.gauth.rsa_token_from_body(response_body, consumer_key,
rsa_private_key,
gdata.gauth.REQUEST_TOKEN)
elif consumer_secret is not None:
return gdata.gauth.hmac_token_from_body(response_body, consumer_key,
consumer_secret,
gdata.gauth.REQUEST_TOKEN)
GetOAuthToken = get_oauth_token
def get_access_token(self, request_token,
url=gdata.gauth.ACCESS_TOKEN_URL):
"""Exchanges an authorized OAuth request token for an access token.
Contacts the Google OAuth server to upgrade a previously authorized
request token. Once the request token is upgraded to an access token,
the access token may be used to access the user's data.
For more details, see the Google Accounts OAuth documentation:
http://code.google.com/apis/accounts/docs/OAuth.html#AccessToken
Args:
request_token: An OAuth token which has been authorized by the user.
url: (optional) The URL to which the upgrade request should be sent.
Defaults to: https://www.google.com/accounts/OAuthAuthorizeToken
"""
http_request = gdata.gauth.generate_request_for_access_token(
request_token, auth_server_url=url)
response = self.http_client.request(http_request)
response_body = response.read()
if response.status != 200:
raise error_from_response(
'Unable to upgrade OAuth request token to access token',
response, RequestError, response_body)
return gdata.gauth.upgrade_to_access_token(request_token, response_body)
GetAccessToken = get_access_token
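# Hedged sketch of the three-legged OAuth flow built from the two methods
# above; consumer key/secret, scope, and callback URL are placeholders. The
# user must authorize the request token in the browser between the two steps.
def _example_oauth_request(client, callback_url):
    # Step 1: obtain an unauthorized request token.
    return client.get_oauth_token(
        scopes=['https://www.google.com/calendar/feeds/'],
        next=callback_url,
        consumer_key='example.com',
        consumer_secret='my-consumer-secret')
def _example_oauth_upgrade(client, authorized_request_token):
    # Step 2 happened in the browser: the user authorized the token and was
    # redirected back to callback_url.
    # Step 3: exchange the authorized request token for an access token.
    client.auth_token = client.get_access_token(authorized_request_token)
    return client.auth_token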
def modify_request(self, http_request):
"""Adds or changes request before making the HTTP request.
This client will add the API version if it is specified.
Subclasses may override this method to add their own request
modifications before the request is made.
"""
http_request = atom.client.AtomPubClient.modify_request(self,
http_request)
if self.api_version is not None:
http_request.headers['GData-Version'] = self.api_version
return http_request
ModifyRequest = modify_request
def get_feed(self, uri, auth_token=None, converter=None,
desired_class=gdata.data.GDFeed, **kwargs):
return self.request(method='GET', uri=uri, auth_token=auth_token,
converter=converter, desired_class=desired_class,
**kwargs)
GetFeed = get_feed
def get_entry(self, uri, auth_token=None, converter=None,
desired_class=gdata.data.GDEntry, etag=None, **kwargs):
http_request = atom.http_core.HttpRequest()
# Conditional retrieval
if etag is not None:
http_request.headers['If-None-Match'] = etag
return self.request(method='GET', uri=uri, auth_token=auth_token,
http_request=http_request, converter=converter,
desired_class=desired_class, **kwargs)
GetEntry = get_entry
def get_next(self, feed, auth_token=None, converter=None,
desired_class=None, **kwargs):
"""Fetches the next set of results from the feed.
When requesting a feed, the number of entries returned is capped at a
service specific default limit (often 25 entries). You can specify your
own entry-count cap using the max-results URL query parameter. If there
are more results than could fit under max-results, the feed will contain
a next link. This method performs a GET against this next results URL.
Returns:
A new feed object containing the next set of entries in this feed.
"""
if converter is None and desired_class is None:
desired_class = feed.__class__
return self.get_feed(feed.find_next_link(), auth_token=auth_token,
converter=converter, desired_class=desired_class,
**kwargs)
GetNext = get_next
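# Hedged pagination sketch built on get_next: collect every entry in a feed
# by following 'next' links until none remains. The uri is a placeholder.
def _example_iterate_all_entries(client, uri):
    entries = []
    feed = client.get_feed(uri)
    while feed is not None:
        entries.extend(feed.entry)
        if feed.find_next_link() is None:
            break
        feed = client.get_next(feed)
    return entries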
# TODO: add a refresh method to re-fetch the entry/feed from the server
# if it has been updated.
def post(self, entry, uri, auth_token=None, converter=None,
desired_class=None, **kwargs):
if converter is None and desired_class is None:
desired_class = entry.__class__
http_request = atom.http_core.HttpRequest()
http_request.add_body_part(
entry.to_string(get_xml_version(self.api_version)),
'application/atom+xml')
return self.request(method='POST', uri=uri, auth_token=auth_token,
http_request=http_request, converter=converter,
desired_class=desired_class, **kwargs)
Post = post
def update(self, entry, auth_token=None, force=False, **kwargs):
"""Edits the entry on the server by sending the XML for this entry.
Performs a PUT and converts the response to a new entry object with a
matching class to the entry passed in.
Args:
entry:
auth_token:
force: boolean stating whether an update should be forced. Defaults to
False. Normally, if a change has been made since the passed in
entry was obtained, the server will not overwrite the entry since
the changes were based on an obsolete version of the entry.
Setting force to True will cause the update to silently
overwrite whatever version is present.
Returns:
A new Entry object of a matching type to the entry which was passed in.
"""
http_request = atom.http_core.HttpRequest()
http_request.add_body_part(
entry.to_string(get_xml_version(self.api_version)),
'application/atom+xml')
# Include the ETag in the request if present.
if force:
http_request.headers['If-Match'] = '*'
elif hasattr(entry, 'etag') and entry.etag:
http_request.headers['If-Match'] = entry.etag
return self.request(method='PUT', uri=entry.find_edit_link(),
auth_token=auth_token, http_request=http_request,
desired_class=entry.__class__, **kwargs)
Update = update
def delete(self, entry_or_uri, auth_token=None, force=False, **kwargs):
http_request = atom.http_core.HttpRequest()
# Include the ETag in the request if present.
if force:
http_request.headers['If-Match'] = '*'
elif hasattr(entry_or_uri, 'etag') and entry_or_uri.etag:
http_request.headers['If-Match'] = entry_or_uri.etag
# If the user passes in a URL, delete it directly; this may fail if
# the service requires an ETag.
if isinstance(entry_or_uri, (str, unicode, atom.http_core.Uri)):
return self.request(method='DELETE', uri=entry_or_uri,
http_request=http_request, auth_token=auth_token,
**kwargs)
return self.request(method='DELETE', uri=entry_or_uri.find_edit_link(),
http_request=http_request, auth_token=auth_token,
**kwargs)
Delete = delete
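# Hedged sketch of the optimistic-concurrency behavior described above: a
# plain update sends the entry's ETag in If-Match and fails if the server
# copy has changed; force=True retries with 'If-Match: *' and overwrites.
def _example_update_with_etag(client, entry):
    try:
        return client.update(entry)
    except RequestError, error:
        if error.status == 412:  # Precondition Failed: stale ETag.
            return client.update(entry, force=True)
        raise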
#TODO: implement batch requests.
#def batch(feed, uri, auth_token=None, converter=None, **kwargs):
# pass
# TODO: add a refresh method to request a conditional update to an entry
# or feed.
def _add_query_param(param_string, value, http_request):
if value:
http_request.uri.query[param_string] = value
class Query(object):
def __init__(self, text_query=None, categories=None, author=None, alt=None,
updated_min=None, updated_max=None, pretty_print=False,
published_min=None, published_max=None, start_index=None,
max_results=None, strict=False):
"""Constructs a Google Data Query to filter feed contents serverside.
Args:
text_query: Full text search str (optional)
categories: list of strings (optional). Each string is a required
category. To include an 'or' query, put a | in the string between
terms. For example, to find everything in the Fitz category and
the Laurie or Jane category (Fitz and (Laurie or Jane)) you would
set categories to ['Fitz', 'Laurie|Jane'].
author: str (optional) The service returns entries where the author
name and/or email address match your query string.
alt: str (optional) for the Alternative representation type you'd like
the feed in. If you don't specify an alt parameter, the service
returns an Atom feed. This is equivalent to alt='atom'.
alt='rss' returns an RSS 2.0 result feed.
alt='json' returns a JSON representation of the feed.
alt='json-in-script' Requests a response that wraps JSON in a script
tag.
alt='atom-in-script' Requests an Atom response that wraps an XML
string in a script tag.
alt='rss-in-script' Requests an RSS response that wraps an XML
string in a script tag.
updated_min: str (optional), RFC 3339 timestamp format, lower bounds.
For example: 2005-08-09T10:57:00-08:00
updated_max: str (optional) updated time must be earlier than timestamp.
pretty_print: boolean (optional) If True the server's XML response will
be indented to make it more human readable. Defaults to False.
published_min: str (optional), Similar to updated_min but for published
time.
published_max: str (optional), Similar to updated_max but for published
time.
start_index: int or str (optional) 1-based index of the first result to
be retrieved. Note that this isn't a general cursoring mechanism.
If you first send a query with ?start-index=1&max-results=10 and
then send another query with ?start-index=11&max-results=10, the
service cannot guarantee that the results are equivalent to
?start-index=1&max-results=20, because insertions and deletions
could have taken place in between the two queries.
max_results: int or str (optional) Maximum number of results to be
retrieved. Each service has a default max (usually 25) which can
vary from service to service. There is also a service-specific
limit to the max_results you can fetch in a request.
strict: boolean (optional) If True, the server will return an error if
the server does not recognize any of the parameters in the request
URL. Defaults to False.
"""
self.text_query = text_query
self.categories = categories or []
self.author = author
self.alt = alt
self.updated_min = updated_min
self.updated_max = updated_max
self.pretty_print = pretty_print
self.published_min = published_min
self.published_max = published_max
self.start_index = start_index
self.max_results = max_results
self.strict = strict
def modify_request(self, http_request):
_add_query_param('q', self.text_query, http_request)
if self.categories:
http_request.uri.query['category'] = ','.join(self.categories)
_add_query_param('author', self.author, http_request)
_add_query_param('alt', self.alt, http_request)
_add_query_param('updated-min', self.updated_min, http_request)
_add_query_param('updated-max', self.updated_max, http_request)
if self.pretty_print:
http_request.uri.query['prettyprint'] = 'true'
_add_query_param('published-min', self.published_min, http_request)
_add_query_param('published-max', self.published_max, http_request)
if self.start_index is not None:
http_request.uri.query['start-index'] = str(self.start_index)
if self.max_results is not None:
http_request.uri.query['max-results'] = str(self.max_results)
if self.strict:
http_request.uri.query['strict'] = 'true'
ModifyRequest = modify_request
class GDQuery(atom.http_core.Uri):
def _get_text_query(self):
return self.query['q']
def _set_text_query(self, value):
self.query['q'] = value
text_query = property(_get_text_query, _set_text_query,
doc='The q parameter for searching for an exact text match on content')
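# Hedged usage sketch for the Query class above: building a query and
# applying it to a request adds the GData parameters to the request URI.
# The feed URL is a placeholder.
def _example_query():
    query = Query(text_query='projector', max_results=10, strict=True)
    http_request = atom.http_core.HttpRequest(
        uri=atom.http_core.Uri.parse_uri('https://example.com/feeds/sample'))
    query.modify_request(http_request)
    # The request URI now carries ?q=projector&max-results=10&strict=true.
    return http_request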
class ResumableUploader(object):
"""Resumable upload helper for the Google Data protocol."""
DEFAULT_CHUNK_SIZE = 5242880 # 5MB
def __init__(self, client, file_handle, content_type, total_file_size,
chunk_size=None, desired_class=None):
"""Starts a resumable upload to a service that supports the protocol.
Args:
client: gdata.client.GDClient A Google Data API service.
file_handle: object A file-like object containing the file to upload.
content_type: str The mimetype of the file to upload.
total_file_size: int The file's total size in bytes.
chunk_size: int The size of each upload chunk. If None, the
DEFAULT_CHUNK_SIZE will be used.
desired_class: object (optional) The type of gdata.data.GDEntry to parse
the completed entry as. This should be specific to the API.
"""
self.client = client
self.file_handle = file_handle
self.content_type = content_type
self.total_file_size = total_file_size
self.chunk_size = chunk_size or self.DEFAULT_CHUNK_SIZE
self.desired_class = desired_class or gdata.data.GDEntry
self.upload_uri = None
# Send the entire file in a single chunk if its total size does not exceed the chunk size.
if self.total_file_size <= self.chunk_size:
self.chunk_size = total_file_size
def _init_session(self, resumable_media_link, entry=None, headers=None,
auth_token=None):
"""Starts a new resumable upload to a service that supports the protocol.
The method makes a request to initiate a new upload session. The unique
upload uri returned by the server (and set in this method) should be used
to send upload chunks to the server.
Args:
resumable_media_link: str The full URL for the #resumable-create-media or
#resumable-edit-media link for starting a resumable upload request or
updating media using a resumable PUT.
entry: An (optional) gdata.data.GDEntry containing metadata to create the
upload from.
headers: dict (optional) Additional headers to send in the initial request
to create the resumable upload request. These headers will override
any default headers sent in the request. For example:
headers={'Slug': 'MyTitle'}.
auth_token: (optional) An object which sets the Authorization HTTP header
in its modify_request method. Recommended classes include
gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
among others.
Returns:
None. The unique upload uri returned by the server is stored in
self.upload_uri for use by upload_chunk.
Raises:
RequestError if the unique upload uri is not set or the
server returns something other than an HTTP 308 when the upload is
incomplete.
"""
http_request = atom.http_core.HttpRequest()
# Send empty POST if Atom XML wasn't specified.
if entry is None:
http_request.add_body_part('', self.content_type, size=0)
else:
http_request.add_body_part(str(entry), 'application/atom+xml',
size=len(str(entry)))
http_request.headers['X-Upload-Content-Type'] = self.content_type
http_request.headers['X-Upload-Content-Length'] = self.total_file_size
if headers is not None:
http_request.headers.update(headers)
response = self.client.request(method='POST',
uri=resumable_media_link,
auth_token=auth_token,
http_request=http_request)
self.upload_uri = (response.getheader('location') or
response.getheader('Location'))
_InitSession = _init_session
def upload_chunk(self, start_byte, content_bytes):
"""Uploads a byte range (chunk) to the resumable upload server.
Args:
start_byte: int The byte offset of the total file where the byte range
passed in lives.
content_bytes: str The file contents of this chunk.
Returns:
The final Atom entry created on the server. The entry object's type will
be the class specified in self.desired_class.
Raises:
RequestError if the unique upload uri is not set or the
server returns something other than an HTTP 308 when the upload is
incomplete.
"""
if self.upload_uri is None:
raise RequestError('Resumable upload request not initialized.')
# Adjust if the last byte range is smaller than the defined chunk size.
chunk_size = self.chunk_size
if len(content_bytes) <= chunk_size:
chunk_size = len(content_bytes)
http_request = atom.http_core.HttpRequest()
http_request.add_body_part(content_bytes, self.content_type,
size=len(content_bytes))
http_request.headers['Content-Range'] = ('bytes %s-%s/%s'
% (start_byte,
start_byte + chunk_size - 1,
self.total_file_size))
try:
response = self.client.request(method='POST', uri=self.upload_uri,
http_request=http_request,
desired_class=self.desired_class)
return response
except RequestError, error:
if error.status == 308:
return None
else:
raise error
UploadChunk = upload_chunk
def upload_file(self, resumable_media_link, entry=None, headers=None,
auth_token=None):
"""Uploads an entire file in chunks using the resumable upload protocol.
If you are interested in pausing an upload or controlling the chunking
yourself, use the upload_chunk() method instead.
Args:
resumable_media_link: str The full URL for the #resumable-create-media for
starting a resumable upload request.
entry: An (optional) gdata.data.GDEntry containing metadata to create the
upload from.
headers: dict Additional headers to send in the initial request to create
the resumable upload request. These headers will override any default
headers sent in the request. For example: headers={'Slug': 'MyTitle'}.
auth_token: (optional) An object which sets the Authorization HTTP header
in its modify_request method. Recommended classes include
gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
among others.
Returns:
The final Atom entry created on the server. The entry object's type will
be the class specified in self.desired_class.
Raises:
RequestError if the server returns anything other than an HTTP 308
while the upload is incomplete.
"""
self._init_session(resumable_media_link, headers=headers,
auth_token=auth_token, entry=entry)
start_byte = 0
entry = None
while not entry:
entry = self.upload_chunk(
start_byte, self.file_handle.read(self.chunk_size))
start_byte += self.chunk_size
return entry
UploadFile = upload_file
def update_file(self, entry_or_resumable_edit_link, headers=None, force=False,
auth_token=None):
"""Updates the contents of an existing file using the resumable protocol.
If you are interested in pausing an upload or controlling the chunking
yourself, use the upload_chunk() method instead.
Args:
entry_or_resumable_edit_link: object or string A gdata.data.GDEntry for
the entry/file to update or the full uri of the link with rel
#resumable-edit-media.
headers: dict Additional headers to send in the initial request to create
the resumable upload request. These headers will override any default
headers sent in the request. For example: headers={'Slug': 'MyTitle'}.
force: boolean (optional) True to force an update and set the If-Match
header to '*'. If False and entry_or_resumable_edit_link is a
gdata.data.GDEntry object, its etag value is used. Otherwise this
parameter should be set to True to force the update.
auth_token: (optional) An object which sets the Authorization HTTP header
in its modify_request method. Recommended classes include
gdata.gauth.ClientLoginToken and gdata.gauth.AuthSubToken
among others.
Returns:
The final Atom entry created on the server. The entry object's type will
be the class specified in self.desired_class.
Raises:
RequestError if the server returns anything other than an HTTP 308
while the upload is incomplete.
"""
# Need to override the POST request for a resumable update (required).
customer_headers = {'X-HTTP-Method-Override': 'PUT'}
if headers is not None:
customer_headers.update(headers)
if isinstance(entry_or_resumable_edit_link, gdata.data.GDEntry):
resumable_edit_link = entry_or_resumable_edit_link.find_url(
'http://schemas.google.com/g/2005#resumable-edit-media')
customer_headers['If-Match'] = entry_or_resumable_edit_link.etag
else:
resumable_edit_link = entry_or_resumable_edit_link
if force:
customer_headers['If-Match'] = '*'
return self.upload_file(resumable_edit_link, headers=customer_headers,
auth_token=auth_token)
UpdateFile = update_file
def query_upload_status(self, uri=None):
"""Queries the current status of a resumable upload request.
Args:
uri: str (optional) A resumable upload uri to query and override the one
that is set in this object.
Returns:
An integer representing the file position (byte) to resume the upload from
or True if the upload is complete.
Raises:
RequestError if the server returns anything other than an HTTP 308
while the upload is incomplete.
"""
# Override object's unique upload uri.
if uri is None:
uri = self.upload_uri
http_request = atom.http_core.HttpRequest()
http_request.headers['Content-Length'] = '0'
http_request.headers['Content-Range'] = 'bytes */%s' % self.total_file_size
try:
response = self.client.request(
method='POST', uri=uri, http_request=http_request)
if response.status == 201:
return True
else:
raise error_from_response(
'%s returned by server' % response.status, response, RequestError)
except RequestError, error:
if error.status == 308:
for pair in error.headers:
if pair[0].capitalize() == 'Range':
return int(pair[1].split('-')[1]) + 1
else:
raise error
QueryUploadStatus = query_upload_status
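# Hedged module-level sketch of the helper above: upload a local file with
# the default chunking. The file path, content type, and resumable media
# link are placeholders for service-specific values.
def _example_resumable_upload(client, create_media_link):
    import os
    path = '/tmp/example.bin'
    handle = open(path, 'rb')
    try:
        uploader = ResumableUploader(client, handle,
                                     'application/octet-stream',
                                     os.path.getsize(path))
        return uploader.upload_file(create_media_link)
    finally:
        handle.close()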
|
gpl-3.0
|
Fahmedullah/parallel-ssh
|
psshlib/askpass_client.py
|
66
|
3467
|
#!/usr/bin/env python
# -*- Mode: python -*-
# Copyright (c) 2009-2012, Andrew McNabb
"""Implementation of SSH_ASKPASS to get a password to ssh from pssh.
The password is read from the socket specified by the environment variable
PSSH_ASKPASS_SOCKET. The other end of this socket is pssh.
The ssh man page discusses SSH_ASKPASS as follows:
If ssh needs a passphrase, it will read the passphrase from the current
terminal if it was run from a terminal. If ssh does not have a terminal
associated with it but DISPLAY and SSH_ASKPASS are set, it will execute
the program specified by SSH_ASKPASS and open an X11 window to read the
passphrase. This is particularly useful when calling ssh from a .xsession
or related script. (Note that on some machines it may be necessary to
redirect the input from /dev/null to make this work.)
"""
import os
import socket
import sys
import textwrap
bin_dir = os.path.dirname(os.path.abspath(sys.argv[0]))
askpass_bin_path = os.path.join(bin_dir, 'pssh-askpass')
ASKPASS_PATHS = (askpass_bin_path,
'/usr/libexec/pssh/pssh-askpass',
'/usr/local/libexec/pssh/pssh-askpass',
'/usr/lib/pssh/pssh-askpass',
'/usr/local/lib/pssh/pssh-askpass')
_executable_path = None
def executable_path():
"""Determines the value to use for SSH_ASKPASS.
The value is cached since this may be called many times.
"""
global _executable_path
if _executable_path is None:
for path in ASKPASS_PATHS:
if os.access(path, os.X_OK):
_executable_path = path
break
else:
_executable_path = ''
sys.stderr.write(textwrap.fill("Warning: could not find an"
" executable path for askpass because PSSH was not"
" installed correctly. Password prompts will not work."))
sys.stderr.write('\n')
return _executable_path
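# Hedged sketch (not part of pssh itself) of the environment the parent
# process would set up so that ssh runs this script: SSH_ASKPASS points at
# the executable found above, DISPLAY must be set, and PSSH_ASKPASS_SOCKET
# names the Unix socket that serves the password.
def _example_askpass_environment(socket_path):
    env = dict(os.environ)
    env['SSH_ASKPASS'] = executable_path()
    env['PSSH_ASKPASS_SOCKET'] = socket_path
    env.setdefault('DISPLAY', ':0')  # ssh ignores SSH_ASKPASS without DISPLAY
    return env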
def askpass_main():
"""Connects to pssh over the socket specified at PSSH_ASKPASS_SOCKET."""
verbose = os.getenv('PSSH_ASKPASS_VERBOSE')
# It's not documented anywhere, as far as I can tell, but ssh may prompt
# for a password or ask a yes/no question. The command-line argument
# specifies what is needed.
if len(sys.argv) > 1:
prompt = sys.argv[1]
if verbose:
sys.stderr.write('pssh-askpass received prompt: "%s"\n' % prompt)
if not prompt.strip().lower().endswith('password:'):
sys.stderr.write(prompt)
sys.stderr.write('\n')
sys.exit(1)
else:
sys.stderr.write('Error: pssh-askpass called without a prompt.\n')
sys.exit(1)
address = os.getenv('PSSH_ASKPASS_SOCKET')
if not address:
sys.stderr.write(textwrap.fill("pssh error: SSH requested a password."
" Please create SSH keys or use the -A option to provide a"
" password."))
sys.stderr.write('\n')
sys.exit(1)
sock = socket.socket(socket.AF_UNIX)
try:
sock.connect(address)
except socket.error:
_, e, _ = sys.exc_info()
message = e.args[1]
sys.stderr.write("Couldn't bind to %s: %s.\n" % (address, message))
sys.exit(2)
try:
password = sock.makefile().read()
except socket.error:
sys.stderr.write("Socket error.\n")
sys.exit(3)
print(password)
if __name__ == '__main__':
askpass_main()
|
bsd-3-clause
|
tersmitten/ansible
|
lib/ansible/modules/network/fortios/fortios_user_radius.py
|
20
|
28212
|
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
# The library uses Python logging; its output can be captured if logging
# is enabled in your Ansible config.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_user_radius
short_description: Configure RADIUS server entries in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS by allowing the
user to set and modify user feature and radius category.
Examples include all parameters, and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.2
version_added: "2.8"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate ip address.
required: true
username:
description:
- FortiOS or FortiGate username.
required: true
password:
description:
- FortiOS or FortiGate password.
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS
protocol
type: bool
default: true
user_radius:
description:
- Configure RADIUS server entries.
default: null
suboptions:
state:
description:
- Indicates whether to create or remove the object
choices:
- present
- absent
accounting-server:
description:
- Additional accounting servers.
suboptions:
id:
description:
- ID (0 - 4294967295).
required: true
port:
description:
- RADIUS accounting port number.
secret:
description:
- Secret key.
server:
description:
- Server CN domain name or IP.
source-ip:
description:
- Source IP address for communications to the RADIUS server.
status:
description:
- Status.
choices:
- enable
- disable
acct-all-servers:
description:
- Enable/disable sending of accounting messages to all configured servers (default = disable).
choices:
- enable
- disable
acct-interim-interval:
description:
- Time in seconds between each accounting interim update message.
all-usergroup:
description:
- Enable/disable automatically including this RADIUS server in all user groups.
choices:
- disable
- enable
auth-type:
description:
- Authentication methods/protocols permitted for this RADIUS server.
choices:
- auto
- ms_chap_v2
- ms_chap
- chap
- pap
class:
description:
- Class attribute name(s).
suboptions:
name:
description:
- Class name.
required: true
h3c-compatibility:
description:
- Enable/disable compatibility with the H3C, a mechanism that performs security checking for authentication.
choices:
- enable
- disable
name:
description:
- RADIUS server entry name.
required: true
nas-ip:
description:
- IP address used to communicate with the RADIUS server and used as NAS-IP-Address and Called-Station-ID attributes.
password-encoding:
description:
- Password encoding.
choices:
- auto
- ISO-8859-1
password-renewal:
description:
- Enable/disable password renewal.
choices:
- enable
- disable
radius-coa:
description:
- Enable to allow a mechanism to change the attributes of an authentication, authorization, and accounting session after it is
authenticated.
choices:
- enable
- disable
radius-port:
description:
- RADIUS service port number.
rsso:
description:
- Enable/disable RADIUS based single sign on feature.
choices:
- enable
- disable
rsso-context-timeout:
description:
- Time in seconds before the logged out user is removed from the "user context list" of logged on users.
rsso-endpoint-attribute:
description:
- RADIUS attributes used to extract the user end point identifier from the RADIUS Start record.
choices:
- User-Name
- NAS-IP-Address
- Framed-IP-Address
- Framed-IP-Netmask
- Filter-Id
- Login-IP-Host
- Reply-Message
- Callback-Number
- Callback-Id
- Framed-Route
- Framed-IPX-Network
- Class
- Called-Station-Id
- Calling-Station-Id
- NAS-Identifier
- Proxy-State
- Login-LAT-Service
- Login-LAT-Node
- Login-LAT-Group
- Framed-AppleTalk-Zone
- Acct-Session-Id
- Acct-Multi-Session-Id
rsso-endpoint-block-attribute:
description:
- RADIUS attributes used to block a user.
choices:
- User-Name
- NAS-IP-Address
- Framed-IP-Address
- Framed-IP-Netmask
- Filter-Id
- Login-IP-Host
- Reply-Message
- Callback-Number
- Callback-Id
- Framed-Route
- Framed-IPX-Network
- Class
- Called-Station-Id
- Calling-Station-Id
- NAS-Identifier
- Proxy-State
- Login-LAT-Service
- Login-LAT-Node
- Login-LAT-Group
- Framed-AppleTalk-Zone
- Acct-Session-Id
- Acct-Multi-Session-Id
rsso-ep-one-ip-only:
description:
- Enable/disable the replacement of old IP addresses with new ones for the same endpoint on RADIUS accounting Start messages.
choices:
- enable
- disable
rsso-flush-ip-session:
description:
- Enable/disable flushing user IP sessions on RADIUS accounting Stop messages.
choices:
- enable
- disable
rsso-log-flags:
description:
- Events to log.
choices:
- protocol-error
- profile-missing
- accounting-stop-missed
- accounting-event
- endpoint-block
- radiusd-other
- none
rsso-log-period:
description:
- Time interval in seconds that group event log messages will be generated for dynamic profile events.
rsso-radius-response:
description:
- Enable/disable sending RADIUS response packets after receiving Start and Stop records.
choices:
- enable
- disable
rsso-radius-server-port:
description:
- UDP port to listen on for RADIUS Start and Stop records.
rsso-secret:
description:
- RADIUS secret used by the RADIUS accounting server.
rsso-validate-request-secret:
description:
- Enable/disable validating the RADIUS request shared secret in the Start or End record.
choices:
- enable
- disable
secondary-secret:
description:
- Secret key to access the secondary server.
secondary-server:
description:
- Secondary RADIUS CN domain name or IP.
secret:
description:
- Pre-shared secret key used to access the primary RADIUS server.
server:
description:
- Primary RADIUS server CN domain name or IP address.
source-ip:
description:
- Source IP address for communications to the RADIUS server.
sso-attribute:
description:
- RADIUS attribute that contains the profile group name to be extracted from the RADIUS Start record.
choices:
- User-Name
- NAS-IP-Address
- Framed-IP-Address
- Framed-IP-Netmask
- Filter-Id
- Login-IP-Host
- Reply-Message
- Callback-Number
- Callback-Id
- Framed-Route
- Framed-IPX-Network
- Class
- Called-Station-Id
- Calling-Station-Id
- NAS-Identifier
- Proxy-State
- Login-LAT-Service
- Login-LAT-Node
- Login-LAT-Group
- Framed-AppleTalk-Zone
- Acct-Session-Id
- Acct-Multi-Session-Id
sso-attribute-key:
description:
- Key prefix for SSO group value in the SSO attribute.
sso-attribute-value-override:
description:
- Enable/disable overriding the old attribute value with the new value for the same endpoint.
choices:
- enable
- disable
tertiary-secret:
description:
- Secret key to access the tertiary server.
tertiary-server:
description:
- Tertiary RADIUS CN domain name or IP.
timeout:
description:
- Time in seconds between re-sending authentication requests.
use-management-vdom:
description:
- Enable/disable using management VDOM to send requests.
choices:
- enable
- disable
username-case-sensitive:
description:
- Enable/disable case sensitive user names.
choices:
- enable
- disable
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
tasks:
- name: Configure RADIUS server entries.
fortios_user_radius:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
user_radius:
state: "present"
accounting-server:
-
id: "4"
port: "5"
secret: "<your_own_value>"
server: "192.168.100.40"
source-ip: "84.230.14.43"
status: "enable"
acct-all-servers: "enable"
acct-interim-interval: "11"
all-usergroup: "disable"
auth-type: "auto"
class:
-
name: "default_name_15"
h3c-compatibility: "enable"
name: "default_name_17"
nas-ip: "<your_own_value>"
password-encoding: "auto"
password-renewal: "enable"
radius-coa: "enable"
radius-port: "22"
rsso: "enable"
rsso-context-timeout: "24"
rsso-endpoint-attribute: "User-Name"
rsso-endpoint-block-attribute: "User-Name"
rsso-ep-one-ip-only: "enable"
rsso-flush-ip-session: "enable"
rsso-log-flags: "protocol-error"
rsso-log-period: "30"
rsso-radius-response: "enable"
rsso-radius-server-port: "32"
rsso-secret: "<your_own_value>"
rsso-validate-request-secret: "enable"
secondary-secret: "<your_own_value>"
secondary-server: "<your_own_value>"
secret: "<your_own_value>"
server: "192.168.100.40"
source-ip: "84.230.14.43"
sso-attribute: "User-Name"
sso-attribute-key: "<your_own_value>"
sso-attribute-value-override: "enable"
tertiary-secret: "<your_own_value>"
tertiary-server: "<your_own_value>"
timeout: "45"
use-management-vdom: "enable"
username-case-sensitive: "enable"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
fos = None
def login(data):
host = data['host']
username = data['username']
password = data['password']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password)
def filter_user_radius_data(json):
option_list = ['accounting-server', 'acct-all-servers', 'acct-interim-interval',
'all-usergroup', 'auth-type', 'class',
'h3c-compatibility', 'name', 'nas-ip',
'password-encoding', 'password-renewal', 'radius-coa',
'radius-port', 'rsso', 'rsso-context-timeout',
'rsso-endpoint-attribute', 'rsso-endpoint-block-attribute', 'rsso-ep-one-ip-only',
'rsso-flush-ip-session', 'rsso-log-flags', 'rsso-log-period',
'rsso-radius-response', 'rsso-radius-server-port', 'rsso-secret',
'rsso-validate-request-secret', 'secondary-secret', 'secondary-server',
'secret', 'server', 'source-ip',
'sso-attribute', 'sso-attribute-key', 'sso-attribute-value-override',
'tertiary-secret', 'tertiary-server', 'timeout',
'use-management-vdom', 'username-case-sensitive']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
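# Hedged illustration of the filter: keys outside option_list (such as
# 'state') and None values are dropped before the payload is sent to the
# FortiGate. The sample values are for demonstration only.
def _example_filter_user_radius_data():
    raw = {'state': 'present', 'name': 'radius-1',
           'server': '192.168.100.40', 'timeout': None}
    filtered = filter_user_radius_data(raw)
    # filtered == {'name': 'radius-1', 'server': '192.168.100.40'}
    return filtered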
def flatten_multilists_attributes(data):
multilist_attrs = []
for attr in multilist_attrs:
try:
path = "data['" + "']['".join(elem for elem in attr) + "']"
current_val = eval(path)
flattened_val = ' '.join(elem for elem in current_val)
exec(path + '= flattened_val')
except BaseException:
pass
return data
def user_radius(data, fos):
vdom = data['vdom']
user_radius_data = data['user_radius']
flattened_data = flatten_multilists_attributes(user_radius_data)
filtered_data = filter_user_radius_data(flattened_data)
if user_radius_data['state'] == "present":
return fos.set('user',
'radius',
data=filtered_data,
vdom=vdom)
elif user_radius_data['state'] == "absent":
return fos.delete('user',
'radius',
mkey=filtered_data['name'],
vdom=vdom)
def fortios_user(data, fos):
login(data)
if data['user_radius']:
resp = user_radius(data, fos)
fos.logout()
return not resp['status'] == "success", resp['status'] == "success", resp
def main():
fields = {
"host": {"required": True, "type": "str"},
"username": {"required": True, "type": "str"},
"password": {"required": False, "type": "str", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"user_radius": {
"required": False, "type": "dict",
"options": {
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"accounting-server": {"required": False, "type": "list",
"options": {
"id": {"required": True, "type": "int"},
"port": {"required": False, "type": "int"},
"secret": {"required": False, "type": "str"},
"server": {"required": False, "type": "str"},
"source-ip": {"required": False, "type": "str"},
"status": {"required": False, "type": "str",
"choices": ["enable", "disable"]}
}},
"acct-all-servers": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"acct-interim-interval": {"required": False, "type": "int"},
"all-usergroup": {"required": False, "type": "str",
"choices": ["disable", "enable"]},
"auth-type": {"required": False, "type": "str",
"choices": ["auto", "ms_chap_v2", "ms_chap",
"chap", "pap"]},
"class": {"required": False, "type": "list",
"options": {
"name": {"required": True, "type": "str"}
}},
"h3c-compatibility": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"name": {"required": True, "type": "str"},
"nas-ip": {"required": False, "type": "str"},
"password-encoding": {"required": False, "type": "str",
"choices": ["auto", "ISO-8859-1"]},
"password-renewal": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"radius-coa": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"radius-port": {"required": False, "type": "int"},
"rsso": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"rsso-context-timeout": {"required": False, "type": "int"},
"rsso-endpoint-attribute": {"required": False, "type": "str",
"choices": ["User-Name", "NAS-IP-Address", "Framed-IP-Address",
"Framed-IP-Netmask", "Filter-Id", "Login-IP-Host",
"Reply-Message", "Callback-Number", "Callback-Id",
"Framed-Route", "Framed-IPX-Network", "Class",
"Called-Station-Id", "Calling-Station-Id", "NAS-Identifier",
"Proxy-State", "Login-LAT-Service", "Login-LAT-Node",
"Login-LAT-Group", "Framed-AppleTalk-Zone", "Acct-Session-Id",
"Acct-Multi-Session-Id"]},
"rsso-endpoint-block-attribute": {"required": False, "type": "str",
"choices": ["User-Name", "NAS-IP-Address", "Framed-IP-Address",
"Framed-IP-Netmask", "Filter-Id", "Login-IP-Host",
"Reply-Message", "Callback-Number", "Callback-Id",
"Framed-Route", "Framed-IPX-Network", "Class",
"Called-Station-Id", "Calling-Station-Id", "NAS-Identifier",
"Proxy-State", "Login-LAT-Service", "Login-LAT-Node",
"Login-LAT-Group", "Framed-AppleTalk-Zone", "Acct-Session-Id",
"Acct-Multi-Session-Id"]},
"rsso-ep-one-ip-only": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"rsso-flush-ip-session": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"rsso-log-flags": {"required": False, "type": "str",
"choices": ["protocol-error", "profile-missing", "accounting-stop-missed",
"accounting-event", "endpoint-block", "radiusd-other",
"none"]},
"rsso-log-period": {"required": False, "type": "int"},
"rsso-radius-response": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"rsso-radius-server-port": {"required": False, "type": "int"},
"rsso-secret": {"required": False, "type": "str"},
"rsso-validate-request-secret": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"secondary-secret": {"required": False, "type": "str"},
"secondary-server": {"required": False, "type": "str"},
"secret": {"required": False, "type": "str"},
"server": {"required": False, "type": "str"},
"source-ip": {"required": False, "type": "str"},
"sso-attribute": {"required": False, "type": "str",
"choices": ["User-Name", "NAS-IP-Address", "Framed-IP-Address",
"Framed-IP-Netmask", "Filter-Id", "Login-IP-Host",
"Reply-Message", "Callback-Number", "Callback-Id",
"Framed-Route", "Framed-IPX-Network", "Class",
"Called-Station-Id", "Calling-Station-Id", "NAS-Identifier",
"Proxy-State", "Login-LAT-Service", "Login-LAT-Node",
"Login-LAT-Group", "Framed-AppleTalk-Zone", "Acct-Session-Id",
"Acct-Multi-Session-Id"]},
"sso-attribute-key": {"required": False, "type": "str"},
"sso-attribute-value-override": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"tertiary-secret": {"required": False, "type": "str"},
"tertiary-server": {"required": False, "type": "str"},
"timeout": {"required": False, "type": "int"},
"use-management-vdom": {"required": False, "type": "str",
"choices": ["enable", "disable"]},
"username-case-sensitive": {"required": False, "type": "str",
"choices": ["enable", "disable"]}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
global fos
fos = FortiOSAPI()
is_error, has_changed, result = fortios_user(module.params, fos)
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
        module.fail_json(msg="Error in executing the FortiOS API request", meta=result)
if __name__ == '__main__':
main()
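# Hypothetical smoke test for the CRUD contract above; ``_FakeFOS`` and
# ``_example_user_radius`` are assumptions for illustration, standing in for
# fortiosapi.FortiOSAPI rather than part of this module.
class _FakeFOS(object):
    def set(self, path, name, data=None, vdom=None):
        return {'status': 'success', 'http_method': 'PUT'}

    def delete(self, path, name, mkey=None, vdom=None):
        return {'status': 'success', 'http_method': 'DELETE'}

def _example_user_radius():
    params = {'vdom': 'root',
              'user_radius': {'state': 'present', 'name': 'radius-1',
                              'server': '192.0.2.10', 'secret': 's3cret'}}
    # Returns a FortiOS-style response dict; state "absent" would call delete().
    return user_radius(params, _FakeFOS())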
|
gpl-3.0
|
sungyism/sungyism
|
gmetad-python/Gmetad/gmetad_daemon.py
|
16
|
3379
|
#/*******************************************************************************
#* Portions Copyright (C) 2008 Novell, Inc. All rights reserved.
#*
#* Redistribution and use in source and binary forms, with or without
#* modification, are permitted provided that the following conditions are met:
#*
#* - Redistributions of source code must retain the above copyright notice,
#* this list of conditions and the following disclaimer.
#*
#* - Redistributions in binary form must reproduce the above copyright notice,
#* this list of conditions and the following disclaimer in the documentation
#* and/or other materials provided with the distribution.
#*
#* - Neither the name of Novell, Inc. nor the names of its
#* contributors may be used to endorse or promote products derived from this
#* software without specific prior written permission.
#*
#* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ``AS IS''
#* AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
#* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
#* ARE DISCLAIMED. IN NO EVENT SHALL Novell, Inc. OR THE CONTRIBUTORS
#* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
#* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
#* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
#* INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
#* CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
#* ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
#* POSSIBILITY OF SUCH DAMAGE.
#*
#* Authors: Matt Ryan (mrayn novell.com)
#* Brad Nicholes (bnicholes novell.com)
#******************************************************************************/
import os
import pwd
import sys
import resource
from gmetad_config import getConfig, GmetadConfig
def setuid():
cfg = getConfig()
setuid_user = None
if cfg[GmetadConfig.SETUID]:
setuid_user = cfg[GmetadConfig.SETUID_USERNAME]
if setuid_user is not None:
try:
os.setuid(pwd.getpwnam(setuid_user)[2])
except Exception:
print 'Unable to setuid to user "%s", exiting' % setuid_user
sys.exit()
def daemonize(ignore_fds=[]):
UMASK=0
WORKDIR = '/'
MAXFD = 1024
REDIRECT_TO = '/dev/null'
if hasattr(os, 'devnull'):
REDIRECT_TO = os.devnull
try:
pid = os.fork()
except OSError, e:
raise Exception, 'Daemonize error: %d (%s)' % (e.errno, e.strerror)
if pid == 0:
        # first child: become session leader to detach from the controlling terminal
os.setsid()
try:
pid = os.fork()
except OSError, e:
raise Exception, 'Daemonize error: %d (%s)' % (e.errno, e.strerror)
if pid == 0:
            # second child: fully detached; reset working directory and umask
os.chdir(WORKDIR)
os.umask(UMASK)
else:
os._exit(0)
else:
os._exit(0)
maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
if resource.RLIM_INFINITY == maxfd:
maxfd = MAXFD
for fd in range(0,maxfd):
if fd in ignore_fds: continue
try:
os.close(fd)
except OSError:
pass
os.open(REDIRECT_TO, os.O_RDWR)
os.dup2(0,1)
os.dup2(0,2)
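# Hypothetical usage sketch (not called by gmetad itself): daemonize first,
# keeping an already-open log descriptor alive, then drop root privileges
# according to the [gmetad] setuid configuration.
def example_startup(logfile_fd):
    daemonize(ignore_fds=[logfile_fd])
    setuid()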
|
bsd-3-clause
|
steven-cutting/icsisumm
|
icsisumm-primary-sys34_v1/nltk/nltk-0.9.2/nltk/parse/api.py
|
9
|
6878
|
# Natural Language Toolkit: Parser API
#
# Copyright (C) 2001-2008 University of Pennsylvania
# Author: Steven Bird <[email protected]>
# Edward Loper <[email protected]>
# URL: <http://nltk.sf.net>
# For license information, see LICENSE.TXT
#
import itertools
from nltk.internals import deprecated, Deprecated, overridden
class ParserI(object):
"""
A processing class for deriving trees that represent possible
structures for a sequence of tokens. These tree structures are
known as X{parses}. Typically, parsers are used to derive syntax
trees for sentences. But parsers can also be used to derive other
kinds of tree structure, such as morphological trees and discourse
structures.
Subclasses must define:
- at least one of: L{parse()}, L{nbest_parse()}, L{iter_parse()},
L{batch_parse()}, L{batch_nbest_parse()}, L{batch_iter_parse()}.
Subclasses may define:
- L{grammar()}
      - either L{prob_parse()} or L{batch_prob_parse()} (or both)
"""
def grammar(self):
"""
@return: The grammar used by this parser.
"""
raise NotImplementedError()
def parse(self, sent):
"""
@return: A parse tree that represents the structure of the
given sentence, or C{None} if no parse tree is found. If
multiple parses are found, then return the best parse.
@param sent: The sentence to be parsed
@type sent: L{list} of L{string}
@rtype: L{Tree}
"""
if overridden(self.batch_parse):
return self.batch_parse([sent])[0]
else:
trees = self.nbest_parse(sent, 1)
if trees: return trees[0]
else: return None
def nbest_parse(self, sent, n=None):
"""
@return: A list of parse trees that represent possible
structures for the given sentence. When possible, this list is
sorted from most likely to least likely. If C{n} is
specified, then the returned list will contain at most C{n}
parse trees.
@param sent: The sentence to be parsed
@type sent: L{list} of L{string}
@param n: The maximum number of trees to return.
@type n: C{int}
@rtype: C{list} of L{Tree}
"""
if overridden(self.batch_nbest_parse):
return self.batch_nbest_parse([sent],n)[0]
        elif overridden(self.parse) or overridden(self.batch_parse):
tree = self.parse(sent)
if tree: return [tree]
else: return []
else:
return list(itertools.islice(self.iter_parse(sent), n))
def iter_parse(self, sent):
"""
@return: An iterator that generates parse trees that represent
possible structures for the given sentence. When possible,
        the trees are generated from most likely to least likely.
@param sent: The sentence to be parsed
@type sent: L{list} of L{string}
@rtype: C{iterator} of L{Tree}
"""
if overridden(self.batch_iter_parse):
return self.batch_iter_parse([sent])[0]
elif overridden(self.nbest_parse) or overridden(self.batch_nbest_parse):
return iter(self.nbest_parse(sent))
        elif overridden(self.parse) or overridden(self.batch_parse):
tree = self.parse(sent)
if tree: return iter([tree])
else: return iter([])
else:
raise NotImplementedError()
def prob_parse(self, sent):
"""
@return: A probability distribution over the possible parse
trees for the given sentence. If there are no possible parse
trees for the given sentence, return a probability distribution
that assigns a probability of 1.0 to C{None}.
@param sent: The sentence to be parsed
@type sent: L{list} of L{string}
@rtype: L{ProbDist} of L{Tree}
"""
if overridden(self.batch_prob_parse):
return self.batch_prob_parse([sent])[0]
else:
            raise NotImplementedError()
def batch_parse(self, sents):
"""
Apply L{self.parse()} to each element of C{sents}. I.e.:
>>> return [self.parse(sent) for sent in sents]
@rtype: C{list} of L{Tree}
"""
return [self.parse(sent) for sent in sents]
def batch_nbest_parse(self, sents, n=None):
"""
Apply L{self.nbest_parse()} to each element of C{sents}. I.e.:
>>> return [self.nbest_parse(sent, n) for sent in sents]
@rtype: C{list} of C{list} of L{Tree}
"""
        return [self.nbest_parse(sent, n) for sent in sents]
def batch_iter_parse(self, sents):
"""
Apply L{self.iter_parse()} to each element of C{sents}. I.e.:
>>> return [self.iter_parse(sent) for sent in sents]
@rtype: C{list} of C{iterator} of L{Tree}
"""
return [self.iter_parse(sent) for sent in sents]
def batch_prob_parse(self, sents):
"""
Apply L{self.prob_parse()} to each element of C{sents}. I.e.:
>>> return [self.prob_parse(sent) for sent in sents]
@rtype: C{list} of L{ProbDist} of L{Tree}
"""
return [self.prob_parse(sent) for sent in sents]
#////////////////////////////////////////////////////////////
#{ Deprecated
@deprecated("Use parse() instead.")
def get_parse(self, sent):
return self.parse(sent)
@deprecated("Use nbest_parse() instead.")
def get_parse_list(self, sent):
return self.nbest_parse(sent)
@deprecated("Use prob_parse() instead.")
def get_parse_prob(self, sent):
return self.prob_parse(sent)
@deprecated("Use prob_parse() instead.")
def get_parse_dict(self, sent):
return self.prob_parse(sent)
@deprecated("No longer supported.")
def batch_test(self, filename):
f = open(filename)
for line in f:
line = line.strip()
if not line: continue
if line.startswith('#'):
print line
continue
print "Sentence:", line
parses = self.nbest_parse(line)
print "%d parses." % len(parses)
            for tree in parses: print tree
        f.close()
#}
#////////////////////////////////////////////////////////////
######################################################################
#{ Deprecated
class ParseI(ParserI, Deprecated):
"Use ParserI instead."
class AbstractParser(Deprecated, ParserI):
"""Use ParserI instead."""
@deprecated("Use nltk.cfg.Grammar.check_coverage() instead.")
def _check_coverage(self, tokens):
self._grammar.check_coverage(tokens)
#}
######################################################################
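# A minimal, hypothetical ParserI subclass (not part of NLTK) illustrating the
# overridden() dispatch above: defining nbest_parse() alone is enough for
# parse() and iter_parse() to work through the fallbacks. Assumes nltk.Tree
# is importable at call time.
class _EchoParser(ParserI):
    def nbest_parse(self, sent, n=None):
        from nltk import Tree
        # Wrap the token list in a flat S tree; a real parser would search a grammar.
        trees = [Tree('S', list(sent))]
        if n is not None:
            return trees[:n]
        return trees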
|
gpl-3.0
|
pombredanne/mopidy-webhooks
|
mopidy_webhooks/reporters/status.py
|
2
|
2051
|
# future imports
from __future__ import absolute_import
from __future__ import unicode_literals
# stdlib imports
import logging
import time
# third-party imports
import pykka
# local imports
from ..utils import send_webhook
logger = logging.getLogger(__name__)
class StatusReporter(pykka.ThreadingActor):
"""Periodically sends webhook notifications to the configured server
containing data on the player's current status.
"""
def __init__(self, config, core):
super(StatusReporter, self).__init__()
self.config = config['webhooks']
self.core = core
self.in_future = self.actor_ref.proxy()
def on_start(self):
"""Runs when the actor is started and schedules a status update
"""
logger.info('StatusReporter started.')
# if configured not to report status then return immediately
if self.config['status_update_interval'] == 0:
logger.info('StatusReporter disabled by configuration.')
return
self.in_future.report_status()
def report_again(self, current_status):
"""Computes a sleep interval, sleeps for the specified amount of time
then kicks off another status report.
"""
# calculate sleep interval based on current status and configured interval
_m = {'playing': 1, 'paused': 2, 'stopped': 5}[current_status['state']]
interval = (self.config['status_update_interval'] * _m) / 1000.0
# sleep for computed interval and kickoff another webhook
time.sleep(interval)
self.in_future.report_status()
def report_status(self):
"""Get status of player from mopidy core and send webhook.
"""
current_status = {
'current_track': self.core.playback.current_track.get(),
'state': self.core.playback.state.get(),
'time_position': self.core.playback.time_position.get(),
}
send_webhook(self.config, {'status_report': current_status})
self.report_again(current_status)
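# Illustrative arithmetic for the interval rule in report_again() above; the
# helper name is an assumption. The division by 1000.0 in the source implies
# status_update_interval is configured in milliseconds, so with an interval of
# 1000 and state 'stopped', reports go out every 5.0 seconds.
def _example_interval(status_update_interval_ms, state):
    multiplier = {'playing': 1, 'paused': 2, 'stopped': 5}[state]
    return (status_update_interval_ms * multiplier) / 1000.0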
|
apache-2.0
|
7anner/grpc
|
test/http2_test/test_rst_during_data.py
|
26
|
2707
|
# Copyright 2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import http2_base_server
class TestcaseRstStreamDuringData(object):
"""
In response to an incoming request, this test sends headers, followed by
some data, followed by a reset stream frame. Client asserts that the RPC
failed and does not deliver the message to the application.
"""
def __init__(self):
self._base_server = http2_base_server.H2ProtocolBaseServer()
self._base_server._handlers['DataReceived'] = self.on_data_received
self._base_server._handlers['SendDone'] = self.on_send_done
def get_base_server(self):
return self._base_server
def on_data_received(self, event):
self._base_server.on_data_received_default(event)
sr = self._base_server.parse_received_data(event.stream_id)
if sr:
response_data = self._base_server.default_response_data(sr.response_size)
self._ready_to_send = True
response_len = len(response_data)
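      # deliberately deliver only the first half of the payload; the RST_STREAM
      # sent from on_send_done() then arrives before the message completes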
      truncated_response_data = response_data[0:response_len // 2]
self._base_server.setup_send(truncated_response_data, event.stream_id)
def on_send_done(self, stream_id):
self._base_server.send_reset_stream()
self._base_server._stream_status[stream_id] = False
|
bsd-3-clause
|
hargup/sympy
|
sympy/polys/domains/pythonrationalfield.py
|
117
|
2234
|
"""Implementation of :class:`PythonRationalField` class. """
from __future__ import print_function, division
from sympy.polys.domains.rationalfield import RationalField
from sympy.polys.domains.groundtypes import PythonInteger, PythonRational, SymPyRational
from sympy.polys.polyerrors import CoercionFailed
from sympy.utilities import public
@public
class PythonRationalField(RationalField):
"""Rational field based on Python rational number type. """
dtype = PythonRational
zero = dtype(0)
one = dtype(1)
alias = 'QQ_python'
def __init__(self):
pass
def get_ring(self):
"""Returns ring associated with ``self``. """
from sympy.polys.domains import PythonIntegerRing
return PythonIntegerRing()
def to_sympy(self, a):
"""Convert `a` to a SymPy object. """
return SymPyRational(a.numerator, a.denominator)
def from_sympy(self, a):
"""Convert SymPy's Rational to `dtype`. """
if a.is_Rational:
return PythonRational(a.p, a.q)
elif a.is_Float:
from sympy.polys.domains import RR
p, q = RR.to_rational(a)
return PythonRational(int(p), int(q))
else:
raise CoercionFailed("expected `Rational` object, got %s" % a)
def from_ZZ_python(K1, a, K0):
"""Convert a Python `int` object to `dtype`. """
return PythonRational(a)
def from_QQ_python(K1, a, K0):
"""Convert a Python `Fraction` object to `dtype`. """
return a
def from_ZZ_gmpy(K1, a, K0):
"""Convert a GMPY `mpz` object to `dtype`. """
return PythonRational(PythonInteger(a))
def from_QQ_gmpy(K1, a, K0):
"""Convert a GMPY `mpq` object to `dtype`. """
return PythonRational(PythonInteger(a.numer()),
PythonInteger(a.denom()))
def from_RealField(K1, a, K0):
"""Convert a mpmath `mpf` object to `dtype`. """
p, q = K0.to_rational(a)
return PythonRational(int(p), int(q))
def numer(self, a):
"""Returns numerator of `a`. """
return a.numerator
def denom(self, a):
"""Returns denominator of `a`. """
return a.denominator
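# Hypothetical round-trip sketch (not part of this module) showing the
# coercion pair from_sympy()/to_sympy() in action; the helper name is an
# assumption for illustration.
def _example_roundtrip():
    from sympy import Rational
    domain = PythonRationalField()
    a = domain.from_sympy(Rational(3, 4))   # a PythonRational with p/q = 3/4
    return domain.to_sympy(a)               # back to SymPy Rational(3, 4)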
|
bsd-3-clause
|
GbalsaC/bitnamiP
|
venv/lib/python2.7/site-packages/django/contrib/localflavor/mk/mk_choices.py
|
91
|
2580
|
# -*- coding: utf-8 -*-
"""
Macedonian municipalities per the reorganization from 2004.
"""
from django.utils.translation import ugettext_lazy as _
MK_MUNICIPALITIES = (
('AD', _(u'Aerodrom')),
('AR', _(u'Aračinovo')),
('BR', _(u'Berovo')),
('TL', _(u'Bitola')),
('BG', _(u'Bogdanci')),
('VJ', _(u'Bogovinje')),
('BS', _(u'Bosilovo')),
('BN', _(u'Brvenica')),
('BU', _(u'Butel')),
('VA', _(u'Valandovo')),
('VL', _(u'Vasilevo')),
('VV', _(u'Vevčani')),
('VE', _(u'Veles')),
('NI', _(u'Vinica')),
('VC', _(u'Vraneštica')),
('VH', _(u'Vrapčište')),
('GB', _(u'Gazi Baba')),
('GV', _(u'Gevgelija')),
('GT', _(u'Gostivar')),
('GR', _(u'Gradsko')),
('DB', _(u'Debar')),
('DA', _(u'Debarca')),
('DL', _(u'Delčevo')),
('DK', _(u'Demir Kapija')),
('DM', _(u'Demir Hisar')),
('DE', _(u'Dolneni')),
('DR', _(u'Drugovo')),
('GP', _(u'Gjorče Petrov')),
('ZE', _(u'Želino')),
('ZA', _(u'Zajas')),
('ZK', _(u'Zelenikovo')),
('ZR', _(u'Zrnovci')),
('IL', _(u'Ilinden')),
('JG', _(u'Jegunovce')),
('AV', _(u'Kavadarci')),
('KB', _(u'Karbinci')),
('KX', _(u'Karpoš')),
('VD', _(u'Kisela Voda')),
('KH', _(u'Kičevo')),
('KN', _(u'Konče')),
    ('OC', _(u'Kočani')),
('KY', _(u'Kratovo')),
('KZ', _(u'Kriva Palanka')),
('KG', _(u'Krivogaštani')),
('KS', _(u'Kruševo')),
('UM', _(u'Kumanovo')),
('LI', _(u'Lipkovo')),
('LO', _(u'Lozovo')),
('MR', _(u'Mavrovo i Rostuša')),
('MK', _(u'Makedonska Kamenica')),
('MD', _(u'Makedonski Brod')),
('MG', _(u'Mogila')),
('NG', _(u'Negotino')),
('NV', _(u'Novaci')),
('NS', _(u'Novo Selo')),
('OS', _(u'Oslomej')),
('OD', _(u'Ohrid')),
('PE', _(u'Petrovec')),
('PH', _(u'Pehčevo')),
('PN', _(u'Plasnica')),
('PP', _(u'Prilep')),
('PT', _(u'Probištip')),
('RV', _(u'Radoviš')),
('RN', _(u'Rankovce')),
('RE', _(u'Resen')),
('RO', _(u'Rosoman')),
('AJ', _(u'Saraj')),
('SL', _(u'Sveti Nikole')),
('SS', _(u'Sopište')),
('SD', _(u'Star Dojran')),
('NA', _(u'Staro Nagoričane')),
('UG', _(u'Struga')),
('RU', _(u'Strumica')),
('SU', _(u'Studeničani')),
('TR', _(u'Tearce')),
('ET', _(u'Tetovo')),
('CE', _(u'Centar')),
('CZ', _(u'Centar-Župa')),
('CI', _(u'Čair')),
('CA', _(u'Čaška')),
('CH', _(u'Češinovo-Obleševo')),
('CS', _(u'Čučer-Sandevo')),
('ST', _(u'Štip')),
('SO', _(u'Šuto Orizari')),
)
|
agpl-3.0
|
varunarya10/nova_test_latest
|
nova/tests/unit/test_configdrive2.py
|
44
|
4722
|
# Copyright 2012 Michael Still and Canonical Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import tempfile
import mock
from mox3 import mox
from oslo_config import cfg
from oslo_utils import fileutils
from nova import context
from nova import test
from nova.tests.unit import fake_instance
from nova import utils
from nova.virt import configdrive
CONF = cfg.CONF
class FakeInstanceMD(object):
def metadata_for_config_drive(self):
yield ('this/is/a/path/hello', 'This is some content')
class ConfigDriveTestCase(test.NoDBTestCase):
def test_create_configdrive_iso(self):
CONF.set_override('config_drive_format', 'iso9660')
imagefile = None
try:
self.mox.StubOutWithMock(utils, 'execute')
utils.execute('genisoimage', '-o', mox.IgnoreArg(), '-ldots',
'-allow-lowercase', '-allow-multidot', '-l',
'-publisher', mox.IgnoreArg(), '-quiet', '-J', '-r',
'-V', 'config-2', mox.IgnoreArg(), attempts=1,
run_as_root=False).AndReturn(None)
self.mox.ReplayAll()
with configdrive.ConfigDriveBuilder(FakeInstanceMD()) as c:
(fd, imagefile) = tempfile.mkstemp(prefix='cd_iso_')
os.close(fd)
c.make_drive(imagefile)
finally:
if imagefile:
fileutils.delete_if_exists(imagefile)
def test_create_configdrive_vfat(self):
CONF.set_override('config_drive_format', 'vfat')
imagefile = None
try:
self.mox.StubOutWithMock(utils, 'mkfs')
self.mox.StubOutWithMock(utils, 'execute')
self.mox.StubOutWithMock(utils, 'trycmd')
utils.mkfs('vfat', mox.IgnoreArg(),
label='config-2').AndReturn(None)
utils.trycmd('mount', '-o', mox.IgnoreArg(), mox.IgnoreArg(),
mox.IgnoreArg(),
run_as_root=True).AndReturn((None, None))
utils.execute('umount', mox.IgnoreArg(),
run_as_root=True).AndReturn(None)
self.mox.ReplayAll()
with configdrive.ConfigDriveBuilder(FakeInstanceMD()) as c:
(fd, imagefile) = tempfile.mkstemp(prefix='cd_vfat_')
os.close(fd)
c.make_drive(imagefile)
# NOTE(mikal): we can't check for a VFAT output here because the
# filesystem creation stuff has been mocked out because it
# requires root permissions
finally:
if imagefile:
fileutils.delete_if_exists(imagefile)
def test_config_drive_required_by_image_property(self):
inst = fake_instance.fake_instance_obj(context.get_admin_context())
inst.config_drive = ''
inst.system_metadata = {
utils.SM_IMAGE_PROP_PREFIX + 'img_config_drive': 'mandatory'}
self.assertTrue(configdrive.required_by(inst))
inst.system_metadata = {
utils.SM_IMAGE_PROP_PREFIX + 'img_config_drive': 'optional'}
self.assertFalse(configdrive.required_by(inst))
@mock.patch.object(configdrive, 'required_by', return_value=False)
def test_config_drive_update_instance_required_by_false(self,
mock_required):
inst = fake_instance.fake_instance_obj(context.get_admin_context())
inst.config_drive = ''
configdrive.update_instance(inst)
self.assertEqual('', inst.config_drive)
inst.config_drive = True
configdrive.update_instance(inst)
self.assertTrue(inst.config_drive)
@mock.patch.object(configdrive, 'required_by', return_value=True)
def test_config_drive_update_instance(self, mock_required):
inst = fake_instance.fake_instance_obj(context.get_admin_context())
inst.config_drive = ''
configdrive.update_instance(inst)
self.assertTrue(inst.config_drive)
inst.config_drive = True
configdrive.update_instance(inst)
self.assertTrue(inst.config_drive)
|
apache-2.0
|
pbs/django-cms
|
cms/plugins/file/south_migrations/0001_initial.py
|
11
|
1086
|
from south.db import db
from django.db import models
from cms.plugins.file.models import *
class Migration:
depends_on = (
("cms", "0001_initial"),
)
def forwards(self, orm):
# Adding model 'File'
db.create_table('file_file', (
('cmsplugin_ptr', models.OneToOneField(orm['cms.CMSPlugin'])),
('file', models.FileField(_("file"), upload_to=CMSPlugin.get_media_path)),
('title', models.CharField(_("title"), max_length=255, null=True, blank=True)),
))
db.send_create_signal('file', ['File'])
def backwards(self, orm):
# Deleting model 'File'
db.delete_table('file_file')
models = {
'cms.cmsplugin': {
'_stub': True,
'id': ('models.AutoField', [], {'primary_key': 'True'})
},
'cms.page': {
'Meta': {'ordering': "('tree_id','lft')"},
'_stub': True,
'id': ('models.AutoField', [], {'primary_key': 'True'})
}
}
|
bsd-3-clause
|
movmov/cc
|
vendor/boto/boto/pyami/installers/ubuntu/trac.py
|
47
|
6271
|
# Copyright (c) 2008 Chris Moyer http://coredumped.org
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.pyami.installers.ubuntu.installer import Installer
import boto
import os
class Trac(Installer):
"""
Install Trac and DAV-SVN
    Sets up a vhost pointing to [Trac]->home,
    using the config parameter [Trac]->hostname
Sets up a trac environment for every directory found under [Trac]->data_dir
[Trac]
name = My Foo Server
hostname = trac.foo.com
home = /mnt/sites/trac
data_dir = /mnt/trac
svn_dir = /mnt/subversion
server_admin = [email protected]
sdb_auth_domain = users
# Optional
SSLCertificateFile = /mnt/ssl/foo.crt
SSLCertificateKeyFile = /mnt/ssl/foo.key
SSLCertificateChainFile = /mnt/ssl/FooCA.crt
"""
def install(self):
self.run('apt-get -y install trac', notify=True, exit_on_error=True)
self.run('apt-get -y install libapache2-svn', notify=True, exit_on_error=True)
self.run("a2enmod ssl")
self.run("a2enmod mod_python")
self.run("a2enmod dav_svn")
self.run("a2enmod rewrite")
# Make sure that boto.log is writable by everyone so that subversion post-commit hooks can
# write to it.
self.run("touch /var/log/boto.log")
self.run("chmod a+w /var/log/boto.log")
def setup_vhost(self):
domain = boto.config.get("Trac", "hostname").strip()
if domain:
domain_info = domain.split('.')
cnf = open("/etc/apache2/sites-available/%s" % domain_info[0], "w")
cnf.write("NameVirtualHost *:80\n")
if boto.config.get("Trac", "SSLCertificateFile"):
cnf.write("NameVirtualHost *:443\n\n")
cnf.write("<VirtualHost *:80>\n")
cnf.write("\tServerAdmin %s\n" % boto.config.get("Trac", "server_admin").strip())
cnf.write("\tServerName %s\n" % domain)
cnf.write("\tRewriteEngine On\n")
cnf.write("\tRewriteRule ^(.*)$ https://%s$1\n" % domain)
cnf.write("</VirtualHost>\n\n")
cnf.write("<VirtualHost *:443>\n")
else:
cnf.write("<VirtualHost *:80>\n")
cnf.write("\tServerAdmin %s\n" % boto.config.get("Trac", "server_admin").strip())
cnf.write("\tServerName %s\n" % domain)
cnf.write("\tDocumentRoot %s\n" % boto.config.get("Trac", "home").strip())
cnf.write("\t<Directory %s>\n" % boto.config.get("Trac", "home").strip())
cnf.write("\t\tOptions FollowSymLinks Indexes MultiViews\n")
cnf.write("\t\tAllowOverride All\n")
cnf.write("\t\tOrder allow,deny\n")
cnf.write("\t\tallow from all\n")
cnf.write("\t</Directory>\n")
cnf.write("\t<Location />\n")
cnf.write("\t\tAuthType Basic\n")
cnf.write("\t\tAuthName \"%s\"\n" % boto.config.get("Trac", "name"))
cnf.write("\t\tRequire valid-user\n")
cnf.write("\t\tAuthUserFile /mnt/apache/passwd/passwords\n")
cnf.write("\t</Location>\n")
data_dir = boto.config.get("Trac", "data_dir")
for env in os.listdir(data_dir):
if(env[0] != "."):
cnf.write("\t<Location /trac/%s>\n" % env)
cnf.write("\t\tSetHandler mod_python\n")
cnf.write("\t\tPythonInterpreter main_interpreter\n")
cnf.write("\t\tPythonHandler trac.web.modpython_frontend\n")
cnf.write("\t\tPythonOption TracEnv %s/%s\n" % (data_dir, env))
cnf.write("\t\tPythonOption TracUriRoot /trac/%s\n" % env)
cnf.write("\t</Location>\n")
svn_dir = boto.config.get("Trac", "svn_dir")
for env in os.listdir(svn_dir):
if(env[0] != "."):
cnf.write("\t<Location /svn/%s>\n" % env)
cnf.write("\t\tDAV svn\n")
cnf.write("\t\tSVNPath %s/%s\n" % (svn_dir, env))
cnf.write("\t</Location>\n")
cnf.write("\tErrorLog /var/log/apache2/error.log\n")
cnf.write("\tLogLevel warn\n")
cnf.write("\tCustomLog /var/log/apache2/access.log combined\n")
cnf.write("\tServerSignature On\n")
SSLCertificateFile = boto.config.get("Trac", "SSLCertificateFile")
if SSLCertificateFile:
cnf.write("\tSSLEngine On\n")
cnf.write("\tSSLCertificateFile %s\n" % SSLCertificateFile)
SSLCertificateKeyFile = boto.config.get("Trac", "SSLCertificateKeyFile")
if SSLCertificateKeyFile:
cnf.write("\tSSLCertificateKeyFile %s\n" % SSLCertificateKeyFile)
SSLCertificateChainFile = boto.config.get("Trac", "SSLCertificateChainFile")
if SSLCertificateChainFile:
cnf.write("\tSSLCertificateChainFile %s\n" % SSLCertificateChainFile)
cnf.write("</VirtualHost>\n")
cnf.close()
self.run("a2ensite %s" % domain_info[0])
self.run("/etc/init.d/apache2 force-reload")
def main(self):
self.install()
self.setup_vhost()
|
apache-2.0
|
kernc/networkx
|
networkx/linalg/tests/test_graphmatrix.py
|
40
|
4292
|
from nose import SkipTest
import networkx as nx
from networkx.generators.degree_seq import havel_hakimi_graph
class TestGraphMatrix(object):
numpy=1 # nosetests attribute, use nosetests -a 'not numpy' to skip test
@classmethod
def setupClass(cls):
global numpy
global assert_equal
global assert_almost_equal
try:
import numpy
import scipy
from numpy.testing import assert_equal,assert_almost_equal
except ImportError:
raise SkipTest('SciPy not available.')
def setUp(self):
deg=[3,2,2,1,0]
self.G=havel_hakimi_graph(deg)
self.OI=numpy.array([[-1, -1, -1, 0],
[1, 0, 0, -1],
[0, 1, 0, 1],
[0, 0, 1, 0],
[0, 0, 0, 0]])
self.A=numpy.array([[0, 1, 1, 1, 0],
[1, 0, 1, 0, 0],
[1, 1, 0, 0, 0],
[1, 0, 0, 0, 0],
[0, 0, 0, 0, 0]])
self.WG=nx.Graph( (u,v,{'weight':0.5,'other':0.3})
for (u,v) in self.G.edges_iter() )
self.WG.add_node(4)
self.WA=numpy.array([[0 , 0.5, 0.5, 0.5, 0],
[0.5, 0 , 0.5, 0 , 0],
[0.5, 0.5, 0 , 0 , 0],
[0.5, 0 , 0 , 0 , 0],
[0 , 0 , 0 , 0 , 0]])
self.MG=nx.MultiGraph(self.G)
self.MG2=self.MG.copy()
self.MG2.add_edge(0,1)
self.MG2A=numpy.array([[0, 2, 1, 1, 0],
[2, 0, 1, 0, 0],
[1, 1, 0, 0, 0],
[1, 0, 0, 0, 0],
[0, 0, 0, 0, 0]])
self.MGOI=numpy.array([[-1, -1, -1, -1, 0],
[1, 1, 0, 0, -1],
[0, 0, 1, 0, 1],
[0, 0, 0, 1, 0],
[0, 0, 0, 0, 0]])
def test_incidence_matrix(self):
"Conversion to incidence matrix"
assert_equal(nx.incidence_matrix(self.G,oriented=True).todense(),self.OI)
assert_equal(nx.incidence_matrix(self.G).todense(),numpy.abs(self.OI))
assert_equal(nx.incidence_matrix(self.MG,oriented=True).todense(),self.OI)
assert_equal(nx.incidence_matrix(self.MG).todense(),numpy.abs(self.OI))
assert_equal(nx.incidence_matrix(self.MG2,oriented=True).todense(),self.MGOI)
assert_equal(nx.incidence_matrix(self.MG2).todense(),numpy.abs(self.MGOI))
assert_equal(nx.incidence_matrix(self.WG,oriented=True).todense(),self.OI)
assert_equal(nx.incidence_matrix(self.WG).todense(),numpy.abs(self.OI))
assert_equal(nx.incidence_matrix(self.WG,oriented=True,
weight='weight').todense(),0.5*self.OI)
assert_equal(nx.incidence_matrix(self.WG,weight='weight').todense(),
numpy.abs(0.5*self.OI))
assert_equal(nx.incidence_matrix(self.WG,oriented=True,weight='other').todense(),
0.3*self.OI)
WMG=nx.MultiGraph(self.WG)
WMG.add_edge(0,1,attr_dict={'weight':0.5,'other':0.3})
assert_equal(nx.incidence_matrix(WMG,weight='weight').todense(),
numpy.abs(0.5*self.MGOI))
assert_equal(nx.incidence_matrix(WMG,weight='weight',oriented=True).todense(),
0.5*self.MGOI)
assert_equal(nx.incidence_matrix(WMG,weight='other',oriented=True).todense(),
0.3*self.MGOI)
def test_adjacency_matrix(self):
"Conversion to adjacency matrix"
assert_equal(nx.adj_matrix(self.G).todense(),self.A)
assert_equal(nx.adj_matrix(self.MG).todense(),self.A)
assert_equal(nx.adj_matrix(self.MG2).todense(),self.MG2A)
assert_equal(nx.adj_matrix(self.G,nodelist=[0,1]).todense(),self.A[:2,:2])
assert_equal(nx.adj_matrix(self.WG).todense(),self.WA)
assert_equal(nx.adj_matrix(self.WG,weight=None).todense(),self.A)
assert_equal(nx.adj_matrix(self.MG2,weight=None).todense(),self.MG2A)
assert_equal(nx.adj_matrix(self.WG,weight='other').todense(),0.6*self.WA)
|
bsd-3-clause
|
sayanriju/marchfluxmenu
|
onadd.py
|
1
|
1232
|
#!/usr/bin/env python
## Filename : onadd.py
import os
from main import *
import cPickle as pickle
''' Executed when a .desktop file is added, generally on installation of a new package '''
itemdata = 'itemlist.data'
new_file_list = GetLatestFiles('')
for filename in new_file_list:
if fnmatch.fnmatch(filename,'*.desktop'):
try:
item = ParseDesktopFile(filename) # Parse the newest .desktop file
break
except:
pass
else:
continue
init_string, end_string, dic = ParseFluxboxMenu('')
Sort = is_sorted(dic[item.submenu].members)
dic[item.submenu].AppendToSubMenu(item)
list = sortdic(dic)
string = init_string
for m in list :
if m.label not in submenu_dict.keys():
#continue
pass
if m.population != 0:
if m.label == item.submenu:
m.GenerateSubMenu(Sort = Sort)
else:
m.GenerateSubMenu(Sort = is_sorted(m.members))
string += m.body
string += '\n'
string += end_string
filename = os.path.expanduser('~/.fluxbox/menu')
f = file(filename,'w')
f.write(string)
f.close()
## Updating item list for next iteration of daemon
item_list = ListExecItemsFromDesktop('')
f = file(itemdata, 'w')
pickle.dump(item_list, f) # dump the object to a file
f.close()
#print len(item_list)
|
gpl-3.0
|
VirusOnline/VoragineCore
|
dep/libmpq/bindings/python/mpq.py
|
501
|
10430
|
"""wrapper for libmpq"""
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import ctypes
import ctypes.util
import os
libmpq = ctypes.CDLL(ctypes.util.find_library("mpq"))
class Error(Exception):
pass
errors = {
-1: (IOError, "open"),
-2: (IOError, "close"),
-3: (IOError, "seek"),
-4: (IOError, "read"),
-5: (IOError, "write"),
-6: (MemoryError,),
-7: (Error, "file is not an mpq or is corrupted"),
-8: (AssertionError, "not initialized"),
-9: (AssertionError, "buffer size too small"),
-10: (IndexError, "file not in archive"),
-11: (AssertionError, "decrypt"),
-12: (AssertionError, "unpack"),
}
def check_error(result, func, arguments, errors=errors):
try:
error = errors[result]
except KeyError:
return result
else:
raise error[0](*error[1:])
libmpq.libmpq__version.restype = ctypes.c_char_p
libmpq.libmpq__archive_open.errcheck = check_error
libmpq.libmpq__archive_close.errcheck = check_error
libmpq.libmpq__archive_packed_size.errcheck = check_error
libmpq.libmpq__archive_unpacked_size.errcheck = check_error
libmpq.libmpq__archive_offset.errcheck = check_error
libmpq.libmpq__archive_version.errcheck = check_error
libmpq.libmpq__archive_files.errcheck = check_error
libmpq.libmpq__file_packed_size.errcheck = check_error
libmpq.libmpq__file_unpacked_size.errcheck = check_error
libmpq.libmpq__file_offset.errcheck = check_error
libmpq.libmpq__file_blocks.errcheck = check_error
libmpq.libmpq__file_encrypted.errcheck = check_error
libmpq.libmpq__file_compressed.errcheck = check_error
libmpq.libmpq__file_imploded.errcheck = check_error
libmpq.libmpq__file_number.errcheck = check_error
libmpq.libmpq__file_read.errcheck = check_error
libmpq.libmpq__block_open_offset.errcheck = check_error
libmpq.libmpq__block_close_offset.errcheck = check_error
libmpq.libmpq__block_unpacked_size.errcheck = check_error
libmpq.libmpq__block_read.errcheck = check_error
__version__ = libmpq.libmpq__version()
class Reader(object):
def __init__(self, file, libmpq=libmpq):
self._file = file
self._pos = 0
self._buf = []
self._cur_block = 0
libmpq.libmpq__block_open_offset(self._file._archive._mpq,
self._file.number)
def __iter__(self):
return self
def __repr__(self):
return "iter(%r)" % self._file
def seek(self, offset, whence=os.SEEK_SET, os=os):
if whence == os.SEEK_SET:
pass
elif whence == os.SEEK_CUR:
offset += self._pos
elif whence == os.SEEK_END:
offset += self._file.unpacked_size
else:
raise ValueError, "invalid whence"
if offset >= self._pos:
self.read(offset - self._pos)
else:
self._pos = 0
self._buf = []
self._cur_block = 0
self.read(offset)
def tell(self):
return self._pos
def _read_block(self, ctypes=ctypes, libmpq=libmpq):
block_size = ctypes.c_uint64()
libmpq.libmpq__block_unpacked_size(self._file._archive._mpq,
self._file.number, self._cur_block, ctypes.byref(block_size))
block_data = ctypes.create_string_buffer(block_size.value)
libmpq.libmpq__block_read(self._file._archive._mpq,
self._file.number, self._cur_block,
block_data, ctypes.c_uint64(len(block_data)), None)
self._buf.append(block_data.raw)
self._cur_block += 1
def read(self, size=-1):
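        # accumulate unpacked blocks until the buffer covers the request
        # (or the file runs out); size < 0 means read to end of file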
while size < 0 or sum(map(len, self._buf)) < size:
if self._cur_block == self._file.blocks:
break
self._read_block()
buf = "".join(self._buf)
if size < 0:
ret = buf
self._buf = []
else:
ret = buf[:size]
self._buf = [buf[size:]]
self._pos += len(ret)
return ret
def readline(self, os=os):
line = []
while True:
char = self.read(1)
if char == "":
break
if char not in '\r\n' and line and line[-1] in '\r\n':
self.seek(-1, os.SEEK_CUR)
break
line.append(char)
return ''.join(line)
def next(self):
line = self.readline()
if not line:
raise StopIteration
return line
def readlines(self, sizehint=-1):
res = []
while sizehint < 0 or sum(map(len, res)) < sizehint:
line = self.readline()
if not line:
break
res.append(line)
return res
xreadlines = __iter__
def __del__(self, libmpq=libmpq):
libmpq.libmpq__block_close_offset(self._file._archive._mpq,
self._file.number)
class File(object):
def __init__(self, archive, number, ctypes=ctypes, libmpq=libmpq):
self._archive = archive
self.number = number
for name, atype in [
("packed_size", ctypes.c_uint64),
("unpacked_size", ctypes.c_uint64),
("offset", ctypes.c_uint64),
("blocks", ctypes.c_uint32),
("encrypted", ctypes.c_uint32),
("compressed", ctypes.c_uint32),
("imploded", ctypes.c_uint32),
]:
data = atype()
func = getattr(libmpq, "libmpq__file_"+name)
func(self._archive._mpq, self.number, ctypes.byref(data))
setattr(self, name, data.value)
def __str__(self, ctypes=ctypes, libmpq=libmpq):
data = ctypes.create_string_buffer(self.unpacked_size)
libmpq.libmpq__file_read(self._archive._mpq, self.number,
data, ctypes.c_uint64(len(data)), None)
return data.raw
def __repr__(self):
return "%r[%i]" % (self._archive, self.number)
def __iter__(self, Reader=Reader):
return Reader(self)
class Archive(object):
def __init__(self, source, ctypes=ctypes, File=File, libmpq=libmpq):
self._source = source
if isinstance(source, File):
assert not source.encrypted
assert not source.compressed
assert not source.imploded
self.filename = source._archive.filename
offset = source._archive.offset + source.offset
else:
self.filename = source
offset = -1
self._mpq = ctypes.c_void_p()
libmpq.libmpq__archive_open(ctypes.byref(self._mpq), self.filename,
ctypes.c_uint64(offset))
self._opened = True
for field_name, field_type in [
("packed_size", ctypes.c_uint64),
("unpacked_size", ctypes.c_uint64),
("offset", ctypes.c_uint64),
("version", ctypes.c_uint32),
("files", ctypes.c_uint32),
]:
func = getattr(libmpq, "libmpq__archive_" + field_name)
data = field_type()
func(self._mpq, ctypes.byref(data))
setattr(self, field_name, data.value)
def __del__(self, libmpq=libmpq):
if getattr(self, "_opened", False):
libmpq.libmpq__archive_close(self._mpq)
def __len__(self):
return self.files
def __contains__(self, item, ctypes=ctypes, libmpq=libmpq):
if isinstance(item, str):
data = ctypes.c_uint32()
try:
libmpq.libmpq__file_number(self._mpq, ctypes.c_char_p(item),
ctypes.byref(data))
except IndexError:
return False
return True
return 0 <= item < self.files
def __getitem__(self, item, ctypes=ctypes, File=File, libmpq=libmpq):
if isinstance(item, str):
data = ctypes.c_int()
libmpq.libmpq__file_number(self._mpq, ctypes.c_char_p(item),
ctypes.byref(data))
item = data.value
else:
if not 0 <= item < self.files:
raise IndexError, "file not in archive"
return File(self, item)
def __repr__(self):
return "mpq.Archive(%r)" % self._source
# Remove clutter - everything except Error and Archive.
del os, check_error, ctypes, errors, File, libmpq, Reader
if __name__ == "__main__":
import sys, random
archive = Archive(sys.argv[1])
print repr(archive)
for k, v in archive.__dict__.iteritems():
#if k[0] == '_': continue
print " " * (4 - 1), k, v
assert '(listfile)' in archive
assert 0 in archive
assert len(archive) == archive.files
files = [x.strip() for x in archive['(listfile)']]
files.extend(xrange(archive.files))
for key in files: #sys.argv[2:] if sys.argv[2:] else xrange(archive.files):
file = archive[key]
print
print " " * (4 - 1), repr(file)
for k, v in file.__dict__.iteritems():
#if k[0] == '_': continue
print " " * (8 - 1), k, v
a = str(file)
b = iter(file).read()
reader = iter(file)
c = []
while True:
l = random.randrange(1, 10)
d = reader.read(l)
if not d: break
assert len(d) <= l
c.append(d)
c = "".join(c)
d = []
reader.seek(0)
for line in reader:
d.append(line)
d = "".join(d)
assert a == b == c == d, map(hash, [a,b,c,d])
assert len(a) == file.unpacked_size
repr(iter(file))
reader.seek(0)
a = reader.readlines()
reader.seek(0)
b = list(reader)
assert a == b
|
gpl-3.0
|
MarkWh1te/xueqiu_predict
|
python3_env/lib/python3.4/site-packages/sqlalchemy/orm/loading.py
|
7
|
23341
|
# orm/loading.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""private module containing functions used to convert database
rows into object instances and associated state.
the functions here are called primarily by Query, Mapper,
as well as some of the attribute loading strategies.
"""
from __future__ import absolute_import
from .. import util
from . import attributes, exc as orm_exc
from ..sql import util as sql_util
from . import strategy_options
from .util import _none_set, state_str
from .base import _SET_DEFERRED_EXPIRED, _DEFER_FOR_STATE
from .. import exc as sa_exc
import collections
_new_runid = util.counter()
def instances(query, cursor, context):
"""Return an ORM result as an iterator."""
context.runid = _new_runid()
filtered = query._has_mapper_entities
single_entity = len(query._entities) == 1 and \
query._entities[0].supports_single_entity
if filtered:
if single_entity:
filter_fn = id
else:
def filter_fn(row):
return tuple(
id(item)
if ent.use_id_for_hash
else item
for ent, item in zip(query._entities, row)
)
try:
(process, labels) = \
list(zip(*[
query_entity.row_processor(query,
context, cursor)
for query_entity in query._entities
]))
if not single_entity:
keyed_tuple = util.lightweight_named_tuple('result', labels)
while True:
context.partials = {}
if query._yield_per:
fetch = cursor.fetchmany(query._yield_per)
if not fetch:
break
else:
fetch = cursor.fetchall()
if single_entity:
proc = process[0]
rows = [proc(row) for row in fetch]
else:
rows = [keyed_tuple([proc(row) for proc in process])
for row in fetch]
if filtered:
rows = util.unique_list(rows, filter_fn)
for row in rows:
yield row
if not query._yield_per:
break
except Exception as err:
cursor.close()
util.raise_from_cause(err)
@util.dependencies("sqlalchemy.orm.query")
def merge_result(querylib, query, iterator, load=True):
"""Merge a result into this :class:`.Query` object's Session."""
session = query.session
if load:
# flush current contents if we expect to load data
session._autoflush()
autoflush = session.autoflush
try:
session.autoflush = False
single_entity = len(query._entities) == 1
if single_entity:
if isinstance(query._entities[0], querylib._MapperEntity):
result = [session._merge(
attributes.instance_state(instance),
attributes.instance_dict(instance),
load=load, _recursive={}, _resolve_conflict_map={})
for instance in iterator]
else:
result = list(iterator)
else:
mapped_entities = [i for i, e in enumerate(query._entities)
if isinstance(e, querylib._MapperEntity)]
result = []
keys = [ent._label_name for ent in query._entities]
keyed_tuple = util.lightweight_named_tuple('result', keys)
for row in iterator:
newrow = list(row)
for i in mapped_entities:
if newrow[i] is not None:
newrow[i] = session._merge(
attributes.instance_state(newrow[i]),
attributes.instance_dict(newrow[i]),
load=load, _recursive={}, _resolve_conflict_map={})
result.append(keyed_tuple(newrow))
return iter(result)
finally:
session.autoflush = autoflush
def get_from_identity(session, key, passive):
"""Look up the given key in the given session's identity map,
check the object for expired state if found.
"""
instance = session.identity_map.get(key)
if instance is not None:
state = attributes.instance_state(instance)
# expired - ensure it still exists
if state.expired:
if not passive & attributes.SQL_OK:
# TODO: no coverage here
return attributes.PASSIVE_NO_RESULT
elif not passive & attributes.RELATED_OBJECT_OK:
# this mode is used within a flush and the instance's
# expired state will be checked soon enough, if necessary
return instance
try:
state._load_expired(state, passive)
except orm_exc.ObjectDeletedError:
session._remove_newly_deleted([state])
return None
return instance
else:
return None
def load_on_ident(query, key,
refresh_state=None, lockmode=None,
only_load_props=None):
"""Load the given identity key from the database."""
if key is not None:
ident = key[1]
else:
ident = None
if refresh_state is None:
q = query._clone()
q._get_condition()
else:
q = query._clone()
if ident is not None:
mapper = query._mapper_zero()
(_get_clause, _get_params) = mapper._get_clause
# None present in ident - turn those comparisons
# into "IS NULL"
if None in ident:
nones = set([
_get_params[col].key for col, value in
zip(mapper.primary_key, ident) if value is None
])
_get_clause = sql_util.adapt_criterion_to_null(
_get_clause, nones)
_get_clause = q._adapt_clause(_get_clause, True, False)
q._criterion = _get_clause
params = dict([
(_get_params[primary_key].key, id_val)
for id_val, primary_key in zip(ident, mapper.primary_key)
])
q._params = params
if lockmode is not None:
version_check = True
q = q.with_lockmode(lockmode)
elif query._for_update_arg is not None:
version_check = True
q._for_update_arg = query._for_update_arg
else:
version_check = False
q._get_options(
populate_existing=bool(refresh_state),
version_check=version_check,
only_load_props=only_load_props,
refresh_state=refresh_state)
q._order_by = None
try:
return q.one()
except orm_exc.NoResultFound:
return None
def _setup_entity_query(
context, mapper, query_entity,
path, adapter, column_collection,
with_polymorphic=None, only_load_props=None,
polymorphic_discriminator=None, **kw):
if with_polymorphic:
poly_properties = mapper._iterate_polymorphic_properties(
with_polymorphic)
else:
poly_properties = mapper._polymorphic_properties
quick_populators = {}
path.set(
context.attributes,
"memoized_setups",
quick_populators)
for value in poly_properties:
if only_load_props and \
value.key not in only_load_props:
continue
value.setup(
context,
query_entity,
path,
adapter,
only_load_props=only_load_props,
column_collection=column_collection,
memoized_populators=quick_populators,
**kw
)
if polymorphic_discriminator is not None and \
polymorphic_discriminator \
is not mapper.polymorphic_on:
if adapter:
pd = adapter.columns[polymorphic_discriminator]
else:
pd = polymorphic_discriminator
column_collection.append(pd)
def _instance_processor(
mapper, context, result, path, adapter,
only_load_props=None, refresh_state=None,
polymorphic_discriminator=None,
_polymorphic_from=None):
"""Produce a mapper level row processor callable
which processes rows into mapped instances."""
# note that this method, most of which exists in a closure
# called _instance(), resists being broken out, as
# attempts to do so tend to add significant function
# call overhead. _instance() is the most
# performance-critical section in the whole ORM.
pk_cols = mapper.primary_key
if adapter:
pk_cols = [adapter.columns[c] for c in pk_cols]
identity_class = mapper._identity_class
populators = collections.defaultdict(list)
props = mapper._prop_set
if only_load_props is not None:
props = props.intersection(
mapper._props[k] for k in only_load_props)
quick_populators = path.get(
context.attributes, "memoized_setups", _none_set)
for prop in props:
if prop in quick_populators:
# this is an inlined path just for column-based attributes.
col = quick_populators[prop]
if col is _DEFER_FOR_STATE:
populators["new"].append(
(prop.key, prop._deferred_column_loader))
elif col is _SET_DEFERRED_EXPIRED:
# note that in this path, we are no longer
# searching in the result to see if the column might
# be present in some unexpected way.
populators["expire"].append((prop.key, False))
else:
if adapter:
col = adapter.columns[col]
getter = result._getter(col, False)
if getter:
populators["quick"].append((prop.key, getter))
else:
# fall back to the ColumnProperty itself, which
# will iterate through all of its columns
# to see if one fits
prop.create_row_processor(
context, path, mapper, result, adapter, populators)
else:
prop.create_row_processor(
context, path, mapper, result, adapter, populators)
propagate_options = context.propagate_options
if propagate_options:
load_path = context.query._current_path + path \
if context.query._current_path.path else path
session_identity_map = context.session.identity_map
populate_existing = context.populate_existing or mapper.always_refresh
load_evt = bool(mapper.class_manager.dispatch.load)
refresh_evt = bool(mapper.class_manager.dispatch.refresh)
persistent_evt = bool(context.session.dispatch.loaded_as_persistent)
if persistent_evt:
loaded_as_persistent = context.session.dispatch.loaded_as_persistent
instance_state = attributes.instance_state
instance_dict = attributes.instance_dict
session_id = context.session.hash_key
version_check = context.version_check
runid = context.runid
if refresh_state:
refresh_identity_key = refresh_state.key
if refresh_identity_key is None:
# super-rare condition; a refresh is being called
# on a non-instance-key instance; this is meant to only
# occur within a flush()
refresh_identity_key = \
mapper._identity_key_from_state(refresh_state)
else:
refresh_identity_key = None
if mapper.allow_partial_pks:
is_not_primary_key = _none_set.issuperset
else:
is_not_primary_key = _none_set.intersection
def _instance(row):
# determine the state that we'll be populating
if refresh_identity_key:
# fixed state that we're refreshing
state = refresh_state
instance = state.obj()
dict_ = instance_dict(instance)
isnew = state.runid != runid
currentload = True
loaded_instance = False
else:
# look at the row, see if that identity is in the
# session, or we have to create a new one
identitykey = (
identity_class,
tuple([row[column] for column in pk_cols])
)
instance = session_identity_map.get(identitykey)
if instance is not None:
# existing instance
state = instance_state(instance)
dict_ = instance_dict(instance)
isnew = state.runid != runid
currentload = not isnew
loaded_instance = False
if version_check and not currentload:
_validate_version_id(mapper, state, dict_, row, adapter)
else:
# create a new instance
# check for non-NULL values in the primary key columns,
# else no entity is returned for the row
if is_not_primary_key(identitykey[1]):
return None
isnew = True
currentload = True
loaded_instance = True
instance = mapper.class_manager.new_instance()
dict_ = instance_dict(instance)
state = instance_state(instance)
state.key = identitykey
# attach instance to session.
state.session_id = session_id
session_identity_map._add_unpresent(state, identitykey)
# populate. this looks at whether this state is new
# for this load or was existing, and whether or not this
# row is the first row with this identity.
if currentload or populate_existing:
# full population routines. Objects here are either
# just created, or we are doing a populate_existing
if isnew and propagate_options:
state.load_options = propagate_options
state.load_path = load_path
_populate_full(
context, row, state, dict_, isnew,
loaded_instance, populate_existing, populators)
if isnew:
if loaded_instance:
if load_evt:
state.manager.dispatch.load(state, context)
if persistent_evt:
loaded_as_persistent(context.session, state.obj())
elif refresh_evt:
state.manager.dispatch.refresh(
state, context, only_load_props)
if populate_existing or state.modified:
if refresh_state and only_load_props:
state._commit(dict_, only_load_props)
else:
state._commit_all(dict_, session_identity_map)
else:
# partial population routines, for objects that were already
# in the Session, but a row matches them; apply eager loaders
# on existing objects, etc.
unloaded = state.unloaded
isnew = state not in context.partials
if not isnew or unloaded or populators["eager"]:
# state is having a partial set of its attributes
# refreshed. Populate those attributes,
# and add to the "context.partials" collection.
to_load = _populate_partial(
context, row, state, dict_, isnew,
unloaded, populators)
if isnew:
if refresh_evt:
state.manager.dispatch.refresh(
state, context, to_load)
state._commit(dict_, to_load)
return instance
if mapper.polymorphic_map and not _polymorphic_from and not refresh_state:
# if we are doing polymorphic, dispatch to a different _instance()
# method specific to the subclass mapper
_instance = _decorate_polymorphic_switch(
_instance, context, mapper, result, path,
polymorphic_discriminator, adapter)
return _instance
def _populate_full(
context, row, state, dict_, isnew,
loaded_instance, populate_existing, populators):
if isnew:
# first time we are seeing a row with this identity.
state.runid = context.runid
for key, getter in populators["quick"]:
dict_[key] = getter(row)
if populate_existing:
for key, set_callable in populators["expire"]:
dict_.pop(key, None)
if set_callable:
state.expired_attributes.add(key)
else:
for key, set_callable in populators["expire"]:
if set_callable:
state.expired_attributes.add(key)
for key, populator in populators["new"]:
populator(state, dict_, row)
for key, populator in populators["delayed"]:
populator(state, dict_, row)
else:
# have already seen rows with this identity.
for key, populator in populators["existing"]:
populator(state, dict_, row)
def _populate_partial(
context, row, state, dict_, isnew,
unloaded, populators):
if not isnew:
to_load = context.partials[state]
for key, populator in populators["existing"]:
if key in to_load:
populator(state, dict_, row)
else:
to_load = unloaded
context.partials[state] = to_load
for key, getter in populators["quick"]:
if key in to_load:
dict_[key] = getter(row)
for key, set_callable in populators["expire"]:
if key in to_load:
dict_.pop(key, None)
if set_callable:
state.expired_attributes.add(key)
for key, populator in populators["new"]:
if key in to_load:
populator(state, dict_, row)
for key, populator in populators["delayed"]:
if key in to_load:
populator(state, dict_, row)
for key, populator in populators["eager"]:
if key not in unloaded:
populator(state, dict_, row)
return to_load
def _validate_version_id(mapper, state, dict_, row, adapter):
version_id_col = mapper.version_id_col
if version_id_col is None:
return
if adapter:
version_id_col = adapter.columns[version_id_col]
if mapper._get_state_attr_by_column(
state, dict_, mapper.version_id_col) != row[version_id_col]:
raise orm_exc.StaleDataError(
"Instance '%s' has version id '%s' which "
"does not match database-loaded version id '%s'."
% (state_str(state), mapper._get_state_attr_by_column(
state, dict_, mapper.version_id_col),
row[version_id_col]))
def _decorate_polymorphic_switch(
instance_fn, context, mapper, result, path,
polymorphic_discriminator, adapter):
if polymorphic_discriminator is not None:
polymorphic_on = polymorphic_discriminator
else:
polymorphic_on = mapper.polymorphic_on
if polymorphic_on is None:
return instance_fn
if adapter:
polymorphic_on = adapter.columns[polymorphic_on]
def configure_subclass_mapper(discriminator):
try:
sub_mapper = mapper.polymorphic_map[discriminator]
except KeyError:
raise AssertionError(
"No such polymorphic_identity %r is defined" %
discriminator)
else:
if sub_mapper is mapper:
return None
return _instance_processor(
sub_mapper, context, result,
path, adapter, _polymorphic_from=mapper)
polymorphic_instances = util.PopulateDict(
configure_subclass_mapper
)
def polymorphic_instance(row):
discriminator = row[polymorphic_on]
if discriminator is not None:
_instance = polymorphic_instances[discriminator]
if _instance:
return _instance(row)
return instance_fn(row)
return polymorphic_instance
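# Illustrative sketch (comments only; not part of the original module, class
# names are hypothetical). With a joined-table hierarchy such as
#
#     class Employee(Base):
#         type = Column(String(20))
#         __mapper_args__ = {'polymorphic_on': type,
#                            'polymorphic_identity': 'employee'}
#
#     class Manager(Employee):
#         __mapper_args__ = {'polymorphic_identity': 'manager'}
#
# a row whose discriminator column holds 'manager' is routed by
# polymorphic_instance() through configure_subclass_mapper('manager') to the
# Manager-specific _instance(); rows carrying the base identity fall through
# to instance_fn unchanged.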
def load_scalar_attributes(mapper, state, attribute_names):
"""initiate a column-based attribute refresh operation."""
# assert mapper is _state_mapper(state)
session = state.session
if not session:
raise orm_exc.DetachedInstanceError(
"Instance %s is not bound to a Session; "
"attribute refresh operation cannot proceed" %
(state_str(state)))
has_key = bool(state.key)
result = False
if mapper.inherits and not mapper.concrete:
# because we are using Core to produce a select() that we
# pass to the Query, we aren't calling setup() for mapped
# attributes; in 1.0 this means deferred attrs won't get loaded
# by default
statement = mapper._optimized_get_statement(state, attribute_names)
if statement is not None:
result = load_on_ident(
session.query(mapper).
options(
strategy_options.Load(mapper).undefer("*")
).from_statement(statement),
None,
only_load_props=attribute_names,
refresh_state=state
)
if result is False:
if has_key:
identity_key = state.key
else:
# this codepath is rare - only valid when inside a flush, and the
# object is becoming persistent but hasn't yet been assigned
# an identity_key.
# check here to ensure we have the attrs we need.
pk_attrs = [mapper._columntoproperty[col].key
for col in mapper.primary_key]
if state.expired_attributes.intersection(pk_attrs):
raise sa_exc.InvalidRequestError(
"Instance %s cannot be refreshed - it's not "
" persistent and does not "
"contain a full primary key." % state_str(state))
identity_key = mapper._identity_key_from_state(state)
if (_none_set.issubset(identity_key) and
not mapper.allow_partial_pks) or \
_none_set.issuperset(identity_key):
util.warn_limited(
"Instance %s to be refreshed doesn't "
"contain a full primary key - can't be refreshed "
"(and shouldn't be expired, either).",
state_str(state))
return
result = load_on_ident(
session.query(mapper),
identity_key,
refresh_state=state,
only_load_props=attribute_names)
# if instance is pending, a refresh operation
# may not complete (even if PK attributes are assigned)
if has_key and result is None:
raise orm_exc.ObjectDeletedError(state)
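# Illustrative sketch (comments only; not part of the original module): this
# refresh path is what runs when an expired column attribute is touched on a
# persistent object, e.g.
#
#     session.expire(user, ['name'])  # mark 'name' as expired
#     user.name                       # attribute access triggers
#                                     # load_scalar_attributes(mapper, state,
#                                     #                        {'name'})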
|
mit
|
obsh/tornado
|
tornado/template.py
|
15
|
35334
|
#!/usr/bin/env python
#
# Copyright 2009 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""A simple template system that compiles templates to Python code.
Basic usage looks like::
t = template.Template("<html>{{ myvalue }}</html>")
    print(t.generate(myvalue="XXX"))
`Loader` is a class that loads templates from a root directory and caches
the compiled templates::
loader = template.Loader("/home/btaylor")
    print(loader.load("test.html").generate(myvalue="XXX"))
We compile all templates to raw Python. Error-reporting is currently... uh,
interesting. Syntax for the templates::
### base.html
<html>
<head>
<title>{% block title %}Default title{% end %}</title>
</head>
<body>
<ul>
{% for student in students %}
{% block student %}
<li>{{ escape(student.name) }}</li>
{% end %}
{% end %}
</ul>
</body>
</html>
### bold.html
{% extends "base.html" %}
{% block title %}A bolder title{% end %}
{% block student %}
<li><span style="bold">{{ escape(student.name) }}</span></li>
{% end %}
Unlike most other template systems, we do not put any restrictions on the
expressions you can include in your statements. ``if`` and ``for`` blocks get
translated exactly into Python, so you can do complex expressions like::
{% for student in [p for p in people if p.student and p.age > 23] %}
<li>{{ escape(student.name) }}</li>
{% end %}
Translating directly to Python means you can apply functions to expressions
easily, like the ``escape()`` function in the examples above. You can pass
functions in to your template just like any other variable
(In a `.RequestHandler`, override `.RequestHandler.get_template_namespace`)::
### Python code
def add(x, y):
return x + y
    template.generate(add=add)
### The template
{{ add(1, 2) }}
We provide the functions `escape() <.xhtml_escape>`, `.url_escape()`,
`.json_encode()`, and `.squeeze()` to all templates by default.
Typical applications do not create `Template` or `Loader` instances by
hand, but instead use the `~.RequestHandler.render` and
`~.RequestHandler.render_string` methods of
`tornado.web.RequestHandler`, which load templates automatically based
on the ``template_path`` `.Application` setting.
Variable names beginning with ``_tt_`` are reserved by the template
system and should not be used by application code.
Syntax Reference
----------------
Template expressions are surrounded by double curly braces: ``{{ ... }}``.
The contents may be any python expression, which will be escaped according
to the current autoescape setting and inserted into the output. Other
template directives use ``{% %}``.
To comment out a section so that it is omitted from the output, surround it
with ``{# ... #}``.
These tags may be escaped as ``{{!``, ``{%!``, and ``{#!``
if you need to include a literal ``{{``, ``{%``, or ``{#`` in the output.
``{% apply *function* %}...{% end %}``
Applies a function to the output of all template code between ``apply``
and ``end``::
{% apply linkify %}{{name}} said: {{message}}{% end %}
Note that as an implementation detail apply blocks are implemented
as nested functions and thus may interact strangely with variables
set via ``{% set %}``, or the use of ``{% break %}`` or ``{% continue %}``
within loops.
``{% autoescape *function* %}``
Sets the autoescape mode for the current file. This does not affect
other files, even those referenced by ``{% include %}``. Note that
autoescaping can also be configured globally, at the `.Application`
or `Loader`.::
{% autoescape xhtml_escape %}
{% autoescape None %}
``{% block *name* %}...{% end %}``
Indicates a named, replaceable block for use with ``{% extends %}``.
Blocks in the parent template will be replaced with the contents of
the same-named block in a child template.::
<!-- base.html -->
<title>{% block title %}Default title{% end %}</title>
<!-- mypage.html -->
{% extends "base.html" %}
{% block title %}My page title{% end %}
``{% comment ... %}``
A comment which will be removed from the template output. Note that
there is no ``{% end %}`` tag; the comment goes from the word ``comment``
to the closing ``%}`` tag.
``{% extends *filename* %}``
Inherit from another template. Templates that use ``extends`` should
contain one or more ``block`` tags to replace content from the parent
template. Anything in the child template not contained in a ``block``
tag will be ignored. For an example, see the ``{% block %}`` tag.
``{% for *var* in *expr* %}...{% end %}``
Same as the python ``for`` statement. ``{% break %}`` and
``{% continue %}`` may be used inside the loop.
``{% from *x* import *y* %}``
Same as the python ``import`` statement.
``{% if *condition* %}...{% elif *condition* %}...{% else %}...{% end %}``
Conditional statement - outputs the first section whose condition is
true. (The ``elif`` and ``else`` sections are optional)
``{% import *module* %}``
Same as the python ``import`` statement.
``{% include *filename* %}``
Includes another template file. The included file can see all the local
variables as if it were copied directly to the point of the ``include``
directive (the ``{% autoescape %}`` directive is an exception).
Alternately, ``{% module Template(filename, **kwargs) %}`` may be used
to include another template with an isolated namespace.
``{% module *expr* %}``
Renders a `~tornado.web.UIModule`. The output of the ``UIModule`` is
not escaped::
{% module Template("foo.html", arg=42) %}
``UIModules`` are a feature of the `tornado.web.RequestHandler`
class (and specifically its ``render`` method) and will not work
when the template system is used on its own in other contexts.
``{% raw *expr* %}``
Outputs the result of the given expression without autoescaping.
``{% set *x* = *y* %}``
Sets a local variable.
``{% try %}...{% except %}...{% else %}...{% finally %}...{% end %}``
Same as the python ``try`` statement.
``{% while *condition* %}... {% end %}``
Same as the python ``while`` statement. ``{% break %}`` and
``{% continue %}`` may be used inside the loop.
``{% whitespace *mode* %}``
Sets the whitespace mode for the remainder of the current file
(or until the next ``{% whitespace %}`` directive). See
`filter_whitespace` for available options. New in Tornado 4.3.
"""
from __future__ import absolute_import, division, print_function, with_statement
import datetime
import linecache
import os.path
import posixpath
import re
import threading
from tornado import escape
from tornado.log import app_log
from tornado.util import ObjectDict, exec_in, unicode_type, PY3
if PY3:
from io import StringIO
else:
from cStringIO import StringIO
_DEFAULT_AUTOESCAPE = "xhtml_escape"
_UNSET = object()
def filter_whitespace(mode, text):
"""Transform whitespace in ``text`` according to ``mode``.
Available modes are:
* ``all``: Return all whitespace unmodified.
* ``single``: Collapse consecutive whitespace with a single whitespace
character, preserving newlines.
* ``oneline``: Collapse all runs of whitespace into a single space
character, removing all newlines in the process.
.. versionadded:: 4.3
"""
if mode == 'all':
return text
elif mode == 'single':
text = re.sub(r"([\t ]+)", " ", text)
text = re.sub(r"(\s*\n\s*)", "\n", text)
return text
elif mode == 'oneline':
return re.sub(r"(\s+)", " ", text)
else:
raise Exception("invalid whitespace mode %s" % mode)
class Template(object):
"""A compiled template.
We compile into Python from the given template_string. You can generate
the template from variables with generate().
"""
# note that the constructor's signature is not extracted with
# autodoc because _UNSET looks like garbage. When changing
# this signature update website/sphinx/template.rst too.
def __init__(self, template_string, name="<string>", loader=None,
compress_whitespace=_UNSET, autoescape=_UNSET,
whitespace=None):
"""Construct a Template.
:arg str template_string: the contents of the template file.
:arg str name: the filename from which the template was loaded
(used for error message).
:arg tornado.template.BaseLoader loader: the `~tornado.template.BaseLoader` responsible for this template,
used to resolve ``{% include %}`` and ``{% extend %}``
directives.
:arg bool compress_whitespace: Deprecated since Tornado 4.3.
Equivalent to ``whitespace="single"`` if true and
``whitespace="all"`` if false.
:arg str autoescape: The name of a function in the template
namespace, or ``None`` to disable escaping by default.
:arg str whitespace: A string specifying treatment of whitespace;
see `filter_whitespace` for options.
.. versionchanged:: 4.3
Added ``whitespace`` parameter; deprecated ``compress_whitespace``.
"""
self.name = escape.native_str(name)
if compress_whitespace is not _UNSET:
# Convert deprecated compress_whitespace (bool) to whitespace (str).
if whitespace is not None:
raise Exception("cannot set both whitespace and compress_whitespace")
whitespace = "single" if compress_whitespace else "all"
if whitespace is None:
if loader and loader.whitespace:
whitespace = loader.whitespace
else:
# Whitespace defaults by filename.
if name.endswith(".html") or name.endswith(".js"):
whitespace = "single"
else:
whitespace = "all"
# Validate the whitespace setting.
filter_whitespace(whitespace, '')
if autoescape is not _UNSET:
self.autoescape = autoescape
elif loader:
self.autoescape = loader.autoescape
else:
self.autoescape = _DEFAULT_AUTOESCAPE
self.namespace = loader.namespace if loader else {}
reader = _TemplateReader(name, escape.native_str(template_string),
whitespace)
self.file = _File(self, _parse(reader, self))
self.code = self._generate_python(loader)
self.loader = loader
try:
# Under python2.5, the fake filename used here must match
# the module name used in __name__ below.
# The dont_inherit flag prevents template.py's future imports
# from being applied to the generated code.
self.compiled = compile(
escape.to_unicode(self.code),
"%s.generated.py" % self.name.replace('.', '_'),
"exec", dont_inherit=True)
except Exception:
formatted_code = _format_code(self.code).rstrip()
app_log.error("%s code:\n%s", self.name, formatted_code)
raise
def generate(self, **kwargs):
"""Generate this template with the given arguments."""
namespace = {
"escape": escape.xhtml_escape,
"xhtml_escape": escape.xhtml_escape,
"url_escape": escape.url_escape,
"json_encode": escape.json_encode,
"squeeze": escape.squeeze,
"linkify": escape.linkify,
"datetime": datetime,
"_tt_utf8": escape.utf8, # for internal use
"_tt_string_types": (unicode_type, bytes),
# __name__ and __loader__ allow the traceback mechanism to find
# the generated source code.
"__name__": self.name.replace('.', '_'),
"__loader__": ObjectDict(get_source=lambda name: self.code),
}
namespace.update(self.namespace)
namespace.update(kwargs)
exec_in(self.compiled, namespace)
execute = namespace["_tt_execute"]
# Clear the traceback module's cache of source data now that
# we've generated a new template (mainly for this module's
# unittests, where different tests reuse the same name).
linecache.clearcache()
return execute()
def _generate_python(self, loader):
buffer = StringIO()
try:
# named_blocks maps from names to _NamedBlock objects
named_blocks = {}
ancestors = self._get_ancestors(loader)
ancestors.reverse()
for ancestor in ancestors:
ancestor.find_named_blocks(loader, named_blocks)
writer = _CodeWriter(buffer, named_blocks, loader,
ancestors[0].template)
ancestors[0].generate(writer)
return buffer.getvalue()
finally:
buffer.close()
def _get_ancestors(self, loader):
ancestors = [self.file]
for chunk in self.file.body.chunks:
if isinstance(chunk, _ExtendsBlock):
if not loader:
raise ParseError("{% extends %} block found, but no "
"template loader")
template = loader.load(chunk.name, self.name)
ancestors.extend(template._get_ancestors(loader))
return ancestors
class BaseLoader(object):
"""Base class for template loaders.
You must use a template loader to use template constructs like
``{% extends %}`` and ``{% include %}``. The loader caches all
templates after they are loaded the first time.
"""
def __init__(self, autoescape=_DEFAULT_AUTOESCAPE, namespace=None,
whitespace=None):
"""Construct a template loader.
:arg str autoescape: The name of a function in the template
namespace, such as "xhtml_escape", or ``None`` to disable
autoescaping by default.
:arg dict namespace: A dictionary to be added to the default template
namespace, or ``None``.
:arg str whitespace: A string specifying default behavior for
whitespace in templates; see `filter_whitespace` for options.
Default is "single" for files ending in ".html" and ".js" and
"all" for other files.
.. versionchanged:: 4.3
Added ``whitespace`` parameter.
"""
self.autoescape = autoescape
self.namespace = namespace or {}
self.whitespace = whitespace
self.templates = {}
# self.lock protects self.templates. It's a reentrant lock
# because templates may load other templates via `include` or
# `extends`. Note that thanks to the GIL this code would be safe
# even without the lock, but could lead to wasted work as multiple
# threads tried to compile the same template simultaneously.
self.lock = threading.RLock()
def reset(self):
"""Resets the cache of compiled templates."""
with self.lock:
self.templates = {}
def resolve_path(self, name, parent_path=None):
"""Converts a possibly-relative path to absolute (used internally)."""
raise NotImplementedError()
def load(self, name, parent_path=None):
"""Loads a template."""
name = self.resolve_path(name, parent_path=parent_path)
with self.lock:
if name not in self.templates:
self.templates[name] = self._create_template(name)
return self.templates[name]
def _create_template(self, name):
raise NotImplementedError()
class Loader(BaseLoader):
"""A template loader that loads from a single root directory.
"""
def __init__(self, root_directory, **kwargs):
super(Loader, self).__init__(**kwargs)
self.root = os.path.abspath(root_directory)
def resolve_path(self, name, parent_path=None):
if parent_path and not parent_path.startswith("<") and \
not parent_path.startswith("/") and \
not name.startswith("/"):
current_path = os.path.join(self.root, parent_path)
file_dir = os.path.dirname(os.path.abspath(current_path))
relative_path = os.path.abspath(os.path.join(file_dir, name))
if relative_path.startswith(self.root):
name = relative_path[len(self.root) + 1:]
return name
def _create_template(self, name):
path = os.path.join(self.root, name)
with open(path, "rb") as f:
template = Template(f.read(), name=name, loader=self)
return template
class DictLoader(BaseLoader):
"""A template loader that loads from a dictionary."""
def __init__(self, dict, **kwargs):
super(DictLoader, self).__init__(**kwargs)
self.dict = dict
def resolve_path(self, name, parent_path=None):
if parent_path and not parent_path.startswith("<") and \
not parent_path.startswith("/") and \
not name.startswith("/"):
file_dir = posixpath.dirname(parent_path)
name = posixpath.normpath(posixpath.join(file_dir, name))
return name
def _create_template(self, name):
return Template(self.dict[name], name=name, loader=self)
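# Illustrative usage (comments only; not part of the original file): DictLoader
# makes it easy to exercise {% extends %} without touching the filesystem:
#
#     loader = DictLoader({
#         "base.html": "<title>{% block title %}Default{% end %}</title>",
#         "page.html": ('{% extends "base.html" %}'
#                       '{% block title %}My page{% end %}'),
#     })
#     print(loader.load("page.html").generate())  # b'<title>My page</title>'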
class _Node(object):
def each_child(self):
return ()
def generate(self, writer):
raise NotImplementedError()
def find_named_blocks(self, loader, named_blocks):
for child in self.each_child():
child.find_named_blocks(loader, named_blocks)
class _File(_Node):
def __init__(self, template, body):
self.template = template
self.body = body
self.line = 0
def generate(self, writer):
writer.write_line("def _tt_execute():", self.line)
with writer.indent():
writer.write_line("_tt_buffer = []", self.line)
writer.write_line("_tt_append = _tt_buffer.append", self.line)
self.body.generate(writer)
writer.write_line("return _tt_utf8('').join(_tt_buffer)", self.line)
def each_child(self):
return (self.body,)
class _ChunkList(_Node):
def __init__(self, chunks):
self.chunks = chunks
def generate(self, writer):
for chunk in self.chunks:
chunk.generate(writer)
def each_child(self):
return self.chunks
class _NamedBlock(_Node):
def __init__(self, name, body, template, line):
self.name = name
self.body = body
self.template = template
self.line = line
def each_child(self):
return (self.body,)
def generate(self, writer):
block = writer.named_blocks[self.name]
with writer.include(block.template, self.line):
block.body.generate(writer)
def find_named_blocks(self, loader, named_blocks):
named_blocks[self.name] = self
_Node.find_named_blocks(self, loader, named_blocks)
class _ExtendsBlock(_Node):
def __init__(self, name):
self.name = name
class _IncludeBlock(_Node):
def __init__(self, name, reader, line):
self.name = name
self.template_name = reader.name
self.line = line
def find_named_blocks(self, loader, named_blocks):
included = loader.load(self.name, self.template_name)
included.file.find_named_blocks(loader, named_blocks)
def generate(self, writer):
included = writer.loader.load(self.name, self.template_name)
with writer.include(included, self.line):
included.file.body.generate(writer)
class _ApplyBlock(_Node):
def __init__(self, method, line, body=None):
self.method = method
self.line = line
self.body = body
def each_child(self):
return (self.body,)
def generate(self, writer):
method_name = "_tt_apply%d" % writer.apply_counter
writer.apply_counter += 1
writer.write_line("def %s():" % method_name, self.line)
with writer.indent():
writer.write_line("_tt_buffer = []", self.line)
writer.write_line("_tt_append = _tt_buffer.append", self.line)
self.body.generate(writer)
writer.write_line("return _tt_utf8('').join(_tt_buffer)", self.line)
writer.write_line("_tt_append(_tt_utf8(%s(%s())))" % (
self.method, method_name), self.line)
class _ControlBlock(_Node):
def __init__(self, statement, line, body=None):
self.statement = statement
self.line = line
self.body = body
def each_child(self):
return (self.body,)
def generate(self, writer):
writer.write_line("%s:" % self.statement, self.line)
with writer.indent():
self.body.generate(writer)
# Just in case the body was empty
writer.write_line("pass", self.line)
class _IntermediateControlBlock(_Node):
def __init__(self, statement, line):
self.statement = statement
self.line = line
def generate(self, writer):
# In case the previous block was empty
writer.write_line("pass", self.line)
writer.write_line("%s:" % self.statement, self.line, writer.indent_size() - 1)
class _Statement(_Node):
def __init__(self, statement, line):
self.statement = statement
self.line = line
def generate(self, writer):
writer.write_line(self.statement, self.line)
class _Expression(_Node):
def __init__(self, expression, line, raw=False):
self.expression = expression
self.line = line
self.raw = raw
def generate(self, writer):
writer.write_line("_tt_tmp = %s" % self.expression, self.line)
writer.write_line("if isinstance(_tt_tmp, _tt_string_types):"
" _tt_tmp = _tt_utf8(_tt_tmp)", self.line)
writer.write_line("else: _tt_tmp = _tt_utf8(str(_tt_tmp))", self.line)
if not self.raw and writer.current_template.autoescape is not None:
# In python3 functions like xhtml_escape return unicode,
# so we have to convert to utf8 again.
writer.write_line("_tt_tmp = _tt_utf8(%s(_tt_tmp))" %
writer.current_template.autoescape, self.line)
writer.write_line("_tt_append(_tt_tmp)", self.line)
class _Module(_Expression):
def __init__(self, expression, line):
super(_Module, self).__init__("_tt_modules." + expression, line,
raw=True)
class _Text(_Node):
def __init__(self, value, line, whitespace):
self.value = value
self.line = line
self.whitespace = whitespace
def generate(self, writer):
value = self.value
# Compress whitespace if requested, with a crude heuristic to avoid
# altering preformatted whitespace.
if "<pre>" not in value:
value = filter_whitespace(self.whitespace, value)
if value:
writer.write_line('_tt_append(%r)' % escape.utf8(value), self.line)
class ParseError(Exception):
"""Raised for template syntax errors.
``ParseError`` instances have ``filename`` and ``lineno`` attributes
indicating the position of the error.
.. versionchanged:: 4.3
Added ``filename`` and ``lineno`` attributes.
"""
def __init__(self, message, filename=None, lineno=0):
self.message = message
# The names "filename" and "lineno" are chosen for consistency
# with python SyntaxError.
self.filename = filename
self.lineno = lineno
def __str__(self):
return '%s at %s:%d' % (self.message, self.filename, self.lineno)
class _CodeWriter(object):
def __init__(self, file, named_blocks, loader, current_template):
self.file = file
self.named_blocks = named_blocks
self.loader = loader
self.current_template = current_template
self.apply_counter = 0
self.include_stack = []
self._indent = 0
def indent_size(self):
return self._indent
def indent(self):
class Indenter(object):
def __enter__(_):
self._indent += 1
return self
def __exit__(_, *args):
assert self._indent > 0
self._indent -= 1
return Indenter()
def include(self, template, line):
self.include_stack.append((self.current_template, line))
self.current_template = template
class IncludeTemplate(object):
def __enter__(_):
return self
def __exit__(_, *args):
self.current_template = self.include_stack.pop()[0]
return IncludeTemplate()
def write_line(self, line, line_number, indent=None):
if indent is None:
indent = self._indent
line_comment = ' # %s:%d' % (self.current_template.name, line_number)
if self.include_stack:
ancestors = ["%s:%d" % (tmpl.name, lineno)
for (tmpl, lineno) in self.include_stack]
line_comment += ' (via %s)' % ', '.join(reversed(ancestors))
print(" " * indent + line + line_comment, file=self.file)
class _TemplateReader(object):
def __init__(self, name, text, whitespace):
self.name = name
self.text = text
self.whitespace = whitespace
self.line = 1
self.pos = 0
def find(self, needle, start=0, end=None):
assert start >= 0, start
pos = self.pos
start += pos
if end is None:
index = self.text.find(needle, start)
else:
end += pos
assert end >= start
index = self.text.find(needle, start, end)
if index != -1:
index -= pos
return index
def consume(self, count=None):
if count is None:
count = len(self.text) - self.pos
newpos = self.pos + count
self.line += self.text.count("\n", self.pos, newpos)
s = self.text[self.pos:newpos]
self.pos = newpos
return s
def remaining(self):
return len(self.text) - self.pos
def __len__(self):
return self.remaining()
def __getitem__(self, key):
if type(key) is slice:
size = len(self)
start, stop, step = key.indices(size)
if start is None:
start = self.pos
else:
start += self.pos
if stop is not None:
stop += self.pos
return self.text[slice(start, stop, step)]
elif key < 0:
return self.text[key]
else:
return self.text[self.pos + key]
def __str__(self):
return self.text[self.pos:]
def raise_parse_error(self, msg):
raise ParseError(msg, self.name, self.line)
def _format_code(code):
lines = code.splitlines()
format = "%%%dd %%s\n" % len(repr(len(lines) + 1))
return "".join([format % (i + 1, line) for (i, line) in enumerate(lines)])
def _parse(reader, template, in_block=None, in_loop=None):
body = _ChunkList([])
while True:
# Find next template directive
curly = 0
while True:
curly = reader.find("{", curly)
if curly == -1 or curly + 1 == reader.remaining():
# EOF
if in_block:
reader.raise_parse_error(
"Missing {%% end %%} block for %s" % in_block)
body.chunks.append(_Text(reader.consume(), reader.line,
reader.whitespace))
return body
# If the first curly brace is not the start of a special token,
# start searching from the character after it
if reader[curly + 1] not in ("{", "%", "#"):
curly += 1
continue
# When there are more than 2 curlies in a row, use the
# innermost ones. This is useful when generating languages
# like latex where curlies are also meaningful
if (curly + 2 < reader.remaining() and
reader[curly + 1] == '{' and reader[curly + 2] == '{'):
curly += 1
continue
break
# Append any text before the special token
if curly > 0:
cons = reader.consume(curly)
body.chunks.append(_Text(cons, reader.line,
reader.whitespace))
start_brace = reader.consume(2)
line = reader.line
# Template directives may be escaped as "{{!" or "{%!".
# In this case output the braces and consume the "!".
# This is especially useful in conjunction with jquery templates,
# which also use double braces.
if reader.remaining() and reader[0] == "!":
reader.consume(1)
body.chunks.append(_Text(start_brace, line,
reader.whitespace))
continue
# Comment
if start_brace == "{#":
end = reader.find("#}")
if end == -1:
reader.raise_parse_error("Missing end comment #}")
contents = reader.consume(end).strip()
reader.consume(2)
continue
# Expression
if start_brace == "{{":
end = reader.find("}}")
if end == -1:
reader.raise_parse_error("Missing end expression }}")
contents = reader.consume(end).strip()
reader.consume(2)
if not contents:
reader.raise_parse_error("Empty expression")
body.chunks.append(_Expression(contents, line))
continue
# Block
assert start_brace == "{%", start_brace
end = reader.find("%}")
if end == -1:
reader.raise_parse_error("Missing end block %}")
contents = reader.consume(end).strip()
reader.consume(2)
if not contents:
reader.raise_parse_error("Empty block tag ({% %})")
operator, space, suffix = contents.partition(" ")
suffix = suffix.strip()
# Intermediate ("else", "elif", etc) blocks
intermediate_blocks = {
"else": set(["if", "for", "while", "try"]),
"elif": set(["if"]),
"except": set(["try"]),
"finally": set(["try"]),
}
allowed_parents = intermediate_blocks.get(operator)
if allowed_parents is not None:
if not in_block:
reader.raise_parse_error("%s outside %s block" %
(operator, allowed_parents))
if in_block not in allowed_parents:
reader.raise_parse_error(
"%s block cannot be attached to %s block" %
(operator, in_block))
body.chunks.append(_IntermediateControlBlock(contents, line))
continue
# End tag
elif operator == "end":
if not in_block:
reader.raise_parse_error("Extra {% end %} block")
return body
elif operator in ("extends", "include", "set", "import", "from",
"comment", "autoescape", "whitespace", "raw",
"module"):
if operator == "comment":
continue
if operator == "extends":
suffix = suffix.strip('"').strip("'")
if not suffix:
reader.raise_parse_error("extends missing file path")
block = _ExtendsBlock(suffix)
elif operator in ("import", "from"):
if not suffix:
reader.raise_parse_error("import missing statement")
block = _Statement(contents, line)
elif operator == "include":
suffix = suffix.strip('"').strip("'")
if not suffix:
reader.raise_parse_error("include missing file path")
block = _IncludeBlock(suffix, reader, line)
elif operator == "set":
if not suffix:
reader.raise_parse_error("set missing statement")
block = _Statement(suffix, line)
elif operator == "autoescape":
fn = suffix.strip()
if fn == "None":
fn = None
template.autoescape = fn
continue
elif operator == "whitespace":
mode = suffix.strip()
# Validate the selected mode
filter_whitespace(mode, '')
reader.whitespace = mode
continue
elif operator == "raw":
block = _Expression(suffix, line, raw=True)
elif operator == "module":
block = _Module(suffix, line)
body.chunks.append(block)
continue
elif operator in ("apply", "block", "try", "if", "for", "while"):
# parse inner body recursively
if operator in ("for", "while"):
block_body = _parse(reader, template, operator, operator)
elif operator == "apply":
# apply creates a nested function so syntactically it's not
# in the loop.
block_body = _parse(reader, template, operator, None)
else:
block_body = _parse(reader, template, operator, in_loop)
if operator == "apply":
if not suffix:
reader.raise_parse_error("apply missing method name")
block = _ApplyBlock(suffix, line, block_body)
elif operator == "block":
if not suffix:
reader.raise_parse_error("block missing name")
block = _NamedBlock(suffix, block_body, template, line)
else:
block = _ControlBlock(contents, line, block_body)
body.chunks.append(block)
continue
elif operator in ("break", "continue"):
if not in_loop:
reader.raise_parse_error("%s outside %s block" %
(operator, set(["for", "while"])))
body.chunks.append(_Statement(contents, line))
continue
else:
reader.raise_parse_error("unknown operator: %r" % operator)
|
apache-2.0
|
Srisai85/scipy
|
scipy/signal/_arraytools.py
|
91
|
5145
|
"""
Functions for acting on an axis of an array.
"""
from __future__ import division, print_function, absolute_import
import numpy as np
def axis_slice(a, start=None, stop=None, step=None, axis=-1):
"""Take a slice along axis 'axis' from 'a'.
Parameters
----------
a : numpy.ndarray
The array to be sliced.
start, stop, step : int or None
The slice parameters.
axis : int, optional
The axis of `a` to be sliced.
Examples
--------
    >>> a = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]])
>>> axis_slice(a, start=0, stop=1, axis=1)
array([[1],
[4],
[7]])
>>> axis_slice(a, start=1, axis=0)
array([[4, 5, 6],
[7, 8, 9]])
Notes
-----
The keyword arguments start, stop and step are used by calling
slice(start, stop, step). This implies axis_slice() does not
    handle its arguments exactly the same as indexing. To select
a single index k, for example, use
axis_slice(a, start=k, stop=k+1)
In this case, the length of the axis 'axis' in the result will
be 1; the trivial dimension is not removed. (Use numpy.squeeze()
to remove trivial axes.)
"""
a_slice = [slice(None)] * a.ndim
a_slice[axis] = slice(start, stop, step)
b = a[a_slice]
return b
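# Illustration (comments only; not part of the original file):
#
#     a = np.array([[1, 2, 3], [4, 5, 6]])
#     axis_slice(a, start=1, stop=2, axis=1)  # -> array([[2], [5]])
#     # the sliced axis keeps length 1; np.squeeze() drops it if unwanted.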
def axis_reverse(a, axis=-1):
"""Reverse the 1-d slices of `a` along axis `axis`.
Returns axis_slice(a, step=-1, axis=axis).
"""
return axis_slice(a, step=-1, axis=axis)
def odd_ext(x, n, axis=-1):
"""Generate a new ndarray by making an odd extension of x along an axis.
Parameters
----------
x : ndarray
The array to be extended.
n : int
The number of elements by which to extend x at each end of the axis.
axis : int, optional
The axis along which to extend x. Default is -1.
Examples
--------
    >>> a = np.array([[1.0, 2.0, 3.0, 4.0, 5.0], [0.0, 1.0, 4.0, 9.0, 16.0]])
    >>> odd_ext(a, 2)
    array([[-1., 0., 1., 2., 3., 4., 5., 6., 7.],
           [-4., -1., 0., 1., 4., 9., 16., 23., 28.]])
"""
if n < 1:
return x
if n > x.shape[axis] - 1:
raise ValueError(("The extension length n (%d) is too big. " +
"It must not exceed x.shape[axis]-1, which is %d.")
% (n, x.shape[axis] - 1))
left_end = axis_slice(x, start=0, stop=1, axis=axis)
left_ext = axis_slice(x, start=n, stop=0, step=-1, axis=axis)
right_end = axis_slice(x, start=-1, axis=axis)
right_ext = axis_slice(x, start=-2, stop=-(n + 2), step=-1, axis=axis)
ext = np.concatenate((2 * left_end - left_ext,
x,
2 * right_end - right_ext),
axis=axis)
return ext
def even_ext(x, n, axis=-1):
"""Create an ndarray that is an even extension of x along an axis.
Parameters
----------
x : ndarray
The array to be extended.
n : int
The number of elements by which to extend x at each end of the axis.
axis : int, optional
The axis along which to extend x. Default is -1.
Examples
--------
    >>> a = np.array([[1.0, 2.0, 3.0, 4.0, 5.0], [0.0, 1.0, 4.0, 9.0, 16.0]])
    >>> even_ext(a, 2)
    array([[ 3., 2., 1., 2., 3., 4., 5., 4., 3.],
           [ 4., 1., 0., 1., 4., 9., 16., 9., 4.]])
"""
if n < 1:
return x
if n > x.shape[axis] - 1:
raise ValueError(("The extension length n (%d) is too big. " +
"It must not exceed x.shape[axis]-1, which is %d.")
% (n, x.shape[axis] - 1))
left_ext = axis_slice(x, start=n, stop=0, step=-1, axis=axis)
right_ext = axis_slice(x, start=-2, stop=-(n + 2), step=-1, axis=axis)
ext = np.concatenate((left_ext,
x,
right_ext),
axis=axis)
return ext
def const_ext(x, n, axis=-1):
"""Create an ndarray that is a constant extension of x along an axis.
The extension repeats the values at the first and last element of
the axis.
Parameters
----------
x : ndarray
The array to be extended.
n : int
The number of elements by which to extend x at each end of the axis.
axis : int, optional
The axis along which to extend x. Default is -1.
Examples
--------
    >>> a = np.array([[1.0, 2.0, 3.0, 4.0, 5.0], [0.0, 1.0, 4.0, 9.0, 16.0]])
    >>> const_ext(a, 2)
    array([[ 1., 1., 1., 2., 3., 4., 5., 5., 5.],
           [ 0., 0., 0., 1., 4., 9., 16., 16., 16.]])
"""
if n < 1:
return x
left_end = axis_slice(x, start=0, stop=1, axis=axis)
ones_shape = [1] * x.ndim
ones_shape[axis] = n
ones = np.ones(ones_shape, dtype=x.dtype)
left_ext = ones * left_end
right_end = axis_slice(x, start=-1, axis=axis)
right_ext = ones * right_end
ext = np.concatenate((left_ext,
x,
right_ext),
axis=axis)
return ext
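# Side-by-side sketch (comments only; not part of the original file): for
# x = np.array([1., 2., 3., 4., 5.]) and n = 2:
#
#     odd_ext(x, 2)    -> [-1., 0., 1., 2., 3., 4., 5., 6., 7.]
#     even_ext(x, 2)   -> [ 3., 2., 1., 2., 3., 4., 5., 4., 3.]
#     const_ext(x, 2)  -> [ 1., 1., 1., 2., 3., 4., 5., 5., 5.]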
|
bsd-3-clause
|
adityacs/ansible
|
lib/ansible/modules/storage/netapp/netapp_e_lun_mapping.py
|
48
|
12304
|
#!/usr/bin/python
# (c) 2016, NetApp, Inc
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: netapp_e_lun_mapping
author: Kevin Hulquest (@hulquest)
short_description: Create or Remove LUN Mappings
description:
- Allows for the creation and removal of volume to host mappings for NetApp E-series storage arrays.
version_added: "2.2"
options:
validate_certs:
required: false
default: true
description:
- Should https certificates be validated?
ssid:
description:
- "The storage system array identifier."
required: False
lun:
description:
- The LUN number you wish to give the mapping
- If the supplied I(volume_name) is associated with a different LUN, it will be updated to what is supplied here.
required: False
default: 0
target:
description:
- The name of host or hostgroup you wish to assign to the mapping
- If omitted, the default hostgroup is used.
- If the supplied I(volume_name) is associated with a different target, it will be updated to what is supplied here.
required: False
volume_name:
description:
- The name of the volume you wish to include in the mapping.
required: True
target_type:
description:
- Whether the target is a host or group.
- Required if supplying an explicit target.
required: False
choices: ["host", "group"]
state:
description:
- Present will ensure the mapping exists, absent will remove the mapping.
- All parameters I(lun), I(target), I(target_type) and I(volume_name) must still be supplied.
required: True
choices: ["present", "absent"]
api_url:
description:
- "The full API url. Example: http://ENDPOINT:8080/devmgr/v2"
- This can optionally be set via an environment variable, API_URL
required: False
api_username:
description:
- The username used to authenticate against the API. This can optionally be set via an environment variable, API_USERNAME
required: False
api_password:
description:
- The password used to authenticate against the API. This can optionally be set via an environment variable, API_PASSWORD
required: False
'''
EXAMPLES = '''
---
- name: Lun Mapping Example
netapp_e_lun_mapping:
state: present
ssid: 1
lun: 12
target: Wilson
volume_name: Colby1
target_type: group
api_url: "{{ netapp_api_url }}"
api_username: "{{ netapp_api_username }}"
api_password: "{{ netapp_api_password }}"
'''
RETURN = '''
msg: Mapping exists.
msg: Mapping removed.
'''
import json
from ansible.module_utils.api import basic_auth_argument_spec
from ansible.module_utils.pycompat24 import get_exception
from ansible.module_utils.urls import open_url
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six.moves.urllib.error import HTTPError
HEADERS = {
"Content-Type": "application/json",
"Accept": "application/json"
}
def request(url, data=None, headers=None, method='GET', use_proxy=True,
force=False, last_mod_time=None, timeout=10, validate_certs=True,
url_username=None, url_password=None, http_agent=None, force_basic_auth=True, ignore_errors=False):
try:
r = open_url(url=url, data=data, headers=headers, method=method, use_proxy=use_proxy,
force=force, last_mod_time=last_mod_time, timeout=timeout, validate_certs=validate_certs,
url_username=url_username, url_password=url_password, http_agent=http_agent,
force_basic_auth=force_basic_auth)
except HTTPError:
err = get_exception()
r = err.fp
try:
raw_data = r.read()
if raw_data:
data = json.loads(raw_data)
else:
raw_data = None
except:
if ignore_errors:
pass
else:
raise Exception(raw_data)
resp_code = r.getcode()
if resp_code >= 400 and not ignore_errors:
raise Exception(resp_code, data)
else:
return resp_code, data
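# Illustrative call (comments only; the endpoint and credentials below are
# hypothetical, not from the original module):
#
#     rc, data = request('http://localhost:8080/devmgr/v2/storage-systems',
#                        headers=HEADERS, url_username='admin',
#                        url_password='secret')
#     # rc is the HTTP status code; data is the decoded JSON body (or None).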
def get_host_and_group_map(module, ssid, api_url, user, pwd):
mapping = dict(host=dict(), group=dict())
hostgroups = 'storage-systems/%s/host-groups' % ssid
groups_url = api_url + hostgroups
try:
hg_rc, hg_data = request(groups_url, headers=HEADERS, url_username=user, url_password=pwd)
except:
err = get_exception()
module.fail_json(msg="Failed to get host groups. Id [%s]. Error [%s]" % (ssid, str(err)))
for group in hg_data:
mapping['group'][group['name']] = group['id']
hosts = 'storage-systems/%s/hosts' % ssid
hosts_url = api_url + hosts
try:
h_rc, h_data = request(hosts_url, headers=HEADERS, url_username=user, url_password=pwd)
except:
err = get_exception()
module.fail_json(msg="Failed to get hosts. Id [%s]. Error [%s]" % (ssid, str(err)))
for host in h_data:
mapping['host'][host['name']] = host['id']
return mapping
def get_volume_id(module, data, ssid, name, api_url, user, pwd):
qty = 0
for volume in data:
if volume['name'] == name:
qty += 1
if qty > 1:
module.fail_json(msg="More than one volume with the name: %s was found, "
"please use the volume WWN instead" % name)
else:
wwn = volume['wwn']
try:
return wwn
except NameError:
module.fail_json(msg="No volume with the name: %s, was found" % (name))
def get_hostgroups(module, ssid, api_url, user, pwd):
groups = "storage-systems/%s/host-groups" % ssid
url = api_url + groups
try:
rc, data = request(url, headers=HEADERS, url_username=user, url_password=pwd)
return data
except Exception:
module.fail_json(msg="There was an issue with connecting, please check that your"
"endpoint is properly defined and your credentials are correct")
def get_volumes(module, ssid, api_url, user, pwd, mappable):
volumes = 'storage-systems/%s/%s' % (ssid, mappable)
url = api_url + volumes
try:
rc, data = request(url, url_username=user, url_password=pwd)
except Exception:
err = get_exception()
module.fail_json(
msg="Failed to mappable objects. Type[%s. Id [%s]. Error [%s]." % (mappable, ssid, str(err)))
return data
def get_lun_mappings(ssid, api_url, user, pwd, get_all=None):
mappings = 'storage-systems/%s/volume-mappings' % ssid
url = api_url + mappings
rc, data = request(url, url_username=user, url_password=pwd)
if not get_all:
remove_keys = ('ssid', 'perms', 'lunMappingRef', 'type', 'id')
for key in remove_keys:
for mapping in data:
del mapping[key]
return data
def create_mapping(module, ssid, lun_map, vol_name, api_url, user, pwd):
mappings = 'storage-systems/%s/volume-mappings' % ssid
url = api_url + mappings
post_body = json.dumps(dict(
mappableObjectId=lun_map['volumeRef'],
targetId=lun_map['mapRef'],
lun=lun_map['lun']
))
rc, data = request(url, data=post_body, method='POST', url_username=user, url_password=pwd, headers=HEADERS,
ignore_errors=True)
if rc == 422:
data = move_lun(module, ssid, lun_map, vol_name, api_url, user, pwd)
# module.fail_json(msg="The volume you specified '%s' is already "
# "part of a different LUN mapping. If you "
# "want to move it to a different host or "
# "hostgroup, then please use the "
# "netapp_e_move_lun module" % vol_name)
return data
def move_lun(module, ssid, lun_map, vol_name, api_url, user, pwd):
lun_id = get_lun_id(module, ssid, lun_map, api_url, user, pwd)
move_lun = "storage-systems/%s/volume-mappings/%s/move" % (ssid, lun_id)
url = api_url + move_lun
post_body = json.dumps(dict(targetId=lun_map['mapRef'], lun=lun_map['lun']))
rc, data = request(url, data=post_body, method='POST', url_username=user, url_password=pwd, headers=HEADERS)
return data
def get_lun_id(module, ssid, lun_mapping, api_url, user, pwd):
data = get_lun_mappings(ssid, api_url, user, pwd, get_all=True)
for lun_map in data:
if lun_map['volumeRef'] == lun_mapping['volumeRef']:
return lun_map['id']
# This shouldn't ever get called
module.fail_json(msg="No LUN map found.")
def remove_mapping(module, ssid, lun_mapping, api_url, user, pwd):
lun_id = get_lun_id(module, ssid, lun_mapping, api_url, user, pwd)
lun_del = "storage-systems/%s/volume-mappings/%s" % (ssid, lun_id)
url = api_url + lun_del
rc, data = request(url, method='DELETE', url_username=user, url_password=pwd, headers=HEADERS)
return data
def main():
argument_spec = basic_auth_argument_spec()
argument_spec.update(dict(
api_username=dict(type='str', required=True),
api_password=dict(type='str', required=True, no_log=True),
api_url=dict(type='str', required=True),
state=dict(required=True, choices=['present', 'absent']),
target=dict(required=False, default=None),
target_type=dict(required=False, choices=['host', 'group']),
lun=dict(required=False, type='int', default=0),
ssid=dict(required=False),
volume_name=dict(required=True),
))
module = AnsibleModule(argument_spec=argument_spec)
state = module.params['state']
target = module.params['target']
target_type = module.params['target_type']
lun = module.params['lun']
ssid = module.params['ssid']
vol_name = module.params['volume_name']
user = module.params['api_username']
pwd = module.params['api_password']
api_url = module.params['api_url']
if not api_url.endswith('/'):
api_url += '/'
volume_map = get_volumes(module, ssid, api_url, user, pwd, "volumes")
thin_volume_map = get_volumes(module, ssid, api_url, user, pwd, "thin-volumes")
volref = None
for vol in volume_map:
if vol['label'] == vol_name:
volref = vol['volumeRef']
if not volref:
for vol in thin_volume_map:
if vol['label'] == vol_name:
volref = vol['volumeRef']
if not volref:
module.fail_json(changed=False, msg="No volume with the name %s was found" % vol_name)
host_and_group_mapping = get_host_and_group_map(module, ssid, api_url, user, pwd)
desired_lun_mapping = dict(
mapRef=host_and_group_mapping[target_type][target],
lun=lun,
volumeRef=volref
)
lun_mappings = get_lun_mappings(ssid, api_url, user, pwd)
if state == 'present':
if desired_lun_mapping in lun_mappings:
module.exit_json(changed=False, msg="Mapping exists")
else:
result = create_mapping(module, ssid, desired_lun_mapping, vol_name, api_url, user, pwd)
module.exit_json(changed=True, **result)
elif state == 'absent':
if desired_lun_mapping in lun_mappings:
result = remove_mapping(module, ssid, desired_lun_mapping, api_url, user, pwd)
module.exit_json(changed=True, msg="Mapping removed")
else:
module.exit_json(changed=False, msg="Mapping absent")
if __name__ == '__main__':
main()
|
gpl-3.0
|
osamak/student-portal
|
niqati/south_migrations/0003_convert_to_episode.py
|
2
|
13829
|
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
class Migration(DataMigration):
def forwards(self, orm):
"Write your forwards methods here."
# Note: Don't use "from appname.models import ModelName".
# Use orm.ModelName to refer to models in this application,
# and orm['appname.ModelName'] for models in other applications.
for code in orm.Code.objects.all():
code.episode = code.activity.episode_set.first()
code.save()
for order in orm.Code_Order.objects.all():
order.episode = order.activity.episode_set.first()
order.save()
def backwards(self, orm):
"Write your backwards methods here."
for code in orm.Code.objects.all():
code.episode = None
code.save()
for order in orm.Code_Order.objects.all():
order.episode = None
order.save()
models = {
u'activities.activity': {
'Meta': {'object_name': 'Activity'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['activities.Category']", 'null': 'True', 'on_delete': 'models.SET_NULL'}),
'description': ('django.db.models.fields.TextField', [], {}),
'edit_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inside_collaborators': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'is_editable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organizers': ('django.db.models.fields.IntegerField', [], {}),
'outside_collaborators': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'participants': ('django.db.models.fields.IntegerField', [], {}),
'primary_club': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_activity'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['clubs.Club']"}),
'public_description': ('django.db.models.fields.TextField', [], {}),
'requirements': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'secondary_clubs': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'secondary_activity'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['clubs.Club']"}),
'submission_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'submitter': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'on_delete': 'models.SET_NULL'})
},
u'activities.category': {
'Meta': {'object_name': 'Category'},
'ar_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'en_name': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['activities.Category']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'})
},
u'activities.episode': {
'Meta': {'object_name': 'Episode'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['activities.Activity']"}),
'can_report_early': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'end_date': ('django.db.models.fields.DateField', [], {}),
'end_time': ('django.db.models.fields.TimeField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'requires_report': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'requires_story': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'start_date': ('django.db.models.fields.DateField', [], {}),
'start_time': ('django.db.models.fields.TimeField', [], {})
},
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'clubs.club': {
'Meta': {'object_name': 'Club'},
'city': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'college': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'to': u"orm['clubs.College']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'coordinator': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'coordination'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': u"orm['auth.User']"}),
'creation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deputies': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'deputyships'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'description': ('django.db.models.fields.TextField', [], {}),
'edit_date': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '254'}),
'employee': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'employee'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': u"orm['auth.User']", 'blank': 'True', 'null': 'True'}),
'english_name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'memberships'", 'null': 'True', 'symmetrical': 'False', 'to': u"orm['auth.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'parenthood'", 'on_delete': 'models.SET_NULL', 'default': 'None', 'to': u"orm['clubs.Club']", 'blank': 'True', 'null': 'True'}),
'special': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'clubs.college': {
'Meta': {'object_name': 'College'},
'city': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'gender': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'section': ('django.db.models.fields.CharField', [], {'max_length': '2'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'niqati.category': {
'Meta': {'object_name': 'Category'},
'ar_label': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '20'}),
'points': ('django.db.models.fields.IntegerField', [], {}),
'requires_approval': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
u'niqati.code': {
'Meta': {'object_name': 'Code'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['activities.Activity']"}),
'asset': ('django.db.models.fields.CharField', [], {'max_length': '300', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['niqati.Category']"}),
'code_string': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '16'}),
'collection': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['niqati.Code_Collection']"}),
'episode': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['activities.Episode']", 'null': 'True', 'blank': 'True'}),
'generation_date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'redeem_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']", 'null': 'True', 'blank': 'True'})
},
u'niqati.code_collection': {
'Meta': {'object_name': 'Code_Collection'},
'approved': ('django.db.models.fields.NullBooleanField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'asset': ('django.db.models.fields.files.FileField', [], {'max_length': '100'}),
'code_category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['niqati.Category']"}),
'code_count': ('django.db.models.fields.IntegerField', [], {}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'date_ordered': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'delivery_type': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'parent_order': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['niqati.Code_Order']"})
},
u'niqati.code_order': {
'Meta': {'object_name': 'Code_Order'},
'activity': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['activities.Activity']"}),
'date_ordered': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'episode': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['activities.Episode']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
}
}
complete_apps = ['niqati']
symmetrical = True
|
agpl-3.0
|
vongazman/libcloud
|
libcloud/compute/drivers/elasticstack.py
|
2
|
16303
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Base driver for the providers based on the ElasticStack platform -
http://www.elasticstack.com.
"""
import re
import time
import base64
from libcloud.utils.py3 import httplib
from libcloud.utils.py3 import b
try:
import simplejson as json
except ImportError:
import json
from libcloud.common.base import ConnectionUserAndKey, JsonResponse
from libcloud.common.types import InvalidCredsError
from libcloud.compute.types import NodeState
from libcloud.compute.base import NodeDriver, NodeSize, Node
from libcloud.compute.base import NodeImage
from libcloud.compute.deployment import ScriptDeployment, SSHKeyDeployment
from libcloud.compute.deployment import MultiStepDeployment
NODE_STATE_MAP = {
'active': NodeState.RUNNING,
'dead': NodeState.TERMINATED,
'dumped': NodeState.TERMINATED,
}
# Default timeout (in seconds) for the drive imaging process
IMAGING_TIMEOUT = 10 * 60
# ElasticStack doesn't specify special instance types, so I just specified
# some plans based on the other provider offerings.
#
# Basically, for CPU any value between 500MHz and 20000MHz should work,
# 256MB to 8192MB for RAM, and 1GB to 2TB for disk.
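# Illustrative only: a plan outside this table could be expressed with the
# same dict shape, e.g. {'id': 'tiny', 'name': 'Tiny instance', 'cpu': 500,
# 'memory': 256, 'disk': 1, 'bandwidth': None} (hypothetical values inside
# the ranges above).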
INSTANCE_TYPES = {
'small': {
'id': 'small',
'name': 'Small instance',
'cpu': 2000,
'memory': 1700,
'disk': 160,
'bandwidth': None,
},
'medium': {
'id': 'medium',
'name': 'Medium instance',
'cpu': 3000,
'memory': 4096,
'disk': 500,
'bandwidth': None,
},
'large': {
'id': 'large',
'name': 'Large instance',
'cpu': 4000,
'memory': 7680,
'disk': 850,
'bandwidth': None,
},
'extra-large': {
'id': 'extra-large',
'name': 'Extra Large instance',
'cpu': 8000,
'memory': 8192,
'disk': 1690,
'bandwidth': None,
},
'high-cpu-medium': {
'id': 'high-cpu-medium',
'name': 'High-CPU Medium instance',
'cpu': 5000,
'memory': 1700,
'disk': 350,
'bandwidth': None,
},
'high-cpu-extra-large': {
'id': 'high-cpu-extra-large',
'name': 'High-CPU Extra Large instance',
'cpu': 20000,
'memory': 7168,
'disk': 1690,
'bandwidth': None,
},
}
class ElasticStackException(Exception):
def __str__(self):
return self.args[0]
def __repr__(self):
return "<ElasticStackException '%s'>" % (self.args[0])
class ElasticStackResponse(JsonResponse):
def success(self):
if self.status == 401:
raise InvalidCredsError()
return 200 <= self.status <= 299
def parse_error(self):
error_header = self.headers.get('x-elastic-error', '')
return 'X-Elastic-Error: %s (%s)' % (error_header, self.body.strip())
class ElasticStackNodeSize(NodeSize):
def __init__(self, id, name, cpu, ram, disk, bandwidth, price, driver):
self.id = id
self.name = name
self.cpu = cpu
self.ram = ram
self.disk = disk
self.bandwidth = bandwidth
self.price = price
self.driver = driver
def __repr__(self):
return (('<NodeSize: id=%s, name=%s, cpu=%s, ram=%s '
'disk=%s bandwidth=%s price=%s driver=%s ...>')
% (self.id, self.name, self.cpu, self.ram,
self.disk, self.bandwidth, self.price, self.driver.name))
class ElasticStackBaseConnection(ConnectionUserAndKey):
"""
Base connection class for the ElasticStack driver
"""
host = None
responseCls = ElasticStackResponse
def add_default_headers(self, headers):
headers['Accept'] = 'application/json'
headers['Content-Type'] = 'application/json'
headers['Authorization'] = \
('Basic %s' % (base64.b64encode(b('%s:%s' % (self.user_id,
self.key))))
.decode('utf-8'))
return headers
class ElasticStackBaseNodeDriver(NodeDriver):
website = 'http://www.elasticstack.com'
connectionCls = ElasticStackBaseConnection
features = {"create_node": ["generates_password"]}
def reboot_node(self, node):
# Reboots the node
response = self.connection.request(
action='/servers/%s/reset' % (node.id),
method='POST'
)
return response.status == 204
def destroy_node(self, node):
# Kills the server immediately
response = self.connection.request(
action='/servers/%s/destroy' % (node.id),
method='POST'
)
return response.status == 204
def list_images(self, location=None):
# Returns a list of available pre-installed system drive images
images = []
for key, value in self._standard_drives.items():
image = NodeImage(
id=value['uuid'],
name=value['description'],
driver=self.connection.driver,
extra={
'size_gunzipped': value['size_gunzipped']
}
)
images.append(image)
return images
def list_sizes(self, location=None):
sizes = []
for key, value in INSTANCE_TYPES.items():
size = ElasticStackNodeSize(
id=value['id'],
name=value['name'], cpu=value['cpu'], ram=value['memory'],
disk=value['disk'], bandwidth=value['bandwidth'],
price=self._get_size_price(size_id=value['id']),
driver=self.connection.driver
)
sizes.append(size)
return sizes
def list_nodes(self):
# Returns a list of active (running) nodes
response = self.connection.request(action='/servers/info').object
nodes = []
for data in response:
node = self._to_node(data)
nodes.append(node)
return nodes
def create_node(self, **kwargs):
"""Creates an ElasticStack instance
@inherits: :class:`NodeDriver.create_node`
:keyword name: String with a name for this new node (required)
:type name: ``str``
:keyword smp: Number of virtual processors or None to calculate
based on the cpu speed
:type smp: ``int``
:keyword nic_model: e1000, rtl8139 or virtio
(if not specified, e1000 is used)
:type nic_model: ``str``
:keyword vnc_password: If set, the same password is also used for
SSH access with user toor,
otherwise VNC access is disabled and
no SSH login is possible.
:type vnc_password: ``str``
"""
size = kwargs['size']
image = kwargs['image']
smp = kwargs.get('smp', 'auto')
nic_model = kwargs.get('nic_model', 'e1000')
vnc_password = ssh_password = kwargs.get('vnc_password', None)
if nic_model not in ('e1000', 'rtl8139', 'virtio'):
raise ElasticStackException('Invalid NIC model specified')
        # TODO: check that drive size is not smaller than the pre-installed
        # image size (no such check is performed below)
# First we create a drive with the specified size
drive_data = {}
drive_data.update({'name': kwargs['name'],
'size': '%sG' % (kwargs['size'].disk)})
response = self.connection.request(action='/drives/create',
data=json.dumps(drive_data),
method='POST').object
if not response:
raise ElasticStackException('Drive creation failed')
drive_uuid = response['drive']
# Then we image the selected pre-installed system drive onto it
response = self.connection.request(
action='/drives/%s/image/%s/gunzip' % (drive_uuid, image.id),
method='POST'
)
if response.status not in (200, 204):
raise ElasticStackException('Drive imaging failed')
# We wait until the drive is imaged and then boot up the node
# (in most cases, the imaging process shouldn't take longer
# than a few minutes)
response = self.connection.request(
action='/drives/%s/info' % (drive_uuid)
).object
imaging_start = time.time()
while 'imaging' in response:
response = self.connection.request(
action='/drives/%s/info' % (drive_uuid)
).object
elapsed_time = time.time() - imaging_start
if ('imaging' in response and elapsed_time >= IMAGING_TIMEOUT):
raise ElasticStackException('Drive imaging timed out')
time.sleep(1)
node_data = {}
node_data.update({'name': kwargs['name'],
'cpu': size.cpu,
'mem': size.ram,
'ide:0:0': drive_uuid,
'boot': 'ide:0:0',
'smp': smp})
node_data.update({'nic:0:model': nic_model, 'nic:0:dhcp': 'auto'})
if vnc_password:
node_data.update({'vnc': 'auto', 'vnc:password': vnc_password})
response = self.connection.request(
action='/servers/create', data=json.dumps(node_data),
method='POST'
).object
if isinstance(response, list):
nodes = [self._to_node(node, ssh_password) for node in response]
else:
nodes = self._to_node(response, ssh_password)
return nodes
# Extension methods
def ex_set_node_configuration(self, node, **kwargs):
"""
Changes the configuration of the running server
:param node: Node which should be used
:type node: :class:`Node`
:param kwargs: keyword arguments
:type kwargs: ``dict``
:rtype: ``bool``
"""
valid_keys = ('^name$', '^parent$', '^cpu$', '^smp$', '^mem$',
'^boot$', '^nic:0:model$', '^nic:0:dhcp',
'^nic:1:model$', '^nic:1:vlan$', '^nic:1:mac$',
'^vnc:ip$', '^vnc:password$', '^vnc:tls',
'^ide:[0-1]:[0-1](:media)?$',
'^scsi:0:[0-7](:media)?$', '^block:[0-7](:media)?$')
invalid_keys = []
keys = list(kwargs.keys())
for key in keys:
matches = False
for regex in valid_keys:
if re.match(regex, key):
matches = True
break
if not matches:
invalid_keys.append(key)
if invalid_keys:
raise ElasticStackException(
'Invalid configuration key specified: %s'
% (',' .join(invalid_keys))
)
response = self.connection.request(
action='/servers/%s/set' % (node.id), data=json.dumps(kwargs),
method='POST'
)
return (response.status == httplib.OK and response.body != '')
def deploy_node(self, **kwargs):
"""
Create a new node, and start deployment.
@inherits: :class:`NodeDriver.deploy_node`
:keyword enable_root: If true, root password will be set to
vnc_password (this will enable SSH access)
and default 'toor' account will be deleted.
:type enable_root: ``bool``
"""
image = kwargs['image']
vnc_password = kwargs.get('vnc_password', None)
enable_root = kwargs.get('enable_root', False)
if not vnc_password:
raise ValueError('You need to provide vnc_password argument '
'if you want to use deployment')
        if (image.id in self._standard_drives and
                not self._standard_drives[image.id]['supports_deployment']):
raise ValueError('Image %s does not support deployment'
% (image.id))
if enable_root:
script = ("unset HISTFILE;"
"echo root:%s | chpasswd;"
"sed -i '/^toor.*$/d' /etc/passwd /etc/shadow;"
"history -c") % vnc_password
root_enable_script = ScriptDeployment(script=script,
delete=True)
deploy = kwargs.get('deploy', None)
if deploy:
if (isinstance(deploy, ScriptDeployment) or
isinstance(deploy, SSHKeyDeployment)):
deployment = MultiStepDeployment([deploy,
root_enable_script])
elif isinstance(deploy, MultiStepDeployment):
deployment = deploy
deployment.add(root_enable_script)
else:
deployment = root_enable_script
kwargs['deploy'] = deployment
if not kwargs.get('ssh_username', None):
kwargs['ssh_username'] = 'toor'
return super(ElasticStackBaseNodeDriver, self).deploy_node(**kwargs)
def ex_shutdown_node(self, node):
"""
Sends the ACPI power-down event
:param node: Node which should be used
:type node: :class:`Node`
:rtype: ``bool``
"""
response = self.connection.request(
action='/servers/%s/shutdown' % (node.id),
method='POST'
)
return response.status == 204
def ex_destroy_drive(self, drive_uuid):
"""
Deletes a drive
:param drive_uuid: Drive uuid which should be used
:type drive_uuid: ``str``
:rtype: ``bool``
"""
response = self.connection.request(
action='/drives/%s/destroy' % (drive_uuid),
method='POST'
)
return response.status == 204
# Helper methods
def _to_node(self, data, ssh_password=None):
try:
state = NODE_STATE_MAP[data['status']]
except KeyError:
state = NodeState.UNKNOWN
if 'nic:0:dhcp:ip' in data:
if isinstance(data['nic:0:dhcp:ip'], list):
public_ip = data['nic:0:dhcp:ip']
else:
public_ip = [data['nic:0:dhcp:ip']]
else:
public_ip = []
extra = {'cpu': data['cpu'],
'mem': data['mem']}
if 'started' in data:
extra['started'] = data['started']
if 'smp' in data:
extra['smp'] = data['smp']
if 'vnc:ip' in data:
extra['vnc:ip'] = data['vnc:ip']
if 'vnc:password' in data:
extra['vnc:password'] = data['vnc:password']
boot_device = data['boot']
if isinstance(boot_device, list):
for device in boot_device:
extra[device] = data[device]
else:
extra[boot_device] = data[boot_device]
if ssh_password:
extra.update({'password': ssh_password})
node = Node(id=data['server'], name=data['name'], state=state,
public_ips=public_ip, private_ips=None,
driver=self.connection.driver,
extra=extra)
return node
|
apache-2.0
|
TEAM-Gummy/platform_external_chromium_org
|
ui/ozone/generate_ozone_platform_list.py
|
28
|
2740
|
#!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Code generator for Ozone platform list.
This script takes as arguments a list of platform names and generates a C++
source file containing a list of those platforms. Each list entry contains the
name and a function pointer to the initializer for that platform.
Example Output: ./generate_ozone_platform_list.py --default wayland dri wayland
#include "ui/ozone/ozone_platform_list.h"
namespace ui {
OzonePlatform* CreateOzonePlatformDri();
OzonePlatform* CreateOzonePlatformWayland();
const OzonePlatformListEntry kOzonePlatforms[] = {
{ "wayland", &CreateOzonePlatformWayland },
{ "dri", &CreateOzonePlatformDri },
};
const int kOzonePlatformCount = 2;
} // namespace ui
"""
import optparse
import string
import sys
def GetConstructorName(name):
"""Determine name of static constructor function from platform name.
We just capitalize the platform name and prepend "CreateOzonePlatform".
"""
return 'CreateOzonePlatform' + string.capitalize(name)
def GeneratePlatformList(out, platforms):
"""Generate static array containing a list of ozone platforms."""
out.write('#include "ui/ozone/ozone_platform_list.h"\n')
out.write('\n')
out.write('namespace ui {\n')
out.write('\n')
# Prototypes for platform initializers.
for platform in platforms:
out.write('OzonePlatform* %s();\n' % GetConstructorName(platform))
out.write('\n')
# List of platform names and initializers.
out.write('const OzonePlatformListEntry kOzonePlatforms[] = {\n')
for platform in platforms:
out.write(' { "%s", &%s },\n' % (platform, GetConstructorName(platform)))
out.write('};\n')
out.write('\n')
out.write('const int kOzonePlatformCount = %d;\n' % len(platforms))
out.write('\n')
out.write('} // namespace ui\n')
def main(argv):
parser = optparse.OptionParser()
parser.add_option('--output_file')
parser.add_option('--default')
options, platforms = parser.parse_args(argv)
# Write to standard output or file specified by --output_file.
out = sys.stdout
if options.output_file:
out = open(options.output_file, 'wb')
# Reorder the platforms when --default is specified.
# The default platform must appear first in the platform list.
if options.default and options.default in platforms:
platforms.remove(options.default)
platforms.insert(0, options.default)
GeneratePlatformList(out, platforms)
if options.output_file:
out.close()
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
|
bsd-3-clause
|
mistydemeo/gyp
|
test/msvs/list_excluded/gyptest-all.py
|
347
|
1292
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that msvs_list_excluded_files=0 doesn't list files that would
normally be in _excluded_files, and that if that flag is not set, then they
are still listed.
"""
import os
import TestGyp
test = TestGyp.TestGyp(formats=['msvs'], workdir='workarea_all')
# with the flag set to 0
try:
os.environ['GYP_GENERATOR_FLAGS'] = 'msvs_list_excluded_files=0'
test.run_gyp('hello_exclude.gyp')
finally:
del os.environ['GYP_GENERATOR_FLAGS']
if test.uses_msbuild:
test.must_not_contain('hello.vcxproj', 'hello_mac')
else:
test.must_not_contain('hello.vcproj', 'hello_mac')
# with the flag not set
test.run_gyp('hello_exclude.gyp')
if test.uses_msbuild:
test.must_contain('hello.vcxproj', 'hello_mac')
else:
test.must_contain('hello.vcproj', 'hello_mac')
# with the flag explicitly set to 1
try:
os.environ['GYP_GENERATOR_FLAGS'] = 'msvs_list_excluded_files=1'
test.run_gyp('hello_exclude.gyp')
finally:
del os.environ['GYP_GENERATOR_FLAGS']
if test.uses_msbuild:
test.must_contain('hello.vcxproj', 'hello_mac')
else:
test.must_contain('hello.vcproj', 'hello_mac')
test.pass_test()
|
bsd-3-clause
|
gppezzi/easybuild-framework
|
easybuild/tools/package/package_naming_scheme/easybuild_pns.py
|
2
|
2335
|
##
# Copyright 2015-2019 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://www.vscentrum.be),
# Flemish Research Foundation (FWO) (http://www.fwo.be/en)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# https://github.com/easybuilders/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
##
"""
Implementation of the EasyBuild packaging naming scheme
:author: Robert Schmidt (Ottawa Hospital Research Institute)
:author: Kenneth Hoste (Ghent University)
"""
from easybuild.tools.module_naming_scheme.utilities import det_full_ec_version
from easybuild.tools.package.package_naming_scheme.pns import PackageNamingScheme
from easybuild.tools.version import VERSION as EASYBUILD_VERSION
class EasyBuildPNS(PackageNamingScheme):
"""Class implmenting the default EasyBuild packaging naming scheme."""
def name(self, ec):
"""Determine package name"""
self.log.debug("Easyconfig dict passed to name() looks like: %s ", ec)
return '%s-%s' % (ec['name'], det_full_ec_version(ec))
def version(self, ec):
"""Determine package version: EasyBuild version used to build & install."""
ebver = str(EASYBUILD_VERSION)
if ebver.endswith('dev'):
# try and make sure that 'dev' EasyBuild version is not considered newer just because it's longer
# (e.g., 2.2.0 vs 2.2.0dev)
# cfr. http://rpm.org/ticket/56,
# https://debian-handbook.info/browse/stable/sect.manipulating-packages-with-dpkg.html (see box in 5.4.3)
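            # illustrative: '2.2.0dev' becomes '2.2.0~dev', which both rpm
            # and dpkg sort before the plain '2.2.0' release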
            ebver = ebver.replace('dev', '~dev')
return 'eb-%s' % ebver
|
gpl-2.0
|
m0ppers/arangodb
|
3rdParty/V8/V8-5.0.71.39/build/gyp/pylib/gyp/generator/cmake.py
|
1355
|
44604
|
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""cmake output module
This module is under development and should be considered experimental.
This module produces cmake (2.8.8+) input as its output. One CMakeLists.txt is
created for each configuration.
This module's original purpose was to support editing in IDEs like KDevelop
which use CMake for project management. It is also possible to use CMake to
generate projects for other IDEs such as Eclipse CDT and Code::Blocks. QtCreator
will convert the CMakeLists.txt to a Code::Blocks cbp for the editor to read,
but build using CMake. As a result, the QtCreator editor is unaware of compiler
defines. The generated CMakeLists.txt can also be used to build on Linux. There
is currently no support for building on platforms other than Linux.
The generated CMakeLists.txt should properly compile all projects. However,
there is a mismatch between gyp and cmake with regard to linking. All attempts
are made to work around this, but CMake sometimes sees -Wl,--start-group as a
library and incorrectly repeats it. As a result the output of this generator
should not be relied on for building.
When used with KDevelop, use version 4.4+. Previous versions of KDevelop will
not be able to find the header file directories described in the generated
CMakeLists.txt file.
"""
import multiprocessing
import os
import signal
import string
import subprocess
import gyp.common
generator_default_variables = {
'EXECUTABLE_PREFIX': '',
'EXECUTABLE_SUFFIX': '',
'STATIC_LIB_PREFIX': 'lib',
'STATIC_LIB_SUFFIX': '.a',
'SHARED_LIB_PREFIX': 'lib',
'SHARED_LIB_SUFFIX': '.so',
'SHARED_LIB_DIR': '${builddir}/lib.${TOOLSET}',
'LIB_DIR': '${obj}.${TOOLSET}',
'INTERMEDIATE_DIR': '${obj}.${TOOLSET}/${TARGET}/geni',
'SHARED_INTERMEDIATE_DIR': '${obj}/gen',
'PRODUCT_DIR': '${builddir}',
'RULE_INPUT_PATH': '${RULE_INPUT_PATH}',
'RULE_INPUT_DIRNAME': '${RULE_INPUT_DIRNAME}',
'RULE_INPUT_NAME': '${RULE_INPUT_NAME}',
'RULE_INPUT_ROOT': '${RULE_INPUT_ROOT}',
'RULE_INPUT_EXT': '${RULE_INPUT_EXT}',
'CONFIGURATION_NAME': '${configuration}',
}
FULL_PATH_VARS = ('${CMAKE_CURRENT_LIST_DIR}', '${builddir}', '${obj}')
generator_supports_multiple_toolsets = True
generator_wants_static_library_dependencies_adjusted = True
COMPILABLE_EXTENSIONS = {
'.c': 'cc',
'.cc': 'cxx',
'.cpp': 'cxx',
'.cxx': 'cxx',
'.s': 's', # cc
'.S': 's', # cc
}
def RemovePrefix(a, prefix):
"""Returns 'a' without 'prefix' if it starts with 'prefix'."""
return a[len(prefix):] if a.startswith(prefix) else a
def CalculateVariables(default_variables, params):
"""Calculate additional variables for use in the build (called by gyp)."""
default_variables.setdefault('OS', gyp.common.GetFlavor(params))
def Compilable(filename):
"""Return true if the file is compilable (should be in OBJS)."""
return any(filename.endswith(e) for e in COMPILABLE_EXTENSIONS)
def Linkable(filename):
"""Return true if the file is linkable (should be on the link line)."""
return filename.endswith('.o')
def NormjoinPathForceCMakeSource(base_path, rel_path):
"""Resolves rel_path against base_path and returns the result.
If rel_path is an absolute path it is returned unchanged.
Otherwise it is resolved against base_path and normalized.
If the result is a relative path, it is forced to be relative to the
CMakeLists.txt.
"""
if os.path.isabs(rel_path):
return rel_path
if any([rel_path.startswith(var) for var in FULL_PATH_VARS]):
return rel_path
# TODO: do we need to check base_path for absolute variables as well?
return os.path.join('${CMAKE_CURRENT_LIST_DIR}',
os.path.normpath(os.path.join(base_path, rel_path)))
def NormjoinPath(base_path, rel_path):
"""Resolves rel_path against base_path and returns the result.
TODO: what is this really used for?
If rel_path begins with '$' it is returned unchanged.
Otherwise it is resolved against base_path if relative, then normalized.
"""
if rel_path.startswith('$') and not rel_path.startswith('${configuration}'):
return rel_path
return os.path.normpath(os.path.join(base_path, rel_path))
def CMakeStringEscape(a):
"""Escapes the string 'a' for use inside a CMake string.
This means escaping
'\' otherwise it may be seen as modifying the next character
'"' otherwise it will end the string
';' otherwise the string becomes a list
The following do not need to be escaped
'#' when the lexer is in string state, this does not start a comment
The following are yet unknown
'$' generator variables (like ${obj}) must not be escaped,
but text $ should be escaped
what is wanted is to know which $ come from generator variables
"""
return a.replace('\\', '\\\\').replace(';', '\\;').replace('"', '\\"')
def SetFileProperty(output, source_name, property_name, values, sep):
"""Given a set of source file, sets the given property on them."""
output.write('set_source_files_properties(')
output.write(source_name)
output.write(' PROPERTIES ')
output.write(property_name)
output.write(' "')
for value in values:
output.write(CMakeStringEscape(value))
output.write(sep)
output.write('")\n')
def SetFilesProperty(output, variable, property_name, values, sep):
"""Given a set of source files, sets the given property on them."""
output.write('set_source_files_properties(')
WriteVariable(output, variable)
output.write(' PROPERTIES ')
output.write(property_name)
output.write(' "')
for value in values:
output.write(CMakeStringEscape(value))
output.write(sep)
output.write('")\n')
def SetTargetProperty(output, target_name, property_name, values, sep=''):
"""Given a target, sets the given property."""
output.write('set_target_properties(')
output.write(target_name)
output.write(' PROPERTIES ')
output.write(property_name)
output.write(' "')
for value in values:
output.write(CMakeStringEscape(value))
output.write(sep)
output.write('")\n')
def SetVariable(output, variable_name, value):
"""Sets a CMake variable."""
output.write('set(')
output.write(variable_name)
output.write(' "')
output.write(CMakeStringEscape(value))
output.write('")\n')
def SetVariableList(output, variable_name, values):
"""Sets a CMake variable to a list."""
if not values:
return SetVariable(output, variable_name, "")
if len(values) == 1:
return SetVariable(output, variable_name, values[0])
output.write('list(APPEND ')
output.write(variable_name)
output.write('\n "')
output.write('"\n "'.join([CMakeStringEscape(value) for value in values]))
output.write('")\n')
def UnsetVariable(output, variable_name):
"""Unsets a CMake variable."""
output.write('unset(')
output.write(variable_name)
output.write(')\n')
def WriteVariable(output, variable_name, prepend=None):
if prepend:
output.write(prepend)
output.write('${')
output.write(variable_name)
output.write('}')
class CMakeTargetType(object):
def __init__(self, command, modifier, property_modifier):
self.command = command
self.modifier = modifier
self.property_modifier = property_modifier
cmake_target_type_from_gyp_target_type = {
'executable': CMakeTargetType('add_executable', None, 'RUNTIME'),
'static_library': CMakeTargetType('add_library', 'STATIC', 'ARCHIVE'),
'shared_library': CMakeTargetType('add_library', 'SHARED', 'LIBRARY'),
'loadable_module': CMakeTargetType('add_library', 'MODULE', 'LIBRARY'),
'none': CMakeTargetType('add_custom_target', 'SOURCES', None),
}
def StringToCMakeTargetName(a):
"""Converts the given string 'a' to a valid CMake target name.
All invalid characters are replaced by '_'.
Invalid for cmake: ' ', '/', '(', ')', '"'
Invalid for make: ':'
Invalid for unknown reasons but cause failures: '.'
"""
return a.translate(string.maketrans(' /():."', '_______'))
def WriteActions(target_name, actions, extra_sources, extra_deps,
path_to_gyp, output):
"""Write CMake for the 'actions' in the target.
Args:
target_name: the name of the CMake target being generated.
    actions: the Gyp 'actions' list for this target.
extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
    extra_deps: [<cmake_target>] to append with generated targets.
path_to_gyp: relative path from CMakeLists.txt being generated to
the Gyp file in which the target being generated is defined.
"""
for action in actions:
action_name = StringToCMakeTargetName(action['action_name'])
action_target_name = '%s__%s' % (target_name, action_name)
inputs = action['inputs']
inputs_name = action_target_name + '__input'
SetVariableList(output, inputs_name,
[NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
outputs = action['outputs']
cmake_outputs = [NormjoinPathForceCMakeSource(path_to_gyp, out)
for out in outputs]
outputs_name = action_target_name + '__output'
SetVariableList(output, outputs_name, cmake_outputs)
# Build up a list of outputs.
# Collect the output dirs we'll need.
dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
if int(action.get('process_outputs_as_sources', False)):
extra_sources.extend(zip(cmake_outputs, outputs))
# add_custom_command
output.write('add_custom_command(OUTPUT ')
WriteVariable(output, outputs_name)
output.write('\n')
if len(dirs) > 0:
for directory in dirs:
output.write(' COMMAND ${CMAKE_COMMAND} -E make_directory ')
output.write(directory)
output.write('\n')
output.write(' COMMAND ')
output.write(gyp.common.EncodePOSIXShellList(action['action']))
output.write('\n')
output.write(' DEPENDS ')
WriteVariable(output, inputs_name)
output.write('\n')
output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
output.write(path_to_gyp)
output.write('\n')
output.write(' COMMENT ')
if 'message' in action:
output.write(action['message'])
else:
output.write(action_target_name)
output.write('\n')
output.write(' VERBATIM\n')
output.write(')\n')
# add_custom_target
output.write('add_custom_target(')
output.write(action_target_name)
output.write('\n DEPENDS ')
WriteVariable(output, outputs_name)
output.write('\n SOURCES ')
WriteVariable(output, inputs_name)
output.write('\n)\n')
extra_deps.append(action_target_name)
def NormjoinRulePathForceCMakeSource(base_path, rel_path, rule_source):
  if rel_path.startswith(("${RULE_INPUT_PATH}", "${RULE_INPUT_DIRNAME}")):
if any([rule_source.startswith(var) for var in FULL_PATH_VARS]):
return rel_path
return NormjoinPathForceCMakeSource(base_path, rel_path)
def WriteRules(target_name, rules, extra_sources, extra_deps,
path_to_gyp, output):
"""Write CMake for the 'rules' in the target.
Args:
target_name: the name of the CMake target being generated.
    rules: the Gyp 'rules' list for this target.
extra_sources: [(<cmake_src>, <src>)] to append with generated source files.
    extra_deps: [<cmake_target>] to append with generated targets.
path_to_gyp: relative path from CMakeLists.txt being generated to
the Gyp file in which the target being generated is defined.
"""
for rule in rules:
rule_name = StringToCMakeTargetName(target_name + '__' + rule['rule_name'])
inputs = rule.get('inputs', [])
inputs_name = rule_name + '__input'
SetVariableList(output, inputs_name,
[NormjoinPathForceCMakeSource(path_to_gyp, dep) for dep in inputs])
outputs = rule['outputs']
var_outputs = []
for count, rule_source in enumerate(rule.get('rule_sources', [])):
action_name = rule_name + '_' + str(count)
rule_source_dirname, rule_source_basename = os.path.split(rule_source)
rule_source_root, rule_source_ext = os.path.splitext(rule_source_basename)
SetVariable(output, 'RULE_INPUT_PATH', rule_source)
SetVariable(output, 'RULE_INPUT_DIRNAME', rule_source_dirname)
SetVariable(output, 'RULE_INPUT_NAME', rule_source_basename)
SetVariable(output, 'RULE_INPUT_ROOT', rule_source_root)
SetVariable(output, 'RULE_INPUT_EXT', rule_source_ext)
# Build up a list of outputs.
# Collect the output dirs we'll need.
dirs = set(dir for dir in (os.path.dirname(o) for o in outputs) if dir)
# Create variables for the output, as 'local' variable will be unset.
these_outputs = []
for output_index, out in enumerate(outputs):
output_name = action_name + '_' + str(output_index)
SetVariable(output, output_name,
NormjoinRulePathForceCMakeSource(path_to_gyp, out,
rule_source))
if int(rule.get('process_outputs_as_sources', False)):
extra_sources.append(('${' + output_name + '}', out))
these_outputs.append('${' + output_name + '}')
var_outputs.append('${' + output_name + '}')
# add_custom_command
output.write('add_custom_command(OUTPUT\n')
for out in these_outputs:
output.write(' ')
output.write(out)
output.write('\n')
for directory in dirs:
output.write(' COMMAND ${CMAKE_COMMAND} -E make_directory ')
output.write(directory)
output.write('\n')
output.write(' COMMAND ')
output.write(gyp.common.EncodePOSIXShellList(rule['action']))
output.write('\n')
output.write(' DEPENDS ')
WriteVariable(output, inputs_name)
output.write(' ')
output.write(NormjoinPath(path_to_gyp, rule_source))
output.write('\n')
# CMAKE_CURRENT_LIST_DIR is where the CMakeLists.txt lives.
# The cwd is the current build directory.
output.write(' WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
output.write(path_to_gyp)
output.write('\n')
output.write(' COMMENT ')
if 'message' in rule:
output.write(rule['message'])
else:
output.write(action_name)
output.write('\n')
output.write(' VERBATIM\n')
output.write(')\n')
UnsetVariable(output, 'RULE_INPUT_PATH')
UnsetVariable(output, 'RULE_INPUT_DIRNAME')
UnsetVariable(output, 'RULE_INPUT_NAME')
UnsetVariable(output, 'RULE_INPUT_ROOT')
UnsetVariable(output, 'RULE_INPUT_EXT')
# add_custom_target
output.write('add_custom_target(')
output.write(rule_name)
output.write(' DEPENDS\n')
for out in var_outputs:
output.write(' ')
output.write(out)
output.write('\n')
output.write('SOURCES ')
WriteVariable(output, inputs_name)
output.write('\n')
for rule_source in rule.get('rule_sources', []):
output.write(' ')
output.write(NormjoinPath(path_to_gyp, rule_source))
output.write('\n')
output.write(')\n')
extra_deps.append(rule_name)
def WriteCopies(target_name, copies, extra_deps, path_to_gyp, output):
"""Write CMake for the 'copies' in the target.
Args:
target_name: the name of the CMake target being generated.
    copies: the Gyp 'copies' list for this target.
    extra_deps: [<cmake_target>] to append with generated targets.
path_to_gyp: relative path from CMakeLists.txt being generated to
the Gyp file in which the target being generated is defined.
"""
copy_name = target_name + '__copies'
# CMake gets upset with custom targets with OUTPUT which specify no output.
have_copies = any(copy['files'] for copy in copies)
if not have_copies:
output.write('add_custom_target(')
output.write(copy_name)
output.write(')\n')
extra_deps.append(copy_name)
return
class Copy(object):
def __init__(self, ext, command):
self.cmake_inputs = []
self.cmake_outputs = []
self.gyp_inputs = []
self.gyp_outputs = []
self.ext = ext
self.inputs_name = None
self.outputs_name = None
self.command = command
file_copy = Copy('', 'copy')
dir_copy = Copy('_dirs', 'copy_directory')
for copy in copies:
files = copy['files']
destination = copy['destination']
for src in files:
path = os.path.normpath(src)
basename = os.path.split(path)[1]
dst = os.path.join(destination, basename)
copy = file_copy if os.path.basename(src) else dir_copy
copy.cmake_inputs.append(NormjoinPathForceCMakeSource(path_to_gyp, src))
copy.cmake_outputs.append(NormjoinPathForceCMakeSource(path_to_gyp, dst))
copy.gyp_inputs.append(src)
copy.gyp_outputs.append(dst)
for copy in (file_copy, dir_copy):
if copy.cmake_inputs:
copy.inputs_name = copy_name + '__input' + copy.ext
SetVariableList(output, copy.inputs_name, copy.cmake_inputs)
copy.outputs_name = copy_name + '__output' + copy.ext
SetVariableList(output, copy.outputs_name, copy.cmake_outputs)
# add_custom_command
output.write('add_custom_command(\n')
output.write('OUTPUT')
for copy in (file_copy, dir_copy):
if copy.outputs_name:
WriteVariable(output, copy.outputs_name, ' ')
output.write('\n')
for copy in (file_copy, dir_copy):
for src, dst in zip(copy.gyp_inputs, copy.gyp_outputs):
# 'cmake -E copy src dst' will create the 'dst' directory if needed.
output.write('COMMAND ${CMAKE_COMMAND} -E %s ' % copy.command)
output.write(src)
output.write(' ')
output.write(dst)
output.write("\n")
output.write('DEPENDS')
for copy in (file_copy, dir_copy):
if copy.inputs_name:
WriteVariable(output, copy.inputs_name, ' ')
output.write('\n')
output.write('WORKING_DIRECTORY ${CMAKE_CURRENT_LIST_DIR}/')
output.write(path_to_gyp)
output.write('\n')
output.write('COMMENT Copying for ')
output.write(target_name)
output.write('\n')
output.write('VERBATIM\n')
output.write(')\n')
# add_custom_target
output.write('add_custom_target(')
output.write(copy_name)
output.write('\n DEPENDS')
for copy in (file_copy, dir_copy):
if copy.outputs_name:
WriteVariable(output, copy.outputs_name, ' ')
output.write('\n SOURCES')
if file_copy.inputs_name:
WriteVariable(output, file_copy.inputs_name, ' ')
output.write('\n)\n')
extra_deps.append(copy_name)
def CreateCMakeTargetBaseName(qualified_target):
"""This is the name we would like the target to have."""
_, gyp_target_name, gyp_target_toolset = (
gyp.common.ParseQualifiedTarget(qualified_target))
cmake_target_base_name = gyp_target_name
if gyp_target_toolset and gyp_target_toolset != 'target':
cmake_target_base_name += '_' + gyp_target_toolset
return StringToCMakeTargetName(cmake_target_base_name)
def CreateCMakeTargetFullName(qualified_target):
"""An unambiguous name for the target."""
gyp_file, gyp_target_name, gyp_target_toolset = (
gyp.common.ParseQualifiedTarget(qualified_target))
cmake_target_full_name = gyp_file + ':' + gyp_target_name
if gyp_target_toolset and gyp_target_toolset != 'target':
cmake_target_full_name += '_' + gyp_target_toolset
return StringToCMakeTargetName(cmake_target_full_name)
class CMakeNamer(object):
"""Converts Gyp target names into CMake target names.
CMake requires that target names be globally unique. One way to ensure
  this is to fully qualify the names of the targets. Unfortunately, this
ends up with all targets looking like "chrome_chrome_gyp_chrome" instead
of just "chrome". If this generator were only interested in building, it
would be possible to fully qualify all target names, then create
unqualified target names which depend on all qualified targets which
should have had that name. This is more or less what the 'make' generator
does with aliases. However, one goal of this generator is to create CMake
files for use with IDEs, and fully qualified names are not as user
friendly.
Since target name collision is rare, we do the above only when required.
Toolset variants are always qualified from the base, as this is required for
building. However, it also makes sense for an IDE, as it is possible for
defines to be different.
"""
def __init__(self, target_list):
    self.cmake_target_base_names_conflicting = set()
cmake_target_base_names_seen = set()
for qualified_target in target_list:
cmake_target_base_name = CreateCMakeTargetBaseName(qualified_target)
if cmake_target_base_name not in cmake_target_base_names_seen:
cmake_target_base_names_seen.add(cmake_target_base_name)
else:
        self.cmake_target_base_names_conflicting.add(cmake_target_base_name)
def CreateCMakeTargetName(self, qualified_target):
base_name = CreateCMakeTargetBaseName(qualified_target)
    if base_name in self.cmake_target_base_names_conflicting:
return CreateCMakeTargetFullName(qualified_target)
return base_name
def WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
options, generator_flags, all_qualified_targets, output):
# The make generator does this always.
# TODO: It would be nice to be able to tell CMake all dependencies.
circular_libs = generator_flags.get('circular', True)
if not generator_flags.get('standalone', False):
output.write('\n#')
output.write(qualified_target)
output.write('\n')
gyp_file, _, _ = gyp.common.ParseQualifiedTarget(qualified_target)
rel_gyp_file = gyp.common.RelativePath(gyp_file, options.toplevel_dir)
rel_gyp_dir = os.path.dirname(rel_gyp_file)
# Relative path from build dir to top dir.
build_to_top = gyp.common.InvertRelativePath(build_dir, options.toplevel_dir)
# Relative path from build dir to gyp dir.
build_to_gyp = os.path.join(build_to_top, rel_gyp_dir)
path_from_cmakelists_to_gyp = build_to_gyp
spec = target_dicts.get(qualified_target, {})
config = spec.get('configurations', {}).get(config_to_use, {})
target_name = spec.get('target_name', '<missing target name>')
target_type = spec.get('type', '<missing target type>')
target_toolset = spec.get('toolset')
cmake_target_type = cmake_target_type_from_gyp_target_type.get(target_type)
if cmake_target_type is None:
    print('Target %s has unknown target type %s, skipping.' %
          (target_name, target_type))
return
SetVariable(output, 'TARGET', target_name)
SetVariable(output, 'TOOLSET', target_toolset)
cmake_target_name = namer.CreateCMakeTargetName(qualified_target)
extra_sources = []
extra_deps = []
# Actions must come first, since they can generate more OBJs for use below.
if 'actions' in spec:
WriteActions(cmake_target_name, spec['actions'], extra_sources, extra_deps,
path_from_cmakelists_to_gyp, output)
# Rules must be early like actions.
if 'rules' in spec:
WriteRules(cmake_target_name, spec['rules'], extra_sources, extra_deps,
path_from_cmakelists_to_gyp, output)
# Copies
if 'copies' in spec:
WriteCopies(cmake_target_name, spec['copies'], extra_deps,
path_from_cmakelists_to_gyp, output)
# Target and sources
srcs = spec.get('sources', [])
# Gyp separates the sheep from the goats based on file extensions.
# A full separation is done here because of flag handing (see below).
s_sources = []
c_sources = []
cxx_sources = []
linkable_sources = []
other_sources = []
for src in srcs:
_, ext = os.path.splitext(src)
src_type = COMPILABLE_EXTENSIONS.get(ext, None)
    src_norm_path = NormjoinPath(path_from_cmakelists_to_gyp, src)
if src_type == 's':
s_sources.append(src_norm_path)
elif src_type == 'cc':
c_sources.append(src_norm_path)
elif src_type == 'cxx':
cxx_sources.append(src_norm_path)
elif Linkable(ext):
linkable_sources.append(src_norm_path)
else:
other_sources.append(src_norm_path)
for extra_source in extra_sources:
src, real_source = extra_source
_, ext = os.path.splitext(real_source)
src_type = COMPILABLE_EXTENSIONS.get(ext, None)
if src_type == 's':
s_sources.append(src)
elif src_type == 'cc':
c_sources.append(src)
elif src_type == 'cxx':
cxx_sources.append(src)
elif Linkable(ext):
linkable_sources.append(src)
else:
other_sources.append(src)
s_sources_name = None
if s_sources:
s_sources_name = cmake_target_name + '__asm_srcs'
SetVariableList(output, s_sources_name, s_sources)
c_sources_name = None
if c_sources:
c_sources_name = cmake_target_name + '__c_srcs'
SetVariableList(output, c_sources_name, c_sources)
cxx_sources_name = None
if cxx_sources:
cxx_sources_name = cmake_target_name + '__cxx_srcs'
SetVariableList(output, cxx_sources_name, cxx_sources)
linkable_sources_name = None
if linkable_sources:
linkable_sources_name = cmake_target_name + '__linkable_srcs'
SetVariableList(output, linkable_sources_name, linkable_sources)
other_sources_name = None
if other_sources:
other_sources_name = cmake_target_name + '__other_srcs'
SetVariableList(output, other_sources_name, other_sources)
# CMake gets upset when executable targets provide no sources.
# http://www.cmake.org/pipermail/cmake/2010-July/038461.html
dummy_sources_name = None
has_sources = (s_sources_name or
c_sources_name or
cxx_sources_name or
linkable_sources_name or
other_sources_name)
if target_type == 'executable' and not has_sources:
dummy_sources_name = cmake_target_name + '__dummy_srcs'
SetVariable(output, dummy_sources_name,
"${obj}.${TOOLSET}/${TARGET}/genc/dummy.c")
output.write('if(NOT EXISTS "')
WriteVariable(output, dummy_sources_name)
output.write('")\n')
output.write(' file(WRITE "')
WriteVariable(output, dummy_sources_name)
output.write('" "")\n')
output.write("endif()\n")
# CMake is opposed to setting linker directories and considers the practice
# of setting linker directories dangerous. Instead, it favors the use of
# find_library and passing absolute paths to target_link_libraries.
# However, CMake does provide the command link_directories, which adds
# link directories to targets defined after it is called.
# As a result, link_directories must come before the target definition.
# CMake unfortunately has no means of removing entries from LINK_DIRECTORIES.
library_dirs = config.get('library_dirs')
if library_dirs is not None:
output.write('link_directories(')
for library_dir in library_dirs:
output.write(' ')
output.write(NormjoinPath(path_from_cmakelists_to_gyp, library_dir))
output.write('\n')
output.write(')\n')
output.write(cmake_target_type.command)
output.write('(')
output.write(cmake_target_name)
if cmake_target_type.modifier is not None:
output.write(' ')
output.write(cmake_target_type.modifier)
if s_sources_name:
WriteVariable(output, s_sources_name, ' ')
if c_sources_name:
WriteVariable(output, c_sources_name, ' ')
if cxx_sources_name:
WriteVariable(output, cxx_sources_name, ' ')
if linkable_sources_name:
WriteVariable(output, linkable_sources_name, ' ')
if other_sources_name:
WriteVariable(output, other_sources_name, ' ')
if dummy_sources_name:
WriteVariable(output, dummy_sources_name, ' ')
output.write(')\n')
# Let CMake know if the 'all' target should depend on this target.
exclude_from_all = ('TRUE' if qualified_target not in all_qualified_targets
else 'FALSE')
SetTargetProperty(output, cmake_target_name,
'EXCLUDE_FROM_ALL', exclude_from_all)
for extra_target_name in extra_deps:
SetTargetProperty(output, extra_target_name,
'EXCLUDE_FROM_ALL', exclude_from_all)
# Output name and location.
if target_type != 'none':
# Link as 'C' if there are no other files
if not c_sources and not cxx_sources:
SetTargetProperty(output, cmake_target_name, 'LINKER_LANGUAGE', ['C'])
# Mark uncompiled sources as uncompiled.
if other_sources_name:
output.write('set_source_files_properties(')
WriteVariable(output, other_sources_name, '')
output.write(' PROPERTIES HEADER_FILE_ONLY "TRUE")\n')
# Mark object sources as linkable.
if linkable_sources_name:
output.write('set_source_files_properties(')
      WriteVariable(output, linkable_sources_name, '')
output.write(' PROPERTIES EXTERNAL_OBJECT "TRUE")\n')
# Output directory
target_output_directory = spec.get('product_dir')
if target_output_directory is None:
if target_type in ('executable', 'loadable_module'):
target_output_directory = generator_default_variables['PRODUCT_DIR']
elif target_type == 'shared_library':
target_output_directory = '${builddir}/lib.${TOOLSET}'
elif spec.get('standalone_static_library', False):
target_output_directory = generator_default_variables['PRODUCT_DIR']
else:
base_path = gyp.common.RelativePath(os.path.dirname(gyp_file),
options.toplevel_dir)
target_output_directory = '${obj}.${TOOLSET}'
target_output_directory = (
os.path.join(target_output_directory, base_path))
cmake_target_output_directory = NormjoinPathForceCMakeSource(
path_from_cmakelists_to_gyp,
target_output_directory)
SetTargetProperty(output,
cmake_target_name,
cmake_target_type.property_modifier + '_OUTPUT_DIRECTORY',
cmake_target_output_directory)
# Output name
default_product_prefix = ''
default_product_name = target_name
default_product_ext = ''
if target_type == 'static_library':
static_library_prefix = generator_default_variables['STATIC_LIB_PREFIX']
default_product_name = RemovePrefix(default_product_name,
static_library_prefix)
default_product_prefix = static_library_prefix
default_product_ext = generator_default_variables['STATIC_LIB_SUFFIX']
elif target_type in ('loadable_module', 'shared_library'):
shared_library_prefix = generator_default_variables['SHARED_LIB_PREFIX']
default_product_name = RemovePrefix(default_product_name,
shared_library_prefix)
default_product_prefix = shared_library_prefix
default_product_ext = generator_default_variables['SHARED_LIB_SUFFIX']
elif target_type != 'executable':
    print('ERROR: What output file should be generated? '
          'type %s target %s' % (target_type, target_name))
product_prefix = spec.get('product_prefix', default_product_prefix)
product_name = spec.get('product_name', default_product_name)
product_ext = spec.get('product_extension')
if product_ext:
product_ext = '.' + product_ext
else:
product_ext = default_product_ext
SetTargetProperty(output, cmake_target_name, 'PREFIX', product_prefix)
SetTargetProperty(output, cmake_target_name,
cmake_target_type.property_modifier + '_OUTPUT_NAME',
product_name)
SetTargetProperty(output, cmake_target_name, 'SUFFIX', product_ext)
# Make the output of this target referenceable as a source.
cmake_target_output_basename = product_prefix + product_name + product_ext
cmake_target_output = os.path.join(cmake_target_output_directory,
cmake_target_output_basename)
SetFileProperty(output, cmake_target_output, 'GENERATED', ['TRUE'], '')
# Includes
includes = config.get('include_dirs')
if includes:
# This (target include directories) is what requires CMake 2.8.8
includes_name = cmake_target_name + '__include_dirs'
SetVariableList(output, includes_name,
[NormjoinPathForceCMakeSource(path_from_cmakelists_to_gyp, include)
for include in includes])
output.write('set_property(TARGET ')
output.write(cmake_target_name)
output.write(' APPEND PROPERTY INCLUDE_DIRECTORIES ')
WriteVariable(output, includes_name, '')
output.write(')\n')
# Defines
defines = config.get('defines')
if defines is not None:
SetTargetProperty(output,
cmake_target_name,
'COMPILE_DEFINITIONS',
defines,
';')
# Compile Flags - http://www.cmake.org/Bug/view.php?id=6493
# CMake currently does not have target C and CXX flags.
# So, instead of doing...
# cflags_c = config.get('cflags_c')
# if cflags_c is not None:
# SetTargetProperty(output, cmake_target_name,
# 'C_COMPILE_FLAGS', cflags_c, ' ')
# cflags_cc = config.get('cflags_cc')
# if cflags_cc is not None:
# SetTargetProperty(output, cmake_target_name,
# 'CXX_COMPILE_FLAGS', cflags_cc, ' ')
# Instead we must...
cflags = config.get('cflags', [])
cflags_c = config.get('cflags_c', [])
cflags_cxx = config.get('cflags_cc', [])
if (not cflags_c or not c_sources) and (not cflags_cxx or not cxx_sources):
SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', cflags, ' ')
elif c_sources and not (s_sources or cxx_sources):
flags = []
flags.extend(cflags)
flags.extend(cflags_c)
SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
elif cxx_sources and not (s_sources or c_sources):
flags = []
flags.extend(cflags)
flags.extend(cflags_cxx)
SetTargetProperty(output, cmake_target_name, 'COMPILE_FLAGS', flags, ' ')
else:
# TODO: This is broken, one cannot generally set properties on files,
# as other targets may require different properties on the same files.
if s_sources and cflags:
SetFilesProperty(output, s_sources_name, 'COMPILE_FLAGS', cflags, ' ')
if c_sources and (cflags or cflags_c):
flags = []
flags.extend(cflags)
flags.extend(cflags_c)
SetFilesProperty(output, c_sources_name, 'COMPILE_FLAGS', flags, ' ')
if cxx_sources and (cflags or cflags_cxx):
flags = []
flags.extend(cflags)
flags.extend(cflags_cxx)
SetFilesProperty(output, cxx_sources_name, 'COMPILE_FLAGS', flags, ' ')
# Linker flags
ldflags = config.get('ldflags')
if ldflags is not None:
SetTargetProperty(output, cmake_target_name, 'LINK_FLAGS', ldflags, ' ')
# Note on Dependencies and Libraries:
# CMake wants to handle link order, resolving the link line up front.
# Gyp does not retain or enforce specifying enough information to do so.
# So do as other gyp generators and use --start-group and --end-group.
# Give CMake as little information as possible so that it doesn't mess it up.
# Dependencies
rawDeps = spec.get('dependencies', [])
static_deps = []
shared_deps = []
other_deps = []
for rawDep in rawDeps:
dep_cmake_name = namer.CreateCMakeTargetName(rawDep)
dep_spec = target_dicts.get(rawDep, {})
dep_target_type = dep_spec.get('type', None)
if dep_target_type == 'static_library':
static_deps.append(dep_cmake_name)
elif dep_target_type == 'shared_library':
shared_deps.append(dep_cmake_name)
else:
other_deps.append(dep_cmake_name)
# ensure all external dependencies are complete before internal dependencies
# extra_deps currently only depend on their own deps, so otherwise run early
if static_deps or shared_deps or other_deps:
for extra_dep in extra_deps:
output.write('add_dependencies(')
output.write(extra_dep)
output.write('\n')
for deps in (static_deps, shared_deps, other_deps):
for dep in gyp.common.uniquer(deps):
output.write(' ')
output.write(dep)
output.write('\n')
output.write(')\n')
linkable = target_type in ('executable', 'loadable_module', 'shared_library')
other_deps.extend(extra_deps)
if other_deps or (not linkable and (static_deps or shared_deps)):
output.write('add_dependencies(')
output.write(cmake_target_name)
output.write('\n')
for dep in gyp.common.uniquer(other_deps):
output.write(' ')
output.write(dep)
output.write('\n')
if not linkable:
for deps in (static_deps, shared_deps):
for lib_dep in gyp.common.uniquer(deps):
output.write(' ')
output.write(lib_dep)
output.write('\n')
output.write(')\n')
# Libraries
if linkable:
external_libs = [lib for lib in spec.get('libraries', []) if len(lib) > 0]
if external_libs or static_deps or shared_deps:
output.write('target_link_libraries(')
output.write(cmake_target_name)
output.write('\n')
if static_deps:
write_group = circular_libs and len(static_deps) > 1
if write_group:
output.write('-Wl,--start-group\n')
for dep in gyp.common.uniquer(static_deps):
output.write(' ')
output.write(dep)
output.write('\n')
if write_group:
output.write('-Wl,--end-group\n')
if shared_deps:
for dep in gyp.common.uniquer(shared_deps):
output.write(' ')
output.write(dep)
output.write('\n')
if external_libs:
for lib in gyp.common.uniquer(external_libs):
output.write(' ')
output.write(lib)
output.write('\n')
output.write(')\n')
UnsetVariable(output, 'TOOLSET')
UnsetVariable(output, 'TARGET')
def GenerateOutputForConfig(target_list, target_dicts, data,
params, config_to_use):
options = params['options']
generator_flags = params['generator_flags']
# generator_dir: relative path from pwd to where make puts build files.
  # Makes migrating from make to cmake easier; cmake doesn't put anything here.
# Each Gyp configuration creates a different CMakeLists.txt file
# to avoid incompatibilities between Gyp and CMake configurations.
generator_dir = os.path.relpath(options.generator_output or '.')
# output_dir: relative path from generator_dir to the build directory.
output_dir = generator_flags.get('output_dir', 'out')
# build_dir: relative path from source root to our output files.
# e.g. "out/Debug"
build_dir = os.path.normpath(os.path.join(generator_dir,
output_dir,
config_to_use))
toplevel_build = os.path.join(options.toplevel_dir, build_dir)
output_file = os.path.join(toplevel_build, 'CMakeLists.txt')
gyp.common.EnsureDirExists(output_file)
output = open(output_file, 'w')
output.write('cmake_minimum_required(VERSION 2.8.8 FATAL_ERROR)\n')
output.write('cmake_policy(VERSION 2.8.8)\n')
gyp_file, project_target, _ = gyp.common.ParseQualifiedTarget(target_list[-1])
output.write('project(')
output.write(project_target)
output.write(')\n')
SetVariable(output, 'configuration', config_to_use)
ar = None
cc = None
cxx = None
make_global_settings = data[gyp_file].get('make_global_settings', [])
build_to_top = gyp.common.InvertRelativePath(build_dir,
options.toplevel_dir)
for key, value in make_global_settings:
if key == 'AR':
ar = os.path.join(build_to_top, value)
if key == 'CC':
cc = os.path.join(build_to_top, value)
if key == 'CXX':
cxx = os.path.join(build_to_top, value)
ar = gyp.common.GetEnvironFallback(['AR_target', 'AR'], ar)
cc = gyp.common.GetEnvironFallback(['CC_target', 'CC'], cc)
cxx = gyp.common.GetEnvironFallback(['CXX_target', 'CXX'], cxx)
if ar:
SetVariable(output, 'CMAKE_AR', ar)
if cc:
SetVariable(output, 'CMAKE_C_COMPILER', cc)
if cxx:
SetVariable(output, 'CMAKE_CXX_COMPILER', cxx)
# The following appears to be as-yet undocumented.
# http://public.kitware.com/Bug/view.php?id=8392
output.write('enable_language(ASM)\n')
# ASM-ATT does not support .S files.
# output.write('enable_language(ASM-ATT)\n')
if cc:
SetVariable(output, 'CMAKE_ASM_COMPILER', cc)
SetVariable(output, 'builddir', '${CMAKE_CURRENT_BINARY_DIR}')
SetVariable(output, 'obj', '${builddir}/obj')
output.write('\n')
# TODO: Undocumented/unsupported (the CMake Java generator depends on it).
# CMake by default names the object resulting from foo.c to be foo.c.o.
# Gyp traditionally names the object resulting from foo.c foo.o.
# This should be irrelevant, but some targets extract .o files from .a
# and depend on the name of the extracted .o files.
output.write('set(CMAKE_C_OUTPUT_EXTENSION_REPLACE 1)\n')
output.write('set(CMAKE_CXX_OUTPUT_EXTENSION_REPLACE 1)\n')
output.write('\n')
# Force ninja to use rsp files. Otherwise link and ar lines can get too long,
# resulting in 'Argument list too long' errors.
output.write('set(CMAKE_NINJA_FORCE_RESPONSE_FILE 1)\n')
output.write('\n')
namer = CMakeNamer(target_list)
# The list of targets upon which the 'all' target should depend.
  # CMake has its own implicit 'all' target; one is not created explicitly.
all_qualified_targets = set()
for build_file in params['build_files']:
for qualified_target in gyp.common.AllTargets(target_list,
target_dicts,
os.path.normpath(build_file)):
all_qualified_targets.add(qualified_target)
for qualified_target in target_list:
WriteTarget(namer, qualified_target, target_dicts, build_dir, config_to_use,
options, generator_flags, all_qualified_targets, output)
output.close()
def PerformBuild(data, configurations, params):
options = params['options']
generator_flags = params['generator_flags']
# generator_dir: relative path from pwd to where make puts build files.
# Makes migrating from make to cmake easier, cmake doesn't put anything here.
generator_dir = os.path.relpath(options.generator_output or '.')
# output_dir: relative path from generator_dir to the build directory.
output_dir = generator_flags.get('output_dir', 'out')
for config_name in configurations:
# build_dir: relative path from source root to our output files.
# e.g. "out/Debug"
build_dir = os.path.normpath(os.path.join(generator_dir,
output_dir,
config_name))
arguments = ['cmake', '-G', 'Ninja']
print 'Generating [%s]: %s' % (config_name, arguments)
subprocess.check_call(arguments, cwd=build_dir)
arguments = ['ninja', '-C', build_dir]
print 'Building [%s]: %s' % (config_name, arguments)
subprocess.check_call(arguments)
def CallGenerateOutputForConfig(arglist):
# Ignore the interrupt signal so that the parent process catches it and
# kills all multiprocessing children.
signal.signal(signal.SIGINT, signal.SIG_IGN)
target_list, target_dicts, data, params, config_name = arglist
GenerateOutputForConfig(target_list, target_dicts, data, params, config_name)
def GenerateOutput(target_list, target_dicts, data, params):
user_config = params.get('generator_flags', {}).get('config', None)
if user_config:
GenerateOutputForConfig(target_list, target_dicts, data,
params, user_config)
else:
config_names = target_dicts[target_list[0]]['configurations'].keys()
if params['parallel']:
try:
pool = multiprocessing.Pool(len(config_names))
arglists = []
for config_name in config_names:
arglists.append((target_list, target_dicts, data,
params, config_name))
pool.map(CallGenerateOutputForConfig, arglists)
except KeyboardInterrupt, e:
pool.terminate()
raise e
else:
for config_name in config_names:
GenerateOutputForConfig(target_list, target_dicts, data,
params, config_name)
|
apache-2.0
|
Alwnikrotikz/pyglet
|
experimental/input/dinput.py
|
28
|
15854
|
#!/usr/bin/python
# $Id:$
import ctypes
import pyglet
from pyglet import com
from pyglet.window.win32 import _kernel32
lib = ctypes.oledll.dinput8
LPVOID = ctypes.c_void_p
WORD = ctypes.c_uint16
DWORD = ctypes.c_uint32
LPDWORD = ctypes.POINTER(DWORD)
BOOL = ctypes.c_int
WCHAR = ctypes.c_wchar
UINT = ctypes.c_uint
HWND = ctypes.c_uint32
MAX_PATH = 260
DIENUM_STOP = 0
DIENUM_CONTINUE = 1
DIEDFL_ALLDEVICES = 0x00000000
DIEDFL_ATTACHEDONLY = 0x00000001
DIEDFL_FORCEFEEDBACK = 0x00000100
DIEDFL_INCLUDEALIASES = 0x00010000
DIEDFL_INCLUDEPHANTOMS = 0x00020000
DIEDFL_INCLUDEHIDDEN = 0x00040000
DI8DEVCLASS_ALL = 0
DI8DEVCLASS_DEVICE = 1
DI8DEVCLASS_POINTER = 2
DI8DEVCLASS_KEYBOARD = 3
DI8DEVCLASS_GAMECTRL = 4
DI8DEVTYPE_DEVICE = 0x11
DI8DEVTYPE_MOUSE = 0x12
DI8DEVTYPE_KEYBOARD = 0x13
DI8DEVTYPE_JOYSTICK = 0x14
DI8DEVTYPE_GAMEPAD = 0x15
DI8DEVTYPE_DRIVING = 0x16
DI8DEVTYPE_FLIGHT = 0x17
DI8DEVTYPE_1STPERSON = 0x18
DI8DEVTYPE_DEVICECTRL = 0x19
DI8DEVTYPE_SCREENPOINTER = 0x1A
DI8DEVTYPE_REMOTE = 0x1B
DI8DEVTYPE_SUPPLEMENTAL = 0x1C
DI8DEVTYPEMOUSE_UNKNOWN = 1
DI8DEVTYPEMOUSE_TRADITIONAL = 2
DI8DEVTYPEMOUSE_FINGERSTICK = 3
DI8DEVTYPEMOUSE_TOUCHPAD = 4
DI8DEVTYPEMOUSE_TRACKBALL = 5
DI8DEVTYPEMOUSE_ABSOLUTE = 6
DI8DEVTYPEKEYBOARD_UNKNOWN = 0
DI8DEVTYPEKEYBOARD_PCXT = 1
DI8DEVTYPEKEYBOARD_OLIVETTI = 2
DI8DEVTYPEKEYBOARD_PCAT = 3
DI8DEVTYPEKEYBOARD_PCENH = 4
DI8DEVTYPEKEYBOARD_NOKIA1050 = 5
DI8DEVTYPEKEYBOARD_NOKIA9140 = 6
DI8DEVTYPEKEYBOARD_NEC98 = 7
DI8DEVTYPEKEYBOARD_NEC98LAPTOP = 8
DI8DEVTYPEKEYBOARD_NEC98106 = 9
DI8DEVTYPEKEYBOARD_JAPAN106 = 10
DI8DEVTYPEKEYBOARD_JAPANAX = 11
DI8DEVTYPEKEYBOARD_J3100 = 12
DI8DEVTYPE_LIMITEDGAMESUBTYPE = 1
DI8DEVTYPEJOYSTICK_LIMITED = DI8DEVTYPE_LIMITEDGAMESUBTYPE
DI8DEVTYPEJOYSTICK_STANDARD = 2
DI8DEVTYPEGAMEPAD_LIMITED = DI8DEVTYPE_LIMITEDGAMESUBTYPE
DI8DEVTYPEGAMEPAD_STANDARD = 2
DI8DEVTYPEGAMEPAD_TILT = 3
DI8DEVTYPEDRIVING_LIMITED = DI8DEVTYPE_LIMITEDGAMESUBTYPE
DI8DEVTYPEDRIVING_COMBINEDPEDALS = 2
DI8DEVTYPEDRIVING_DUALPEDALS = 3
DI8DEVTYPEDRIVING_THREEPEDALS = 4
DI8DEVTYPEDRIVING_HANDHELD = 5
DI8DEVTYPEFLIGHT_LIMITED = DI8DEVTYPE_LIMITEDGAMESUBTYPE
DI8DEVTYPEFLIGHT_STICK = 2
DI8DEVTYPEFLIGHT_YOKE = 3
DI8DEVTYPEFLIGHT_RC = 4
DI8DEVTYPE1STPERSON_LIMITED = DI8DEVTYPE_LIMITEDGAMESUBTYPE
DI8DEVTYPE1STPERSON_UNKNOWN = 2
DI8DEVTYPE1STPERSON_SIXDOF = 3
DI8DEVTYPE1STPERSON_SHOOTER = 4
DI8DEVTYPESCREENPTR_UNKNOWN = 2
DI8DEVTYPESCREENPTR_LIGHTGUN = 3
DI8DEVTYPESCREENPTR_LIGHTPEN = 4
DI8DEVTYPESCREENPTR_TOUCH = 5
DI8DEVTYPEREMOTE_UNKNOWN = 2
DI8DEVTYPEDEVICECTRL_UNKNOWN = 2
DI8DEVTYPEDEVICECTRL_COMMSSELECTION = 3
DI8DEVTYPEDEVICECTRL_COMMSSELECTION_HARDWIRED = 4
DI8DEVTYPESUPPLEMENTAL_UNKNOWN = 2
DI8DEVTYPESUPPLEMENTAL_2NDHANDCONTROLLER = 3
DI8DEVTYPESUPPLEMENTAL_HEADTRACKER = 4
DI8DEVTYPESUPPLEMENTAL_HANDTRACKER = 5
DI8DEVTYPESUPPLEMENTAL_SHIFTSTICKGATE = 6
DI8DEVTYPESUPPLEMENTAL_SHIFTER = 7
DI8DEVTYPESUPPLEMENTAL_THROTTLE = 8
DI8DEVTYPESUPPLEMENTAL_SPLITTHROTTLE = 9
DI8DEVTYPESUPPLEMENTAL_COMBINEDPEDALS = 10
DI8DEVTYPESUPPLEMENTAL_DUALPEDALS = 11
DI8DEVTYPESUPPLEMENTAL_THREEPEDALS = 12
DI8DEVTYPESUPPLEMENTAL_RUDDERPEDALS = 13
DIDC_ATTACHED = 0x00000001
DIDC_POLLEDDEVICE = 0x00000002
DIDC_EMULATED = 0x00000004
DIDC_POLLEDDATAFORMAT = 0x00000008
DIDC_FORCEFEEDBACK = 0x00000100
DIDC_FFATTACK = 0x00000200
DIDC_FFFADE = 0x00000400
DIDC_SATURATION = 0x00000800
DIDC_POSNEGCOEFFICIENTS = 0x00001000
DIDC_POSNEGSATURATION = 0x00002000
DIDC_DEADBAND = 0x00004000
DIDC_STARTDELAY = 0x00008000
DIDC_ALIAS = 0x00010000
DIDC_PHANTOM = 0x00020000
DIDC_HIDDEN = 0x00040000
DIDFT_ALL = 0x00000000
DIDFT_RELAXIS = 0x00000001
DIDFT_ABSAXIS = 0x00000002
DIDFT_AXIS = 0x00000003
DIDFT_PSHBUTTON = 0x00000004
DIDFT_TGLBUTTON = 0x00000008
DIDFT_BUTTON = 0x0000000C
DIDFT_POV = 0x00000010
DIDFT_COLLECTION = 0x00000040
DIDFT_NODATA = 0x00000080
DIDFT_ANYINSTANCE = 0x00FFFF00
DIDFT_INSTANCEMASK = DIDFT_ANYINSTANCE
DIDFT_FFACTUATOR = 0x01000000
DIDFT_FFEFFECTTRIGGER = 0x02000000
DIDFT_OUTPUT = 0x10000000
DIDFT_VENDORDEFINED = 0x04000000
DIDFT_ALIAS = 0x08000000
DIDFT_OPTIONAL = 0x80000000
DIDFT_NOCOLLECTION = 0x00FFFF00
DIA_FORCEFEEDBACK = 0x00000001
DIA_APPMAPPED = 0x00000002
DIA_APPNOMAP = 0x00000004
DIA_NORANGE = 0x00000008
DIA_APPFIXED = 0x00000010
DIAH_UNMAPPED = 0x00000000
DIAH_USERCONFIG = 0x00000001
DIAH_APPREQUESTED = 0x00000002
DIAH_HWAPP = 0x00000004
DIAH_HWDEFAULT = 0x00000008
DIAH_DEFAULT = 0x00000020
DIAH_ERROR = 0x80000000
DIAFTS_NEWDEVICELOW = 0xFFFFFFFF
DIAFTS_NEWDEVICEHIGH = 0xFFFFFFFF
DIAFTS_UNUSEDDEVICELOW = 0x00000000
DIAFTS_UNUSEDDEVICEHIGH = 0x00000000
DIDBAM_DEFAULT = 0x00000000
DIDBAM_PRESERVE = 0x00000001
DIDBAM_INITIALIZE = 0x00000002
DIDBAM_HWDEFAULTS = 0x00000004
DIDSAM_DEFAULT = 0x00000000
DIDSAM_NOUSER = 0x00000001
DIDSAM_FORCESAVE = 0x00000002
DICD_DEFAULT = 0x00000000
DICD_EDIT = 0x00000001
DIDOI_FFACTUATOR = 0x00000001
DIDOI_FFEFFECTTRIGGER = 0x00000002
DIDOI_POLLED = 0x00008000
DIDOI_ASPECTPOSITION = 0x00000100
DIDOI_ASPECTVELOCITY = 0x00000200
DIDOI_ASPECTACCEL = 0x00000300
DIDOI_ASPECTFORCE = 0x00000400
DIDOI_ASPECTMASK = 0x00000F00
DIDOI_GUIDISUSAGE = 0x00010000
DIPH_DEVICE = 0
DIPH_BYOFFSET = 1
DIPH_BYID = 2
DIPH_BYUSAGE = 3
DISCL_EXCLUSIVE = 0x00000001
DISCL_NONEXCLUSIVE = 0x00000002
DISCL_FOREGROUND = 0x00000004
DISCL_BACKGROUND = 0x00000008
DISCL_NOWINKEY = 0x00000010
DIPROP_BUFFERSIZE = 1
class DIDEVICEINSTANCE(ctypes.Structure):
_fields_ = (
('dwSize', DWORD),
('guidInstance', com.GUID),
('guidProduct', com.GUID),
('dwDevType', DWORD),
('tszInstanceName', WCHAR * MAX_PATH),
('tszProductName', WCHAR * MAX_PATH),
('guidFFDriver', com.GUID),
('wUsagePage', WORD),
('wUsage', WORD)
)
LPDIDEVICEINSTANCE = ctypes.POINTER(DIDEVICEINSTANCE)
LPDIENUMDEVICESCALLBACK = ctypes.WINFUNCTYPE(BOOL, LPDIDEVICEINSTANCE, LPVOID)
class DIDEVICEOBJECTINSTANCE(ctypes.Structure):
_fields_ = (
('dwSize', DWORD),
('guidType', com.GUID),
('dwOfs', DWORD),
('dwType', DWORD),
('dwFlags', DWORD),
('tszName', WCHAR * MAX_PATH),
('dwFFMaxForce', DWORD),
('dwFFForceResolution', DWORD),
('wCollectionNumber', WORD),
('wDesignatorIndex', WORD),
('wUsagePage', WORD),
('wUsage', WORD),
('dwDimension', DWORD),
('wExponent', WORD),
('wReportId', WORD)
)
LPDIDEVICEOBJECTINSTANCE = ctypes.POINTER(DIDEVICEOBJECTINSTANCE)
LPDIENUMDEVICEOBJECTSCALLBACK = \
ctypes.WINFUNCTYPE( BOOL, LPDIDEVICEOBJECTINSTANCE, LPVOID)
class DIOBJECTDATAFORMAT(ctypes.Structure):
_fields_ = (
('pguid', ctypes.POINTER(com.GUID)),
('dwOfs', DWORD),
('dwType', DWORD),
('dwFlags', DWORD)
)
__slots__ = [n for n, t in _fields_]
LPDIOBJECTDATAFORMAT = ctypes.POINTER(DIOBJECTDATAFORMAT)
class DIDATAFORMAT(ctypes.Structure):
_fields_ = (
('dwSize', DWORD),
('dwObjSize', DWORD),
('dwFlags', DWORD),
('dwDataSize', DWORD),
('dwNumObjs', DWORD),
('rgodf', LPDIOBJECTDATAFORMAT)
)
__slots__ = [n for n, t in _fields_]
LPDIDATAFORMAT = ctypes.POINTER(DIDATAFORMAT)
class DIDEVICEOBJECTDATA(ctypes.Structure):
_fields_ = (
('dwOfs', DWORD),
('dwData', DWORD),
('dwTimeStamp', DWORD),
('dwSequence', DWORD),
('uAppData', ctypes.POINTER(UINT))
)
LPDIDEVICEOBJECTDATA = ctypes.POINTER(DIDEVICEOBJECTDATA)
class DIPROPHEADER(ctypes.Structure):
_fields_ = (
('dwSize', DWORD),
('dwHeaderSize', DWORD),
('dwObj', DWORD),
('dwHow', DWORD)
)
LPDIPROPHEADER = ctypes.POINTER(DIPROPHEADER)
class DIPROPDWORD(ctypes.Structure):
_fields_ = (
('diph', DIPROPHEADER),
('dwData', DWORD)
)
# All method names in the interfaces are filled in, but unused (so far)
# methods have no parameters; they'll crash when we try to use them, at
# which point we can go in and fill them in.
# IDirect* interfaces are all Unicode (e.g. IDirectInputDevice8W).
class IDirectInputDevice8(com.IUnknown):
_methods_ = [
('GetCapabilities',
com.STDMETHOD()),
('EnumObjects',
com.STDMETHOD(LPDIENUMDEVICEOBJECTSCALLBACK, LPVOID, DWORD)),
('GetProperty',
com.STDMETHOD()),
('SetProperty',
com.STDMETHOD(LPVOID, LPDIPROPHEADER)),
('Acquire',
com.STDMETHOD()),
('Unacquire',
com.STDMETHOD()),
('GetDeviceState',
com.STDMETHOD()),
('GetDeviceData',
com.STDMETHOD(DWORD, LPDIDEVICEOBJECTDATA, LPDWORD, DWORD)),
('SetDataFormat',
com.STDMETHOD(LPDIDATAFORMAT)),
('SetEventNotification',
com.STDMETHOD()),
('SetCooperativeLevel',
com.STDMETHOD(HWND, DWORD)),
('GetObjectInfo',
com.STDMETHOD()),
('GetDeviceInfo',
com.STDMETHOD()),
('RunControlPanel',
com.STDMETHOD()),
('Initialize',
com.STDMETHOD()),
('CreateEffect',
com.STDMETHOD()),
('EnumEffects',
com.STDMETHOD()),
('GetEffectInfo',
com.STDMETHOD()),
('GetForceFeedbackState',
com.STDMETHOD()),
('SendForceFeedbackCommand',
com.STDMETHOD()),
('EnumCreatedEffectObjects',
com.STDMETHOD()),
('Escape',
com.STDMETHOD()),
('Poll',
com.STDMETHOD()),
('SendDeviceData',
com.STDMETHOD()),
('EnumEffectsInFile',
com.STDMETHOD()),
('WriteEffectToFile',
com.STDMETHOD()),
('BuildActionMap',
com.STDMETHOD()),
('SetActionMap',
com.STDMETHOD()),
('GetImageInfo',
com.STDMETHOD()),
]
class IDirectInput8(com.IUnknown):
_methods_ = [
('CreateDevice',
com.STDMETHOD(ctypes.POINTER(com.GUID),
ctypes.POINTER(IDirectInputDevice8),
ctypes.c_void_p)),
('EnumDevices',
com.STDMETHOD(DWORD, LPDIENUMDEVICESCALLBACK, LPVOID, DWORD)),
('GetDeviceStatus',
com.STDMETHOD()),
('RunControlPanel',
com.STDMETHOD()),
('Initialize',
com.STDMETHOD()),
('FindDevice',
com.STDMETHOD()),
('EnumDevicesBySemantics',
com.STDMETHOD()),
('ConfigureDevices',
com.STDMETHOD()),
]
IID_IDirectInput8W = \
com.GUID(0xBF798031,0x483A,0x4DA2,0xAA,0x99,0x5D,0x64,0xED,0x36,0x97,0x00)
lib.DirectInput8Create.argtypes = \
(ctypes.c_void_p, DWORD, com.LPGUID, ctypes.c_void_p, ctypes.c_void_p)
class Element(object):
value = None
def __init__(self, object_instance):
self.name = object_instance.tszName
self._flags = object_instance.dwFlags
self._guid = object_instance.guidType
self._type = object_instance.dwType
def get_value(self):
return self.value
class Device(object):
def __init__(self, device, device_instance):
self.name = device_instance.tszInstanceName
#print self.name, hex(device_instance.dwDevType & 0xff), \
# hex(device_instance.dwDevType & 0xff00)
#print hex(device_instance.wUsagePage), hex(device_instance.wUsage)
self._device = device
self._init_elements()
self._set_format()
def _init_elements(self):
self.elements = []
self._device.EnumObjects(
LPDIENUMDEVICEOBJECTSCALLBACK(self._object_enum), None, DIDFT_ALL)
def _object_enum(self, object_instance, arg):
type = object_instance.contents.dwType
flags = object_instance.contents.dwFlags
if type & DIDFT_NODATA:
return DIENUM_CONTINUE
element = Element(object_instance.contents)
self.elements.append(element)
return DIENUM_CONTINUE
def _set_format(self):
if not self.elements:
return
object_formats = (DIOBJECTDATAFORMAT * len(self.elements))()
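        # Each element gets a 4-byte (DWORD-sized) slot in the data format;
        # dispatch_events relies on this when it recovers the element index
        # as event.dwOfs // 4.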
offset = 0
for object_format, element in zip(object_formats, self.elements):
object_format.dwOfs = offset
object_format.dwType = element._type
offset += 4
format = DIDATAFORMAT()
format.dwSize = ctypes.sizeof(format)
format.dwObjSize = ctypes.sizeof(DIOBJECTDATAFORMAT)
format.dwFlags = 0
format.dwDataSize = offset
format.dwNumObjs = len(object_formats)
format.rgodf = ctypes.cast(ctypes.pointer(object_formats),
LPDIOBJECTDATAFORMAT)
self._device.SetDataFormat(format)
prop = DIPROPDWORD()
prop.diph.dwSize = ctypes.sizeof(prop)
prop.diph.dwHeaderSize = ctypes.sizeof(prop.diph)
prop.diph.dwObj = 0
prop.diph.dwHow = DIPH_DEVICE
prop.dwData = 64 * ctypes.sizeof(DIDATAFORMAT)
self._device.SetProperty(DIPROP_BUFFERSIZE, ctypes.byref(prop.diph))
def open(self, window=None):
if not self.elements:
return
if window is None:
# Pick any open window, or the shadow window if no windows
# have been created yet.
window = pyglet.gl._shadow_window
for window in pyglet.app.windows:
break
self._device.SetCooperativeLevel(window._hwnd,
DISCL_BACKGROUND | DISCL_NONEXCLUSIVE)
self._device.Acquire()
# XXX HACK
pyglet.clock.schedule(self.dispatch_events)
def close(self):
if not self.elements:
return
self._device.Unacquire()
# XXX HACK?
def dispatch_events(self, dt): # dt HACK
if not self.elements:
return
events = (DIDEVICEOBJECTDATA * 64)()
n_events = DWORD(len(events))
self._device.GetDeviceData(ctypes.sizeof(DIDEVICEOBJECTDATA),
ctypes.cast(ctypes.pointer(events),
LPDIDEVICEOBJECTDATA),
ctypes.byref(n_events),
0)
for event in events[:n_events.value]:
index = event.dwOfs // 4
self.elements[index].value = event.dwData
def _device_enum(device_instance, arg):
device = IDirectInputDevice8()
dinput.CreateDevice(device_instance.contents.guidInstance,
ctypes.byref(device),
None)
_devices.append(Device(device, device_instance.contents))
return DIENUM_CONTINUE
def get_devices():
global _devices
_devices = []
dinput.EnumDevices(DI8DEVCLASS_ALL, LPDIENUMDEVICESCALLBACK(_device_enum),
None, DIEDFL_ATTACHEDONLY)
return _devices
def _init_directinput():
global dinput
dinput = IDirectInput8()
module = _kernel32.GetModuleHandleW(None)
DIRECTINPUT_VERSION = 0x0800
lib.DirectInput8Create(module, DIRECTINPUT_VERSION,
IID_IDirectInput8W, ctypes.byref(dinput), None)
_init_directinput()
'''
#for device in get_devices():
device = get_devices()[0]
device.open()
print device.name
pyglet.app.run()
'''
|
bsd-3-clause
|
peg4d/peg4d-java
|
libnez/libnez.py
|
2
|
2876
|
class ParsingObject:
def __init__(self, pos):
self.oid = 0
self.start_pos = pos
self.end_pos = pos
self.tag = '#empty'
self.value = None
self.parent = None
self.child = None
class ParsingLog:
def __init__(self):
self.next = None
self.index = 0
self.childNode = None
class ParsingContext:
def __init__(self, inputs):
self.inputs = inputs
self.pos = 0
self.left = None
self.logStackSize = 0
self.logStack = None
self.unusedLog = None
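    # newLog/unuseLog maintain a free list of ParsingLog records in
    # self.unusedLog, so log entries are recycled rather than reallocated.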
def newLog(self):
if(self.unusedLog == None):
l = ParsingLog()
l.next = None
l.childNode = None
return l
l = self.unusedLog
self.unusedLog = l.next
l.next = None
return l
def unuseLog(self, log):
log.childNode = None
log.next = self.unusedLog
self.unusedLog = log
def Parsing_markLogStack(self):
return self.logStackSize
def lazyLink(self, parent, index, child):
l = self.newLog()
l.childNode = child
child.parent = parent
l.index = index
l.next = self.logStack
self.logStack = l
self.logStackSize += 1
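    # index == -9 marks a lazyJoin entry; commitLog treats it as a
    # recursive commit of the joined node (the "lazyCommit" case below).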
def lazyJoin(self, left):
l = self.newLog()
l.childNode = left
l.index = -9
l.next = self.logStack
self.logStack = l
self.logStackSize += 1
def commitLog(self, mark, newnode):
first = None
objectSize = 0
while(mark < self.logStackSize) :
cur = self.logStack
self.logStack = self.logStack.next
            self.logStackSize -= 1
if(cur.index == -9) : ## lazyCommit
self.commitLog(mark, cur.childNode)
self.unuseLog(cur)
break
if(cur.childNode.parent == newnode) :
cur.next = first
first = cur
objectSize += 1
else :
self.unuseLog(cur)
if(objectSize > 0) :
newnode.child = [None] * objectSize
newnode.child_size = objectSize
i = 0
while(i < objectSize) :
cur = first
first = first.next
if(cur.index == -1) :
cur.index = i
newnode.child[cur.index] = cur.childNode
self.unuseLog(cur)
i += 1
i = 0
while(i < objectSize) :
if(newnode.child[i] == None) :
newnode.child[i] = ParsingObject(0)
i += 1
def abortLog(self, mark):
while(mark < self.logStackSize) :
l = self.logStack
self.logStack = self.logStack.next
            self.logStackSize -= 1
            self.unuseLog(l)
|
bsd-2-clause
|
BadWolfAlfa/MyaKernel-Note-4
|
ToolChain/share/gdb/python/gdb/types.py
|
126
|
5421
|
# Type utilities.
# Copyright (C) 2010-2014 Free Software Foundation, Inc.
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Utilities for working with gdb.Types."""
import gdb
def get_basic_type(type_):
"""Return the "basic" type of a type.
Arguments:
type_: The type to reduce to its basic type.
Returns:
type_ with const/volatile is stripped away,
and typedefs/references converted to the underlying type.
"""
while (type_.code == gdb.TYPE_CODE_REF or
type_.code == gdb.TYPE_CODE_TYPEDEF):
if type_.code == gdb.TYPE_CODE_REF:
type_ = type_.target()
else:
type_ = type_.strip_typedefs()
return type_.unqualified()
def has_field(type_, field):
"""Return True if a type has the specified field.
Arguments:
type_: The type to examine.
It must be one of gdb.TYPE_CODE_STRUCT, gdb.TYPE_CODE_UNION.
field: The name of the field to look up.
Returns:
True if the field is present either in type_ or any baseclass.
Raises:
TypeError: The type is not a struct or union.
"""
type_ = get_basic_type(type_)
if (type_.code != gdb.TYPE_CODE_STRUCT and
type_.code != gdb.TYPE_CODE_UNION):
raise TypeError("not a struct or union")
for f in type_.fields():
if f.is_base_class:
if has_field(f.type, field):
return True
else:
# NOTE: f.name could be None
if f.name == field:
return True
return False
def make_enum_dict(enum_type):
"""Return a dictionary from a program's enum type.
Arguments:
enum_type: The enum to compute the dictionary for.
Returns:
The dictionary of the enum.
Raises:
TypeError: The type is not an enum.
"""
if enum_type.code != gdb.TYPE_CODE_ENUM:
raise TypeError("not an enum type")
enum_dict = {}
for field in enum_type.fields():
# The enum's value is stored in "enumval".
enum_dict[field.name] = field.enumval
return enum_dict
def deep_items (type_):
"""Return an iterator that recursively traverses anonymous fields.
Arguments:
type_: The type to traverse. It should be one of
gdb.TYPE_CODE_STRUCT or gdb.TYPE_CODE_UNION.
Returns:
an iterator similar to gdb.Type.iteritems(), i.e., it returns
pairs of key, value, but for any anonymous struct or union
field that field is traversed recursively, depth-first.
"""
for k, v in type_.iteritems ():
if k:
yield k, v
else:
for i in deep_items (v.type):
yield i
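# Example (hypothetical C type): given
#   struct S { int a; union { int b; float c; }; };
# type_.iteritems() yields 'a' plus an unnamed entry for the union, while
# deep_items(type_) also descends into the anonymous field and yields
# ('a', ...), ('b', ...) and ('c', ...).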
class TypePrinter(object):
"""The base class for type printers.
Instances of this type can be used to substitute type names during
'ptype'.
A type printer must have at least 'name' and 'enabled' attributes,
and supply an 'instantiate' method.
The 'instantiate' method must either return None, or return an
object which has a 'recognize' method. This method must accept a
gdb.Type argument and either return None, meaning that the type
was not recognized, or a string naming the type.
"""
def __init__(self, name):
self.name = name
self.enabled = True
def instantiate(self):
return None
# Helper function for computing the list of type recognizers.
def _get_some_type_recognizers(result, plist):
for printer in plist:
if printer.enabled:
inst = printer.instantiate()
if inst is not None:
result.append(inst)
return None
def get_type_recognizers():
"Return a list of the enabled type recognizers for the current context."
result = []
# First try the objfiles.
for objfile in gdb.objfiles():
_get_some_type_recognizers(result, objfile.type_printers)
# Now try the program space.
_get_some_type_recognizers(result, gdb.current_progspace().type_printers)
# Finally, globals.
_get_some_type_recognizers(result, gdb.type_printers)
return result
def apply_type_recognizers(recognizers, type_obj):
"""Apply the given list of type recognizers to the type TYPE_OBJ.
If any recognizer in the list recognizes TYPE_OBJ, returns the name
given by the recognizer. Otherwise, this returns None."""
for r in recognizers:
result = r.recognize(type_obj)
if result is not None:
return result
return None
def register_type_printer(locus, printer):
"""Register a type printer.
PRINTER is the type printer instance.
LOCUS is either an objfile, a program space, or None, indicating
global registration."""
if locus is None:
locus = gdb
locus.type_printers.insert(0, printer)
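# A minimal usage sketch (hypothetical printer class): subclass TypePrinter,
# return an object with a recognize() method from instantiate(), then
# register it globally with
#   register_type_printer(None, MyPrinter('my-typedefs'))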
|
gpl-2.0
|
jiangwei1221/django-virtualenv-demo
|
env/lib/python2.7/site-packages/django/contrib/gis/geos/tests/test_geos_mutation.py
|
109
|
5305
|
# Copyright (c) 2008-2009 Aryeh Leib Taurog, all rights reserved.
# Modified from original contribution by Aryeh Leib Taurog, which was
# released under the New BSD license.
from django.utils import unittest
from django.utils.unittest import skipUnless
from .. import HAS_GEOS
if HAS_GEOS:
from .. import *
from ..error import GEOSIndexError
def getItem(o,i): return o[i]
def delItem(o,i): del o[i]
def setItem(o,i,v): o[i] = v
if HAS_GEOS:
def api_get_distance(x): return x.distance(Point(-200,-200))
def api_get_buffer(x): return x.buffer(10)
def api_get_geom_typeid(x): return x.geom_typeid
def api_get_num_coords(x): return x.num_coords
def api_get_centroid(x): return x.centroid
def api_get_empty(x): return x.empty
def api_get_valid(x): return x.valid
def api_get_simple(x): return x.simple
def api_get_ring(x): return x.ring
def api_get_boundary(x): return x.boundary
def api_get_convex_hull(x): return x.convex_hull
def api_get_extent(x): return x.extent
def api_get_area(x): return x.area
def api_get_length(x): return x.length
geos_function_tests = [ val for name, val in vars().items()
if hasattr(val, '__call__')
and name.startswith('api_get_') ]
@skipUnless(HAS_GEOS, "Geos is required.")
class GEOSMutationTest(unittest.TestCase):
"""
Tests Pythonic Mutability of Python GEOS geometry wrappers
get/set/delitem on a slice, normal list methods
"""
def test00_GEOSIndexException(self):
'Testing Geometry GEOSIndexError'
p = Point(1,2)
for i in range(-2,2): p._checkindex(i)
self.assertRaises(GEOSIndexError, p._checkindex, 2)
self.assertRaises(GEOSIndexError, p._checkindex, -3)
def test01_PointMutations(self):
'Testing Point mutations'
for p in (Point(1,2,3), fromstr('POINT (1 2 3)')):
self.assertEqual(p._get_single_external(1), 2.0, 'Point _get_single_external')
# _set_single
p._set_single(0,100)
self.assertEqual(p.coords, (100.0,2.0,3.0), 'Point _set_single')
# _set_list
p._set_list(2,(50,3141))
self.assertEqual(p.coords, (50.0,3141.0), 'Point _set_list')
def test02_PointExceptions(self):
'Testing Point exceptions'
self.assertRaises(TypeError, Point, range(1))
self.assertRaises(TypeError, Point, range(4))
def test03_PointApi(self):
'Testing Point API'
q = Point(4,5,3)
for p in (Point(1,2,3), fromstr('POINT (1 2 3)')):
p[0:2] = [4,5]
for f in geos_function_tests:
self.assertEqual(f(q), f(p), 'Point ' + f.__name__)
def test04_LineStringMutations(self):
'Testing LineString mutations'
for ls in (LineString((1,0),(4,1),(6,-1)),
fromstr('LINESTRING (1 0,4 1,6 -1)')):
self.assertEqual(ls._get_single_external(1), (4.0,1.0), 'LineString _get_single_external')
# _set_single
ls._set_single(0,(-50,25))
self.assertEqual(ls.coords, ((-50.0,25.0),(4.0,1.0),(6.0,-1.0)), 'LineString _set_single')
# _set_list
ls._set_list(2, ((-50.0,25.0),(6.0,-1.0)))
self.assertEqual(ls.coords, ((-50.0,25.0),(6.0,-1.0)), 'LineString _set_list')
lsa = LineString(ls.coords)
for f in geos_function_tests:
self.assertEqual(f(lsa), f(ls), 'LineString ' + f.__name__)
def test05_Polygon(self):
'Testing Polygon mutations'
for pg in (Polygon(((1,0),(4,1),(6,-1),(8,10),(1,0)),
((5,4),(6,4),(6,3),(5,4))),
fromstr('POLYGON ((1 0,4 1,6 -1,8 10,1 0),(5 4,6 4,6 3,5 4))')):
self.assertEqual(pg._get_single_external(0),
LinearRing((1,0),(4,1),(6,-1),(8,10),(1,0)),
'Polygon _get_single_external(0)')
self.assertEqual(pg._get_single_external(1),
LinearRing((5,4),(6,4),(6,3),(5,4)),
'Polygon _get_single_external(1)')
# _set_list
pg._set_list(2, (((1,2),(10,0),(12,9),(-1,15),(1,2)),
((4,2),(5,2),(5,3),(4,2))))
self.assertEqual(pg.coords,
(((1.0,2.0),(10.0,0.0),(12.0,9.0),(-1.0,15.0),(1.0,2.0)),
((4.0,2.0),(5.0,2.0),(5.0,3.0),(4.0,2.0))),
'Polygon _set_list')
lsa = Polygon(*pg.coords)
for f in geos_function_tests:
self.assertEqual(f(lsa), f(pg), 'Polygon ' + f.__name__)
def test06_Collection(self):
'Testing Collection mutations'
for mp in (MultiPoint(*map(Point,((3,4),(-1,2),(5,-4),(2,8)))),
fromstr('MULTIPOINT (3 4,-1 2,5 -4,2 8)')):
self.assertEqual(mp._get_single_external(2), Point(5,-4), 'Collection _get_single_external')
mp._set_list(3, map(Point,((5,5),(3,-2),(8,1))))
self.assertEqual(mp.coords, ((5.0,5.0),(3.0,-2.0),(8.0,1.0)), 'Collection _set_list')
lsa = MultiPoint(*map(Point,((5,5),(3,-2),(8,1))))
for f in geos_function_tests:
self.assertEqual(f(lsa), f(mp), 'MultiPoint ' + f.__name__)
|
unlicense
|
gminds/rapidnewsng
|
django/contrib/gis/db/models/fields.py
|
101
|
11200
|
from django.db.models.fields import Field
from django.db.models.sql.expressions import SQLEvaluator
from django.utils.translation import ugettext_lazy as _
from django.contrib.gis import forms
from django.contrib.gis.db.models.proxy import GeometryProxy
from django.contrib.gis.geometry.backend import Geometry, GeometryException
from django.utils import six
# Local cache of the spatial_ref_sys table, which holds SRID data for each
# spatial database alias. This cache exists so that the database isn't queried
# for SRID info each time a distance query is constructed.
_srid_cache = {}
def get_srid_info(srid, connection):
"""
Returns the units, unit name, and spheroid WKT associated with the
given SRID from the `spatial_ref_sys` (or equivalent) spatial database
table for the given database connection. These results are cached.
"""
global _srid_cache
try:
# The SpatialRefSys model for the spatial backend.
SpatialRefSys = connection.ops.spatial_ref_sys()
except NotImplementedError:
# No `spatial_ref_sys` table in spatial backend (e.g., MySQL).
return None, None, None
    if connection.alias not in _srid_cache:
# Initialize SRID dictionary for database if it doesn't exist.
_srid_cache[connection.alias] = {}
    if srid not in _srid_cache[connection.alias]:
# Use `SpatialRefSys` model to query for spatial reference info.
sr = SpatialRefSys.objects.using(connection.alias).get(srid=srid)
units, units_name = sr.units
spheroid = SpatialRefSys.get_spheroid(sr.wkt)
_srid_cache[connection.alias][srid] = (units, units_name, spheroid)
return _srid_cache[connection.alias][srid]
class GeometryField(Field):
"The base GIS field -- maps to the OpenGIS Specification Geometry type."
# The OpenGIS Geometry name.
geom_type = 'GEOMETRY'
# Geodetic units.
geodetic_units = ('Decimal Degree', 'degree')
description = _("The base GIS field -- maps to the OpenGIS Specification Geometry type.")
def __init__(self, verbose_name=None, srid=4326, spatial_index=True, dim=2,
geography=False, **kwargs):
"""
The initialization function for geometry fields. Takes the following
as keyword arguments:
srid:
The spatial reference system identifier, an OGC standard.
Defaults to 4326 (WGS84).
spatial_index:
Indicates whether to create a spatial index. Defaults to True.
Set this instead of 'db_index' for geographic fields since index
creation is different for geometry columns.
dim:
The number of dimensions for this geometry. Defaults to 2.
extent:
Customize the extent, in a 4-tuple of WGS 84 coordinates, for the
geometry field entry in the `USER_SDO_GEOM_METADATA` table. Defaults
to (-180.0, -90.0, 180.0, 90.0).
tolerance:
Define the tolerance, in meters, to use for the geometry field
entry in the `USER_SDO_GEOM_METADATA` table. Defaults to 0.05.
"""
# Setting the index flag with the value of the `spatial_index` keyword.
self.spatial_index = spatial_index
# Setting the SRID and getting the units. Unit information must be
# easily available in the field instance for distance queries.
self.srid = srid
# Setting the dimension of the geometry field.
self.dim = dim
# Setting the verbose_name keyword argument with the positional
# first parameter, so this works like normal fields.
kwargs['verbose_name'] = verbose_name
# Is this a geography rather than a geometry column?
self.geography = geography
# Oracle-specific private attributes for creating the entry in
# `USER_SDO_GEOM_METADATA`
self._extent = kwargs.pop('extent', (-180.0, -90.0, 180.0, 90.0))
self._tolerance = kwargs.pop('tolerance', 0.05)
super(GeometryField, self).__init__(**kwargs)
# The following functions are used to get the units, their name, and
# the spheroid corresponding to the SRID of the GeometryField.
def _get_srid_info(self, connection):
# Get attributes from `get_srid_info`.
self._units, self._units_name, self._spheroid = get_srid_info(self.srid, connection)
def spheroid(self, connection):
if not hasattr(self, '_spheroid'):
self._get_srid_info(connection)
return self._spheroid
def units(self, connection):
if not hasattr(self, '_units'):
self._get_srid_info(connection)
return self._units
def units_name(self, connection):
if not hasattr(self, '_units_name'):
self._get_srid_info(connection)
return self._units_name
### Routines specific to GeometryField ###
def geodetic(self, connection):
"""
Returns true if this field's SRID corresponds with a coordinate
system that uses non-projected units (e.g., latitude/longitude).
"""
return self.units_name(connection) in self.geodetic_units
def get_distance(self, value, lookup_type, connection):
"""
Returns a distance number in units of the field. For example, if
`D(km=1)` was passed in and the units of the field were in meters,
then 1000 would be returned.
"""
return connection.ops.get_distance(self, value, lookup_type)
def get_prep_value(self, value):
"""
Spatial lookup values are either a parameter that is (or may be
converted to) a geometry, or a sequence of lookup values that
begins with a geometry. This routine will setup the geometry
value properly, and preserve any other lookup parameters before
returning to the caller.
"""
if isinstance(value, SQLEvaluator):
return value
elif isinstance(value, (tuple, list)):
geom = value[0]
seq_value = True
else:
geom = value
seq_value = False
# When the input is not a GEOS geometry, attempt to construct one
# from the given string input.
if isinstance(geom, Geometry):
pass
elif isinstance(geom, (bytes, six.string_types)) or hasattr(geom, '__geo_interface__'):
try:
geom = Geometry(geom)
except GeometryException:
raise ValueError('Could not create geometry from lookup value.')
else:
raise ValueError('Cannot use object with type %s for a geometry lookup parameter.' % type(geom).__name__)
# Assigning the SRID value.
geom.srid = self.get_srid(geom)
if seq_value:
lookup_val = [geom]
lookup_val.extend(value[1:])
return tuple(lookup_val)
else:
return geom
def get_srid(self, geom):
"""
Returns the default SRID for the given geometry, taking into account
the SRID set for the field. For example, if the input geometry
has no SRID, then that of the field will be returned.
"""
gsrid = geom.srid # SRID of given geometry.
if gsrid is None or self.srid == -1 or (gsrid == -1 and self.srid != -1):
return self.srid
else:
return gsrid
### Routines overloaded from Field ###
def contribute_to_class(self, cls, name):
super(GeometryField, self).contribute_to_class(cls, name)
# Setup for lazy-instantiated Geometry object.
setattr(cls, self.attname, GeometryProxy(Geometry, self))
def db_type(self, connection):
return connection.ops.geo_db_type(self)
def formfield(self, **kwargs):
defaults = {'form_class' : forms.GeometryField,
'null' : self.null,
'geom_type' : self.geom_type,
'srid' : self.srid,
}
defaults.update(kwargs)
return super(GeometryField, self).formfield(**defaults)
def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
"""
Prepare for the database lookup, and return any spatial parameters
necessary for the query. This includes wrapping any geometry
parameters with a backend-specific adapter and formatting any distance
parameters into the correct units for the coordinate system of the
field.
"""
if lookup_type in connection.ops.gis_terms:
# special case for isnull lookup
if lookup_type == 'isnull':
return []
# Populating the parameters list, and wrapping the Geometry
# with the Adapter of the spatial backend.
if isinstance(value, (tuple, list)):
params = [connection.ops.Adapter(value[0])]
if lookup_type in connection.ops.distance_functions:
# Getting the distance parameter in the units of the field.
params += self.get_distance(value[1:], lookup_type, connection)
elif lookup_type in connection.ops.truncate_params:
# Lookup is one where SQL parameters aren't needed from the
# given lookup value.
pass
else:
params += value[1:]
elif isinstance(value, SQLEvaluator):
params = []
else:
params = [connection.ops.Adapter(value)]
return params
else:
raise ValueError('%s is not a valid spatial lookup for %s.' %
(lookup_type, self.__class__.__name__))
def get_prep_lookup(self, lookup_type, value):
if lookup_type == 'isnull':
return bool(value)
else:
return self.get_prep_value(value)
def get_db_prep_save(self, value, connection):
"Prepares the value for saving in the database."
if value is None:
return None
else:
return connection.ops.Adapter(self.get_prep_value(value))
def get_placeholder(self, value, connection):
"""
Returns the placeholder for the geometry column for the
given value.
"""
return connection.ops.get_geom_placeholder(self, value)
# The OpenGIS Geometry Type Fields
class PointField(GeometryField):
geom_type = 'POINT'
description = _("Point")
class LineStringField(GeometryField):
geom_type = 'LINESTRING'
description = _("Line string")
class PolygonField(GeometryField):
geom_type = 'POLYGON'
description = _("Polygon")
class MultiPointField(GeometryField):
geom_type = 'MULTIPOINT'
description = _("Multi-point")
class MultiLineStringField(GeometryField):
geom_type = 'MULTILINESTRING'
description = _("Multi-line string")
class MultiPolygonField(GeometryField):
geom_type = 'MULTIPOLYGON'
description = _("Multi polygon")
class GeometryCollectionField(GeometryField):
geom_type = 'GEOMETRYCOLLECTION'
description = _("Geometry collection")
|
bsd-3-clause
|
CapOM/ChromiumGStreamerBackend
|
tools/telemetry/third_party/gsutilz/third_party/boto/boto/mturk/connection.py
|
112
|
42336
|
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
import xml.sax
import datetime
import itertools
from boto import handler
from boto import config
from boto.mturk.price import Price
import boto.mturk.notification
from boto.connection import AWSQueryConnection
from boto.exception import EC2ResponseError
from boto.resultset import ResultSet
from boto.mturk.question import QuestionForm, ExternalQuestion, HTMLQuestion
class MTurkRequestError(EC2ResponseError):
"Error for MTurk Requests"
# todo: subclass from an abstract parent of EC2ResponseError
class MTurkConnection(AWSQueryConnection):
APIVersion = '2012-03-25'
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
is_secure=True, port=None, proxy=None, proxy_port=None,
proxy_user=None, proxy_pass=None,
host=None, debug=0,
https_connection_factory=None, security_token=None,
profile_name=None):
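        # Host selection honors the boto config, e.g. (assumed ~/.boto):
        #   [MTurk]
        #   sandbox = True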
if not host:
if config.has_option('MTurk', 'sandbox') and config.get('MTurk', 'sandbox') == 'True':
host = 'mechanicalturk.sandbox.amazonaws.com'
else:
host = 'mechanicalturk.amazonaws.com'
self.debug = debug
super(MTurkConnection, self).__init__(aws_access_key_id,
aws_secret_access_key,
is_secure, port, proxy, proxy_port,
proxy_user, proxy_pass, host, debug,
https_connection_factory,
security_token=security_token,
profile_name=profile_name)
def _required_auth_capability(self):
return ['mturk']
def get_account_balance(self):
"""
"""
params = {}
return self._process_request('GetAccountBalance', params,
[('AvailableBalance', Price),
('OnHoldBalance', Price)])
def register_hit_type(self, title, description, reward, duration,
keywords=None, approval_delay=None, qual_req=None):
"""
Register a new HIT Type
title, description are strings
reward is a Price object
duration can be a timedelta, or an object castable to an int
"""
params = dict(
Title=title,
Description=description,
AssignmentDurationInSeconds=self.duration_as_seconds(duration),
)
params.update(MTurkConnection.get_price_as_price(reward).get_as_params('Reward'))
if keywords:
params['Keywords'] = self.get_keywords_as_string(keywords)
if approval_delay is not None:
d = self.duration_as_seconds(approval_delay)
params['AutoApprovalDelayInSeconds'] = d
if qual_req is not None:
params.update(qual_req.get_as_params())
return self._process_request('RegisterHITType', params,
[('HITTypeId', HITTypeId)])
def set_email_notification(self, hit_type, email, event_types=None):
"""
Performs a SetHITTypeNotification operation to set email
notification for a specified HIT type
"""
return self._set_notification(hit_type, 'Email', email,
'SetHITTypeNotification', event_types)
def set_rest_notification(self, hit_type, url, event_types=None):
"""
Performs a SetHITTypeNotification operation to set REST notification
for a specified HIT type
"""
return self._set_notification(hit_type, 'REST', url,
'SetHITTypeNotification', event_types)
def set_sqs_notification(self, hit_type, queue_url, event_types=None):
"""
        Performs a SetHITTypeNotification operation to set SQS notification
for a specified HIT type. Queue URL is of form:
https://queue.amazonaws.com/<CUSTOMER_ID>/<QUEUE_NAME> and can be
found when looking at the details for a Queue in the AWS Console
"""
return self._set_notification(hit_type, "SQS", queue_url,
'SetHITTypeNotification', event_types)
def send_test_event_notification(self, hit_type, url,
event_types=None,
test_event_type='Ping'):
"""
Performs a SendTestEventNotification operation with REST notification
for a specified HIT type
"""
return self._set_notification(hit_type, 'REST', url,
'SendTestEventNotification',
event_types, test_event_type)
def _set_notification(self, hit_type, transport,
destination, request_type,
event_types=None, test_event_type=None):
"""
Common operation to set notification or send a test event
notification for a specified HIT type
"""
params = {'HITTypeId': hit_type}
# from the Developer Guide:
# The 'Active' parameter is optional. If omitted, the active status of
# the HIT type's notification specification is unchanged. All HIT types
# begin with their notification specifications in the "inactive" status.
notification_params = {'Destination': destination,
'Transport': transport,
'Version': boto.mturk.notification.NotificationMessage.NOTIFICATION_VERSION,
'Active': True,
}
# add specific event types if required
if event_types:
self.build_list_params(notification_params, event_types,
'EventType')
# Set up dict of 'Notification.1.Transport' etc. values
notification_rest_params = {}
num = 1
for key in notification_params:
notification_rest_params['Notification.%d.%s' % (num, key)] = notification_params[key]
# Update main params dict
params.update(notification_rest_params)
# If test notification, specify the notification type to be tested
if test_event_type:
params.update({'TestEventType': test_event_type})
# Execute operation
return self._process_request(request_type, params)
def create_hit(self, hit_type=None, question=None, hit_layout=None,
lifetime=datetime.timedelta(days=7),
max_assignments=1,
title=None, description=None, keywords=None,
reward=None, duration=datetime.timedelta(days=7),
approval_delay=None, annotation=None,
questions=None, qualifications=None,
layout_params=None, response_groups=None):
"""
Creates a new HIT.
Returns a ResultSet
See: http://docs.amazonwebservices.com/AWSMechTurk/2012-03-25/AWSMturkAPI/ApiReference_CreateHITOperation.html
"""
# Handle basic required arguments and set up params dict
params = {'LifetimeInSeconds':
self.duration_as_seconds(lifetime),
'MaxAssignments': max_assignments,
}
# handle single or multiple questions or layouts
neither = question is None and questions is None
if hit_layout is None:
both = question is not None and questions is not None
if neither or both:
raise ValueError("Must specify question (single Question instance) or questions (list or QuestionForm instance), but not both")
if question:
questions = [question]
question_param = QuestionForm(questions)
if isinstance(question, QuestionForm):
question_param = question
elif isinstance(question, ExternalQuestion):
question_param = question
elif isinstance(question, HTMLQuestion):
question_param = question
params['Question'] = question_param.get_as_xml()
else:
if not neither:
raise ValueError("Must not specify question (single Question instance) or questions (list or QuestionForm instance) when specifying hit_layout")
params['HITLayoutId'] = hit_layout
if layout_params:
params.update(layout_params.get_as_params())
# if hit type specified then add it
# else add the additional required parameters
if hit_type:
params['HITTypeId'] = hit_type
else:
# Handle keywords
final_keywords = MTurkConnection.get_keywords_as_string(keywords)
# Handle price argument
final_price = MTurkConnection.get_price_as_price(reward)
final_duration = self.duration_as_seconds(duration)
additional_params = dict(
Title=title,
Description=description,
Keywords=final_keywords,
AssignmentDurationInSeconds=final_duration,
)
additional_params.update(final_price.get_as_params('Reward'))
if approval_delay is not None:
d = self.duration_as_seconds(approval_delay)
additional_params['AutoApprovalDelayInSeconds'] = d
# add these params to the others
params.update(additional_params)
# add the annotation if specified
if annotation is not None:
params['RequesterAnnotation'] = annotation
# Add the Qualifications if specified
if qualifications is not None:
params.update(qualifications.get_as_params())
# Handle optional response groups argument
if response_groups:
self.build_list_params(params, response_groups, 'ResponseGroup')
# Submit
return self._process_request('CreateHIT', params, [('HIT', HIT)])
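    # A minimal usage sketch (hypothetical IDs and URL); supply exactly one
    # of question/questions or hit_layout:
    #   conn = MTurkConnection()
    #   conn.create_hit(title='Tag an image', description='Pick the best tag.',
    #                   reward=Price(0.05), duration=60 * 10,
    #                   question=ExternalQuestion('https://example.com', 600))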
def change_hit_type_of_hit(self, hit_id, hit_type):
"""
Change the HIT type of an existing HIT. Note that the reward associated
with the new HIT type must match the reward of the current HIT type in
order for the operation to be valid.
:type hit_id: str
:type hit_type: str
"""
params = {'HITId': hit_id,
'HITTypeId': hit_type}
return self._process_request('ChangeHITTypeOfHIT', params)
def get_reviewable_hits(self, hit_type=None, status='Reviewable',
sort_by='Expiration', sort_direction='Ascending',
page_size=10, page_number=1):
"""
Retrieve the HITs that have a status of Reviewable, or HITs that
have a status of Reviewing, and that belong to the Requester
calling the operation.
"""
params = {'Status': status,
'SortProperty': sort_by,
'SortDirection': sort_direction,
'PageSize': page_size,
'PageNumber': page_number}
# Handle optional hit_type argument
if hit_type is not None:
params.update({'HITTypeId': hit_type})
return self._process_request('GetReviewableHITs', params,
[('HIT', HIT)])
@staticmethod
def _get_pages(page_size, total_records):
"""
Given a page size (records per page) and a total number of
records, return the page numbers to be retrieved.
"""
pages = total_records / page_size + bool(total_records % page_size)
return list(range(1, pages + 1))
def get_all_hits(self):
"""
Return all of a Requester's HITs
Despite what search_hits says, it does not return all hits, but
instead returns a page of hits. This method will pull the hits
from the server 100 at a time, but will yield the results
iteratively, so subsequent requests are made on demand.
"""
page_size = 100
search_rs = self.search_hits(page_size=page_size)
total_records = int(search_rs.TotalNumResults)
get_page_hits = lambda page: self.search_hits(page_size=page_size, page_number=page)
page_nums = self._get_pages(page_size, total_records)
hit_sets = itertools.imap(get_page_hits, page_nums)
return itertools.chain.from_iterable(hit_sets)
def search_hits(self, sort_by='CreationTime', sort_direction='Ascending',
page_size=10, page_number=1, response_groups=None):
"""
Return a page of a Requester's HITs, on behalf of the Requester.
The operation returns HITs of any status, except for HITs that
have been disposed with the DisposeHIT operation.
Note:
The SearchHITs operation does not accept any search parameters
that filter the results.
"""
params = {'SortProperty': sort_by,
'SortDirection': sort_direction,
'PageSize': page_size,
'PageNumber': page_number}
# Handle optional response groups argument
if response_groups:
self.build_list_params(params, response_groups, 'ResponseGroup')
return self._process_request('SearchHITs', params, [('HIT', HIT)])
def get_assignment(self, assignment_id, response_groups=None):
"""
Retrieves an assignment using the assignment's ID. Requesters can only
retrieve their own assignments, and only assignments whose related HIT
has not been disposed.
The returned ResultSet will have the following attributes:
Request
This element is present only if the Request ResponseGroup
is specified.
Assignment
The assignment. The response includes one Assignment object.
HIT
The HIT associated with this assignment. The response
includes one HIT object.
"""
params = {'AssignmentId': assignment_id}
# Handle optional response groups argument
if response_groups:
self.build_list_params(params, response_groups, 'ResponseGroup')
return self._process_request('GetAssignment', params,
[('Assignment', Assignment),
('HIT', HIT)])
def get_assignments(self, hit_id, status=None,
sort_by='SubmitTime', sort_direction='Ascending',
page_size=10, page_number=1, response_groups=None):
"""
Retrieves completed assignments for a HIT.
Use this operation to retrieve the results for a HIT.
The returned ResultSet will have the following attributes:
NumResults
The number of assignments on the page in the filtered results
list, equivalent to the number of assignments being returned
by this call.
A non-negative integer, as a string.
PageNumber
The number of the page in the filtered results list being
returned.
A positive integer, as a string.
TotalNumResults
The total number of HITs in the filtered results list based
on this call.
A non-negative integer, as a string.
The ResultSet will contain zero or more Assignment objects
"""
params = {'HITId': hit_id,
'SortProperty': sort_by,
'SortDirection': sort_direction,
'PageSize': page_size,
'PageNumber': page_number}
if status is not None:
params['AssignmentStatus'] = status
# Handle optional response groups argument
if response_groups:
self.build_list_params(params, response_groups, 'ResponseGroup')
return self._process_request('GetAssignmentsForHIT', params,
[('Assignment', Assignment)])
def approve_assignment(self, assignment_id, feedback=None):
"""
"""
params = {'AssignmentId': assignment_id}
if feedback:
params['RequesterFeedback'] = feedback
return self._process_request('ApproveAssignment', params)
def reject_assignment(self, assignment_id, feedback=None):
"""
"""
params = {'AssignmentId': assignment_id}
if feedback:
params['RequesterFeedback'] = feedback
return self._process_request('RejectAssignment', params)
def approve_rejected_assignment(self, assignment_id, feedback=None):
"""
"""
params = {'AssignmentId': assignment_id}
if feedback:
params['RequesterFeedback'] = feedback
return self._process_request('ApproveRejectedAssignment', params)
def get_file_upload_url(self, assignment_id, question_identifier):
"""
Generates and returns a temporary URL to an uploaded file. The
temporary URL is used to retrieve the file as an answer to a
FileUploadAnswer question, it is valid for 60 seconds.
Will have a FileUploadURL attribute as per the API Reference.
"""
params = {'AssignmentId': assignment_id,
'QuestionIdentifier': question_identifier}
return self._process_request('GetFileUploadURL', params,
[('FileUploadURL', FileUploadURL)])
def get_hit(self, hit_id, response_groups=None):
"""
"""
params = {'HITId': hit_id}
# Handle optional response groups argument
if response_groups:
self.build_list_params(params, response_groups, 'ResponseGroup')
return self._process_request('GetHIT', params, [('HIT', HIT)])
def set_reviewing(self, hit_id, revert=None):
"""
Update a HIT with a status of Reviewable to have a status of Reviewing,
or reverts a Reviewing HIT back to the Reviewable status.
Only HITs with a status of Reviewable can be updated with a status of
Reviewing. Similarly, only Reviewing HITs can be reverted back to a
status of Reviewable.
"""
params = {'HITId': hit_id}
if revert:
params['Revert'] = revert
return self._process_request('SetHITAsReviewing', params)
def disable_hit(self, hit_id, response_groups=None):
"""
Remove a HIT from the Mechanical Turk marketplace, approves all
submitted assignments that have not already been approved or rejected,
and disposes of the HIT and all assignment data.
Assignments for the HIT that have already been submitted, but not yet
approved or rejected, will be automatically approved. Assignments in
progress at the time of the call to DisableHIT will be approved once
the assignments are submitted. You will be charged for approval of
these assignments. DisableHIT completely disposes of the HIT and
all submitted assignment data. Assignment results data cannot be
retrieved for a HIT that has been disposed.
It is not possible to re-enable a HIT once it has been disabled.
To make the work from a disabled HIT available again, create a new HIT.
"""
params = {'HITId': hit_id}
# Handle optional response groups argument
if response_groups:
self.build_list_params(params, response_groups, 'ResponseGroup')
return self._process_request('DisableHIT', params)
def dispose_hit(self, hit_id):
"""
Dispose of a HIT that is no longer needed.
Only HITs in the "reviewable" state, with all submitted
assignments approved or rejected, can be disposed. A Requester
can call GetReviewableHITs to determine which HITs are
reviewable, then call GetAssignmentsForHIT to retrieve the
assignments. Disposing of a HIT removes the HIT from the
results of a call to GetReviewableHITs. """
params = {'HITId': hit_id}
return self._process_request('DisposeHIT', params)
def expire_hit(self, hit_id):
"""
Expire a HIT that is no longer needed.
The effect is identical to the HIT expiring on its own. The
HIT no longer appears on the Mechanical Turk web site, and no
new Workers are allowed to accept the HIT. Workers who have
accepted the HIT prior to expiration are allowed to complete
it or return it, or allow the assignment duration to elapse
(abandon the HIT). Once all remaining assignments have been
        submitted, the expired HIT becomes "reviewable", and will be
returned by a call to GetReviewableHITs.
"""
params = {'HITId': hit_id}
return self._process_request('ForceExpireHIT', params)
def extend_hit(self, hit_id, assignments_increment=None,
expiration_increment=None):
"""
Increase the maximum number of assignments, or extend the
expiration date, of an existing HIT.
NOTE: If a HIT has a status of Reviewable and the HIT is
extended to make it Available, the HIT will not be returned by
GetReviewableHITs, and its submitted assignments will not be
returned by GetAssignmentsForHIT, until the HIT is Reviewable
again. Assignment auto-approval will still happen on its
original schedule, even if the HIT has been extended. Be sure
to retrieve and approve (or reject) submitted assignments
before extending the HIT, if so desired.
"""
# must provide assignment *or* expiration increment
if (assignments_increment is None and expiration_increment is None) or \
(assignments_increment is not None and expiration_increment is not None):
raise ValueError("Must specify either assignments_increment or expiration_increment, but not both")
params = {'HITId': hit_id}
if assignments_increment:
params['MaxAssignmentsIncrement'] = assignments_increment
if expiration_increment:
params['ExpirationIncrementInSeconds'] = expiration_increment
return self._process_request('ExtendHIT', params)
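    # Usage sketch (the HIT id is hypothetical): pass exactly one increment, e.g.
    #   conn.extend_hit('HIT_ID', assignments_increment=10)
    # or
    #   conn.extend_hit('HIT_ID', expiration_increment=3600)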
def get_help(self, about, help_type='Operation'):
"""
        Return information about the Mechanical Turk Service operations
        and response groups. NOTE: this is basically useless, as it just
        returns the URL of the documentation.
        help_type: either 'Operation' or 'ResponseGroup'
"""
params = {'About': about, 'HelpType': help_type}
return self._process_request('Help', params)
def grant_bonus(self, worker_id, assignment_id, bonus_price, reason):
"""
        Issue a payment of money from your account to a Worker. To
be eligible for a bonus, the Worker must have submitted
results for one of your HITs, and have had those results
approved or rejected. This payment happens separately from the
reward you pay to the Worker when you approve the Worker's
assignment. The Bonus must be passed in as an instance of the
Price object.
"""
params = bonus_price.get_as_params('BonusAmount', 1)
params['WorkerId'] = worker_id
params['AssignmentId'] = assignment_id
params['Reason'] = reason
return self._process_request('GrantBonus', params)
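    # Usage sketch (ids are hypothetical); the bonus must be a Price, e.g.
    #   conn.grant_bonus('WORKER_ID', 'ASSIGNMENT_ID', Price(1.00),
    #                    'Thanks for the careful work!')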
def block_worker(self, worker_id, reason):
"""
Block a worker from working on my tasks.
"""
params = {'WorkerId': worker_id, 'Reason': reason}
return self._process_request('BlockWorker', params)
def unblock_worker(self, worker_id, reason):
"""
Unblock a worker from working on my tasks.
"""
params = {'WorkerId': worker_id, 'Reason': reason}
return self._process_request('UnblockWorker', params)
def notify_workers(self, worker_ids, subject, message_text):
"""
Send a text message to workers.
"""
params = {'Subject': subject,
'MessageText': message_text}
self.build_list_params(params, worker_ids, 'WorkerId')
return self._process_request('NotifyWorkers', params)
def create_qualification_type(self,
name,
description,
status,
keywords=None,
retry_delay=None,
test=None,
answer_key=None,
answer_key_xml=None,
test_duration=None,
auto_granted=False,
auto_granted_value=1):
"""
Create a new Qualification Type.
name: This will be visible to workers and must be unique for a
given requester.
description: description shown to workers. Max 2000 characters.
status: 'Active' or 'Inactive'
keywords: list of keyword strings or comma separated string.
Max length of 1000 characters when concatenated with commas.
retry_delay: number of seconds after requesting a
qualification the worker must wait before they can ask again.
If not specified, workers can only request this qualification
once.
test: a QuestionForm
answer_key: an XML string of your answer key, for automatically
scored qualification tests.
        (Consider implementing an AnswerKey class to support this.)
test_duration: the number of seconds a worker has to complete the test.
auto_granted: if True, requests for the Qualification are granted
immediately. Can't coexist with a test.
auto_granted_value: auto_granted qualifications are given this value.
"""
params = {'Name': name,
'Description': description,
'QualificationTypeStatus': status,
}
if retry_delay is not None:
params['RetryDelayInSeconds'] = retry_delay
if test is not None:
assert(isinstance(test, QuestionForm))
assert(test_duration is not None)
params['Test'] = test.get_as_xml()
if test_duration is not None:
params['TestDurationInSeconds'] = test_duration
if answer_key is not None:
if isinstance(answer_key, basestring):
params['AnswerKey'] = answer_key # xml
else:
                raise TypeError("answer_key must be a string containing the answer key XML")
# Eventually someone will write an AnswerKey class.
if auto_granted:
assert(test is None)
params['AutoGranted'] = True
params['AutoGrantedValue'] = auto_granted_value
if keywords:
params['Keywords'] = self.get_keywords_as_string(keywords)
return self._process_request('CreateQualificationType', params,
[('QualificationType',
QualificationType)])
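    # Usage sketch (values are hypothetical): an auto-granted type takes no test, e.g.
    #   conn.create_qualification_type('Trusted', 'Vetted workers', 'Active',
    #                                  auto_granted=True, auto_granted_value=1)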
def get_qualification_type(self, qualification_type_id):
        params = {'QualificationTypeId': qualification_type_id}
return self._process_request('GetQualificationType', params,
[('QualificationType', QualificationType)])
def get_all_qualifications_for_qual_type(self, qualification_type_id):
page_size = 100
search_qual = self.get_qualifications_for_qualification_type(qualification_type_id)
total_records = int(search_qual.TotalNumResults)
        get_page_quals = lambda page: self.get_qualifications_for_qualification_type(qualification_type_id=qualification_type_id, page_size=page_size, page_number=page)
page_nums = self._get_pages(page_size, total_records)
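        # itertools keeps this lazy: each page is fetched only when the
        # chained iterator reaches it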
qual_sets = itertools.imap(get_page_quals, page_nums)
return itertools.chain.from_iterable(qual_sets)
    def get_qualifications_for_qualification_type(self, qualification_type_id, page_size=100, page_number=1):
params = {'QualificationTypeId': qualification_type_id,
'PageSize': page_size,
'PageNumber': page_number}
return self._process_request('GetQualificationsForQualificationType', params,
[('Qualification', Qualification)])
def update_qualification_type(self, qualification_type_id,
description=None,
status=None,
retry_delay=None,
test=None,
answer_key=None,
test_duration=None,
auto_granted=None,
auto_granted_value=None):
params = {'QualificationTypeId': qualification_type_id}
if description is not None:
params['Description'] = description
if status is not None:
params['QualificationTypeStatus'] = status
if retry_delay is not None:
params['RetryDelayInSeconds'] = retry_delay
if test is not None:
assert(isinstance(test, QuestionForm))
params['Test'] = test.get_as_xml()
if test_duration is not None:
params['TestDurationInSeconds'] = test_duration
if answer_key is not None:
if isinstance(answer_key, basestring):
params['AnswerKey'] = answer_key # xml
else:
                raise TypeError("answer_key must be a string containing the answer key XML")
# Eventually someone will write an AnswerKey class.
if auto_granted is not None:
params['AutoGranted'] = auto_granted
if auto_granted_value is not None:
params['AutoGrantedValue'] = auto_granted_value
return self._process_request('UpdateQualificationType', params,
[('QualificationType', QualificationType)])
def dispose_qualification_type(self, qualification_type_id):
"""TODO: Document."""
params = {'QualificationTypeId': qualification_type_id}
return self._process_request('DisposeQualificationType', params)
def search_qualification_types(self, query=None, sort_by='Name',
sort_direction='Ascending', page_size=10,
page_number=1, must_be_requestable=True,
must_be_owned_by_caller=True):
"""TODO: Document."""
params = {'Query': query,
'SortProperty': sort_by,
'SortDirection': sort_direction,
'PageSize': page_size,
'PageNumber': page_number,
'MustBeRequestable': must_be_requestable,
'MustBeOwnedByCaller': must_be_owned_by_caller}
return self._process_request('SearchQualificationTypes', params,
[('QualificationType', QualificationType)])
def get_qualification_requests(self, qualification_type_id,
sort_by='Expiration',
sort_direction='Ascending', page_size=10,
page_number=1):
"""TODO: Document."""
params = {'QualificationTypeId': qualification_type_id,
'SortProperty': sort_by,
'SortDirection': sort_direction,
'PageSize': page_size,
'PageNumber': page_number}
return self._process_request('GetQualificationRequests', params,
[('QualificationRequest', QualificationRequest)])
def grant_qualification(self, qualification_request_id, integer_value=1):
"""TODO: Document."""
params = {'QualificationRequestId': qualification_request_id,
'IntegerValue': integer_value}
return self._process_request('GrantQualification', params)
def revoke_qualification(self, subject_id, qualification_type_id,
reason=None):
"""TODO: Document."""
params = {'SubjectId': subject_id,
'QualificationTypeId': qualification_type_id,
'Reason': reason}
return self._process_request('RevokeQualification', params)
def assign_qualification(self, qualification_type_id, worker_id,
value=1, send_notification=True):
params = {'QualificationTypeId': qualification_type_id,
'WorkerId' : worker_id,
'IntegerValue' : value,
'SendNotification' : send_notification}
return self._process_request('AssignQualification', params)
def get_qualification_score(self, qualification_type_id, worker_id):
"""TODO: Document."""
params = {'QualificationTypeId' : qualification_type_id,
'SubjectId' : worker_id}
return self._process_request('GetQualificationScore', params,
[('Qualification', Qualification)])
def update_qualification_score(self, qualification_type_id, worker_id,
value):
"""TODO: Document."""
params = {'QualificationTypeId' : qualification_type_id,
'SubjectId' : worker_id,
'IntegerValue' : value}
return self._process_request('UpdateQualificationScore', params)
def _process_request(self, request_type, params, marker_elems=None):
"""
Helper to process the xml response from AWS
"""
params['Operation'] = request_type
response = self.make_request(None, params, verb='POST')
return self._process_response(response, marker_elems)
def _process_response(self, response, marker_elems=None):
"""
Helper to process the xml response from AWS
"""
body = response.read()
if self.debug == 2:
print(body)
if '<Errors>' not in body:
rs = ResultSet(marker_elems)
h = handler.XmlHandler(rs, self)
xml.sax.parseString(body, h)
return rs
else:
raise MTurkRequestError(response.status, response.reason, body)
@staticmethod
def get_keywords_as_string(keywords):
"""
Returns a comma+space-separated string of keywords from either
a list or a string
"""
if isinstance(keywords, list):
keywords = ', '.join(keywords)
if isinstance(keywords, str):
final_keywords = keywords
elif isinstance(keywords, unicode):
final_keywords = keywords.encode('utf-8')
elif keywords is None:
final_keywords = ""
else:
raise TypeError("keywords argument must be a string or a list of strings; got a %s" % type(keywords))
return final_keywords
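    # e.g. get_keywords_as_string(['cats', 'dogs']) -> 'cats, dogs'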
@staticmethod
def get_price_as_price(reward):
"""
Returns a Price data structure from either a float or a Price
"""
if isinstance(reward, Price):
final_price = reward
else:
final_price = Price(reward)
return final_price
@staticmethod
def duration_as_seconds(duration):
if isinstance(duration, datetime.timedelta):
duration = duration.days * 86400 + duration.seconds
try:
duration = int(duration)
except TypeError:
raise TypeError("Duration must be a timedelta or int-castable, got %s" % type(duration))
return duration
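    # e.g. duration_as_seconds(datetime.timedelta(days=1, seconds=30)) -> 86430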
class BaseAutoResultElement(object):
"""
Base class to automatically add attributes when parsing XML
"""
def __init__(self, connection):
pass
def startElement(self, name, attrs, connection):
return None
def endElement(self, name, value, connection):
setattr(self, name, value)
class HIT(BaseAutoResultElement):
"""
Class to extract a HIT structure from a response (used in ResultSet)
Will have attributes named as per the Developer Guide,
e.g. HITId, HITTypeId, CreationTime
"""
# property helper to determine if HIT has expired
def _has_expired(self):
""" Has this HIT expired yet? """
expired = False
if hasattr(self, 'Expiration'):
now = datetime.datetime.utcnow()
expiration = datetime.datetime.strptime(self.Expiration, '%Y-%m-%dT%H:%M:%SZ')
expired = (now >= expiration)
else:
raise ValueError("ERROR: Request for expired property, but no Expiration in HIT!")
return expired
# are we there yet?
expired = property(_has_expired)
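    # e.g. hit.expired is True once datetime.utcnow() has passed hit.Expiration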
class FileUploadURL(BaseAutoResultElement):
"""
    Class to extract a FileUploadURL structure from a response
"""
pass
class HITTypeId(BaseAutoResultElement):
"""
Class to extract an HITTypeId structure from a response
"""
pass
class Qualification(BaseAutoResultElement):
"""
    Class to extract a Qualification structure from a response (used in
ResultSet)
Will have attributes named as per the Developer Guide such as
QualificationTypeId, IntegerValue. Does not seem to contain GrantTime.
"""
pass
class QualificationType(BaseAutoResultElement):
"""
    Class to extract a QualificationType structure from a response (used in
ResultSet)
Will have attributes named as per the Developer Guide,
e.g. QualificationTypeId, CreationTime, Name, etc
"""
pass
class QualificationRequest(BaseAutoResultElement):
"""
    Class to extract a QualificationRequest structure from a response (used in
ResultSet)
Will have attributes named as per the Developer Guide,
e.g. QualificationRequestId, QualificationTypeId, SubjectId, etc
"""
def __init__(self, connection):
super(QualificationRequest, self).__init__(connection)
self.answers = []
def endElement(self, name, value, connection):
        # the answer consists of embedded XML, so it needs to be parsed independently
if name == 'Answer':
answer_rs = ResultSet([('Answer', QuestionFormAnswer)])
h = handler.XmlHandler(answer_rs, connection)
value = connection.get_utf8_value(value)
xml.sax.parseString(value, h)
self.answers.append(answer_rs)
else:
super(QualificationRequest, self).endElement(name, value, connection)
class Assignment(BaseAutoResultElement):
"""
Class to extract an Assignment structure from a response (used in
ResultSet)
Will have attributes named as per the Developer Guide,
e.g. AssignmentId, WorkerId, HITId, Answer, etc
"""
def __init__(self, connection):
super(Assignment, self).__init__(connection)
self.answers = []
def endElement(self, name, value, connection):
        # the answer consists of embedded XML, so it needs to be parsed independently
if name == 'Answer':
answer_rs = ResultSet([('Answer', QuestionFormAnswer)])
h = handler.XmlHandler(answer_rs, connection)
value = connection.get_utf8_value(value)
xml.sax.parseString(value, h)
self.answers.append(answer_rs)
else:
super(Assignment, self).endElement(name, value, connection)
class QuestionFormAnswer(BaseAutoResultElement):
"""
Class to extract Answers from inside the embedded XML
QuestionFormAnswers element inside the Answer element which is
part of the Assignment and QualificationRequest structures
A QuestionFormAnswers element contains an Answer element for each
question in the HIT or Qualification test for which the Worker
provided an answer. Each Answer contains a QuestionIdentifier
element whose value corresponds to the QuestionIdentifier of a
Question in the QuestionForm. See the QuestionForm data structure
for more information about questions and answer specifications.
    If the question expects a free-text answer, the Answer element
    contains a FreeText element. This element contains the Worker's
    answer.
    *NOTE* - currently only supports free-text and selection answers.
"""
def __init__(self, connection):
super(QuestionFormAnswer, self).__init__(connection)
self.fields = []
self.qid = None
def endElement(self, name, value, connection):
if name == 'QuestionIdentifier':
self.qid = value
elif name in ['FreeText', 'SelectionIdentifier', 'OtherSelectionText'] and self.qid:
self.fields.append(value)
|
bsd-3-clause
|
ademuk/django-oscar
|
src/oscar/apps/checkout/utils.py
|
37
|
7910
|
class CheckoutSessionData(object):
"""
Responsible for marshalling all the checkout session data
Multi-stage checkouts often require several forms to be submitted and their
data persisted until the final order is placed. This class helps store and
organise checkout form data until it is required to write out the final
order.
"""
SESSION_KEY = 'checkout_data'
def __init__(self, request):
self.request = request
if self.SESSION_KEY not in self.request.session:
self.request.session[self.SESSION_KEY] = {}
def _check_namespace(self, namespace):
"""
Ensure a namespace within the session dict is initialised
"""
if namespace not in self.request.session[self.SESSION_KEY]:
self.request.session[self.SESSION_KEY][namespace] = {}
def _get(self, namespace, key, default=None):
"""
Return a value from within a namespace
"""
self._check_namespace(namespace)
if key in self.request.session[self.SESSION_KEY][namespace]:
return self.request.session[self.SESSION_KEY][namespace][key]
return default
def _set(self, namespace, key, value):
"""
Set a namespaced value
"""
self._check_namespace(namespace)
self.request.session[self.SESSION_KEY][namespace][key] = value
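        # Django only auto-detects changes to top-level session keys, so
        # nested writes must set the modified flag explicitly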
self.request.session.modified = True
def _unset(self, namespace, key):
"""
Remove a namespaced value
"""
self._check_namespace(namespace)
if key in self.request.session[self.SESSION_KEY][namespace]:
del self.request.session[self.SESSION_KEY][namespace][key]
self.request.session.modified = True
def _flush_namespace(self, namespace):
"""
Flush a namespace
"""
self.request.session[self.SESSION_KEY][namespace] = {}
self.request.session.modified = True
def flush(self):
"""
Flush all session data
"""
self.request.session[self.SESSION_KEY] = {}
# Guest checkout
# ==============
def set_guest_email(self, email):
self._set('guest', 'email', email)
def get_guest_email(self):
return self._get('guest', 'email')
# Shipping address
# ================
# Options:
# 1. No shipping required (eg digital products)
# 2. Ship to new address (entered in a form)
# 3. Ship to an addressbook address (address chosen from list)
def reset_shipping_data(self):
self._flush_namespace('shipping')
def ship_to_user_address(self, address):
"""
        Use a user address (from an address book) as the shipping address.
"""
self.reset_shipping_data()
self._set('shipping', 'user_address_id', address.id)
def ship_to_new_address(self, address_fields):
"""
Use a manually entered address as the shipping address
"""
self._unset('shipping', 'new_address_fields')
phone_number = address_fields.get('phone_number')
if phone_number:
# Phone number is stored as a PhoneNumber instance. As we store
# strings in the session, we need to serialize it.
address_fields = address_fields.copy()
address_fields['phone_number'] = phone_number.as_international
self._set('shipping', 'new_address_fields', address_fields)
def new_shipping_address_fields(self):
"""
Return shipping address fields
"""
return self._get('shipping', 'new_address_fields')
def shipping_user_address_id(self):
"""
Return user address id
"""
return self._get('shipping', 'user_address_id')
# Legacy accessor
user_address_id = shipping_user_address_id
def is_shipping_address_set(self):
"""
Test whether a shipping address has been stored in the session.
This can be from a new address or re-using an existing address.
"""
new_fields = self.new_shipping_address_fields()
has_new_address = new_fields is not None
user_address_id = self.shipping_user_address_id()
has_old_address = user_address_id is not None and user_address_id > 0
return has_new_address or has_old_address
# Shipping method
# ===============
def use_free_shipping(self):
"""
Set "free shipping" code to session
"""
self._set('shipping', 'method_code', '__free__')
def use_shipping_method(self, code):
"""
Set shipping method code to session
"""
self._set('shipping', 'method_code', code)
def shipping_method_code(self, basket):
"""
Return the shipping method code
"""
return self._get('shipping', 'method_code')
def is_shipping_method_set(self, basket):
"""
Test if a valid shipping method is stored in the session
"""
return self.shipping_method_code(basket) is not None
# Billing address fields
# ======================
#
# There are 3 common options:
# 1. Billing address is entered manually through a form
# 2. Billing address is selected from address book
# 3. Billing address is the same as the shipping address
def bill_to_new_address(self, address_fields):
"""
Store address fields for a billing address.
"""
self._flush_namespace('billing')
self._set('billing', 'new_address_fields', address_fields)
def bill_to_user_address(self, address):
"""
Set an address from a user's address book as the billing address
:address: The address object
"""
self._flush_namespace('billing')
self._set('billing', 'user_address_id', address.id)
def bill_to_shipping_address(self):
"""
        Record the fact that the billing address is to be the same as
the shipping address.
"""
self._flush_namespace('billing')
self._set('billing', 'billing_address_same_as_shipping', True)
# Legacy method name
billing_address_same_as_shipping = bill_to_shipping_address
def is_billing_address_same_as_shipping(self):
return self._get('billing', 'billing_address_same_as_shipping', False)
def billing_user_address_id(self):
"""
Return the ID of the user address being used for billing
"""
return self._get('billing', 'user_address_id')
def new_billing_address_fields(self):
"""
Return fields for a billing address
"""
return self._get('billing', 'new_address_fields')
def is_billing_address_set(self):
"""
Test whether a billing address has been stored in the session.
This can be from a new address or re-using an existing address.
"""
if self.is_billing_address_same_as_shipping():
return True
new_fields = self.new_billing_address_fields()
has_new_address = new_fields is not None
user_address_id = self.billing_user_address_id()
has_old_address = user_address_id is not None and user_address_id > 0
return has_new_address or has_old_address
# Payment methods
# ===============
def pay_by(self, method):
self._set('payment', 'method', method)
def payment_method(self):
return self._get('payment', 'method')
# Submission methods
# ==================
def set_order_number(self, order_number):
self._set('submission', 'order_number', order_number)
def get_order_number(self):
return self._get('submission', 'order_number')
def set_submitted_basket(self, basket):
self._set('submission', 'basket_id', basket.id)
def get_submitted_basket_id(self):
return self._get('submission', 'basket_id')
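# A minimal usage sketch from a (hypothetical) checkout view:
#
#   session = CheckoutSessionData(request)
#   session.set_guest_email('guest@example.com')
#   session.ship_to_new_address(address_form.cleaned_data)
#   session.use_shipping_method('standard')
#   if session.is_shipping_address_set() and session.is_shipping_method_set(basket):
#       ...  # proceed to place the order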
|
bsd-3-clause
|
itoed/anaconda
|
tests/glade/viewport/check_viewport.py
|
2
|
2833
|
#!/usr/bin/python
#
# Copyright (C) 2014 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation; either version 2.1 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Author: David Shea <[email protected]>
#
"""
Check that widgets that implement GtkScrollable are not placed within a
GtkViewport. If a widget knows how to scroll itself we probably don't want
to add an extra layer.
"""
import argparse
import sys
try:
from lxml import etree
except ImportError:
print("You need to install the python-lxml package to use check_pw_visibility.py")
sys.exit(1)
# I guess we could look at the introspected classes and see if they implement the Scrollable
# interface but that sounds like kind of a pain
SCROLLABLES = ["GtkIconView", "GtkLayout", "GtkTextView", "GtkToolPalette",
"GtkTreeView", "GtkViewport"]
def check_glade_file(glade_file_path):
glade_success = True
with open(glade_file_path) as glade_file:
# Parse the XML
glade_tree = etree.parse(glade_file)
# Look for something like:
# <object class="GtkViewport">
# <child>
# <object class="GtkTreeView">
for scrollable in SCROLLABLES:
for element in glade_tree.xpath(".//object[@class='GtkViewport']/child/object[@class='%s']" % scrollable):
glade_success = False
print("%s contained in GtkViewport at %s:%d" % (scrollable, glade_file_path,
element.sourceline))
return glade_success
if __name__ == "__main__":
parser = argparse.ArgumentParser("Check that password entries have visibility set to False")
# Ignore translation arguments
parser.add_argument("-t", "--translate", action='store_true',
help=argparse.SUPPRESS)
parser.add_argument("-p", "--podir", action='store', type=str,
metavar='PODIR', help=argparse.SUPPRESS, default='./po')
parser.add_argument("glade_files", nargs="+", metavar="GLADE-FILE",
                        help='The glade files to check')
args = parser.parse_args(args=sys.argv[1:])
success = True
for file_path in args.glade_files:
if not check_glade_file(file_path):
success = False
sys.exit(0 if success else 1)
|
gpl-2.0
|
Kitware/tonic-data-generator
|
python/tonic/cinema/spec-b-converter.py
|
1
|
5917
|
r"""
brew install libtiff libjpeg webp little-cms2
sudo easy_install pip
sudo pip install Pillow
"""
import sys, os, json, math, gzip, shutil
import numpy as np
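# importing ImImagePlugin ensures PIL's IM format handler is registered for
# the Image.open() calls on the *.im depth files below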
from PIL import ImImagePlugin
from PIL import Image
from vtk import *
from tonic.paraview import data_converter
def extractFloatArrays(directory, ranges):
for root, dirs, files in os.walk(directory):
for name in files:
if '.png' in name:
fieldName = name[name.index('_')+1:-4]
srcFile = os.path.join(root, name)
destFile = os.path.join(root, name[:-4] + '.float32')
imageSize = data_converter.convertImageToFloat(srcFile, destFile, ranges[fieldName])
# Remove image
os.remove(srcFile)
# Compress data
with open(destFile, 'rb') as f_in, gzip.open(destFile + '.gz', 'wb') as f_out:
shutil.copyfileobj(f_in, f_out)
os.remove(destFile)
return imageSize
def createIntensityArray(directory, nbLayers):
outputArray = vtkUnsignedCharArray()
imageSize = 0
reader = vtkPNGReader()
for layerIdx in range(nbLayers):
luminanceImage = os.path.join(directory, str(layerIdx) + '.luminance')
reader.SetFileName(luminanceImage)
reader.Update()
rgbArray = reader.GetOutput().GetPointData().GetArray(0)
# Extract image size and allocate memory
if imageSize == 0:
imageSize = rgbArray.GetNumberOfTuples()
outputArray.SetNumberOfTuples(imageSize * nbLayers)
# Extract each byte
for idx in range(imageSize):
outputArray.SetValue(layerIdx * imageSize + idx, (rgbArray.GetValue(idx * 3)))
# Remove luminance file
os.remove(luminanceImage)
return outputArray
def createOrderFile(directory, nbLayers, intensityArray, width, height):
# Load *.im, sort pixels, save
# Load data
layerImages = []
totalSize = intensityArray.GetNumberOfTuples()
imageSize = totalSize / nbLayers
for layerIdx in range(nbLayers):
imagePath = os.path.join(directory, str(layerIdx) + '.im')
im = Image.open(str(imagePath))
# im.show()
# try:
# input("Press enter to continue ")
# except NameError:
# pass
layerImages.append(np.array(im, np.float32).reshape(im.size[1] * im.size[0]))
# Create destination structure
orderArray = vtkUnsignedCharArray()
    orderArray.SetName('order')
orderArray.SetNumberOfTuples(totalSize)
sortedIntensity = vtkUnsignedCharArray()
    sortedIntensity.SetName('intensity')
sortedIntensity.SetNumberOfTuples(totalSize)
for pixelIdx in range(imageSize):
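        # flipYIdx addresses the same column in the vertically mirrored image
        # (row y from the top becomes row y from the bottom)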
x = int(pixelIdx % width)
y = int(pixelIdx / width)
flipYIdx = width * (height - y - 1) + x
# flipYIdx = imageSize - pixelIdx - 1
# flipYIdx = pixelIdx
depthStack = []
for imageArray in layerImages:
depthStack.append((imageArray[flipYIdx], len(depthStack)))
depthStack.sort(key=lambda tup: tup[0])
for destLayerIdx in range(len(depthStack)):
# if depthStack[destLayerIdx][0] > 255:
# orderArray.SetValue((imageSize * destLayerIdx) + pixelIdx, 255)
# sortedIntensity.SetValue((imageSize * destLayerIdx) + pixelIdx, 0)
# else:
sourceLayerIdx = depthStack[destLayerIdx][1]
# Copy Idx
orderArray.SetValue((imageSize * destLayerIdx) + pixelIdx, sourceLayerIdx)
sortedIntensity.SetValue((imageSize * destLayerIdx) + pixelIdx, intensityArray.GetValue((imageSize * sourceLayerIdx) + pixelIdx))
# Write order file
    orderFileName = os.path.join(directory, 'order.uint8')
with open(orderFileName, 'wb') as f:
f.write(buffer(orderArray))
# Compress data
with open(orderFileName, 'rb') as f_in, gzip.open(orderFileName + '.gz', 'wb') as f_out:
shutil.copyfileobj(f_in, f_out)
os.remove(orderFileName)
# Write intensity file
    intensityFileName = os.path.join(directory, 'intensity.uint8')
with open(intensityFileName, 'wb') as f:
f.write(buffer(sortedIntensity))
# Compress data
with open(intensityFileName, 'rb') as f_in, gzip.open(intensityFileName + '.gz', 'wb') as f_out:
shutil.copyfileobj(f_in, f_out)
os.remove(intensityFileName)
# Remove IM files
for layerIdx in range(nbLayers):
imagePath = os.path.join(directory, str(layerIdx) + '.im')
os.remove(imagePath)
# =============================================================================
# Start processing dataset
# =============================================================================
convertFileName = os.path.join(sys.argv[-1], 'convert.json')
tonicFileName = os.path.join(sys.argv[-1], 'index.json')
with open(convertFileName, "r") as f:
convertInfo = json.load(f)
for directory in convertInfo['directories']:
# Convert images to float
imageSize = extractFloatArrays(directory, convertInfo['scalars'])
    # Convert luminance to intensity
intensityStack = createIntensityArray(directory, convertInfo['layers'])
# Generate order layer
createOrderFile(directory, convertInfo['layers'], intensityStack, imageSize[0], imageSize[1])
# Update image size inside index.json
with open(tonicFileName, "r") as f:
tonicMeta = json.load(f)
tonicMeta['SortedComposite']['dimensions'] = [ imageSize[0], imageSize[1] ]
print "resolution", imageSize[0], 'x', imageSize[1], '=', (imageSize[0]*imageSize[1])
with open(tonicFileName + '_', 'w') as fw:
fw.write(json.dumps(tonicMeta, indent=4))
os.remove(tonicFileName)
os.rename(tonicFileName + '_', tonicFileName)
os.remove(convertFileName)
|
bsd-3-clause
|
SpotlightKid/mididings
|
mididings/live/livedings.py
|
2
|
10056
|
# -*- coding: utf-8 -*-
#
# mididings
#
# Copyright (C) 2008-2014 Dominic Sacré <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
from mididings.live.widgets import LiveThemedFactory, UnthemedFactory
from mididings.live.osc_control import LiveOSC
import sys
if sys.version_info >= (3,):
unichr = chr
class LiveDings(object):
def __init__(self, options):
self.options = options
# start OSC server
self.osc = LiveOSC(self, self.options.control_port,
self.options.listen_port)
self.osc.start()
if self.options.themed:
widget_factory = LiveThemedFactory(self.options.color,
self.options.color_highlight,
self.options.color_background)
else:
widget_factory = UnthemedFactory()
# create the main window
self.win = widget_factory.Tk(padx=8, pady=8)
self.win.minsize(480, 120)
self.win.geometry('%dx%d' % (self.options.width, self.options.height))
if self.options.name:
self.win.title('livedings - %s' % self.options.name)
else:
self.win.title('livedings')
# track window resizing
self.win.bind('<Configure>',
lambda event: self.win.after_idle(self.update, True))
# configure the grid
self.win.grid_rowconfigure(0, weight=1)
self.win.grid_columnconfigure(1, minsize=self.options.list_width,
weight=0)
for n in range(3, 8):
self.win.grid_columnconfigure(n, weight=1, minsize=64)
# create listbox
self.listbox = widget_factory.Listbox(
self.win, font=self.options.list_font,
selectmode='single', activestyle='none',
highlightthickness=0)
self.listbox.grid(column=1, row=0, rowspan=2, sticky='nsew', padx=8)
self.listbox.bind('<ButtonRelease-1>',
lambda event: self.on_select_scene())
# create scrollbar for listbox. will be attached to the grid only
# when necessary
self.scrollbar = widget_factory.AutoScrollbar(self.win,
orient='vertical')
self.scrollbar.set_show_hide(
lambda: self.scrollbar.grid(column=0, row=0,
rowspan=2, sticky='ns'),
lambda: self.scrollbar.grid_forget()
)
self.scrollbar.config(command=self.listbox.yview)
self.listbox.config(yscrollcommand=self.scrollbar.set)
# create separator
separator = widget_factory.Frame(self.win, width=2)
separator.grid(column=2, row=0, rowspan=2, sticky='ns')
# create canvas
self.canvas = widget_factory.Canvas(self.win, highlightthickness=0)
if self.options.color_background is not None:
self.canvas.config(background=self.options.color_background)
self.canvas.grid(column=3, columnspan=5, row=0, sticky='nsew')
self.canvas.bind('<Button-1>', self.on_button_press)
self.canvas.bind('<ButtonRelease-1>', self.on_button_release)
# create buttons
try:
button_size = int(int(self.options.font.split(' ')[1]) / 1.5)
except IndexError:
button_size = 20
button_font = 'Sans %d bold' % button_size
self.btn_prev_scene = widget_factory.Button(
self.win, text=unichr(0x25c0)*2,
width=64, font=button_font,
command=self.osc.prev_scene)
self.btn_prev_scene.grid(column=3, row=1, padx=8)
self.btn_next_scene = widget_factory.Button(
self.win, text=unichr(0x25b6)*2,
width=64, font=button_font,
command=self.osc.next_scene)
self.btn_next_scene.grid(column=4, row=1, padx=8)
self.btn_prev_subscene = widget_factory.Button(
self.win, text=unichr(0x25c0),
width=64, font=button_font,
command=self.osc.prev_subscene)
self.btn_prev_subscene.grid(column=5, row=1, padx=8)
self.btn_next_subscene = widget_factory.Button(
self.win, text=unichr(0x25b6),
width=64, font=button_font,
command=self.osc.next_subscene)
self.btn_next_subscene.grid(column=6, row=1, padx=8)
self.btn_panic = widget_factory.Button(
self.win, text="!",
width=64, font=button_font,
command=self.osc.panic)
self.btn_panic.grid(column=7, row=1, padx=8)
# attempt to calculate the height of one line in the current font.
# this is crazy...
try:
self.line_height = int(self.options.font.split(' ')[1]) * 2
except Exception:
# whatever...
self.line_height = 72
# some keybindings
self.win.bind('<Left>', lambda event: self.osc.prev_subscene())
self.win.bind('<Right>', lambda event: self.osc.next_subscene())
self.win.bind('<Up>', lambda event: self.osc.prev_scene())
self.win.bind('<Down>', lambda event: self.osc.next_scene())
self.win.bind('<Escape>', lambda event: self.osc.panic())
# prevent left/right keys from scrolling the listbox
self.win.bind_class('Listbox', "<Left>", lambda event: None)
self.win.bind_class('Listbox', "<Right>", lambda event: None)
# get config from mididings
self.osc.query()
self._ready = False
# window dimensions
self._width = 0
self._height = 0
# mouse click position
self._click_x = 0
self._click_y = 0
def on_select_scene(self):
cursel = self.listbox.curselection()
if cursel:
self.osc.switch_scene(sorted(self.scenes.keys())[int(cursel[0])])
def on_button_press(self, event):
self._click_x = event.x
self._click_y = event.y
def on_button_release(self, event):
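        # hit-test: only clicks inside the vertical band occupied by the
        # subscene list (below the scene header lines) select a subscene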
if (self._ready and
self._click_y > 8 + 3 * self.line_height and
self._click_y < 8 + (len(self.scenes[self.current_scene][1])+3) *
self.line_height):
            n = (self._click_y - (8 + 3 * self.line_height)) // self.line_height
self.osc.switch_subscene(n + self.data_offset)
def update(self, resize=False):
width = self.canvas.winfo_width()
height = self.canvas.winfo_height()
# check if the window size really changed
if resize and width == self._width and height == self._height:
return
self.draw_canvas(width, height)
self._width = width
self._height = height
def set_data_offset(self, data_offset):
self.data_offset = data_offset
def set_scenes(self, scenes):
self.scenes = scenes
self.update_scenes()
self._ready = True
def set_current_scene(self, scene, subscene):
self.current_scene = scene
self.current_subscene = subscene
self.listbox.selection_clear(0, 'end')
self.listbox.selection_set(sorted(self.scenes.keys()).index(scene))
self.update()
def update_scenes(self):
self.listbox.delete(0, 'end')
for n in sorted(self.scenes.keys()):
name = self.scenes[n][0]
if name:
self.listbox.insert('end', "%d: %s" % (n, name))
else:
self.listbox.insert('end', "%d" % n)
def draw_canvas(self, width, height):
if not self._ready or not len(self.scenes):
return
scene = self.current_scene
subscene = self.current_subscene
scene_name, subscene_names = self.scenes[scene]
has_subscenes = bool(len(subscene_names))
if not scene_name:
scene_name = "(unnamed)"
self.canvas.delete('all')
# draw scene number
self.canvas.create_text(
24,
8,
text=(("%d.%d" % (scene, subscene))
if has_subscenes else str(scene)),
fill=self.options.color,
font=self.options.font,
anchor='nw'
)
# draw scene name
self.canvas.create_text(
width / 2 + 24,
8 + 1.5 * self.line_height,
text=scene_name,
fill=self.options.color_highlight,
font=self.options.font,
anchor='n'
)
# draw subscenes
for n, s in enumerate(subscene_names):
self.canvas.create_text(
width / 2 + 24,
8 + (n+3) * self.line_height,
text=s if s else "(unnamed)",
fill=(self.options.color_highlight
if n + self.data_offset == subscene
else self.options.color),
font=self.options.font,
anchor='n'
)
# draw indicator
self.canvas.create_text(
16,
8 + 1.5 * self.line_height if not has_subscenes
else 8 + (subscene - self.data_offset + 3) * self.line_height,
text=unichr(0x25b6),
fill=self.options.color_highlight,
font=self.options.font,
anchor='nw'
)
def run(self):
self.win.mainloop()
|
gpl-2.0
|
sharma1nitish/phantomjs
|
src/qt/qtwebkit/Tools/Scripts/webkitpy/tool/steps/closepatch.py
|
145
|
1889
|
# Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from webkitpy.tool.comments import bug_comment_from_commit_text
from webkitpy.tool.steps.abstractstep import AbstractStep
class ClosePatch(AbstractStep):
def run(self, state):
comment_text = bug_comment_from_commit_text(self._tool.scm(), state["commit_text"])
self._tool.bugs.clear_attachment_flags(state["patch"].id(), comment_text)
|
bsd-3-clause
|
FireWRT/OpenWrt-Firefly-Libraries
|
staging_dir/host/lib/python2.7/test/test_thread.py
|
27
|
8274
|
import os
import unittest
import random
from test import test_support
thread = test_support.import_module('thread')
import time
import sys
import weakref
from test import lock_tests
NUMTASKS = 10
NUMTRIPS = 3
_print_mutex = thread.allocate_lock()
def verbose_print(arg):
"""Helper function for printing out debugging output."""
if test_support.verbose:
with _print_mutex:
print arg
class BasicThreadTest(unittest.TestCase):
def setUp(self):
self.done_mutex = thread.allocate_lock()
self.done_mutex.acquire()
self.running_mutex = thread.allocate_lock()
self.random_mutex = thread.allocate_lock()
self.created = 0
self.running = 0
self.next_ident = 0
class ThreadRunningTests(BasicThreadTest):
def newtask(self):
with self.running_mutex:
self.next_ident += 1
verbose_print("creating task %s" % self.next_ident)
thread.start_new_thread(self.task, (self.next_ident,))
self.created += 1
self.running += 1
def task(self, ident):
with self.random_mutex:
delay = random.random() / 10000.0
verbose_print("task %s will run for %sus" % (ident, round(delay*1e6)))
time.sleep(delay)
verbose_print("task %s done" % ident)
with self.running_mutex:
self.running -= 1
if self.created == NUMTASKS and self.running == 0:
self.done_mutex.release()
def test_starting_threads(self):
# Basic test for thread creation.
for i in range(NUMTASKS):
self.newtask()
verbose_print("waiting for tasks to complete...")
self.done_mutex.acquire()
verbose_print("all tasks done")
def test_stack_size(self):
# Various stack size tests.
self.assertEqual(thread.stack_size(), 0, "initial stack size is not 0")
thread.stack_size(0)
self.assertEqual(thread.stack_size(), 0, "stack_size not reset to default")
@unittest.skipIf(os.name not in ("nt", "os2", "posix"), 'test meant for nt, os2, and posix')
def test_nt_and_posix_stack_size(self):
try:
thread.stack_size(4096)
except ValueError:
verbose_print("caught expected ValueError setting "
"stack_size(4096)")
except thread.error:
self.skipTest("platform does not support changing thread stack "
"size")
fail_msg = "stack_size(%d) failed - should succeed"
for tss in (262144, 0x100000, 0):
thread.stack_size(tss)
self.assertEqual(thread.stack_size(), tss, fail_msg % tss)
verbose_print("successfully set stack_size(%d)" % tss)
for tss in (262144, 0x100000):
verbose_print("trying stack_size = (%d)" % tss)
self.next_ident = 0
self.created = 0
for i in range(NUMTASKS):
self.newtask()
verbose_print("waiting for all tasks to complete")
self.done_mutex.acquire()
verbose_print("all tasks done")
thread.stack_size(0)
def test__count(self):
# Test the _count() function.
orig = thread._count()
mut = thread.allocate_lock()
mut.acquire()
started = []
def task():
started.append(None)
mut.acquire()
mut.release()
thread.start_new_thread(task, ())
while not started:
time.sleep(0.01)
self.assertEqual(thread._count(), orig + 1)
# Allow the task to finish.
mut.release()
# The only reliable way to be sure that the thread ended from the
# interpreter's point of view is to wait for the function object to be
# destroyed.
done = []
wr = weakref.ref(task, lambda _: done.append(None))
del task
while not done:
time.sleep(0.01)
self.assertEqual(thread._count(), orig)
def test_save_exception_state_on_error(self):
# See issue #14474
def task():
started.release()
raise SyntaxError
def mywrite(self, *args):
try:
raise ValueError
except ValueError:
pass
real_write(self, *args)
c = thread._count()
started = thread.allocate_lock()
with test_support.captured_output("stderr") as stderr:
real_write = stderr.write
stderr.write = mywrite
started.acquire()
thread.start_new_thread(task, ())
started.acquire()
while thread._count() > c:
time.sleep(0.01)
self.assertIn("Traceback", stderr.getvalue())
class Barrier:
def __init__(self, num_threads):
self.num_threads = num_threads
self.waiting = 0
self.checkin_mutex = thread.allocate_lock()
self.checkout_mutex = thread.allocate_lock()
self.checkout_mutex.acquire()
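    # two-phase barrier: checkin_mutex admits arriving threads and
    # checkout_mutex releases them, so no thread can re-enter the barrier
    # before every other thread has left the previous trip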
def enter(self):
self.checkin_mutex.acquire()
self.waiting = self.waiting + 1
if self.waiting == self.num_threads:
self.waiting = self.num_threads - 1
self.checkout_mutex.release()
return
self.checkin_mutex.release()
self.checkout_mutex.acquire()
self.waiting = self.waiting - 1
if self.waiting == 0:
self.checkin_mutex.release()
return
self.checkout_mutex.release()
class BarrierTest(BasicThreadTest):
def test_barrier(self):
self.bar = Barrier(NUMTASKS)
self.running = NUMTASKS
for i in range(NUMTASKS):
thread.start_new_thread(self.task2, (i,))
verbose_print("waiting for tasks to end")
self.done_mutex.acquire()
verbose_print("tasks done")
def task2(self, ident):
for i in range(NUMTRIPS):
if ident == 0:
# give it a good chance to enter the next
# barrier before the others are all out
# of the current one
delay = 0
else:
with self.random_mutex:
delay = random.random() / 10000.0
verbose_print("task %s will run for %sus" %
(ident, round(delay * 1e6)))
time.sleep(delay)
verbose_print("task %s entering %s" % (ident, i))
self.bar.enter()
verbose_print("task %s leaving barrier" % ident)
with self.running_mutex:
self.running -= 1
# Must release mutex before releasing done, else the main thread can
# exit and set mutex to None as part of global teardown; then
# mutex.release() raises AttributeError.
finished = self.running == 0
if finished:
self.done_mutex.release()
class LockTests(lock_tests.LockTests):
locktype = thread.allocate_lock
class TestForkInThread(unittest.TestCase):
def setUp(self):
self.read_fd, self.write_fd = os.pipe()
@unittest.skipIf(sys.platform.startswith('win'),
"This test is only appropriate for POSIX-like systems.")
@test_support.reap_threads
def test_forkinthread(self):
def thread1():
try:
pid = os.fork() # fork in a thread
except RuntimeError:
sys.exit(0) # exit the child
if pid == 0: # child
os.close(self.read_fd)
os.write(self.write_fd, "OK")
sys.exit(0)
else: # parent
os.close(self.write_fd)
thread.start_new_thread(thread1, ())
self.assertEqual(os.read(self.read_fd, 2), "OK",
"Unable to fork() in thread")
def tearDown(self):
try:
os.close(self.read_fd)
except OSError:
pass
try:
os.close(self.write_fd)
except OSError:
pass
def test_main():
test_support.run_unittest(ThreadRunningTests, BarrierTest, LockTests,
TestForkInThread)
if __name__ == "__main__":
test_main()
|
gpl-2.0
|
myd7349/Ongoing-Study
|
c#/Console/PInvoke/MarshalCppClassV1/MarshalCppClass/dump_dll_exports.py
|
1
|
2848
|
# coding: utf-8
import sys
import cppmangle
import plumbum
def main():
if len(sys.argv) != 2:
print('DLL path missing.')
return
dll_path = sys.argv[1]
print('DLL path:', dll_path)
mangled_symbols = []
try:
dependencies = plumbum.local['Dependencies.exe']
dll_exports = dependencies('-exports', dll_path).splitlines()
for line in dll_exports:
parts = line.split()
if len(parts) == 3 and parts[0] == 'Name':
mangled_symbols.append(parts[2])
except plumbum.CommandNotFound:
print('"Dependencies.exe" not found.\n' \
'Please download it from https://github.com/lucasg/Dependencies.\n' \
              'After installation, add it to your PATH environment variable.')
return
for symbol in mangled_symbols:
try:
print(symbol, '->', cppmangle.cdecl_sym(cppmangle.demangle(symbol)))
        except Exception:  # not demangleable; print the raw symbol
print(symbol)
if __name__ == '__main__':
main()
# References:
# https://github.com/AVGTechnologies/cppmangle
# Example:
# In [23]: !dump_dll_exports.py ..\Debug\NativeClassLib.dll
# DLL path: ..\Debug\NativeClassLib.dll
# ??0Person@@QAE@PBDH@Z -> public: __thiscall Person::Person(char const *,int)
# ??_FPerson@@QAEXXZ -> public: void __thiscall Person::`default constructor closure'(void)
# ?Delete@Person@@SAXPAV1@@Z -> public: static void __cdecl Person::Delete(class Person *)
# ?GetAge@Person@@QBEHXZ -> public: int __thiscall Person::GetAge(void)const
# ?GetName@Person@@QBEPBDXZ -> public: char const * __thiscall Person::GetName(void)const
# ?New@Person@@SAPAV1@PBDH@Z -> public: static class Person * __cdecl Person::New(char const *,int)
# ?Say@Person@@QBEXPBD@Z -> public: void __thiscall Person::Say(char const *)const
# ?SetAge@Person@@QAEXH@Z -> public: void __thiscall Person::SetAge(int)
# ?SetName@Person@@QAEXPBD@Z -> public: void __thiscall Person::SetName(char const *)
# GetArch
# In [24]: !dump_dll_exports.py ..\x64\Debug\NativeClassLib.dll
# DLL path: ..\x64\Debug\NativeClassLib.dll
# ??0Person@@QEAA@PEBDH@Z -> public: __cdecl Person::Person(char const *,int)
# ??_FPerson@@QEAAXXZ -> public: void __cdecl Person::`default constructor closure'(void)
# ?Delete@Person@@SAXPEAV1@@Z -> public: static void __cdecl Person::Delete(class Person *)
# ?GetAge@Person@@QEBAHXZ -> public: int __cdecl Person::GetAge(void)const
# ?GetName@Person@@QEBAPEBDXZ -> public: char const * __cdecl Person::GetName(void)const
# ?New@Person@@SAPEAV1@PEBDH@Z -> public: static class Person * __cdecl Person::New(char const *,int)
# ?Say@Person@@QEBAXPEBD@Z -> public: void __cdecl Person::Say(char const *)const
# ?SetAge@Person@@QEAAXH@Z -> public: void __cdecl Person::SetAge(int)
# ?SetName@Person@@QEAAXPEBD@Z -> public: void __cdecl Person::SetName(char const *)
# GetArch
|
lgpl-3.0
|
invalidstatement/grit-i18n
|
grit/tool/diff_structures.py
|
62
|
3923
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''The 'grit sdiff' tool.
'''
import os
import getopt
import tempfile
from grit.node import structure
from grit.tool import interface
from grit import constants
from grit import util
# Builds the description for the tool (used as the __doc__
# for the DiffStructures class).
_class_doc = """\
Allows you to view the differences in the structure of two files,
disregarding their translateable content. Translateable portions of
each file are changed to the string "TTTTTT" before invoking the diff program
specified by the P4DIFF environment variable.
Usage: grit sdiff [-t TYPE] [-s SECTION] [-e ENCODING] LEFT RIGHT
LEFT and RIGHT are the files you want to diff. SECTION is required
for structure types like 'dialog' to identify the part of the file to look at.
ENCODING indicates the encoding of the left and right files (default 'cp1252').
TYPE can be one of the following, defaults to 'tr_html':
"""
for gatherer in structure._GATHERERS:
_class_doc += " - %s\n" % gatherer
class DiffStructures(interface.Tool):
__doc__ = _class_doc
def __init__(self):
self.section = None
self.left_encoding = 'cp1252'
self.right_encoding = 'cp1252'
self.structure_type = 'tr_html'
def ShortDescription(self):
return 'View differences without regard for translateable portions.'
def Run(self, global_opts, args):
(opts, args) = getopt.getopt(args, 's:e:t:',
['left_encoding=', 'right_encoding='])
for key, val in opts:
if key == '-s':
self.section = val
elif key == '-e':
self.left_encoding = val
self.right_encoding = val
elif key == '-t':
self.structure_type = val
elif key == '--left_encoding':
self.left_encoding = val
elif key == '--right_encoding':
        self.right_encoding = val
if len(args) != 2:
print "Incorrect usage - 'grit help sdiff' for usage details."
return 2
if 'P4DIFF' not in os.environ:
print "Environment variable P4DIFF not set; defaulting to 'windiff'."
diff_program = 'windiff'
else:
diff_program = os.environ['P4DIFF']
left_trans = self.MakeStaticTranslation(args[0], self.left_encoding)
try:
try:
right_trans = self.MakeStaticTranslation(args[1], self.right_encoding)
os.system('%s %s %s' % (diff_program, left_trans, right_trans))
finally:
os.unlink(right_trans)
finally:
os.unlink(left_trans)
def MakeStaticTranslation(self, original_filename, encoding):
"""Given the name of the structure type (self.structure_type), the filename
of the file holding the original structure, and optionally the "section" key
identifying the part of the file to look at (self.section), creates a
temporary file holding a "static" translation of the original structure
(i.e. one where all translateable parts have been replaced with "TTTTTT")
and returns the temporary file name. It is the caller's responsibility to
delete the file when finished.
Args:
original_filename: 'c:\\bingo\\bla.rc'
Return:
'c:\\temp\\werlkjsdf334.tmp'
"""
original = structure._GATHERERS[self.structure_type](original_filename,
extkey=self.section,
encoding=encoding)
original.Parse()
translated = original.Translate(constants.CONSTANT_LANGUAGE, False)
fname = tempfile.mktemp()
with util.WrapOutputStream(open(fname, 'w')) as writer:
writer.write("Original filename: %s\n=============\n\n"
% original_filename)
writer.write(translated) # write in UTF-8
return fname
|
bsd-2-clause
|
misisnik/ExternalInterface
|
ENV/Lib/encodings/cp1250.py
|
272
|
13686
|
""" Python Character Mapping Codec cp1250 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1250.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp1250',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
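# round-trip sketch: b'\xc8'.decode('cp1250') == '\u010c' (LATIN CAPITAL
# LETTER C WITH CARON), per the decoding table below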
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\u20ac' # 0x80 -> EURO SIGN
'\ufffe' # 0x81 -> UNDEFINED
'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
'\ufffe' # 0x83 -> UNDEFINED
'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
'\u2020' # 0x86 -> DAGGER
'\u2021' # 0x87 -> DOUBLE DAGGER
'\ufffe' # 0x88 -> UNDEFINED
'\u2030' # 0x89 -> PER MILLE SIGN
'\u0160' # 0x8A -> LATIN CAPITAL LETTER S WITH CARON
'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
'\u015a' # 0x8C -> LATIN CAPITAL LETTER S WITH ACUTE
'\u0164' # 0x8D -> LATIN CAPITAL LETTER T WITH CARON
'\u017d' # 0x8E -> LATIN CAPITAL LETTER Z WITH CARON
'\u0179' # 0x8F -> LATIN CAPITAL LETTER Z WITH ACUTE
'\ufffe' # 0x90 -> UNDEFINED
'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
'\u2022' # 0x95 -> BULLET
'\u2013' # 0x96 -> EN DASH
'\u2014' # 0x97 -> EM DASH
'\ufffe' # 0x98 -> UNDEFINED
'\u2122' # 0x99 -> TRADE MARK SIGN
'\u0161' # 0x9A -> LATIN SMALL LETTER S WITH CARON
'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
'\u015b' # 0x9C -> LATIN SMALL LETTER S WITH ACUTE
'\u0165' # 0x9D -> LATIN SMALL LETTER T WITH CARON
'\u017e' # 0x9E -> LATIN SMALL LETTER Z WITH CARON
'\u017a' # 0x9F -> LATIN SMALL LETTER Z WITH ACUTE
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\u02c7' # 0xA1 -> CARON
'\u02d8' # 0xA2 -> BREVE
'\u0141' # 0xA3 -> LATIN CAPITAL LETTER L WITH STROKE
'\xa4' # 0xA4 -> CURRENCY SIGN
'\u0104' # 0xA5 -> LATIN CAPITAL LETTER A WITH OGONEK
'\xa6' # 0xA6 -> BROKEN BAR
'\xa7' # 0xA7 -> SECTION SIGN
'\xa8' # 0xA8 -> DIAERESIS
'\xa9' # 0xA9 -> COPYRIGHT SIGN
'\u015e' # 0xAA -> LATIN CAPITAL LETTER S WITH CEDILLA
'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xac' # 0xAC -> NOT SIGN
'\xad' # 0xAD -> SOFT HYPHEN
'\xae' # 0xAE -> REGISTERED SIGN
'\u017b' # 0xAF -> LATIN CAPITAL LETTER Z WITH DOT ABOVE
'\xb0' # 0xB0 -> DEGREE SIGN
'\xb1' # 0xB1 -> PLUS-MINUS SIGN
'\u02db' # 0xB2 -> OGONEK
'\u0142' # 0xB3 -> LATIN SMALL LETTER L WITH STROKE
'\xb4' # 0xB4 -> ACUTE ACCENT
'\xb5' # 0xB5 -> MICRO SIGN
'\xb6' # 0xB6 -> PILCROW SIGN
'\xb7' # 0xB7 -> MIDDLE DOT
'\xb8' # 0xB8 -> CEDILLA
'\u0105' # 0xB9 -> LATIN SMALL LETTER A WITH OGONEK
'\u015f' # 0xBA -> LATIN SMALL LETTER S WITH CEDILLA
'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\u013d' # 0xBC -> LATIN CAPITAL LETTER L WITH CARON
'\u02dd' # 0xBD -> DOUBLE ACUTE ACCENT
'\u013e' # 0xBE -> LATIN SMALL LETTER L WITH CARON
'\u017c' # 0xBF -> LATIN SMALL LETTER Z WITH DOT ABOVE
'\u0154' # 0xC0 -> LATIN CAPITAL LETTER R WITH ACUTE
'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\u0102' # 0xC3 -> LATIN CAPITAL LETTER A WITH BREVE
'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\u0139' # 0xC5 -> LATIN CAPITAL LETTER L WITH ACUTE
'\u0106' # 0xC6 -> LATIN CAPITAL LETTER C WITH ACUTE
'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\u010c' # 0xC8 -> LATIN CAPITAL LETTER C WITH CARON
'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
'\u0118' # 0xCA -> LATIN CAPITAL LETTER E WITH OGONEK
'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\u011a' # 0xCC -> LATIN CAPITAL LETTER E WITH CARON
'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\u010e' # 0xCF -> LATIN CAPITAL LETTER D WITH CARON
'\u0110' # 0xD0 -> LATIN CAPITAL LETTER D WITH STROKE
'\u0143' # 0xD1 -> LATIN CAPITAL LETTER N WITH ACUTE
'\u0147' # 0xD2 -> LATIN CAPITAL LETTER N WITH CARON
'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\u0150' # 0xD5 -> LATIN CAPITAL LETTER O WITH DOUBLE ACUTE
'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xd7' # 0xD7 -> MULTIPLICATION SIGN
'\u0158' # 0xD8 -> LATIN CAPITAL LETTER R WITH CARON
'\u016e' # 0xD9 -> LATIN CAPITAL LETTER U WITH RING ABOVE
'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
'\u0170' # 0xDB -> LATIN CAPITAL LETTER U WITH DOUBLE ACUTE
'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\xdd' # 0xDD -> LATIN CAPITAL LETTER Y WITH ACUTE
'\u0162' # 0xDE -> LATIN CAPITAL LETTER T WITH CEDILLA
'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
'\u0155' # 0xE0 -> LATIN SMALL LETTER R WITH ACUTE
'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\u0103' # 0xE3 -> LATIN SMALL LETTER A WITH BREVE
'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
'\u013a' # 0xE5 -> LATIN SMALL LETTER L WITH ACUTE
'\u0107' # 0xE6 -> LATIN SMALL LETTER C WITH ACUTE
'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
'\u010d' # 0xE8 -> LATIN SMALL LETTER C WITH CARON
'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
'\u0119' # 0xEA -> LATIN SMALL LETTER E WITH OGONEK
'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
'\u011b' # 0xEC -> LATIN SMALL LETTER E WITH CARON
'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\u010f' # 0xEF -> LATIN SMALL LETTER D WITH CARON
'\u0111' # 0xF0 -> LATIN SMALL LETTER D WITH STROKE
'\u0144' # 0xF1 -> LATIN SMALL LETTER N WITH ACUTE
'\u0148' # 0xF2 -> LATIN SMALL LETTER N WITH CARON
'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\u0151' # 0xF5 -> LATIN SMALL LETTER O WITH DOUBLE ACUTE
'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf7' # 0xF7 -> DIVISION SIGN
'\u0159' # 0xF8 -> LATIN SMALL LETTER R WITH CARON
'\u016f' # 0xF9 -> LATIN SMALL LETTER U WITH RING ABOVE
'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
'\u0171' # 0xFB -> LATIN SMALL LETTER U WITH DOUBLE ACUTE
'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
'\xfd' # 0xFD -> LATIN SMALL LETTER Y WITH ACUTE
'\u0163' # 0xFE -> LATIN SMALL LETTER T WITH CEDILLA
'\u02d9' # 0xFF -> DOT ABOVE
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
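### Usage sketch (added, illustrative): stdlib charmap codec modules wrap
### these two tables in a Codec class roughly as below; charmap_decode and
### charmap_encode are real functions of the codecs module.
# class Codec(codecs.Codec):
#     def encode(self, input, errors='strict'):
#         return codecs.charmap_encode(input, errors, encoding_table)
#     def decode(self, input, errors='strict'):
#         return codecs.charmap_decode(input, errors, decoding_table)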
|
mit
|
assj/CloudNetworkAllocator
|
cloud-network-allocator/genetic_algorithm/ga.py
|
1
|
7471
|
# coding=utf-8
"""
Implementation of a genetic algorithm to allocate the nodes
of a request.
"""
import random
import copy
import utils.quality_evaluator as evaluator
def randomly_allocate_virtual_node(physical_node_list, virtual_node, virtual_node_list):
memory_capacity = virtual_node['memoryCapacity']
processing_capacity = virtual_node['processingCapacity']
storage_capacity = virtual_node['storageCapacity']
forbidden_node_id_list = list(current_virtual_node['physicalNodeId'] for current_virtual_node in virtual_node_list)
physical_node_id_list = list(current_physical_node['id'] for current_physical_node in physical_node_list)
available_node_id_list = filter(lambda current_physical_node_id: current_physical_node_id not in forbidden_node_id_list, physical_node_id_list)
physical_node_id = random.choice(available_node_id_list)
physical_node = filter(lambda current_physical_node: current_physical_node['id'] == physical_node_id, physical_node_list)[0]
remaining_memory_capacity = physical_node['remainingMemoryCapacity']
remaining_processing_capacity = physical_node['remainingProcessingCapacity']
remaining_storage_capacity = physical_node['remainingStorageCapacity']
if (remaining_memory_capacity >= memory_capacity and
remaining_processing_capacity >= processing_capacity and
remaining_storage_capacity >= storage_capacity):
old_physical_node_id = virtual_node['physicalNodeId']
old_physical_node = filter(lambda current_physical_node: current_physical_node['id'] == old_physical_node_id, physical_node_list)
if len(old_physical_node) > 0:
old_physical_node = old_physical_node[0]
old_physical_node['remainingMemoryCapacity'] += memory_capacity
old_physical_node['remainingProcessingCapacity'] += processing_capacity
old_physical_node['remainingStorageCapacity'] += storage_capacity
physical_node['remainingMemoryCapacity'] -= memory_capacity
physical_node['remainingProcessingCapacity'] -= processing_capacity
physical_node['remainingStorageCapacity'] -= storage_capacity
virtual_node['physicalNodeId'] = physical_node['id']
def randomly_allocate_virtual_nodes(physical_network_nodes, virtual_network_nodes):
individual = None
physical_network_nodes_aux = copy.deepcopy(physical_network_nodes)
# Deep-copy the virtual nodes too, so individuals do not share node state.
virtual_network_nodes_aux = copy.deepcopy(virtual_network_nodes)
num_of_physical_network_nodes = len(physical_network_nodes_aux)
physical_nodes_indices_candidates = range(0, num_of_physical_network_nodes)
cont = 0
for virtual_network_node in virtual_network_nodes_aux:
memory_capacity = virtual_network_node['memoryCapacity']
processing_capacity = virtual_network_node['processingCapacity']
storage_capacity = virtual_network_node['storageCapacity']
for i in physical_nodes_indices_candidates:
j = random.choice(physical_nodes_indices_candidates)
physical_network_node = physical_network_nodes_aux[j]
remaining_memory_capacity = physical_network_node['remainingMemoryCapacity']
remaining_processing_capacity = physical_network_node['remainingProcessingCapacity']
remaining_storage_capacity = physical_network_node['remainingStorageCapacity']
if (remaining_memory_capacity >= memory_capacity and
remaining_processing_capacity >= processing_capacity and
remaining_storage_capacity >= storage_capacity):
physical_network_node['remainingMemoryCapacity'] -= memory_capacity
physical_network_node['remainingProcessingCapacity'] -= processing_capacity
physical_network_node['remainingStorageCapacity'] -= storage_capacity
virtual_network_node['physicalNodeId'] = physical_network_node['id']
physical_nodes_indices_candidates.remove(j)
cont += 1
break
if cont == len(virtual_network_nodes_aux):
individual = (physical_network_nodes_aux, virtual_network_nodes_aux)
return individual
def generate_individuals(physical_network_nodes, virtual_network_nodes, num_of_individuals):
individual_with_fitness_list = []
for i in xrange(0, num_of_individuals):
individual = randomly_allocate_virtual_nodes(physical_network_nodes, virtual_network_nodes)
if individual is None:
continue
individual_fitness = evaluator.network_nodes_fitness(individual[0])
individual_with_fitness = individual_fitness, individual
individual_with_fitness_list.append(individual_with_fitness)
return individual_with_fitness_list
def mutate(individual_with_fitness, num_of_nodes_mutated):
# Copy the individual deeply so mutating the offspring does not also
# mutate the selected parent that stays in the population.
individual_with_fitness_aux = [individual_with_fitness[0],
copy.deepcopy(individual_with_fitness[1])]
individual_physical_nodes = individual_with_fitness_aux[1][0]
individual_virtual_nodes = individual_with_fitness_aux[1][1]
individual_virtual_nodes_indices = range(0, len(individual_virtual_nodes))
for i in range(0, num_of_nodes_mutated):
index_to_be_mutated = random.choice(individual_virtual_nodes_indices)
randomly_allocate_virtual_node(individual_physical_nodes, individual_virtual_nodes[index_to_be_mutated], individual_virtual_nodes)
individual_with_fitness_aux[0] = evaluator.network_nodes_fitness(individual_physical_nodes)
individual_with_fitness_aux = tuple(individual_with_fitness_aux)
return individual_with_fitness_aux
def evolve(physical_network_nodes, virtual_network_nodes, num_of_individuals, selection_rate=0.5, mutation_rate=0.4):
best_allocation_fitness = -1
individuals_with_fitness_list = generate_individuals(physical_network_nodes, virtual_network_nodes, num_of_individuals)
num_of_cycles_without_improvement = 5
if len(individuals_with_fitness_list) > 0:
num_of_selected_individuals = int(selection_rate * len(individuals_with_fitness_list))
num_of_nodes_mutated = int(mutation_rate * len(virtual_network_nodes))
if num_of_nodes_mutated == 0:
num_of_nodes_mutated = 1
cont = 0
while cont < num_of_cycles_without_improvement:
individuals_with_fitness_list.sort()
if best_allocation_fitness == -1:
best_allocation_fitness = individuals_with_fitness_list[0][0]
cont = 0
elif best_allocation_fitness > individuals_with_fitness_list[0][0]:
best_allocation_fitness = individuals_with_fitness_list[0][0]
cont = 0
else:
cont += 1
best_individuals_with_fitness_list = individuals_with_fitness_list[0:num_of_selected_individuals]
new_individuals_list = list(best_individuals_with_fitness_list)
break_while = False
while True:
for current_individual_with_fitness in best_individuals_with_fitness_list:
new_individual = mutate(current_individual_with_fitness, num_of_nodes_mutated)
new_individuals_list.append(new_individual)
if len(new_individuals_list) == len(individuals_with_fitness_list):
break_while = True
break
if break_while:
break
individuals_with_fitness_list = new_individuals_list
return best_allocation_fitness
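# Usage sketch (added, illustrative; assumes utils.quality_evaluator is
# importable and that lower fitness values are better, as in evolve above):
# physical = [{'id': i, 'remainingMemoryCapacity': 64,
#              'remainingProcessingCapacity': 16,
#              'remainingStorageCapacity': 500} for i in range(4)]
# virtual = [{'memoryCapacity': 8, 'processingCapacity': 2,
#             'storageCapacity': 50, 'physicalNodeId': None} for _ in range(2)]
# best_fitness = evolve(physical, virtual, num_of_individuals=20)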
|
gpl-3.0
|
grischa/django-filer
|
filer/utils/loader.py
|
38
|
1668
|
#-*- coding: utf-8 -*-
"""
This function is snatched from
https://github.com/ojii/django-load/blob/3058ab9d9d4875589638cc45e84b59e7e1d7c9c3/django_load/core.py#L49
local changes:
* added check for basestring to allow values that are already an object
or method.
"""
from django.utils.importlib import import_module
def load_object(import_path):
"""
Loads an object from an 'import_path', like in MIDDLEWARE_CLASSES and the
likes.
Import paths should be: "mypackage.mymodule.MyObject". It then imports the
module up until the last dot and tries to get the attribute after that dot
from the imported module.
If the import path does not contain any dots, a TypeError is raised.
If the module cannot be imported, an ImportError is raised.
If the attribute does not exist in the module, an AttributeError is raised.
"""
if not isinstance(import_path, basestring):
return import_path
if '.' not in import_path:
raise TypeError(
"'import_path' argument to 'django_load.core.load_object' "
"must contain at least one dot.")
module_name, object_name = import_path.rsplit('.', 1)
module = import_module(module_name)
return getattr(module, object_name)
def storage_factory(klass, location, base_url):
"""
This factory returns an instance of the storage class provided.
args:
* klass: must inherit from ``django.core.files.storage.Storage``
* location: is a string representing the PATH similar to MEDIA_ROOT
* base_url: is a string representing the URL similar to MEDIA_URL
"""
return klass(location=location, base_url=base_url)
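# Usage sketch (added, illustrative): both calls below are commented out.
# storage_class = load_object('django.core.files.storage.FileSystemStorage')
# storage = storage_factory(storage_class, '/var/media', '/media/')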
|
bsd-3-clause
|
olivierdalang/QGIS
|
python/plugins/processing/algs/grass7/ext/v_net_visibility.py
|
45
|
1240
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
v_net_visibility.py
---------------------
Date : December 2015
Copyright : (C) 2015 by Médéric Ribreux
Email : medspx at medspx dot fr
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Médéric Ribreux'
__date__ = 'December 2015'
__copyright__ = '(C) 2015, Médéric Ribreux'
from .v_net import variableOutput
def processOutputs(alg, parameters, context, feedback):
outputParameter = {'output': ['output', 'line', 1, True]}
variableOutput(alg, outputParameter, parameters, context)
|
gpl-2.0
|
janusnic/wagtail
|
wagtail/wagtailcore/migrations/0002_initial_data.py
|
27
|
2325
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django import VERSION as DJANGO_VERSION
from django.db import migrations
def initial_data(apps, schema_editor):
ContentType = apps.get_model('contenttypes.ContentType')
Group = apps.get_model('auth.Group')
Page = apps.get_model('wagtailcore.Page')
Site = apps.get_model('wagtailcore.Site')
GroupPagePermission = apps.get_model('wagtailcore.GroupPagePermission')
# Create page content type
page_content_type, created = ContentType.objects.get_or_create(
model='page',
app_label='wagtailcore',
defaults={'name': 'page'} if DJANGO_VERSION < (1, 8) else {}
)
# Create root page
root = Page.objects.create(
title="Root",
slug='root',
content_type=page_content_type,
path='0001',
depth=1,
numchild=1,
url_path='/',
)
# Create homepage
homepage = Page.objects.create(
title="Welcome to your new Wagtail site!",
slug='home',
content_type=page_content_type,
path='00010001',
depth=2,
numchild=0,
url_path='/home/',
)
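# Note (added): path/depth/numchild encode a treebeard-style materialized
# path: each tree level appends a fixed-width 4-character step, so '0001'
# is the root at depth 1 and '00010001' is its first child at depth 2.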
# Create default site
Site.objects.create(
hostname='localhost',
root_page_id=homepage.id,
is_default_site=True
)
# Create auth groups
moderators_group = Group.objects.create(name='Moderators')
editors_group = Group.objects.create(name='Editors')
# Create group permissions
GroupPagePermission.objects.create(
group=moderators_group,
page=root,
permission_type='add',
)
GroupPagePermission.objects.create(
group=moderators_group,
page=root,
permission_type='edit',
)
GroupPagePermission.objects.create(
group=moderators_group,
page=root,
permission_type='publish',
)
GroupPagePermission.objects.create(
group=editors_group,
page=root,
permission_type='add',
)
GroupPagePermission.objects.create(
group=editors_group,
page=root,
permission_type='edit',
)
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0001_initial'),
]
operations = [
migrations.RunPython(initial_data),
]
|
bsd-3-clause
|
FedoraScientific/salome-paravis
|
test/VisuPrs/GaussPoints/A2.py
|
1
|
2021
|
# Copyright (C) 2010-2014 CEA/DEN, EDF R&D
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# See http://www.salome-platform.org/ or email : [email protected]
#
# This case corresponds to: /visu/GaussPoints/A2 case
# Create Gauss Points on the field of the MED file
import os
import sys
from paravistest import datadir, pictureext, get_picture_dir
from presentations import GaussPointsOnField, EntityType, get_time, process_prs_for_test
import pvserver as paravis
import pvsimple
# Directory for saving snapshots
picturedir = get_picture_dir("GaussPoints/A2")
if not picturedir.endswith(os.sep):
picturedir += os.sep
# MED file
file_name = datadir + "Fields_group3D.med"
field_name = "scalar field"
timestamp_nb = -1 # last timestamp
paravis.OpenDataFile(file_name)
med_reader = pvsimple.GetActiveSource()
if med_reader is None:
raise RuntimeError("File wasn't imported!!!")
# Create Gauss Points presentation
prs = GaussPointsOnField(med_reader, EntityType.CELL, field_name, timestamp_nb)
if prs is None:
raise RuntimeError, "Created presentation is None!!!"
# Display presentation and get snapshot
view = pvsimple.GetRenderView()
time = get_time(med_reader, timestamp_nb)
pic_name = picturedir + field_name + "_" + str(time) + "_GAUSSPOINTS." + pictureext
process_prs_for_test(prs, view, pic_name)
|
lgpl-2.1
|
saneyuki/servo
|
tests/wpt/web-platform-tests/tools/third_party/html5lib/setup.py
|
25
|
4533
|
from __future__ import print_function
import ast
import codecs
import sys
from os.path import join, dirname
from setuptools import setup, find_packages, __version__ as setuptools_version
from pkg_resources import parse_version
import pkg_resources
try:
import _markerlib.markers
except ImportError:
_markerlib = None
# _markerlib.default_environment() obtains its data from _VARS
# and wraps it in another dict, but _markerlib_evaluate writes
# to the dict while it is iterating the keys, causing an error
# on Python 3 only.
# Replace _markerlib.default_environment to return a custom dict
# that has all the necessary markers, and ignores any writes.
class Python3MarkerDict(dict):
def __setitem__(self, key, value):
pass
def pop(self, i=-1):
return self[i]
if _markerlib and sys.version_info[0] == 3:
env = _markerlib.markers._VARS
for key in list(env.keys()):
new_key = key.replace('.', '_')
if new_key != key:
env[new_key] = env[key]
_markerlib.markers._VARS = Python3MarkerDict(env)
def default_environment():
return _markerlib.markers._VARS
_markerlib.default_environment = default_environment
# Avoid the very buggy pkg_resources.parser, which doesn't consistently
# recognise the markers needed by this setup.py
# Change this to setuptools 20.10.0 to support all markers.
if pkg_resources:
if parse_version(setuptools_version) < parse_version('18.5'):
MarkerEvaluation = pkg_resources.MarkerEvaluation
del pkg_resources.parser
pkg_resources.evaluate_marker = MarkerEvaluation._markerlib_evaluate
MarkerEvaluation.evaluate_marker = MarkerEvaluation._markerlib_evaluate
classifiers = [
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Topic :: Software Development :: Libraries :: Python Modules',
'Topic :: Text Processing :: Markup :: HTML'
]
here = dirname(__file__)
with codecs.open(join(here, 'README.rst'), 'r', 'utf8') as readme_file:
with codecs.open(join(here, 'CHANGES.rst'), 'r', 'utf8') as changes_file:
long_description = readme_file.read() + '\n' + changes_file.read()
version = None
with open(join(here, "html5lib", "__init__.py"), "rb") as init_file:
t = ast.parse(init_file.read(), filename="__init__.py", mode="exec")
assert isinstance(t, ast.Module)
assignments = filter(lambda x: isinstance(x, ast.Assign), t.body)
for a in assignments:
if (len(a.targets) == 1 and
isinstance(a.targets[0], ast.Name) and
a.targets[0].id == "__version__" and
isinstance(a.value, ast.Str)):
version = a.value.s
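# Note (added): reading __version__ via ast rather than importing html5lib
# lets setup.py run before the package's own dependencies are installed.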
setup(name='html5lib',
version=version,
url='https://github.com/html5lib/html5lib-python',
license="MIT License",
description='HTML parser based on the WHATWG HTML specification',
long_description=long_description,
classifiers=classifiers,
maintainer='James Graham',
maintainer_email='[email protected]',
packages=find_packages(exclude=["*.tests", "*.tests.*", "tests.*", "tests"]),
install_requires=[
'six>=1.9',
'webencodings',
],
extras_require={
# A conditional extra will only install these items when the extra is
# requested and the condition matches.
"datrie:platform_python_implementation == 'CPython'": ["datrie"],
"lxml:platform_python_implementation == 'CPython'": ["lxml"],
# Standard extras, will be installed when the extra is requested.
"genshi": ["genshi"],
"chardet": ["chardet>=2.2"],
# The all extra combines a standard extra which will be used anytime
# the all extra is requested, and it extends it with a conditional
# extra that will be installed whenever the condition matches and the
# all extra is requested.
"all": ["genshi", "chardet>=2.2"],
"all:platform_python_implementation == 'CPython'": ["datrie", "lxml"],
},
)
|
mpl-2.0
|
SimonBiggs/electronfactor-server
|
electroninserts.py
|
1
|
14607
|
# Copyright (C) 2016 Simon Biggs
# This program is free software: you can redistribute it and/or
# modify it under the terms of the GNU Affero General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Affero General Public License for more details.
# You should have received a copy of the GNU Affero General Public
# License along with this program. If not, see
# http://www.gnu.org/licenses/.
"""Model insert factors and parameterise inserts as equivalent ellipses."""
import numpy as np
import shapely.geometry as geo
import shapely.affinity as aff
from scipy.interpolate import SmoothBivariateSpline
from scipy.optimize import basinhopping
def spline_model(width_test, ratio_perim_area_test,
width_data, ratio_perim_area_data, factor_data):
"""Return the result of the spline model.
The bounding box is chosen so as to allow extrapolation. The spline orders
are two in the width direction and one in the perimeter/area direction. For
justification on using this method for modelling electron insert factors
see the *Methods: Bivariate spline model* section within
<http://dx.doi.org/10.1016/j.ejmp.2015.11.002>.
Args:
width_test (numpy.array): The width point(s) which are to have the
electron insert factor interpolated.
ratio_perim_area_test (numpy.array): The perimeter/area which are to
have the electron insert factor interpolated.
width_data (numpy.array): The width data points for the relevant
applicator, energy and ssd.
ratio_perim_area_data (numpy.array): The perimeter/area data points for
the relevant applicator, energy and ssd.
factor_data (numpy.array): The insert factor data points for the
relevant applicator, energy and ssd.
Returns:
numpy.array: The interpolated electron insert factors for width_test
and ratio_perim_area_test.
"""
bbox = [
np.min([np.min(width_data), np.min(width_test)]),
np.max([np.max(width_data), np.max(width_test)]),
np.min([np.min(ratio_perim_area_data), np.min(ratio_perim_area_test)]),
np.max([np.max(ratio_perim_area_data), np.max(ratio_perim_area_test)])]
spline = SmoothBivariateSpline(
width_data, ratio_perim_area_data, factor_data, kx=2, ky=1, bbox=bbox)
return spline.ev(width_test, ratio_perim_area_test)
def _single_calculate_deformability(x_test, y_test, x_data, y_data, z_data):
"""Return the result of the deformability test for a single test point.
The deformability test applies a shift to the spline to determine whether
or not sufficient information for modelling is available. For further
details on the deformability test see the *Methods: Defining valid
prediction regions of the spline* section within
<http://dx.doi.org/10.1016/j.ejmp.2015.11.002>.
Args:
x_test (float): The x coordinate of the point to test
y_test (float): The y coordinate of the point to test
x_data (np.array): The x coordinates of the model data to test
y_data (np.array): The y coordinates of the model data to test
z_data (np.array): The z coordinates of the model data to test
Returns:
deformability (float): The resulting deformability between 0 and 1
representing the ratio of deviation the spline model underwent when
an outlier was introduced at the point in question.
"""
deviation = 0.02
adjusted_x_data = np.append(x_data, x_test)
adjusted_y_data = np.append(y_data, y_test)
bbox = [
min(adjusted_x_data), max(adjusted_x_data),
min(adjusted_y_data), max(adjusted_y_data)]
initial_model = SmoothBivariateSpline(
x_data, y_data, z_data, bbox=bbox, kx=2, ky=1).ev(x_test, y_test)
pos_adjusted_z_data = np.append(z_data, initial_model + deviation)
neg_adjusted_z_data = np.append(z_data, initial_model - deviation)
pos_adjusted_model = SmoothBivariateSpline(
adjusted_x_data, adjusted_y_data, pos_adjusted_z_data, kx=2, ky=1
).ev(x_test, y_test)
neg_adjusted_model = SmoothBivariateSpline(
adjusted_x_data, adjusted_y_data, neg_adjusted_z_data, kx=2, ky=1
).ev(x_test, y_test)
deformability_from_pos_adjustment = (
pos_adjusted_model - initial_model) / deviation
deformability_from_neg_adjustment = (
initial_model - neg_adjusted_model) / deviation
deformability = np.max(
[deformability_from_pos_adjustment, deformability_from_neg_adjustment])
return deformability
def calculate_deformability(x_test, y_test, x_data, y_data, z_data):
"""Return the result of the deformability test.
This function takes an array of test points and loops over
``_single_calculate_deformability``.
The deformability test applies a shift to the spline to determine whether
or not sufficient information for modelling is available. For further
details on the deformability test see the *Methods: Defining valid
prediction regions of the spline* section within
<http://dx.doi.org/10.1016/j.ejmp.2015.11.002>.
Args:
x_test (np.array): The x coordinate of the point(s) to test
y_test (np.array): The y coordinate of the point(s) to test
x_data (np.array): The x coordinate of the model data to test
y_data (np.array): The y coordinate of the model data to test
z_data (np.array): The z coordinate of the model data to test
Returns:
deformability (float): The resulting deformability between 0 and 1
representing the ratio of deviation the spline model underwent when
an outlier was introduced at the point in question.
"""
dim = np.shape(x_test)
if np.size(dim) == 0:
deformability = _single_calculate_deformability(
x_test, y_test, x_data, y_data, z_data)
elif np.size(dim) == 1:
deformability = np.array([
_single_calculate_deformability(
x_test[i], y_test[i], x_data, y_data, z_data)
for i in range(dim[0])
])
else:
deformability = np.array([[
_single_calculate_deformability(
x_test[i, j], y_test[i, j], x_data, y_data, z_data)
for j in range(dim[1])]
for i in range(dim[0])
])
return deformability
def spline_model_with_deformability(width_test, ratio_perim_area_test,
width_data, ratio_perim_area_data,
factor_data):
"""Return the spline model for points with sufficient deformability.
Calls both ``spline_model`` and ``calculate_deformability`` and then adjusts
the result so that points with deformability greater than 0.5 return
``numpy.nan``.
Args:
width_test (numpy.array): The width point(s) which are to have the
electron insert factor interpolated.
ratio_perim_area_test (numpy.array): The perimeter/area which are to
have the electron insert factor interpolated.
width_data (numpy.array): The width data points for the relevant
applicator, energy and ssd.
ratio_perim_area_data (numpy.array): The perimeter/area data points for
the relevant applicator, energy and ssd.
factor_data (numpy.array): The insert factor data points for the
relevant applicator, energy and ssd.
Returns:
numpy.array: The interpolated electron insert factors for width_test
and ratio_perim_area_test with points outside the valid prediction
region set to ``numpy.nan``.
"""
deformability = calculate_deformability(
width_test, ratio_perim_area_test,
width_data, ratio_perim_area_data, factor_data)
model_factor = spline_model(
width_test, ratio_perim_area_test,
width_data, ratio_perim_area_data, factor_data)
model_factor[deformability > 0.5] = np.nan
return model_factor
def calculate_percent_prediction_differences(width_data, ratio_perim_area_data,
factor_data):
"""Return the percent prediction differences.
Calculates the model factor for each data point with that point removed
from the data set. Used to determine an estimated uncertainty for
prediction.
Args:
width_data (numpy.array): The width data points for a specific
applicator, energy and ssd.
ratio_perim_area_data (numpy.array): The perimeter/area data points for
a specific applicator, energy and ssd.
factor_data (numpy.array): The insert factor data points for a specific
applicator, energy and ssd.
Returns:
numpy.array: The predicted electron insert factors for each data point
with that given data point removed.
"""
predictions = [
spline_model_with_deformability(
width_data[i], ratio_perim_area_data[i],
np.delete(width_data, i), np.delete(ratio_perim_area_data, i),
np.delete(factor_data, i))
for i in range(len(width_data))
]
return 100 * (factor_data - predictions) / factor_data
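# Note (added): this is leave-one-out cross-validation; each measured factor
# is re-predicted by a spline fitted without that point, and the difference
# is reported as a percentage of the measured factor.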
def shapely_insert(x, y):
"""Return a shapely object from x and y coordinates."""
return geo.Polygon(np.transpose((x, y)))
def search_for_centre_of_largest_bounded_circle(x, y, callback=None):
"""Find the centre of the largest bounded circle within the insert."""
insert = shapely_insert(x, y)
boundary = insert.boundary
centroid = insert.centroid
furthest_distance = np.hypot(
np.diff(insert.bounds[::2]),
np.diff(insert.bounds[1::2]))
def minimising_function(optimiser_input):
x, y = optimiser_input
point = geo.Point(x, y)
if insert.contains(point):
edge_distance = point.distance(boundary)
else:
edge_distance = -point.distance(boundary)
return -edge_distance
x0 = np.squeeze(centroid.coords)
niter = 200
T = furthest_distance / 3
stepsize = furthest_distance / 2
niter_success = 50
output = basinhopping(
minimising_function, x0, niter=niter, T=T, stepsize=stepsize,
niter_success=niter_success, callback=callback)
circle_centre = output.x
return circle_centre
def calculate_width(x, y, circle_centre):
"""Return the equivalent ellipse width."""
insert = shapely_insert(x, y)
point = geo.Point(*circle_centre)
if insert.contains(point):
distance = point.distance(insert.boundary)
else:
raise Exception("Circle centre not within insert")
return distance * 2
def calculate_length(x, y, width):
"""Return the equivalent ellipse length."""
insert = shapely_insert(x, y)
length = 4 * insert.area / (np.pi * width)
return length
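# Note (added): an ellipse has area = pi/4 * width * length, so choosing
# length = 4 * area / (pi * width) preserves the insert's area for the
# already-determined width.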
def parameterise_insert(x, y, callback=None):
"""Return the parameterisation of an insert given x and y coords."""
circle_centre = search_for_centre_of_largest_bounded_circle(
x, y, callback=callback)
width = calculate_width(x, y, circle_centre)
length = calculate_length(x, y, width)
return width, length, circle_centre
def visual_alignment_of_equivalent_ellipse(x, y, width, length, callback):
"""Visually align the equivalent ellipse to the insert."""
insert = shapely_insert(x, y)
unit_circle = geo.Point(0, 0).buffer(1)
initial_ellipse = aff.scale(
unit_circle, xfact=width/2, yfact=length/2)
def minimising_function(optimiser_input):
x_shift, y_shift, rotation_angle = optimiser_input
rotated = aff.rotate(
initial_ellipse, rotation_angle, use_radians=True)
translated = aff.translate(
rotated, xoff=x_shift, yoff=y_shift)
disjoint_area = (
translated.difference(insert).area +
insert.difference(translated).area)
return disjoint_area / 400
x0 = np.append(
np.squeeze(insert.centroid.coords), np.pi/4)
niter = 10
T = insert.area / 40000
stepsize = 3
niter_success = 2
output = basinhopping(
minimising_function, x0, niter=niter, T=T, stepsize=stepsize,
niter_success=niter_success, callback=callback)
x_shift, y_shift, rotation_angle = output.x
return x_shift, y_shift, rotation_angle
def parameterise_insert_with_visual_alignment(
x, y, circle_callback=None,
visual_ellipse_callback=None,
complete_parameterisation_callback=None):
"""Return an equivalent ellipse with visual alignment parameters."""
width, length, circle_centre = parameterise_insert(
x, y, callback=circle_callback)
if complete_parameterisation_callback is not None:
complete_parameterisation_callback(width, length, circle_centre)
x_shift, y_shift, rotation_angle = visual_alignment_of_equivalent_ellipse(
x, y, width, length, callback=visual_ellipse_callback)
return width, length, circle_centre, x_shift, y_shift, rotation_angle
def convert2_ratio_perim_area(width, length):
"""Convert width and length data into ratio of perimeter to area."""
perimeter = (
np.pi / 2 *
(3*(width + length) - np.sqrt((3*width + length)*(3*length + width)))
)
area = np.pi / 4 * width * length
return perimeter / area
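# Note (added): the perimeter above is Ramanujan's approximation for an
# ellipse with axes `width` and `length`. Quick check for a circle
# (width == length == d): perimeter = pi/2 * (6*d - sqrt(16*d**2)) = pi*d
# and area = pi/4 * d**2, so the returned ratio is 4/d, as expected.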
def create_transformed_mesh(width_data, length_data, factor_data):
"""Return factor data meshgrid."""
x = np.arange(
np.floor(np.min(width_data)) - 1,
np.ceil(np.max(width_data)) + 1, 0.1)
y = np.arange(
np.floor(np.min(length_data)) - 1,
np.ceil(np.max(length_data)) + 1, 0.1)
xx, yy = np.meshgrid(x, y)
zz = spline_model_with_deformability(
xx, convert2_ratio_perim_area(xx, yy),
width_data, convert2_ratio_perim_area(width_data, length_data),
factor_data)
zz[xx > yy] = np.nan
no_data_x = np.all(np.isnan(zz), axis=0)
no_data_y = np.all(np.isnan(zz), axis=1)
x = x[np.invert(no_data_x)]
y = y[np.invert(no_data_y)]
zz = zz[np.invert(no_data_y), :]
zz = zz[:, np.invert(no_data_x)]
return x, y, zz
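# End-to-end sketch (added, illustrative; a 4 cm x 6 cm rectangular insert):
# x = np.array([-2.0, 2.0, 2.0, -2.0])
# y = np.array([-3.0, -3.0, 3.0, 3.0])
# width, length, circle_centre = parameterise_insert(x, y)
# ratio_perim_area = convert2_ratio_perim_area(width, length)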
|
agpl-3.0
|
greatmazinger/or-tools
|
examples/python/organize_day.py
|
34
|
3011
|
# Copyright 2010 Hakan Kjellerstrand [email protected]
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Organizing a day in Google CP Solver.
Simple scheduling problem.
Problem formulation from ECLiPSe:
Slides on (Finite Domain) Constraint Logic Programming, page 38f
http://eclipse-clp.org/reports/eclipse.ppt
Compare with the following models:
* MiniZinc: http://www.hakank.org/minizinc/organize_day.mzn
* Comet: http://www.hakank.org/comet/organize_day.co
* Gecode: http://hakank.org/gecode/organize_day.cpp
This model was created by Hakan Kjellerstrand ([email protected])
Also see my other Google CP Solver models:
http://www.hakank.org/google_or_tools/
"""
import sys
from ortools.constraint_solver import pywrapcp
#
# No overlapping of tasks s1 and s2
#
def no_overlap(solver, s1, d1, s2, d2):
b1 = solver.IsLessOrEqualVar(s1 + d1, s2) # s1 + d1 <= s2
b2 = solver.IsLessOrEqualVar(s2 + d2, s1) # s2 + d2 <= s1
solver.Add(b1 + b2 >= 1)
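# That is, at least one ordering must hold: a standard reified disjunction
# stating that intervals [s1, s1 + d1) and [s2, s2 + d2) do not overlap.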
def main():
# Create the solver.
solver = pywrapcp.Solver('Organizing a day')
#
# data
#
n = 4
tasks = range(n)
work, mail, shop, bank = tasks
durations = [4, 1, 2, 1]
# task [i,0] must be finished before task [i,1]
before_tasks = [
[bank, shop],
[mail, work]
]
# the valid times of the day
begin = 9
end = 17
#
# declare variables
#
begins = [solver.IntVar(begin, end, 'begins[%i]' % i) for i in tasks]
ends = [solver.IntVar(begin, end, 'ends[%i]' % i) for i in tasks]
#
# constraints
#
for i in tasks:
solver.Add(ends[i] == begins[i] + durations[i])
for i in tasks:
for j in tasks:
if i < j:
no_overlap(solver,
begins[i], durations[i],
begins[j], durations[j])
# specific constraints
for (before, after) in before_tasks:
solver.Add(ends[before] <= begins[after])
solver.Add(begins[work] >= 11)
#
# solution and search
#
db = solver.Phase(begins + ends,
solver.INT_VAR_DEFAULT,
solver.INT_VALUE_DEFAULT)
solver.NewSearch(db)
num_solutions = 0
while solver.NextSolution():
num_solutions += 1
print 'begins:', [begins[i].Value() for i in tasks]
print 'ends:', [ends[i].Value() for i in tasks]
print
print 'num_solutions:', num_solutions
print 'failures:', solver.Failures()
print 'branches:', solver.Branches()
print 'WallTime:', solver.WallTime(), 'ms'
if __name__ == '__main__':
main()
|
apache-2.0
|
axt/angr
|
angr/engines/vex/expressions/__init__.py
|
5
|
1147
|
def translate_expr(expr, state):
expr_name = 'SimIRExpr_' + type(expr).__name__.split('IRExpr')[-1].split('.')[-1]
g = globals()
if expr_name not in g and o.BYPASS_UNSUPPORTED_IREXPR not in state.options:
raise UnsupportedIRExprError("Unsupported expression type %s" % (type(expr)))
elif expr_name not in g:
expr_class = SimIRExpr_Unsupported
else:
expr_class = g[expr_name]
l.debug("Processing expression %s", expr_name)
e = expr_class(expr, state)
e.process()
return e
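# Dispatch note (added): e.g. a pyvex RdTmp expression resolves to the name
# 'SimIRExpr_RdTmp', which is looked up among the classes imported below.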
from ....errors import UnsupportedIRExprError
from .... import sim_options as o
import logging
l = logging.getLogger("angr.engines.vex.expressions.")
from .base import SimIRExpr
from .gsptr import SimIRExpr_GSPTR
from .vecret import SimIRExpr_VECRET
from .rdtmp import SimIRExpr_RdTmp
from .get import SimIRExpr_Get
from .load import SimIRExpr_Load
from .op import SimIRExpr_Unop, SimIRExpr_Binop, SimIRExpr_Triop, SimIRExpr_Qop
from .const import SimIRExpr_Const
from .ccall import SimIRExpr_CCall
from .ite import SimIRExpr_ITE
from .geti import SimIRExpr_GetI
from .unsupported import SimIRExpr_Unsupported
|
bsd-2-clause
|
alphagov/backdrop
|
backdrop/transformers/tasks/user_satisfaction.py
|
1
|
1410
|
from .util import encode_id
def calculate_rating(datum):
# See
# https://github.com/alphagov/spotlight/blob/ca291ffcc86a5397003be340ec263a2466b72cfe/app/common/collections/user-satisfaction.js # noqa
if not datum['total:sum']:
return None
min_score = 1
max_score = 5
score = 0
for rating in range(min_score, max_score + 1):
rating_key = 'rating_{0}:sum'.format(rating)
score += datum[rating_key] * rating
# Set rating key that spotlight expects.
datum['rating_{0}'.format(rating)] = datum[rating_key]
mean = score / (datum['total:sum'])
return (mean - min_score) / (max_score - min_score)
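# Worked example (added; assumes float inputs -- with all-integer inputs on
# Python 2, the divisions above would truncate): two responses, one rating 1
# and one rating 5 -> score = 1 + 5 = 6.0, mean = 6.0 / 2 = 3.0,
# rating = (3.0 - 1) / (5 - 1) = 0.5.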
def compute(data, transform, data_set_config=None):
# Calculate rating and set keys that spotlight expects.
computed = []
for datum in data:
computed.append({
'_id': encode_id(datum['_start_at'], datum['_end_at']),
'_timestamp': datum['_start_at'],
'_start_at': datum['_start_at'],
'_end_at': datum['_end_at'],
'rating_1': datum['rating_1:sum'],
'rating_2': datum['rating_2:sum'],
'rating_3': datum['rating_3:sum'],
'rating_4': datum['rating_4:sum'],
'rating_5': datum['rating_5:sum'],
'num_responses': datum['total:sum'],
'score': calculate_rating(datum),
})
return computed
|
mit
|
andreif/django
|
tests/project_template/test_settings.py
|
274
|
1083
|
import unittest
from django.test import TestCase
from django.utils import six
@unittest.skipIf(six.PY2,
'Python 2 cannot import the project template because '
'django/conf/project_template doesn\'t have an __init__.py file.')
class TestStartProjectSettings(TestCase):
def test_middleware_classes_headers(self):
"""
Ensure headers sent by the default MIDDLEWARE_CLASSES do not
inadvertently change. For example, we never want "Vary: Cookie" to
appear in the list since it prevents the caching of responses.
"""
from django.conf.project_template.project_name.settings import MIDDLEWARE_CLASSES
with self.settings(
MIDDLEWARE_CLASSES=MIDDLEWARE_CLASSES,
ROOT_URLCONF='project_template.urls',
):
response = self.client.get('/empty/')
headers = sorted(response.serialize_headers().split(b'\r\n'))
self.assertEqual(headers, [
b'Content-Type: text/html; charset=utf-8',
b'X-Frame-Options: SAMEORIGIN',
])
|
bsd-3-clause
|
spotify/cobbler
|
scripts/demo_connect.py
|
19
|
1527
|
#!/usr/bin/python
"""
Copyright 2007-2009, Red Hat, Inc and Others
Michael DeHaan <michael.dehaan AT gmail>
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
02110-1301 USA
"""
from xmlrpclib import ServerProxy
import optparse
if __name__ == "__main__":
p = optparse.OptionParser()
p.add_option("-u","--user",dest="user",default="test")
p.add_option("-p","--pass",dest="password",default="test")
# NOTE: if you've changed your xmlrpc_rw port or
# disabled xmlrpc_rw this test probably won't work
sp = ServerProxy("http://127.0.0.1:25151")
(options, args) = p.parse_args()
print "- trying to login with user=%s" % options.user
token = sp.login(options.user,options.password)
print "- token: %s" % token
print "- authenticated ok, now seeing if user is authorized"
check = sp.check_access(token,"imaginary_method_name")
print "- access ok? %s" % check
|
gpl-2.0
|
magic0704/neutron
|
neutron/db/migration/alembic_migrations/versions/aae5706a396_nuage_provider_networks.py
|
15
|
1349
|
# Copyright 2014 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""nuage_provider_networks
Revision ID: aae5706a396
Revises: 3b85b693a95f
Create Date: 2014-08-18 16:00:21.898795
"""
revision = 'aae5706a396'
down_revision = '3b85b693a95f'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'nuage_provider_net_bindings',
sa.Column('network_id', sa.String(length=36), nullable=False),
sa.Column('network_type', sa.String(length=32), nullable=False),
sa.Column('physical_network', sa.String(length=64), nullable=False),
sa.Column('vlan_id', sa.Integer(), nullable=False),
sa.ForeignKeyConstraint(
['network_id'], ['networks.id'], ondelete='CASCADE'),
sa.PrimaryKeyConstraint('network_id')
)
|
apache-2.0
|
seize-the-dave/XlsxWriter
|
xlsxwriter/test/comparison/test_chart_gradient05.py
|
8
|
1722
|
###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2015, John McNamara, [email protected]
#
from ..excel_comparsion_test import ExcelComparisonTest
from ...workbook import Workbook
class TestCompareXLSXFiles(ExcelComparisonTest):
"""
Test file created by XlsxWriter against a file created by Excel.
"""
def setUp(self):
self.maxDiff = None
filename = 'chart_gradient05.xlsx'
test_dir = 'xlsxwriter/test/comparison/'
self.got_filename = test_dir + '_test_' + filename
self.exp_filename = test_dir + 'xlsx_files/' + filename
self.ignore_files = []
self.ignore_elements = {}
def test_create_file(self):
"""Test the creation of a simple XlsxWriter file."""
workbook = Workbook(self.got_filename)
worksheet = workbook.add_worksheet()
chart = workbook.add_chart({'type': 'column'})
chart.axis_ids = [61363712, 61365248]
data = [
[1, 2, 3, 4, 5],
[2, 4, 6, 8, 10],
[3, 6, 9, 12, 15],
]
worksheet.write_column('A1', data[0])
worksheet.write_column('B1', data[1])
worksheet.write_column('C1', data[2])
chart.add_series({
'values': '=Sheet1!$A$1:$A$5',
'gradient': {
'colors': ['#DDEBCF', '#9CB86E', '#156B13'],
'type': 'rectangular'
}
})
chart.add_series({'values': '=Sheet1!$B$1:$B$5'})
chart.add_series({'values': '=Sheet1!$C$1:$C$5'})
worksheet.insert_chart('E9', chart)
workbook.close()
self.assertExcelEqual()
|
bsd-2-clause
|
barnsnake351/neutron
|
neutron/agent/l3/router_info.py
|
3
|
30239
|
# Copyright (c) 2014 Openstack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from oslo_log import log as logging
from neutron.agent.l3 import namespaces
from neutron.agent.linux import ip_lib
from neutron.agent.linux import iptables_manager
from neutron.agent.linux import ra
from neutron.common import constants as l3_constants
from neutron.common import exceptions as n_exc
from neutron.common import ipv6_utils
from neutron.common import utils as common_utils
from neutron.i18n import _LW
LOG = logging.getLogger(__name__)
INTERNAL_DEV_PREFIX = namespaces.INTERNAL_DEV_PREFIX
EXTERNAL_DEV_PREFIX = namespaces.EXTERNAL_DEV_PREFIX
FLOATINGIP_STATUS_NOCHANGE = object()
class RouterInfo(object):
def __init__(self,
router_id,
router,
agent_conf,
interface_driver,
use_ipv6=False):
self.router_id = router_id
self.ex_gw_port = None
self._snat_enabled = None
self.internal_ports = []
self.floating_ips = set()
# Invoke the setter for establishing initial SNAT action
self.router = router
self.use_ipv6 = use_ipv6
self.ns_name = None
self.router_namespace = None
if agent_conf.use_namespaces:
ns = namespaces.RouterNamespace(
router_id, agent_conf, interface_driver, use_ipv6)
self.router_namespace = ns
self.ns_name = ns.name
self.iptables_manager = iptables_manager.IptablesManager(
use_ipv6=use_ipv6,
namespace=self.ns_name)
self.routes = []
self.agent_conf = agent_conf
self.driver = interface_driver
# radvd is a neutron.agent.linux.ra.DaemonMonitor
self.radvd = None
def initialize(self, process_monitor):
"""Initialize the router on the system.
This differs from __init__ in that this method actually affects the
system: creating namespaces, starting processes, etc. __init__ merely
initializes the Python object. This separates in-memory object
initialization from methods that actually modify the system.
:param process_monitor: The agent's process monitor instance.
"""
self.process_monitor = process_monitor
self.radvd = ra.DaemonMonitor(self.router_id,
self.ns_name,
process_monitor,
self.get_internal_device_name)
if self.router_namespace:
self.router_namespace.create()
@property
def router(self):
return self._router
@router.setter
def router(self, value):
self._router = value
if not self._router:
return
# enable_snat by default if it wasn't specified by plugin
self._snat_enabled = self._router.get('enable_snat', True)
@property
def is_ha(self):
# TODO(Carl) Refactoring should render this obsolete. Remove it.
return False
def get_internal_device_name(self, port_id):
return (INTERNAL_DEV_PREFIX + port_id)[:self.driver.DEV_NAME_LEN]
def get_external_device_name(self, port_id):
return (EXTERNAL_DEV_PREFIX + port_id)[:self.driver.DEV_NAME_LEN]
def get_external_device_interface_name(self, ex_gw_port):
return self.get_external_device_name(ex_gw_port['id'])
def _update_routing_table(self, operation, route, namespace):
cmd = ['ip', 'route', operation, 'to', route['destination'],
'via', route['nexthop']]
ip_wrapper = ip_lib.IPWrapper(namespace=namespace)
ip_wrapper.netns.execute(cmd, check_exit_code=False)
def update_routing_table(self, operation, route, namespace=None):
if namespace is None:
namespace = self.ns_name
self._update_routing_table(operation, route, namespace)
def routes_updated(self):
new_routes = self.router['routes']
old_routes = self.routes
adds, removes = common_utils.diff_list_of_dict(old_routes,
new_routes)
for route in adds:
LOG.debug("Added route entry is '%s'", route)
# remove a replaced route from the list of routes to delete
for del_route in removes:
if route['destination'] == del_route['destination']:
removes.remove(del_route)
# 'replace' succeeds even if there is no existing route
self.update_routing_table('replace', route)
for route in removes:
LOG.debug("Removed route entry is '%s'", route)
self.update_routing_table('delete', route)
self.routes = new_routes
def get_ex_gw_port(self):
return self.router.get('gw_port')
def get_floating_ips(self):
"""Filter Floating IPs to be hosted on this agent."""
return self.router.get(l3_constants.FLOATINGIP_KEY, [])
def floating_forward_rules(self, floating_ip, fixed_ip):
return [('PREROUTING', '-d %s -j DNAT --to %s' %
(floating_ip, fixed_ip)),
('OUTPUT', '-d %s -j DNAT --to %s' %
(floating_ip, fixed_ip)),
('float-snat', '-s %s -j SNAT --to %s' %
(fixed_ip, floating_ip))]
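# Example (added, illustrative): for floating IP 192.0.2.10 mapped to fixed
# IP 10.0.0.5, the rules above expand to:
#   PREROUTING: -d 192.0.2.10 -j DNAT --to 10.0.0.5
#   OUTPUT:     -d 192.0.2.10 -j DNAT --to 10.0.0.5
#   float-snat: -s 10.0.0.5 -j SNAT --to 192.0.2.10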
def process_floating_ip_nat_rules(self):
"""Configure NAT rules for the router's floating IPs.
Configures iptables rules for the floating ips of the given router
"""
# Clear out all iptables rules for floating ips
self.iptables_manager.ipv4['nat'].clear_rules_by_tag('floating_ip')
floating_ips = self.get_floating_ips()
# Loop once to ensure that floating ips are configured.
for fip in floating_ips:
# Rebuild iptables rules for the floating ip.
fixed = fip['fixed_ip_address']
fip_ip = fip['floating_ip_address']
for chain, rule in self.floating_forward_rules(fip_ip, fixed):
self.iptables_manager.ipv4['nat'].add_rule(chain, rule,
tag='floating_ip')
self.iptables_manager.apply()
def process_snat_dnat_for_fip(self):
try:
self.process_floating_ip_nat_rules()
except Exception:
# TODO(salv-orlando): Less broad catching
raise n_exc.FloatingIpSetupException(
'L3 agent failure to setup NAT for floating IPs')
def _add_fip_addr_to_device(self, fip, device):
"""Configures the floating ip address on the device.
"""
try:
ip_cidr = common_utils.ip_to_cidr(fip['floating_ip_address'])
device.addr.add(ip_cidr)
return True
except RuntimeError:
# any exception occurred here should cause the floating IP
# to be set in error state
LOG.warn(_LW("Unable to configure IP address for "
"floating IP: %s"), fip['id'])
def add_floating_ip(self, fip, interface_name, device):
raise NotImplementedError()
def remove_floating_ip(self, device, ip_cidr):
device.delete_addr_and_conntrack_state(ip_cidr)
def get_router_cidrs(self, device):
return set([addr['cidr'] for addr in device.addr.list()])
def process_floating_ip_addresses(self, interface_name):
"""Configure IP addresses on router's external gateway interface.
Ensures addresses for existing floating IPs and cleans up
those that should no longer be configured.
"""
fip_statuses = {}
if interface_name is None:
LOG.debug('No Interface for floating IPs router: %s',
self.router['id'])
return fip_statuses
device = ip_lib.IPDevice(interface_name, namespace=self.ns_name)
existing_cidrs = self.get_router_cidrs(device)
new_cidrs = set()
floating_ips = self.get_floating_ips()
# Loop once to ensure that floating ips are configured.
for fip in floating_ips:
fip_ip = fip['floating_ip_address']
ip_cidr = common_utils.ip_to_cidr(fip_ip)
new_cidrs.add(ip_cidr)
fip_statuses[fip['id']] = l3_constants.FLOATINGIP_STATUS_ACTIVE
if ip_cidr not in existing_cidrs:
fip_statuses[fip['id']] = self.add_floating_ip(
fip, interface_name, device)
LOG.debug('Floating ip %(id)s added, status %(status)s',
{'id': fip['id'],
'status': fip_statuses.get(fip['id'])})
# mark the status as not changed. we can't remove it because
# that's how the caller determines that it was removed
if fip_statuses[fip['id']] == fip['status']:
fip_statuses[fip['id']] = FLOATINGIP_STATUS_NOCHANGE
fips_to_remove = (
ip_cidr for ip_cidr in existing_cidrs - new_cidrs
if common_utils.is_cidr_host(ip_cidr))
for ip_cidr in fips_to_remove:
LOG.debug("Removing floating ip %s from interface %s in "
"namespace %s", ip_cidr, interface_name, self.ns_name)
self.remove_floating_ip(device, ip_cidr)
return fip_statuses
def configure_fip_addresses(self, interface_name):
try:
return self.process_floating_ip_addresses(interface_name)
except Exception:
# TODO(salv-orlando): Less broad catching
raise n_exc.FloatingIpSetupException('L3 agent failure to setup '
'floating IPs')
def put_fips_in_error_state(self):
fip_statuses = {}
for fip in self.router.get(l3_constants.FLOATINGIP_KEY, []):
fip_statuses[fip['id']] = l3_constants.FLOATINGIP_STATUS_ERROR
return fip_statuses
def delete(self, agent):
self.router['gw_port'] = None
self.router[l3_constants.INTERFACE_KEY] = []
self.router[l3_constants.FLOATINGIP_KEY] = []
self.process(agent)
self.disable_radvd()
if self.router_namespace:
self.router_namespace.delete()
def _internal_network_updated(self, port, subnet_id, prefix, old_prefix,
updated_cidrs):
interface_name = self.get_internal_device_name(port['id'])
if prefix != l3_constants.PROVISIONAL_IPV6_PD_PREFIX:
fixed_ips = port['fixed_ips']
for fixed_ip in fixed_ips:
if fixed_ip['subnet_id'] == subnet_id:
v6addr = common_utils.ip_to_cidr(fixed_ip['ip_address'],
fixed_ip.get('prefixlen'))
if v6addr not in updated_cidrs:
self.driver.add_ipv6_addr(interface_name, v6addr,
self.ns_name)
else:
self.driver.delete_ipv6_addr_with_prefix(interface_name,
old_prefix,
self.ns_name)
def _internal_network_added(self, ns_name, network_id, port_id,
fixed_ips, mac_address,
interface_name, prefix):
LOG.debug("adding internal network: prefix(%s), port(%s)",
prefix, port_id)
self.driver.plug(network_id, port_id, interface_name, mac_address,
namespace=ns_name,
prefix=prefix)
ip_cidrs = common_utils.fixed_ip_cidrs(fixed_ips)
self.driver.init_router_port(
interface_name, ip_cidrs, namespace=ns_name)
for fixed_ip in fixed_ips:
ip_lib.send_ip_addr_adv_notif(ns_name,
interface_name,
fixed_ip['ip_address'],
self.agent_conf)
def internal_network_added(self, port):
network_id = port['network_id']
port_id = port['id']
fixed_ips = port['fixed_ips']
mac_address = port['mac_address']
interface_name = self.get_internal_device_name(port_id)
self._internal_network_added(self.ns_name,
network_id,
port_id,
fixed_ips,
mac_address,
interface_name,
INTERNAL_DEV_PREFIX)
def internal_network_removed(self, port):
interface_name = self.get_internal_device_name(port['id'])
LOG.debug("removing internal network: port(%s) interface(%s)",
port['id'], interface_name)
if ip_lib.device_exists(interface_name, namespace=self.ns_name):
self.driver.unplug(interface_name, namespace=self.ns_name,
prefix=INTERNAL_DEV_PREFIX)
def _get_existing_devices(self):
ip_wrapper = ip_lib.IPWrapper(namespace=self.ns_name)
ip_devs = ip_wrapper.get_devices(exclude_loopback=True)
return [ip_dev.name for ip_dev in ip_devs]
@staticmethod
def _get_updated_ports(existing_ports, current_ports):
updated_ports = dict()
current_ports_dict = {p['id']: p for p in current_ports}
for existing_port in existing_ports:
current_port = current_ports_dict.get(existing_port['id'])
if current_port:
if sorted(existing_port['fixed_ips']) != (
sorted(current_port['fixed_ips'])):
updated_ports[current_port['id']] = current_port
return updated_ports
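# Illustrative behavior of _get_updated_ports (data hypothetical): a port
# whose fixed_ips differ between the cached copy and the freshly fetched
# copy is reported as updated.
#     existing = [{'id': 'p1', 'fixed_ips': [{'ip_address': '10.0.0.1'}]}]
#     current = [{'id': 'p1', 'fixed_ips': [{'ip_address': '10.0.0.2'}]}]
#     _get_updated_ports(existing, current)  # -> {'p1': current[0]}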
@staticmethod
def _port_has_ipv6_subnet(port):
if 'subnets' in port:
for subnet in port['subnets']:
if (netaddr.IPNetwork(subnet['cidr']).version == 6 and
subnet['cidr'] != l3_constants.PROVISIONAL_IPV6_PD_PREFIX):
return True
def enable_radvd(self, internal_ports=None):
LOG.debug('Spawning radvd daemon in router device: %s', self.router_id)
if not internal_ports:
internal_ports = self.internal_ports
self.radvd.enable(internal_ports)
def disable_radvd(self):
LOG.debug('Terminating radvd daemon in router device: %s',
self.router_id)
self.radvd.disable()
def internal_network_updated(self, interface_name, ip_cidrs):
self.driver.init_l3(interface_name, ip_cidrs=ip_cidrs,
namespace=self.ns_name)
def _process_internal_ports(self, pd):
existing_port_ids = set(p['id'] for p in self.internal_ports)
internal_ports = self.router.get(l3_constants.INTERFACE_KEY, [])
current_port_ids = set(p['id'] for p in internal_ports
if p['admin_state_up'])
new_port_ids = current_port_ids - existing_port_ids
new_ports = [p for p in internal_ports if p['id'] in new_port_ids]
old_ports = [p for p in self.internal_ports
if p['id'] not in current_port_ids]
updated_ports = self._get_updated_ports(self.internal_ports,
internal_ports)
enable_ra = False
for p in new_ports:
self.internal_network_added(p)
LOG.debug("appending port %s to internal_ports cache", p)
self.internal_ports.append(p)
enable_ra = enable_ra or self._port_has_ipv6_subnet(p)
for subnet in p['subnets']:
if ipv6_utils.is_ipv6_pd_enabled(subnet):
interface_name = self.get_internal_device_name(p['id'])
pd.enable_subnet(self.router_id, subnet['id'],
subnet['cidr'],
interface_name, p['mac_address'])
for p in old_ports:
self.internal_network_removed(p)
LOG.debug("removing port %s from internal_ports cache", p)
self.internal_ports.remove(p)
enable_ra = enable_ra or self._port_has_ipv6_subnet(p)
for subnet in p['subnets']:
if ipv6_utils.is_ipv6_pd_enabled(subnet):
pd.disable_subnet(self.router_id, subnet['id'])
updated_cidrs = []
if updated_ports:
for index, p in enumerate(internal_ports):
if not updated_ports.get(p['id']):
continue
self.internal_ports[index] = updated_ports[p['id']]
interface_name = self.get_internal_device_name(p['id'])
ip_cidrs = common_utils.fixed_ip_cidrs(p['fixed_ips'])
LOG.debug("updating internal network for port %s", p)
updated_cidrs += ip_cidrs
self.internal_network_updated(interface_name, ip_cidrs)
enable_ra = enable_ra or self._port_has_ipv6_subnet(p)
# Check if there is any pd prefix update
for p in internal_ports:
if p['id'] in (set(current_port_ids) & set(existing_port_ids)):
for subnet in p.get('subnets', []):
if ipv6_utils.is_ipv6_pd_enabled(subnet):
old_prefix = pd.update_subnet(self.router_id,
subnet['id'],
subnet['cidr'])
if old_prefix:
self._internal_network_updated(p, subnet['id'],
subnet['cidr'],
old_prefix,
updated_cidrs)
enable_ra = True
# Enable RA
if enable_ra:
self.enable_radvd(internal_ports)
existing_devices = self._get_existing_devices()
current_internal_devs = set(n for n in existing_devices
if n.startswith(INTERNAL_DEV_PREFIX))
current_port_devs = set(self.get_internal_device_name(port_id)
for port_id in current_port_ids)
stale_devs = current_internal_devs - current_port_devs
for stale_dev in stale_devs:
LOG.debug('Deleting stale internal router device: %s',
stale_dev)
pd.remove_stale_ri_ifname(self.router_id, stale_dev)
self.driver.unplug(stale_dev,
namespace=self.ns_name,
prefix=INTERNAL_DEV_PREFIX)
def _list_floating_ip_cidrs(self):
# Compute the list of addresses this router is supposed to have.
# This avoids unnecessarily removing those addresses and
# causing a momentary network outage.
floating_ips = self.get_floating_ips()
return [common_utils.ip_to_cidr(ip['floating_ip_address'])
for ip in floating_ips]
def _plug_external_gateway(self, ex_gw_port, interface_name, ns_name):
self.driver.plug(ex_gw_port['network_id'],
ex_gw_port['id'],
interface_name,
ex_gw_port['mac_address'],
bridge=self.agent_conf.external_network_bridge,
namespace=ns_name,
prefix=EXTERNAL_DEV_PREFIX)
def _get_external_gw_ips(self, ex_gw_port):
gateway_ips = []
enable_ra_on_gw = False
if 'subnets' in ex_gw_port:
gateway_ips = [subnet['gateway_ip']
for subnet in ex_gw_port['subnets']
if subnet['gateway_ip']]
if self.use_ipv6 and not self.is_v6_gateway_set(gateway_ips):
# No IPv6 gateway is available, but IPv6 is enabled.
if self.agent_conf.ipv6_gateway:
# ipv6_gateway configured, use address for default route.
gateway_ips.append(self.agent_conf.ipv6_gateway)
else:
# ipv6_gateway is also not configured.
# Use RA for default route.
enable_ra_on_gw = True
return gateway_ips, enable_ra_on_gw
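# Illustrative outcomes (addresses hypothetical): if the gateway port's
# subnets yield ['198.51.100.1', 'fe80::1'], the list is returned as-is and
# enable_ra_on_gw stays False, since an IPv6 gateway is present. With only
# '198.51.100.1', use_ipv6 set, and agent_conf.ipv6_gateway unset, the list
# is returned unchanged and enable_ra_on_gw becomes True, so the default
# route is learned via router advertisements instead.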
def _external_gateway_added(self, ex_gw_port, interface_name,
ns_name, preserve_ips):
LOG.debug("External gateway added: port(%s), interface(%s), ns(%s)",
ex_gw_port, interface_name, ns_name)
self._plug_external_gateway(ex_gw_port, interface_name, ns_name)
# Build up the interface and gateway IP addresses that
# will be added to the interface.
ip_cidrs = common_utils.fixed_ip_cidrs(ex_gw_port['fixed_ips'])
gateway_ips, enable_ra_on_gw = self._get_external_gw_ips(ex_gw_port)
self.driver.init_router_port(
interface_name,
ip_cidrs,
namespace=ns_name,
gateway_ips=gateway_ips,
extra_subnets=ex_gw_port.get('extra_subnets', []),
preserve_ips=preserve_ips,
enable_ra_on_gw=enable_ra_on_gw,
clean_connections=True)
for fixed_ip in ex_gw_port['fixed_ips']:
ip_lib.send_ip_addr_adv_notif(ns_name,
interface_name,
fixed_ip['ip_address'],
self.agent_conf)
def is_v6_gateway_set(self, gateway_ips):
"""Check to see if list of gateway_ips has an IPv6 gateway.
"""
# Note: no try-except is needed here, since any elements present in
# gateway_ips are valid addresses.
return any(netaddr.IPAddress(gw_ip).version == 6
for gw_ip in gateway_ips)
def external_gateway_added(self, ex_gw_port, interface_name):
preserve_ips = self._list_floating_ip_cidrs()
self._external_gateway_added(
ex_gw_port, interface_name, self.ns_name, preserve_ips)
def external_gateway_updated(self, ex_gw_port, interface_name):
preserve_ips = self._list_floating_ip_cidrs()
self._external_gateway_added(
ex_gw_port, interface_name, self.ns_name, preserve_ips)
def external_gateway_removed(self, ex_gw_port, interface_name):
LOG.debug("External gateway removed: port(%s), interface(%s)",
ex_gw_port, interface_name)
self.driver.unplug(interface_name,
bridge=self.agent_conf.external_network_bridge,
namespace=self.ns_name,
prefix=EXTERNAL_DEV_PREFIX)
@staticmethod
def _gateway_ports_equal(port1, port2):
return port1 == port2
def _process_external_gateway(self, ex_gw_port, pd):
# TODO(Carl) Refactor to clarify roles of ex_gw_port vs self.ex_gw_port
ex_gw_port_id = (ex_gw_port and ex_gw_port['id'] or
self.ex_gw_port and self.ex_gw_port['id'])
interface_name = None
if ex_gw_port_id:
interface_name = self.get_external_device_name(ex_gw_port_id)
if ex_gw_port:
if not self.ex_gw_port:
self.external_gateway_added(ex_gw_port, interface_name)
pd.add_gw_interface(self.router['id'], interface_name)
elif not self._gateway_ports_equal(ex_gw_port, self.ex_gw_port):
self.external_gateway_updated(ex_gw_port, interface_name)
elif not ex_gw_port and self.ex_gw_port:
self.external_gateway_removed(self.ex_gw_port, interface_name)
pd.remove_gw_interface(self.router['id'])
existing_devices = self._get_existing_devices()
stale_devs = [dev for dev in existing_devices
if dev.startswith(EXTERNAL_DEV_PREFIX)
and dev != interface_name]
for stale_dev in stale_devs:
LOG.debug('Deleting stale external router device: %s', stale_dev)
pd.remove_gw_interface(self.router['id'])
self.driver.unplug(stale_dev,
bridge=self.agent_conf.external_network_bridge,
namespace=self.ns_name,
prefix=EXTERNAL_DEV_PREFIX)
# Process SNAT rules for external gateway
gw_port = self._router.get('gw_port')
self._handle_router_snat_rules(gw_port, interface_name)
def external_gateway_nat_rules(self, ex_gw_ip, interface_name):
dont_snat_traffic_to_internal_ports_if_not_to_floating_ip = (
'POSTROUTING', '! -i %(interface_name)s '
'! -o %(interface_name)s -m conntrack ! '
'--ctstate DNAT -j ACCEPT' %
{'interface_name': interface_name})
snat_normal_external_traffic = (
'snat', '-o %s -j SNAT --to-source %s' %
(interface_name, ex_gw_ip))
# Makes replies come back through the router to reverse DNAT
ext_in_mark = self.agent_conf.external_ingress_mark
snat_internal_traffic_to_floating_ip = (
'snat', '-m mark ! --mark %s/%s '
'-m conntrack --ctstate DNAT '
'-j SNAT --to-source %s'
% (ext_in_mark, l3_constants.ROUTER_MARK_MASK, ex_gw_ip))
return [dont_snat_traffic_to_internal_ports_if_not_to_floating_ip,
snat_normal_external_traffic,
snat_internal_traffic_to_floating_ip]
def external_gateway_mangle_rules(self, interface_name):
mark = self.agent_conf.external_ingress_mark
mark_packets_entering_external_gateway_port = (
'mark', '-i %s -j MARK --set-xmark %s/%s' %
(interface_name, mark, l3_constants.ROUTER_MARK_MASK))
return [mark_packets_entering_external_gateway_port]
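# Roughly what the rules built above render to, assuming an interface name
# of 'qg-e1f2', a gateway IP of '203.0.113.10', and an external_ingress_mark
# of 0x2 (all illustrative values; the mask comes from
# l3_constants.ROUTER_MARK_MASK and is shown here as <mask>):
#     nat POSTROUTING: ! -i qg-e1f2 ! -o qg-e1f2 -m conntrack ! --ctstate DNAT -j ACCEPT
#     nat snat:        -o qg-e1f2 -j SNAT --to-source 203.0.113.10
#     nat snat:        -m mark ! --mark 0x2/<mask> -m conntrack --ctstate DNAT -j SNAT --to-source 203.0.113.10
#     mangle mark:     -i qg-e1f2 -j MARK --set-xmark 0x2/<mask>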
def _empty_snat_chains(self, iptables_manager):
iptables_manager.ipv4['nat'].empty_chain('POSTROUTING')
iptables_manager.ipv4['nat'].empty_chain('snat')
iptables_manager.ipv4['mangle'].empty_chain('mark')
def _add_snat_rules(self, ex_gw_port, iptables_manager,
interface_name):
if self._snat_enabled and ex_gw_port:
# ex_gw_port should not be None in this case
# NAT rules are added only if ex_gw_port has an IPv4 address
for ip_addr in ex_gw_port['fixed_ips']:
ex_gw_ip = ip_addr['ip_address']
if netaddr.IPAddress(ex_gw_ip).version == 4:
rules = self.external_gateway_nat_rules(ex_gw_ip,
interface_name)
for rule in rules:
iptables_manager.ipv4['nat'].add_rule(*rule)
rules = self.external_gateway_mangle_rules(interface_name)
for rule in rules:
iptables_manager.ipv4['mangle'].add_rule(*rule)
break
def _handle_router_snat_rules(self, ex_gw_port, interface_name):
self._empty_snat_chains(self.iptables_manager)
self.iptables_manager.ipv4['nat'].add_rule('snat', '-j $float-snat')
self._add_snat_rules(ex_gw_port,
self.iptables_manager,
interface_name)
def process_external(self, agent):
fip_statuses = {}
existing_floating_ips = self.floating_ips
try:
with self.iptables_manager.defer_apply():
ex_gw_port = self.get_ex_gw_port()
self._process_external_gateway(ex_gw_port, agent.pd)
if not ex_gw_port:
return
# Process SNAT/DNAT rules and addresses for floating IPs
self.process_snat_dnat_for_fip()
# Once NAT rules for floating IPs are safely in place
# configure their addresses on the external gateway port
interface_name = self.get_external_device_interface_name(
ex_gw_port)
fip_statuses = self.configure_fip_addresses(interface_name)
except (n_exc.FloatingIpSetupException,
n_exc.IpTablesApplyException) as e:
# All floating IPs must be put in error state
LOG.exception(e)
fip_statuses = self.put_fips_in_error_state()
finally:
agent.update_fip_statuses(
self, existing_floating_ips, fip_statuses)
@common_utils.exception_logger()
def process(self, agent):
"""Process updates to this router
This method is the point where the agent requests that updates be
applied to this router.
:param agent: the L3 agent, passed so this router can send RPC messages.
"""
LOG.debug("process router updates")
self._process_internal_ports(agent.pd)
agent.pd.sync_router(self.router['id'])
self.process_external(agent)
# Process static routes for router
self.routes_updated()
# Update ex_gw_port and enable_snat on the router info cache
self.ex_gw_port = self.get_ex_gw_port()
# TODO(Carl) FWaaS uses this. Why is it set after processing is done?
self.enable_snat = self.router.get('enable_snat')
|
apache-2.0
|
mprinc/KnAllEdge
|
src/services/puzzles/flow/go/python/demo-0.py
|
1
|
1501
|
# __package__ = "use-cases"
import uuid
# from colabo.flow.go import ColaboFlowGo, go_pb2
from colabo.flow.go import ColaboFlowGo
from random import randint
from time import sleep
colaboFlowGo = ColaboFlowGo()
print("colaboFlowGo = %s" % (colaboFlowGo))
# Gets the name of the calling function by inspecting the stack
def gTFN():
import traceback
return traceback.extract_stack(None, 2)[0][2]
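# For example, when called from f1() below, extract_stack(None, 2) returns
# the last two stack frames [caller-frame, gTFN-frame]; index [0][2] is the
# function-name field of the caller's frame tuple, so gTFN() evaluates
# to "f1".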
# Set of functions
def f1(dataIn):
print(("[%s] started." % (gTFN())))
print(("[%s] dataIn: %s" % (gTFN(), dataIn)))
dataOut = gTFN()+":result:"+dataIn
return dataOut
def f2(dataIn):
print(("[%s] started." % (gTFN())))
print(("[%s] dataIn: %s" % (gTFN(), dataIn)))
dataOut = gTFN()+":result:"+dataIn
return dataOut
def f3(dataIn):
print(("[%s] started." % (gTFN())))
print(("[%s] dataIn: %s" % (gTFN(), dataIn)))
dataOut = gTFN()+":result:"+dataIn
return dataOut
def f4(dataIn):
print(("[%s] started." % (gTFN())))
print(("[%s] dataIn: %s" % (gTFN(), dataIn)))
dataOut = gTFN()+":result:"+dataIn
return dataOut
def f5(dataIn):
print(("[%s] started." % (gTFN())))
print(("[%s] dataIn: %s" % (gTFN(), dataIn)))
dataOut = gTFN()+":result:"+dataIn
return dataOut
# Running functions
print("---")
r1 = f1("hello 1")
print("r1: %s" %(r1))
print("---")
r2 = f2(r1)
print("r2: %s" % (r2))
print("---")
r3 = f3(r2)
print("r3: %s" % (r3))
print("---")
r4 = f4(r3)
print("r4: %s" % (r4))
print("---")
r5 = f5(r4)
print("r5: %s" % (r5))
|
mit
|
cristian69/KernotekV3
|
venv/lib/python2.7/site-packages/pip/_vendor/requests/packages/chardet/charsetprober.py
|
3127
|
1902
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from . import constants
import re
class CharSetProber:
def __init__(self):
pass
def reset(self):
self._mState = constants.eDetecting
def get_charset_name(self):
return None
def feed(self, aBuf):
pass
def get_state(self):
return self._mState
def get_confidence(self):
return 0.0
def filter_high_bit_only(self, aBuf):
aBuf = re.sub(b'([\x00-\x7F])+', b' ', aBuf)
return aBuf
def filter_without_english_letters(self, aBuf):
aBuf = re.sub(b'([A-Za-z])+', b' ', aBuf)
return aBuf
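# Illustrative byte-level behavior of the two filters above (inputs
# hypothetical): each collapses the matched runs into a single space.
#     filter_high_bit_only(b'abc\xe4\xb8\xad xyz')   # -> b' \xe4\xb8\xad '
#     filter_without_english_letters(b'abc123xyz')   # -> b' 123 '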
def filter_with_english_letters(self, aBuf):
# TODO
return aBuf
|
gpl-3.0
|
terbolous/SickRage
|
lib/singledispatch_helpers.py
|
46
|
5228
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from abc import ABCMeta
from collections import MutableMapping
import sys
try:
from collections import UserDict
except ImportError:
from UserDict import UserDict
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict
try:
from thread import get_ident
except ImportError:
try:
from _thread import get_ident
except ImportError:
from _dummy_thread import get_ident
def recursive_repr(fillvalue='...'):
'Decorator to make a repr function return fillvalue for a recursive call'
def decorating_function(user_function):
repr_running = set()
def wrapper(self):
key = id(self), get_ident()
if key in repr_running:
return fillvalue
repr_running.add(key)
try:
result = user_function(self)
finally:
repr_running.discard(key)
return result
# Can't use functools.wraps() here because of bootstrap issues
wrapper.__module__ = getattr(user_function, '__module__')
wrapper.__doc__ = getattr(user_function, '__doc__')
wrapper.__name__ = getattr(user_function, '__name__')
wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
return wrapper
return decorating_function
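# Illustrative usage (hypothetical class, not part of this module): a
# self-referential object whose __repr__ would otherwise recurse forever.
#     class Node(object):
#         def __init__(self):
#             self.child = self  # deliberate cycle
#         @recursive_repr()
#         def __repr__(self):
#             return 'Node(child=%r)' % (self.child,)
#     repr(Node())  # -> "Node(child=...)" instead of infinite recursion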
class ChainMap(MutableMapping):
''' A ChainMap groups multiple dicts (or other mappings) together
to create a single, updateable view.
The underlying mappings are stored in a list. That list is public and can be
accessed or updated using the *maps* attribute. There is no other state.
Lookups search the underlying mappings successively until a key is found.
In contrast, writes, updates, and deletions only operate on the first
mapping.
'''
def __init__(self, *maps):
'''Initialize a ChainMap by setting *maps* to the given mappings.
If no mappings are provided, a single empty dictionary is used.
'''
self.maps = list(maps) or [{}] # always at least one map
def __missing__(self, key):
raise KeyError(key)
def __getitem__(self, key):
for mapping in self.maps:
try:
return mapping[key] # can't use 'key in mapping' with defaultdict
except KeyError:
pass
return self.__missing__(key) # support subclasses that define __missing__
def get(self, key, default=None):
return self[key] if key in self else default
def __len__(self):
return len(set().union(*self.maps)) # reuses stored hash values if possible
def __iter__(self):
return iter(set().union(*self.maps))
def __contains__(self, key):
return any(key in m for m in self.maps)
@recursive_repr()
def __repr__(self):
return '{0.__class__.__name__}({1})'.format(
self, ', '.join(map(repr, self.maps)))
@classmethod
def fromkeys(cls, iterable, *args):
'Create a ChainMap with a single dict created from the iterable.'
return cls(dict.fromkeys(iterable, *args))
def copy(self):
'New ChainMap or subclass with a new copy of maps[0] and refs to maps[1:]'
return self.__class__(self.maps[0].copy(), *self.maps[1:])
__copy__ = copy
def new_child(self): # like Django's Context.push()
'New ChainMap with a new dict followed by all previous maps.'
return self.__class__({}, *self.maps)
@property
def parents(self): # like Django's Context.pop()
'New ChainMap from maps[1:].'
return self.__class__(*self.maps[1:])
def __setitem__(self, key, value):
self.maps[0][key] = value
def __delitem__(self, key):
try:
del self.maps[0][key]
except KeyError:
raise KeyError('Key not found in the first mapping: {!r}'.format(key))
def popitem(self):
'Remove and return an item pair from maps[0]. Raise KeyError if maps[0] is empty.'
try:
return self.maps[0].popitem()
except KeyError:
raise KeyError('No keys found in the first mapping.')
def pop(self, key, *args):
'Remove *key* from maps[0] and return its value. Raise KeyError if *key* not in maps[0].'
try:
return self.maps[0].pop(key, *args)
except KeyError:
raise KeyError('Key not found in the first mapping: {!r}'.format(key))
def clear(self):
'Clear maps[0], leaving maps[1:] intact.'
self.maps[0].clear()
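# Illustrative usage of ChainMap (data hypothetical): lookups fall through
# the chain, while writes and deletes touch only the first mapping.
#     defaults = {'color': 'red', 'user': 'guest'}
#     overrides = {'user': 'admin'}
#     cm = ChainMap(overrides, defaults)
#     cm['user']            # -> 'admin' (found in the first map)
#     cm['color']           # -> 'red' (falls through to defaults)
#     cm['color'] = 'blue'  # stored in overrides; defaults is untouched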
class MappingProxyType(UserDict):
def __init__(self, data):
UserDict.__init__(self)
self.data = data
def get_cache_token():
return ABCMeta._abc_invalidation_counter
class Support(object):
def dummy(self):
pass
def cpython_only(self, func):
if 'PyPy' in sys.version:
return self.dummy
return func
|
gpl-3.0
|
lukecwik/incubator-beam
|
sdks/python/apache_beam/io/parquetio_it_test.py
|
2
|
4943
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# pytype: skip-file
from __future__ import absolute_import
from __future__ import division
import logging
import string
import sys
import unittest
from collections import Counter
from nose.plugins.attrib import attr
from apache_beam import Create
from apache_beam import DoFn
from apache_beam import FlatMap
from apache_beam import Flatten
from apache_beam import Map
from apache_beam import ParDo
from apache_beam import Reshuffle
from apache_beam.io.filesystems import FileSystems
from apache_beam.io.parquetio import ReadAllFromParquet
from apache_beam.io.parquetio import WriteToParquet
from apache_beam.testing.test_pipeline import TestPipeline
from apache_beam.testing.util import BeamAssertException
from apache_beam.transforms import CombineGlobally
from apache_beam.transforms.combiners import Count
try:
import pyarrow as pa
except ImportError:
pa = None
@unittest.skipIf(pa is None, "PyArrow is not installed.")
class TestParquetIT(unittest.TestCase):
@classmethod
def setUpClass(cls):
# Method has been renamed in Python 3
if sys.version_info[0] < 3:
cls.assertCountEqual = cls.assertItemsEqual
def setUp(self):
pass
def tearDown(self):
pass
@attr('IT')
def test_parquetio_it(self):
file_prefix = "parquet_it_test"
init_size = 10
data_size = 20000
with TestPipeline(is_integration_test=True) as p:
pcol = self._generate_data(p, file_prefix, init_size, data_size)
self._verify_data(pcol, init_size, data_size)
@staticmethod
def _sum_verifier(init_size, data_size, x):
expected = sum(range(data_size)) * init_size
if x != expected:
raise BeamAssertException(
"incorrect sum: expected(%d) actual(%d)" % (expected, x))
return []
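# Worked example with the values used in test_parquetio_it: init_size=10 and
# data_size=20000 give expected = 10 * sum(range(20000))
# = 10 * 199990000 = 1999900000.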
@staticmethod
def _count_verifier(init_size, data_size, x):
name, count = x[0].decode('utf-8'), x[1]
counter = Counter(
[string.ascii_uppercase[x % 26] for x in range(0, data_size * 4, 4)])
expected_count = counter[name[0]] * init_size
if count != expected_count:
raise BeamAssertException(
"incorrect count(%s): expected(%d) actual(%d)" %
(name, expected_count, count))
return []
def _verify_data(self, pcol, init_size, data_size):
read = pcol | 'read' >> ReadAllFromParquet()
v1 = (
read
| 'get_number' >> Map(lambda x: x['number'])
| 'sum_globally' >> CombineGlobally(sum)
| 'validate_number' >>
FlatMap(lambda x: TestParquetIT._sum_verifier(init_size, data_size, x)))
v2 = (
read
| 'make_pair' >> Map(lambda x: (x['name'], x['number']))
| 'count_per_key' >> Count.PerKey()
| 'validate_name' >> FlatMap(
lambda x: TestParquetIT._count_verifier(init_size, data_size, x)))
_ = ((v1, v2, pcol)
| 'flatten' >> Flatten()
| 'reshuffle' >> Reshuffle()
| 'cleanup' >> Map(lambda x: FileSystems.delete([x])))
def _generate_data(self, p, output_prefix, init_size, data_size):
init_data = [x for x in range(init_size)]
lines = (
p
| 'create' >> Create(init_data)
| 'produce' >> ParDo(ProducerFn(data_size)))
schema = pa.schema([('name', pa.binary()), ('number', pa.int64())])
files = lines | 'write' >> WriteToParquet(
output_prefix, schema, codec='snappy', file_name_suffix='.parquet')
return files
class ProducerFn(DoFn):
def __init__(self, number):
super(ProducerFn, self).__init__()
self._number = number
self._string_index = 0
self._number_index = 0
def process(self, element):
self._string_index = 0
self._number_index = 0
for _ in range(self._number):
yield {'name': self.get_string(4), 'number': self.get_int()}
def get_string(self, length):
s = []
for _ in range(length):
s.append(string.ascii_uppercase[self._string_index])
self._string_index = (self._string_index + 1) % 26
return ''.join(s)
def get_int(self):
i = self._number_index
self._number_index = self._number_index + 1
return i
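# Illustrative output pattern: within one process() call, successive
# get_string(4) results walk the alphabet cyclically ('ABCD', 'EFGH', ...,
# 'YZAB', ...) and get_int() counts 0, 1, 2, ...; both indices are reset
# when the next element is processed.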
if __name__ == '__main__':
logging.getLogger().setLevel(logging.INFO)
unittest.main()
|
apache-2.0
|
kadashu/satori
|
satori-rules/plugin/libs/bson/max_key.py
|
55
|
1320
|
# Copyright 2010-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Representation for the MongoDB internal MaxKey type.
"""
class MaxKey(object):
"""MongoDB internal MaxKey type.
.. versionchanged:: 2.7
``MaxKey`` now implements comparison operators.
"""
_type_marker = 127
def __eq__(self, other):
return isinstance(other, MaxKey)
def __hash__(self):
return hash(self._type_marker)
def __ne__(self, other):
return not self == other
def __le__(self, other):
return isinstance(other, MaxKey)
def __lt__(self, dummy):
return False
def __ge__(self, dummy):
return True
def __gt__(self, other):
return not isinstance(other, MaxKey)
def __repr__(self):
return "MaxKey()"
|
apache-2.0
|
bdelzell/creditcoin-org-creditcoin
|
contrib/devtools/test-security-check.py
|
38
|
2651
|
#!/usr/bin/python2
# Copyright (c) 2015-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Test script for security-check.py
'''
from __future__ import division,print_function
import subprocess
import sys
import unittest
def write_testcode(filename):
with open(filename, 'w') as f:
f.write('''
#include <stdio.h>
int main()
{
printf("the quick brown fox jumps over the lazy god\\n");
return 0;
}
''')
def call_security_check(cc, source, executable, options):
subprocess.check_call([cc,source,'-o',executable] + options)
p = subprocess.Popen(['./security-check.py',executable], stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
(stdout, stderr) = p.communicate()
return (p.returncode, stdout.rstrip())
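# Illustrative call (toolchain and flags as in the tests below):
#     call_security_check('gcc', 'test1.c', 'test1', ['-pie', '-fPIE'])
# compiles the source with the given flags, runs security-check.py on the
# binary, and returns (exit_code, output); (0, '') means all checks passed.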
class TestSecurityChecks(unittest.TestCase):
def test_ELF(self):
source = 'test1.c'
executable = 'test1'
cc = 'gcc'
write_testcode(source)
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-zexecstack','-fno-stack-protector','-Wl,-znorelro']),
(1, executable+': failed PIE NX RELRO Canary'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fno-stack-protector','-Wl,-znorelro']),
(1, executable+': failed PIE RELRO Canary'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fstack-protector-all','-Wl,-znorelro']),
(1, executable+': failed PIE RELRO'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fstack-protector-all','-Wl,-znorelro','-pie','-fPIE']),
(1, executable+': failed RELRO'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,-znoexecstack','-fstack-protector-all','-Wl,-zrelro','-Wl,-z,now','-pie','-fPIE']),
(0, ''))
def test_PE(self):
source = 'test1.c'
executable = 'test1.exe'
cc = 'i686-w64-mingw32-gcc'
write_testcode(source)
self.assertEqual(call_security_check(cc, source, executable, []),
(1, executable+': failed PIE NX'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,--nxcompat']),
(1, executable+': failed PIE'))
self.assertEqual(call_security_check(cc, source, executable, ['-Wl,--nxcompat','-Wl,--dynamicbase']),
(0, ''))
if __name__ == '__main__':
unittest.main()
|
mit
|
gregdek/ansible
|
test/units/config/manager/test_find_ini_config_file.py
|
27
|
10659
|
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
import os
import os.path
import stat
import pytest
from ansible.config.manager import find_ini_config_file
from ansible.module_utils._text import to_text
real_exists = os.path.exists
real_isdir = os.path.isdir
working_dir = os.path.dirname(__file__)
cfg_in_cwd = os.path.join(working_dir, 'ansible.cfg')
cfg_dir = os.path.join(working_dir, 'data')
cfg_file = os.path.join(cfg_dir, 'ansible.cfg')
alt_cfg_file = os.path.join(cfg_dir, 'test.cfg')
cfg_in_homedir = os.path.expanduser('~/.ansible.cfg')
@pytest.fixture
def setup_env(request):
cur_config = os.environ.get('ANSIBLE_CONFIG', None)
cfg_path = request.param[0]
if cfg_path is None and cur_config:
del os.environ['ANSIBLE_CONFIG']
else:
os.environ['ANSIBLE_CONFIG'] = request.param[0]
yield
if cur_config is None and cfg_path:
del os.environ['ANSIBLE_CONFIG']
else:
os.environ['ANSIBLE_CONFIG'] = cur_config
@pytest.fixture
def setup_existing_files(request, monkeypatch):
def _os_path_exists(path):
if to_text(path) in (request.param[0]):
return True
else:
return False
# Enable user and system dirs so that we know cwd takes precedence
monkeypatch.setattr("os.path.exists", _os_path_exists)
monkeypatch.setattr("os.getcwd", lambda: os.path.dirname(cfg_dir))
monkeypatch.setattr("os.path.isdir", lambda path: True if to_text(path) == cfg_dir else real_isdir(path))
class TestFindIniFile:
# This tells us to run twice, once with a file specified and once with a directory
@pytest.mark.parametrize('setup_env, expected', (([alt_cfg_file], alt_cfg_file), ([cfg_dir], cfg_file)), indirect=['setup_env'])
# This just passes the list of files that exist to the fixture
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_in_cwd, alt_cfg_file, cfg_file)]],
indirect=['setup_existing_files'])
def test_env_has_cfg_file(self, setup_env, setup_existing_files, expected):
"""ANSIBLE_CONFIG is specified, use it"""
warnings = set()
assert find_ini_config_file(warnings) == expected
assert warnings == set()
@pytest.mark.parametrize('setup_env', ([alt_cfg_file], [cfg_dir]), indirect=['setup_env'])
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_in_cwd)]],
indirect=['setup_existing_files'])
def test_env_has_no_cfg_file(self, setup_env, setup_existing_files):
"""ANSIBLE_CONFIG is specified but the file does not exist"""
warnings = set()
# since the cfg file specified by ANSIBLE_CONFIG doesn't exist, the one at cwd that does
# exist should be returned
assert find_ini_config_file(warnings) == cfg_in_cwd
assert warnings == set()
# ANSIBLE_CONFIG not specified
@pytest.mark.parametrize('setup_env', [[None]], indirect=['setup_env'])
# All config files are present
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_in_cwd, cfg_file, alt_cfg_file)]],
indirect=['setup_existing_files'])
def test_ini_in_cwd(self, setup_env, setup_existing_files):
"""ANSIBLE_CONFIG not specified. Use the cwd cfg"""
warnings = set()
assert find_ini_config_file(warnings) == cfg_in_cwd
assert warnings == set()
# ANSIBLE_CONFIG not specified
@pytest.mark.parametrize('setup_env', [[None]], indirect=['setup_env'])
# No config in cwd
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_file, alt_cfg_file)]],
indirect=['setup_existing_files'])
def test_ini_in_homedir(self, setup_env, setup_existing_files):
"""First config found is in the homedir"""
warnings = set()
assert find_ini_config_file(warnings) == cfg_in_homedir
assert warnings == set()
# ANSIBLE_CONFIG not specified
@pytest.mark.parametrize('setup_env', [[None]], indirect=['setup_env'])
# No config in cwd
@pytest.mark.parametrize('setup_existing_files', [[('/etc/ansible/ansible.cfg', cfg_file, alt_cfg_file)]], indirect=['setup_existing_files'])
def test_ini_in_systemdir(self, setup_env, setup_existing_files):
"""First config found is the system config"""
warnings = set()
assert find_ini_config_file(warnings) == '/etc/ansible/ansible.cfg'
assert warnings == set()
# ANSIBLE_CONFIG not specified
@pytest.mark.parametrize('setup_env', [[None]], indirect=['setup_env'])
# No config in cwd
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_file, alt_cfg_file)]],
indirect=['setup_existing_files'])
def test_cwd_does_not_exist(self, setup_env, setup_existing_files, monkeypatch):
"""Smoketest current working directory doesn't exist"""
def _os_stat(path):
raise OSError('%s does not exist' % path)
monkeypatch.setattr('os.stat', _os_stat)
warnings = set()
assert find_ini_config_file(warnings) == cfg_in_homedir
assert warnings == set()
@pytest.mark.parametrize('setup_env', [[None]], indirect=['setup_env'])
# No config in cwd
@pytest.mark.parametrize('setup_existing_files', [[list()]], indirect=['setup_existing_files'])
def test_no_config(self, setup_env, setup_existing_files):
"""No config present, no config found"""
warnings = set()
assert find_ini_config_file(warnings) is None
assert warnings == set()
# ANSIBLE_CONFIG not specified
@pytest.mark.parametrize('setup_env', [[None]], indirect=['setup_env'])
# All config files are present except in cwd
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_file, alt_cfg_file)]],
indirect=['setup_existing_files'])
def test_no_cwd_cfg_no_warning_on_writable(self, setup_env, setup_existing_files, monkeypatch):
"""If the cwd is writable but there is no config file there, move on with no warning"""
real_stat = os.stat
def _os_stat(path):
if path == working_dir:
from posix import stat_result
stat_info = list(real_stat(path))
stat_info[stat.ST_MODE] |= stat.S_IWOTH
return stat_result(stat_info)
else:
return real_stat(path)
monkeypatch.setattr('os.stat', _os_stat)
warnings = set()
assert find_ini_config_file(warnings) == cfg_in_homedir
assert len(warnings) == 0
# ANSIBLE_CONFIG not specified
@pytest.mark.parametrize('setup_env', [[None]], indirect=['setup_env'])
# All config files are present
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_in_cwd, cfg_file, alt_cfg_file)]],
indirect=['setup_existing_files'])
def test_cwd_warning_on_writable(self, setup_env, setup_existing_files, monkeypatch):
"""If the cwd is writable, warn and skip it """
real_stat = os.stat
def _os_stat(path):
if path == working_dir:
from posix import stat_result
stat_info = list(real_stat(path))
stat_info[stat.ST_MODE] |= stat.S_IWOTH
return stat_result(stat_info)
else:
return real_stat(path)
monkeypatch.setattr('os.stat', _os_stat)
warnings = set()
assert find_ini_config_file(warnings) == cfg_in_homedir
assert len(warnings) == 1
warning = warnings.pop()
assert u'Ansible is being run in a world writable directory' in warning
assert u'ignoring it as an ansible.cfg source' in warning
# ANSIBLE_CONFIG is specified
@pytest.mark.parametrize('setup_env, expected', (([alt_cfg_file], alt_cfg_file), ([cfg_in_cwd], cfg_in_cwd)), indirect=['setup_env'])
# All config files are present
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_in_cwd, cfg_file, alt_cfg_file)]],
indirect=['setup_existing_files'])
def test_no_warning_on_writable_if_env_used(self, setup_env, setup_existing_files, monkeypatch, expected):
"""If the cwd is writable but ANSIBLE_CONFIG was used, no warning should be issued"""
real_stat = os.stat
def _os_stat(path):
if path == working_dir:
from posix import stat_result
stat_info = list(real_stat(path))
stat_info[stat.ST_MODE] |= stat.S_IWOTH
return stat_result(stat_info)
else:
return real_stat(path)
monkeypatch.setattr('os.stat', _os_stat)
warnings = set()
assert find_ini_config_file(warnings) == expected
assert warnings == set()
# ANSIBLE_CONFIG not specified
@pytest.mark.parametrize('setup_env', [[None]], indirect=['setup_env'])
# All config files are present
@pytest.mark.parametrize('setup_existing_files',
[[('/etc/ansible/ansible.cfg', cfg_in_homedir, cfg_in_cwd, cfg_file, alt_cfg_file)]],
indirect=['setup_existing_files'])
def test_cwd_warning_on_writable_no_warning_set(self, setup_env, setup_existing_files, monkeypatch):
"""Smoketest that the function succeeds even though no warning set was passed in"""
real_stat = os.stat
def _os_stat(path):
if path == working_dir:
from posix import stat_result
stat_info = list(real_stat(path))
stat_info[stat.ST_MODE] |= stat.S_IWOTH
return stat_result(stat_info)
else:
return real_stat(path)
monkeypatch.setattr('os.stat', _os_stat)
assert find_ini_config_file() == cfg_in_homedir
|
gpl-3.0
|
EmuxEvans/mbed
|
workspace_tools/host_tests/serial_nc_tx_auto.py
|
37
|
1962
|
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import sys
import uuid
import time
import string
from sys import stdout
class SerialNCTXTest():
def test(self, selftest):
selftest.mbed.flush()
selftest.mbed.serial_write("S")
strip_chars = string.whitespace + "\0"
out_str = selftest.mbed.serial_readline()
selftest.notify("HOST: " + out_str)
if not out_str:
selftest.notify("HOST: No output detected")
return selftest.RESULT_IO_SERIAL
out_str_stripped = out_str.strip(strip_chars)
if out_str_stripped != "TX OK - Expected":
selftest.notify("HOST: Unexpected output. Expected 'TX OK - Expected' but received '%s'" % out_str_stripped)
return selftest.RESULT_FAILURE
out_str = selftest.mbed.serial_readline()
# If no characters received, pass the test
if not out_str:
selftest.notify("HOST: No further output detected")
return selftest.RESULT_SUCCESS
else:
out_str_stripped = out_str.strip(strip_chars)
if out_str_stripped == "TX OK - Unexpected":
selftest.notify("HOST: Unexpected output returned indicating TX still functioning")
else:
selftest.notify("HOST: Extraneous output '%s' detected indicating unknown error" % out_str_stripped)
return selftest.RESULT_FAILURE
|
apache-2.0
|
bbc/kamaelia
|
Sketches/MPS/BugReports/FixTests/Kamaelia/Examples/PythonInterpreter/MultiProcessPygamePythonInterpreter.py
|
6
|
1249
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from Kamaelia.Chassis.Pipeline import Pipeline
from Kamaelia.UI.Pygame.Text import Textbox, TextDisplayer
from Kamaelia.Experimental.PythonInterpreter import InterpreterTransformer
from Axon.experimental.Process import ProcessPipelineComponent
ProcessPipelineComponent(
Textbox(size = (800, 300), position = (100,380)),
InterpreterTransformer(),
TextDisplayer(size = (800, 300), position = (100,40)),
).run()
|
apache-2.0
|