repo_name | path | copies | size | content | license |
---|---|---|---|---|---|
0x46616c6b/ansible | lib/ansible/plugins/action/iosxr.py | 15 | 4388 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import sys
import copy
from ansible.plugins.action.normal import ActionModule as _ActionModule
from ansible.module_utils._text import to_bytes
from ansible.utils.path import unfrackpath
from ansible.plugins import connection_loader
from ansible.compat.six import iteritems
from ansible.module_utils.iosxr import iosxr_argument_spec
from ansible.module_utils.basic import AnsibleFallbackNotFound
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
class ActionModule(_ActionModule):
def run(self, tmp=None, task_vars=None):
if self._play_context.connection != 'local':
return dict(
failed=True,
msg='invalid connection specified, expected connection=local, '
'got %s' % self._play_context.connection
)
provider = self.load_provider()
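        # Clone the play context and retarget it at a persistent
        # network_cli connection, preferring explicit provider values over
        # what the task's play context supplies.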
pc = copy.deepcopy(self._play_context)
pc.connection = 'network_cli'
pc.network_os = 'iosxr'
pc.port = provider['port'] or self._play_context.port or 22
pc.remote_user = provider['username'] or self._play_context.connection_user
pc.password = provider['password'] or self._play_context.password
pc.timeout = provider['timeout'] or self._play_context.timeout
connection = self._shared_loader_obj.connection_loader.get('persistent', pc, sys.stdin)
socket_path = self._get_socket_path(pc)
if not os.path.exists(socket_path):
# start the connection if it isn't started
display.vvvv('calling open_shell()', pc.remote_addr)
rc, out, err = connection.exec_command('open_shell()')
if rc != 0:
return {'failed': True, 'msg': 'unable to open shell', 'rc': rc}
else:
            # make sure we are in the right cli context, which should be
            # enable mode and not config mode
rc, out, err = connection.exec_command('prompt()')
while str(out).strip().endswith(')#'):
display.vvvv('wrong context, sending exit to device', self._play_context.remote_addr)
connection.exec_command('exit')
rc, out, err = connection.exec_command('prompt()')
task_vars['ansible_socket'] = socket_path
result = super(ActionModule, self).run(tmp, task_vars)
return result
def _get_socket_path(self, play_context):
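        # Reuse the ssh plugin's ControlPath template so the persistent
        # connection socket is computed under ~/.ansible/pc.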
ssh = connection_loader.get('ssh', class_only=True)
cp = ssh._create_control_path(play_context.remote_addr, play_context.port, play_context.remote_user)
path = unfrackpath("$HOME/.ansible/pc")
return cp % dict(directory=path)
def load_provider(self):
provider = self._task.args.get('provider', {})
for key, value in iteritems(iosxr_argument_spec):
if key != 'provider' and key not in provider:
if key in self._task.args:
provider[key] = self._task.args[key]
elif 'fallback' in value:
provider[key] = self._fallback(value['fallback'])
elif key not in provider:
provider[key] = None
return provider
def _fallback(self, fallback):
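        # A fallback spec is (strategy, args, kwargs): positional args
        # arrive as a list, keyword args as a dict,
        # e.g. (env_fallback, ['ANSIBLE_NET_USERNAME']).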
strategy = fallback[0]
args = []
kwargs = {}
for item in fallback[1:]:
if isinstance(item, dict):
kwargs = item
else:
args = item
try:
return strategy(*args, **kwargs)
except AnsibleFallbackNotFound:
pass
| gpl-3.0 |
TalShafir/ansible | contrib/inventory/rax.py | 27 | 16821 | #!/usr/bin/env python
# (c) 2013, Jesse Keating <[email protected]>,
# Paul Durivage <[email protected]>,
# Matt Martz <[email protected]>
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
Rackspace Cloud Inventory
Authors:
    Jesse Keating <[email protected]>,
Paul Durivage <[email protected]>,
Matt Martz <[email protected]>
Description:
    Generates inventory that Ansible can understand by making API requests
    to the Rackspace Public Cloud API
When run against a specific host, this script returns variables similar to:
rax_os-ext-sts_task_state
rax_addresses
rax_links
rax_image
rax_os-ext-sts_vm_state
rax_flavor
rax_id
rax_rax-bandwidth_bandwidth
rax_user_id
rax_os-dcf_diskconfig
rax_accessipv4
rax_accessipv6
rax_progress
rax_os-ext-sts_power_state
rax_metadata
rax_status
rax_updated
rax_hostid
rax_name
rax_created
rax_tenant_id
rax_loaded
Configuration:
    rax.py can be configured using a rax.ini file or via environment
    variables. The rax.ini file should live in the same directory as this
    script.
The section header for configuration values related to this
inventory plugin is [rax]
[rax]
creds_file = ~/.rackspace_cloud_credentials
regions = IAD,ORD,DFW
env = prod
meta_prefix = meta
access_network = public
access_ip_version = 4
Each of these configurations also has a corresponding environment variable.
An environment variable will override a configuration file value.
creds_file:
Environment Variable: RAX_CREDS_FILE
An optional configuration that points to a pyrax-compatible credentials
file.
If not supplied, rax.py will look for a credentials file
at ~/.rackspace_cloud_credentials. It uses the Rackspace Python SDK,
and therefore requires a file formatted per the SDK's specifications.
https://github.com/rackspace/pyrax/blob/master/docs/getting_started.md
regions:
Environment Variable: RAX_REGION
An optional environment variable to narrow inventory search
scope. If used, needs a value like ORD, DFW, SYD (a Rackspace
datacenter) and optionally accepts a comma-separated list.
environment:
Environment Variable: RAX_ENV
A configuration that will use an environment as configured in
~/.pyrax.cfg, see
https://github.com/rackspace/pyrax/blob/master/docs/getting_started.md
meta_prefix:
Environment Variable: RAX_META_PREFIX
Default: meta
A configuration that changes the prefix used for meta key/value groups.
    For compatibility with ec2.py, set to "tag".
access_network:
Environment Variable: RAX_ACCESS_NETWORK
Default: public
A configuration that will tell the inventory script to use a specific
server network to determine the ansible_ssh_host value. If no address
    is found, ansible_ssh_host will not be set. Accepts a comma-separated
    list of network names; the first one found wins.
access_ip_version:
Environment Variable: RAX_ACCESS_IP_VERSION
Default: 4
A configuration related to "access_network" that will attempt to
determine the ansible_ssh_host value for either IPv4 or IPv6. If no
address is found, ansible_ssh_host will not be set.
    Acceptable values are 4 or 6. Values other than 4 or 6 will be
    ignored, and 4 will be used. Accepts a comma-separated list; the
    first one found wins.
Examples:
List server instances
$ RAX_CREDS_FILE=~/.raxpub rax.py --list
List servers in ORD datacenter only
$ RAX_CREDS_FILE=~/.raxpub RAX_REGION=ORD rax.py --list
List servers in ORD and DFW datacenters
$ RAX_CREDS_FILE=~/.raxpub RAX_REGION=ORD,DFW rax.py --list
Get server details for server named "server.example.com"
$ RAX_CREDS_FILE=~/.raxpub rax.py --host server.example.com
Use the instance private IP to connect (instead of public IP)
$ RAX_CREDS_FILE=~/.raxpub RAX_ACCESS_NETWORK=private rax.py --list
"""
import os
import re
import sys
import argparse
import warnings
import collections
import ConfigParser
from six import iteritems
import json
try:
import pyrax
from pyrax.utils import slugify
except ImportError:
sys.exit('pyrax is required for this module')
from time import time
from ansible.constants import get_config
from ansible.module_utils.parsing.convert_bool import boolean
from ansible.module_utils.six import text_type
NON_CALLABLES = (text_type, str, bool, dict, int, list, type(None))
def load_config_file():
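    # Read rax.ini from this script's directory; return None on parse error.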
p = ConfigParser.ConfigParser()
config_file = os.path.join(os.path.dirname(os.path.realpath(__file__)),
'rax.ini')
try:
p.read(config_file)
except ConfigParser.Error:
return None
else:
return p
def rax_slugify(value):
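    # Normalize an attribute name into a 'rax_'-prefixed snake_case key.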
return 'rax_%s' % (re.sub(r'[^\w-]', '_', value).lower().lstrip('_'))
def to_dict(obj):
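    # Collect the object's public, non-callable attributes as rax_* keys.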
instance = {}
for key in dir(obj):
value = getattr(obj, key)
if isinstance(value, NON_CALLABLES) and not key.startswith('_'):
key = rax_slugify(key)
instance[key] = value
return instance
def host(regions, hostname):
hostvars = {}
for region in regions:
# Connect to the region
cs = pyrax.connect_to_cloudservers(region=region)
for server in cs.servers.list():
if server.name == hostname:
for key, value in to_dict(server).items():
hostvars[key] = value
# And finally, add an IP address
hostvars['ansible_ssh_host'] = server.accessIPv4
print(json.dumps(hostvars, sort_keys=True, indent=4))
def _list_into_cache(regions):
groups = collections.defaultdict(list)
hostvars = collections.defaultdict(dict)
images = {}
cbs_attachments = collections.defaultdict(dict)
prefix = get_config(p, 'rax', 'meta_prefix', 'RAX_META_PREFIX', 'meta')
try:
# Ansible 2.3+
networks = get_config(p, 'rax', 'access_network',
'RAX_ACCESS_NETWORK', 'public', value_type='list')
except TypeError:
# Ansible 2.2.x and below
# pylint: disable=unexpected-keyword-arg
networks = get_config(p, 'rax', 'access_network',
'RAX_ACCESS_NETWORK', 'public', islist=True)
try:
try:
# Ansible 2.3+
ip_versions = map(int, get_config(p, 'rax', 'access_ip_version',
'RAX_ACCESS_IP_VERSION', 4, value_type='list'))
except TypeError:
# Ansible 2.2.x and below
# pylint: disable=unexpected-keyword-arg
ip_versions = map(int, get_config(p, 'rax', 'access_ip_version',
'RAX_ACCESS_IP_VERSION', 4, islist=True))
except:
ip_versions = [4]
else:
ip_versions = [v for v in ip_versions if v in [4, 6]]
if not ip_versions:
ip_versions = [4]
# Go through all the regions looking for servers
for region in regions:
# Connect to the region
cs = pyrax.connect_to_cloudservers(region=region)
if cs is None:
warnings.warn(
'Connecting to Rackspace region "%s" has caused Pyrax to '
'return None. Is this a valid region?' % region,
RuntimeWarning)
continue
for server in cs.servers.list():
# Create a group on region
groups[region].append(server.name)
# Check if group metadata key in servers' metadata
group = server.metadata.get('group')
if group:
groups[group].append(server.name)
for extra_group in server.metadata.get('groups', '').split(','):
if extra_group:
groups[extra_group].append(server.name)
# Add host metadata
for key, value in to_dict(server).items():
hostvars[server.name][key] = value
hostvars[server.name]['rax_region'] = region
for key, value in iteritems(server.metadata):
groups['%s_%s_%s' % (prefix, key, value)].append(server.name)
groups['instance-%s' % server.id].append(server.name)
groups['flavor-%s' % server.flavor['id']].append(server.name)
# Handle boot from volume
if not server.image:
if not cbs_attachments[region]:
cbs = pyrax.connect_to_cloud_blockstorage(region)
for vol in cbs.list():
if boolean(vol.bootable, strict=False):
for attachment in vol.attachments:
metadata = vol.volume_image_metadata
server_id = attachment['server_id']
cbs_attachments[region][server_id] = {
'id': metadata['image_id'],
'name': slugify(metadata['image_name'])
}
image = cbs_attachments[region].get(server.id)
if image:
server.image = {'id': image['id']}
hostvars[server.name]['rax_image'] = server.image
hostvars[server.name]['rax_boot_source'] = 'volume'
images[image['id']] = image['name']
else:
hostvars[server.name]['rax_boot_source'] = 'local'
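            # Group by the human-readable image name when it is already
            # cached; otherwise look the image up via the API, falling back
            # to grouping by the raw image id.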
try:
imagegroup = 'image-%s' % images[server.image['id']]
groups[imagegroup].append(server.name)
groups['image-%s' % server.image['id']].append(server.name)
except KeyError:
try:
image = cs.images.get(server.image['id'])
except cs.exceptions.NotFound:
groups['image-%s' % server.image['id']].append(server.name)
else:
images[image.id] = image.human_id
groups['image-%s' % image.human_id].append(server.name)
groups['image-%s' % server.image['id']].append(server.name)
# And finally, add an IP address
ansible_ssh_host = None
# use accessIPv[46] instead of looping address for 'public'
for network_name in networks:
if ansible_ssh_host:
break
if network_name == 'public':
for version_name in ip_versions:
if ansible_ssh_host:
break
if version_name == 6 and server.accessIPv6:
ansible_ssh_host = server.accessIPv6
elif server.accessIPv4:
ansible_ssh_host = server.accessIPv4
if not ansible_ssh_host:
addresses = server.addresses.get(network_name, [])
for address in addresses:
for version_name in ip_versions:
if ansible_ssh_host:
break
if address.get('version') == version_name:
ansible_ssh_host = address.get('addr')
break
if ansible_ssh_host:
hostvars[server.name]['ansible_ssh_host'] = ansible_ssh_host
if hostvars:
groups['_meta'] = {'hostvars': hostvars}
with open(get_cache_file_path(regions), 'w') as cache_file:
json.dump(groups, cache_file)
def get_cache_file_path(regions):
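    # Cache files are per-user and per-region-set under ~/.ansible/tmp.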
regions_str = '.'.join([reg.strip().lower() for reg in regions])
ansible_tmp_path = os.path.join(os.path.expanduser("~"), '.ansible', 'tmp')
if not os.path.exists(ansible_tmp_path):
os.makedirs(ansible_tmp_path)
return os.path.join(ansible_tmp_path,
'ansible-rax-%s-%s.cache' % (
pyrax.identity.username, regions_str))
def _list(regions, refresh_cache=True):
cache_max_age = int(get_config(p, 'rax', 'cache_max_age',
'RAX_CACHE_MAX_AGE', 600))
if (not os.path.exists(get_cache_file_path(regions)) or
refresh_cache or
(time() - os.stat(get_cache_file_path(regions))[-1]) > cache_max_age):
        # Cache file is missing, older than cache_max_age (600s by
        # default), or a refresh was requested
_list_into_cache(regions)
with open(get_cache_file_path(regions), 'r') as cache_file:
groups = json.load(cache_file)
print(json.dumps(groups, sort_keys=True, indent=4))
def parse_args():
parser = argparse.ArgumentParser(description='Ansible Rackspace Cloud '
'inventory module')
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument('--list', action='store_true',
help='List active servers')
group.add_argument('--host', help='List details about the specific host')
parser.add_argument('--refresh-cache', action='store_true', default=False,
                        help=('Force refresh of cache, making API requests to '
                              'Rackspace (default: False - use cache files)'))
return parser.parse_args()
def setup():
default_creds_file = os.path.expanduser('~/.rackspace_cloud_credentials')
env = get_config(p, 'rax', 'environment', 'RAX_ENV', None)
if env:
pyrax.set_environment(env)
keyring_username = pyrax.get_setting('keyring_username')
# Attempt to grab credentials from environment first
creds_file = get_config(p, 'rax', 'creds_file',
'RAX_CREDS_FILE', None)
if creds_file is not None:
creds_file = os.path.expanduser(creds_file)
else:
# But if that fails, use the default location of
# ~/.rackspace_cloud_credentials
if os.path.isfile(default_creds_file):
creds_file = default_creds_file
elif not keyring_username:
sys.exit('No value in environment variable %s and/or no '
'credentials file at %s'
% ('RAX_CREDS_FILE', default_creds_file))
identity_type = pyrax.get_setting('identity_type')
pyrax.set_setting('identity_type', identity_type or 'rackspace')
region = pyrax.get_setting('region')
try:
if keyring_username:
pyrax.keyring_auth(keyring_username, region=region)
else:
pyrax.set_credential_file(creds_file, region=region)
except Exception as e:
sys.exit("%s: %s" % (e, e.message))
regions = []
if region:
regions.append(region)
else:
try:
# Ansible 2.3+
region_list = get_config(p, 'rax', 'regions', 'RAX_REGION', 'all',
value_type='list')
except TypeError:
# Ansible 2.2.x and below
# pylint: disable=unexpected-keyword-arg
region_list = get_config(p, 'rax', 'regions', 'RAX_REGION', 'all',
islist=True)
for region in region_list:
region = region.strip().upper()
if region == 'ALL':
regions = pyrax.regions
break
elif region not in pyrax.regions:
sys.exit('Unsupported region %s' % region)
elif region not in regions:
regions.append(region)
return regions
def main():
args = parse_args()
regions = setup()
if args.list:
_list(regions, refresh_cache=args.refresh_cache)
elif args.host:
host(regions, args.host)
sys.exit(0)
p = load_config_file()
if __name__ == '__main__':
main()
| gpl-3.0 |
nttks/edx-platform | biz/djangoapps/ga_login/tests/test_views.py | 1 | 37037 | import ddt
import json
from mock import patch
from django.conf import settings
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
from django.utils.crypto import get_random_string
from biz.djangoapps.ga_contract.models import ContractAuth
from biz.djangoapps.ga_invitation.models import (
INPUT_INVITATION_CODE, REGISTER_INVITATION_CODE, UNREGISTER_INVITATION_CODE
)
from biz.djangoapps.ga_invitation.tests.factories import ContractRegisterFactory
from biz.djangoapps.ga_invitation.tests.test_views import BizContractTestBase
from biz.djangoapps.ga_login.models import BizUser
from biz.djangoapps.ga_login.tests.factories import BizUserFactory
class LoginViewsTestBase(BizContractTestBase):
def _url_index(self, url_code):
return reverse('biz:login:index', kwargs={'url_code': url_code})
def _url_submit(self):
return reverse('biz:login:submit')
@property
def url_code(self):
return ContractAuth.objects.get(contract=self.contract_auth).url_code
@property
def url_code_disabled(self):
return ContractAuth.objects.get(contract=self.contract_auth_disabled).url_code
@property
def url_code_student_cannot_register(self):
return ContractAuth.objects.get(contract=self.contract_auth_student_cannot_register).url_code
def _assert_call(self, mock_patch, call_withes=None):
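        # Assert the mock saw exactly the expected single-argument calls,
        # in any order.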
call_withes = call_withes or []
self.assertEqual(len(call_withes), mock_patch.call_count)
for call_with in call_withes:
mock_patch.assert_any_call(call_with)
def setup_user(self, login_code=None, do_activate=True, do_login=True):
self.username = get_random_string(16)
self.password = get_random_string(8)
self.email = self.username + '@test.com'
self.user = self.create_account(
self.username,
self.email,
self.password,
)
self.login_code = login_code
if login_code:
BizUserFactory.create(user=self.user, login_code=login_code)
if do_activate:
self.activate_user(self.email)
if do_login:
self.login(self.email, self.password)
return self.user
class LoginViewsIndexTest(LoginViewsTestBase):
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_no_contract(self, warning_log):
self.assert_request_status_code(404, self._url_index('hogehoge'))
self._assert_call(warning_log, [
"Not found contract with url_code:hogehoge"
])
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_disabled_contract(self, warning_log):
self.assert_request_status_code(404, self._url_index(self.url_code_disabled))
self._assert_call(warning_log, [
"Disabled contract:{} with url_code:{}".format(self.contract_auth_disabled.id, self.url_code_disabled),
])
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_success(self, warning_log):
self.assert_request_status_code(200, self._url_index(self.url_code))
self._assert_call(warning_log)
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_logined_no_login_code(self, warning_log):
self.setup_user()
self.assert_request_status_code(404, self._url_index(self.url_code))
self._assert_call(warning_log)
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_logined_without_contract_register(self, warning_log):
self.setup_user('test-login-code')
self.assert_request_status_code(404, self._url_index(self.url_code))
self._assert_call(warning_log, [
"Unknown login_code:{} with contract:{}".format(self.login_code, self.contract_auth.id)
])
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_logined_with_contract_register_input(self, warning_log):
self.setup_user('test-login-code')
self.create_contract_register(self.user, self.contract_auth, INPUT_INVITATION_CODE)
response = self.assert_request_status_code(302, self._url_index(self.url_code))
self.assertTrue(response.url.endswith(reverse('biz:invitation:confirm', kwargs={'invitation_code': self.contract_auth.invitation_code})))
self._assert_call(warning_log)
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_logined_with_contract_register_register(self, warning_log):
self.setup_user('test-login-code')
self.create_contract_register(self.user, self.contract_auth, REGISTER_INVITATION_CODE)
response = self.assert_request_status_code(302, self._url_index(self.url_code))
self.assertTrue(response.url.endswith(reverse('dashboard')))
self._assert_call(warning_log)
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_logined_with_contract_register_unregister(self, warning_log):
self.setup_user('test-login-code')
self.create_contract_register(self.user, self.contract_auth, UNREGISTER_INVITATION_CODE)
self.assert_request_status_code(404, self._url_index(self.url_code))
self._assert_call(warning_log, [
"Unregister status user:{} with contract:{}".format(self.user.id, self.contract_auth.id)
])
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_logined_with_contract_register_type_director_input(self, warning_log):
self.setup_user('test-login-code')
self.create_contract_register(self.user, self.contract_auth_student_cannot_register, INPUT_INVITATION_CODE)
self.assert_request_status_code(404, self._url_index(self.url_code_student_cannot_register))
self._assert_call(warning_log, [
"Student can not be registered, status is input, user:{} contract:{}".format(self.user.id, self.contract_auth_student_cannot_register.id)
])
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_logined_with_contract_register_type_director_register(self, warning_log):
self.setup_user('test-login-code')
self.create_contract_register(self.user, self.contract_auth_student_cannot_register, REGISTER_INVITATION_CODE)
response = self.assert_request_status_code(302, self._url_index(self.url_code_student_cannot_register))
self.assertTrue(response.url.endswith(reverse('dashboard')))
self._assert_call(warning_log)
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_logined_with_contract_register_type_director_unregister(self, warning_log):
self.setup_user('test-login-code')
self.create_contract_register(self.user, self.contract_auth_student_cannot_register, UNREGISTER_INVITATION_CODE)
self.assert_request_status_code(404, self._url_index(self.url_code_student_cannot_register))
self._assert_call(warning_log, [
"Unregister status user:{} with contract:{}".format(self.user.id, self.contract_auth_student_cannot_register.id)
])
FEATURES_SQUELCH_PII_IN_LOGS_ENABLED = settings.FEATURES.copy()
FEATURES_SQUELCH_PII_IN_LOGS_ENABLED['SQUELCH_PII_IN_LOGS'] = True
FEATURES_SQUELCH_PII_IN_LOGS_DISABLED = settings.FEATURES.copy()
FEATURES_SQUELCH_PII_IN_LOGS_DISABLED['SQUELCH_PII_IN_LOGS'] = False
@ddt.ddt
class LoginViewsSubmitTest(LoginViewsTestBase):
def setUp(self):
super(LoginViewsSubmitTest, self).setUp()
        # Mock out analytics so setup_user works even when LMS_SEGMENT_KEY
        # is set.
dummy_patcher = patch('student.views.analytics')
dummy_patcher.start()
self.addCleanup(dummy_patcher.stop)
analytics_patcher = patch('biz.djangoapps.ga_login.views.analytics')
self.mock_tracker = analytics_patcher.start()
self.addCleanup(analytics_patcher.stop)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
@ddt.data(
({'url_code': 'hoge', 'login_code': 'hoge'}, None),
({'url_code': 'hoge', 'password': 'hoge'}, None),
({'login_code': 'hoge', 'password': 'hoge'}, None),
({'url_code': 'hoge', 'login_code': 'hoge', 'password': 'hoge'}, ["Not found contract with url_code:hoge"]),
)
@ddt.unpack
def test_no_param_no_contract(self, data, warning_call_with, warning_log, audit_warning_log, critical_log, audit_critical_log):
response = self.assert_request_status_code(403, self._url_submit(), 'POST', data=data)
self.assertEqual(response.content, u"There was an error receiving your login information. Please email us.")
self._assert_call(warning_log, warning_call_with)
self._assert_call(audit_warning_log)
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@override_settings(FEATURES=FEATURES_SQUELCH_PII_IN_LOGS_ENABLED)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_not_found_login_code_squelch_on(self, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user(do_login=False)
response = self.assert_request_status_code(403, self._url_submit(), 'POST', data={
'url_code': self.url_code,
'login_code': 'hoge',
'password': 'hoge',
})
self.assertEqual(response.content, u"Login code or password is incorrect.")
self._assert_call(warning_log, [
"Unknown login_code:{} with contract:{}".format('hoge', self.contract_auth.id),
"Login failed contract:{0} - Unknown user".format(self.contract_auth.id),
])
self._assert_call(audit_warning_log)
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@override_settings(FEATURES=FEATURES_SQUELCH_PII_IN_LOGS_DISABLED)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_not_found_login_code_squelch_off(self, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user(do_login=False)
response = self.assert_request_status_code(403, self._url_submit(), 'POST', data={
'url_code': self.url_code,
'login_code': 'hoge',
'password': 'hoge',
})
self.assertEqual(response.content, u"Login code or password is incorrect.")
self._assert_call(warning_log, [
"Unknown login_code:{} with contract:{}".format('hoge', self.contract_auth.id),
"Login failed contract:{0} - Unknown user {1}".format(self.contract_auth.id, 'hoge'),
])
self._assert_call(audit_warning_log)
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@override_settings(FEATURES=FEATURES_SQUELCH_PII_IN_LOGS_ENABLED)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_not_found_contract_register_squelch_on(self, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user('Test-Login-Code', do_login=False)
response = self.assert_request_status_code(403, self._url_submit(), 'POST', data={
'url_code': self.url_code,
'login_code': self.login_code,
'password': 'hoge',
})
self.assertEqual(response.content, u"Login code or password is incorrect.")
self._assert_call(warning_log, [
"Unknown login_code:{} with contract:{}".format(self.login_code, self.contract_auth.id),
"Login failed contract:{0} - Unknown user".format(self.contract_auth.id),
])
self._assert_call(audit_warning_log)
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@override_settings(FEATURES=FEATURES_SQUELCH_PII_IN_LOGS_DISABLED)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_not_found_contract_register_squelch_off(self, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user('Test-Login-Code', do_login=False)
response = self.assert_request_status_code(403, self._url_submit(), 'POST', data={
'url_code': self.url_code,
'login_code': self.login_code,
'password': 'hoge',
})
self.assertEqual(response.content, u"Login code or password is incorrect.")
self._assert_call(warning_log, [
"Unknown login_code:{} with contract:{}".format(self.login_code, self.contract_auth.id),
"Login failed contract:{0} - Unknown user {1}".format(self.contract_auth.id, self.login_code),
])
self._assert_call(audit_warning_log)
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@override_settings(FEATURES=FEATURES_SQUELCH_PII_IN_LOGS_ENABLED)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_found_contract_register_unregister_squelch_on(self, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user('Test-Login-Code', do_login=False)
self.create_contract_register(self.user, self.contract_auth, UNREGISTER_INVITATION_CODE)
response = self.assert_request_status_code(403, self._url_submit(), 'POST', data={
'url_code': self.url_code,
'login_code': self.login_code,
'password': 'hoge',
})
self.assertEqual(response.content, u"Login code or password is incorrect.")
self._assert_call(warning_log, [
"Unregister status user:{} with contract:{}".format(self.user.id, self.contract_auth.id),
"Login failed contract:{0} - Unknown user".format(self.contract_auth.id),
])
self._assert_call(audit_warning_log)
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@override_settings(FEATURES=FEATURES_SQUELCH_PII_IN_LOGS_DISABLED)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_found_contract_register_unregister_squelch_off(self, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user('Test-Login-Code', do_login=False)
self.create_contract_register(self.user, self.contract_auth, UNREGISTER_INVITATION_CODE)
response = self.assert_request_status_code(403, self._url_submit(), 'POST', data={
'url_code': self.url_code,
'login_code': self.login_code,
'password': 'hoge',
})
self.assertEqual(response.content, u"Login code or password is incorrect.")
self._assert_call(warning_log, [
"Unregister status user:{} with contract:{}".format(self.user.id, self.contract_auth.id),
"Login failed contract:{0} - Unknown user {1}".format(self.contract_auth.id, self.login_code),
])
self._assert_call(audit_warning_log)
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@override_settings(FEATURES=FEATURES_SQUELCH_PII_IN_LOGS_ENABLED)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_found_contract_register_type_director_input_squelch_on(self, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user('Test-Login-Code', do_login=False)
self.create_contract_register(self.user, self.contract_auth_student_cannot_register, INPUT_INVITATION_CODE)
response = self.assert_request_status_code(403, self._url_submit(), 'POST', data={
'url_code': self.url_code_student_cannot_register,
'login_code': self.login_code,
'password': self.password,
})
self.assertEqual(response.content, u"Please ask your administrator to register the invitation code.")
self._assert_call(warning_log, [
"Student can not be registered, status is input, contract:{0}, user.id:{1}".format(self.contract_auth_student_cannot_register.id, self.user.id),
])
self._assert_call(audit_warning_log)
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@override_settings(FEATURES=FEATURES_SQUELCH_PII_IN_LOGS_DISABLED)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_found_contract_register_type_director_input_squelch_off(self, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user('Test-Login-Code', do_login=False)
self.create_contract_register(self.user, self.contract_auth_student_cannot_register, INPUT_INVITATION_CODE)
response = self.assert_request_status_code(403, self._url_submit(), 'POST', data={
'url_code': self.url_code_student_cannot_register,
'login_code': self.login_code,
'password': self.password,
})
self.assertEqual(response.content, u"Please ask your administrator to register the invitation code.")
self._assert_call(warning_log, [
"Student can not be registered, status is input, contract:{0}, user {1}".format(self.contract_auth_student_cannot_register.id, self.login_code),
])
self._assert_call(audit_warning_log)
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
@patch('biz.djangoapps.ga_login.views.log.debug')
def test_found_contract_register_type_director_register(self, debug_log, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user('Test-Login-Code', do_login=False)
self.create_contract_register(self.user, self.contract_auth_student_cannot_register, REGISTER_INVITATION_CODE)
response = self.assert_request_status_code(204, self._url_submit(), 'POST', data={
'url_code': self.url_code_student_cannot_register,
'login_code': self.login_code,
'password': self.password,
})
self.assertEqual(response.content, u"")
self.assertEqual(0, self.mock_tracker.identify.call_count)
self.assertEqual(0, self.mock_tracker.track.call_count)
self._assert_call(debug_log)
self._assert_call(warning_log)
self._assert_call(audit_warning_log)
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@override_settings(FEATURES=FEATURES_SQUELCH_PII_IN_LOGS_ENABLED)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_found_contract_register_type_director_unregister_squelch_on(self, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user('Test-Login-Code', do_login=False)
self.create_contract_register(self.user, self.contract_auth_student_cannot_register, UNREGISTER_INVITATION_CODE)
response = self.assert_request_status_code(403, self._url_submit(), 'POST', data={
'url_code': self.url_code_student_cannot_register,
'login_code': self.login_code,
'password': 'hoge',
})
self.assertEqual(response.content, u"Login code or password is incorrect.")
self._assert_call(warning_log, [
"Unregister status user:{} with contract:{}".format(self.user.id, self.contract_auth_student_cannot_register.id),
"Login failed contract:{0} - Unknown user".format(self.contract_auth_student_cannot_register.id),
])
self._assert_call(audit_warning_log)
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@override_settings(FEATURES=FEATURES_SQUELCH_PII_IN_LOGS_DISABLED)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_found_contract_register_type_director_unregister_squelch_off(self, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user('Test-Login-Code', do_login=False)
self.create_contract_register(self.user, self.contract_auth_student_cannot_register, UNREGISTER_INVITATION_CODE)
response = self.assert_request_status_code(403, self._url_submit(), 'POST', data={
'url_code': self.url_code_student_cannot_register,
'login_code': self.login_code,
'password': 'hoge',
})
self.assertEqual(response.content, u"Login code or password is incorrect.")
self._assert_call(warning_log, [
"Unregister status user:{} with contract:{}".format(self.user.id, self.contract_auth_student_cannot_register.id),
"Login failed contract:{0} - Unknown user {1}".format(self.contract_auth_student_cannot_register.id, self.login_code),
])
self._assert_call(audit_warning_log)
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@override_settings(FEATURES=FEATURES_SQUELCH_PII_IN_LOGS_ENABLED)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_not_authenticate_squelch_on(self, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user('Test-Login-Code', do_login=False)
self.create_contract_register(self.user, self.contract_auth, INPUT_INVITATION_CODE)
response = self.assert_request_status_code(403, self._url_submit(), 'POST', data={
'url_code': self.url_code,
'login_code': self.login_code,
'password': 'hoge',
})
self.assertEqual(response.content, u"Login code or password is incorrect.")
self._assert_call(warning_log)
self._assert_call(audit_warning_log, [
"Login failed contract:{0} - password for user.id:{1} is invalid".format(self.contract_auth.id, self.user.id),
])
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@override_settings(FEATURES=FEATURES_SQUELCH_PII_IN_LOGS_DISABLED)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_not_authenticate_squelch_off(self, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user('Test-Login-Code', do_login=False)
self.create_contract_register(self.user, self.contract_auth, INPUT_INVITATION_CODE)
response = self.assert_request_status_code(403, self._url_submit(), 'POST', data={
'url_code': self.url_code,
'login_code': self.login_code,
'password': 'hoge',
})
self.assertEqual(response.content, u"Login code or password is incorrect.")
self._assert_call(warning_log)
self._assert_call(audit_warning_log, [
"Login failed contract:{0} - password for {1} is invalid".format(self.contract_auth.id, self.login_code),
])
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@override_settings(LMS_SEGMENT_KEY=None)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
@patch('biz.djangoapps.ga_login.views.log.debug')
def test_success(self, debug_log, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user('Test-Login-Code', do_login=False)
self.create_contract_register(self.user, self.contract_auth, INPUT_INVITATION_CODE)
response = self.assert_request_status_code(204, self._url_submit(), 'POST', data={
'url_code': self.url_code,
'login_code': self.login_code,
'password': self.password,
})
self.assertEqual(response.content, u"")
self.assertEqual(0, self.mock_tracker.identify.call_count)
self.assertEqual(0, self.mock_tracker.track.call_count)
self._assert_call(debug_log)
self._assert_call(warning_log)
self._assert_call(audit_warning_log)
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@override_settings(LMS_SEGMENT_KEY='hoge')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
@patch('biz.djangoapps.ga_login.views.log.debug')
def test_success_analytics_track(self, debug_log, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user('Test-Login-Code', do_login=False)
self.create_contract_register(self.user, self.contract_auth, INPUT_INVITATION_CODE)
response = self.assert_request_status_code(204, self._url_submit(), 'POST', data={
'url_code': self.url_code,
'login_code': self.login_code,
'password': self.password,
})
self.assertEqual(response.content, u"")
self.assertEqual(1, self.mock_tracker.identify.call_count)
self.assertEqual(1, self.mock_tracker.track.call_count)
self._assert_call(debug_log)
self._assert_call(warning_log)
self._assert_call(audit_warning_log)
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
@patch('biz.djangoapps.ga_login.views.log.debug')
def test_success_remember(self, debug_log, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user('Test-Login-Code', do_login=False)
self.create_contract_register(self.user, self.contract_auth, INPUT_INVITATION_CODE)
response = self.assert_request_status_code(204, self._url_submit(), 'POST', data={
'url_code': self.url_code,
'login_code': self.login_code,
'password': self.password,
'remember': 'true',
})
self.assertEqual(response.content, u"")
self._assert_call(debug_log, [
"Setting user session to never expire"
])
self._assert_call(warning_log)
self._assert_call(audit_warning_log)
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
@patch('biz.djangoapps.ga_login.views.log.debug')
def test_fail(self, debug_log, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user('Test-Login-Code', do_login=False)
self.create_contract_register(self.user, self.contract_auth, INPUT_INVITATION_CODE)
with self.assertRaises(Exception), patch('biz.djangoapps.ga_login.views.login', side_effect=Exception):
response = self.assert_request_status_code(500, self._url_submit(), 'POST', data={
'url_code': self.url_code,
'login_code': self.login_code,
'password': self.password,
'remember': 'true',
})
self._assert_call(debug_log)
self._assert_call(warning_log)
self._assert_call(audit_warning_log)
self._assert_call(critical_log, [
"Login failed - Could not create session. Is memcached running?"
])
self._assert_call(audit_critical_log, [
"Login failed - Could not create session. Is memcached running?"
])
@override_settings(FEATURES=FEATURES_SQUELCH_PII_IN_LOGS_ENABLED)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_no_activate_squelch_on(self, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user('Test-Login-Code', do_login=False, do_activate=False)
self.create_contract_register(self.user, self.contract_auth, INPUT_INVITATION_CODE)
with patch('biz.djangoapps.ga_login.views.reactivation_email_for_user') as patch_reactivation_email_for_user:
response = self.assert_request_status_code(400, self._url_submit(), 'POST', data={
'url_code': self.url_code,
'login_code': self.login_code,
'password': self.password,
})
patch_reactivation_email_for_user.assert_called_once_with(self.user)
self.assertEqual(
response.content,
u"This account has not been activated. We have sent another activation message. Please check your email for the activation instructions."
)
self._assert_call(warning_log)
self._assert_call(audit_warning_log, [
"Login failed contract:{0} - Account not active for user.id:{1}, resending activation".format(self.contract_auth.id, self.user.id)
])
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@override_settings(FEATURES=FEATURES_SQUELCH_PII_IN_LOGS_DISABLED)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
def test_no_activate_squelch_off(self, warning_log, audit_warning_log, critical_log, audit_critical_log):
self.setup_user('Test-Login-Code', do_login=False, do_activate=False)
self.create_contract_register(self.user, self.contract_auth, INPUT_INVITATION_CODE)
with patch('biz.djangoapps.ga_login.views.reactivation_email_for_user') as patch_reactivation_email_for_user:
response = self.assert_request_status_code(400, self._url_submit(), 'POST', data={
'url_code': self.url_code,
'login_code': self.login_code,
'password': self.password,
})
patch_reactivation_email_for_user.assert_called_once_with(self.user)
self.assertEqual(
response.content,
u"This account has not been activated. We have sent another activation message. Please check your email for the activation instructions."
)
self._assert_call(warning_log)
self._assert_call(audit_warning_log, [
"Login failed contract:{0} - Account not active for user {1}, resending activation".format(self.contract_auth.id, self.login_code)
])
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
@patch('biz.djangoapps.ga_login.views.LoginFailures.clear_lockout_counter')
@patch('biz.djangoapps.ga_login.views.LoginFailures.increment_lockout_counter')
@ddt.data(
        ([True, True], [True], None, 403, True, False, 0, 0),  # lock
        ([True, True], [False], None, 204, False, False, 0, 1),  # ok
        ([True, True], [True], 'hoge', 403, True, False, 0, 0),  # lock
        ([True, True], [False], 'hoge', 403, False, True, 1, 0),  # no auth
        ([False, False], [True], None, 204, False, False, 0, 0),  # ok
        ([False, False], [False], None, 204, False, False, 0, 0),  # ok
        ([False, False], [True], 'hoge', 403, False, True, 0, 0),  # no auth
        ([False, False], [False], 'hoge', 403, False, True, 0, 0),  # no auth
)
@ddt.unpack
def test_login_failures(self, is_feature_enabled, is_user_locked_out, password, status_code, lock_user, not_authenticate_user,
increment_lockout_counter_call_count, clear_lockout_counter_call_count,
increment_lockout_counter, clear_lockout_counter):
self.setup_user('Test-Login-Code', do_login=False)
self.create_contract_register(self.user, self.contract_auth, INPUT_INVITATION_CODE)
with patch(
'biz.djangoapps.ga_login.views.LoginFailures.is_feature_enabled', side_effect=is_feature_enabled
), patch(
'biz.djangoapps.ga_login.views.LoginFailures.is_user_locked_out', side_effect=is_user_locked_out
):
response = self.assert_request_status_code(status_code, self._url_submit(), 'POST', data={
'url_code': self.url_code,
'login_code': self.login_code,
'password': password or self.password,
})
if lock_user:
self.assertEqual(
response.content,
u"This account has been temporarily locked due to excessive login failures. Try again later."
)
elif not_authenticate_user:
self.assertEqual(response.content, u"Login code or password is incorrect.")
else:
self.assertEqual(response.content, u"")
self.assertEqual(increment_lockout_counter_call_count, increment_lockout_counter.call_count)
self.assertEqual(clear_lockout_counter_call_count, clear_lockout_counter.call_count)
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.critical')
@patch('biz.djangoapps.ga_login.views.log.critical')
@patch('biz.djangoapps.ga_login.views.AUDIT_LOG.warning')
@patch('biz.djangoapps.ga_login.views.log.warning')
@patch('biz.djangoapps.ga_login.views.log.debug')
def test_success_same_login_code(self, debug_log, warning_log, audit_warning_log, critical_log, audit_critical_log):
# first user, do not use test
self.setup_user('Test-Login-Code', do_login=False)
self.create_contract_register(self.user, self.contract_auth_disabled, INPUT_INVITATION_CODE)
# second user, to use test
self.setup_user(self.login_code, do_login=False)
self.create_contract_register(self.user, self.contract_auth, INPUT_INVITATION_CODE)
# assert users have same login-code
self.assertEquals(2, BizUser.objects.filter(login_code=self.login_code).count())
response = self.assert_request_status_code(204, self._url_submit(), 'POST', data={
'url_code': self.url_code,
'login_code': self.login_code,
'password': self.password,
})
self.assertEqual(response.content, u"")
self.assertEqual(0, self.mock_tracker.identify.call_count)
self.assertEqual(0, self.mock_tracker.track.call_count)
self._assert_call(debug_log)
self._assert_call(warning_log)
self._assert_call(audit_warning_log)
self._assert_call(critical_log)
self._assert_call(audit_critical_log)
| agpl-3.0 |
3lnc/elasticsearch-dsl-py | test_elasticsearch_dsl/test_integration/test_index.py | 1 | 3113 | from elasticsearch_dsl import Document, Index, Text, Keyword, Date, analysis, IndexTemplate
class Post(Document):
title = Text(analyzer=analysis.analyzer('my_analyzer', tokenizer='keyword'))
published_from = Date()
def test_index_template_works(write_client):
it = IndexTemplate('test-template', 'test-*')
it.document(Post)
it.settings(number_of_replicas=0, number_of_shards=1)
it.save()
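    # The saved template matches 'test-*', so creating the index below
    # picks up the template's settings and mappings automatically.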
i = Index('test-blog')
i.create()
assert {
'test-blog': {
'mappings': {
'properties': {
'title': {'type': 'text', 'analyzer': 'my_analyzer'},
'published_from': {'type': 'date'},
}
}
}
} == write_client.indices.get_mapping(index='test-blog')
def test_index_can_be_saved_even_with_settings(write_client):
i = Index('test-blog', using=write_client)
i.settings(number_of_shards=3, number_of_replicas=0)
i.save()
i.settings(number_of_replicas=1)
i.save()
assert '1' == i.get_settings()['test-blog']['settings']['index']['number_of_replicas']
def test_index_exists(data_client):
assert Index('git').exists()
assert not Index('not-there').exists()
def test_index_can_be_created_with_settings_and_mappings(write_client):
i = Index('test-blog', using=write_client)
i.document(Post)
i.settings(number_of_replicas=0, number_of_shards=1)
i.create()
assert {
'test-blog': {
'mappings': {
'properties': {
'title': {'type': 'text', 'analyzer': 'my_analyzer'},
'published_from': {'type': 'date'}
}
}
}
} == write_client.indices.get_mapping(index='test-blog')
settings = write_client.indices.get_settings(index='test-blog')
assert settings['test-blog']['settings']['index']['number_of_replicas'] == '0'
assert settings['test-blog']['settings']['index']['number_of_shards'] == '1'
assert settings['test-blog']['settings']['index']['analysis'] == {
'analyzer': {
'my_analyzer': {
'type': 'custom',
'tokenizer': 'keyword'
}
}
}
def test_delete(write_client):
write_client.indices.create(
index='test-index',
body={'settings': {'number_of_replicas': 0, 'number_of_shards': 1}}
)
i = Index('test-index', using=write_client)
i.delete()
assert not write_client.indices.exists(index='test-index')
def test_multiple_indices_with_same_doc_type_work(write_client):
i1 = Index('test-index-1', using=write_client)
i2 = Index('test-index-2', using=write_client)
for i in (i1, i2):
i.document(Post)
i.create()
for i in ('test-index-1', 'test-index-2'):
settings = write_client.indices.get_settings(index=i)
assert settings[i]['settings']['index']['analysis'] == {
'analyzer': {
'my_analyzer': {
'type': 'custom',
'tokenizer': 'keyword'
}
}
}
| apache-2.0 |
sarthakmeh03/django | tests/sitemaps_tests/test_utils.py | 11 | 1832 | from django.contrib.sitemaps import (
SitemapNotFound, _get_sitemap_full_url, ping_google,
)
from django.core.exceptions import ImproperlyConfigured
from django.test import mock, modify_settings, override_settings
from django.utils.six.moves.urllib.parse import urlencode
from .base import SitemapTestsBase
class PingGoogleTests(SitemapTestsBase):
@mock.patch('django.contrib.sitemaps.urlopen')
def test_something(self, urlopen):
ping_google()
params = urlencode({'sitemap': 'http://example.com/sitemap-without-entries/sitemap.xml'})
full_url = 'https://www.google.com/webmasters/tools/ping?%s' % params
urlopen.assert_called_with(full_url)
def test_get_sitemap_full_url_global(self):
self.assertEqual(_get_sitemap_full_url(None), 'http://example.com/sitemap-without-entries/sitemap.xml')
@override_settings(ROOT_URLCONF='sitemaps_tests.urls.index_only')
def test_get_sitemap_full_url_index(self):
self.assertEqual(_get_sitemap_full_url(None), 'http://example.com/simple/index.xml')
@override_settings(ROOT_URLCONF='sitemaps_tests.urls.empty')
def test_get_sitemap_full_url_not_detected(self):
msg = "You didn't provide a sitemap_url, and the sitemap URL couldn't be auto-detected."
with self.assertRaisesMessage(SitemapNotFound, msg):
_get_sitemap_full_url(None)
def test_get_sitemap_full_url_exact_url(self):
self.assertEqual(_get_sitemap_full_url('/foo.xml'), 'http://example.com/foo.xml')
@modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'})
def test_get_sitemap_full_url_no_sites(self):
msg = "ping_google requires django.contrib.sites, which isn't installed."
with self.assertRaisesMessage(ImproperlyConfigured, msg):
_get_sitemap_full_url(None)
| bsd-3-clause |
sarthakmeh03/django | tests/update/models.py | 282 | 1196 | """
Tests for the update() queryset method that allows in-place, multi-object
updates.
"""
from django.db import models
from django.utils import six
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class DataPoint(models.Model):
name = models.CharField(max_length=20)
value = models.CharField(max_length=20)
another_value = models.CharField(max_length=20, blank=True)
def __str__(self):
return six.text_type(self.name)
@python_2_unicode_compatible
class RelatedPoint(models.Model):
name = models.CharField(max_length=20)
data = models.ForeignKey(DataPoint, models.CASCADE)
def __str__(self):
return six.text_type(self.name)
class A(models.Model):
x = models.IntegerField(default=10)
class B(models.Model):
a = models.ForeignKey(A, models.CASCADE)
y = models.IntegerField(default=10)
class C(models.Model):
y = models.IntegerField(default=10)
class D(C):
a = models.ForeignKey(A, models.CASCADE)
class Foo(models.Model):
target = models.CharField(max_length=10, unique=True)
class Bar(models.Model):
foo = models.ForeignKey(Foo, models.CASCADE, to_field='target')
| bsd-3-clause |
jesramirez/odoo | addons/hr_attendance/wizard/__init__.py | 375 | 1073 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import hr_attendance_error
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
OsirisSPS/osiris-sps | client/share/plugins/AF9A4C281070FDB0F34CF417CDB168AB38C8A388/lib/bsddb/test/test_queue.py | 75 | 4118 | """
TestCases for exercising a Queue DB.
"""
import os, string
from pprint import pprint
import unittest
from test_all import db, verbose, get_new_database_path
#----------------------------------------------------------------------
class SimpleQueueTestCase(unittest.TestCase):
def setUp(self):
self.filename = get_new_database_path()
def tearDown(self):
try:
os.remove(self.filename)
except os.error:
pass
def test01_basic(self):
# Basic Queue tests using the deprecated DBCursor.consume method.
if verbose:
print '\n', '-=' * 30
print "Running %s.test01_basic..." % self.__class__.__name__
d = db.DB()
d.set_re_len(40) # Queues must be fixed length
d.open(self.filename, db.DB_QUEUE, db.DB_CREATE)
if verbose:
print "before appends" + '-' * 30
pprint(d.stat())
for x in string.letters:
d.append(x * 40)
self.assertEqual(len(d), len(string.letters))
d.put(100, "some more data")
d.put(101, "and some more ")
d.put(75, "out of order")
d.put(1, "replacement data")
self.assertEqual(len(d), len(string.letters)+3)
if verbose:
print "before close" + '-' * 30
pprint(d.stat())
d.close()
del d
d = db.DB()
d.open(self.filename)
if verbose:
print "after open" + '-' * 30
pprint(d.stat())
# Test "txn" as a positional parameter
d.append("one more", None)
# Test "txn" as a keyword parameter
d.append("another one", txn=None)
c = d.cursor()
if verbose:
print "after append" + '-' * 30
pprint(d.stat())
rec = c.consume()
while rec:
if verbose:
print rec
rec = c.consume()
c.close()
if verbose:
print "after consume loop" + '-' * 30
pprint(d.stat())
self.assertEqual(len(d), 0, \
"if you see this message then you need to rebuild " \
"Berkeley DB 3.1.17 with the patch in patches/qam_stat.diff")
d.close()
def test02_basicPost32(self):
# Basic Queue tests using the new DB.consume method in DB 3.2+
# (No cursor needed)
if verbose:
print '\n', '-=' * 30
print "Running %s.test02_basicPost32..." % self.__class__.__name__
if db.version() < (3, 2, 0):
if verbose:
print "Test not run, DB not new enough..."
return
d = db.DB()
d.set_re_len(40) # Queues must be fixed length
d.open(self.filename, db.DB_QUEUE, db.DB_CREATE)
if verbose:
print "before appends" + '-' * 30
pprint(d.stat())
for x in string.letters:
d.append(x * 40)
self.assertEqual(len(d), len(string.letters))
d.put(100, "some more data")
d.put(101, "and some more ")
d.put(75, "out of order")
d.put(1, "replacement data")
self.assertEqual(len(d), len(string.letters)+3)
if verbose:
print "before close" + '-' * 30
pprint(d.stat())
d.close()
del d
d = db.DB()
d.open(self.filename)
#d.set_get_returns_none(true)
if verbose:
print "after open" + '-' * 30
pprint(d.stat())
d.append("one more")
if verbose:
print "after append" + '-' * 30
pprint(d.stat())
rec = d.consume()
while rec:
if verbose:
print rec
rec = d.consume()
if verbose:
print "after consume loop" + '-' * 30
pprint(d.stat())
d.close()
#----------------------------------------------------------------------
def test_suite():
return unittest.makeSuite(SimpleQueueTestCase)
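# --- Hedged usage sketch (not part of the original tests) ---
# The essential DB_QUEUE cycle exercised above, distilled: fixed-length
# records are appended, then consumed in FIFO order (DB.consume needs
# Berkeley DB 3.2+, as test02 notes). The path argument is illustrative.
def _example_queue_roundtrip(path):
    d = db.DB()
    d.set_re_len(40)                  # queue records are fixed length
    d.open(path, db.DB_QUEUE, db.DB_CREATE)
    d.append("x" * 40)
    rec = d.consume()                 # (record number, data) or None
    d.close()
    return rec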
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')
| gpl-3.0 |
stefan-caraiman/cloudbase-init-ci | argus/config/ci.py | 1 | 5208 | # Copyright 2016 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Config options available for the argus framework setup."""
from oslo_config import cfg
from argus.config import base as conf_base
RESOURCES_LINK = ('https://raw.githubusercontent.com/cloudbase/'
'cloudbase-init-ci/master/argus/resources')
IO_UPPER_TIMEOUT_MULTIPLIER = 3
DEFAULT_UPPER_TIMEOUT = 60 * 6
IO_UPPER_TIMEOUT = DEFAULT_UPPER_TIMEOUT * IO_UPPER_TIMEOUT_MULTIPLIER
class ArgusOptions(conf_base.Options):
"""Config options available for the argus framework setup."""
def __init__(self, config):
super(ArgusOptions, self).__init__(config, group="argus")
self._options = [
cfg.StrOpt(
"argus_log_file", default='argus.log', required=True,
help="Path to the file where argus will do the logging"),
cfg.StrOpt(
"resources", default=RESOURCES_LINK, required=True,
help="An url that holds the resources usually from "
"/argus/resources available on the web"),
cfg.BoolOpt("pause", default=False,
help="Pauses the CI after the installation process if "
"set on True."),
cfg.ListOpt(
"dns_nameservers", default=['8.8.8.8', '8.8.4.4'],
help="A comma separated list of DNS IPs, which will be used "
"for network connectivity inside the instance."),
cfg.StrOpt("output_directory", default=None,
help="The output directory path for where to save "
"instance details, if None is given, the current "
"working directory will be chosen."),
cfg.StrOpt("build", default="Beta", required=True,
help="The build version type of the Cloudbase-init "
"installer that will be used."),
cfg.StrOpt("arch", default="x64", required=True,
help="The architecture type that will be used for the "
"Cloudbase-init installer on the underlying "
"instance. A 'x64' option will be provided "
"for systems with an 64bit architecture, "
"and 'x86' for the 32bit systems."),
cfg.StrOpt("patch_install", default=None,
help="Path to a link or file on the disk containing a "
"zip file with an updated version of "
"Cloudbase-init."),
cfg.StrOpt("git_command", default=None,
help="Represents a git command that will be used to "
"checkout, clone or fetch a modified version of "
"Cloudbase-init, for replacing the present code "
"used by it."),
cfg.IntOpt("upper_timeout", default=DEFAULT_UPPER_TIMEOUT,
help="Upper timeout for each command sent to the "
"remote instance."),
cfg.IntOpt("io_upper_timeout", default=IO_UPPER_TIMEOUT,
help="Upper timeout for each command that reads or "
"writes to a file in the remote instance."),
cfg.IntOpt("retry_count", default=15,
help="The retry counts for a failing command."),
cfg.IntOpt("retry_delay", default=10,
help="The number of seconds between the retries "
" of a failed command."),
cfg.BoolOpt("log_each_scenario", default=False,
help="Create individual log files for each scenario."),
cfg.StrOpt(
"installer_root_url",
default="http://www.cloudbase.it/downloads",
help="Represents the web resource where the msi file can "
"be found"),
cfg.StrOpt(
"cbinit_git_repository",
default="https://github.com/openstack/cloudbase-init",
help="Cloudbase-init repository."),
]
def register(self):
"""Register the current options to the global ConfigOpts object."""
group = cfg.OptGroup(self.group_name, title='Argus Options')
self._config.register_group(group)
self._config.register_opts(self._options, group=group)
def list(self):
"""Return a list which contains all the available options."""
return self._options
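# --- Hedged usage sketch (not part of the original module) ---
# How these options are typically wired up; ConfigOpts comes from
# oslo.config, and the attribute reads below are illustrative only.
def _example_read_options():
    conf = cfg.ConfigOpts()
    opts = ArgusOptions(conf)
    opts.register()
    # After conf(...) has parsed argv/config files, values are plain
    # attribute reads, e.g. conf.argus.upper_timeout,
    # conf.argus.dns_nameservers, conf.argus.build.
    return conf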
| apache-2.0 |
bijandhakal/pattern | examples/03-en/03-parse.py | 21 | 2095 | import os, sys; sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", ".."))
from pattern.en import parse, pprint, tag
# The en module contains a fast regular expressions-based parser.
# A parser identifies words in a sentence, word part-of-speech tags (e.g. noun, verb)
# and groups of words that belong together (e.g. noun phrases).
# Common part-of-speech tags: NN (noun), VB (verb), JJ (adjective), PP (preposition).
# A tag can have a suffix, for example NNS (plural noun) or VBG (gerund verb).
# Overview of tags: http://www.clips.ua.ac.be/pages/mbsp-tags
s = "I eat pizza with a fork."
s = parse(s,
tokenize = True, # Tokenize the input, i.e. split punctuation from words.
tags = True, # Find part-of-speech tags.
chunks = True, # Find chunk tags, e.g. "the black cat" = NP = noun phrase.
relations = True, # Find relations between chunks.
lemmata = True, # Find word lemmata.
light = False)
# The light parameter determines how unknown words are handled.
# By default, unknown words are tagged NN and then improved with a set of rules.
# light=False uses Brill's lexical and contextual rules,
# light=True uses a set of custom rules that is less accurate but faster (5x-10x).
# The output is a string with each sentence on a new line.
# Words in a sentence have been annotated with tags,
# for example: fork/NN/I-NP/I-PNP
# NN = noun, NP = part of a noun phrase, PNP = part of a prepositional phrase.
print s
print
# Prettier output can be obtained with the pprint() command:
pprint(s)
print
# The string's split() method will (unless a split character is given),
# split into a list of sentences, where each sentence is a list of words
# and each word is a list with the word + its tags.
print s.split()
print
# The tag() command returns a list of (word, POS-tag)-tuples.
# With light=True, this is the fastest and simplest way to get an idea
# of a sentence's constituents:
s = "I eat pizza with a fork."
s = tag(s)
print s
for word, tag in s:
if tag == "NN": # Find all nouns in the input string.
print word
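print
# A small hedged extension (not in the original example): tally the
# part-of-speech tags in the tagged sentence above; plain dict counting,
# no extra pattern API assumed.
counts = {}
for w, pos in s:
    counts[pos] = counts.get(pos, 0) + 1
print counts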
| bsd-3-clause |
rlkelly/StockPy | Test/StockPy_HMM.py | 3 | 4878 | import pandas.io.data as web
import datetime as dt
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.widgets as wd
import sklearn.hmm as lrn
def stkHMM(lrndata, n_components):
model = lrn.GaussianHMM(n_components, covariance_type="tied", n_iter=20)
model.fit([lrndata])
hidden_states = model.predict(lrndata)
return [model, hidden_states]
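# --- Hedged usage sketch (not part of the original script) ---
# stkHMM() on a tiny synthetic (close, volume) series, illustrative only;
# states holds one hidden-state label per observation row.
def _example_stkHMM():
    fake = np.column_stack([np.random.rand(100) * 50,    # fake closes
                            np.random.rand(100) * 1e6])  # fake volumes
    model, states = stkHMM(fake, 3)
    return model, states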
def plot_data(stkname, fig, topplt, botplt, mlrnplt, sidplt):
#Get data from yahoo
#Calculate olling mean, mean and current value of stock
#Also calculate length of data
startdate = dt.date(2007, 1, 1)
stkdata = web.DataReader(stkname, 'yahoo', startdate)
stklen = len(stkdata.index)
enddate = dt.datetime.date(stkdata.index[stklen-1])
stkrolmean = pd.ewma(stkdata['Close'], 60)
stkmean = stkdata['Close'].mean(1).round(2)
stkcur = stkdata['Close'][stklen-1]
stkmax = stkdata['Close'].max(1)
stkmin = stkdata['Close'].min(1)
stkmchnlrndata = np.column_stack([stkdata['Close'], stkdata['Volume']])
ncomponents = 5
lrnmodel, hiddenstates = stkHMM(stkmchnlrndata, ncomponents)
nxtstateidx = lrnmodel.transmat_[hiddenstates[len(hiddenstates)-1], :]
nxtstateprob = np.amax(nxtstateidx)
nxtstate = np.argmax(nxtstateidx)
#Decoration for annotation of latest trading value
props = dict(boxstyle='round', facecolor='wheat', alpha=0.5)
#Clear all axes
topplt.cla()
botplt.cla()
mlrnplt.cla()
sidplt.cla()
#Top plot: Closing data, mean and rolling mean
topplt.plot(stkdata.index, stkdata['Close'], stkdata.index,
stkmean*np.ones(stklen), stkdata.index, stkrolmean,)
topplt.set_title('{} Stock Price from {} to {}'.format(stkname,
startdate, enddate))
topplt.grid(True)
topymin, topymax = topplt.get_ylim()
topplt.text(0.05, 0.95, 'Trading price on {}: ${}'.format(enddate,
stkcur), transform=topplt.transAxes, fontsize=14,
verticalalignment='top', bbox=props)
topplt.fill_between(stkdata.index, stkdata['Close'],
(topymin+0.01)*np.ones(stklen), alpha=0.5)
topplt.legend(('Close', 'Mean', 'EWMA'), 'lower right', shadow=True,
fancybox=True, fontsize=8)
#Bottom plot: Bar Graph, trading volume
botplt.bar(stkdata.index, stkdata['Volume'])
botplt.set_title('{} Trading Volume'.format(stkname))
#Machine Learn plot
for i in xrange(ncomponents):
idx = (hiddenstates == i)
mlrnplt.plot_date(stkdata.index[idx], stkdata['Close'][idx], 'o',
label='Hidden state: {}'.format(i))
mlrnplt.legend(loc='best', fancybox=True, shadow=True, fontsize=8)
mlrnplt.grid(True)
mlrnplt.text(0.99, 0.1,
'Next State: {} with {:.2f}% probability'
.format(nxtstate, nxtstateprob*100),
transform=mlrnplt.transAxes, fontsize=10,
horizontalalignment='right', verticalalignment='center',
bbox=props)
mlrnplt.set_title('Hidden Markov Model States')
#Side plot: histogram of 'high-low'
sidplt.hist(stkdata['High']-stkdata['Low'], bins=50, normed=True)
sidplt.set_title('Stock Value Variation')
sidplt.grid(True)
sidplt.text(0.70, 0.50, '{} Trading Value Stats\nMean:${}\nHighest:${}'
'\nLowest:${}'.format(stkname, stkmean, stkmax, stkmin),
transform=sidplt.transAxes, fontsize=12,
verticalalignment='top', horizontalalignment='center',
bbox=props)
#Remove xticklabels on top plot
plt.setp(topplt.get_xticklabels(), visible=False)
plt.setp(botplt.get_xticklabels(), visible=False)
plt.tight_layout()
return fig
def setup():
stklst = sorted(('ABB', 'AMZN', 'GE', 'GOOG', 'MSFT', 'YHOO', 'EBAY'))
#Setup figure
#Top, Bottom, Side with top and bottom plot sharing x axis
fig = plt.figure()
top = plt.subplot2grid((3, 3), (0, 0), colspan=2)
bot = plt.subplot2grid((3, 3), (1, 0), colspan=2, sharex=top)
mlrn = plt.subplot2grid((3, 3), (2, 0), colspan=2, sharex=top)
sid = plt.subplot2grid((3, 3), (0, 2), rowspan=3)
fig = plot_data(stklst[0], fig, top, bot, mlrn, sid)
#Setup for radio bottoms
axcolor = 'lightgoldenrodyellow'
ylen = len(stklst)/50.0
prop_radio = plt.axes([0.95, 1-ylen, 0.048, ylen], axisbg=axcolor)
radio = wd.RadioButtons(prop_radio, stklst)
return [fig, top, bot, sid, mlrn, radio]
if __name__ == "__main__":
fig, top, bot, sid, mlrn, radio = setup()
#Setup multicursor between top and bottom plot
multi = wd.MultiCursor(fig.canvas, (top, bot, mlrn), color='r', lw=2)
def stocksel(label):
plot_data(label, fig, top, bot, mlrn, sid)
radio.on_clicked(stocksel)
#Show plot
plt.show()
| gpl-2.0 |
shastah/spacewalk | client/tools/rhncfg/config_common/cfg_exceptions.py | 17 | 1790 | #
# Copyright (c) 2008--2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
class InvalidSession(Exception):
pass
class AuthenticationError(Exception):
pass
class ConfigChannelNotInRepo(Exception):
pass
class ConfigChannelAlreadyExistsError(Exception):
pass
class ConfigChannelNotEmptyError(Exception):
pass
class ConfigNotManaged(Exception):
pass
class ConfigurationError(Exception):
pass
class BinaryFileDiffError(Exception):
pass
class RepositoryFileError(Exception):
pass
class RepositoryLocalFileError(Exception):
pass
class RepositoryFileMissingError(Exception):
pass
class RepositoryFilePushError(RepositoryFileError):
pass
class ConfigFileTooLargeError(RepositoryFilePushError):
pass
class QuotaExceeded(RepositoryFilePushError):
pass
class RepositoryFileExistsError(RepositoryFilePushError):
"Attempted to add a file that already exists"
pass
class RepositoryFileVersionMismatchError(RepositoryFilePushError):
"File upload failed because the version changed underneath"
pass
class FileEntryIsDirectory(Exception):
pass
class DirectoryEntryIsFile(Exception):
pass
class UserNotFound(Exception):
pass
class GroupNotFound(Exception):
pass
| gpl-2.0 |
devs1991/test_edx_docmode | lms/djangoapps/verified_track_content/tests/test_forms.py | 28 | 1632 | """
Test for forms helpers.
"""
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from verified_track_content.forms import VerifiedTrackCourseForm
class TestVerifiedTrackCourseForm(SharedModuleStoreTestCase):
"""
Test form validation.
"""
FAKE_COURSE = 'edX/Test_Course/Run'
BAD_COURSE_KEY = 'bad_course_key'
@classmethod
def setUpClass(cls):
super(TestVerifiedTrackCourseForm, cls).setUpClass()
cls.course = CourseFactory.create()
def test_form_validation_success(self):
form_data = {
'course_key': unicode(self.course.id), 'verified_cohort_name': 'Verified Learners', 'enabled': True
}
form = VerifiedTrackCourseForm(data=form_data)
self.assertTrue(form.is_valid())
def test_form_validation_failure(self):
form_data = {'course_key': self.FAKE_COURSE, 'verified_cohort_name': 'Verified Learners', 'enabled': True}
form = VerifiedTrackCourseForm(data=form_data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors['course_key'],
['COURSE NOT FOUND. Please check that the course ID is valid.']
)
form_data = {'course_key': self.BAD_COURSE_KEY, 'verified_cohort_name': 'Verified Learners', 'enabled': True}
form = VerifiedTrackCourseForm(data=form_data)
self.assertFalse(form.is_valid())
self.assertEqual(
form.errors['course_key'],
['COURSE NOT FOUND. Please check that the course ID is valid.']
)
| agpl-3.0 |
priorknowledge/loom | loom/test/test_crossvalidate.py | 1 | 1686 | # Copyright (c) 2014, Salesforce.com, Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# - Neither the name of Salesforce.com nor the names of its contributors
# may be used to endorse or promote products derived from this
# software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import loom.crossvalidate
DATASET = 'bb-10-10-0.5'
def test_crossvalidate():
loom.crossvalidate.crossvalidate(DATASET, extra_passes=1.0)
| bsd-3-clause |
ravimalik20/CryptoLibSHA-256 | Testing/test.py | 1 | 3109 | #! /usr/bin/python
#/*==========================================================================================
#||
#|| Copyright (C) 2013 Ravi Malik
#||
#|| This program is free software; you can redistribute it and/or
#|| modify it under the terms of the GNU General Public License
#|| as published by the Free Software Foundation; either version 2
#|| of the License, or (at your option) any later version.
#||
#|| This program is distributed in the hope that it will be useful,
#|| but WITHOUT ANY WARRANTY; without even the implied warranty of
#|| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
#|| GNU General Public License for more details.
#||
#|| You should have received a copy of the GNU General Public License
#|| along with this program; if not, write to the Free Software
#|| Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#||
#||
#*==========================================================================================*/
import os
files_to_compile=['generate_hash_string','generate_hash_file']
# Compiling Files if not Compiled.
for i in files_to_compile:
if os.path.exists(i):
print "File %s.c already compiled."%i
else:
os.system("gcc -o %s %s.c"%(i,i))
# Declaring Test Cases:
test_hash_file={'TestCases/1':'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
'TestCases/2':'edeaaff3f1774ad2888673770c6d64097e391bc362d7d6fb34982ddf0efd18cb',
'TestCases/3':'7eb61810316c9f0c9a839b1dc619ac2097c8f808c37e4f6112b3bfa3dec401f4',
'TestCases/4':'62859ed9427511e49b0e4b23a1551d838d8314e8dad60a8a0ce61e98fb74d639',
'TestCases/5':'23cde71f9494f837f3e1ee7f6a5b8f646a73d30dcbb625250d8724e177431523',
}
test_hash_string={'':'e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855',
'abc':'ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad',
'message digest':'f7846f55cf23e14eebeab5b4e1550cad5b509e3348fbc4efa3a1413d393cb650',
'secure hash algorithm':'f30ceb2bb2829e79e4ca9753d35a8ecc00262d164cc077080295381cbd643f0d',
'SHA256 is considered to be safe':'6819d915c73f4d1e77e4e1b52d1fa0f9cf9beaead3939f15874bd988e2a23630',
'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq':'248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1',
'For this sample, this 63-byte string will be used as input data':'f08a78cbbaee082b052ae0708f32fa1e50c5c421aa772ba5dbb406a2ea6be342',
'This is exactly 64 bytes long, not counting the terminating byte':'ab64eff7e88e2e46165e29f2bce41826bd4c7b3552f6b382a9e7d3af47c245f8'
}
print "\n\nNow testing hash_file.h\n\n"
for i in test_hash_file:
if os.path.exists(i):
print "Test Case:%s"%i
print "Hash Generated is:"
os.system('./generate_hash_file "%s"'%i)
print "Hash Should be:"
print test_hash_file[i]
else:
print "File %s missing!!! Cannot operate test using it."%i
print "\n\nNow testing hash_string.h\n\n"
for i in test_hash_string:
print "Test Case:%s"%(i)
print "Hash Generated is:"
os.system('./generate_hash_string "%s"'%i)
print "Hash Should be:"
print test_hash_string[i]
print "\n\nTEST COMPLETE!!\n\n"
| gpl-2.0 |
tonytan4ever/canary | canary/tasks/taskflow/driver.py | 1 | 3783 | # Copyright (c) 2015 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo.config import cfg
from taskflow.jobs import backends as job_backends
from taskflow.persistence import backends as persistence_backends
from canary.tasks import base
from canary.openstack.common import log
LOG = log.getLogger(__name__)
TASKFLOW_OPTIONS = [
cfg.StrOpt('jobboard_backend_type', default='zookeeper',
help='Default jobboard backend type'),
cfg.StrOpt('persistent_backend_type', default='zookeeper',
help='Default jobboard persistent backend type'),
cfg.ListOpt('jobboard_backend_host', default=['localhost'],
help='Default jobboard backend server host'),
cfg.IntOpt('jobboard_backend_port', default=2181,
                       help='Default jobboard backend server port (e.g. amqp)'),
cfg.ListOpt('persistent_backend_host', default=['localhost'],
help='Default persistent backend server host'),
cfg.IntOpt('persistent_backend_port', default=2181,
                       help='Default persistent backend server port (e.g. amqp)'),
cfg.StrOpt('canary_worker_path',
default='/taskflow/jobs/canary_jobs',
help='Default Zookeeper path for canary jobs'),
cfg.StrOpt('canary_worker_jobboard',
default='canary_jobs',
help='Default jobboard name associated with canary worker jobs'),
]
TASKFLOW_GROUP = 'tasks:taskflow'
class TaskFlowDistributedTaskDriver(base.Driver):
"""TaskFlow distributed task Driver."""
def __init__(self, conf):
super(TaskFlowDistributedTaskDriver, self).__init__(conf)
conf.register_opts(TASKFLOW_OPTIONS, group=TASKFLOW_GROUP)
self.distributed_task_conf = conf[TASKFLOW_GROUP]
job_backends_hosts = ','.join(['%s:%s' % (
host, self.distributed_task_conf.jobboard_backend_port)
for host in
self.distributed_task_conf.jobboard_backend_host])
self.jobboard_backend_conf_worker = {
# This topic could become more complicated
"board": self.distributed_task_conf.jobboard_backend_type,
"hosts": job_backends_hosts,
"path": self.distributed_task_conf.canary_worker_path,
}
persistence_backends_hosts = ','.join(['%s:%s' % (
host, self.distributed_task_conf.jobboard_backend_port)
for host in
self.distributed_task_conf.jobboard_backend_host])
self.persistence_backend_conf = {
# This topic could become more complicated
"connection": self.distributed_task_conf.persistent_backend_type,
"hosts": persistence_backends_hosts,
}
def is_alive(self):
"""Health check for TaskFlow worker."""
return True
def persistence(self):
return persistence_backends.backend(
self.persistence_backend_conf.copy())
def job_board(self, conf, persistence, **kwargs):
return job_backends.backend(
self.distributed_task_conf.canary_worker_jobboard,
conf.copy(), persistence=persistence)
@property
def vendor_name(self):
"""storage name.
:returns 'TaskFlow'
"""
return 'TaskFlow'
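# --- Hedged usage sketch (not part of the original module) ---
# Wiring the driver to its persistence backend and jobboard; assumes a
# reachable ZooKeeper and a conf object already populated by oslo.config.
def _example_driver(conf):
    driver = TaskFlowDistributedTaskDriver(conf)
    persistence = driver.persistence()
    board = driver.job_board(driver.jobboard_backend_conf_worker,
                             persistence)
    return board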
| apache-2.0 |
guorendong/iridium-browser-ubuntu | third_party/mesa/src/scons/llvm.py | 12 | 8552 | """llvm
Tool-specific initialization for LLVM
"""
#
# Copyright (c) 2009 VMware, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import os
import os.path
import re
import sys
import distutils.version
import SCons.Errors
import SCons.Util
def generate(env):
env['llvm'] = False
try:
llvm_dir = os.environ['LLVM']
except KeyError:
# Do nothing -- use the system headers/libs
llvm_dir = None
else:
if not os.path.isdir(llvm_dir):
raise SCons.Errors.InternalError, "Specified LLVM directory not found"
if env['debug']:
llvm_subdir = 'Debug'
else:
llvm_subdir = 'Release'
llvm_bin_dir = os.path.join(llvm_dir, llvm_subdir, 'bin')
if not os.path.isdir(llvm_bin_dir):
llvm_bin_dir = os.path.join(llvm_dir, 'bin')
if not os.path.isdir(llvm_bin_dir):
raise SCons.Errors.InternalError, "LLVM binary directory not found"
env.PrependENVPath('PATH', llvm_bin_dir)
if env['platform'] == 'windows':
# XXX: There is no llvm-config on Windows, so assume a standard layout
if llvm_dir is None:
print 'scons: LLVM environment variable must be specified when building for windows'
return
# Try to determine the LLVM version from llvm/Config/config.h
llvm_config = os.path.join(llvm_dir, 'include/llvm/Config/config.h')
if not os.path.exists(llvm_config):
print 'scons: could not find %s' % llvm_config
return
llvm_version_re = re.compile(r'^#define PACKAGE_VERSION "([^"]*)"')
llvm_version = None
for line in open(llvm_config, 'rt'):
mo = llvm_version_re.match(line)
if mo:
llvm_version = mo.group(1)
llvm_version = distutils.version.LooseVersion(llvm_version)
break
if llvm_version is None:
print 'scons: could not determine the LLVM version from %s' % llvm_config
return
env.Prepend(CPPPATH = [os.path.join(llvm_dir, 'include')])
env.AppendUnique(CPPDEFINES = [
'__STDC_LIMIT_MACROS',
'__STDC_CONSTANT_MACROS',
'HAVE_STDINT_H',
])
env.Prepend(LIBPATH = [os.path.join(llvm_dir, 'lib')])
if llvm_version >= distutils.version.LooseVersion('3.0'):
# 3.0
env.Prepend(LIBS = [
'LLVMBitWriter', 'LLVMX86Disassembler', 'LLVMX86AsmParser',
'LLVMX86CodeGen', 'LLVMX86Desc', 'LLVMSelectionDAG',
'LLVMAsmPrinter', 'LLVMMCParser', 'LLVMX86AsmPrinter',
'LLVMX86Utils', 'LLVMX86Info', 'LLVMJIT',
'LLVMExecutionEngine', 'LLVMCodeGen', 'LLVMScalarOpts',
'LLVMInstCombine', 'LLVMTransformUtils', 'LLVMipa',
'LLVMAnalysis', 'LLVMTarget', 'LLVMMC', 'LLVMCore',
'LLVMSupport'
])
elif llvm_version >= distutils.version.LooseVersion('2.9'):
# 2.9
env.Prepend(LIBS = [
'LLVMObject', 'LLVMMCJIT', 'LLVMMCDisassembler',
'LLVMLinker', 'LLVMipo', 'LLVMInterpreter',
'LLVMInstrumentation', 'LLVMJIT', 'LLVMExecutionEngine',
'LLVMBitWriter', 'LLVMX86Disassembler', 'LLVMX86AsmParser',
'LLVMMCParser', 'LLVMX86AsmPrinter', 'LLVMX86CodeGen',
'LLVMSelectionDAG', 'LLVMX86Utils', 'LLVMX86Info', 'LLVMAsmPrinter',
'LLVMCodeGen', 'LLVMScalarOpts', 'LLVMInstCombine',
'LLVMTransformUtils', 'LLVMipa', 'LLVMAsmParser',
'LLVMArchive', 'LLVMBitReader', 'LLVMAnalysis', 'LLVMTarget',
'LLVMCore', 'LLVMMC', 'LLVMSupport',
])
elif llvm_version >= distutils.version.LooseVersion('2.7'):
# 2.7
env.Prepend(LIBS = [
'LLVMLinker', 'LLVMipo', 'LLVMInterpreter',
'LLVMInstrumentation', 'LLVMJIT', 'LLVMExecutionEngine',
'LLVMBitWriter', 'LLVMX86Disassembler', 'LLVMX86AsmParser',
'LLVMMCParser', 'LLVMX86AsmPrinter', 'LLVMX86CodeGen',
'LLVMSelectionDAG', 'LLVMX86Info', 'LLVMAsmPrinter',
'LLVMCodeGen', 'LLVMScalarOpts', 'LLVMInstCombine',
'LLVMTransformUtils', 'LLVMipa', 'LLVMAsmParser',
'LLVMArchive', 'LLVMBitReader', 'LLVMAnalysis', 'LLVMTarget',
'LLVMMC', 'LLVMCore', 'LLVMSupport', 'LLVMSystem',
])
else:
# 2.6
env.Prepend(LIBS = [
'LLVMX86AsmParser', 'LLVMX86AsmPrinter', 'LLVMX86CodeGen',
'LLVMX86Info', 'LLVMLinker', 'LLVMipo', 'LLVMInterpreter',
'LLVMInstrumentation', 'LLVMJIT', 'LLVMExecutionEngine',
'LLVMDebugger', 'LLVMBitWriter', 'LLVMAsmParser',
'LLVMArchive', 'LLVMBitReader', 'LLVMSelectionDAG',
'LLVMAsmPrinter', 'LLVMCodeGen', 'LLVMScalarOpts',
'LLVMTransformUtils', 'LLVMipa', 'LLVMAnalysis',
'LLVMTarget', 'LLVMMC', 'LLVMCore', 'LLVMSupport',
'LLVMSystem',
])
env.Append(LIBS = [
'imagehlp',
'psapi',
'shell32',
'advapi32'
])
if env['msvc']:
# Some of the LLVM C headers use the inline keyword without
# defining it.
env.Append(CPPDEFINES = [('inline', '__inline')])
if env['build'] in ('debug', 'checked'):
# LLVM libraries are static, build with /MT, and they
            # automatically link against LIBCMT. When we're doing a
# debug build we'll be linking against LIBCMTD, so disable
# that.
env.Append(LINKFLAGS = ['/nodefaultlib:LIBCMT'])
else:
if not env.Detect('llvm-config'):
            print 'scons: llvm-config script not found'
return
llvm_version = env.backtick('llvm-config --version').rstrip()
llvm_version = distutils.version.LooseVersion(llvm_version)
try:
# Treat --cppflags specially to prevent NDEBUG from disabling
# assertion failures in debug builds.
cppflags = env.ParseFlags('!llvm-config --cppflags')
try:
cppflags['CPPDEFINES'].remove('NDEBUG')
except ValueError:
pass
env.MergeFlags(cppflags)
components = ['engine', 'bitwriter', 'x86asmprinter']
if llvm_version >= distutils.version.LooseVersion('3.1'):
components.append('mcjit')
env.ParseConfig('llvm-config --libs ' + ' '.join(components))
env.ParseConfig('llvm-config --ldflags')
except OSError:
print 'scons: llvm-config version %s failed' % llvm_version
return
assert llvm_version is not None
env['llvm'] = True
print 'scons: Found LLVM version %s' % llvm_version
env['LLVM_VERSION'] = llvm_version
# Define HAVE_LLVM macro with the major/minor version number (e.g., 0x0206 for 2.6)
llvm_version_major = int(llvm_version.version[0])
llvm_version_minor = int(llvm_version.version[1])
llvm_version_hex = '0x%02x%02x' % (llvm_version_major, llvm_version_minor)
env.Prepend(CPPDEFINES = [('HAVE_LLVM', llvm_version_hex)])
def exists(env):
return True
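# --- Hedged usage sketch (not part of the original tool) ---
# Loading this tool from a SConstruct; names below are illustrative:
#   env = Environment(tools=['default', 'llvm'], toolpath=['scons'])
#   if env['llvm']:
#       print 'Building against LLVM', env['LLVM_VERSION']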
# vim:set ts=4 sw=4 et:
| bsd-3-clause |
makermade/arm_android-19_arm-linux-androideabi-4.8 | lib/python2.7/email/mime/message.py | 573 | 1286 | # Copyright (C) 2001-2006 Python Software Foundation
# Author: Barry Warsaw
# Contact: [email protected]
"""Class representing message/* MIME documents."""
__all__ = ['MIMEMessage']
from email import message
from email.mime.nonmultipart import MIMENonMultipart
class MIMEMessage(MIMENonMultipart):
"""Class representing message/* MIME documents."""
def __init__(self, _msg, _subtype='rfc822'):
"""Create a message/* type MIME document.
_msg is a message object and must be an instance of Message, or a
derived class of Message, otherwise a TypeError is raised.
Optional _subtype defines the subtype of the contained message. The
default is "rfc822" (this is defined by the MIME standard, even though
the term "rfc822" is technically outdated by RFC 2822).
"""
MIMENonMultipart.__init__(self, 'message', _subtype)
if not isinstance(_msg, message.Message):
raise TypeError('Argument is not an instance of Message')
# It's convenient to use this base class method. We need to do it
# this way or we'll get an exception
message.Message.attach(self, _msg)
# And be sure our default type is set correctly
self.set_default_type('message/rfc822')
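# --- Hedged usage sketch (not part of the original module) ---
# Wrapping an existing message as a message/rfc822 part of a multipart.
def _example_wrap():
    from email.mime.multipart import MIMEMultipart
    from email.mime.text import MIMEText
    inner = MIMEText('original body')
    inner['Subject'] = 'Original message'
    outer = MIMEMultipart()
    outer.attach(MIMEMessage(inner))  # attached as message/rfc822
    return outer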
| gpl-2.0 |
TUBAME/migration-tool | src/tubame.portability/resources/tubame-search-modules/src/migration/jbmst_search_java.py | 2 | 11779 | # -*- coding: utf-8 -*-
"""
jbmst_search_java.py
Created on 2013/06/28
Copyright (C) 2011-2013 Nippon Telegraph and Telephone Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
"""
Java search process.
Searches files with the .java extension.
Comment text is not searched.
If only Search Keyword1 is given, returns the results of searching with Search Keyword1.
If Search Keyword2 is also present, searches the file again with Search Keyword2 and returns those results.
[Environment] Python 2.7
"""
import re, sys,os
SINGLE_COMMENT = "SINGLE_COMMENT"
MULTI_COMMENT = "MULTI_COMMENT"
MULTI_COMMENT_END = "MULTI_COMMENT_END"
JAVA_SOURCE = "JAVA_SOURCE"
"""
Check whether the record is a single-line comment, a multi-line comment, or
searchable source, and return a status corresponding to the type of statement.
@param pLine: Record (line) of the file being searched
@return Type of sentence of the line being searched
"""
def isSingleComment(pLine,LINE_HEAD_COMMENT_STR = "//"):
JAVA_SOURCE_EXCLUSION_END_OF_LINE_COMMENT =None
m = re.search("^\s*"+LINE_HEAD_COMMENT_STR,pLine)
if m:
return SINGLE_COMMENT,JAVA_SOURCE_EXCLUSION_END_OF_LINE_COMMENT
else:
#support end of line comment
m = re.search("(\s*\w*)"+LINE_HEAD_COMMENT_STR,pLine)
if m:
m = re.search("[^"+LINE_HEAD_COMMENT_STR+"]*",pLine)
if m != None:
JAVA_SOURCE_EXCLUSION_END_OF_LINE_COMMENT = m.group()
m = re.search("^\s*/\*",pLine)
if m:
m = re.search("\*/\s*$",pLine)
if m:
return SINGLE_COMMENT,JAVA_SOURCE_EXCLUSION_END_OF_LINE_COMMENT
else:
return MULTI_COMMENT,JAVA_SOURCE_EXCLUSION_END_OF_LINE_COMMENT
else:
#support end of line comment
m = re.search("(\s*\w*)/\*.*\*/$",pLine)
if m:
result = m.group()
if result != None:
index = len(result)
JAVA_SOURCE_EXCLUSION_END_OF_LINE_COMMENT =pLine[:-index]
return JAVA_SOURCE,JAVA_SOURCE_EXCLUSION_END_OF_LINE_COMMENT
"""
Determine whether the search record ends a multi-line comment statement.
@param pLine: Record (line) of the file being searched
@return Type of sentence of the line being searched
"""
def isMultiCommentEnd(pLine):
m = re.search("\*/\s*$",pLine)
if m:
return MULTI_COMMENT_END
return MULTI_COMMENT
"""
This function is not used.
@param pSeachKey
@param pLine
@return
"""
def searchByLine(pSeachKey,pLine):
m = re.search(pSeachKey,pLine)
if m:
return "FOUND"
return "NOT_FOUND"
"""
If a line is not comment text, search it for Search Keyword1 or Search Keyword2.
The line numbers of matching lines are collected into the search result list.
@param pSearchFile File to be searched
@param pSearchStr Search Keyword1 or Search Keyword2
@return List of matching line numbers
"""
def search_open_file(pSearchFile,pSearchStr,isFirstMatchExit=False,LINE_HEAD_COMMENT_STR = "//",isSemicolonParser=False,pSearchStr2="",pFlag=0):
current_line_status = "NONE"
line_count = 0
line_count_list = []
searchTargetBody = ""
searchTargetBodyIncludedComment= ""
# Open the search files
f = open(pSearchFile, "r")
for line in f:
searchTargetBodyIncludedComment += line
line_count += 1
# Determine the type of sentence
line_status ,JAVA_SOURCE_EXCLUSION_END_OF_LINE_COMMENT= isSingleComment(line,LINE_HEAD_COMMENT_STR)
# Distributes the processing according to the type of sentence
if ( current_line_status == MULTI_COMMENT):
# If multi-sentence comment
if (isMultiCommentEnd(line) == MULTI_COMMENT_END):
# If the multi-comment statement is completed
current_line_status = JAVA_SOURCE
else:
if (line_status == JAVA_SOURCE):
# If this is not the comment text
# suuport end of line comment
if JAVA_SOURCE_EXCLUSION_END_OF_LINE_COMMENT != None:
line = JAVA_SOURCE_EXCLUSION_END_OF_LINE_COMMENT
                # Search treating everything up to the semicolon as a single token
if isSemicolonParser == True:
searchTargetBody += line
if hasEndSemicolon(searchTargetBody) == True:
find_result = findByKeywords(pSearchStr,pSearchStr2,LINE_HEAD_COMMENT_STR,searchTargetBody,searchTargetBodyIncludedComment.rstrip(),line_count,pFlag)
line_count_list += find_result
searchTargetBodyIncludedComment = ""
searchTargetBody = ""
else:
m = findAll(pSearchStr,line,pFlag)
if m:
for hit in m:
line_count_list.append(line_count)
if isFirstMatchExit == True:
f.close()
return line_count_list
current_line_status = line_status
f.close()
return line_count_list
def findAll(pSearchStr,pLine,pFlag=0):
return re.findall(pSearchStr,pLine,pFlag)
def hasEndSemicolon(pTarget):
if re.search(".*;\s*$",pTarget):
return True
return False
def hasEndBackSlash(pTarget):
if re.search(".*\\s*$",pTarget):
return True
return False
# def getIndexBaseEndofLine(body,match):
# print 'body:',body
# tokens = body.split(';')
# if len(tokens) != 0:
# if not match.end() +1 > len(body):
# match_after_line = body[match.end()+1:]
# print 'match_after_line' ,match_after_line
# m = match_after_line.split(';')
# if m:
# return m[0].count('\n')
# else:
# return 0
def getMatch(pSearchStr2,append_line):
    # Materialize the iterator so it can be counted and reused.
    match = list(re.finditer(pSearchStr2,append_line))
    return len(match),match
def findByKeywords(pSearchStr1,pSearchStr2,LINE_HEAD_COMMENT_STR,pSearchTarget,pSearchTargetIncludedComment,pline_count,pFlag=0):
result_list = []
#print pSearchTarget
#print pSearchStr1
    # Skip processing when the target with comments removed does not match
m= re.findall(pSearchStr1,pSearchTarget.replace('\n',''),pFlag)
if len(m) == 0:
return result_list
if pSearchStr2 == "":
searchKey =pSearchStr1
else:
searchKey =pSearchStr2
lines = pSearchTargetIncludedComment.split('\n')
line_length = len(lines)
line_count = 0
current_line_status = "NONE"
firstMatch = False
append_line = ""
match_len = 0
for line in lines:
line_count += 1
line_status ,JAVA_SOURCE_EXCLUSION_END_OF_LINE_COMMENT= isSingleComment(line,LINE_HEAD_COMMENT_STR)
if current_line_status == MULTI_COMMENT:
# If multi-sentence comment
if isMultiCommentEnd(line) == MULTI_COMMENT_END:
# If the multi-comment statement is completed
current_line_status = JAVA_SOURCE
else:
if line_status == JAVA_SOURCE:
# If this is not the comment text
                # support end-of-line comment
if JAVA_SOURCE_EXCLUSION_END_OF_LINE_COMMENT != None:
line = JAVA_SOURCE_EXCLUSION_END_OF_LINE_COMMENT
append_line += line
if firstMatch == False:
match = re.finditer(searchKey,append_line,pFlag)
i = 0
for m in match:
result_list.append(pline_count - (line_length -line_count))
i += 1
firstMatch = True
if i !=0:
match_len = i
else:
match = re.finditer(searchKey,append_line,pFlag)
i = 0
for m in match:
if i >= match_len:
result_list.append(pline_count - (line_length -line_count))
i = i + 1
if i > 0:
match_len = i
current_line_status = line_status
return result_list
def searchInterfaceMethod(pSearchFile,LINE_HEAD_COMMENT_STR="//"):
current_line_status = "NONE"
line_count = 0
methodname_list = []
# Open the search files
f = open(pSearchFile, "r")
for line in f:
line_count += 1
# Determine the type of sentence
line_status,JAVA_SOURCE_EXCLUSION_END_OF_LINE_COMMENT= isSingleComment(line,LINE_HEAD_COMMENT_STR)
# Distributes the processing according to the type of sentence
if ( current_line_status == MULTI_COMMENT):
# If multi-sentence comment
if (isMultiCommentEnd(line) == MULTI_COMMENT_END):
# If the multi-comment statement is completed
current_line_status = JAVA_SOURCE
else:
if (line_status == JAVA_SOURCE):
# If this is not the comment text
                # support end-of-line comment
if JAVA_SOURCE_EXCLUSION_END_OF_LINE_COMMENT != None:
line = JAVA_SOURCE_EXCLUSION_END_OF_LINE_COMMENT
m = re.search("^(?!.*\s+(static|new)\s+).*$",line)
if m != None:
m =re.search("\w+\s+(\w+)\s*\(.*",line)
if m:
method_name=m.group(1)
methodname_list.append(method_name)
f.close()
return methodname_list
"""
If only Search Keyword1 is given, returns the results of searching with Search Keyword1.
If Search Keyword2 is also present, searches the file again with Search Keyword2 and returns those results.
@param pSearchFile File to be searched
@param pSearchStr1 Search Keyword1
@param pSearchStr2 Search Keyword2
@return List of matching line numbers
"""
def searchByFile(pSearchFile,pSearchStr1,pSearchStr2,isFirstMatchExit=False,LINE_HEAD_COMMENT_STR = "//",IS_SEMICOLON_PARSER=False,FLAG=0):
result_hit_count_list = []
if pSearchStr2 != "" and IS_SEMICOLON_PARSER == True:
        # Only in the SEMICOLON_PARSER case, search directly with the second keyword.
return search_open_file(pSearchFile,pSearchStr1,True,LINE_HEAD_COMMENT_STR,IS_SEMICOLON_PARSER,pSearchStr2,FLAG)
else:
result_hit_count_list = search_open_file(pSearchFile,pSearchStr1,False,LINE_HEAD_COMMENT_STR,IS_SEMICOLON_PARSER,"",FLAG)
hit_total_cnt = len(result_hit_count_list)
if hit_total_cnt!= 0 and pSearchStr2 != "":
result_hit_count_list = search_open_file(pSearchFile,pSearchStr2,isFirstMatchExit,LINE_HEAD_COMMENT_STR,IS_SEMICOLON_PARSER,"",FLAG)
return result_hit_count_list
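# --- Hedged usage sketch (not part of the original module) ---
# Two-keyword search over a Java file, skipping // and /* */ comments;
# the file name and keywords below are illustrative only.
def _example_search():
    hits = searchByFile("Sample.java", r"javax\.ejb", r"@Stateless")
    return hits  # 1-based line numbers of the matches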
def wrapSearchByFile(param):
try:
return (searchByFile(*param),param[0])
except Exception,e:
raise Exception, '%s , searchTargetFile = %s' % (e,param[0])
def wrapSearchOpenFile(param):
try:
return (search_open_file(*param),param[0])
except Exception,e:
        raise Exception, '%s , searchTargetFile = %s' % (e,param[0])
| apache-2.0 |
jiegec/gnuradio | gr-wxgui/python/wxgui/waterfallsink2.py | 92 | 1483 | #
# Copyright 2008,2009 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr
p = gr.prefs()
style = p.get_string('wxgui', 'style', 'auto')
if style == 'auto' or style == 'gl':
try:
import wx.glcanvas
from OpenGL.GL import *
from waterfallsink_gl import waterfall_sink_f, waterfall_sink_c
except ImportError:
if style == 'gl':
raise RuntimeError("Unable to import OpenGL. Are Python wrappers for OpenGL installed?")
else:
            # Fall back to non-GL sinks
from waterfallsink_nongl import waterfall_sink_f, waterfall_sink_c
elif style == 'nongl':
from waterfallsink_nongl import waterfall_sink_f, waterfall_sink_c
else:
raise RuntimeError("Unknown wxgui style")
| gpl-3.0 |
matsumoto-r/synciga | src/third_party/jsoncpp/source/makerelease.py | 169 | 15483 | """Tag the sandbox for release, make source and doc tarballs.
Requires Python 2.6
Example of invocation (use to test the script):
python makerelease.py --platform=msvc6,msvc71,msvc80,msvc90,mingw -ublep 0.6.0 0.7.0-dev
When testing this script:
python makerelease.py --force --retag --platform=msvc6,msvc71,msvc80,mingw -ublep test-0.6.0 test-0.6.1-dev
Example of invocation when doing a release:
python makerelease.py 0.5.0 0.6.0-dev
"""
import os.path
import subprocess
import sys
import doxybuild
import subprocess
import xml.etree.ElementTree as ElementTree
import shutil
import urllib2
import tempfile
import os
import time
from devtools import antglob, fixeol, tarball
import amalgamate
SVN_ROOT = 'https://jsoncpp.svn.sourceforge.net/svnroot/jsoncpp/'
SVN_TAG_ROOT = SVN_ROOT + 'tags/jsoncpp'
SCONS_LOCAL_URL = 'http://sourceforge.net/projects/scons/files/scons-local/1.2.0/scons-local-1.2.0.tar.gz/download'
SOURCEFORGE_PROJECT = 'jsoncpp'
def set_version( version ):
with open('version','wb') as f:
f.write( version.strip() )
def rmdir_if_exist( dir_path ):
if os.path.isdir( dir_path ):
shutil.rmtree( dir_path )
class SVNError(Exception):
pass
def svn_command( command, *args ):
cmd = ['svn', '--non-interactive', command] + list(args)
print 'Running:', ' '.join( cmd )
process = subprocess.Popen( cmd,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT )
stdout = process.communicate()[0]
if process.returncode:
error = SVNError( 'SVN command failed:\n' + stdout )
error.returncode = process.returncode
raise error
return stdout
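# --- Hedged usage sketch (not part of the original script) ---
def _example_svn_info():
    # Returns the command's stdout, or raises SVNError on failure.
    return svn_command('info', SVN_ROOT)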
def check_no_pending_commit():
"""Checks that there is no pending commit in the sandbox."""
stdout = svn_command( 'status', '--xml' )
etree = ElementTree.fromstring( stdout )
msg = []
for entry in etree.getiterator( 'entry' ):
path = entry.get('path')
status = entry.find('wc-status').get('item')
if status != 'unversioned' and path != 'version':
msg.append( 'File "%s" has pending change (status="%s")' % (path, status) )
if msg:
msg.insert(0, 'Pending change to commit found in sandbox. Commit them first!' )
return '\n'.join( msg )
def svn_join_url( base_url, suffix ):
if not base_url.endswith('/'):
base_url += '/'
if suffix.startswith('/'):
suffix = suffix[1:]
return base_url + suffix
def svn_check_if_tag_exist( tag_url ):
"""Checks if a tag exist.
Returns: True if the tag exist, False otherwise.
"""
try:
list_stdout = svn_command( 'list', tag_url )
except SVNError, e:
if e.returncode != 1 or not str(e).find('tag_url'):
raise e
# otherwise ignore error, meaning tag does not exist
return False
return True
def svn_commit( message ):
"""Commit the sandbox, providing the specified comment.
"""
svn_command( 'ci', '-m', message )
def svn_tag_sandbox( tag_url, message ):
"""Makes a tag based on the sandbox revisions.
"""
svn_command( 'copy', '-m', message, '.', tag_url )
def svn_remove_tag( tag_url, message ):
"""Removes an existing tag.
"""
svn_command( 'delete', '-m', message, tag_url )
def svn_export( tag_url, export_dir ):
"""Exports the tag_url revision to export_dir.
Target directory, including its parent is created if it does not exist.
If the directory export_dir exist, it is deleted before export proceed.
"""
rmdir_if_exist( export_dir )
svn_command( 'export', tag_url, export_dir )
def fix_sources_eol( dist_dir ):
"""Set file EOL for tarball distribution.
"""
print 'Preparing exported source file EOL for distribution...'
prune_dirs = antglob.prune_dirs + 'scons-local* ./build* ./libs ./dist'
win_sources = antglob.glob( dist_dir,
includes = '**/*.sln **/*.vcproj',
prune_dirs = prune_dirs )
unix_sources = antglob.glob( dist_dir,
includes = '''**/*.h **/*.cpp **/*.inl **/*.txt **/*.dox **/*.py **/*.html **/*.in
sconscript *.json *.expected AUTHORS LICENSE''',
excludes = antglob.default_excludes + 'scons.py sconsign.py scons-*',
prune_dirs = prune_dirs )
for path in win_sources:
fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\r\n' )
for path in unix_sources:
fixeol.fix_source_eol( path, is_dry_run = False, verbose = True, eol = '\n' )
def download( url, target_path ):
"""Download file represented by url to target_path.
"""
f = urllib2.urlopen( url )
try:
data = f.read()
finally:
f.close()
fout = open( target_path, 'wb' )
try:
fout.write( data )
finally:
fout.close()
def check_compile( distcheck_top_dir, platform ):
cmd = [sys.executable, 'scons.py', 'platform=%s' % platform, 'check']
print 'Running:', ' '.join( cmd )
log_path = os.path.join( distcheck_top_dir, 'build-%s.log' % platform )
flog = open( log_path, 'wb' )
try:
process = subprocess.Popen( cmd,
stdout=flog,
stderr=subprocess.STDOUT,
cwd=distcheck_top_dir )
stdout = process.communicate()[0]
status = (process.returncode == 0)
finally:
flog.close()
return (status, log_path)
def write_tempfile( content, **kwargs ):
fd, path = tempfile.mkstemp( **kwargs )
f = os.fdopen( fd, 'wt' )
try:
f.write( content )
finally:
f.close()
return path
class SFTPError(Exception):
pass
def run_sftp_batch( userhost, sftp, batch, retry=0 ):
path = write_tempfile( batch, suffix='.sftp', text=True )
# psftp -agent -C blep,[email protected] -batch -b batch.sftp -bc
cmd = [sftp, '-agent', '-C', '-batch', '-b', path, '-bc', userhost]
error = None
for retry_index in xrange(0, max(1,retry)):
heading = retry_index == 0 and 'Running:' or 'Retrying:'
print heading, ' '.join( cmd )
process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
stdout = process.communicate()[0]
if process.returncode != 0:
error = SFTPError( 'SFTP batch failed:\n' + stdout )
else:
break
if error:
raise error
return stdout
def sourceforge_web_synchro( sourceforge_project, doc_dir,
user=None, sftp='sftp' ):
"""Notes: does not synchronize sub-directory of doc-dir.
"""
userhost = '%s,%[email protected]' % (user, sourceforge_project)
stdout = run_sftp_batch( userhost, sftp, """
cd htdocs
dir
exit
""" )
existing_paths = set()
collect = 0
for line in stdout.split('\n'):
line = line.strip()
if not collect and line.endswith('> dir'):
collect = True
elif collect and line.endswith('> exit'):
break
elif collect == 1:
collect = 2
elif collect == 2:
path = line.strip().split()[-1:]
if path and path[0] not in ('.', '..'):
existing_paths.add( path[0] )
upload_paths = set( [os.path.basename(p) for p in antglob.glob( doc_dir )] )
paths_to_remove = existing_paths - upload_paths
if paths_to_remove:
print 'Removing the following file from web:'
print '\n'.join( paths_to_remove )
stdout = run_sftp_batch( userhost, sftp, """cd htdocs
rm %s
exit""" % ' '.join(paths_to_remove) )
print 'Uploading %d files:' % len(upload_paths)
batch_size = 10
upload_paths = list(upload_paths)
start_time = time.time()
for index in xrange(0,len(upload_paths),batch_size):
paths = upload_paths[index:index+batch_size]
file_per_sec = (time.time() - start_time) / (index+1)
remaining_files = len(upload_paths) - index
remaining_sec = file_per_sec * remaining_files
print '%d/%d, ETA=%.1fs' % (index+1, len(upload_paths), remaining_sec)
run_sftp_batch( userhost, sftp, """cd htdocs
lcd %s
mput %s
exit""" % (doc_dir, ' '.join(paths) ), retry=3 )
def sourceforge_release_tarball( sourceforge_project, paths, user=None, sftp='sftp' ):
userhost = '%s,%[email protected]' % (user, sourceforge_project)
run_sftp_batch( userhost, sftp, """
mput %s
exit
""" % (' '.join(paths),) )
def main():
usage = """%prog release_version next_dev_version
Update 'version' file to release_version and commit.
Generates the document tarball.
Tags the sandbox revision with release_version.
Update 'version' file to next_dev_version and commit.
Performs an svn export of the release version tag, and builds a source tarball.
Must be started in the project top directory.
Warning: --force should only be used when developing/testing the release script.
"""
from optparse import OptionParser
parser = OptionParser(usage=usage)
parser.allow_interspersed_args = False
parser.add_option('--dot', dest="dot_path", action='store', default=doxybuild.find_program('dot'),
help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""")
parser.add_option('--doxygen', dest="doxygen_path", action='store', default=doxybuild.find_program('doxygen'),
help="""Path to Doxygen tool. [Default: %default]""")
parser.add_option('--force', dest="ignore_pending_commit", action='store_true', default=False,
help="""Ignore pending commit. [Default: %default]""")
parser.add_option('--retag', dest="retag_release", action='store_true', default=False,
help="""Overwrite release existing tag if it exist. [Default: %default]""")
parser.add_option('-p', '--platforms', dest="platforms", action='store', default='',
help="""Comma separated list of platform passed to scons for build check.""")
parser.add_option('--no-test', dest="no_test", action='store_true', default=False,
help="""Skips build check.""")
parser.add_option('--no-web', dest="no_web", action='store_true', default=False,
help="""Do not update web site.""")
parser.add_option('-u', '--upload-user', dest="user", action='store',
help="""Sourceforge user for SFTP documentation upload.""")
parser.add_option('--sftp', dest='sftp', action='store', default=doxybuild.find_program('psftp', 'sftp'),
help="""Path of the SFTP compatible binary used to upload the documentation.""")
parser.enable_interspersed_args()
options, args = parser.parse_args()
if len(args) != 2:
parser.error( 'release_version missing on command-line.' )
release_version = args[0]
next_version = args[1]
if not options.platforms and not options.no_test:
parser.error( 'You must specify either --platform or --no-test option.' )
if options.ignore_pending_commit:
msg = ''
else:
msg = check_no_pending_commit()
if not msg:
print 'Setting version to', release_version
set_version( release_version )
svn_commit( 'Release ' + release_version )
tag_url = svn_join_url( SVN_TAG_ROOT, release_version )
if svn_check_if_tag_exist( tag_url ):
if options.retag_release:
svn_remove_tag( tag_url, 'Overwriting previous tag' )
else:
print 'Aborting, tag %s already exist. Use --retag to overwrite it!' % tag_url
sys.exit( 1 )
svn_tag_sandbox( tag_url, 'Release ' + release_version )
print 'Generated doxygen document...'
## doc_dirname = r'jsoncpp-api-html-0.5.0'
## doc_tarball_path = r'e:\prg\vc\Lib\jsoncpp-trunk\dist\jsoncpp-api-html-0.5.0.tar.gz'
doc_tarball_path, doc_dirname = doxybuild.build_doc( options, make_release=True )
doc_distcheck_dir = 'dist/doccheck'
tarball.decompress( doc_tarball_path, doc_distcheck_dir )
doc_distcheck_top_dir = os.path.join( doc_distcheck_dir, doc_dirname )
export_dir = 'dist/export'
svn_export( tag_url, export_dir )
fix_sources_eol( export_dir )
source_dir = 'jsoncpp-src-' + release_version
source_tarball_path = 'dist/%s.tar.gz' % source_dir
print 'Generating source tarball to', source_tarball_path
tarball.make_tarball( source_tarball_path, [export_dir], export_dir, prefix_dir=source_dir )
amalgamation_tarball_path = 'dist/%s-amalgamation.tar.gz' % source_dir
print 'Generating amalgamation source tarball to', amalgamation_tarball_path
amalgamation_dir = 'dist/amalgamation'
amalgamate.amalgamate_source( export_dir, '%s/jsoncpp.cpp' % amalgamation_dir, 'json/json.h' )
amalgamation_source_dir = 'jsoncpp-src-amalgamation' + release_version
tarball.make_tarball( amalgamation_tarball_path, [amalgamation_dir],
amalgamation_dir, prefix_dir=amalgamation_source_dir )
# Decompress source tarball, download and install scons-local
distcheck_dir = 'dist/distcheck'
distcheck_top_dir = distcheck_dir + '/' + source_dir
print 'Decompressing source tarball to', distcheck_dir
rmdir_if_exist( distcheck_dir )
tarball.decompress( source_tarball_path, distcheck_dir )
scons_local_path = 'dist/scons-local.tar.gz'
print 'Downloading scons-local to', scons_local_path
download( SCONS_LOCAL_URL, scons_local_path )
print 'Decompressing scons-local to', distcheck_top_dir
tarball.decompress( scons_local_path, distcheck_top_dir )
# Run compilation
print 'Compiling decompressed tarball'
all_build_status = True
for platform in options.platforms.split(','):
print 'Testing platform:', platform
build_status, log_path = check_compile( distcheck_top_dir, platform )
print 'see build log:', log_path
print build_status and '=> ok' or '=> FAILED'
all_build_status = all_build_status and build_status
if not build_status:
print 'Testing failed on at least one platform, aborting...'
svn_remove_tag( tag_url, 'Removing tag due to failed testing' )
sys.exit(1)
if options.user:
if not options.no_web:
print 'Uploading documentation using user', options.user
sourceforge_web_synchro( SOURCEFORGE_PROJECT, doc_distcheck_top_dir, user=options.user, sftp=options.sftp )
print 'Completed documentation upload'
print 'Uploading source and documentation tarballs for release using user', options.user
sourceforge_release_tarball( SOURCEFORGE_PROJECT,
[source_tarball_path, doc_tarball_path],
user=options.user, sftp=options.sftp )
print 'Source and doc release tarballs uploaded'
else:
print 'No upload user specified. Web site and download tarbal were not uploaded.'
print 'Tarball can be found at:', doc_tarball_path
# Set next version number and commit
set_version( next_version )
svn_commit( 'Released ' + release_version )
else:
sys.stderr.write( msg + '\n' )
if __name__ == '__main__':
main()
| bsd-3-clause |
bop/foundation | lib/python2.7/site-packages/django/http/__init__.py | 41 | 30763 | from __future__ import absolute_import
import datetime
import os
import re
import sys
import time
import warnings
from pprint import pformat
from urllib import urlencode, quote
from urlparse import urljoin, urlparse
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
try:
# The mod_python version is more efficient, so try importing it first.
from mod_python.util import parse_qsl
except ImportError:
try:
# Python 2.6 and greater
from urlparse import parse_qsl
except ImportError:
# Python 2.5. Works on Python 2.6 but raises PendingDeprecationWarning
from cgi import parse_qsl
import Cookie
# httponly support exists in Python 2.6's Cookie library,
# but not in Python 2.5.
_morsel_supports_httponly = 'httponly' in Cookie.Morsel._reserved
# Some versions of Python 2.7 and later won't need this encoding bug fix:
_cookie_encodes_correctly = Cookie.SimpleCookie().value_encode(';') == (';', '"\\073"')
# See ticket #13007, http://bugs.python.org/issue2193 and http://trac.edgewall.org/ticket/2256
_tc = Cookie.SimpleCookie()
try:
_tc.load('foo:bar=1')
_cookie_allows_colon_in_names = True
except Cookie.CookieError:
_cookie_allows_colon_in_names = False
if _morsel_supports_httponly and _cookie_encodes_correctly and _cookie_allows_colon_in_names:
SimpleCookie = Cookie.SimpleCookie
else:
if not _morsel_supports_httponly:
class Morsel(Cookie.Morsel):
def __setitem__(self, K, V):
K = K.lower()
if K == "httponly":
if V:
# The superclass rejects httponly as a key,
# so we jump to the grandparent.
super(Cookie.Morsel, self).__setitem__(K, V)
else:
super(Morsel, self).__setitem__(K, V)
def OutputString(self, attrs=None):
output = super(Morsel, self).OutputString(attrs)
if "httponly" in self:
output += "; httponly"
return output
else:
Morsel = Cookie.Morsel
class SimpleCookie(Cookie.SimpleCookie):
if not _cookie_encodes_correctly:
def value_encode(self, val):
# Some browsers do not support quoted-string from RFC 2109,
# including some versions of Safari and Internet Explorer.
# These browsers split on ';', and some versions of Safari
            # are known to split on ', '. Therefore, we encode ';' and ','.
# SimpleCookie already does the hard work of encoding and decoding.
# It uses octal sequences like '\\012' for newline etc.
# and non-ASCII chars. We just make use of this mechanism, to
# avoid introducing two encoding schemes which would be confusing
# and especially awkward for javascript.
# NB, contrary to Python docs, value_encode returns a tuple containing
# (real val, encoded_val)
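            # Illustrative behaviour (a sketch, not executed here):
            #   value_encode('a;b') -> ('a;b', '"a\\073b"')
            # so browsers that split on ';' still receive a single cookie value.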
val, encoded = super(SimpleCookie, self).value_encode(val)
            encoded = encoded.replace(";", "\\073").replace(",", "\\054")
# If encoded now contains any quoted chars, we need double quotes
# around the whole string.
if "\\" in encoded and not encoded.startswith('"'):
encoded = '"' + encoded + '"'
return val, encoded
if not _cookie_allows_colon_in_names or not _morsel_supports_httponly:
def load(self, rawdata):
self.bad_cookies = set()
super(SimpleCookie, self).load(rawdata)
for key in self.bad_cookies:
del self[key]
        # Override the private __set() method
        # (needed for using our Morsel, and for laxness with CookieError).
def _BaseCookie__set(self, key, real_value, coded_value):
try:
M = self.get(key, Morsel())
M.set(key, real_value, coded_value)
dict.__setitem__(self, key, M)
except Cookie.CookieError:
self.bad_cookies.add(key)
dict.__setitem__(self, key, Cookie.Morsel())
class CompatCookie(SimpleCookie):
def __init__(self, *args, **kwargs):
super(CompatCookie, self).__init__(*args, **kwargs)
warnings.warn("CompatCookie is deprecated. Use django.http.SimpleCookie instead.", DeprecationWarning)
from django.conf import settings
from django.core import signing
from django.core.exceptions import ImproperlyConfigured, SuspiciousOperation
from django.core.files import uploadhandler
from django.http.multipartparser import MultiPartParser
from django.http.utils import *
from django.utils.datastructures import MultiValueDict, ImmutableList
from django.utils.encoding import smart_str, iri_to_uri, force_unicode
from django.utils.http import cookie_date
from django.utils import timezone
RESERVED_CHARS="!*'();:@&=+$,/?%#[]"
absolute_http_url_re = re.compile(r"^https?://", re.I)
host_validation_re = re.compile(r"^([a-z0-9.-]+|\[[a-f0-9]*:[a-f0-9:]+\])(:\d+)?$")
class Http404(Exception):
pass
RAISE_ERROR = object()
def build_request_repr(request, path_override=None, GET_override=None,
POST_override=None, COOKIES_override=None,
META_override=None):
"""
Builds and returns the request's representation string. The request's
attributes may be overridden by pre-processed values.
"""
# Since this is called as part of error handling, we need to be very
# robust against potentially malformed input.
try:
get = (pformat(GET_override)
if GET_override is not None
else pformat(request.GET))
except:
get = '<could not parse>'
if request._post_parse_error:
post = '<could not parse>'
else:
try:
post = (pformat(POST_override)
if POST_override is not None
else pformat(request.POST))
except:
post = '<could not parse>'
try:
cookies = (pformat(COOKIES_override)
if COOKIES_override is not None
else pformat(request.COOKIES))
except:
cookies = '<could not parse>'
try:
meta = (pformat(META_override)
if META_override is not None
else pformat(request.META))
except:
meta = '<could not parse>'
path = path_override if path_override is not None else request.path
return smart_str(u'<%s\npath:%s,\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' %
(request.__class__.__name__,
path,
unicode(get),
unicode(post),
unicode(cookies),
unicode(meta)))
class UnreadablePostError(IOError):
pass
class HttpRequest(object):
"""A basic HTTP request."""
# The encoding used in GET/POST dicts. None means use default setting.
_encoding = None
_upload_handlers = []
def __init__(self):
self.GET, self.POST, self.COOKIES, self.META, self.FILES = {}, {}, {}, {}, {}
self.path = ''
self.path_info = ''
self.method = None
self._post_parse_error = False
def __repr__(self):
return build_request_repr(self)
def get_host(self):
"""Returns the HTTP host using the environment or request headers."""
# We try three options, in order of decreasing preference.
if settings.USE_X_FORWARDED_HOST and (
'HTTP_X_FORWARDED_HOST' in self.META):
host = self.META['HTTP_X_FORWARDED_HOST']
elif 'HTTP_HOST' in self.META:
host = self.META['HTTP_HOST']
else:
# Reconstruct the host using the algorithm from PEP 333.
host = self.META['SERVER_NAME']
server_port = str(self.META['SERVER_PORT'])
if server_port != (self.is_secure() and '443' or '80'):
host = '%s:%s' % (host, server_port)
allowed_hosts = ['*'] if settings.DEBUG else settings.ALLOWED_HOSTS
if validate_host(host, allowed_hosts):
return host
else:
raise SuspiciousOperation(
"Invalid HTTP_HOST header (you may need to set ALLOWED_HOSTS): %s" % host)
def get_full_path(self):
# RFC 3986 requires query string arguments to be in the ASCII range.
# Rather than crash if this doesn't happen, we encode defensively.
return '%s%s' % (self.path, self.META.get('QUERY_STRING', '') and ('?' + iri_to_uri(self.META.get('QUERY_STRING', ''))) or '')
def get_signed_cookie(self, key, default=RAISE_ERROR, salt='', max_age=None):
"""
Attempts to return a signed cookie. If the signature fails or the
cookie has expired, raises an exception... unless you provide the
default argument in which case that value will be returned instead.
"""
try:
cookie_value = self.COOKIES[key].encode('utf-8')
except KeyError:
if default is not RAISE_ERROR:
return default
else:
raise
try:
value = signing.get_cookie_signer(salt=key + salt).unsign(
cookie_value, max_age=max_age)
except signing.BadSignature:
if default is not RAISE_ERROR:
return default
else:
raise
return value
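    # Illustrative usage (cookie name, salt, and max_age are assumed values):
    #   hint = request.get_signed_cookie('hint', default=None, max_age=3600)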
def build_absolute_uri(self, location=None):
"""
Builds an absolute URI from the location and the variables available in
this request. If no location is specified, the absolute URI is built on
``request.get_full_path()``.
"""
if not location:
location = self.get_full_path()
if not absolute_http_url_re.match(location):
current_uri = '%s://%s%s' % (self.is_secure() and 'https' or 'http',
self.get_host(), self.path)
location = urljoin(current_uri, location)
return iri_to_uri(location)
def _is_secure(self):
return os.environ.get("HTTPS") == "on"
def is_secure(self):
# First, check the SECURE_PROXY_SSL_HEADER setting.
if settings.SECURE_PROXY_SSL_HEADER:
try:
header, value = settings.SECURE_PROXY_SSL_HEADER
except ValueError:
raise ImproperlyConfigured('The SECURE_PROXY_SSL_HEADER setting must be a tuple containing two values.')
if self.META.get(header, None) == value:
return True
# Failing that, fall back to _is_secure(), which is a hook for
# subclasses to implement.
return self._is_secure()
def is_ajax(self):
return self.META.get('HTTP_X_REQUESTED_WITH') == 'XMLHttpRequest'
def _set_encoding(self, val):
"""
Sets the encoding used for GET/POST accesses. If the GET or POST
dictionary has already been created, it is removed and recreated on the
next access (so that it is decoded correctly).
"""
self._encoding = val
if hasattr(self, '_get'):
del self._get
if hasattr(self, '_post'):
del self._post
def _get_encoding(self):
return self._encoding
encoding = property(_get_encoding, _set_encoding)
def _initialize_handlers(self):
self._upload_handlers = [uploadhandler.load_handler(handler, self)
for handler in settings.FILE_UPLOAD_HANDLERS]
def _set_upload_handlers(self, upload_handlers):
if hasattr(self, '_files'):
raise AttributeError("You cannot set the upload handlers after the upload has been processed.")
self._upload_handlers = upload_handlers
def _get_upload_handlers(self):
if not self._upload_handlers:
# If there are no upload handlers defined, initialize them from settings.
self._initialize_handlers()
return self._upload_handlers
upload_handlers = property(_get_upload_handlers, _set_upload_handlers)
def parse_file_upload(self, META, post_data):
"""Returns a tuple of (POST QueryDict, FILES MultiValueDict)."""
self.upload_handlers = ImmutableList(
self.upload_handlers,
warning = "You cannot alter upload handlers after the upload has been processed."
)
parser = MultiPartParser(META, post_data, self.upload_handlers, self.encoding)
return parser.parse()
@property
def body(self):
if not hasattr(self, '_body'):
if self._read_started:
raise Exception("You cannot access body after reading from request's data stream")
try:
self._body = self.read()
except IOError, e:
raise UnreadablePostError, e, sys.exc_traceback
self._stream = StringIO(self._body)
return self._body
@property
def raw_post_data(self):
warnings.warn('HttpRequest.raw_post_data has been deprecated. Use HttpRequest.body instead.', PendingDeprecationWarning)
return self.body
def _mark_post_parse_error(self):
self._post = QueryDict('')
self._files = MultiValueDict()
self._post_parse_error = True
def _load_post_and_files(self):
# Populates self._post and self._files
if self.method != 'POST':
self._post, self._files = QueryDict('', encoding=self._encoding), MultiValueDict()
return
if self._read_started and not hasattr(self, '_body'):
self._mark_post_parse_error()
return
if self.META.get('CONTENT_TYPE', '').startswith('multipart'):
if hasattr(self, '_body'):
# Use already read data
data = StringIO(self._body)
else:
data = self
try:
self._post, self._files = self.parse_file_upload(self.META, data)
except:
                # An error occurred while parsing POST data. Since the
                # request handler might access self.POST while formatting
                # the error, set self._post and self._files to prevent
                # attempts to parse POST data again.
                # Mark that an error occurred. This allows self.__repr__ to
                # be explicit about it instead of simply representing an
                # empty POST.
self._mark_post_parse_error()
raise
else:
self._post, self._files = QueryDict(self.body, encoding=self._encoding), MultiValueDict()
## File-like and iterator interface.
##
## Expects self._stream to be set to an appropriate source of bytes by
## a corresponding request subclass (WSGIRequest or ModPythonRequest).
## Also when request data has already been read by request.POST or
## request.body, self._stream points to a StringIO instance
## containing that data.
def read(self, *args, **kwargs):
self._read_started = True
return self._stream.read(*args, **kwargs)
def readline(self, *args, **kwargs):
self._read_started = True
return self._stream.readline(*args, **kwargs)
def xreadlines(self):
while True:
buf = self.readline()
if not buf:
break
yield buf
__iter__ = xreadlines
def readlines(self):
return list(iter(self))
class QueryDict(MultiValueDict):
"""
A specialized MultiValueDict that takes a query string when initialized.
This is immutable unless you create a copy of it.
Values retrieved from this class are converted from the given encoding
(DEFAULT_CHARSET by default) to unicode.
"""
    # These are both reset in __init__, but are specified here at the class
    # level so that unpickling will have valid values
_mutable = True
_encoding = None
def __init__(self, query_string, mutable=False, encoding=None):
MultiValueDict.__init__(self)
if not encoding:
encoding = settings.DEFAULT_CHARSET
self.encoding = encoding
for key, value in parse_qsl((query_string or ''), True): # keep_blank_values=True
self.appendlist(force_unicode(key, encoding, errors='replace'),
force_unicode(value, encoding, errors='replace'))
self._mutable = mutable
def _get_encoding(self):
if self._encoding is None:
self._encoding = settings.DEFAULT_CHARSET
return self._encoding
def _set_encoding(self, value):
self._encoding = value
encoding = property(_get_encoding, _set_encoding)
def _assert_mutable(self):
if not self._mutable:
raise AttributeError("This QueryDict instance is immutable")
def __setitem__(self, key, value):
self._assert_mutable()
key = str_to_unicode(key, self.encoding)
value = str_to_unicode(value, self.encoding)
MultiValueDict.__setitem__(self, key, value)
def __delitem__(self, key):
self._assert_mutable()
super(QueryDict, self).__delitem__(key)
def __copy__(self):
result = self.__class__('', mutable=True, encoding=self.encoding)
for key, value in dict.items(self):
dict.__setitem__(result, key, value)
return result
def __deepcopy__(self, memo):
import copy
result = self.__class__('', mutable=True, encoding=self.encoding)
memo[id(self)] = result
for key, value in dict.items(self):
dict.__setitem__(result, copy.deepcopy(key, memo), copy.deepcopy(value, memo))
return result
def setlist(self, key, list_):
self._assert_mutable()
key = str_to_unicode(key, self.encoding)
list_ = [str_to_unicode(elt, self.encoding) for elt in list_]
MultiValueDict.setlist(self, key, list_)
def setlistdefault(self, key, default_list=()):
self._assert_mutable()
if key not in self:
self.setlist(key, default_list)
return MultiValueDict.getlist(self, key)
def appendlist(self, key, value):
self._assert_mutable()
key = str_to_unicode(key, self.encoding)
value = str_to_unicode(value, self.encoding)
MultiValueDict.appendlist(self, key, value)
def update(self, other_dict):
self._assert_mutable()
f = lambda s: str_to_unicode(s, self.encoding)
if hasattr(other_dict, 'lists'):
for key, valuelist in other_dict.lists():
for value in valuelist:
MultiValueDict.update(self, {f(key): f(value)})
else:
d = dict([(f(k), f(v)) for k, v in other_dict.items()])
MultiValueDict.update(self, d)
def pop(self, key, *args):
self._assert_mutable()
return MultiValueDict.pop(self, key, *args)
def popitem(self):
self._assert_mutable()
return MultiValueDict.popitem(self)
def clear(self):
self._assert_mutable()
MultiValueDict.clear(self)
def setdefault(self, key, default=None):
self._assert_mutable()
key = str_to_unicode(key, self.encoding)
default = str_to_unicode(default, self.encoding)
return MultiValueDict.setdefault(self, key, default)
def copy(self):
"""Returns a mutable copy of this object."""
return self.__deepcopy__({})
def urlencode(self, safe=None):
"""
Returns an encoded string of all query string arguments.
:arg safe: Used to specify characters which do not require quoting, for
example::
>>> q = QueryDict('', mutable=True)
>>> q['next'] = '/a&b/'
>>> q.urlencode()
'next=%2Fa%26b%2F'
>>> q.urlencode(safe='/')
'next=/a%26b/'
"""
output = []
if safe:
encode = lambda k, v: '%s=%s' % ((quote(k, safe), quote(v, safe)))
else:
encode = lambda k, v: urlencode({k: v})
for k, list_ in self.lists():
k = smart_str(k, self.encoding)
output.extend([encode(k, smart_str(v, self.encoding))
for v in list_])
return '&'.join(output)
def parse_cookie(cookie):
if cookie == '':
return {}
if not isinstance(cookie, Cookie.BaseCookie):
try:
c = SimpleCookie()
c.load(cookie)
except Cookie.CookieError:
# Invalid cookie
return {}
else:
c = cookie
cookiedict = {}
for key in c.keys():
cookiedict[key] = c.get(key).value
return cookiedict
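# Illustrative behaviour (a sketch, not executed here):
#   parse_cookie('name=Dmitri; color=blue') -> {'name': 'Dmitri', 'color': 'blue'}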
class BadHeaderError(ValueError):
pass
class HttpResponse(object):
"""A basic HTTP response, with content and dictionary-accessed headers."""
status_code = 200
def __init__(self, content='', mimetype=None, status=None,
content_type=None):
# _headers is a mapping of the lower-case name to the original case of
# the header (required for working with legacy systems) and the header
# value. Both the name of the header and its value are ASCII strings.
self._headers = {}
self._charset = settings.DEFAULT_CHARSET
if mimetype: # For backwards compatibility.
content_type = mimetype
if not content_type:
content_type = "%s; charset=%s" % (settings.DEFAULT_CONTENT_TYPE,
self._charset)
self.content = content
self.cookies = SimpleCookie()
if status:
self.status_code = status
self['Content-Type'] = content_type
def __str__(self):
"""Full HTTP message, including headers."""
return '\n'.join(['%s: %s' % (key, value)
for key, value in self._headers.values()]) \
+ '\n\n' + self.content
def _convert_to_ascii(self, *values):
"""Converts all values to ascii strings."""
for value in values:
if isinstance(value, unicode):
try:
value = value.encode('us-ascii')
except UnicodeError, e:
e.reason += ', HTTP response headers must be in US-ASCII format'
raise
else:
value = str(value)
if '\n' in value or '\r' in value:
raise BadHeaderError("Header values can't contain newlines (got %r)" % (value))
yield value
def __setitem__(self, header, value):
header, value = self._convert_to_ascii(header, value)
self._headers[header.lower()] = (header, value)
def __delitem__(self, header):
try:
del self._headers[header.lower()]
except KeyError:
pass
def __getitem__(self, header):
return self._headers[header.lower()][1]
def __getstate__(self):
        # SimpleCookie is not picklable with pickle.HIGHEST_PROTOCOL, so we
        # serialise to a string instead
state = self.__dict__.copy()
state['cookies'] = str(state['cookies'])
return state
def __setstate__(self, state):
self.__dict__.update(state)
self.cookies = SimpleCookie(self.cookies)
def has_header(self, header):
"""Case-insensitive check for a header."""
return header.lower() in self._headers
__contains__ = has_header
def items(self):
return self._headers.values()
def get(self, header, alternate=None):
return self._headers.get(header.lower(), (None, alternate))[1]
def set_cookie(self, key, value='', max_age=None, expires=None, path='/',
domain=None, secure=False, httponly=False):
"""
Sets a cookie.
``expires`` can be:
- a string in the correct format,
- a naive ``datetime.datetime`` object in UTC,
- an aware ``datetime.datetime`` object in any time zone.
If it is a ``datetime.datetime`` object then ``max_age`` will be calculated.
"""
self.cookies[key] = value
if expires is not None:
if isinstance(expires, datetime.datetime):
if timezone.is_aware(expires):
expires = timezone.make_naive(expires, timezone.utc)
delta = expires - expires.utcnow()
# Add one second so the date matches exactly (a fraction of
# time gets lost between converting to a timedelta and
# then the date string).
delta = delta + datetime.timedelta(seconds=1)
# Just set max_age - the max_age logic will set expires.
expires = None
max_age = max(0, delta.days * 86400 + delta.seconds)
else:
self.cookies[key]['expires'] = expires
if max_age is not None:
self.cookies[key]['max-age'] = max_age
            # IE requires expires, so set it if it hasn't been already.
if not expires:
self.cookies[key]['expires'] = cookie_date(time.time() +
max_age)
if path is not None:
self.cookies[key]['path'] = path
if domain is not None:
self.cookies[key]['domain'] = domain
if secure:
self.cookies[key]['secure'] = True
if httponly:
self.cookies[key]['httponly'] = True
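    # Illustrative usage (cookie name and values assumed):
    #   response.set_cookie('theme', 'dark', max_age=3600, httponly=True)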
def set_signed_cookie(self, key, value, salt='', **kwargs):
value = signing.get_cookie_signer(salt=key + salt).sign(value)
return self.set_cookie(key, value, **kwargs)
def delete_cookie(self, key, path='/', domain=None):
self.set_cookie(key, max_age=0, path=path, domain=domain,
expires='Thu, 01-Jan-1970 00:00:00 GMT')
def _get_content(self):
if self.has_header('Content-Encoding'):
return ''.join([str(e) for e in self._container])
return ''.join([smart_str(e, self._charset) for e in self._container])
def _set_content(self, value):
if hasattr(value, '__iter__'):
self._container = value
self._base_content_is_iter = True
else:
self._container = [value]
self._base_content_is_iter = False
content = property(_get_content, _set_content)
def __iter__(self):
self._iterator = iter(self._container)
return self
def next(self):
chunk = self._iterator.next()
if isinstance(chunk, unicode):
chunk = chunk.encode(self._charset)
return str(chunk)
def close(self):
if hasattr(self._container, 'close'):
self._container.close()
# The remaining methods partially implement the file-like object interface.
# See http://docs.python.org/lib/bltin-file-objects.html
def write(self, content):
if self._base_content_is_iter:
raise Exception("This %s instance is not writable" % self.__class__)
self._container.append(content)
def flush(self):
pass
def tell(self):
if self._base_content_is_iter:
raise Exception("This %s instance cannot tell its position" % self.__class__)
return sum([len(str(chunk)) for chunk in self._container])
class HttpResponseRedirectBase(HttpResponse):
allowed_schemes = ['http', 'https', 'ftp']
def __init__(self, redirect_to):
super(HttpResponseRedirectBase, self).__init__()
parsed = urlparse(redirect_to)
if parsed.scheme and parsed.scheme not in self.allowed_schemes:
raise SuspiciousOperation("Unsafe redirect to URL with scheme '%s'" % parsed.scheme)
self['Location'] = iri_to_uri(redirect_to)
class HttpResponseRedirect(HttpResponseRedirectBase):
status_code = 302
class HttpResponsePermanentRedirect(HttpResponseRedirectBase):
status_code = 301
class HttpResponseNotModified(HttpResponse):
status_code = 304
class HttpResponseBadRequest(HttpResponse):
status_code = 400
class HttpResponseNotFound(HttpResponse):
status_code = 404
class HttpResponseForbidden(HttpResponse):
status_code = 403
class HttpResponseNotAllowed(HttpResponse):
status_code = 405
def __init__(self, permitted_methods):
super(HttpResponseNotAllowed, self).__init__()
self['Allow'] = ', '.join(permitted_methods)
class HttpResponseGone(HttpResponse):
status_code = 410
class HttpResponseServerError(HttpResponse):
status_code = 500
# A backwards compatible alias for HttpRequest.get_host.
def get_host(request):
return request.get_host()
# It's neither necessary nor appropriate to use
# django.utils.encoding.smart_unicode for parsing URLs and form inputs. Thus,
# this slightly more restricted function.
def str_to_unicode(s, encoding):
"""
Converts basestring objects to unicode, using the given encoding. Illegally
encoded input characters are replaced with Unicode "unknown" codepoint
(\ufffd).
Returns any non-basestring objects without change.
"""
if isinstance(s, str):
return unicode(s, encoding, 'replace')
else:
return s
def validate_host(host, allowed_hosts):
"""
Validate the given host header value for this site.
Check that the host looks valid and matches a host or host pattern in the
given list of ``allowed_hosts``. Any pattern beginning with a period
matches a domain and all its subdomains (e.g. ``.example.com`` matches
``example.com`` and any subdomain), ``*`` matches anything, and anything
else must match exactly.
Return ``True`` for a valid host, ``False`` otherwise.
"""
# All validation is case-insensitive
host = host.lower()
# Basic sanity check
if not host_validation_re.match(host):
return False
# Validate only the domain part.
if host[-1] == ']':
# It's an IPv6 address without a port.
domain = host
else:
domain = host.rsplit(':', 1)[0]
for pattern in allowed_hosts:
pattern = pattern.lower()
match = (
pattern == '*' or
pattern.startswith('.') and (
domain.endswith(pattern) or domain == pattern[1:]
) or
pattern == domain
)
if match:
return True
return False
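# Illustrative behaviour (a sketch; host values assumed):
#   validate_host('www.example.com', ['.example.com'])  -> True
#   validate_host('example.com', ['.example.com'])      -> True
#   validate_host('attacker.com', ['.example.com'])     -> False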
| gpl-2.0 |
ATIX-AG/ansible | lib/ansible/modules/network/f5/bigip_smtp.py | 18 | 16716 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: bigip_smtp
short_description: Manages SMTP settings on the BIG-IP
description:
  - Allows configuring the BIG-IP to send mail via an SMTP server by
    setting the parameters of an SMTP server configuration.
version_added: 2.6
options:
name:
description:
- Specifies the name of the SMTP server configuration.
required: True
partition:
description:
- Device partition to manage resources on.
default: Common
smtp_server:
description:
- SMTP server host name in the format of a fully qualified domain name.
      - This value is required when creating a new SMTP configuration.
smtp_server_port:
description:
- Specifies the SMTP port number.
- When creating a new SMTP configuration, the default is C(25) when
C(encryption) is C(none) or C(tls). The default is C(465) when C(ssl)
is selected.
local_host_name:
description:
- Host name used in SMTP headers in the format of a fully qualified
domain name. This setting does not refer to the BIG-IP system's hostname.
from_address:
description:
- Email address that the email is being sent from. This is the "Reply-to"
address that the recipient sees.
encryption:
description:
- Specifies whether the SMTP server requires an encrypted connection in
order to send mail.
choices:
- none
- ssl
- tls
authentication:
description:
- Credentials can be set on an SMTP server's configuration even if that
authentication is not used (think staging configs or emergency changes).
This parameter acts as a switch to make the specified C(smtp_server_username)
and C(smtp_server_password) parameters active or not.
- When C(yes), the authentication parameters will be active.
- When C(no), the authentication parameters will be inactive.
type: bool
smtp_server_username:
description:
- User name that the SMTP server requires when validating a user.
smtp_server_password:
description:
- Password that the SMTP server requires when validating a user.
state:
description:
- When C(present), ensures that the SMTP configuration exists.
- When C(absent), ensures that the SMTP configuration does not exist.
required: False
default: present
choices:
- present
- absent
update_password:
description:
      - Passwords are stored encrypted, so the module cannot know if the supplied
        C(smtp_server_password) is the same as or different from the existing password.
This parameter controls the updating of the C(smtp_server_password)
credential.
- When C(always), will always update the password.
- When C(on_create), will only set the password for newly created SMTP server
configurations.
default: always
choices:
- always
- on_create
extends_documentation_fragment: f5
notes:
- Requires the netaddr Python package on the host. This is as easy as
C(pip install netaddr).
requirements:
- netaddr
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Create a base SMTP server configuration
bigip_smtp:
name: my-smtp
smtp_server: 1.1.1.1
smtp_server_username: mail-admin
smtp_server_password: mail-secret
local_host_name: smtp.mydomain.com
from_address: [email protected]
password: secret
server: lb.mydomain.com
state: present
user: admin
delegate_to: localhost
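# A second, illustrative task (values assumed) that removes the configuration:
- name: Remove the SMTP server configuration
  bigip_smtp:
    name: my-smtp
    password: secret
    server: lb.mydomain.com
    state: absent
    user: admin
  delegate_to: localhost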
'''
RETURN = r'''
smtp_server:
description: The new C(smtp_server) value of the SMTP configuration.
returned: changed
type: string
sample: mail.mydomain.com
smtp_server_port:
description: The new C(smtp_server_port) value of the SMTP configuration.
returned: changed
type: int
sample: 25
local_host_name:
description: The new C(local_host_name) value of the SMTP configuration.
returned: changed
type: string
sample: smtp.mydomain.com
from_address:
description: The new C(from_address) value of the SMTP configuration.
returned: changed
type: string
sample: [email protected]
encryption:
description: The new C(encryption) value of the SMTP configuration.
returned: changed
type: string
sample: tls
authentication:
description: Whether the authentication parameters are active or not.
returned: changed
type: bool
sample: yes
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
try:
from library.module_utils.network.f5.bigip import HAS_F5SDK
from library.module_utils.network.f5.bigip import F5Client
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import is_valid_hostname
from library.module_utils.network.f5.common import f5_argument_spec
try:
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
except ImportError:
from ansible.module_utils.network.f5.bigip import HAS_F5SDK
from ansible.module_utils.network.f5.bigip import F5Client
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import is_valid_hostname
from ansible.module_utils.network.f5.common import f5_argument_spec
try:
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
try:
import netaddr
HAS_NETADDR = True
except ImportError:
HAS_NETADDR = False
class Parameters(AnsibleF5Parameters):
api_map = {
'username': 'smtp_server_username',
'passwordEncrypted': 'smtp_server_password',
'localHostName': 'local_host_name',
'smtpServerHostName': 'smtp_server',
'smtpServerPort': 'smtp_server_port',
'encryptedConnection': 'encryption',
'authenticationEnabled': 'authentication_enabled',
'authenticationDisabled': 'authentication_disabled',
'fromAddress': 'from_address'
}
api_attributes = [
'username', 'passwordEncrypted', 'localHostName', 'smtpServerHostName',
'smtpServerPort', 'encryptedConnection', 'authenticationEnabled',
'authenticationDisabled', 'fromAddress'
]
returnables = [
'smtp_server_username', 'smtp_server_password', 'local_host_name',
'smtp_server', 'smtp_server_port', 'encryption', 'authentication',
'from_address'
]
updatables = [
'smtp_server_username', 'smtp_server_password', 'local_host_name',
'smtp_server', 'smtp_server_port', 'encryption', 'authentication',
'from_address'
]
class ApiParameters(Parameters):
pass
class ModuleParameters(Parameters):
@property
def local_host_name(self):
if self._values['local_host_name'] is None:
return None
try:
# Check for valid IPv4 or IPv6 entries
netaddr.IPNetwork(self._values['local_host_name'])
return self._values['local_host_name']
except netaddr.core.AddrFormatError:
# else fallback to checking reasonably well formatted hostnames
if is_valid_hostname(self._values['local_host_name']):
return str(self._values['local_host_name'])
raise F5ModuleError(
"The provided 'local_host_name' value {0} is not a valid IP or hostname".format(
str(self._values['local_host_name'])
)
)
@property
def authentication_enabled(self):
if self._values['authentication'] is None:
return None
if self._values['authentication']:
return True
@property
def authentication_disabled(self):
if self._values['authentication'] is None:
return None
if not self._values['authentication']:
return True
@property
def smtp_server_port(self):
if self._values['smtp_server_port'] is None:
return None
return int(self._values['smtp_server_port'])
class Changes(Parameters):
def to_return(self):
result = {}
try:
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
except Exception:
pass
return result
class UsableChanges(Changes):
pass
class ReportableChanges(Changes):
@property
def smtp_server_password(self):
return None
@property
def smtp_server_username(self):
return None
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def smtp_server_password(self):
if self.want.update_password == 'on_create':
return None
return self.want.smtp_server_password
@property
def authentication(self):
if self.want.authentication_enabled:
if self.want.authentication_enabled != self.have.authentication_enabled:
return dict(
authentication_enabled=self.want.authentication_enabled
)
if self.want.authentication_disabled:
if self.want.authentication_disabled != self.have.authentication_disabled:
return dict(
                    authentication_disabled=self.want.authentication_disabled
)
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = kwargs.get('client', None)
self.want = ModuleParameters(params=self.module.params)
self.have = ApiParameters()
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def exec_module(self):
changed = False
result = dict()
state = self.want.state
try:
if state == "present":
changed = self.present()
elif state == "absent":
changed = self.absent()
except iControlUnexpectedHTTPError as e:
raise F5ModuleError(str(e))
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
for warning in warnings:
self.client.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def exists(self):
result = self.client.api.tm.sys.smtp_servers.smtp_server.exists(
name=self.want.name,
partition=self.want.partition
)
return result
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the resource.")
return True
def create(self):
self._set_changed_options()
if self.module.check_mode:
return True
self.create_on_device()
return True
def create_on_device(self):
params = self.want.api_params()
self.client.api.tm.sys.smtp_servers.smtp_server.create(
name=self.want.name,
partition=self.want.partition,
**params
)
def update_on_device(self):
params = self.want.api_params()
resource = self.client.api.tm.sys.smtp_servers.smtp_server.load(
name=self.want.name,
partition=self.want.partition
)
resource.modify(**params)
def absent(self):
if self.exists():
return self.remove()
return False
def remove_from_device(self):
resource = self.client.api.tm.sys.smtp_servers.smtp_server.load(
name=self.want.name,
partition=self.want.partition
)
if resource:
resource.delete()
def read_current_from_device(self):
resource = self.client.api.tm.sys.smtp_servers.smtp_server.load(
name=self.want.name,
partition=self.want.partition
)
result = resource.attrs
return ApiParameters(params=result)
class ArgumentSpec(object):
def __init__(self):
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
smtp_server=dict(),
smtp_server_port=dict(type='int'),
smtp_server_username=dict(no_log=True),
smtp_server_password=dict(no_log=True),
local_host_name=dict(),
encryption=dict(choices=['none', 'ssl', 'tls']),
update_password=dict(
default='always',
choices=['always', 'on_create']
),
from_address=dict(),
authentication=dict(type='bool'),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
),
state=dict(
default='present',
choices=['present', 'absent']
)
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode
)
if not HAS_F5SDK:
module.fail_json(msg="The python f5-sdk module is required")
if not HAS_NETADDR:
module.fail_json(msg="The python netaddr module is required")
try:
client = F5Client(**module.params)
mm = ModuleManager(module=module, client=client)
results = mm.exec_module()
cleanup_tokens(client)
module.exit_json(**results)
except F5ModuleError as ex:
cleanup_tokens(client)
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
| gpl-3.0 |
dikien/Machine-Learning-Newspaper | nytimes/step4_analysis_unsupervised_2.py | 1 | 3602 | # -*- coding: UTF-8 -*-
from sklearn.metrics import accuracy_score
from time import time
import numpy as np
import pickle
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn import preprocessing
from sklearn.feature_selection import SelectPercentile, f_classif, chi2
import matplotlib.pyplot as plt
from itertools import cycle
from sklearn.cluster import KMeans, SpectralClustering, AgglomerativeClustering
from sklearn.metrics import accuracy_score
def plot(nFeatures, data):
colors = cycle('rgbcmykw')
algorithm = sorted(data)
fig = plt.figure()
ax = fig.add_subplot(111)
for j, c in zip(algorithm, colors):
ax.plot(nFeatures, data[j], label=j, color=c)
ax.scatter(nFeatures, data[j], color=c)
plt.xlabel("#-Features(SelectPercentile)")
plt.ylabel("Accuracy")
plt.title("Accuracy vs #-Features for different classifiers")
# ax.set_xscale("log")
box = ax.get_position()
ax.set_position([box.x0, box.y0 + box.height * 0.3,
box.width, box.height * 0.7])
ax.legend(loc="upper center", bbox_to_anchor=(0.5, -0.15), fancybox=True, shadow=True, ncol=3)
plt.legend(loc=2)
plt.show()
def preprocess(article_file, lable_file, k):
features = pickle.load(open(article_file))
features = np.array(features)
# transform non-numerical labels (as long as they are hashable and comparable) to numerical labels
lables = pickle.load(open(lable_file))
le = preprocessing.LabelEncoder()
le.fit(lables)
lables = le.transform(lables)
# print le.inverse_transform([0])
### text vectorization--go from strings to lists of numbers
vectorizer = TfidfVectorizer(sublinear_tf=True, max_df=0.5, min_df=1,
stop_words='english')
features_train_transformed = vectorizer.fit_transform(features)
# selector : SelectPercentile
selector = SelectPercentile(f_classif, percentile=k)
selector.fit(features_train_transformed, lables)
# selector : chi2
# selector = SelectPercentile(score_func=chi2)
# selector.fit(features_train_transformed, lables)
features_train_transformed = selector.transform(features_train_transformed).toarray()
return features_train_transformed, lables, vectorizer, selector, le, features
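# Illustrative call (paths as used below; k=50 is an assumed value):
#   X, y, vectorizer, selector, le, raw = preprocess(
#       "pkl/article_2_people.pkl", "pkl/lable_2_people.pkl", 50)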
nFeatures = np.arange(10, 100, 10)
# nFeatures = [10, 100, 200, 300, 400, 500, 600, 700, 800, 900, 1000]
data = {}
for k in nFeatures:
features, labels, vectorizer, selector, le, features_data = preprocess("pkl/article_2_people.pkl", "pkl/lable_2_people.pkl", k)
for name, clf in [
('k_means', KMeans(n_clusters=2, n_init=5)),
('SpectralClustering', SpectralClustering(n_clusters=2, n_init=5)),
('AgglomerativeClustering_ward', AgglomerativeClustering(n_clusters=2, linkage='ward')),
('AgglomerativeClustering_complete', AgglomerativeClustering(n_clusters=2, linkage='complete')),
('AgglomerativeClustering_average', AgglomerativeClustering(n_clusters=2, linkage='average'))
]:
if not data.has_key(name):
data[name] = []
print "*" * 100
print('Method: {}'.format(name) + ' the number of feature is {}'.format(k))
# Fit on the whole data:
t0 = time()
y_pred = clf.fit(features).labels_
print "fit time:", round(time()-t0, 3), "s"
score_accuracy = accuracy_score(y_pred, labels, normalize=True)
print('accuracy score on training: {}'.format(score_accuracy))
print "*"* 100
data[name].append(score_accuracy)
plot(nFeatures, data) | bsd-3-clause |
burnpanck/chaco | chaco/colormapped_selection_overlay.py | 3 | 6043 | """ Defines the ColormappedSelectionOverlay class.
"""
from numpy import logical_and
# Enthought library imports
from traits.api import Any, Bool, Float, Instance, Property, Enum
# Local imports
from abstract_overlay import AbstractOverlay
from colormapped_scatterplot import ColormappedScatterPlot
class ColormappedSelectionOverlay(AbstractOverlay):
"""
Overlays and changes a ColormappedScatterPlot to fade its non-selected
points to a very low alpha.
"""
# The ColormappedScatterPlot that this overlay is listening to.
# By default, it looks at self.component
plot = Property
# The amount to fade the unselected points.
fade_alpha = Float(0.15)
# The minimum difference, in float percent, between the starting and ending
# selection values, if range selection mode is enabled
minimum_delta = Float(0.01)
# Outline width for selected points.
selected_outline_width = Float(1.0)
# Outline width for unselected points.
unselected_outline_width = Float(0.0)
# The type of selection used by the data source.
selection_type = Enum('range', 'mask')
_plot = Instance(ColormappedScatterPlot)
_visible = Bool(False)
_old_alpha = Float
_old_outline_color = Any
_old_line_width = Float(0.0)
def __init__(self, component=None, **kw):
super(ColormappedSelectionOverlay, self).__init__(**kw)
self.component = component
return
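    # Illustrative wiring (a sketch; the colormapped scatter plot itself is
    # assumed to be constructed elsewhere):
    #   overlay = ColormappedSelectionOverlay(component=scatter_plot)
    #   scatter_plot.overlays.append(overlay)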
def overlay(self, component, gc, view_bounds=None, mode="normal"):
""" Draws this component overlaid on another component.
Implements AbstractOverlay.
"""
if not self._visible:
return
plot = self.plot
datasource = plot.color_data
if self.selection_type == 'range':
selections = datasource.metadata["selections"]
if selections is not None and len(selections) == 0:
return
low, high = selections
if abs(high - low) / abs(high + low) < self.minimum_delta:
return
# Mask the data with just the points falling within the data
# range selected on the colorbar
data_pts = datasource.get_data()
mask = (data_pts >= low) & (data_pts <= high)
elif self.selection_type == 'mask':
mask = reduce(logical_and, datasource.metadata["selection_masks"])
            if sum(mask) < 2:
return
datasource.set_mask(mask)
# Store the current plot color settings before overwriting them
fade_outline_color = plot.outline_color_
# Overwrite marker outline color and fill alpha settings of
# the plot, then manually invoke the plot to draw onto the GC.
plot.outline_color = list(self._old_outline_color[:3]) + [1.0]
plot.fill_alpha = 1.0
plot.line_width = self.selected_outline_width
plot._draw_plot(gc, view_bounds, mode)
# Restore the plot's previous color settings and data mask.
plot.fill_alpha = self.fade_alpha
plot.outline_color = fade_outline_color
plot.line_width = self.unselected_outline_width
datasource.remove_mask()
return
def _component_changed(self, old, new):
if old:
old.on_trait_change(self.datasource_change_handler, "color_data", remove=True)
if new:
new.on_trait_change(self.datasource_change_handler, "color_data")
self._old_alpha = new.fill_alpha
self._old_outline_color = new.outline_color
self._old_line_width = new.line_width
self.datasource_change_handler(new, "color_data", None, new.color_data)
return
def datasource_change_handler(self, obj, name, old, new):
if old:
old.on_trait_change(self.selection_change_handler, "metadata_changed", remove=True)
if new:
new.on_trait_change(self.selection_change_handler, "metadata_changed")
self.selection_change_handler(new, "metadata_changed", None, new.metadata)
return
def selection_change_handler(self, obj, name, old, new):
if self.selection_type == 'range':
selection_key = 'selections'
elif self.selection_type == 'mask':
selection_key = 'selection_masks'
if type(new) == dict and new.get(selection_key, None) is not None \
and len(new[selection_key]) > 0:
if not self._visible:
# We have a new selection, so replace the colors on the plot with the
# faded alpha and colors
plot = self.plot
# Save the line width and set it to zero for the unselected points
self._old_line_width = plot.line_width
plot.line_width = self.unselected_outline_width
# Save the outline color and set it to the faded version
self._old_outline_color = plot.outline_color_
outline_color = list(plot.outline_color_)
if len(outline_color) == 3:
outline_color += [self.fade_alpha]
else:
outline_color[3] = self.fade_alpha
plot.outline_color = outline_color
# Save the alpha value and set it to a faded version
self._old_alpha = plot.fill_alpha
plot.fill_alpha = self.fade_alpha
self.plot.invalidate_draw()
self._visible = True
else:
self.plot.fill_alpha = self._old_alpha
self.plot.outline_color = self._old_outline_color
self.plot.line_width = self._old_line_width
self.plot.invalidate_draw()
self._visible = False
self.plot.request_redraw()
return
def _get_plot(self):
if self._plot is not None:
return self._plot
else:
return self.component
def _set_plot(self, val):
self._plot = val
# EOF
| bsd-3-clause |
lgeiger/ide-python | lib/debugger/VendorLib/vs-py-debugger/pythonFiles/jedi/debug.py | 8 | 3428 | from jedi._compatibility import encoding, is_py3, u
import os
import time
def _lazy_colorama_init():
"""
    Lazily init colorama if necessary, so as not to screw up stdout if debug
    is not enabled.
This version of the function does nothing.
"""
pass
_inited = False
try:
if os.name == 'nt':
# Does not work on Windows, as pyreadline and colorama interfere
raise ImportError
else:
# Use colorama for nicer console output.
from colorama import Fore, init
from colorama import initialise
def _lazy_colorama_init():
"""
            Lazily init colorama if necessary, so as not to screw up stdout
            if debug is not enabled.
This version of the function does init colorama.
"""
global _inited
if not _inited:
                # pytest resets the stream at the end, which causes trouble.
                # Since the stream is reset automatically after every output,
                # we don't need this.
initialise.atexit_done = True
try:
init(strip=False)
except Exception:
# Colorama fails with initializing under vim and is buggy in
# version 0.3.6.
pass
_inited = True
except ImportError:
class Fore(object):
RED = ''
GREEN = ''
YELLOW = ''
MAGENTA = ''
RESET = ''
NOTICE = object()
WARNING = object()
SPEED = object()
enable_speed = False
enable_warning = False
enable_notice = False
# callback, interface: level, str
debug_function = None
_debug_indent = 0
_start_time = time.time()
def reset_time():
global _start_time, _debug_indent
_start_time = time.time()
_debug_indent = 0
def increase_indent(func):
"""Decorator for makin """
def wrapper(*args, **kwargs):
global _debug_indent
_debug_indent += 1
try:
return func(*args, **kwargs)
finally:
_debug_indent -= 1
return wrapper
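# Illustrative usage (a sketch, not from this file):
#   @increase_indent
#   def nested_lookup():
#       dbg('runs with one extra level of indentation')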
def dbg(message, *args, **kwargs):
""" Looks at the stack, to see if a debug message should be printed. """
    # Python 2 compatibility, because it doesn't support keyword-only arguments with defaults
color = kwargs.pop('color', 'GREEN')
assert color
if debug_function and enable_notice:
i = ' ' * _debug_indent
_lazy_colorama_init()
debug_function(color, i + 'dbg: ' + message % tuple(u(repr(a)) for a in args))
def warning(message, *args, **kwargs):
format = kwargs.pop('format', True)
assert not kwargs
if debug_function and enable_warning:
i = ' ' * _debug_indent
if format:
message = message % tuple(u(repr(a)) for a in args)
debug_function('RED', i + 'warning: ' + message)
def speed(name):
if debug_function and enable_speed:
now = time.time()
i = ' ' * _debug_indent
debug_function('YELLOW', i + 'speed: ' + '%s %s' % (name, now - _start_time))
def print_to_stdout(color, str_out):
"""
The default debug function that prints to standard out.
:param str color: A string that is an attribute of ``colorama.Fore``.
"""
col = getattr(Fore, color)
_lazy_colorama_init()
if not is_py3:
str_out = str_out.encode(encoding, 'replace')
print(col + str_out + Fore.RESET)
# debug_function = print_to_stdout
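# Illustrative activation (a sketch; the names are the module globals above):
#   import jedi.debug
#   jedi.debug.debug_function = jedi.debug.print_to_stdout
#   jedi.debug.enable_notice = True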
| mit |
boundarydevices/android_external_chromium_org | tools/perf/page_sets/startup_pages.py | 9 | 1474 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# pylint: disable=W0401,W0614
from telemetry.page.actions.all_page_actions import *
from telemetry.page import page as page_module
from telemetry.page import page_set as page_set_module
class StartedPage(page_module.Page):
def __init__(self, url, startup_url, page_set):
super(StartedPage, self).__init__(url=url, page_set=page_set)
self.archive_data_file = 'data/startup_pages.json'
self.startup_url = startup_url
def RunNavigateSteps(self, action_runner):
action_runner.Wait(10)
class StartupPagesPageSet(page_set_module.PageSet):
""" Pages for testing starting Chrome with a URL.
Note that this file can't be used with record_wpr, since record_wpr requires
a true navigate step, which we do not want for startup testing. Instead use
record_wpr startup_pages_record to record data for this test.
"""
def __init__(self):
super(StartupPagesPageSet, self).__init__(
archive_data_file='data/startup_pages.json',
bucket=page_set_module.PARTNER_BUCKET)
# Typical page.
self.AddPage(StartedPage('about:blank', 'about:blank', self))
# Typical page.
self.AddPage(StartedPage('http://bbc.co.uk', 'http://bbc.co.uk', self))
# Horribly complex page - stress test!
self.AddPage(StartedPage('http://kapook.com', 'http://kapook.com', self))
| bsd-3-clause |
kuiwei/edx-platform | lms/djangoapps/courseware/tests/test_view_authentication.py | 12 | 17212 | import datetime
import pytz
from mock import patch
from django.core.urlresolvers import reverse
from django.test.utils import override_settings
# Need access to internal func to put users in the right group
from courseware.access import has_access
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from student.tests.factories import UserFactory, CourseEnrollmentFactory
from courseware.tests.helpers import LoginEnrollmentTestCase
from courseware.tests.modulestore_config import TEST_DATA_MIXED_MODULESTORE
from courseware.tests.factories import (
BetaTesterFactory,
StaffFactory,
GlobalStaffFactory,
InstructorFactory,
OrgStaffFactory,
OrgInstructorFactory,
)
from xmodule.modulestore.django import modulestore
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class TestViewAuth(ModuleStoreTestCase, LoginEnrollmentTestCase):
"""
Check that view authentication works properly.
"""
ACCOUNT_INFO = [('[email protected]', 'foo'), ('[email protected]', 'foo')]
@staticmethod
def _reverse_urls(names, course):
"""
Reverse a list of course urls.
`names` is a list of URL names that correspond to sections in a course.
`course` is the instance of CourseDescriptor whose section URLs are to be returned.
Returns a list URLs corresponding to section in the passed in course.
"""
return [reverse(name, kwargs={'course_id': course.id.to_deprecated_string()})
for name in names]
def _check_non_staff_light(self, course):
"""
Check that non-staff have access to light urls.
`course` is an instance of CourseDescriptor.
"""
urls = [reverse('about_course', kwargs={'course_id': course.id.to_deprecated_string()}),
reverse('courses')]
for url in urls:
self.assert_request_status_code(200, url)
def _check_non_staff_dark(self, course):
"""
Check that non-staff don't have access to dark urls.
"""
names = ['courseware', 'instructor_dashboard', 'progress']
urls = self._reverse_urls(names, course)
urls.extend([
reverse('book', kwargs={'course_id': course.id.to_deprecated_string(),
'book_index': index})
for index, __ in enumerate(course.textbooks)
])
for url in urls:
self.assert_request_status_code(404, url)
def _check_staff(self, course):
"""
Check that access is right for staff in course.
"""
names = ['about_course', 'instructor_dashboard', 'progress']
urls = self._reverse_urls(names, course)
urls.extend([
reverse('book', kwargs={'course_id': course.id.to_deprecated_string(),
'book_index': index})
for index in xrange(len(course.textbooks))
])
for url in urls:
self.assert_request_status_code(200, url)
# The student progress tab is not accessible to a student
# before launch, so the instructor view-as-student feature
# should return a 404 as well.
# TODO (vshnayder): If this is not the behavior we want, will need
# to make access checking smarter and understand both the effective
# user (the student), and the requesting user (the prof)
url = reverse(
'student_progress',
kwargs={
'course_id': course.id.to_deprecated_string(),
'student_id': self.enrolled_user.id,
}
)
self.assert_request_status_code(404, url)
# The courseware url should redirect, not 200
url = self._reverse_urls(['courseware'], course)[0]
self.assert_request_status_code(302, url)
def login(self, user):
return super(TestViewAuth, self).login(user.email, 'test')
def setUp(self):
super(TestViewAuth, self).setUp()
self.course = CourseFactory.create(number='999', display_name='Robot_Super_Course')
self.courseware_chapter = ItemFactory.create(display_name='courseware')
self.overview_chapter = ItemFactory.create(
parent_location=self.course.location,
display_name='Super Overview'
)
self.welcome_section = ItemFactory.create(
parent_location=self.overview_chapter.location,
display_name='Super Welcome'
)
self.welcome_unit = ItemFactory.create(
parent_location=self.welcome_section.location,
display_name='Super Unit'
)
self.course = modulestore().get_course(self.course.id)
self.test_course = CourseFactory.create(org=self.course.id.org)
self.other_org_course = CourseFactory.create(org='Other_Org_Course')
self.sub_courseware_chapter = ItemFactory.create(
parent_location=self.test_course.location,
display_name='courseware'
)
self.sub_overview_chapter = ItemFactory.create(
parent_location=self.sub_courseware_chapter.location,
display_name='Overview'
)
self.sub_welcome_section = ItemFactory.create(
parent_location=self.sub_overview_chapter.location,
display_name='Welcome'
)
self.sub_welcome_unit = ItemFactory.create(
parent_location=self.sub_welcome_section.location,
display_name='New Unit'
)
self.test_course = modulestore().get_course(self.test_course.id)
self.global_staff_user = GlobalStaffFactory()
self.unenrolled_user = UserFactory(last_name="Unenrolled")
self.enrolled_user = UserFactory(last_name="Enrolled")
CourseEnrollmentFactory(user=self.enrolled_user, course_id=self.course.id)
CourseEnrollmentFactory(user=self.enrolled_user, course_id=self.test_course.id)
self.staff_user = StaffFactory(course_key=self.course.id)
self.instructor_user = InstructorFactory(course_key=self.course.id)
self.org_staff_user = OrgStaffFactory(course_key=self.course.id)
self.org_instructor_user = OrgInstructorFactory(course_key=self.course.id)
def test_redirection_unenrolled(self):
"""
Verify unenrolled student is redirected to the 'about' section of the chapter
instead of the 'Welcome' section after clicking on the courseware tab.
"""
self.login(self.unenrolled_user)
response = self.client.get(reverse('courseware',
kwargs={'course_id': self.course.id.to_deprecated_string()}))
self.assertRedirects(
response,
reverse(
'about_course',
args=[self.course.id.to_deprecated_string()]
)
)
def test_redirection_enrolled(self):
"""
Verify enrolled student is redirected to the 'Welcome' section of
the chapter after clicking on the courseware tab.
"""
self.login(self.enrolled_user)
response = self.client.get(
reverse(
'courseware',
kwargs={'course_id': self.course.id.to_deprecated_string()}
)
)
self.assertRedirects(
response,
reverse(
'courseware_section',
kwargs={'course_id': self.course.id.to_deprecated_string(),
'chapter': self.overview_chapter.url_name,
'section': self.welcome_section.url_name}
)
)
def test_instructor_page_access_nonstaff(self):
"""
Verify non-staff cannot load the instructor
dashboard, the grade views, and student profile pages.
"""
self.login(self.enrolled_user)
urls = [reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()}),
reverse('instructor_dashboard', kwargs={'course_id': self.test_course.id.to_deprecated_string()})]
# Shouldn't be able to get to the instructor pages
for url in urls:
self.assert_request_status_code(404, url)
def test_staff_course_access(self):
"""
        Verify staff can load the instructor dashboard, the grade views,
and student profile pages for their course.
"""
self.login(self.staff_user)
# Now should be able to get to self.course, but not self.test_course
url = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()})
self.assert_request_status_code(200, url)
url = reverse('instructor_dashboard', kwargs={'course_id': self.test_course.id.to_deprecated_string()})
self.assert_request_status_code(404, url)
def test_instructor_course_access(self):
"""
Verify instructor can load the instructor dashboard, the grade views,
and student profile pages for their course.
"""
self.login(self.instructor_user)
# Now should be able to get to self.course, but not self.test_course
url = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()})
self.assert_request_status_code(200, url)
url = reverse('instructor_dashboard', kwargs={'course_id': self.test_course.id.to_deprecated_string()})
self.assert_request_status_code(404, url)
def test_org_staff_access(self):
"""
Verify org staff can load the instructor dashboard, the grade views,
and student profile pages for course in their org.
"""
self.login(self.org_staff_user)
url = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()})
self.assert_request_status_code(200, url)
url = reverse('instructor_dashboard', kwargs={'course_id': self.test_course.id.to_deprecated_string()})
self.assert_request_status_code(200, url)
url = reverse('instructor_dashboard', kwargs={'course_id': self.other_org_course.id.to_deprecated_string()})
self.assert_request_status_code(404, url)
def test_org_instructor_access(self):
"""
Verify org instructor can load the instructor dashboard, the grade views,
and student profile pages for course in their org.
"""
self.login(self.org_instructor_user)
url = reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()})
self.assert_request_status_code(200, url)
url = reverse('instructor_dashboard', kwargs={'course_id': self.test_course.id.to_deprecated_string()})
self.assert_request_status_code(200, url)
url = reverse('instructor_dashboard', kwargs={'course_id': self.other_org_course.id.to_deprecated_string()})
self.assert_request_status_code(404, url)
def test_global_staff_access(self):
"""
Verify the global staff user can access any course.
"""
self.login(self.global_staff_user)
# and now should be able to load both
urls = [reverse('instructor_dashboard', kwargs={'course_id': self.course.id.to_deprecated_string()}),
reverse('instructor_dashboard', kwargs={'course_id': self.test_course.id.to_deprecated_string()})]
for url in urls:
self.assert_request_status_code(200, url)
@patch.dict('courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_dark_launch_enrolled_student(self):
"""
Make sure that before course start, students can't access course
pages.
"""
# Make courses start in the future
now = datetime.datetime.now(pytz.UTC)
tomorrow = now + datetime.timedelta(days=1)
self.course.start = tomorrow
self.test_course.start = tomorrow
self.course = self.update_course(self.course, self.user.id)
self.test_course = self.update_course(self.test_course, self.user.id)
self.assertFalse(self.course.has_started())
self.assertFalse(self.test_course.has_started())
# First, try with an enrolled student
self.login(self.enrolled_user)
# shouldn't be able to get to anything except the light pages
self._check_non_staff_light(self.course)
self._check_non_staff_dark(self.course)
self._check_non_staff_light(self.test_course)
self._check_non_staff_dark(self.test_course)
@patch.dict('courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_dark_launch_instructor(self):
"""
Make sure that before course start instructors can access the
page for their course.
"""
now = datetime.datetime.now(pytz.UTC)
tomorrow = now + datetime.timedelta(days=1)
self.course.start = tomorrow
self.test_course.start = tomorrow
self.course = self.update_course(self.course, self.user.id)
self.test_course = self.update_course(self.test_course, self.user.id)
self.login(self.instructor_user)
# Enroll in the classes---can't see courseware otherwise.
self.enroll(self.course, True)
self.enroll(self.test_course, True)
        # should now be able to get to everything for self.course,
        # while self.test_course is still limited to the non-staff views
self._check_non_staff_light(self.test_course)
self._check_non_staff_dark(self.test_course)
self._check_staff(self.course)
@patch.dict('courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_dark_launch_global_staff(self):
"""
Make sure that before course start staff can access
course pages.
"""
now = datetime.datetime.now(pytz.UTC)
tomorrow = now + datetime.timedelta(days=1)
self.course.start = tomorrow
self.test_course.start = tomorrow
self.course = self.update_course(self.course, self.user.id)
self.test_course = self.update_course(self.test_course, self.user.id)
self.login(self.global_staff_user)
self.enroll(self.course, True)
self.enroll(self.test_course, True)
# and now should be able to load both
self._check_staff(self.course)
self._check_staff(self.test_course)
@patch.dict('courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_enrollment_period(self):
"""
Check that enrollment periods work.
"""
# Make courses start in the future
now = datetime.datetime.now(pytz.UTC)
tomorrow = now + datetime.timedelta(days=1)
nextday = tomorrow + datetime.timedelta(days=1)
yesterday = now - datetime.timedelta(days=1)
# self.course's enrollment period hasn't started
self.course.enrollment_start = tomorrow
self.course.enrollment_end = nextday
        # test_course's enrollment period has already started
self.test_course.enrollment_start = yesterday
self.test_course.enrollment_end = tomorrow
self.course = self.update_course(self.course, self.user.id)
self.test_course = self.update_course(self.test_course, self.user.id)
        # First, try with an unenrolled student
self.login(self.unenrolled_user)
self.assertFalse(self.enroll(self.course))
self.assertTrue(self.enroll(self.test_course))
self.logout()
self.login(self.instructor_user)
self.assertTrue(self.enroll(self.course))
        # staff can enroll even though the enrollment period has not started
self.login(self.global_staff_user)
self.assertTrue(self.enroll(self.course))
@override_settings(MODULESTORE=TEST_DATA_MIXED_MODULESTORE)
class TestBetatesterAccess(ModuleStoreTestCase):
def setUp(self):
now = datetime.datetime.now(pytz.UTC)
tomorrow = now + datetime.timedelta(days=1)
self.course = CourseFactory(days_early_for_beta=2, start=tomorrow)
self.content = ItemFactory(parent=self.course)
self.normal_student = UserFactory()
self.beta_tester = BetaTesterFactory(course_key=self.course.id)
@patch.dict('courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_course_beta_period(self):
"""
Check that beta-test access works for courses.
"""
self.assertFalse(self.course.has_started())
# student user shouldn't see it
self.assertFalse(has_access(self.normal_student, 'load', self.course))
        # but the beta tester should see it
self.assertTrue(has_access(self.beta_tester, 'load', self.course))
@patch.dict('courseware.access.settings.FEATURES', {'DISABLE_START_DATES': False})
def test_content_beta_period(self):
"""
Check that beta-test access works for content.
"""
# student user shouldn't see it
self.assertFalse(has_access(self.normal_student, 'load', self.content, self.course.id))
        # but the beta tester should see it
self.assertTrue(has_access(self.beta_tester, 'load', self.content, self.course.id))
| agpl-3.0 |
rxwen/local_wget_site_converter | bs4/builder/_lxml.py | 36 | 6297 | __all__ = [
'LXMLTreeBuilderForXML',
'LXMLTreeBuilder',
]
from StringIO import StringIO
import collections
from lxml import etree
from bs4.element import Comment, Doctype, NamespacedAttribute
from bs4.builder import (
FAST,
HTML,
HTMLTreeBuilder,
PERMISSIVE,
TreeBuilder,
XML)
from bs4.dammit import UnicodeDammit
LXML = 'lxml'
class LXMLTreeBuilderForXML(TreeBuilder):
DEFAULT_PARSER_CLASS = etree.XMLParser
is_xml = True
# Well, it's permissive by XML parser standards.
features = [LXML, XML, FAST, PERMISSIVE]
CHUNK_SIZE = 512
@property
def default_parser(self):
# This can either return a parser object or a class, which
# will be instantiated with default arguments.
return etree.XMLParser(target=self, strip_cdata=False, recover=True)
def __init__(self, parser=None, empty_element_tags=None):
if empty_element_tags is not None:
self.empty_element_tags = set(empty_element_tags)
if parser is None:
# Use the default parser.
parser = self.default_parser
if isinstance(parser, collections.Callable):
# Instantiate the parser with default arguments
parser = parser(target=self, strip_cdata=False)
self.parser = parser
self.soup = None
self.nsmaps = None
def _getNsTag(self, tag):
# Split the namespace URL out of a fully-qualified lxml tag
# name. Copied from lxml's src/lxml/sax.py.
if tag[0] == '{':
return tuple(tag[1:].split('}', 1))
else:
return (None, tag)
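    # Illustrative example (not part of the original module): lxml reports
    # namespaced tags in Clark notation, so
    #     self._getNsTag('{http://www.w3.org/1999/xhtml}html')
    # returns ('http://www.w3.org/1999/xhtml', 'html'), while a plain
    # 'html' returns (None, 'html').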
def prepare_markup(self, markup, user_specified_encoding=None,
document_declared_encoding=None):
"""
        :return: A 4-tuple (markup, original encoding, encoding
            declared within markup, whether replacement characters
            had to be used during decoding).
"""
if isinstance(markup, unicode):
return markup, None, None, False
try_encodings = [user_specified_encoding, document_declared_encoding]
dammit = UnicodeDammit(markup, try_encodings, is_html=True)
return (dammit.markup, dammit.original_encoding,
dammit.declared_html_encoding,
dammit.contains_replacement_characters)
def feed(self, markup):
if isinstance(markup, basestring):
markup = StringIO(markup)
# Call feed() at least once, even if the markup is empty,
# or the parser won't be initialized.
data = markup.read(self.CHUNK_SIZE)
self.parser.feed(data)
while data != '':
# Now call feed() on the rest of the data, chunk by chunk.
data = markup.read(self.CHUNK_SIZE)
if data != '':
self.parser.feed(data)
self.parser.close()
def close(self):
self.nsmaps = None
def start(self, name, attrs, nsmap={}):
# Make sure attrs is a mutable dict--lxml may send an immutable dictproxy.
attrs = dict(attrs)
nsprefix = None
# Invert each namespace map as it comes in.
        if len(nsmap) == 0 and self.nsmaps is not None:
# There are no new namespaces for this tag, but namespaces
# are in play, so we need a separate tag stack to know
# when they end.
self.nsmaps.append(None)
elif len(nsmap) > 0:
# A new namespace mapping has come into play.
if self.nsmaps is None:
self.nsmaps = []
inverted_nsmap = dict((value, key) for key, value in nsmap.items())
self.nsmaps.append(inverted_nsmap)
# Also treat the namespace mapping as a set of attributes on the
# tag, so we can recreate it later.
attrs = attrs.copy()
for prefix, namespace in nsmap.items():
attribute = NamespacedAttribute(
"xmlns", prefix, "http://www.w3.org/2000/xmlns/")
attrs[attribute] = namespace
namespace, name = self._getNsTag(name)
if namespace is not None:
for inverted_nsmap in reversed(self.nsmaps):
if inverted_nsmap is not None and namespace in inverted_nsmap:
nsprefix = inverted_nsmap[namespace]
break
self.soup.handle_starttag(name, namespace, nsprefix, attrs)
def end(self, name):
self.soup.endData()
completed_tag = self.soup.tagStack[-1]
namespace, name = self._getNsTag(name)
nsprefix = None
if namespace is not None:
for inverted_nsmap in reversed(self.nsmaps):
if inverted_nsmap is not None and namespace in inverted_nsmap:
nsprefix = inverted_nsmap[namespace]
break
self.soup.handle_endtag(name, nsprefix)
        if self.nsmaps is not None:
# This tag, or one of its parents, introduced a namespace
# mapping, so pop it off the stack.
self.nsmaps.pop()
if len(self.nsmaps) == 0:
# Namespaces are no longer in play, so don't bother keeping
# track of the namespace stack.
self.nsmaps = None
def pi(self, target, data):
pass
def data(self, content):
self.soup.handle_data(content)
def doctype(self, name, pubid, system):
self.soup.endData()
doctype = Doctype.for_name_and_ids(name, pubid, system)
self.soup.object_was_parsed(doctype)
def comment(self, content):
"Handle comments as Comment objects."
self.soup.endData()
self.soup.handle_data(content)
self.soup.endData(Comment)
def test_fragment_to_document(self, fragment):
"""See `TreeBuilder`."""
return u'<?xml version="1.0" encoding="utf-8"?>\n%s' % fragment
class LXMLTreeBuilder(HTMLTreeBuilder, LXMLTreeBuilderForXML):
features = [LXML, HTML, FAST, PERMISSIVE]
is_xml = False
@property
def default_parser(self):
return etree.HTMLParser
def feed(self, markup):
self.parser.feed(markup)
self.parser.close()
def test_fragment_to_document(self, fragment):
"""See `TreeBuilder`."""
return u'<html><body>%s</body></html>' % fragment
| mit |
cswiercz/sympy | sympy/integrals/tests/test_failing_integrals.py | 71 | 3488 | # A collection of failing integrals from the issues.
from __future__ import division
from sympy import (
integrate, Integral, exp, oo, pi, sign, sqrt, sin, cos,
tan, S, log, gamma, sinh,
)
from sympy.utilities.pytest import XFAIL, SKIP, slow, skip, ON_TRAVIS
from sympy.abc import x, k, c, y, R, b, h, a, m
import signal
class TimeOutError(Exception):
pass
def timeout(signum, frame, time):
raise TimeOutError("Timed out after %d seconds" % time)
def run_with_timeout(test, time):
    # Set the signal handler and an alarm that fires after `time` seconds
signal.signal(signal.SIGALRM, lambda s, f: timeout(s, f, time))
signal.alarm(time)
r = eval(test)
signal.alarm(0) # Disable the alarm
return r
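# Illustrative usage (a sketch, not part of the original test module); the
# integral below is hypothetical and only shows the calling convention:
#
#     result = run_with_timeout("integrate(sin(x)/x, (x, 0, oo))", 5)
#
# Note that run_with_timeout passes the string to eval(), so it must be a
# valid Python expression in this module's namespace, and the SIGALRM-based
# timeout only works on POSIX systems.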
@SKIP("Too slow for @slow")
@XFAIL
def test_issue_3880():
# integrate_hyperexponential(Poly(t*2*(1 - t0**2)*t0*(x**3 + x**2), t), Poly((1 + t0**2)**2*2*(x**2 + x + 1), t), [Poly(1, x), Poly(1 + t0**2, t0), Poly(t, t)], [x, t0, t], [exp, tan])
assert not integrate(exp(x)*cos(2*x)*sin(2*x) * (x**3 + x**2)/(2*(x**2 + x + 1)), x).has(Integral)
@XFAIL
def test_issue_4212():
assert not integrate(sign(x), x).has(Integral)
@XFAIL
def test_issue_4326():
assert integrate(((h*(x - R + b))/b)*sqrt(R**2 - x**2), (x, R - b, R)).has(Integral)
@XFAIL
def test_issue_4491():
assert not integrate(x*sqrt(x**2 + 2*x + 4), x).has(Integral)
@XFAIL
@slow
def test_issue_4511():
# This works, but gives a complicated answer. The correct answer is x - cos(x).
# The last one is what Maple gives. It is also quite slow.
assert integrate(cos(x)**2 / (1 - sin(x))) in [x - cos(x), 1 - cos(x) + x,
-2/(tan((S(1)/2)*x)**2 + 1) + x]
@XFAIL
def test_issue_4514():
# The correct answer is 2*sin(x)
    assert not integrate(sin(2*x)/sin(x)).has(Integral)
@XFAIL
def test_issue_4525():
# Warning: takes a long time
assert not integrate((x**m * (1 - x)**n * (a + b*x + c*x**2))/(1 + x**2), (x, 0, 1)).has(Integral)
@XFAIL
@slow
def test_issue_4540():
if ON_TRAVIS:
skip("Too slow for travis.")
# Note, this integral is probably nonelementary
assert not integrate(
(sin(1/x) - x*exp(x)) /
((-sin(1/x) + x*exp(x))*x + x*sin(1/x)), x).has(Integral)
@XFAIL
def test_issue_4551():
assert integrate(1/(x*sqrt(1 - x**2)), x).has(Integral)
@XFAIL
def test_issue_4737a():
# Implementation of Si()
assert integrate(sin(x)/x, x).has(Integral)
@XFAIL
def test_issue_1638b():
    assert integrate(sin(x)/x, (x, -oo, oo)) == pi
@XFAIL
@slow
def test_issue_4891():
# Requires the hypergeometric function.
assert not integrate(cos(x)**y, x).has(Integral)
@XFAIL
@slow
def test_issue_1796a():
assert not integrate(exp(2*b*x)*exp(-a*x**2), x).has(Integral)
@XFAIL
def test_issue_4895b():
assert not integrate(exp(2*b*x)*exp(-a*x**2), (x, -oo, 0)).has(Integral)
@XFAIL
def test_issue_4895c():
assert not integrate(exp(2*b*x)*exp(-a*x**2), (x, -oo, oo)).has(Integral)
@XFAIL
def test_issue_4895d():
assert not integrate(exp(2*b*x)*exp(-a*x**2), (x, 0, oo)).has(Integral)
@XFAIL
@slow
def test_issue_4941():
if ON_TRAVIS:
skip("Too slow for travis.")
assert not integrate(sqrt(1 + sinh(x/20)**2), (x, -25, 25)).has(Integral)
@XFAIL
def test_issue_4992():
# Nonelementary integral. Requires hypergeometric/Meijer-G handling.
assert not integrate(log(x) * x**(k - 1) * exp(-x) / gamma(k), (x, 0, oo)).has(Integral)
| bsd-3-clause |
android-ia/platform_external_chromium_org | native_client_sdk/src/build_tools/tests/test_server.py | 170 | 2165 | # Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import multiprocessing
import os
import SimpleHTTPServer
class LocalHTTPServer(object):
"""Class to start a local HTTP server as a child process."""
def __init__(self, serve_dir):
parent_conn, child_conn = multiprocessing.Pipe()
self.process = multiprocessing.Process(target=_HTTPServerProcess,
args=(child_conn, serve_dir))
self.process.start()
if parent_conn.poll(10): # wait 10 seconds
self.port = parent_conn.recv()
else:
raise Exception('Unable to launch HTTP server.')
self.conn = parent_conn
def Shutdown(self):
"""Send a message to the child HTTP server process and wait for it to
finish."""
self.conn.send(False)
self.process.join()
def GetURL(self, rel_url):
"""Get the full url for a file on the local HTTP server.
Args:
rel_url: A URL fragment to convert to a full URL. For example,
GetURL('foobar.baz') -> 'http://localhost:1234/foobar.baz'
"""
return 'http://localhost:%d/%s' % (self.port, rel_url)
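  # Illustrative usage (a sketch; the directory path is hypothetical):
  #
  #     server = LocalHTTPServer('/path/to/serve_dir')
  #     url = server.GetURL('index.html')  # http://localhost:<port>/index.html
  #     ... issue requests against url ...
  #     server.Shutdown()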
class QuietHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def log_message(self, msg_format, *args):
pass
def _HTTPServerProcess(conn, serve_dir):
"""Run a local httpserver with a randomly-chosen port.
This function assumes it is run as a child process using multiprocessing.
Args:
conn: A connection to the parent process. The child process sends
the local port, and waits for a message from the parent to
stop serving.
serve_dir: The directory to serve. All files are accessible through
http://localhost:<port>/path/to/filename.
"""
import BaseHTTPServer
os.chdir(serve_dir)
httpd = BaseHTTPServer.HTTPServer(('', 0), QuietHTTPRequestHandler)
conn.send(httpd.server_address[1]) # the chosen port number
httpd.timeout = 0.5 # seconds
running = True
while running:
httpd.handle_request()
if conn.poll():
running = conn.recv()
conn.close()
| bsd-3-clause |
waseem18/oh-mainline | vendor/packages/PyYaml/tests/lib/test_canonical.py | 60 | 1135 |
import yaml, canonical
def test_canonical_scanner(canonical_filename, verbose=False):
data = open(canonical_filename, 'rb').read()
tokens = list(yaml.canonical_scan(data))
assert tokens, tokens
if verbose:
for token in tokens:
print token
test_canonical_scanner.unittest = ['.canonical']
def test_canonical_parser(canonical_filename, verbose=False):
data = open(canonical_filename, 'rb').read()
events = list(yaml.canonical_parse(data))
assert events, events
if verbose:
for event in events:
print event
test_canonical_parser.unittest = ['.canonical']
def test_canonical_error(data_filename, canonical_filename, verbose=False):
data = open(data_filename, 'rb').read()
try:
output = list(yaml.canonical_load_all(data))
except yaml.YAMLError, exc:
if verbose:
print exc
else:
raise AssertionError("expected an exception")
test_canonical_error.unittest = ['.data', '.canonical']
test_canonical_error.skip = ['.empty']
if __name__ == '__main__':
import test_appliance
test_appliance.run(globals())
| agpl-3.0 |
Achuth17/scikit-learn | sklearn/neighbors/setup.py | 308 | 1219 | import os
def configuration(parent_package='', top_path=None):
import numpy
from numpy.distutils.misc_util import Configuration
config = Configuration('neighbors', parent_package, top_path)
libraries = []
if os.name == 'posix':
libraries.append('m')
config.add_extension('ball_tree',
sources=['ball_tree.c'],
include_dirs=[numpy.get_include()],
libraries=libraries)
config.add_extension('kd_tree',
sources=['kd_tree.c'],
include_dirs=[numpy.get_include()],
libraries=libraries)
config.add_extension('dist_metrics',
sources=['dist_metrics.c'],
include_dirs=[numpy.get_include(),
os.path.join(numpy.get_include(),
'numpy')],
libraries=libraries)
config.add_extension('typedefs',
sources=['typedefs.c'],
include_dirs=[numpy.get_include()],
libraries=libraries)
return config
| bsd-3-clause |
stanxii/ngb | node.js/app/bower_components/socket.io/support/expresso/deps/jscoverage/js/build/win32/pgomerge.py | 79 | 1628 | #!/usr/bin/python
# Usage: pgomerge.py <binary basename> <dist/bin>
# Gathers .pgc files from dist/bin and merges them into
# $PWD/$basename.pgd using pgomgr, then deletes them.
# No errors if any of these files don't exist.
import sys, os, os.path, subprocess
if not sys.platform == "win32":
raise Exception("This script was only meant for Windows.")
def MergePGOFiles(basename, pgddir, pgcdir):
"""Merge pgc files produced from an instrumented binary
into the pgd file for the second pass of profile-guided optimization
with MSVC. |basename| is the name of the DLL or EXE without the
extension. |pgddir| is the path that contains <basename>.pgd
(should be the objdir it was built in). |pgcdir| is the path
containing basename!N.pgc files, which is probably dist/bin.
Calls pgomgr to merge each pgc file into the pgd, then deletes
the pgc files."""
if not os.path.isdir(pgddir) or not os.path.isdir(pgcdir):
return
pgdfile = os.path.abspath(os.path.join(pgddir, basename + ".pgd"))
if not os.path.isfile(pgdfile):
return
for file in os.listdir(pgcdir):
if file.startswith(basename) and file.endswith(".pgc"):
try:
pgcfile = os.path.normpath(os.path.join(pgcdir, file))
subprocess.call(['pgomgr', '-merge',
pgcfile,
pgdfile])
os.remove(pgcfile)
except OSError:
pass
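# Illustrative call (a sketch; the binary name is hypothetical): merge any
# xul!N.pgc files found in dist/bin into ./xul.pgd, deleting them afterwards:
#
#     MergePGOFiles('xul', os.getcwd(), 'dist/bin')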
if __name__ == '__main__':
if len(sys.argv) != 3:
print >>sys.stderr, "Usage: pgomerge.py <binary basename> <dist/bin>"
sys.exit(1)
MergePGOFiles(sys.argv[1], os.getcwd(), sys.argv[2])
| mit |
raajitr/django_hangman | env/lib/python2.7/site-packages/rest_framework/decorators.py | 20 | 4560 | """
The most important decorator in this module is `@api_view`, which is used
for writing function-based views with REST framework.
There are also various decorators for setting the API policies on function
based views, as well as the `@detail_route` and `@list_route` decorators, which are
used to annotate methods on viewsets that should be included by routers.
"""
from __future__ import unicode_literals
import types
from django.utils import six
from rest_framework.views import APIView
def api_view(http_method_names=None, exclude_from_schema=False):
"""
Decorator that converts a function-based view into an APIView subclass.
Takes a list of allowed methods for the view as an argument.
"""
http_method_names = ['GET'] if (http_method_names is None) else http_method_names
def decorator(func):
WrappedAPIView = type(
six.PY3 and 'WrappedAPIView' or b'WrappedAPIView',
(APIView,),
{'__doc__': func.__doc__}
)
# Note, the above allows us to set the docstring.
# It is the equivalent of:
#
# class WrappedAPIView(APIView):
# pass
# WrappedAPIView.__doc__ = func.doc <--- Not possible to do this
# api_view applied without (method_names)
assert not(isinstance(http_method_names, types.FunctionType)), \
'@api_view missing list of allowed HTTP methods'
# api_view applied with eg. string instead of list of strings
assert isinstance(http_method_names, (list, tuple)), \
'@api_view expected a list of strings, received %s' % type(http_method_names).__name__
allowed_methods = set(http_method_names) | set(('options',))
WrappedAPIView.http_method_names = [method.lower() for method in allowed_methods]
def handler(self, *args, **kwargs):
return func(*args, **kwargs)
for method in http_method_names:
setattr(WrappedAPIView, method.lower(), handler)
WrappedAPIView.__name__ = func.__name__
WrappedAPIView.__module__ = func.__module__
WrappedAPIView.renderer_classes = getattr(func, 'renderer_classes',
APIView.renderer_classes)
WrappedAPIView.parser_classes = getattr(func, 'parser_classes',
APIView.parser_classes)
WrappedAPIView.authentication_classes = getattr(func, 'authentication_classes',
APIView.authentication_classes)
WrappedAPIView.throttle_classes = getattr(func, 'throttle_classes',
APIView.throttle_classes)
WrappedAPIView.permission_classes = getattr(func, 'permission_classes',
APIView.permission_classes)
WrappedAPIView.exclude_from_schema = exclude_from_schema
return WrappedAPIView.as_view()
return decorator
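# Illustrative usage (a sketch, not part of this module; the view name and
# response body are hypothetical, and Response is assumed to be imported
# from rest_framework.response):
#
#     @api_view(['GET', 'POST'])
#     def hello(request):
#         """A function-based view converted into an APIView subclass."""
#         return Response({'message': 'hello'})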
def renderer_classes(renderer_classes):
def decorator(func):
func.renderer_classes = renderer_classes
return func
return decorator
def parser_classes(parser_classes):
def decorator(func):
func.parser_classes = parser_classes
return func
return decorator
def authentication_classes(authentication_classes):
def decorator(func):
func.authentication_classes = authentication_classes
return func
return decorator
def throttle_classes(throttle_classes):
def decorator(func):
func.throttle_classes = throttle_classes
return func
return decorator
def permission_classes(permission_classes):
def decorator(func):
func.permission_classes = permission_classes
return func
return decorator
def detail_route(methods=None, **kwargs):
"""
Used to mark a method on a ViewSet that should be routed for detail requests.
"""
methods = ['get'] if (methods is None) else methods
def decorator(func):
func.bind_to_methods = methods
func.detail = True
func.kwargs = kwargs
return func
return decorator
def list_route(methods=None, **kwargs):
"""
Used to mark a method on a ViewSet that should be routed for list requests.
"""
methods = ['get'] if (methods is None) else methods
def decorator(func):
func.bind_to_methods = methods
func.detail = False
func.kwargs = kwargs
return func
return decorator
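# Illustrative usage on a ViewSet (a sketch; the viewset and method names
# are hypothetical, and ModelViewSet is assumed to come from
# rest_framework.viewsets):
#
#     class UserViewSet(ModelViewSet):
#         @detail_route(methods=['post'])
#         def set_password(self, request, pk=None):
#             ...
#
#         @list_route()
#         def recent(self, request):
#             ...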
| mit |
DreadPirateRobert/stock_visualiser | stock_visualiser_virtualenv/lib/python3.5/site-packages/pip/vcs/__init__.py | 344 | 12374 | """Handles all VCS (version control) support"""
from __future__ import absolute_import
import errno
import logging
import os
import shutil
import sys
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip.exceptions import BadCommand
from pip.utils import (display_path, backup_dir, call_subprocess,
rmtree, ask_path_exists)
__all__ = ['vcs', 'get_src_requirement']
logger = logging.getLogger(__name__)
class VcsSupport(object):
_registry = {}
schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']
def __init__(self):
# Register more schemes with urlparse for various version control
# systems
urllib_parse.uses_netloc.extend(self.schemes)
# Python >= 2.7.4, 3.3 doesn't have uses_fragment
if getattr(urllib_parse, 'uses_fragment', None):
urllib_parse.uses_fragment.extend(self.schemes)
super(VcsSupport, self).__init__()
def __iter__(self):
return self._registry.__iter__()
@property
def backends(self):
return list(self._registry.values())
@property
def dirnames(self):
return [backend.dirname for backend in self.backends]
@property
def all_schemes(self):
schemes = []
for backend in self.backends:
schemes.extend(backend.schemes)
return schemes
def register(self, cls):
if not hasattr(cls, 'name'):
logger.warning('Cannot register VCS %s', cls.__name__)
return
if cls.name not in self._registry:
self._registry[cls.name] = cls
logger.debug('Registered VCS backend: %s', cls.name)
def unregister(self, cls=None, name=None):
if name in self._registry:
del self._registry[name]
elif cls in self._registry.values():
del self._registry[cls.name]
else:
logger.warning('Cannot unregister because no class or name given')
def get_backend_name(self, location):
"""
Return the name of the version control backend if found at given
location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
"""
for vc_type in self._registry.values():
if vc_type.controls_location(location):
                logger.debug('Determined that %s uses VCS: %s',
location, vc_type.name)
return vc_type.name
return None
def get_backend(self, name):
name = name.lower()
if name in self._registry:
return self._registry[name]
def get_backend_from_location(self, location):
vc_type = self.get_backend_name(location)
if vc_type:
return self.get_backend(vc_type)
return None
vcs = VcsSupport()
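# Illustrative usage of the registry (a sketch; the checkout path is
# hypothetical). Backend classes are expected to call vcs.register(),
# after which:
#
#     backend_cls = vcs.get_backend('git')                          # by name
#     backend_cls = vcs.get_backend_from_location('/path/to/checkout')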
class VersionControl(object):
name = ''
dirname = ''
# List of supported schemes for this Version Control
schemes = ()
def __init__(self, url=None, *args, **kwargs):
self.url = url
super(VersionControl, self).__init__(*args, **kwargs)
def _is_local_repository(self, repo):
"""
posix absolute paths start with os.path.sep,
win32 ones start with drive (like c:\\folder)
"""
drive, tail = os.path.splitdrive(repo)
return repo.startswith(os.path.sep) or drive
# See issue #1083 for why this method was introduced:
# https://github.com/pypa/pip/issues/1083
def translate_egg_surname(self, surname):
# For example, Django has branches of the form "stable/1.7.x".
return surname.replace('/', '_')
def export(self, location):
"""
Export the repository at the url to the destination location
        i.e. only download the files, without vcs information
"""
raise NotImplementedError
def get_url_rev(self):
"""
Returns the correct repository URL and revision by parsing the given
repository URL
"""
error_message = (
"Sorry, '%s' is a malformed VCS url. "
"The format is <vcs>+<protocol>://<url>, "
"e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
)
assert '+' in self.url, error_message % self.url
url = self.url.split('+', 1)[1]
scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
rev = None
if '@' in path:
path, rev = path.rsplit('@', 1)
url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
return url, rev
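    # Illustrative example (not part of the original module): for a URL such
    # as 'git+https://example.com/repo.git@v1.2', get_url_rev() strips the
    # 'git+' prefix and splits off the revision, returning
    # ('https://example.com/repo.git', 'v1.2'); with no '@<rev>' suffix the
    # revision is None.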
def get_info(self, location):
"""
Returns (url, revision), where both are strings
"""
assert not location.rstrip('/').endswith(self.dirname), \
'Bad directory: %s' % location
return self.get_url(location), self.get_revision(location)
def normalize_url(self, url):
"""
Normalize a URL for comparison by unquoting it and removing any
trailing slash.
"""
return urllib_parse.unquote(url).rstrip('/')
def compare_urls(self, url1, url2):
"""
Compare two repo URLs for identity, ignoring incidental differences.
"""
return (self.normalize_url(url1) == self.normalize_url(url2))
def obtain(self, dest):
"""
Called when installing or updating an editable package, takes the
source path of the checkout.
"""
raise NotImplementedError
def switch(self, dest, url, rev_options):
"""
Switch the repo at ``dest`` to point to ``URL``.
"""
raise NotImplementedError
def update(self, dest, rev_options):
"""
Update an already-existing repo to the given ``rev_options``.
"""
raise NotImplementedError
def check_version(self, dest, rev_options):
"""
Return True if the version is identical to what exists and
doesn't need to be updated.
"""
raise NotImplementedError
def check_destination(self, dest, url, rev_options, rev_display):
"""
Prepare a location to receive a checkout/clone.
Return True if the location is ready for (and requires) a
checkout/clone, False otherwise.
"""
checkout = True
prompt = False
if os.path.exists(dest):
checkout = False
if os.path.exists(os.path.join(dest, self.dirname)):
existing_url = self.get_url(dest)
if self.compare_urls(existing_url, url):
logger.debug(
'%s in %s exists, and has correct URL (%s)',
self.repo_name.title(),
display_path(dest),
url,
)
if not self.check_version(dest, rev_options):
logger.info(
'Updating %s %s%s',
display_path(dest),
self.repo_name,
rev_display,
)
self.update(dest, rev_options)
else:
logger.info(
'Skipping because already up-to-date.')
else:
logger.warning(
'%s %s in %s exists with URL %s',
self.name,
self.repo_name,
display_path(dest),
existing_url,
)
prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
('s', 'i', 'w', 'b'))
else:
logger.warning(
'Directory %s already exists, and is not a %s %s.',
dest,
self.name,
self.repo_name,
)
prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
if prompt:
logger.warning(
'The plan is to install the %s repository %s',
self.name,
url,
)
response = ask_path_exists('What to do? %s' % prompt[0],
prompt[1])
if response == 's':
logger.info(
'Switching %s %s to %s%s',
self.repo_name,
display_path(dest),
url,
rev_display,
)
self.switch(dest, url, rev_options)
elif response == 'i':
# do nothing
pass
elif response == 'w':
logger.warning('Deleting %s', display_path(dest))
rmtree(dest)
checkout = True
elif response == 'b':
dest_dir = backup_dir(dest)
logger.warning(
'Backing up %s to %s', display_path(dest), dest_dir,
)
shutil.move(dest, dest_dir)
checkout = True
elif response == 'a':
sys.exit(-1)
return checkout
def unpack(self, location):
"""
Clean up current location and download the url repository
(and vcs infos) into location
"""
if os.path.exists(location):
rmtree(location)
self.obtain(location)
def get_src_requirement(self, dist, location):
"""
Return a string representing the requirement needed to
redownload the files currently present in location, something
like:
{repository_url}@{revision}#egg={project_name}-{version_identifier}
"""
raise NotImplementedError
def get_url(self, location):
"""
Return the url used at location
Used in get_info or check_destination
"""
raise NotImplementedError
def get_revision(self, location):
"""
Return the current revision of the files at location
Used in get_info
"""
raise NotImplementedError
def run_command(self, cmd, show_stdout=True, cwd=None,
on_returncode='raise',
command_desc=None,
extra_environ=None, spinner=None):
"""
Run a VCS subcommand
This is simply a wrapper around call_subprocess that adds the VCS
command name, and checks that the VCS is available
"""
cmd = [self.name] + cmd
try:
return call_subprocess(cmd, show_stdout, cwd,
on_returncode,
command_desc, extra_environ,
spinner)
except OSError as e:
# errno.ENOENT = no such file or directory
# In other words, the VCS executable isn't available
if e.errno == errno.ENOENT:
raise BadCommand('Cannot find command %r' % self.name)
else:
raise # re-raise exception if a different error occurred
@classmethod
def controls_location(cls, location):
"""
Check if a location is controlled by the vcs.
It is meant to be overridden to implement smarter detection
mechanisms for specific vcs.
"""
logger.debug('Checking in %s for %s (%s)...',
location, cls.dirname, cls.name)
path = os.path.join(location, cls.dirname)
return os.path.exists(path)
def get_src_requirement(dist, location):
version_control = vcs.get_backend_from_location(location)
if version_control:
try:
return version_control().get_src_requirement(dist,
location)
except BadCommand:
logger.warning(
'cannot determine version of editable source in %s '
'(%s command not found in path)',
location,
version_control.name,
)
return dist.as_requirement()
logger.warning(
'cannot determine version of editable source in %s (is not SVN '
'checkout, Git clone, Mercurial clone or Bazaar branch)',
location,
)
return dist.as_requirement()
| mit |
JI007/flasky | app/auth/views.py | 2 | 5996 | from flask import render_template, redirect, request, url_for, flash
from flask.ext.login import login_user, logout_user, login_required, \
current_user
from . import auth
from .. import db
from ..models import User
from ..email import send_email
from .forms import LoginForm, RegistrationForm, ChangePasswordForm,\
PasswordResetRequestForm, PasswordResetForm, ChangeEmailForm
@auth.before_app_request
def before_request():
if current_user.is_authenticated():
current_user.ping()
if not current_user.confirmed \
and request.endpoint[:5] != 'auth.':
return redirect(url_for('auth.unconfirmed'))
@auth.route('/unconfirmed')
def unconfirmed():
if current_user.is_anonymous() or current_user.confirmed:
        return redirect(url_for('main.index'))
return render_template('auth/unconfirmed.html')
@auth.route('/login', methods=['GET', 'POST'])
def login():
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is not None and user.verify_password(form.password.data):
login_user(user, form.remember_me.data)
return redirect(request.args.get('next') or url_for('main.index'))
flash('Invalid username or password.')
return render_template('auth/login.html', form=form)
@auth.route('/logout')
@login_required
def logout():
logout_user()
flash('You have been logged out.')
return redirect(url_for('main.index'))
@auth.route('/register', methods=['GET', 'POST'])
def register():
form = RegistrationForm()
if form.validate_on_submit():
user = User(email=form.email.data,
username=form.username.data,
password=form.password.data)
db.session.add(user)
db.session.commit()
token = user.generate_confirmation_token()
send_email(user.email, 'Confirm Your Account',
'auth/email/confirm', user=user, token=token)
flash('A confirmation email has been sent to you by email.')
return redirect(url_for('auth.login'))
return render_template('auth/register.html', form=form)
@auth.route('/confirm/<token>')
@login_required
def confirm(token):
if current_user.confirmed:
return redirect(url_for('main.index'))
if current_user.confirm(token):
flash('You have confirmed your account. Thanks!')
else:
flash('The confirmation link is invalid or has expired.')
return redirect(url_for('main.index'))
@auth.route('/confirm')
@login_required
def resend_confirmation():
token = current_user.generate_confirmation_token()
send_email(current_user.email, 'Confirm Your Account',
'auth/email/confirm', user=current_user, token=token)
flash('A new confirmation email has been sent to you by email.')
return redirect(url_for('main.index'))
@auth.route('/change-password', methods=['GET', 'POST'])
@login_required
def change_password():
form = ChangePasswordForm()
if form.validate_on_submit():
if current_user.verify_password(form.old_password.data):
current_user.password = form.password.data
db.session.add(current_user)
flash('Your password has been updated.')
return redirect(url_for('main.index'))
else:
flash('Invalid password.')
return render_template("auth/change_password.html", form=form)
@auth.route('/reset', methods=['GET', 'POST'])
def password_reset_request():
if not current_user.is_anonymous():
return redirect(url_for('main.index'))
form = PasswordResetRequestForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user:
token = user.generate_reset_token()
send_email(user.email, 'Reset Your Password',
'auth/email/reset_password',
user=user, token=token,
next=request.args.get('next'))
flash('An email with instructions to reset your password has been '
'sent to you.')
return redirect(url_for('auth.login'))
return render_template('auth/reset_password.html', form=form)
@auth.route('/reset/<token>', methods=['GET', 'POST'])
def password_reset(token):
if not current_user.is_anonymous():
return redirect(url_for('main.index'))
form = PasswordResetForm()
if form.validate_on_submit():
user = User.query.filter_by(email=form.email.data).first()
if user is None:
return redirect(url_for('main.index'))
if user.reset_password(token, form.password.data):
flash('Your password has been updated.')
return redirect(url_for('auth.login'))
else:
return redirect(url_for('main.index'))
return render_template('auth/reset_password.html', form=form)
@auth.route('/change-email', methods=['GET', 'POST'])
@login_required
def change_email_request():
form = ChangeEmailForm()
if form.validate_on_submit():
if current_user.verify_password(form.password.data):
new_email = form.email.data
token = current_user.generate_email_change_token(new_email)
send_email(new_email, 'Confirm your email address',
'auth/email/change_email',
user=current_user, token=token)
flash('An email with instructions to confirm your new email '
'address has been sent to you.')
return redirect(url_for('main.index'))
else:
flash('Invalid email or password.')
return render_template("auth/change_email.html", form=form)
@auth.route('/change-email/<token>')
@login_required
def change_email(token):
if current_user.change_email(token):
flash('Your email address has been updated.')
else:
flash('Invalid request.')
return redirect(url_for('main.index'))
| mit |
kevinmel2000/sl4a | python/src/Lib/plat-mac/Carbon/Folders.py | 81 | 9308 | # Generated from 'Folders.h'
def FOUR_CHAR_CODE(x): return x
true = True
false = False
kOnSystemDisk = -32768L
kOnAppropriateDisk = -32767
kSystemDomain = -32766
kLocalDomain = -32765
kNetworkDomain = -32764
kUserDomain = -32763
kClassicDomain = -32762
kCreateFolder = true
kDontCreateFolder = false
kSystemFolderType = FOUR_CHAR_CODE('macs')
kDesktopFolderType = FOUR_CHAR_CODE('desk')
kSystemDesktopFolderType = FOUR_CHAR_CODE('sdsk')
kTrashFolderType = FOUR_CHAR_CODE('trsh')
kSystemTrashFolderType = FOUR_CHAR_CODE('strs')
kWhereToEmptyTrashFolderType = FOUR_CHAR_CODE('empt')
kPrintMonitorDocsFolderType = FOUR_CHAR_CODE('prnt')
kStartupFolderType = FOUR_CHAR_CODE('strt')
kShutdownFolderType = FOUR_CHAR_CODE('shdf')
kAppleMenuFolderType = FOUR_CHAR_CODE('amnu')
kControlPanelFolderType = FOUR_CHAR_CODE('ctrl')
kSystemControlPanelFolderType = FOUR_CHAR_CODE('sctl')
kExtensionFolderType = FOUR_CHAR_CODE('extn')
kFontsFolderType = FOUR_CHAR_CODE('font')
kPreferencesFolderType = FOUR_CHAR_CODE('pref')
kSystemPreferencesFolderType = FOUR_CHAR_CODE('sprf')
kTemporaryFolderType = FOUR_CHAR_CODE('temp')
kExtensionDisabledFolderType = FOUR_CHAR_CODE('extD')
kControlPanelDisabledFolderType = FOUR_CHAR_CODE('ctrD')
kSystemExtensionDisabledFolderType = FOUR_CHAR_CODE('macD')
kStartupItemsDisabledFolderType = FOUR_CHAR_CODE('strD')
kShutdownItemsDisabledFolderType = FOUR_CHAR_CODE('shdD')
kApplicationsFolderType = FOUR_CHAR_CODE('apps')
kDocumentsFolderType = FOUR_CHAR_CODE('docs')
kVolumeRootFolderType = FOUR_CHAR_CODE('root')
kChewableItemsFolderType = FOUR_CHAR_CODE('flnt')
kApplicationSupportFolderType = FOUR_CHAR_CODE('asup')
kTextEncodingsFolderType = FOUR_CHAR_CODE('\xc4tex')
kStationeryFolderType = FOUR_CHAR_CODE('odst')
kOpenDocFolderType = FOUR_CHAR_CODE('odod')
kOpenDocShellPlugInsFolderType = FOUR_CHAR_CODE('odsp')
kEditorsFolderType = FOUR_CHAR_CODE('oded')
kOpenDocEditorsFolderType = FOUR_CHAR_CODE('\xc4odf')
kOpenDocLibrariesFolderType = FOUR_CHAR_CODE('odlb')
kGenEditorsFolderType = FOUR_CHAR_CODE('\xc4edi')
kHelpFolderType = FOUR_CHAR_CODE('\xc4hlp')
kInternetPlugInFolderType = FOUR_CHAR_CODE('\xc4net')
kModemScriptsFolderType = FOUR_CHAR_CODE('\xc4mod')
kPrinterDescriptionFolderType = FOUR_CHAR_CODE('ppdf')
kPrinterDriverFolderType = FOUR_CHAR_CODE('\xc4prd')
kScriptingAdditionsFolderType = FOUR_CHAR_CODE('\xc4scr')
kSharedLibrariesFolderType = FOUR_CHAR_CODE('\xc4lib')
kVoicesFolderType = FOUR_CHAR_CODE('fvoc')
kControlStripModulesFolderType = FOUR_CHAR_CODE('sdev')
kAssistantsFolderType = FOUR_CHAR_CODE('ast\xc4')
kUtilitiesFolderType = FOUR_CHAR_CODE('uti\xc4')
kAppleExtrasFolderType = FOUR_CHAR_CODE('aex\xc4')
kContextualMenuItemsFolderType = FOUR_CHAR_CODE('cmnu')
kMacOSReadMesFolderType = FOUR_CHAR_CODE('mor\xc4')
kALMModulesFolderType = FOUR_CHAR_CODE('walk')
kALMPreferencesFolderType = FOUR_CHAR_CODE('trip')
kALMLocationsFolderType = FOUR_CHAR_CODE('fall')
kColorSyncProfilesFolderType = FOUR_CHAR_CODE('prof')
kThemesFolderType = FOUR_CHAR_CODE('thme')
kFavoritesFolderType = FOUR_CHAR_CODE('favs')
kInternetFolderType = FOUR_CHAR_CODE('int\xc4')
kAppearanceFolderType = FOUR_CHAR_CODE('appr')
kSoundSetsFolderType = FOUR_CHAR_CODE('snds')
kDesktopPicturesFolderType = FOUR_CHAR_CODE('dtp\xc4')
kInternetSearchSitesFolderType = FOUR_CHAR_CODE('issf')
kFindSupportFolderType = FOUR_CHAR_CODE('fnds')
kFindByContentFolderType = FOUR_CHAR_CODE('fbcf')
kInstallerLogsFolderType = FOUR_CHAR_CODE('ilgf')
kScriptsFolderType = FOUR_CHAR_CODE('scr\xc4')
kFolderActionsFolderType = FOUR_CHAR_CODE('fasf')
kLauncherItemsFolderType = FOUR_CHAR_CODE('laun')
kRecentApplicationsFolderType = FOUR_CHAR_CODE('rapp')
kRecentDocumentsFolderType = FOUR_CHAR_CODE('rdoc')
kRecentServersFolderType = FOUR_CHAR_CODE('rsvr')
kSpeakableItemsFolderType = FOUR_CHAR_CODE('spki')
kKeychainFolderType = FOUR_CHAR_CODE('kchn')
kQuickTimeExtensionsFolderType = FOUR_CHAR_CODE('qtex')
kDisplayExtensionsFolderType = FOUR_CHAR_CODE('dspl')
kMultiprocessingFolderType = FOUR_CHAR_CODE('mpxf')
kPrintingPlugInsFolderType = FOUR_CHAR_CODE('pplg')
kDomainTopLevelFolderType = FOUR_CHAR_CODE('dtop')
kDomainLibraryFolderType = FOUR_CHAR_CODE('dlib')
kColorSyncFolderType = FOUR_CHAR_CODE('sync')
kColorSyncCMMFolderType = FOUR_CHAR_CODE('ccmm')
kColorSyncScriptingFolderType = FOUR_CHAR_CODE('cscr')
kPrintersFolderType = FOUR_CHAR_CODE('impr')
kSpeechFolderType = FOUR_CHAR_CODE('spch')
kCarbonLibraryFolderType = FOUR_CHAR_CODE('carb')
kDocumentationFolderType = FOUR_CHAR_CODE('info')
kDeveloperDocsFolderType = FOUR_CHAR_CODE('ddoc')
kDeveloperHelpFolderType = FOUR_CHAR_CODE('devh')
kISSDownloadsFolderType = FOUR_CHAR_CODE('issd')
kUserSpecificTmpFolderType = FOUR_CHAR_CODE('utmp')
kCachedDataFolderType = FOUR_CHAR_CODE('cach')
kFrameworksFolderType = FOUR_CHAR_CODE('fram')
kPrivateFrameworksFolderType = FOUR_CHAR_CODE('pfrm')
kClassicDesktopFolderType = FOUR_CHAR_CODE('sdsk')
kDeveloperFolderType = FOUR_CHAR_CODE('devf')
kSystemSoundsFolderType = FOUR_CHAR_CODE('ssnd')
kComponentsFolderType = FOUR_CHAR_CODE('cmpd')
kQuickTimeComponentsFolderType = FOUR_CHAR_CODE('wcmp')
kCoreServicesFolderType = FOUR_CHAR_CODE('csrv')
kPictureDocumentsFolderType = FOUR_CHAR_CODE('pdoc')
kMovieDocumentsFolderType = FOUR_CHAR_CODE('mdoc')
kMusicDocumentsFolderType = FOUR_CHAR_CODE('\xb5doc')
kInternetSitesFolderType = FOUR_CHAR_CODE('site')
kPublicFolderType = FOUR_CHAR_CODE('pubb')
kAudioSupportFolderType = FOUR_CHAR_CODE('adio')
kAudioSoundsFolderType = FOUR_CHAR_CODE('asnd')
kAudioSoundBanksFolderType = FOUR_CHAR_CODE('bank')
kAudioAlertSoundsFolderType = FOUR_CHAR_CODE('alrt')
kAudioPlugInsFolderType = FOUR_CHAR_CODE('aplg')
kAudioComponentsFolderType = FOUR_CHAR_CODE('acmp')
kKernelExtensionsFolderType = FOUR_CHAR_CODE('kext')
kDirectoryServicesFolderType = FOUR_CHAR_CODE('dsrv')
kDirectoryServicesPlugInsFolderType = FOUR_CHAR_CODE('dplg')
kInstallerReceiptsFolderType = FOUR_CHAR_CODE('rcpt')
kFileSystemSupportFolderType = FOUR_CHAR_CODE('fsys')
kAppleShareSupportFolderType = FOUR_CHAR_CODE('shar')
kAppleShareAuthenticationFolderType = FOUR_CHAR_CODE('auth')
kMIDIDriversFolderType = FOUR_CHAR_CODE('midi')
kLocalesFolderType = FOUR_CHAR_CODE('\xc4loc')
kFindByContentPluginsFolderType = FOUR_CHAR_CODE('fbcp')
kUsersFolderType = FOUR_CHAR_CODE('usrs')
kCurrentUserFolderType = FOUR_CHAR_CODE('cusr')
kCurrentUserRemoteFolderLocation = FOUR_CHAR_CODE('rusf')
kCurrentUserRemoteFolderType = FOUR_CHAR_CODE('rusr')
kSharedUserDataFolderType = FOUR_CHAR_CODE('sdat')
kVolumeSettingsFolderType = FOUR_CHAR_CODE('vsfd')
kAppleshareAutomountServerAliasesFolderType = FOUR_CHAR_CODE('srv\xc4')
kPreMacOS91ApplicationsFolderType = FOUR_CHAR_CODE('\x8cpps')
kPreMacOS91InstallerLogsFolderType = FOUR_CHAR_CODE('\x94lgf')
kPreMacOS91AssistantsFolderType = FOUR_CHAR_CODE('\x8cst\xc4')
kPreMacOS91UtilitiesFolderType = FOUR_CHAR_CODE('\x9fti\xc4')
kPreMacOS91AppleExtrasFolderType = FOUR_CHAR_CODE('\x8cex\xc4')
kPreMacOS91MacOSReadMesFolderType = FOUR_CHAR_CODE('\xb5or\xc4')
kPreMacOS91InternetFolderType = FOUR_CHAR_CODE('\x94nt\xc4')
kPreMacOS91AutomountedServersFolderType = FOUR_CHAR_CODE('\xa7rv\xc4')
kPreMacOS91StationeryFolderType = FOUR_CHAR_CODE('\xbfdst')
kCreateFolderAtBoot = 0x00000002
kCreateFolderAtBootBit = 1
kFolderCreatedInvisible = 0x00000004
kFolderCreatedInvisibleBit = 2
kFolderCreatedNameLocked = 0x00000008
kFolderCreatedNameLockedBit = 3
kFolderCreatedAdminPrivs = 0x00000010
kFolderCreatedAdminPrivsBit = 4
kFolderInUserFolder = 0x00000020
kFolderInUserFolderBit = 5
kFolderTrackedByAlias = 0x00000040
kFolderTrackedByAliasBit = 6
kFolderInRemoteUserFolderIfAvailable = 0x00000080
kFolderInRemoteUserFolderIfAvailableBit = 7
kFolderNeverMatchedInIdentifyFolder = 0x00000100
kFolderNeverMatchedInIdentifyFolderBit = 8
kFolderMustStayOnSameVolume = 0x00000200
kFolderMustStayOnSameVolumeBit = 9
kFolderManagerFolderInMacOS9FolderIfMacOSXIsInstalledMask = 0x00000400
kFolderManagerFolderInMacOS9FolderIfMacOSXIsInstalledBit = 10
kFolderInLocalOrRemoteUserFolder = kFolderInUserFolder | kFolderInRemoteUserFolderIfAvailable
kRelativeFolder = FOUR_CHAR_CODE('relf')
kSpecialFolder = FOUR_CHAR_CODE('spcf')
kBlessedFolder = FOUR_CHAR_CODE('blsf')
kRootFolder = FOUR_CHAR_CODE('rotf')
kCurrentUserFolderLocation = FOUR_CHAR_CODE('cusf')
kFindFolderRedirectionFlagUseDistinctUserFoldersBit = 0
kFindFolderRedirectionFlagUseGivenVRefAndDirIDAsUserFolderBit = 1
kFindFolderRedirectionFlagsUseGivenVRefNumAndDirIDAsRemoteUserFolderBit = 2
kFolderManagerUserRedirectionGlobalsCurrentVersion = 1
kFindFolderExtendedFlagsDoNotFollowAliasesBit = 0
kFindFolderExtendedFlagsDoNotUseUserFolderBit = 1
kFindFolderExtendedFlagsUseOtherUserRecord = 0x01000000
kFolderManagerNotificationMessageUserLogIn = FOUR_CHAR_CODE('log+')
kFolderManagerNotificationMessagePreUserLogIn = FOUR_CHAR_CODE('logj')
kFolderManagerNotificationMessageUserLogOut = FOUR_CHAR_CODE('log-')
kFolderManagerNotificationMessagePostUserLogOut = FOUR_CHAR_CODE('logp')
kFolderManagerNotificationDiscardCachedData = FOUR_CHAR_CODE('dche')
kFolderManagerNotificationMessageLoginStartup = FOUR_CHAR_CODE('stup')
kDoNotRemoveWhenCurrentApplicationQuitsBit = 0
kDoNotRemoveWheCurrentApplicationQuitsBit = kDoNotRemoveWhenCurrentApplicationQuitsBit
kStopIfAnyNotificationProcReturnsErrorBit = 31
| apache-2.0 |
facebookresearch/ParlAI | parlai/chat_service/services/websocket/websocket_manager.py | 1 | 10460 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Websocket Manager Module. Contains the implementation of the
WebsocketManager, which helps run ParlAI via websockets.
"""
import json
import asyncio
import logging
from parlai.core.agents import create_agent
from parlai.chat_service.core.chat_service_manager import ChatServiceManager
import parlai.chat_service.utils.logging as log_utils
import parlai.chat_service.utils.misc as utils
from parlai.chat_service.services.websocket.sockets import MessageSocketHandler
from .agents import WebsocketAgent
import tornado
from tornado.options import options
class WebsocketManager(ChatServiceManager):
"""
Manages interactions between agents on a websocket as well as direct interactions
between agents and an overworld.
"""
class MessageSender(ChatServiceManager.ChatServiceMessageSender):
def send_read(self, receiver_id):
pass
def typing_on(self, receiver_id, persona_id=None):
pass
def __init__(self, opt):
"""
Create a WebsocketManager using the given setup options.
"""
super().__init__(opt)
self.opt = opt
self.port = opt.get('port')
self.subs = {}
self.app = None
self.debug = opt.get('is_debug', False)
self.message_sender = WebsocketManager.MessageSender()
self.service_reference_id = None
self._parse_config(opt)
self._complete_setup()
def parse_additional_args(self, opt):
self.should_load_model = self.config['additional_args'].get('load_model', True)
def _complete_setup(self):
"""
Complete necessary setup items.
"""
self.agent_pool = {}
self.messenger_agent_states = {}
self.agent_id_to_overworld_future = {}
self.task_to_agent_ids = {}
self._load_model()
def _load_model(self):
"""
Load model if necessary.
"""
if 'models' in self.opt and self.should_load_model:
model_params = {}
model_info = {}
for model in self.opt['models']:
model_opt = self.opt['models'][model]
override = model_opt.get('override', {})
if type(override) is list:
model_opt['override'] = override[0]
model_params[model] = create_agent(model_opt).share()
model_info[model] = {'override': override}
self.runner_opt['model_info'] = model_info
self.runner_opt['shared_bot_params'] = model_params
def _handle_message_read(self, event):
"""
        Send a read receipt back to the user who sent the message. This
        function is left empty, as read receipts are not applicable to
        websockets.
"""
pass
def _manager_loop_fn(self):
"""
An iteration of the manager's main loop to launch worlds.
"""
with self.agent_pool_change_condition:
valid_pools = self._get_unique_pool()
for world_type, agent_pool in valid_pools.items():
# check if agent has exceeded max time in pool
world_config = self.task_configs[world_type]
if world_config.max_time_in_pool is not None:
self.check_timeout_in_pool(
world_type,
agent_pool,
world_config.max_time_in_pool,
world_config.backup_task,
)
needed_agents = self.max_agents_for[world_type]
if len(agent_pool) >= needed_agents:
log_utils.print_and_log(
logging.INFO, 'starting pool', should_print=True
)
# enough agents in pool to start new conversation
self.conversation_index += 1
task_id = 't_{}'.format(self.conversation_index)
# Add the required number of valid agents to the conv
agent_states = [w for w in agent_pool[:needed_agents]]
agents = []
for state in agent_states:
agent = self._create_agent(task_id, state.get_id())
agent.onboard_data = state.onboard_data
agent.data = state.data
state.assign_agent_to_task(agent, task_id)
state.set_active_agent(agent)
agents.append(agent)
# reset wait message state
state.stored_data['seen_wait_message'] = False
assign_role_function = utils.get_assign_roles_fn(
self.world_module, self.taskworld_map[world_type]
)
if assign_role_function is None:
assign_role_function = utils.default_assign_roles_fn
assign_role_function(agents)
# Allow task creator to filter out workers and run
# versions of the task that require fewer agents
for a in agents:
# Remove selected workers from the agent pool
self.remove_agent_from_pool(
self.get_agent_state(a.id),
world_type=world_type,
mark_removed=False,
)
for a in agents:
partner_list = agents.copy()
partner_list.remove(a)
a.message_partners = partner_list
done_callback = self._get_done_callback_for_agents(
task_id, world_type, agents
)
# launch task world.
future = self.world_runner.launch_task_world(
task_id, self.taskworld_map[world_type], agents
)
future.add_done_callback(done_callback)
self.active_worlds[task_id] = future
def start_task(self):
"""
Begin handling task.
"""
self.running = True
self.app = self._make_app()
self.app.listen(self.port)
# Must use a tornado callback to run the main loop
callback_time = utils.THREAD_MEDIUM_SLEEP * 1000
tornado.ioloop.PeriodicCallback(
callback=self._manager_loop_fn, callback_time=callback_time
).start()
tornado.ioloop.IOLoop.current().start()
def shutdown(self):
"""
        Shut down the tornado application.
"""
        try:
            self.world_runner.shutdown()
            self._expire_all_conversations()
        finally:
            # Stop the IOLoop even if shutting down the worlds raised.
            tornado.ioloop.IOLoop.current().stop()
def _create_agent(self, task_id, socketID):
"""
Initialize an agent and return it.
Called each time an agent is placed into a new task.
:param task_id:
string task identifier
:param agent_id:
int agent id
"""
return WebsocketAgent(self.opt, self, socketID, task_id)
def _make_app(self):
"""
Starts the tornado application.
"""
message_callback = self._on_new_message
options['log_to_stderr'] = True
tornado.options.parse_command_line([])
return tornado.web.Application(
[
(
r"/websocket",
MessageSocketHandler,
{'subs': self.subs, 'message_callback': message_callback},
)
],
debug=self.debug,
)
def observe_message(self, socket_id, message, quick_replies=None):
"""
Send a message through the message manager.
:param socket_id:
int identifier for agent socket to send message to
:param message:
(str) message to send through the socket.
:param quick_replies:
(list) list of strings to send as quick replies.
Returns a tornado future for tracking the `write_message` action.
"""
if quick_replies is not None:
quick_replies = list(quick_replies)
message = json.dumps(
{'text': message.replace('\n', '<br />'), 'quick_replies': quick_replies}
)
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
if socket_id not in self.subs:
self.agent_id_to_overworld_future[socket_id].cancel()
return
return loop.run_until_complete(self.subs[socket_id].write_message(message))
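    # For reference (illustrative, not part of the original module): the
    # frame written to the socket above is a JSON string such as
    #     {"text": "Hello<br />world", "quick_replies": ["yes", "no"]}
    # with newlines in the text replaced by <br /> tags.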
def observe_payload(self, socket_id, payload, quick_replies=None):
"""
Send a message through the message manager.
:param socket_id:
int identifier for agent socket to send message to
:param payload:
(dict) payload to send through the socket. The mandatory keys are:
'type': (str) Type of the payload (e.g. 'image')
'data': str. base64 encoded content
If 'type' is 'image', the 'mime_type' (str) key can be provided
to specify the Mime type of the image
Returns a tornado future for tracking the `write_message` action.
"""
        message = json.dumps(
            {'text': '', 'payload': payload, 'quick_replies': quick_replies}
        )
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
if socket_id not in self.subs:
self.agent_id_to_overworld_future[socket_id].cancel()
return
return loop.run_until_complete(self.subs[socket_id].write_message(message))
def restructure_message(self, message):
"""
This is to restructure a new message to conform to the message structure defined
in the `chat_service` README.
"""
return message
def _handle_bot_read(self, agent_id):
pass
def _confirm_message_delivery(self, event):
pass
def setup_server(self):
pass
def setup_socket(self):
pass
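# Illustrative client sketch (not part of the original module): a minimal
# tornado client for the /websocket route registered in _make_app above. The
# {'text': ...} shape mirrors what observe_message sends; host and port are
# hypothetical.
from tornado import websocket as tornado_websocket
async def demo_client():
conn = await tornado_websocket.websocket_connect('ws://localhost:35496/websocket')
await conn.write_message(json.dumps({'text': 'hello'}))
print(await conn.read_message())
# tornado.ioloop.IOLoop.current().run_sync(demo_client)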
| mit |
xsyntrex/selenium | py/test/selenium/webdriver/common/interactions_tests.py | 3 | 8356 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Tests for advanced user interactions."""
import pytest
from selenium.common.exceptions import WebDriverException
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support.ui import WebDriverWait
def performDragAndDropWithMouse(driver, pages):
"""Copied from org.openqa.selenium.interactions.TestBasicMouseInterface."""
pages.load("draggableLists.html")
dragReporter = driver.find_element_by_id("dragging_reports")
toDrag = driver.find_element_by_id("rightitem-3")
dragInto = driver.find_element_by_id("sortable1")
holdItem = ActionChains(driver).click_and_hold(toDrag)
moveToSpecificItem = ActionChains(driver) \
.move_to_element(driver.find_element_by_id("leftitem-4"))
moveToOtherList = ActionChains(driver).move_to_element(dragInto)
drop = ActionChains(driver).release(dragInto)
assert "Nothing happened." == dragReporter.text
holdItem.perform()
moveToSpecificItem.perform()
moveToOtherList.perform()
assert "Nothing happened. DragOut" == dragReporter.text
drop.perform()
@pytest.mark.xfail_marionette(
reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1292178',
raises=WebDriverException)
def testDraggingElementWithMouseMovesItToAnotherList(driver, pages):
"""Copied from org.openqa.selenium.interactions.TestBasicMouseInterface."""
performDragAndDropWithMouse(driver, pages)
dragInto = driver.find_element_by_id("sortable1")
assert 6 == len(dragInto.find_elements_by_tag_name("li"))
@pytest.mark.xfail_marionette(
reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1292178',
raises=WebDriverException)
def testDraggingElementWithMouseFiresEvents(driver, pages):
"""Copied from org.openqa.selenium.interactions.TestBasicMouseInterface."""
performDragAndDropWithMouse(driver, pages)
dragReporter = driver.find_element_by_id("dragging_reports")
assert "Nothing happened. DragOut DropIn RightItem 3" == dragReporter.text
def _isElementAvailable(driver, id):
"""Copied from org.openqa.selenium.interactions.TestBasicMouseInterface."""
try:
driver.find_element_by_id(id)
return True
except Exception:
return False
@pytest.mark.xfail_marionette(
reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1292178',
raises=WebDriverException)
def testDragAndDrop(driver, pages):
"""Copied from org.openqa.selenium.interactions.TestBasicMouseInterface."""
element_available_timeout = 15
wait = WebDriverWait(driver, element_available_timeout)
pages.load("droppableItems.html")
wait.until(lambda dr: _isElementAvailable(driver, "draggable"))
if not _isElementAvailable(driver, "draggable"):
raise AssertionError("Could not find draggable element after 15 seconds.")
toDrag = driver.find_element_by_id("draggable")
dropInto = driver.find_element_by_id("droppable")
holdDrag = ActionChains(driver) \
.click_and_hold(toDrag)
move = ActionChains(driver) \
.move_to_element(dropInto)
drop = ActionChains(driver).release(dropInto)
holdDrag.perform()
move.perform()
drop.perform()
dropInto = driver.find_element_by_id("droppable")
text = dropInto.find_element_by_tag_name("p").text
assert "Dropped!" == text
@pytest.mark.xfail_marionette(
reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1292178',
raises=WebDriverException)
def testDoubleClick(driver, pages):
"""Copied from org.openqa.selenium.interactions.TestBasicMouseInterface."""
pages.load("javascriptPage.html")
toDoubleClick = driver.find_element_by_id("doubleClickField")
dblClick = ActionChains(driver) \
.double_click(toDoubleClick)
dblClick.perform()
assert "DoubleClicked" == toDoubleClick.get_attribute('value')
@pytest.mark.xfail_marionette(
reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1292178',
raises=WebDriverException)
@pytest.mark.xfail_phantomjs(
reason='https://github.com/ariya/phantomjs/issues/14005')
def testContextClick(driver, pages):
"""Copied from org.openqa.selenium.interactions.TestBasicMouseInterface."""
pages.load("javascriptPage.html")
toContextClick = driver.find_element_by_id("doubleClickField")
contextClick = ActionChains(driver) \
.context_click(toContextClick)
contextClick.perform()
assert "ContextClicked" == toContextClick.get_attribute('value')
@pytest.mark.xfail_marionette(
reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1292178')
def testMoveAndClick(driver, pages):
"""Copied from org.openqa.selenium.interactions.TestBasicMouseInterface."""
pages.load("javascriptPage.html")
toClick = driver.find_element_by_id("clickField")
click = ActionChains(driver) \
.move_to_element(toClick) \
.click()
click.perform()
assert "Clicked" == toClick.get_attribute('value')
@pytest.mark.xfail_marionette(
reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1292178')
def testCannotMoveToANullLocator(driver, pages):
"""Copied from org.openqa.selenium.interactions.TestBasicMouseInterface."""
pages.load("javascriptPage.html")
with pytest.raises(AttributeError):
move = ActionChains(driver) \
.move_to_element(None)
move.perform()
@pytest.mark.xfail_marionette(
reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1292178')
@pytest.mark.xfail_phantomjs
def testClickingOnFormElements(driver, pages):
"""Copied from org.openqa.selenium.interactions.CombinedInputActionsTest."""
pages.load("formSelectionPage.html")
options = driver.find_elements_by_tag_name("option")
selectThreeOptions = ActionChains(driver) \
.click(options[1]) \
.key_down(Keys.SHIFT) \
.click(options[2]) \
.click(options[3]) \
.key_up(Keys.SHIFT)
selectThreeOptions.perform()
showButton = driver.find_element_by_name("showselected")
showButton.click()
resultElement = driver.find_element_by_id("result")
assert "roquefort parmigiano cheddar" == resultElement.text
@pytest.mark.xfail_marionette(
reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1292178')
@pytest.mark.xfail_phantomjs
def testSelectingMultipleItems(driver, pages):
"""Copied from org.openqa.selenium.interactions.CombinedInputActionsTest."""
pages.load("selectableItems.html")
reportingElement = driver.find_element_by_id("infodiv")
assert "no info" == reportingElement.text
listItems = driver.find_elements_by_tag_name("li")
selectThreeItems = ActionChains(driver) \
.key_down(Keys.CONTROL) \
.click(listItems[1]) \
.click(listItems[3]) \
.click(listItems[5]) \
.key_up(Keys.CONTROL)
selectThreeItems.perform()
assert "#item2 #item4 #item6" == reportingElement.text
# Now click on another element, make sure that's the only one selected.
actionsBuilder = ActionChains(driver)
actionsBuilder.click(listItems[6]).perform()
assert "#item7" == reportingElement.text
@pytest.mark.xfail_marionette(
reason='https://bugzilla.mozilla.org/show_bug.cgi?id=1292178')
def testSendingKeysToActiveElementWithModifier(driver, pages):
pages.load("formPage.html")
e = driver.find_element_by_id("working")
e.click()
ActionChains(driver) \
.key_down(Keys.SHIFT) \
.send_keys("abc") \
.key_up(Keys.SHIFT) \
.perform()
assert "ABC" == e.get_attribute('value')
| apache-2.0 |
40223211/cadpw16 | wsgi/pybean.py | 562 | 8617 | #coding: utf-8
import sqlite3
from pkg_resources import parse_version
__version__ = "0.2.1"
__author__ = "Mickael Desfrenes"
__email__ = "[email protected]"
# Yen 2013.04.08, changed Python 2's .next() to next() so it works in Python 3
class SQLiteWriter(object):
"""
In frozen mode (the default), the writer will not alter db schema.
Just add frozen=False to enable column creation (or just add False
as second parameter):
query_writer = SQLiteWriter(":memory:", False)
"""
def __init__(self, db_path=":memory:", frozen=True):
self.db = sqlite3.connect(db_path)
self.db.isolation_level = None
self.db.row_factory = sqlite3.Row
self.frozen = frozen
self.cursor = self.db.cursor()
self.cursor.execute("PRAGMA foreign_keys=ON;")
self.cursor.execute('PRAGMA encoding = "UTF-8";')
self.cursor.execute('BEGIN;')
def __del__(self):
self.db.close()
def replace(self, bean):
keys = []
values = []
write_operation = "replace"
if "id" not in bean.__dict__:
write_operation = "insert"
keys.append("id")
values.append(None)
self.__create_table(bean.__class__.__name__)
columns = self.__get_columns(bean.__class__.__name__)
for key in bean.__dict__:
keys.append(key)
if key not in columns:
self.__create_column(bean.__class__.__name__, key,
type(bean.__dict__[key]))
values.append(bean.__dict__[key])
sql = write_operation + " into " + bean.__class__.__name__ + "("
sql += ",".join(keys) + ") values ("
sql += ",".join(["?" for i in keys]) + ")"
self.cursor.execute(sql, values)
if write_operation == "insert":
bean.id = self.cursor.lastrowid
return bean.id
def __create_column(self, table, column, sqltype):
if self.frozen:
return
if sqltype in [float, int, bool]:
sqltype = "NUMERIC"
else:
sqltype = "TEXT"
sql = "alter table " + table + " add " + column + " " + sqltype
self.cursor.execute(sql)
def __get_columns(self, table):
columns = []
if self.frozen:
return columns
self.cursor.execute("PRAGMA table_info(" + table + ")")
for row in self.cursor:
columns.append(row["name"])
return columns
def __create_table(self, table):
if self.frozen:
return
sql = "create table if not exists " + table + "(id INTEGER PRIMARY KEY AUTOINCREMENT)"
self.cursor.execute(sql)
def get_rows(self, table_name, sql = "1", replace = None):
if replace is None : replace = []
self.__create_table(table_name)
sql = "SELECT * FROM " + table_name + " WHERE " + sql
try:
self.cursor.execute(sql, replace)
for row in self.cursor:
yield row
except sqlite3.OperationalError:
return
def get_count(self, table_name, sql="1", replace = None):
if replace is None : replace = []
self.__create_table(table_name)
sql = "SELECT count(*) AS cnt FROM " + table_name + " WHERE " + sql
try:
self.cursor.execute(sql, replace)
except sqlite3.OperationalError:
return 0
for row in self.cursor:
return row["cnt"]
def delete(self, bean):
self.__create_table(bean.__class__.__name__)
sql = "delete from " + bean.__class__.__name__ + " where id=?"
self.cursor.execute(sql,[bean.id])
def link(self, bean_a, bean_b):
self.replace(bean_a)
self.replace(bean_b)
table_a = bean_a.__class__.__name__
table_b = bean_b.__class__.__name__
assoc_table = self.__create_assoc_table(table_a, table_b)
sql = "replace into " + assoc_table + "(" + table_a + "_id," + table_b
sql += "_id) values(?,?)"
self.cursor.execute(sql,
[bean_a.id, bean_b.id])
def unlink(self, bean_a, bean_b):
table_a = bean_a.__class__.__name__
table_b = bean_b.__class__.__name__
assoc_table = self.__create_assoc_table(table_a, table_b)
sql = "delete from " + assoc_table + " where " + table_a
sql += "_id=? and " + table_b + "_id=?"
self.cursor.execute(sql,
[bean_a.id, bean_b.id])
def get_linked_rows(self, bean, table_name):
bean_table = bean.__class__.__name__
assoc_table = self.__create_assoc_table(bean_table, table_name)
sql = "select t.* from " + table_name + " t inner join " + assoc_table
sql += " a on a." + table_name + "_id = t.id where a."
sql += bean_table + "_id=?"
self.cursor.execute(sql,[bean.id])
for row in self.cursor:
yield row
def __create_assoc_table(self, table_a, table_b):
assoc_table = "_".join(sorted([table_a, table_b]))
if not self.frozen:
sql = "create table if not exists " + assoc_table + "("
sql+= table_a + "_id NOT NULL REFERENCES " + table_a + "(id) ON DELETE cascade,"
sql+= table_b + "_id NOT NULL REFERENCES " + table_b + "(id) ON DELETE cascade,"
sql+= " PRIMARY KEY (" + table_a + "_id," + table_b + "_id));"
self.cursor.execute(sql)
# no real support for foreign keys until sqlite3 v3.6.19
# so here's the hack
if parse_version(sqlite3.sqlite_version) < parse_version("3.6.19"):
sql = "create trigger if not exists fk_" + table_a + "_" + assoc_table
sql+= " before delete on " + table_a
sql+= " for each row begin delete from " + assoc_table + " where " + table_a + "_id = OLD.id;end;"
self.cursor.execute(sql)
sql = "create trigger if not exists fk_" + table_b + "_" + assoc_table
sql+= " before delete on " + table_b
sql+= " for each row begin delete from " + assoc_table + " where " + table_b + "_id = OLD.id;end;"
self.cursor.execute(sql)
return assoc_table
def delete_all(self, table_name, sql = "1", replace = None):
if replace is None : replace = []
self.__create_table(table_name)
sql = "DELETE FROM " + table_name + " WHERE " + sql
try:
self.cursor.execute(sql, replace)
return True
except sqlite3.OperationalError:
return False
def commit(self):
self.db.commit()
class Store(object):
"""
A SQL writer should be passed to the constructor:
beans_save = Store(SQLiteWriter(":memory:", frozen=False))
"""
def __init__(self, SQLWriter):
self.writer = SQLWriter
def new(self, table_name):
new_object = type(table_name,(object,),{})()
return new_object
def save(self, bean):
self.writer.replace(bean)
def load(self, table_name, id):
for row in self.writer.get_rows(table_name, "id=?", [id]):
return self.row_to_object(table_name, row)
def count(self, table_name, sql = "1", replace=None):
return self.writer.get_count(table_name, sql, replace if replace is not None else [])
def find(self, table_name, sql = "1", replace=None):
for row in self.writer.get_rows(table_name, sql, replace if replace is not None else []):
yield self.row_to_object(table_name, row)
def find_one(self, table_name, sql = "1", replace=None):
try:
return next(self.find(table_name, sql, replace))
except StopIteration:
return None
def delete(self, bean):
self.writer.delete(bean)
def link(self, bean_a, bean_b):
self.writer.link(bean_a, bean_b)
def unlink(self, bean_a, bean_b):
self.writer.unlink(bean_a, bean_b)
def get_linked(self, bean, table_name):
for row in self.writer.get_linked_rows(bean, table_name):
yield self.row_to_object(table_name, row)
def delete_all(self, table_name, sql = "1", replace=None):
return self.writer.delete_all(table_name, sql, replace if replace is not None else [])
def row_to_object(self, table_name, row):
new_object = type(table_name,(object,),{})()
for key in row.keys():
new_object.__dict__[key] = row[key]
return new_object
def commit(self):
self.writer.commit()
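# Minimal usage sketch (illustrative; table and attribute names are made up):
# store = Store(SQLiteWriter(":memory:", frozen=False))
# book = store.new("book")
# book.title = "PyBean"
# store.save(book)
# author = store.new("author")
# author.name = "Mickael"
# store.link(book, author)
# store.commit()
# print(store.find_one("book", "title=?", ["PyBean"]).title)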
| gpl-3.0 |
Glorf/servo | tests/wpt/css-tests/css-text-decor-3_dev/html/support/generate-text-emphasis-position-property-tests.py | 841 | 3343 | #!/usr/bin/env python
# -*- coding: UTF-8 -*-
"""
This script generates tests text-emphasis-position-property-001 ~ 006
which cover all possible values of text-emphasis-position property with
all combination of three main writing modes and two orientations. Only
test files are generated by this script. It also outputs a list of all
tests it generated in the format of Mozilla reftest.list to the stdout.
"""
from __future__ import unicode_literals
import itertools
TEST_FILE = 'text-emphasis-position-property-{:03}{}.html'
REF_FILE = 'text-emphasis-position-property-{:03}-ref.html'
TEST_TEMPLATE = '''<!DOCTYPE html>
<meta charset="utf-8">
<title>CSS Test: text-emphasis-position: {value}, {title}</title>
<link rel="author" title="Xidorn Quan" href="https://www.upsuper.org">
<link rel="author" title="Mozilla" href="https://www.mozilla.org">
<link rel="help" href="https://drafts.csswg.org/css-text-decor-3/#text-emphasis-position-property">
<meta name="assert" content="'text-emphasis-position: {value}' with 'writing-mode: {wm}' puts emphasis marks {position} the text.">
<link rel="match" href="text-emphasis-position-property-{index:03}-ref.html">
<p>Pass if the emphasis marks are {position} the text below:</p>
<div style="line-height: 5; text-emphasis: circle; writing-mode: {wm}; text-orientation: {orient}; text-emphasis-position: {value}">試験テスト</div>
'''
SUFFIXES = ['', 'a', 'b', 'c', 'd', 'e', 'f', 'g']
WRITING_MODES = ["horizontal-tb", "vertical-rl", "vertical-lr"]
POSITION_HORIZONTAL = ["over", "under"]
POSITION_VERTICAL = ["right", "left"]
REF_MAP_MIXED = { "over": 1, "under": 2, "right": 3, "left": 4 }
REF_MAP_SIDEWAYS = { "right": 5, "left": 6 }
POSITION_TEXT = { "over": "over", "under": "under",
"right": "to the right of", "left": "to the left of" }
suffixes = [iter(SUFFIXES) for i in range(6)]
reftest_items = []
def write_file(filename, content):
with open(filename, 'wb') as f:
f.write(content.encode('UTF-8'))
def write_test_file(idx, suffix, wm, orient, value, position):
filename = TEST_FILE.format(idx, suffix)
write_file(filename, TEST_TEMPLATE.format(
value=value, wm=wm, orient=orient, index=idx, position=position,
title=(wm if orient == "mixed" else "{}, {}".format(wm, orient))))
reftest_items.append("== {} {}".format(filename, REF_FILE.format(idx)))
def write_test_files(wm, orient, pos1, pos2):
idx = (REF_MAP_MIXED if orient == "mixed" else REF_MAP_SIDEWAYS)[pos1]
position = POSITION_TEXT[pos1]
suffix = suffixes[idx - 1]
write_test_file(idx, next(suffix), wm, orient, pos1 + " " + pos2, position)
write_test_file(idx, next(suffix), wm, orient, pos2 + " " + pos1, position)
for wm in WRITING_MODES:
if wm == "horizontal-tb":
effective_pos = POSITION_HORIZONTAL
ineffective_pos = POSITION_VERTICAL
else:
effective_pos = POSITION_VERTICAL
ineffective_pos = POSITION_HORIZONTAL
for pos1, pos2 in itertools.product(effective_pos, ineffective_pos):
write_test_files(wm, "mixed", pos1, pos2)
if wm != "horizontal-tb":
write_test_files(wm, "sideways", pos1, pos2)
print("# START tests from {}".format(__file__))
reftest_items.sort()
for item in reftest_items:
print(item)
print("# END tests from {}".format(__file__))
| mpl-2.0 |
dmitry-r/incubator-airflow | airflow/contrib/auth/backends/ldap_auth.py | 22 | 10752 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from future.utils import native
import flask_login
from flask_login import login_required, current_user, logout_user
from flask import flash
from wtforms import (
Form, PasswordField, StringField)
from wtforms.validators import InputRequired
from ldap3 import Server, Connection, Tls, LEVEL, SUBTREE, BASE
import ssl
from flask import url_for, redirect
from airflow import settings
from airflow import models
from airflow import configuration
from airflow.configuration import AirflowConfigException
import logging
import traceback
import re
login_manager = flask_login.LoginManager()
login_manager.login_view = 'airflow.login' # Calls login() below
login_manager.login_message = None
LOG = logging.getLogger(__name__)
class AuthenticationError(Exception):
pass
class LdapException(Exception):
pass
def get_ldap_connection(dn=None, password=None):
tls_configuration = None
use_ssl = False
try:
cacert = configuration.get("ldap", "cacert")
tls_configuration = Tls(validate=ssl.CERT_REQUIRED, ca_certs_file=cacert)
use_ssl = True
except AirflowConfigException:
pass
server = Server(configuration.get("ldap", "uri"), use_ssl=use_ssl, tls=tls_configuration)
conn = Connection(server, native(dn), native(password))
if not conn.bind():
LOG.error("Cannot bind to ldap server: %s ", conn.last_error)
raise AuthenticationError("Cannot bind to ldap server")
return conn
def group_contains_user(conn, search_base, group_filter, user_name_attr, username):
search_filter = '(&({0}))'.format(group_filter)
if not conn.search(native(search_base), native(search_filter),
attributes=[native(user_name_attr)]):
LOG.warning("Unable to find group for %s %s", search_base, search_filter)
else:
for resp in conn.response:
if (
'attributes' in resp and (
resp['attributes'].get(user_name_attr)[0] == username or
resp['attributes'].get(user_name_attr) == username
)
):
return True
return False
def groups_user(conn, search_base, user_filter, user_name_att, username):
search_filter = "(&({0})({1}={2}))".format(user_filter, user_name_att, username)
try:
memberof_attr = configuration.get("ldap", "group_member_attr")
except AirflowConfigException:
memberof_attr = "memberOf"
res = conn.search(native(search_base), native(search_filter), attributes=[native(memberof_attr)])
if not res:
LOG.info("Cannot find user %s", username)
raise AuthenticationError("Invalid username or password")
if conn.response and memberof_attr not in conn.response[0]["attributes"]:
LOG.warning("""Missing attribute "%s" when looked-up in Ldap database.
The user does not seem to be a member of a group and therefore won't see any dag
if the option filter_by_owner=True and owner_mode=ldapgroup are set""", memberof_attr)
return []
user_groups = conn.response[0]["attributes"][memberof_attr]
regex = re.compile("cn=([^,]*).*", re.IGNORECASE)
groups_list = []
try:
groups_list = [regex.search(i).group(1) for i in user_groups]
except (IndexError, AttributeError):
LOG.warning("Parsing error when retrieving the user's group(s)."
" Check if the user belongs to at least one group"
" or if the user's groups name do not contain special characters")
return groups_list
class LdapUser(models.User):
def __init__(self, user):
self.user = user
self.ldap_groups = []
# Load and cache superuser and data_profiler settings.
conn = get_ldap_connection(configuration.get("ldap", "bind_user"),
configuration.get("ldap", "bind_password"))
try:
self.superuser = group_contains_user(conn,
configuration.get("ldap", "basedn"),
configuration.get("ldap", "superuser_filter"),
configuration.get("ldap", "user_name_attr"),
user.username)
except AirflowConfigException:
self.superuser = True
LOG.debug("Missing configuration for superuser settings. Skipping.")
try:
self.data_profiler = group_contains_user(conn,
configuration.get("ldap", "basedn"),
configuration.get("ldap", "data_profiler_filter"),
configuration.get("ldap", "user_name_attr"),
user.username)
except AirflowConfigException:
self.data_profiler = True
LOG.debug("Missing configuration for dataprofiler settings. Skipping")
# Load the ldap group(s) a user belongs to
try:
self.ldap_groups = groups_user(conn,
configuration.get("ldap", "basedn"),
configuration.get("ldap", "user_filter"),
configuration.get("ldap", "user_name_attr"),
user.username)
except AirflowConfigException:
LOG.debug("Missing configuration for ldap settings. Skipping")
@staticmethod
def try_login(username, password):
conn = get_ldap_connection(configuration.get("ldap", "bind_user"),
configuration.get("ldap", "bind_password"))
search_filter = "(&({0})({1}={2}))".format(
configuration.get("ldap", "user_filter"),
configuration.get("ldap", "user_name_attr"),
username
)
search_scopes = {
"LEVEL": LEVEL,
"SUBTREE": SUBTREE,
"BASE": BASE
}
search_scope = LEVEL
if configuration.has_option("ldap", "search_scope"):
search_scope = search_scopes.get(configuration.get("ldap", "search_scope"), LEVEL)
# todo: BASE or ONELEVEL?
res = conn.search(native(configuration.get("ldap", "basedn")),
native(search_filter),
search_scope=native(search_scope))
# todo: use list or result?
if not res:
LOG.info("Cannot find user %s", username)
raise AuthenticationError("Invalid username or password")
entry = conn.response[0]
conn.unbind()
if 'dn' not in entry:
# The search filter for the user did not return any values, so an
# invalid user was used for credentials.
raise AuthenticationError("Invalid username or password")
try:
conn = get_ldap_connection(entry['dn'], password)
except KeyError as e:
LOG.error("""
Unable to parse LDAP structure. If you're using Active Directory and not specifying an OU, you must set search_scope=SUBTREE in airflow.cfg.
%s
""" % traceback.format_exc())
raise LdapException("Could not parse LDAP structure. Try setting search_scope in airflow.cfg, or check logs")
if not conn:
LOG.info("Password incorrect for user %s", username)
raise AuthenticationError("Invalid username or password")
def is_active(self):
'''Required by flask_login'''
return True
def is_authenticated(self):
'''Required by flask_login'''
return True
def is_anonymous(self):
'''Required by flask_login'''
return False
def get_id(self):
'''Returns the current user id as required by flask_login'''
return self.user.get_id()
def data_profiling(self):
'''Provides access to data profiling tools'''
return self.data_profiler
def is_superuser(self):
'''Access all the things'''
return self.superuser
@login_manager.user_loader
def load_user(userid):
LOG.debug("Loading user %s", userid)
if not userid or userid == 'None':
return None
session = settings.Session()
user = session.query(models.User).filter(models.User.id == int(userid)).first()
session.expunge_all()
session.commit()
session.close()
return LdapUser(user)
def login(self, request):
if current_user.is_authenticated():
flash("You are already logged in")
return redirect(url_for('admin.index'))
username = None
password = None
form = LoginForm(request.form)
if request.method == 'POST' and form.validate():
username = request.form.get("username")
password = request.form.get("password")
if not username or not password:
return self.render('airflow/login.html',
title="Airflow - Login",
form=form)
try:
LdapUser.try_login(username, password)
LOG.info("User %s successfully authenticated", username)
session = settings.Session()
user = session.query(models.User).filter(
models.User.username == username).first()
if not user:
user = models.User(
username=username,
is_superuser=False)
session.merge(user)
session.commit()
flask_login.login_user(LdapUser(user))
session.commit()
session.close()
return redirect(request.args.get("next") or url_for("admin.index"))
except (LdapException, AuthenticationError) as e:
if isinstance(e, LdapException):
flash(e, "error")
else:
flash("Incorrect login details")
return self.render('airflow/login.html',
title="Airflow - Login",
form=form)
class LoginForm(Form):
username = StringField('Username', [InputRequired()])
password = PasswordField('Password', [InputRequired()])
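# Illustrative airflow.cfg [ldap] section (values are made up; cacert,
# search_scope and group_member_attr are optional and fall back as above):
# [ldap]
# uri = ldaps://ldap.example.com:636
# bind_user = cn=airflow,ou=service,dc=example,dc=com
# bind_password = secret
# basedn = dc=example,dc=com
# user_filter = objectClass=person
# user_name_attr = uid
# superuser_filter = memberOf=cn=airflow-super,ou=groups,dc=example,dc=com
# data_profiler_filter = memberOf=cn=airflow-profiler,ou=groups,dc=example,dc=com
# cacert = /etc/ca/ldap_ca.crt
# search_scope = SUBTREE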
| apache-2.0 |
chennqqi/splayer | Test/Update_Unittest/web/scripts/patchgen.py | 18 | 2508 | #!/usr/bin/python
#create patch files for ShooterPlayer
#
#
#
import glob
import optparse
import os
import re
import shutil
import sys
import hashlib
import subprocess
def filemd5(fileName):
m = hashlib.md5()
try:
fd = open(fileName,"rb")
except IOError:
print "Unable to open the file in readmode:", filename
return
content = fd.read()
fd.close()
m.update(content)
return m.hexdigest()
def createPatch(folder, options):
patchtool = ".\\courgette.exe"
cmd = patchtool + " -gen "
cmd +=options.OldBuildDir + "\\" + folder + "\\splayer.exe "
cmd +=options.latest + " " + options.outputdir + "\\" + folder + ".patch"
print cmd
retcode = subprocess.call(cmd)
return retcode
def main(options, args):
latestMD5 = filemd5(options.latest)
print latestMD5
revisions = []
folders = [f for f in os.listdir(options.OldBuildDir) if os.path.isdir(os.path.join(options.OldBuildDir, f))]
for x in folders:
revisions.append(int(x))
revisions.sort()
revisions.reverse()
foldercount = options.Count
if (len(revisions)<foldercount):
foldercount = len(revisions)
if not os.path.exists(options.outputdir):
os.mkdir(options.outputdir)
descfile = open(options.outputdir + "\\desc.txt", "w")
for index in range(foldercount):
createPatch(str(revisions[index]), options)
folder = str(revisions[index])
md51 = filemd5(options.OldBuildDir + "\\" + folder + "\\splayer.exe")
try:
descfile.write(md51)
descfile.write(", ")
descfile.write(folder + ".patch")
descfile.write("\n")
except Exception,e:
print e
descfile.close();
return 0
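# Example invocation (illustrative paths, matching the option defaults below):
# python patchgen.py --latest .\splayer.exe --OldBuildDir .\archieve --Count 10 --outputdir .\output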
if '__main__' == __name__:
option_parser = optparse.OptionParser()
option_parser.add_option('', '--latest', default='.\\splayer.exe',
help='path to latest release')
option_parser.add_option('', '--OldBuildDir', default='.\\archieve',
help='path to the old builds directory')
option_parser.add_option('', '--Count', default=10, type='int',
help='How many patches to be created')
option_parser.add_option('', '--outputdir', default='.\\output',
help='output path')
options, args = option_parser.parse_args()
sys.exit(main(options, args)) | gpl-2.0 |
bastibl/gnuradio | gr-blocks/python/blocks/qa_mute.py | 7 | 3056 | #!/usr/bin/env python
#
# Copyright 2004,2005,2007,2010,2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest, blocks
class test_mute(gr_unittest.TestCase):
def setUp(self):
self.tb = gr.top_block()
def tearDown(self):
self.tb = None
def help_ii(self, src_data, exp_data, op):
for s in zip(list(range(len(src_data))), src_data):
src = blocks.vector_source_i(s[1])
self.tb.connect(src, (op, s[0]))
dst = blocks.vector_sink_i()
self.tb.connect(op, dst)
self.tb.run()
result_data = dst.data()
self.assertEqual(exp_data, result_data)
def help_ff(self, src_data, exp_data, op):
for s in zip(list(range(len(src_data))), src_data):
src = blocks.vector_source_f(s[1])
self.tb.connect(src, (op, s[0]))
dst = blocks.vector_sink_f()
self.tb.connect(op, dst)
self.tb.run()
result_data = dst.data()
self.assertEqual(exp_data, result_data)
def help_cc(self, src_data, exp_data, op):
for s in zip(list(range(len(src_data))), src_data):
src = blocks.vector_source_c(s[1])
self.tb.connect(src, (op, s[0]))
dst = blocks.vector_sink_c()
self.tb.connect(op, dst)
self.tb.run()
result_data = dst.data()
self.assertEqual(exp_data, result_data)
def test_unmute_ii(self):
src_data = (1, 2, 3, 4, 5)
expected_result = (1, 2, 3, 4, 5)
op = blocks.mute_ii(False)
self.help_ii((src_data,), expected_result, op)
def test_mute_ii(self):
src_data = (1, 2, 3, 4, 5)
expected_result = (0, 0, 0, 0, 0)
op = blocks.mute_ii(True)
self.help_ii((src_data,), expected_result, op)
def test_unmute_cc(self):
src_data = (1+5j, 2+5j, 3+5j, 4+5j, 5+5j)
expected_result = (1+5j, 2+5j, 3+5j, 4+5j, 5+5j)
op = blocks.mute_cc(False)
self.help_cc((src_data,), expected_result, op)
def test_mute_cc(self):
src_data = (1+5j, 2+5j, 3+5j, 4+5j, 5+5j)
expected_result = (0+0j, 0+0j, 0+0j, 0+0j, 0+0j)
op = blocks.mute_cc(True)
self.help_cc((src_data,), expected_result, op)
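# Illustrative addition (not in the original suite): help_ff above is
# otherwise unused; a float-path check would mirror the ii/cc tests,
# assuming blocks.mute_ff takes the same boolean mute flag.
def test_mute_ff(self):
src_data = (1.0, 2.0, 3.0, 4.0, 5.0)
expected_result = (0.0, 0.0, 0.0, 0.0, 0.0)
op = blocks.mute_ff(True)
self.help_ff((src_data,), expected_result, op)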
if __name__ == '__main__':
gr_unittest.run(test_mute, "test_mute.xml")
| gpl-3.0 |
zhuwenping/python-for-android | python-build/python-libs/gdata/tests/gdata_tests/apps_test.py | 128 | 22040 | #!/usr/bin/python
#
# Copyright (C) 2007 SIOS Technology, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = '[email protected] (Takashi MATSUO)'
import unittest
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import atom
import gdata
from gdata import test_data
import gdata.apps
class AppsEmailListRecipientFeedTest(unittest.TestCase):
def setUp(self):
self.rcpt_feed = gdata.apps.EmailListRecipientFeedFromString(
test_data.EMAIL_LIST_RECIPIENT_FEED)
def testEmailListRecipientEntryCount(self):
"""Count EmailListRecipient entries in EmailListRecipientFeed"""
self.assertEquals(len(self.rcpt_feed.entry), 2)
def testLinkFinderFindsHtmlLink(self):
"""Tests the return value of GetXXXLink() methods"""
self.assert_(self.rcpt_feed.GetSelfLink() is not None)
self.assert_(self.rcpt_feed.GetNextLink() is not None)
self.assert_(self.rcpt_feed.GetEditLink() is None)
self.assert_(self.rcpt_feed.GetHtmlLink() is None)
def testStartItem(self):
"""Tests the existence of <openSearch:startIndex> in
EmailListRecipientFeed and verifies the value"""
self.assert_(isinstance(self.rcpt_feed.start_index, gdata.StartIndex),
"EmailListRecipient feed <openSearch:startIndex> element must be " +
"an instance of gdata.OpenSearch: %s" % self.rcpt_feed.start_index)
self.assertEquals(self.rcpt_feed.start_index.text, "1")
def testEmailListRecipientEntries(self):
"""Tests the existence of <atom:entry> in EmailListRecipientFeed
and simply verifies the value"""
for a_entry in self.rcpt_feed.entry:
self.assert_(isinstance(a_entry, gdata.apps.EmailListRecipientEntry),
"EmailListRecipient Feed <atom:entry> must be an instance of " +
"apps.EmailListRecipientEntry: %s" % a_entry)
self.assertEquals(self.rcpt_feed.entry[0].who.email, "[email protected]")
self.assertEquals(self.rcpt_feed.entry[1].who.email, "[email protected]")
class AppsEmailListFeedTest(unittest.TestCase):
def setUp(self):
self.list_feed = gdata.apps.EmailListFeedFromString(
test_data.EMAIL_LIST_FEED)
def testEmailListEntryCount(self):
"""Count EmailList entries in EmailListFeed"""
self.assertEquals(len(self.list_feed.entry), 2)
def testLinkFinderFindsHtmlLink(self):
"""Tests the return value of GetXXXLink() methods"""
self.assert_(self.list_feed.GetSelfLink() is not None)
self.assert_(self.list_feed.GetNextLink() is not None)
self.assert_(self.list_feed.GetEditLink() is None)
self.assert_(self.list_feed.GetHtmlLink() is None)
def testStartItem(self):
"""Tests the existence of <openSearch:startIndex> in EmailListFeed
and verifies the value"""
self.assert_(isinstance(self.list_feed.start_index, gdata.StartIndex),
"EmailList feed <openSearch:startIndex> element must be an instance " +
"of gdata.OpenSearch: %s" % self.list_feed.start_index)
self.assertEquals(self.list_feed.start_index.text, "1")
def testUserEntries(self):
"""Tests the existence of <atom:entry> in EmailListFeed and simply
verifies the value"""
for a_entry in self.list_feed.entry:
self.assert_(isinstance(a_entry, gdata.apps.EmailListEntry),
"EmailList Feed <atom:entry> must be an instance of " +
"apps.EmailListEntry: %s" % a_entry)
self.assertEquals(self.list_feed.entry[0].email_list.name, "us-sales")
self.assertEquals(self.list_feed.entry[1].email_list.name, "us-eng")
class AppsUserFeedTest(unittest.TestCase):
def setUp(self):
self.user_feed = gdata.apps.UserFeedFromString(test_data.USER_FEED)
def testUserEntryCount(self):
"""Count User entries in UserFeed"""
self.assertEquals(len(self.user_feed.entry), 2)
def testLinkFinderFindsHtmlLink(self):
"""Tests the return value of GetXXXLink() methods"""
self.assert_(self.user_feed.GetSelfLink() is not None)
self.assert_(self.user_feed.GetNextLink() is not None)
self.assert_(self.user_feed.GetEditLink() is None)
self.assert_(self.user_feed.GetHtmlLink() is None)
def testStartItem(self):
"""Tests the existence of <openSearch:startIndex> in UserFeed and
verifies the value"""
self.assert_(isinstance(self.user_feed.start_index, gdata.StartIndex),
"User feed <openSearch:startIndex> element must be an instance " +
"of gdata.OpenSearch: %s" % self.user_feed.start_index)
self.assertEquals(self.user_feed.start_index.text, "1")
def testUserEntries(self):
"""Tests the existence of <atom:entry> in UserFeed and simply
verifies the value"""
for a_entry in self.user_feed.entry:
self.assert_(isinstance(a_entry, gdata.apps.UserEntry),
"User Feed <atom:entry> must be an instance of " +
"apps.UserEntry: %s" % a_entry)
self.assertEquals(self.user_feed.entry[0].login.user_name, "TestUser")
self.assertEquals(self.user_feed.entry[0].who.email,
"[email protected]")
self.assertEquals(self.user_feed.entry[1].login.user_name, "JohnSmith")
self.assertEquals(self.user_feed.entry[1].who.email,
"[email protected]")
class AppsNicknameFeedTest(unittest.TestCase):
def setUp(self):
self.nick_feed = gdata.apps.NicknameFeedFromString(test_data.NICK_FEED)
def testNicknameEntryCount(self):
"""Count Nickname entries in NicknameFeed"""
self.assertEquals(len(self.nick_feed.entry), 2)
def testId(self):
"""Tests the existence of <atom:id> in NicknameFeed and verifies
the value"""
self.assert_(isinstance(self.nick_feed.id, atom.Id),
"Nickname feed <atom:id> element must be an instance of " +
"atom.Id: %s" % self.nick_feed.id)
self.assertEquals(self.nick_feed.id.text,
"http://apps-apis.google.com/a/feeds/example.com/nickname/2.0")
def testStartItem(self):
"""Tests the existence of <openSearch:startIndex> in NicknameFeed
and verifies the value"""
self.assert_(isinstance(self.nick_feed.start_index, gdata.StartIndex),
"Nickname feed <openSearch:startIndex> element must be an instance " +
"of gdata.OpenSearch: %s" % self.nick_feed.start_index)
self.assertEquals(self.nick_feed.start_index.text, "1")
def testItemsPerPage(self):
"""Tests the existence of <openSearch:itemsPerPage> in
NicknameFeed and verifies the value"""
self.assert_(isinstance(self.nick_feed.items_per_page, gdata.ItemsPerPage),
"Nickname feed <openSearch:itemsPerPage> element must be an " +
"instance of gdata.ItemsPerPage: %s" % self.nick_feed.items_per_page)
self.assertEquals(self.nick_feed.items_per_page.text, "2")
def testLinkFinderFindsHtmlLink(self):
"""Tests the return value of GetXXXLink() methods"""
self.assert_(self.nick_feed.GetSelfLink() is not None)
self.assert_(self.nick_feed.GetEditLink() is None)
self.assert_(self.nick_feed.GetHtmlLink() is None)
def testNicknameEntries(self):
"""Tests the existence of <atom:entry> in NicknameFeed and simply
verifies the value"""
for a_entry in self.nick_feed.entry:
self.assert_(isinstance(a_entry, gdata.apps.NicknameEntry),
"Nickname Feed <atom:entry> must be an instance of " +
"apps.NicknameEntry: %s" % a_entry)
self.assertEquals(self.nick_feed.entry[0].nickname.name, "Foo")
self.assertEquals(self.nick_feed.entry[1].nickname.name, "Bar")
class AppsEmailListRecipientEntryTest(unittest.TestCase):
def setUp(self):
self.rcpt_entry = gdata.apps.EmailListRecipientEntryFromString(
test_data.EMAIL_LIST_RECIPIENT_ENTRY)
def testId(self):
"""Tests the existence of <atom:id> in EmailListRecipientEntry and
verifies the value"""
self.assert_(
isinstance(self.rcpt_entry.id, atom.Id),
"EmailListRecipient entry <atom:id> element must be an instance of " +
"atom.Id: %s" %
self.rcpt_entry.id)
self.assertEquals(
self.rcpt_entry.id.text,
'https://apps-apis.google.com/a/feeds/example.com/emailList/2.0/us-sales/' +
'recipient/TestUser%40example.com')
def testUpdated(self):
"""Tests the existence of <atom:updated> in
EmailListRecipientEntry and verifies the value"""
self.assert_(
isinstance(self.rcpt_entry.updated, atom.Updated),
"EmailListRecipient entry <atom:updated> element must be an instance " +
"of atom.Updated: %s" % self.rcpt_entry.updated)
self.assertEquals(self.rcpt_entry.updated.text,
'1970-01-01T00:00:00.000Z')
def testCategory(self):
"""Tests the existence of <atom:category> in
EmailListRecipientEntry and verifies the value"""
for a_category in self.rcpt_entry.category:
self.assert_(
isinstance(a_category, atom.Category),
"EmailListRecipient entry <atom:category> element must be an " +
"instance of atom.Category: %s" % a_category)
self.assertEquals(a_category.scheme,
"http://schemas.google.com/g/2005#kind")
self.assertEquals(a_category.term,
"http://schemas.google.com/apps/2006#" +
"emailList.recipient")
def testTitle(self):
"""Tests the existence of <atom:title> in EmailListRecipientEntry
and verifies the value"""
self.assert_(
isinstance(self.rcpt_entry.title, atom.Title),
"EmailListRecipient entry <atom:title> element must be an instance of " +
"atom.Title: %s" % self.rcpt_entry.title)
self.assertEquals(self.rcpt_entry.title.text, 'TestUser')
def testLinkFinderFindsHtmlLink(self):
"""Tests the return value of GetXXXLink() methods"""
self.assert_(self.rcpt_entry.GetSelfLink() is not None)
self.assert_(self.rcpt_entry.GetEditLink() is not None)
self.assert_(self.rcpt_entry.GetHtmlLink() is None)
def testWho(self):
"""Tests the existence of a <gdata:who> in EmailListRecipientEntry
and verifies the value"""
self.assert_(isinstance(self.rcpt_entry.who, gdata.apps.Who),
"EmailListRecipient entry <gdata:who> must be an instance of " +
"apps.Who: %s" % self.rcpt_entry.who)
self.assertEquals(self.rcpt_entry.who.email, '[email protected]')
class AppsEmailListEntryTest(unittest.TestCase):
def setUp(self):
self.list_entry = gdata.apps.EmailListEntryFromString(
test_data.EMAIL_LIST_ENTRY)
def testId(self):
"""Tests the existence of <atom:id> in EmailListEntry and verifies
the value"""
self.assert_(
isinstance(self.list_entry.id, atom.Id),
"EmailList entry <atom:id> element must be an instance of atom.Id: %s" %
self.list_entry.id)
self.assertEquals(
self.list_entry.id.text,
'https://apps-apis.google.com/a/feeds/example.com/emailList/2.0/testlist')
def testUpdated(self):
"""Tests the existence of <atom:updated> in EmailListEntry and
verifies the value"""
self.assert_(
isinstance(self.list_entry.updated, atom.Updated),
"EmailList entry <atom:updated> element must be an instance of " +
"atom.Updated: %s" % self.list_entry.updated)
self.assertEquals(self.list_entry.updated.text,
'1970-01-01T00:00:00.000Z')
def testCategory(self):
"""Tests the existence of <atom:category> in EmailListEntry and
verifies the value"""
for a_category in self.list_entry.category:
self.assert_(
isinstance(a_category, atom.Category),
"EmailList entry <atom:category> element must be an instance " +
"of atom.Category: %s" % a_category)
self.assertEquals(a_category.scheme,
"http://schemas.google.com/g/2005#kind")
self.assertEquals(a_category.term,
"http://schemas.google.com/apps/2006#emailList")
def testTitle(self):
"""Tests the existence of <atom:title> in EmailListEntry and verifies
the value"""
self.assert_(
isinstance(self.list_entry.title, atom.Title),
"EmailList entry <atom:title> element must be an instance of " +
"atom.Title: %s" % self.list_entry.title)
self.assertEquals(self.list_entry.title.text, 'testlist')
def testLinkFinderFindsHtmlLink(self):
"""Tests the return value of GetXXXLink() methods"""
self.assert_(self.list_entry.GetSelfLink() is not None)
self.assert_(self.list_entry.GetEditLink() is not None)
self.assert_(self.list_entry.GetHtmlLink() is None)
def testEmailList(self):
"""Tests the existence of a <apps:emailList> in EmailListEntry and
verifies the value"""
self.assert_(isinstance(self.list_entry.email_list, gdata.apps.EmailList),
"EmailList entry <apps:emailList> must be an instance of " +
"apps.EmailList: %s" % self.list_entry.email_list)
self.assertEquals(self.list_entry.email_list.name, 'testlist')
def testFeedLink(self):
"""Test the existence of a <gdata:feedLink> in EmailListEntry and
verifies the value"""
for an_feed_link in self.list_entry.feed_link:
self.assert_(isinstance(an_feed_link, gdata.FeedLink),
"EmailList entry <gdata:feedLink> must be an instance of " +
"gdata.FeedLink: %s" % an_feed_link)
self.assertEquals(self.list_entry.feed_link[0].rel,
'http://schemas.google.com/apps/2006#' +
'emailList.recipients')
self.assertEquals(self.list_entry.feed_link[0].href,
'http://apps-apis.google.com/a/feeds/example.com/emailList/' +
'2.0/testlist/recipient/')
class AppsNicknameEntryTest(unittest.TestCase):
def setUp(self):
self.nick_entry = gdata.apps.NicknameEntryFromString(test_data.NICK_ENTRY)
def testId(self):
"""Tests the existence of <atom:id> in NicknameEntry and verifies
the value"""
self.assert_(
isinstance(self.nick_entry.id, atom.Id),
"Nickname entry <atom:id> element must be an instance of atom.Id: %s" %
self.nick_entry.id)
self.assertEquals(
self.nick_entry.id.text,
'https://apps-apis.google.com/a/feeds/example.com/nickname/2.0/Foo')
def testCategory(self):
"""Tests the existence of <atom:category> in NicknameEntry and
verifies the value"""
for a_category in self.nick_entry.category:
self.assert_(
isinstance(a_category, atom.Category),
"Nickname entry <atom:category> element must be an instance " +
"of atom.Category: %s" % a_category)
self.assertEquals(a_category.scheme,
"http://schemas.google.com/g/2005#kind")
self.assertEquals(a_category.term,
"http://schemas.google.com/apps/2006#nickname")
def testTitle(self):
"""Tests the existence of <atom:title> in NicknameEntry and
verifies the value"""
self.assert_(isinstance(self.nick_entry.title, atom.Title),
"Nickname entry <atom:title> element must be an instance " +
"of atom.Title: %s" % self.nick_entry.title)
self.assertEquals(self.nick_entry.title.text, "Foo")
def testLogin(self):
"""Tests the existence of <apps:login> in NicknameEntry and
verifies the value"""
self.assert_(isinstance(self.nick_entry.login, gdata.apps.Login),
"Nickname entry <apps:login> element must be an instance " +
"of apps.Login: %s" % self.nick_entry.login)
self.assertEquals(self.nick_entry.login.user_name, "TestUser")
def testNickname(self):
"""Tests the existence of <apps:nickname> in NicknameEntry and
verifies the value"""
self.assert_(isinstance(self.nick_entry.nickname, gdata.apps.Nickname),
"Nickname entry <apps:nickname> element must be an instance " +
"of apps.Nickname: %s" % self.nick_entry.nickname)
self.assertEquals(self.nick_entry.nickname.name, "Foo")
def testLinkFinderFindsHtmlLink(self):
"""Tests the return value of GetXXXLink() methods"""
self.assert_(self.nick_entry.GetSelfLink() is not None)
self.assert_(self.nick_entry.GetEditLink() is not None)
self.assert_(self.nick_entry.GetHtmlLink() is None)
class AppsUserEntryTest(unittest.TestCase):
def setUp(self):
self.user_entry = gdata.apps.UserEntryFromString(test_data.USER_ENTRY)
def testId(self):
"""Tests the existence of <atom:id> in UserEntry and verifies the
value"""
self.assert_(
isinstance(self.user_entry.id, atom.Id),
"User entry <atom:id> element must be an instance of atom.Id: %s" %
self.user_entry.id)
self.assertEquals(
self.user_entry.id.text,
'https://apps-apis.google.com/a/feeds/example.com/user/2.0/TestUser')
def testUpdated(self):
"""Tests the existence of <atom:updated> in UserEntry and verifies
the value"""
self.assert_(
isinstance(self.user_entry.updated, atom.Updated),
"User entry <atom:updated> element must be an instance of " +
"atom.Updated: %s" % self.user_entry.updated)
self.assertEquals(self.user_entry.updated.text,
'1970-01-01T00:00:00.000Z')
def testCategory(self):
"""Tests the existence of <atom:category> in UserEntry and
verifies the value"""
for a_category in self.user_entry.category:
self.assert_(
isinstance(a_category, atom.Category),
"User entry <atom:category> element must be an instance " +
"of atom.Category: %s" % a_category)
self.assertEquals(a_category.scheme,
"http://schemas.google.com/g/2005#kind")
self.assertEquals(a_category.term,
"http://schemas.google.com/apps/2006#user")
def testTitle(self):
"""Tests the existence of <atom:title> in UserEntry and verifies
the value"""
self.assert_(
isinstance(self.user_entry.title, atom.Title),
"User entry <atom:title> element must be an instance of atom.Title: %s" %
self.user_entry.title)
self.assertEquals(self.user_entry.title.text, 'TestUser')
def testLinkFinderFindsHtmlLink(self):
"""Tests the return value of GetXXXLink() methods"""
self.assert_(self.user_entry.GetSelfLink() is not None)
self.assert_(self.user_entry.GetEditLink() is not None)
self.assert_(self.user_entry.GetHtmlLink() is None)
def testLogin(self):
"""Tests the existence of <apps:login> in UserEntry and verifies
the value"""
self.assert_(isinstance(self.user_entry.login, gdata.apps.Login),
"User entry <apps:login> element must be an instance of apps.Login: %s"
% self.user_entry.login)
self.assertEquals(self.user_entry.login.user_name, 'TestUser')
self.assertEquals(self.user_entry.login.password, 'password')
self.assertEquals(self.user_entry.login.suspended, 'false')
self.assertEquals(self.user_entry.login.ip_whitelisted, 'false')
self.assertEquals(self.user_entry.login.hash_function_name, 'SHA-1')
def testName(self):
"""Tests the existence of <apps:name> in UserEntry and verifies
the value"""
self.assert_(isinstance(self.user_entry.name, gdata.apps.Name),
"User entry <apps:name> element must be an instance of apps.Name: %s"
% self.user_entry.name)
self.assertEquals(self.user_entry.name.family_name, 'Test')
self.assertEquals(self.user_entry.name.given_name, 'User')
def testQuota(self):
"""Tests the existence of <apps:quota> in UserEntry and verifies
the value"""
self.assert_(isinstance(self.user_entry.quota, gdata.apps.Quota),
"User entry <apps:quota> element must be an instance of apps.Quota: %s"
% self.user_entry.quota)
self.assertEquals(self.user_entry.quota.limit, '1024')
def testFeedLink(self):
"""Test the existence of a <gdata:feedLink> in UserEntry and
verifies the value"""
for an_feed_link in self.user_entry.feed_link:
self.assert_(isinstance(an_feed_link, gdata.FeedLink),
"User entry <gdata:feedLink> must be an instance of gdata.FeedLink" +
": %s" % an_feed_link)
self.assertEquals(self.user_entry.feed_link[0].rel,
'http://schemas.google.com/apps/2006#user.nicknames')
self.assertEquals(self.user_entry.feed_link[0].href,
'https://apps-apis.google.com/a/feeds/example.com/nickname/' +
'2.0?username=Test-3121')
self.assertEquals(self.user_entry.feed_link[1].rel,
'http://schemas.google.com/apps/2006#user.emailLists')
self.assertEquals(self.user_entry.feed_link[1].href,
'https://apps-apis.google.com/a/feeds/example.com/emailList/' +
'[email protected]')
def testUpdate(self):
"""Tests for modifing attributes of UserEntry"""
self.user_entry.name.family_name = 'ModifiedFamilyName'
self.user_entry.name.given_name = 'ModifiedGivenName'
self.user_entry.quota.limit = '2048'
self.user_entry.login.password = 'ModifiedPassword'
self.user_entry.login.suspended = 'true'
modified = gdata.apps.UserEntryFromString(self.user_entry.ToString())
self.assertEquals(modified.name.family_name, 'ModifiedFamilyName')
self.assertEquals(modified.name.given_name, 'ModifiedGivenName')
self.assertEquals(modified.quota.limit, '2048')
self.assertEquals(modified.login.password, 'ModifiedPassword')
self.assertEquals(modified.login.suspended, 'true')
if __name__ == '__main__':
unittest.main()
| apache-2.0 |
blueskycoco/sam | tools/perf/scripts/python/failed-syscalls-by-pid.py | 11180 | 2058 | # failed system call counts, by pid
# (c) 2010, Tom Zanussi <[email protected]>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.
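# Typical workflow (illustrative; assumes the raw_syscalls:sys_exit tracepoint):
#   perf record -e raw_syscalls:sys_exit -a -- sleep 10
#   perf script -s failed-syscalls-by-pid.py [comm|pid]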
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
usage = "perf script -s syscall-counts-by-pid.py [comm|pid]\n";
for_comm = None
for_pid = None
if len(sys.argv) > 2:
sys.exit(usage)
if len(sys.argv) > 1:
try:
for_pid = int(sys.argv[1])
except:
for_comm = sys.argv[1]
syscalls = autodict()
def trace_begin():
print "Press control+C to stop and show the summary"
def trace_end():
print_error_totals()
def raw_syscalls__sys_exit(event_name, context, common_cpu,
common_secs, common_nsecs, common_pid, common_comm,
id, ret):
if (for_comm and common_comm != for_comm) or \
(for_pid and common_pid != for_pid ):
return
if ret < 0:
try:
syscalls[common_comm][common_pid][id][ret] += 1
except TypeError:
syscalls[common_comm][common_pid][id][ret] = 1
def print_error_totals():
if for_comm is not None:
print "\nsyscall errors for %s:\n\n" % (for_comm),
else:
print "\nsyscall errors:\n\n",
print "%-30s %10s\n" % ("comm [pid]", "count"),
print "%-30s %10s\n" % ("------------------------------", \
"----------"),
comm_keys = syscalls.keys()
for comm in comm_keys:
pid_keys = syscalls[comm].keys()
for pid in pid_keys:
print "\n%s [%d]\n" % (comm, pid),
id_keys = syscalls[comm][pid].keys()
for id in id_keys:
print " syscall: %-16s\n" % syscall_name(id),
ret_keys = syscalls[comm][pid][id].keys()
for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k), reverse = True):
print " err = %-20s %10d\n" % (strerror(ret), val),
| gpl-2.0 |
affo/nova | nova/tests/unit/compute/test_compute_mgr.py | 2 | 164687 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Unit tests for ComputeManager()."""
import contextlib
import time
import uuid
from cinderclient import exceptions as cinder_exception
from eventlet import event as eventlet_event
import mock
from mox3 import mox
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_utils import importutils
from oslo_utils import timeutils
from nova.compute import build_results
from nova.compute import manager
from nova.compute import power_state
from nova.compute import task_states
from nova.compute import utils as compute_utils
from nova.compute import vm_states
from nova.conductor import api as conductor_api
from nova.conductor import rpcapi as conductor_rpcapi
from nova import context
from nova import db
from nova import exception
from nova.network import api as network_api
from nova.network import model as network_model
from nova import objects
from nova.objects import block_device as block_device_obj
from nova.openstack.common import uuidutils
from nova import test
from nova.tests.unit.compute import fake_resource_tracker
from nova.tests.unit import fake_block_device
from nova.tests.unit import fake_instance
from nova.tests.unit import fake_server_actions
from nova.tests.unit.objects import test_instance_fault
from nova.tests.unit.objects import test_instance_info_cache
from nova import utils
from nova.virt import driver as virt_driver
from nova.virt import event as virtevent
from nova.virt import hardware
CONF = cfg.CONF
CONF.import_opt('compute_manager', 'nova.service')
class ComputeManagerUnitTestCase(test.NoDBTestCase):
def setUp(self):
super(ComputeManagerUnitTestCase, self).setUp()
self.compute = importutils.import_object(CONF.compute_manager)
self.context = context.RequestContext('fake', 'fake')
fake_server_actions.stub_out_action_events(self.stubs)
@mock.patch.object(manager.ComputeManager, '_sync_instance_power_state')
@mock.patch.object(objects.Instance, 'get_by_uuid')
def test_handle_lifecycle_event(self, mock_get, mock_sync):
event_map = {virtevent.EVENT_LIFECYCLE_STOPPED: power_state.SHUTDOWN,
virtevent.EVENT_LIFECYCLE_STARTED: power_state.RUNNING,
virtevent.EVENT_LIFECYCLE_PAUSED: power_state.PAUSED,
virtevent.EVENT_LIFECYCLE_RESUMED: power_state.RUNNING}
event = mock.Mock()
event.get_instance_uuid.return_value = mock.sentinel.uuid
for transition, pwr_state in event_map.iteritems():
event.get_transition.return_value = transition
self.compute.handle_lifecycle_event(event)
mock_get.assert_called_with(mock.ANY, mock.sentinel.uuid,
expected_attrs=[])
mock_sync.assert_called_with(mock.ANY, mock_get.return_value,
pwr_state)
def test_allocate_network_succeeds_after_retries(self):
self.flags(network_allocate_retries=8)
nwapi = self.compute.network_api
self.mox.StubOutWithMock(nwapi, 'allocate_for_instance')
self.mox.StubOutWithMock(self.compute, '_instance_update')
self.mox.StubOutWithMock(time, 'sleep')
instance = fake_instance.fake_instance_obj(
self.context, expected_attrs=['system_metadata'])
is_vpn = 'fake-is-vpn'
req_networks = 'fake-req-networks'
macs = 'fake-macs'
sec_groups = 'fake-sec-groups'
final_result = 'meow'
dhcp_options = None
expected_sleep_times = [1, 2, 4, 8, 16, 30, 30, 30]
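# Capped exponential backoff: the retry delay doubles from 1s and is
# clamped at 30s for the remaining attempts (network_allocate_retries=8).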
for sleep_time in expected_sleep_times:
nwapi.allocate_for_instance(
self.context, instance, vpn=is_vpn,
requested_networks=req_networks, macs=macs,
security_groups=sec_groups,
dhcp_options=dhcp_options).AndRaise(
test.TestingException())
time.sleep(sleep_time)
nwapi.allocate_for_instance(
self.context, instance, vpn=is_vpn,
requested_networks=req_networks, macs=macs,
security_groups=sec_groups,
dhcp_options=dhcp_options).AndReturn(final_result)
self.compute._instance_update(self.context, instance['uuid'],
system_metadata={'network_allocated': 'True'})
self.mox.ReplayAll()
res = self.compute._allocate_network_async(self.context, instance,
req_networks,
macs,
sec_groups,
is_vpn,
dhcp_options)
self.assertEqual(final_result, res)
def test_allocate_network_maintains_context(self):
# override tracker with a version that doesn't need the database:
class FakeResourceTracker(object):
@staticmethod
def instance_claim(context, instance, limits):
return mock.MagicMock()
self.mox.StubOutWithMock(self.compute, '_get_resource_tracker')
self.mox.StubOutWithMock(self.compute, '_reschedule_or_error')
self.mox.StubOutWithMock(self.compute, '_allocate_network')
self.mox.StubOutWithMock(objects.BlockDeviceMappingList,
'get_by_instance_uuid')
instance = fake_instance.fake_instance_obj(self.context)
objects.BlockDeviceMappingList.get_by_instance_uuid(
mox.IgnoreArg(), instance.uuid).AndReturn([])
node = 'fake_node'
self.compute._get_resource_tracker(node).AndReturn(
FakeResourceTracker())
self.admin_context = False
def fake_allocate(context, *args, **kwargs):
if context.is_admin:
self.admin_context = True
raise test.TestingException()
# NOTE(vish): The nice mox parameter matchers here don't work well
# because they raise an exception that gets wrapped by
# the retry exception handling, so use a side effect
# to keep track of whether allocate was called with admin
# context.
self.compute._allocate_network(mox.IgnoreArg(), instance,
mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg(),
mox.IgnoreArg()).WithSideEffects(fake_allocate)
self.compute._reschedule_or_error(mox.IgnoreArg(), instance,
mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg(),
mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg(),
mox.IgnoreArg(), mox.IgnoreArg(),
mox.IgnoreArg())
self.mox.ReplayAll()
self.assertRaises(test.TestingException,
self.compute._build_instance,
self.context, {}, {},
None, None, None, True,
node, instance,
{}, False)
self.assertFalse(self.admin_context,
"_allocate_network called with admin context")
def test_reschedule_maintains_context(self):
# override tracker with a version that causes a reschedule
class FakeResourceTracker(object):
def instance_claim(self, context, instance, limits):
raise test.TestingException()
self.mox.StubOutWithMock(self.compute, '_get_resource_tracker')
self.mox.StubOutWithMock(self.compute, '_reschedule_or_error')
self.mox.StubOutWithMock(objects.BlockDeviceMappingList,
'get_by_instance_uuid')
instance = fake_instance.fake_instance_obj(self.context)
objects.BlockDeviceMappingList.get_by_instance_uuid(
mox.IgnoreArg(), instance.uuid).AndReturn([])
node = 'fake_node'
self.compute._get_resource_tracker(node).AndReturn(
FakeResourceTracker())
self.admin_context = False
def fake_retry_or_error(context, *args, **kwargs):
if context.is_admin:
self.admin_context = True
        # NOTE(vish): we could use a mox parameter matcher here but it leads
        #             to a very cryptic error message, so use the same method
        #             as the allocate_network_maintains_context test.
self.compute._reschedule_or_error(mox.IgnoreArg(), instance,
mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg(),
mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg(),
mox.IgnoreArg(), mox.IgnoreArg(),
mox.IgnoreArg()).WithSideEffects(fake_retry_or_error)
self.mox.ReplayAll()
self.assertRaises(test.TestingException,
self.compute._build_instance, self.context, {}, {},
None, None, None, True, node, instance, {}, False)
self.assertFalse(self.admin_context,
"_reschedule_or_error called with admin context")
def test_allocate_network_fails(self):
self.flags(network_allocate_retries=0)
nwapi = self.compute.network_api
self.mox.StubOutWithMock(nwapi, 'allocate_for_instance')
instance = {}
is_vpn = 'fake-is-vpn'
req_networks = 'fake-req-networks'
macs = 'fake-macs'
sec_groups = 'fake-sec-groups'
dhcp_options = None
nwapi.allocate_for_instance(
self.context, instance, vpn=is_vpn,
requested_networks=req_networks, macs=macs,
security_groups=sec_groups,
dhcp_options=dhcp_options).AndRaise(test.TestingException())
self.mox.ReplayAll()
self.assertRaises(test.TestingException,
self.compute._allocate_network_async,
self.context, instance, req_networks, macs,
sec_groups, is_vpn, dhcp_options)
def test_allocate_network_neg_conf_value_treated_as_zero(self):
self.flags(network_allocate_retries=-1)
nwapi = self.compute.network_api
self.mox.StubOutWithMock(nwapi, 'allocate_for_instance')
instance = {}
is_vpn = 'fake-is-vpn'
req_networks = 'fake-req-networks'
macs = 'fake-macs'
sec_groups = 'fake-sec-groups'
dhcp_options = None
# Only attempted once.
nwapi.allocate_for_instance(
self.context, instance, vpn=is_vpn,
requested_networks=req_networks, macs=macs,
security_groups=sec_groups,
dhcp_options=dhcp_options).AndRaise(test.TestingException())
self.mox.ReplayAll()
self.assertRaises(test.TestingException,
self.compute._allocate_network_async,
self.context, instance, req_networks, macs,
sec_groups, is_vpn, dhcp_options)
@mock.patch.object(network_api.API, 'allocate_for_instance')
@mock.patch.object(manager.ComputeManager, '_instance_update')
@mock.patch.object(time, 'sleep')
def test_allocate_network_with_conf_value_is_one(
self, sleep, _instance_update, allocate_for_instance):
self.flags(network_allocate_retries=1)
instance = fake_instance.fake_instance_obj(
self.context, expected_attrs=['system_metadata'])
is_vpn = 'fake-is-vpn'
req_networks = 'fake-req-networks'
macs = 'fake-macs'
sec_groups = 'fake-sec-groups'
dhcp_options = None
final_result = 'zhangtralon'
allocate_for_instance.side_effect = [test.TestingException(),
final_result]
res = self.compute._allocate_network_async(self.context, instance,
req_networks,
macs,
sec_groups,
is_vpn,
dhcp_options)
self.assertEqual(final_result, res)
self.assertEqual(1, sleep.call_count)
@mock.patch('nova.utils.spawn_n')
@mock.patch('nova.compute.manager.ComputeManager.'
'_do_build_and_run_instance')
def _test_max_concurrent_builds(self, mock_dbari, mock_spawn):
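        # Run the spawned build synchronously so the semaphore usage can
        # be asserted deterministically.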
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
with mock.patch.object(self.compute,
'_build_semaphore') as mock_sem:
instance = objects.Instance(uuid=str(uuid.uuid4()))
for i in (1, 2, 3):
self.compute.build_and_run_instance(self.context, instance,
mock.sentinel.image,
mock.sentinel.request_spec,
{})
self.assertEqual(3, mock_sem.__enter__.call_count)
def test_max_concurrent_builds_limited(self):
self.flags(max_concurrent_builds=2)
self._test_max_concurrent_builds()
def test_max_concurrent_builds_unlimited(self):
self.flags(max_concurrent_builds=0)
self._test_max_concurrent_builds()
def test_max_concurrent_builds_semaphore_limited(self):
self.flags(max_concurrent_builds=123)
self.assertEqual(123,
manager.ComputeManager()._build_semaphore.balance)
def test_max_concurrent_builds_semaphore_unlimited(self):
self.flags(max_concurrent_builds=0)
compute = manager.ComputeManager()
self.assertEqual(0, compute._build_semaphore.balance)
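        # A zero balance is expected: UnlimitedSemaphore never blocks,
        # which is how max_concurrent_builds=0 disables the limit.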
self.assertIsInstance(compute._build_semaphore,
compute_utils.UnlimitedSemaphore)
def test_init_host(self):
our_host = self.compute.host
fake_context = 'fake-context'
inst = fake_instance.fake_db_instance(
vm_state=vm_states.ACTIVE,
info_cache=dict(test_instance_info_cache.fake_info_cache,
network_info=None),
security_groups=None)
startup_instances = [inst, inst, inst]
def _do_mock_calls(defer_iptables_apply):
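            # Record the expected init_host flow; the iptables deferral
            # calls bracket the per-instance initialization only when
            # enabled.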
self.compute.driver.init_host(host=our_host)
context.get_admin_context().AndReturn(fake_context)
db.instance_get_all_by_host(
fake_context, our_host, columns_to_join=['info_cache'],
use_slave=False
).AndReturn(startup_instances)
if defer_iptables_apply:
self.compute.driver.filter_defer_apply_on()
self.compute._destroy_evacuated_instances(fake_context)
self.compute._init_instance(fake_context,
mox.IsA(objects.Instance))
self.compute._init_instance(fake_context,
mox.IsA(objects.Instance))
self.compute._init_instance(fake_context,
mox.IsA(objects.Instance))
if defer_iptables_apply:
self.compute.driver.filter_defer_apply_off()
self.mox.StubOutWithMock(self.compute.driver, 'init_host')
self.mox.StubOutWithMock(self.compute.driver,
'filter_defer_apply_on')
self.mox.StubOutWithMock(self.compute.driver,
'filter_defer_apply_off')
self.mox.StubOutWithMock(db, 'instance_get_all_by_host')
self.mox.StubOutWithMock(context, 'get_admin_context')
self.mox.StubOutWithMock(self.compute,
'_destroy_evacuated_instances')
self.mox.StubOutWithMock(self.compute,
'_init_instance')
# Test with defer_iptables_apply
self.flags(defer_iptables_apply=True)
_do_mock_calls(True)
self.mox.ReplayAll()
self.compute.init_host()
self.mox.VerifyAll()
# Test without defer_iptables_apply
self.mox.ResetAll()
self.flags(defer_iptables_apply=False)
_do_mock_calls(False)
self.mox.ReplayAll()
self.compute.init_host()
# tearDown() uses context.get_admin_context(), so we have
# to do the verification here and unstub it.
self.mox.VerifyAll()
self.mox.UnsetStubs()
@mock.patch('nova.objects.InstanceList')
def test_cleanup_host(self, mock_instance_list):
        # Just test that cleanup_host, when fired, invokes the underlying
        # driver's equivalent method.
mock_instance_list.get_by_host.return_value = []
with mock.patch.object(self.compute, 'driver') as mock_driver:
self.compute.init_host()
mock_driver.init_host.assert_called_once_with(host='fake-mini')
self.compute.cleanup_host()
mock_driver.cleanup_host.assert_called_once_with(host='fake-mini')
def test_init_host_with_deleted_migration(self):
our_host = self.compute.host
not_our_host = 'not-' + our_host
fake_context = 'fake-context'
deleted_instance = fake_instance.fake_instance_obj(
self.context, host=not_our_host, uuid='fake-uuid')
self.mox.StubOutWithMock(self.compute.driver, 'init_host')
self.mox.StubOutWithMock(self.compute.driver, 'destroy')
self.mox.StubOutWithMock(db, 'instance_get_all_by_host')
self.mox.StubOutWithMock(context, 'get_admin_context')
self.mox.StubOutWithMock(self.compute, 'init_virt_events')
self.mox.StubOutWithMock(self.compute, '_get_instances_on_driver')
self.mox.StubOutWithMock(self.compute, '_init_instance')
self.mox.StubOutWithMock(self.compute, '_get_instance_nw_info')
self.compute.driver.init_host(host=our_host)
context.get_admin_context().AndReturn(fake_context)
db.instance_get_all_by_host(fake_context, our_host,
columns_to_join=['info_cache'],
use_slave=False
).AndReturn([])
self.compute.init_virt_events()
# simulate failed instance
self.compute._get_instances_on_driver(
fake_context, {'deleted': False}).AndReturn([deleted_instance])
self.compute._get_instance_nw_info(fake_context, deleted_instance
).AndRaise(exception.InstanceNotFound(
instance_id=deleted_instance['uuid']))
# ensure driver.destroy is called so that driver may
# clean up any dangling files
self.compute.driver.destroy(fake_context, deleted_instance,
mox.IgnoreArg(), mox.IgnoreArg(), mox.IgnoreArg())
self.mox.ReplayAll()
self.compute.init_host()
# tearDown() uses context.get_admin_context(), so we have
# to do the verification here and unstub it.
self.mox.VerifyAll()
self.mox.UnsetStubs()
def test_init_instance_with_binding_failed_vif_type(self):
# this instance will plug a 'binding_failed' vif
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake-uuid',
info_cache=None,
power_state=power_state.RUNNING,
vm_state=vm_states.ACTIVE,
task_state=None,
expected_attrs=['info_cache'])
with contextlib.nested(
mock.patch.object(context, 'get_admin_context',
return_value=self.context),
mock.patch.object(compute_utils, 'get_nw_info_for_instance',
return_value=network_model.NetworkInfo()),
mock.patch.object(self.compute.driver, 'plug_vifs',
side_effect=exception.VirtualInterfacePlugException(
"Unexpected vif_type=binding_failed")),
mock.patch.object(self.compute, '_set_instance_error_state')
) as (get_admin_context, get_nw_info, plug_vifs, set_error_state):
self.compute._init_instance(self.context, instance)
set_error_state.assert_called_once_with(self.context, instance)
def test__get_power_state_InstanceNotFound(self):
instance = fake_instance.fake_instance_obj(
self.context,
power_state=power_state.RUNNING)
with mock.patch.object(self.compute.driver,
'get_info',
side_effect=exception.InstanceNotFound(instance_id=1)):
self.assertEqual(self.compute._get_power_state(self.context,
instance),
power_state.NOSTATE)
def test__get_power_state_NotFound(self):
instance = fake_instance.fake_instance_obj(
self.context,
power_state=power_state.RUNNING)
with mock.patch.object(self.compute.driver,
'get_info',
side_effect=exception.NotFound()):
self.assertRaises(exception.NotFound,
self.compute._get_power_state,
self.context, instance)
def test_init_instance_failed_resume_sets_error(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake-uuid',
info_cache=None,
power_state=power_state.RUNNING,
vm_state=vm_states.ACTIVE,
task_state=None,
expected_attrs=['info_cache'])
self.flags(resume_guests_state_on_host_boot=True)
self.mox.StubOutWithMock(self.compute, '_get_power_state')
self.mox.StubOutWithMock(self.compute.driver, 'plug_vifs')
self.mox.StubOutWithMock(self.compute.driver,
'resume_state_on_host_boot')
self.mox.StubOutWithMock(self.compute,
'_get_instance_block_device_info')
self.mox.StubOutWithMock(self.compute,
'_set_instance_error_state')
self.compute._get_power_state(mox.IgnoreArg(),
instance).AndReturn(power_state.SHUTDOWN)
self.compute._get_power_state(mox.IgnoreArg(),
instance).AndReturn(power_state.SHUTDOWN)
self.compute._get_power_state(mox.IgnoreArg(),
instance).AndReturn(power_state.SHUTDOWN)
self.compute.driver.plug_vifs(instance, mox.IgnoreArg())
self.compute._get_instance_block_device_info(mox.IgnoreArg(),
instance).AndReturn('fake-bdm')
self.compute.driver.resume_state_on_host_boot(mox.IgnoreArg(),
instance, mox.IgnoreArg(),
'fake-bdm').AndRaise(test.TestingException)
self.compute._set_instance_error_state(mox.IgnoreArg(), instance)
self.mox.ReplayAll()
self.compute._init_instance('fake-context', instance)
def test_init_instance_stuck_in_deleting(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake-uuid',
power_state=power_state.RUNNING,
vm_state=vm_states.ACTIVE,
task_state=task_states.DELETING)
self.mox.StubOutWithMock(objects.BlockDeviceMappingList,
'get_by_instance_uuid')
self.mox.StubOutWithMock(self.compute, '_delete_instance')
self.mox.StubOutWithMock(instance, 'obj_load_attr')
bdms = []
instance.obj_load_attr('metadata')
instance.obj_load_attr('system_metadata')
objects.BlockDeviceMappingList.get_by_instance_uuid(
self.context, instance.uuid).AndReturn(bdms)
self.compute._delete_instance(self.context, instance, bdms,
mox.IgnoreArg())
self.mox.ReplayAll()
self.compute._init_instance(self.context, instance)
def _test_init_instance_reverts_crashed_migrations(self,
old_vm_state=None):
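        # Power the instance back on only if it was active (or its prior
        # state is unknown) before the migration crashed.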
        power_on = (not old_vm_state or
                    old_vm_state == vm_states.ACTIVE)
sys_meta = {
'old_vm_state': old_vm_state
}
instance = fake_instance.fake_instance_obj(
self.context,
uuid='foo',
vm_state=vm_states.ERROR,
task_state=task_states.RESIZE_MIGRATING,
power_state=power_state.SHUTDOWN,
system_metadata=sys_meta,
expected_attrs=['system_metadata'])
self.mox.StubOutWithMock(compute_utils, 'get_nw_info_for_instance')
self.mox.StubOutWithMock(self.compute.driver, 'plug_vifs')
self.mox.StubOutWithMock(self.compute.driver,
'finish_revert_migration')
self.mox.StubOutWithMock(self.compute,
'_get_instance_block_device_info')
self.mox.StubOutWithMock(self.compute.driver, 'get_info')
self.mox.StubOutWithMock(instance, 'save')
self.mox.StubOutWithMock(self.compute, '_retry_reboot')
self.compute._retry_reboot(self.context, instance).AndReturn(
(False, None))
compute_utils.get_nw_info_for_instance(instance).AndReturn(
network_model.NetworkInfo())
self.compute.driver.plug_vifs(instance, [])
self.compute._get_instance_block_device_info(
self.context, instance).AndReturn([])
self.compute.driver.finish_revert_migration(self.context, instance,
[], [], power_on)
instance.save()
self.compute.driver.get_info(instance).AndReturn(
hardware.InstanceInfo(state=power_state.SHUTDOWN))
self.compute.driver.get_info(instance).AndReturn(
hardware.InstanceInfo(state=power_state.SHUTDOWN))
self.mox.ReplayAll()
self.compute._init_instance(self.context, instance)
self.assertIsNone(instance.task_state)
def test_init_instance_reverts_crashed_migration_from_active(self):
self._test_init_instance_reverts_crashed_migrations(
old_vm_state=vm_states.ACTIVE)
def test_init_instance_reverts_crashed_migration_from_stopped(self):
self._test_init_instance_reverts_crashed_migrations(
old_vm_state=vm_states.STOPPED)
def test_init_instance_reverts_crashed_migration_no_old_state(self):
self._test_init_instance_reverts_crashed_migrations(old_vm_state=None)
def test_init_instance_resets_crashed_live_migration(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='foo',
vm_state=vm_states.ACTIVE,
task_state=task_states.MIGRATING)
with contextlib.nested(
mock.patch.object(instance, 'save'),
mock.patch('nova.compute.utils.get_nw_info_for_instance',
return_value=network_model.NetworkInfo())
) as (save, get_nw_info):
self.compute._init_instance(self.context, instance)
save.assert_called_once_with(expected_task_state=['migrating'])
get_nw_info.assert_called_once_with(instance)
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
def _test_init_instance_sets_building_error(self, vm_state,
task_state=None):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='foo',
vm_state=vm_state,
task_state=task_state)
with mock.patch.object(instance, 'save') as save:
self.compute._init_instance(self.context, instance)
save.assert_called_once_with()
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ERROR, instance.vm_state)
def test_init_instance_sets_building_error(self):
self._test_init_instance_sets_building_error(vm_states.BUILDING)
def test_init_instance_sets_rebuilding_errors(self):
tasks = [task_states.REBUILDING,
task_states.REBUILD_BLOCK_DEVICE_MAPPING,
task_states.REBUILD_SPAWNING]
vms = [vm_states.ACTIVE, vm_states.STOPPED]
for vm_state in vms:
for task_state in tasks:
self._test_init_instance_sets_building_error(
vm_state, task_state)
def _test_init_instance_sets_building_tasks_error(self, instance):
with mock.patch.object(instance, 'save') as save:
self.compute._init_instance(self.context, instance)
save.assert_called_once_with()
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ERROR, instance.vm_state)
def test_init_instance_sets_building_tasks_error_scheduling(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='foo',
vm_state=None,
task_state=task_states.SCHEDULING)
self._test_init_instance_sets_building_tasks_error(instance)
def test_init_instance_sets_building_tasks_error_block_device(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = None
instance.task_state = task_states.BLOCK_DEVICE_MAPPING
self._test_init_instance_sets_building_tasks_error(instance)
def test_init_instance_sets_building_tasks_error_networking(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = None
instance.task_state = task_states.NETWORKING
self._test_init_instance_sets_building_tasks_error(instance)
def test_init_instance_sets_building_tasks_error_spawning(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = None
instance.task_state = task_states.SPAWNING
self._test_init_instance_sets_building_tasks_error(instance)
def _test_init_instance_cleans_image_states(self, instance):
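        # An interrupted snapshot should trigger the driver's
        # post-snapshot cleanup hook and clear the stale image task state.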
with mock.patch.object(instance, 'save') as save:
self.compute._get_power_state = mock.Mock()
self.compute.driver.post_interrupted_snapshot_cleanup = mock.Mock()
instance.info_cache = None
instance.power_state = power_state.RUNNING
self.compute._init_instance(self.context, instance)
save.assert_called_once_with()
self.compute.driver.post_interrupted_snapshot_cleanup.\
assert_called_once_with(self.context, instance)
self.assertIsNone(instance.task_state)
@mock.patch('nova.compute.manager.ComputeManager._get_power_state',
return_value=power_state.RUNNING)
@mock.patch.object(objects.BlockDeviceMappingList, 'get_by_instance_uuid')
def _test_init_instance_cleans_task_states(self, powerstate, state,
mock_get_uuid, mock_get_power_state):
instance = objects.Instance(self.context)
instance.uuid = 'fake-uuid'
instance.info_cache = None
instance.power_state = power_state.RUNNING
instance.vm_state = vm_states.ACTIVE
instance.task_state = state
mock_get_power_state.return_value = powerstate
self.compute._init_instance(self.context, instance)
return instance
def test_init_instance_cleans_image_state_pending_upload(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_PENDING_UPLOAD
self._test_init_instance_cleans_image_states(instance)
def test_init_instance_cleans_image_state_uploading(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_UPLOADING
self._test_init_instance_cleans_image_states(instance)
def test_init_instance_cleans_image_state_snapshot(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_SNAPSHOT
self._test_init_instance_cleans_image_states(instance)
def test_init_instance_cleans_image_state_snapshot_pending(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.IMAGE_SNAPSHOT_PENDING
self._test_init_instance_cleans_image_states(instance)
@mock.patch.object(objects.Instance, 'save')
def test_init_instance_cleans_running_pausing(self, mock_save):
instance = self._test_init_instance_cleans_task_states(
power_state.RUNNING, task_states.PAUSING)
mock_save.assert_called_once_with()
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
@mock.patch.object(objects.Instance, 'save')
def test_init_instance_cleans_running_unpausing(self, mock_save):
instance = self._test_init_instance_cleans_task_states(
power_state.RUNNING, task_states.UNPAUSING)
mock_save.assert_called_once_with()
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
@mock.patch('nova.compute.manager.ComputeManager.unpause_instance')
def test_init_instance_cleans_paused_unpausing(self, mock_unpause):
def fake_unpause(context, instance):
instance.task_state = None
mock_unpause.side_effect = fake_unpause
instance = self._test_init_instance_cleans_task_states(
power_state.PAUSED, task_states.UNPAUSING)
mock_unpause.assert_called_once_with(self.context, instance)
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
def test_init_instance_errors_when_not_migrating(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ERROR
instance.task_state = task_states.IMAGE_UPLOADING
self.mox.StubOutWithMock(compute_utils, 'get_nw_info_for_instance')
self.mox.ReplayAll()
self.compute._init_instance(self.context, instance)
self.mox.VerifyAll()
def test_init_instance_deletes_error_deleting_instance(self):
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake',
vm_state=vm_states.ERROR,
task_state=task_states.DELETING)
self.mox.StubOutWithMock(objects.BlockDeviceMappingList,
'get_by_instance_uuid')
self.mox.StubOutWithMock(self.compute, '_delete_instance')
self.mox.StubOutWithMock(instance, 'obj_load_attr')
bdms = []
instance.obj_load_attr('metadata')
instance.obj_load_attr('system_metadata')
objects.BlockDeviceMappingList.get_by_instance_uuid(
self.context, instance.uuid).AndReturn(bdms)
self.compute._delete_instance(self.context, instance, bdms,
mox.IgnoreArg())
self.mox.ReplayAll()
self.compute._init_instance(self.context, instance)
self.mox.VerifyAll()
@mock.patch('nova.context.RequestContext.elevated')
@mock.patch('nova.compute.utils.get_nw_info_for_instance')
@mock.patch(
'nova.compute.manager.ComputeManager._get_instance_block_device_info')
@mock.patch('nova.virt.driver.ComputeDriver.destroy')
@mock.patch('nova.virt.driver.ComputeDriver.get_volume_connector')
def test_shutdown_instance_endpoint_not_found(self, mock_connector,
mock_destroy, mock_blk_device_info, mock_nw_info, mock_elevated):
mock_connector.side_effect = cinder_exception.EndpointNotFound
mock_elevated.return_value = self.context
instance = fake_instance.fake_instance_obj(
self.context,
uuid='fake',
vm_state=vm_states.ERROR,
task_state=task_states.DELETING)
bdms = [mock.Mock(id=1, is_volume=True)]
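        # The cinder EndpointNotFound raised by get_volume_connector
        # should be handled gracefully rather than aborting the shutdown.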
self.compute._shutdown_instance(self.context, instance, bdms,
notify=False, try_deallocate_networks=False)
def _test_init_instance_retries_reboot(self, instance, reboot_type,
return_power_state):
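        # _init_instance should re-issue the interrupted reboot through
        # the compute RPC API with the expected reboot_type.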
with contextlib.nested(
mock.patch.object(self.compute, '_get_power_state',
return_value=return_power_state),
mock.patch.object(self.compute.compute_rpcapi, 'reboot_instance'),
mock.patch.object(compute_utils, 'get_nw_info_for_instance')
) as (
_get_power_state,
reboot_instance,
get_nw_info_for_instance
):
self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance, block_device_info=None,
reboot_type=reboot_type)
reboot_instance.assert_has_calls([call])
def test_init_instance_retries_reboot_pending(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING
for state in vm_states.ALLOW_SOFT_REBOOT:
instance.vm_state = state
self._test_init_instance_retries_reboot(instance, 'SOFT',
power_state.RUNNING)
def test_init_instance_retries_reboot_pending_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING_HARD
for state in vm_states.ALLOW_HARD_REBOOT:
            # NOTE(dave-mcnally) while a reboot of a vm in error state is
            # possible, we don't attempt to recover from an error during init
if state == vm_states.ERROR:
continue
instance.vm_state = state
self._test_init_instance_retries_reboot(instance, 'HARD',
power_state.RUNNING)
def test_init_instance_retries_reboot_started(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.REBOOT_STARTED
self._test_init_instance_retries_reboot(instance, 'HARD',
power_state.NOSTATE)
def test_init_instance_retries_reboot_started_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.REBOOT_STARTED_HARD
self._test_init_instance_retries_reboot(instance, 'HARD',
power_state.NOSTATE)
def _test_init_instance_cleans_reboot_state(self, instance):
with contextlib.nested(
mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING),
mock.patch.object(instance, 'save', autospec=True),
mock.patch.object(compute_utils, 'get_nw_info_for_instance')
) as (
_get_power_state,
instance_save,
get_nw_info_for_instance
):
self.compute._init_instance(self.context, instance)
instance_save.assert_called_once_with()
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
def test_init_instance_cleans_image_state_reboot_started(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.REBOOT_STARTED
instance.power_state = power_state.RUNNING
self._test_init_instance_cleans_reboot_state(instance)
def test_init_instance_cleans_image_state_reboot_started_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.REBOOT_STARTED_HARD
instance.power_state = power_state.RUNNING
self._test_init_instance_cleans_reboot_state(instance)
def test_init_instance_retries_power_off(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.id = 1
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.POWERING_OFF
with mock.patch.object(self.compute, 'stop_instance'):
self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance)
self.compute.stop_instance.assert_has_calls([call])
def test_init_instance_retries_power_on(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.id = 1
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.POWERING_ON
with mock.patch.object(self.compute, 'start_instance'):
self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance)
self.compute.start_instance.assert_has_calls([call])
def test_init_instance_retries_power_on_silent_exception(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.id = 1
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.POWERING_ON
with mock.patch.object(self.compute, 'start_instance',
return_value=Exception):
init_return = self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance)
self.compute.start_instance.assert_has_calls([call])
self.assertIsNone(init_return)
def test_init_instance_retries_power_off_silent_exception(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.id = 1
instance.vm_state = vm_states.ACTIVE
instance.task_state = task_states.POWERING_OFF
with mock.patch.object(self.compute, 'stop_instance',
return_value=Exception):
init_return = self.compute._init_instance(self.context, instance)
call = mock.call(self.context, instance)
self.compute.stop_instance.assert_has_calls([call])
self.assertIsNone(init_return)
def test_get_instances_on_driver(self):
fake_context = context.get_admin_context()
driver_instances = []
for x in xrange(10):
driver_instances.append(fake_instance.fake_db_instance())
self.mox.StubOutWithMock(self.compute.driver,
'list_instance_uuids')
self.mox.StubOutWithMock(db, 'instance_get_all_by_filters')
self.compute.driver.list_instance_uuids().AndReturn(
[inst['uuid'] for inst in driver_instances])
db.instance_get_all_by_filters(
fake_context,
{'uuid': [inst['uuid'] for
inst in driver_instances]},
'created_at', 'desc', columns_to_join=None,
limit=None, marker=None,
use_slave=True).AndReturn(
driver_instances)
self.mox.ReplayAll()
result = self.compute._get_instances_on_driver(fake_context)
self.assertEqual([x['uuid'] for x in driver_instances],
[x['uuid'] for x in result])
@mock.patch('nova.virt.driver.ComputeDriver.list_instance_uuids')
@mock.patch('nova.db.api.instance_get_all_by_filters')
    def test_get_instances_on_driver_empty(self, mock_db, mock_list):
fake_context = context.get_admin_context()
mock_list.return_value = []
result = self.compute._get_instances_on_driver(fake_context)
# instance_get_all_by_filters should not be called
self.assertEqual(0, mock_db.call_count)
self.assertEqual([],
[x['uuid'] for x in result])
def test_get_instances_on_driver_fallback(self):
# Test getting instances when driver doesn't support
# 'list_instance_uuids'
self.compute.host = 'host'
filters = {'host': self.compute.host}
fake_context = context.get_admin_context()
self.flags(instance_name_template='inst-%i')
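        # Only the odd-numbered fakes are "on" the driver, so the
        # name-based fallback should filter the DB result down to those.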
all_instances = []
driver_instances = []
for x in xrange(10):
instance = fake_instance.fake_db_instance(name='inst-%i' % x,
id=x)
if x % 2:
driver_instances.append(instance)
all_instances.append(instance)
self.mox.StubOutWithMock(self.compute.driver,
'list_instance_uuids')
self.mox.StubOutWithMock(self.compute.driver,
'list_instances')
self.mox.StubOutWithMock(db, 'instance_get_all_by_filters')
self.compute.driver.list_instance_uuids().AndRaise(
NotImplementedError())
self.compute.driver.list_instances().AndReturn(
[inst['name'] for inst in driver_instances])
db.instance_get_all_by_filters(
fake_context, filters,
'created_at', 'desc', columns_to_join=None,
limit=None, marker=None,
use_slave=True).AndReturn(all_instances)
self.mox.ReplayAll()
result = self.compute._get_instances_on_driver(fake_context, filters)
self.assertEqual([x['uuid'] for x in driver_instances],
[x['uuid'] for x in result])
def test_instance_usage_audit(self):
instances = [objects.Instance(uuid='foo')]
@classmethod
def fake_get(*a, **k):
return instances
self.flags(instance_usage_audit=True)
self.stubs.Set(compute_utils, 'has_audit_been_run',
lambda *a, **k: False)
self.stubs.Set(objects.InstanceList,
'get_active_by_window_joined', fake_get)
self.stubs.Set(compute_utils, 'start_instance_usage_audit',
lambda *a, **k: None)
self.stubs.Set(compute_utils, 'finish_instance_usage_audit',
lambda *a, **k: None)
self.mox.StubOutWithMock(self.compute.conductor_api,
'notify_usage_exists')
self.compute.conductor_api.notify_usage_exists(
self.context, instances[0], ignore_missing_network_data=False)
self.mox.ReplayAll()
self.compute._instance_usage_audit(self.context)
@mock.patch.object(objects.InstanceList, 'get_by_host')
def test_sync_power_states(self, mock_get):
instance = mock.Mock()
mock_get.return_value = [instance]
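        # Each instance on the host should be handed to the sync
        # greenthread pool rather than synced inline.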
with mock.patch.object(self.compute._sync_power_pool,
'spawn_n') as mock_spawn:
self.compute._sync_power_states(mock.sentinel.context)
mock_get.assert_called_with(mock.sentinel.context,
self.compute.host, expected_attrs=[],
use_slave=True)
mock_spawn.assert_called_once_with(mock.ANY, instance)
def _get_sync_instance(self, power_state, vm_state, task_state=None,
shutdown_terminate=False):
instance = objects.Instance()
instance.uuid = 'fake-uuid'
instance.power_state = power_state
instance.vm_state = vm_state
instance.host = self.compute.host
instance.task_state = task_state
instance.shutdown_terminate = shutdown_terminate
self.mox.StubOutWithMock(instance, 'refresh')
self.mox.StubOutWithMock(instance, 'save')
return instance
def test_sync_instance_power_state_match(self):
instance = self._get_sync_instance(power_state.RUNNING,
vm_states.ACTIVE)
instance.refresh(use_slave=False)
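        # Power states already match, so no instance.save() is expected.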
self.mox.ReplayAll()
self.compute._sync_instance_power_state(self.context, instance,
power_state.RUNNING)
def test_sync_instance_power_state_running_stopped(self):
instance = self._get_sync_instance(power_state.RUNNING,
vm_states.ACTIVE)
instance.refresh(use_slave=False)
instance.save()
self.mox.ReplayAll()
self.compute._sync_instance_power_state(self.context, instance,
power_state.SHUTDOWN)
self.assertEqual(instance.power_state, power_state.SHUTDOWN)
def _test_sync_to_stop(self, power_state, vm_state, driver_power_state,
stop=True, force=False, shutdown_terminate=False):
instance = self._get_sync_instance(
power_state, vm_state, shutdown_terminate=shutdown_terminate)
instance.refresh(use_slave=False)
instance.save()
self.mox.StubOutWithMock(self.compute.compute_api, 'stop')
self.mox.StubOutWithMock(self.compute.compute_api, 'delete')
self.mox.StubOutWithMock(self.compute.compute_api, 'force_stop')
if shutdown_terminate:
self.compute.compute_api.delete(self.context, instance)
elif stop:
if force:
self.compute.compute_api.force_stop(self.context, instance)
else:
self.compute.compute_api.stop(self.context, instance)
self.mox.ReplayAll()
self.compute._sync_instance_power_state(self.context, instance,
driver_power_state)
self.mox.VerifyAll()
self.mox.UnsetStubs()
def test_sync_instance_power_state_to_stop(self):
for ps in (power_state.SHUTDOWN, power_state.CRASHED,
power_state.SUSPENDED):
self._test_sync_to_stop(power_state.RUNNING, vm_states.ACTIVE, ps)
for ps in (power_state.SHUTDOWN, power_state.CRASHED):
self._test_sync_to_stop(power_state.PAUSED, vm_states.PAUSED, ps,
force=True)
self._test_sync_to_stop(power_state.SHUTDOWN, vm_states.STOPPED,
power_state.RUNNING, force=True)
def test_sync_instance_power_state_to_terminate(self):
self._test_sync_to_stop(power_state.RUNNING, vm_states.ACTIVE,
power_state.SHUTDOWN,
force=False, shutdown_terminate=True)
def test_sync_instance_power_state_to_no_stop(self):
for ps in (power_state.PAUSED, power_state.NOSTATE):
self._test_sync_to_stop(power_state.RUNNING, vm_states.ACTIVE, ps,
stop=False)
for vs in (vm_states.SOFT_DELETED, vm_states.DELETED):
for ps in (power_state.NOSTATE, power_state.SHUTDOWN):
self._test_sync_to_stop(power_state.RUNNING, vs, ps,
stop=False)
@mock.patch('nova.compute.manager.ComputeManager.'
'_sync_instance_power_state')
def test_query_driver_power_state_and_sync_pending_task(
self, mock_sync_power_state):
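        # An instance with an in-flight task_state is skipped entirely,
        # presumably to avoid racing with the pending operation.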
with mock.patch.object(self.compute.driver,
'get_info') as mock_get_info:
db_instance = objects.Instance(uuid='fake-uuid',
task_state=task_states.POWERING_OFF)
self.compute._query_driver_power_state_and_sync(self.context,
db_instance)
self.assertFalse(mock_get_info.called)
self.assertFalse(mock_sync_power_state.called)
@mock.patch('nova.compute.manager.ComputeManager.'
'_sync_instance_power_state')
def test_query_driver_power_state_and_sync_not_found_driver(
self, mock_sync_power_state):
error = exception.InstanceNotFound(instance_id=1)
with mock.patch.object(self.compute.driver,
'get_info', side_effect=error) as mock_get_info:
db_instance = objects.Instance(uuid='fake-uuid', task_state=None)
self.compute._query_driver_power_state_and_sync(self.context,
db_instance)
mock_get_info.assert_called_once_with(db_instance)
mock_sync_power_state.assert_called_once_with(self.context,
db_instance,
power_state.NOSTATE,
use_slave=True)
def test_run_pending_deletes(self):
self.flags(instance_delete_interval=10)
class FakeInstance(object):
def __init__(self, uuid, name, smd):
self.uuid = uuid
self.name = name
self.system_metadata = smd
self.cleaned = False
def __getitem__(self, name):
return getattr(self, name)
def save(self):
pass
class FakeInstanceList(object):
def get_by_filters(self, *args, **kwargs):
return []
a = FakeInstance('123', 'apple', {'clean_attempts': '100'})
b = FakeInstance('456', 'orange', {'clean_attempts': '3'})
c = FakeInstance('789', 'banana', {})
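        # 'a' has exhausted its delete attempts and should be skipped,
        # 'b' should be cleaned on a successful file delete, and 'c'
        # should record a first failed attempt; the attempt cap
        # presumably comes from CONF.maximum_instance_delete_attempts.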
self.mox.StubOutWithMock(objects.InstanceList,
'get_by_filters')
objects.InstanceList.get_by_filters(
{'read_deleted': 'yes'},
{'deleted': True, 'soft_deleted': False, 'host': 'fake-mini',
'cleaned': False},
expected_attrs=['info_cache', 'security_groups',
'system_metadata'],
use_slave=True).AndReturn([a, b, c])
self.mox.StubOutWithMock(self.compute.driver, 'delete_instance_files')
self.compute.driver.delete_instance_files(
mox.IgnoreArg()).AndReturn(True)
self.compute.driver.delete_instance_files(
mox.IgnoreArg()).AndReturn(False)
self.mox.ReplayAll()
self.compute._run_pending_deletes({})
self.assertFalse(a.cleaned)
self.assertEqual('100', a.system_metadata['clean_attempts'])
self.assertTrue(b.cleaned)
self.assertEqual('4', b.system_metadata['clean_attempts'])
self.assertFalse(c.cleaned)
self.assertEqual('1', c.system_metadata['clean_attempts'])
def test_attach_interface_failure(self):
# Test that the fault methods are invoked when an attach fails
db_instance = fake_instance.fake_db_instance()
f_instance = objects.Instance._from_db_object(self.context,
objects.Instance(),
db_instance)
e = exception.InterfaceAttachFailed(instance_uuid=f_instance.uuid)
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
@mock.patch.object(self.compute.network_api,
'allocate_port_for_instance',
side_effect=e)
def do_test(meth, add_fault):
self.assertRaises(exception.InterfaceAttachFailed,
self.compute.attach_interface,
self.context, f_instance, 'net_id', 'port_id',
None)
            add_fault.assert_has_calls([
                mock.call(self.context, f_instance, e,
                          mock.ANY)])
do_test()
def test_detach_interface_failure(self):
# Test that the fault methods are invoked when a detach fails
# Build test data that will cause a PortNotFound exception
f_instance = mock.MagicMock()
f_instance.info_cache = mock.MagicMock()
f_instance.info_cache.network_info = []
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
@mock.patch.object(self.compute, '_set_instance_error_state')
def do_test(meth, add_fault):
self.assertRaises(exception.PortNotFound,
self.compute.detach_interface,
self.context, f_instance, 'port_id')
            add_fault.assert_has_calls([
                mock.call(self.context, f_instance, mock.ANY, mock.ANY)])
do_test()
def test_swap_volume_volume_api_usage(self):
# This test ensures that volume_id arguments are passed to volume_api
# and that volume states are OK
volumes = {}
old_volume_id = uuidutils.generate_uuid()
volumes[old_volume_id] = {'id': old_volume_id,
'display_name': 'old_volume',
'status': 'detaching',
'size': 1}
new_volume_id = uuidutils.generate_uuid()
volumes[new_volume_id] = {'id': new_volume_id,
'display_name': 'new_volume',
'status': 'available',
'size': 2}
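        # The replacement volume is larger (2 vs 1), so the driver's
        # swap_volume should be invoked with resize_to=2 (checked in
        # fake_swap_volume below).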
def fake_vol_api_roll_detaching(context, volume_id):
self.assertTrue(uuidutils.is_uuid_like(volume_id))
if volumes[volume_id]['status'] == 'detaching':
volumes[volume_id]['status'] = 'in-use'
fake_bdm = fake_block_device.FakeDbBlockDeviceDict(
{'device_name': '/dev/vdb', 'source_type': 'volume',
'destination_type': 'volume', 'instance_uuid': 'fake',
'connection_info': '{"foo": "bar"}'})
def fake_vol_api_func(context, volume, *args):
self.assertTrue(uuidutils.is_uuid_like(volume))
return {}
def fake_vol_get(context, volume_id):
self.assertTrue(uuidutils.is_uuid_like(volume_id))
return volumes[volume_id]
def fake_vol_unreserve(context, volume_id):
self.assertTrue(uuidutils.is_uuid_like(volume_id))
if volumes[volume_id]['status'] == 'attaching':
volumes[volume_id]['status'] = 'available'
def fake_vol_migrate_volume_completion(context, old_volume_id,
new_volume_id, error=False):
self.assertTrue(uuidutils.is_uuid_like(old_volume_id))
self.assertTrue(uuidutils.is_uuid_like(new_volume_id))
volumes[old_volume_id]['status'] = 'in-use'
return {'save_volume_id': new_volume_id}
def fake_func_exc(*args, **kwargs):
raise AttributeError # Random exception
def fake_swap_volume(old_connection_info, new_connection_info,
instance, mountpoint, resize_to):
self.assertEqual(resize_to, 2)
self.stubs.Set(self.compute.volume_api, 'roll_detaching',
fake_vol_api_roll_detaching)
self.stubs.Set(self.compute.volume_api, 'get', fake_vol_get)
self.stubs.Set(self.compute.volume_api, 'initialize_connection',
fake_vol_api_func)
self.stubs.Set(self.compute.volume_api, 'unreserve_volume',
fake_vol_unreserve)
self.stubs.Set(self.compute.volume_api, 'terminate_connection',
fake_vol_api_func)
self.stubs.Set(db, 'block_device_mapping_get_by_volume_id',
lambda x, y, z: fake_bdm)
self.stubs.Set(self.compute.driver, 'get_volume_connector',
lambda x: {})
self.stubs.Set(self.compute.driver, 'swap_volume',
fake_swap_volume)
self.stubs.Set(self.compute.volume_api, 'migrate_volume_completion',
fake_vol_migrate_volume_completion)
self.stubs.Set(db, 'block_device_mapping_update',
lambda *a, **k: fake_bdm)
self.stubs.Set(db,
'instance_fault_create',
lambda x, y:
test_instance_fault.fake_faults['fake-uuid'][0])
# Good path
self.compute.swap_volume(self.context, old_volume_id, new_volume_id,
fake_instance.fake_instance_obj(
self.context, **{'uuid': 'fake'}))
self.assertEqual(volumes[old_volume_id]['status'], 'in-use')
# Error paths
volumes[old_volume_id]['status'] = 'detaching'
volumes[new_volume_id]['status'] = 'attaching'
self.stubs.Set(self.compute.driver, 'swap_volume', fake_func_exc)
self.assertRaises(AttributeError, self.compute.swap_volume,
self.context, old_volume_id, new_volume_id,
fake_instance.fake_instance_obj(
self.context, **{'uuid': 'fake'}))
self.assertEqual(volumes[old_volume_id]['status'], 'in-use')
self.assertEqual(volumes[new_volume_id]['status'], 'available')
volumes[old_volume_id]['status'] = 'detaching'
volumes[new_volume_id]['status'] = 'attaching'
self.stubs.Set(self.compute.volume_api, 'initialize_connection',
fake_func_exc)
self.assertRaises(AttributeError, self.compute.swap_volume,
self.context, old_volume_id, new_volume_id,
fake_instance.fake_instance_obj(
self.context, **{'uuid': 'fake'}))
self.assertEqual(volumes[old_volume_id]['status'], 'in-use')
self.assertEqual(volumes[new_volume_id]['status'], 'available')
def test_check_can_live_migrate_source(self):
is_volume_backed = 'volume_backed'
dest_check_data = dict(foo='bar')
db_instance = fake_instance.fake_db_instance()
instance = objects.Instance._from_db_object(
self.context, objects.Instance(), db_instance)
expected_dest_check_data = dict(dest_check_data,
is_volume_backed=is_volume_backed)
self.mox.StubOutWithMock(self.compute.compute_api,
'is_volume_backed_instance')
self.mox.StubOutWithMock(self.compute,
'_get_instance_block_device_info')
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_source')
self.compute.compute_api.is_volume_backed_instance(
self.context, instance).AndReturn(is_volume_backed)
self.compute._get_instance_block_device_info(
self.context, instance, refresh_conn_info=True
).AndReturn({'block_device_mapping': 'fake'})
self.compute.driver.check_can_live_migrate_source(
self.context, instance, expected_dest_check_data,
{'block_device_mapping': 'fake'})
self.mox.ReplayAll()
self.compute.check_can_live_migrate_source(
self.context, instance=instance,
dest_check_data=dest_check_data)
def _test_check_can_live_migrate_destination(self, do_raise=False,
has_mig_data=False):
db_instance = fake_instance.fake_db_instance(host='fake-host')
instance = objects.Instance._from_db_object(
self.context, objects.Instance(), db_instance)
instance.host = 'fake-host'
block_migration = 'block_migration'
disk_over_commit = 'disk_over_commit'
src_info = 'src_info'
dest_info = 'dest_info'
dest_check_data = dict(foo='bar')
mig_data = dict(cow='moo')
expected_result = dict(mig_data)
if has_mig_data:
dest_check_data['migrate_data'] = dict(cat='meow')
expected_result.update(cat='meow')
self.mox.StubOutWithMock(self.compute, '_get_compute_info')
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_destination')
self.mox.StubOutWithMock(self.compute.compute_rpcapi,
'check_can_live_migrate_source')
self.mox.StubOutWithMock(self.compute.driver,
'check_can_live_migrate_destination_cleanup')
self.compute._get_compute_info(self.context,
'fake-host').AndReturn(src_info)
self.compute._get_compute_info(self.context,
CONF.host).AndReturn(dest_info)
self.compute.driver.check_can_live_migrate_destination(
self.context, instance, src_info, dest_info,
block_migration, disk_over_commit).AndReturn(dest_check_data)
mock_meth = self.compute.compute_rpcapi.check_can_live_migrate_source(
self.context, instance, dest_check_data)
if do_raise:
mock_meth.AndRaise(test.TestingException())
self.mox.StubOutWithMock(db, 'instance_fault_create')
db.instance_fault_create(
self.context, mox.IgnoreArg()).AndReturn(
test_instance_fault.fake_faults['fake-uuid'][0])
else:
mock_meth.AndReturn(mig_data)
self.compute.driver.check_can_live_migrate_destination_cleanup(
self.context, dest_check_data)
self.mox.ReplayAll()
result = self.compute.check_can_live_migrate_destination(
self.context, instance=instance,
block_migration=block_migration,
disk_over_commit=disk_over_commit)
self.assertEqual(expected_result, result)
def test_check_can_live_migrate_destination_success(self):
self._test_check_can_live_migrate_destination()
def test_check_can_live_migrate_destination_success_w_mig_data(self):
self._test_check_can_live_migrate_destination(has_mig_data=True)
def test_check_can_live_migrate_destination_fail(self):
self.assertRaises(
test.TestingException,
self._test_check_can_live_migrate_destination,
do_raise=True)
@mock.patch('nova.compute.manager.InstanceEvents._lock_name')
def test_prepare_for_instance_event(self, lock_name_mock):
inst_obj = objects.Instance(uuid='foo')
result = self.compute.instance_events.prepare_for_instance_event(
inst_obj, 'test-event')
self.assertIn('foo', self.compute.instance_events._events)
self.assertIn('test-event',
self.compute.instance_events._events['foo'])
self.assertEqual(
result,
self.compute.instance_events._events['foo']['test-event'])
self.assertTrue(hasattr(result, 'send'))
lock_name_mock.assert_called_once_with(inst_obj)
@mock.patch('nova.compute.manager.InstanceEvents._lock_name')
def test_pop_instance_event(self, lock_name_mock):
event = eventlet_event.Event()
self.compute.instance_events._events = {
'foo': {
'test-event': event,
}
}
inst_obj = objects.Instance(uuid='foo')
event_obj = objects.InstanceExternalEvent(name='test-event',
tag=None)
result = self.compute.instance_events.pop_instance_event(inst_obj,
event_obj)
self.assertEqual(result, event)
lock_name_mock.assert_called_once_with(inst_obj)
@mock.patch('nova.compute.manager.InstanceEvents._lock_name')
def test_clear_events_for_instance(self, lock_name_mock):
event = eventlet_event.Event()
self.compute.instance_events._events = {
'foo': {
'test-event': event,
}
}
inst_obj = objects.Instance(uuid='foo')
result = self.compute.instance_events.clear_events_for_instance(
inst_obj)
self.assertEqual(result, {'test-event': event})
lock_name_mock.assert_called_once_with(inst_obj)
def test_instance_events_lock_name(self):
inst_obj = objects.Instance(uuid='foo')
result = self.compute.instance_events._lock_name(inst_obj)
self.assertEqual(result, 'foo-events')
def test_prepare_for_instance_event_again(self):
inst_obj = objects.Instance(uuid='foo')
self.compute.instance_events.prepare_for_instance_event(
inst_obj, 'test-event')
# A second attempt will avoid creating a new list; make sure we
# get the current list
result = self.compute.instance_events.prepare_for_instance_event(
inst_obj, 'test-event')
self.assertIn('foo', self.compute.instance_events._events)
self.assertIn('test-event',
self.compute.instance_events._events['foo'])
self.assertEqual(
result,
self.compute.instance_events._events['foo']['test-event'])
self.assertTrue(hasattr(result, 'send'))
def test_process_instance_event(self):
event = eventlet_event.Event()
self.compute.instance_events._events = {
'foo': {
'test-event': event,
}
}
inst_obj = objects.Instance(uuid='foo')
event_obj = objects.InstanceExternalEvent(name='test-event', tag=None)
self.compute._process_instance_event(inst_obj, event_obj)
self.assertTrue(event.ready())
self.assertEqual(event_obj, event.wait())
self.assertEqual({}, self.compute.instance_events._events)
def test_external_instance_event(self):
instances = [
objects.Instance(id=1, uuid='uuid1'),
objects.Instance(id=2, uuid='uuid2')]
events = [
objects.InstanceExternalEvent(name='network-changed',
tag='tag1',
instance_uuid='uuid1'),
objects.InstanceExternalEvent(name='foo', instance_uuid='uuid2',
tag='tag2')]
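        # 'network-changed' events should refresh the network info cache,
        # while other events are dispatched to _process_instance_event.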
@mock.patch.object(self.compute.network_api, 'get_instance_nw_info')
@mock.patch.object(self.compute, '_process_instance_event')
def do_test(_process_instance_event, get_instance_nw_info):
self.compute.external_instance_event(self.context,
instances, events)
get_instance_nw_info.assert_called_once_with(self.context,
instances[0])
_process_instance_event.assert_called_once_with(instances[1],
events[1])
do_test()
def test_retry_reboot_pending_soft(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING
instance.vm_state = vm_states.ACTIVE
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING):
allow_reboot, reboot_type = self.compute._retry_reboot(
context, instance)
self.assertTrue(allow_reboot)
self.assertEqual(reboot_type, 'SOFT')
def test_retry_reboot_pending_hard(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_PENDING_HARD
instance.vm_state = vm_states.ACTIVE
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING):
allow_reboot, reboot_type = self.compute._retry_reboot(
context, instance)
self.assertTrue(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
def test_retry_reboot_starting_soft_off(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_STARTED
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.NOSTATE):
allow_reboot, reboot_type = self.compute._retry_reboot(
context, instance)
self.assertTrue(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
def test_retry_reboot_starting_hard_off(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_STARTED_HARD
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.NOSTATE):
allow_reboot, reboot_type = self.compute._retry_reboot(
context, instance)
self.assertTrue(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
def test_retry_reboot_starting_hard_on(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = task_states.REBOOT_STARTED_HARD
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING):
allow_reboot, reboot_type = self.compute._retry_reboot(
context, instance)
self.assertFalse(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
def test_retry_reboot_no_reboot(self):
instance = objects.Instance(self.context)
instance.uuid = 'foo'
instance.task_state = 'bar'
with mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING):
allow_reboot, reboot_type = self.compute._retry_reboot(
context, instance)
self.assertFalse(allow_reboot)
self.assertEqual(reboot_type, 'HARD')
@mock.patch('nova.objects.BlockDeviceMapping.get_by_volume_id')
@mock.patch('nova.compute.manager.ComputeManager._detach_volume')
@mock.patch('nova.objects.Instance._from_db_object')
def test_remove_volume_connection(self, inst_from_db, detach, bdm_get):
bdm = mock.sentinel.bdm
inst_obj = mock.sentinel.inst_obj
bdm_get.return_value = bdm
inst_from_db.return_value = inst_obj
with mock.patch.object(self.compute, 'volume_api'):
self.compute.remove_volume_connection(self.context, 'vol',
inst_obj)
detach.assert_called_once_with(self.context, inst_obj, bdm)
def _test_rescue(self, clean_shutdown=True):
instance = fake_instance.fake_instance_obj(
self.context, vm_state=vm_states.ACTIVE)
fake_nw_info = network_model.NetworkInfo()
rescue_image_meta = {'id': 'fake', 'name': 'fake'}
with contextlib.nested(
mock.patch.object(self.context, 'elevated',
return_value=self.context),
mock.patch.object(self.compute, '_get_instance_nw_info',
return_value=fake_nw_info),
mock.patch.object(self.compute, '_get_rescue_image',
return_value=rescue_image_meta),
mock.patch.object(self.compute, '_notify_about_instance_usage'),
mock.patch.object(self.compute, '_power_off_instance'),
mock.patch.object(self.compute.driver, 'rescue'),
mock.patch.object(self.compute.conductor_api,
'notify_usage_exists'),
mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING),
mock.patch.object(instance, 'save')
) as (
elevated_context, get_nw_info,
get_rescue_image, notify_instance_usage, power_off_instance,
driver_rescue, notify_usage_exists, get_power_state, instance_save
):
self.compute.rescue_instance(
self.context, instance, rescue_password='verybadpass',
rescue_image_ref=None, clean_shutdown=clean_shutdown)
# assert the field values on the instance object
self.assertEqual(vm_states.RESCUED, instance.vm_state)
self.assertIsNone(instance.task_state)
self.assertEqual(power_state.RUNNING, instance.power_state)
self.assertIsNotNone(instance.launched_at)
# assert our mock calls
get_nw_info.assert_called_once_with(self.context, instance)
get_rescue_image.assert_called_once_with(
self.context, instance, None)
extra_usage_info = {'rescue_image_name': 'fake'}
notify_calls = [
mock.call(self.context, instance, "rescue.start",
extra_usage_info=extra_usage_info,
network_info=fake_nw_info),
mock.call(self.context, instance, "rescue.end",
extra_usage_info=extra_usage_info,
network_info=fake_nw_info)
]
notify_instance_usage.assert_has_calls(notify_calls)
power_off_instance.assert_called_once_with(self.context, instance,
clean_shutdown)
driver_rescue.assert_called_once_with(
self.context, instance, fake_nw_info, rescue_image_meta,
'verybadpass')
notify_usage_exists.assert_called_once_with(
self.context, instance, current_period=True)
instance_save.assert_called_once_with(
expected_task_state=task_states.RESCUING)
def test_rescue(self):
self._test_rescue()
def test_rescue_forced_shutdown(self):
self._test_rescue(clean_shutdown=False)
def test_unrescue(self):
instance = fake_instance.fake_instance_obj(
self.context, vm_state=vm_states.RESCUED)
fake_nw_info = network_model.NetworkInfo()
with contextlib.nested(
mock.patch.object(self.context, 'elevated',
return_value=self.context),
mock.patch.object(self.compute, '_get_instance_nw_info',
return_value=fake_nw_info),
mock.patch.object(self.compute, '_notify_about_instance_usage'),
mock.patch.object(self.compute.driver, 'unrescue'),
mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.RUNNING),
mock.patch.object(instance, 'save')
) as (
elevated_context, get_nw_info,
notify_instance_usage, driver_unrescue, get_power_state,
instance_save
):
self.compute.unrescue_instance(self.context, instance)
# assert the field values on the instance object
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
self.assertEqual(power_state.RUNNING, instance.power_state)
# assert our mock calls
get_nw_info.assert_called_once_with(self.context, instance)
notify_calls = [
mock.call(self.context, instance, "unrescue.start",
network_info=fake_nw_info),
mock.call(self.context, instance, "unrescue.end",
network_info=fake_nw_info)
]
notify_instance_usage.assert_has_calls(notify_calls)
driver_unrescue.assert_called_once_with(instance, fake_nw_info)
instance_save.assert_called_once_with(
expected_task_state=task_states.UNRESCUING)
@mock.patch('nova.compute.manager.ComputeManager._get_power_state',
return_value=power_state.RUNNING)
@mock.patch.object(objects.Instance, 'save')
@mock.patch('nova.utils.generate_password', return_value='fake-pass')
def test_set_admin_password(self, gen_password_mock,
instance_save_mock, power_state_mock):
# Ensure instance can have its admin password set.
instance = fake_instance.fake_instance_obj(
self.context,
vm_state=vm_states.ACTIVE,
task_state=task_states.UPDATING_PASSWORD)
@mock.patch.object(self.context, 'elevated', return_value=self.context)
@mock.patch.object(self.compute.driver, 'set_admin_password')
def do_test(driver_mock, elevated_mock):
# call the manager method
self.compute.set_admin_password(self.context, instance, None)
# make our assertions
self.assertEqual(vm_states.ACTIVE, instance.vm_state)
self.assertIsNone(instance.task_state)
power_state_mock.assert_called_once_with(self.context, instance)
driver_mock.assert_called_once_with(instance, 'fake-pass')
instance_save_mock.assert_called_once_with(
expected_task_state=task_states.UPDATING_PASSWORD)
do_test()
@mock.patch('nova.compute.manager.ComputeManager._get_power_state',
return_value=power_state.NOSTATE)
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
def test_set_admin_password_bad_state(self, add_fault_mock,
instance_save_mock,
power_state_mock):
        # Test that setting the password fails when the instance is not
        # running (power state NOSTATE).
instance = fake_instance.fake_instance_obj(self.context)
with mock.patch.object(self.context, 'elevated',
return_value=self.context):
# call the manager method
self.assertRaises(exception.InstancePasswordSetFailed,
self.compute.set_admin_password,
self.context, instance, None)
# make our assertions
power_state_mock.assert_called_once_with(self.context, instance)
instance_save_mock.assert_called_once_with(
expected_task_state=task_states.UPDATING_PASSWORD)
add_fault_mock.assert_called_once_with(
self.context, instance, mock.ANY, mock.ANY)
@mock.patch('nova.utils.generate_password', return_value='fake-pass')
@mock.patch('nova.compute.manager.ComputeManager._get_power_state',
return_value=power_state.RUNNING)
@mock.patch('nova.compute.manager.ComputeManager._instance_update')
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
def _do_test_set_admin_password_driver_error(self, exc,
expected_vm_state,
expected_task_state,
expected_exception,
add_fault_mock,
instance_save_mock,
update_mock,
power_state_mock,
gen_password_mock):
# Ensure expected exception is raised if set_admin_password fails.
instance = fake_instance.fake_instance_obj(
self.context,
vm_state=vm_states.ACTIVE,
task_state=task_states.UPDATING_PASSWORD)
@mock.patch.object(self.context, 'elevated', return_value=self.context)
@mock.patch.object(self.compute.driver, 'set_admin_password',
side_effect=exc)
def do_test(driver_mock, elevated_mock):
            # an error raised by the driver must not reveal internal
            # information, so a new error is raised in its place
self.assertRaises(expected_exception,
self.compute.set_admin_password,
self.context,
instance=instance,
new_pass=None)
if expected_exception == NotImplementedError:
instance_save_mock.assert_called_once_with(
expected_task_state=task_states.UPDATING_PASSWORD)
else:
# setting the instance to error state
instance_save_mock.assert_called_once_with()
self.assertEqual(expected_vm_state, instance.vm_state)
# check revert_task_state decorator
update_mock.assert_called_once_with(
self.context, instance.uuid,
task_state=expected_task_state)
# check wrap_instance_fault decorator
add_fault_mock.assert_called_once_with(
self.context, instance, mock.ANY, mock.ANY)
do_test()
def test_set_admin_password_driver_not_authorized(self):
# Ensure expected exception is raised if set_admin_password not
# authorized.
exc = exception.Forbidden('Internal error')
expected_exception = exception.InstancePasswordSetFailed
self._do_test_set_admin_password_driver_error(
exc, vm_states.ERROR, None, expected_exception)
def test_set_admin_password_driver_not_implemented(self):
# Ensure expected exception is raised if set_admin_password not
# implemented by driver.
exc = NotImplementedError()
expected_exception = NotImplementedError
self._do_test_set_admin_password_driver_error(
exc, vm_states.ACTIVE, None, expected_exception)
def _test_init_host_with_partial_migration(self, task_state=None,
vm_state=vm_states.ACTIVE):
our_host = self.compute.host
instance_1 = objects.Instance(self.context)
instance_1.uuid = 'foo'
instance_1.task_state = task_state
instance_1.vm_state = vm_state
instance_1.host = 'not-' + our_host
instance_2 = objects.Instance(self.context)
instance_2.uuid = 'bar'
instance_2.task_state = None
instance_2.vm_state = vm_states.ACTIVE
instance_2.host = 'not-' + our_host
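        # instance_1 is mid-migration/resize, so _destroy_evacuated_instances
        # must leave it alone; only instance_2 should be destroyed below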
with contextlib.nested(
mock.patch.object(self.compute, '_get_instances_on_driver',
return_value=[instance_1,
instance_2]),
mock.patch.object(self.compute, '_get_instance_nw_info',
return_value=None),
mock.patch.object(self.compute, '_get_instance_block_device_info',
return_value={}),
mock.patch.object(self.compute, '_is_instance_storage_shared',
return_value=False),
mock.patch.object(self.compute.driver, 'destroy')
) as (_get_instances_on_driver, _get_instance_nw_info,
_get_instance_block_device_info, _is_instance_storage_shared,
destroy):
self.compute._destroy_evacuated_instances(self.context)
destroy.assert_called_once_with(self.context, instance_2, None,
{}, True)
def test_init_host_with_partial_migration_migrating(self):
self._test_init_host_with_partial_migration(
task_state=task_states.MIGRATING)
def test_init_host_with_partial_migration_resize_migrating(self):
self._test_init_host_with_partial_migration(
task_state=task_states.RESIZE_MIGRATING)
def test_init_host_with_partial_migration_resize_migrated(self):
self._test_init_host_with_partial_migration(
task_state=task_states.RESIZE_MIGRATED)
def test_init_host_with_partial_migration_finish_resize(self):
self._test_init_host_with_partial_migration(
task_state=task_states.RESIZE_FINISH)
def test_init_host_with_partial_migration_resized(self):
self._test_init_host_with_partial_migration(
vm_state=vm_states.RESIZED)
@mock.patch('nova.compute.manager.ComputeManager._instance_update')
def test_error_out_instance_on_exception_not_implemented_err(self,
inst_update_mock):
instance = fake_instance.fake_instance_obj(self.context)
def do_test():
with self.compute._error_out_instance_on_exception(
self.context, instance, instance_state=vm_states.STOPPED):
raise NotImplementedError('test')
self.assertRaises(NotImplementedError, do_test)
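        # the context manager re-raises, but first resets the instance to the
        # requested STOPPED state with the task state cleared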
inst_update_mock.assert_called_once_with(
self.context, instance.uuid,
vm_state=vm_states.STOPPED, task_state=None)
@mock.patch('nova.compute.manager.ComputeManager._instance_update')
def test_error_out_instance_on_exception_inst_fault_rollback(self,
inst_update_mock):
instance = fake_instance.fake_instance_obj(self.context)
def do_test():
with self.compute._error_out_instance_on_exception(self.context,
instance):
raise exception.InstanceFaultRollback(
inner_exception=test.TestingException('test'))
self.assertRaises(test.TestingException, do_test)
inst_update_mock.assert_called_once_with(
self.context, instance.uuid,
vm_state=vm_states.ACTIVE, task_state=None)
@mock.patch('nova.compute.manager.ComputeManager.'
'_set_instance_error_state')
def test_error_out_instance_on_exception_unknown_with_quotas(self,
set_error):
instance = fake_instance.fake_instance_obj(self.context)
quotas = mock.create_autospec(objects.Quotas, spec_set=True)
def do_test():
with self.compute._error_out_instance_on_exception(
self.context, instance, quotas):
raise test.TestingException('test')
self.assertRaises(test.TestingException, do_test)
self.assertEqual(1, len(quotas.method_calls))
self.assertEqual(mock.call.rollback(), quotas.method_calls[0])
set_error.assert_called_once_with(self.context, instance)
def test_cleanup_volumes(self):
instance = fake_instance.fake_instance_obj(self.context)
bdm_do_not_delete_dict = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id1', 'source_type': 'image',
'delete_on_termination': False})
bdm_delete_dict = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id2', 'source_type': 'image',
'delete_on_termination': True})
bdms = block_device_obj.block_device_make_list(self.context,
[bdm_do_not_delete_dict, bdm_delete_dict])
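        # only the BDM flagged delete_on_termination=True should be deleted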
with mock.patch.object(self.compute.volume_api,
'delete') as volume_delete:
self.compute._cleanup_volumes(self.context, instance.uuid, bdms)
volume_delete.assert_called_once_with(self.context,
bdms[1].volume_id)
def test_cleanup_volumes_exception_do_not_raise(self):
instance = fake_instance.fake_instance_obj(self.context)
bdm_dict1 = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id1', 'source_type': 'image',
'delete_on_termination': True})
bdm_dict2 = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id2', 'source_type': 'image',
'delete_on_termination': True})
bdms = block_device_obj.block_device_make_list(self.context,
[bdm_dict1, bdm_dict2])
with mock.patch.object(self.compute.volume_api,
'delete',
side_effect=[test.TestingException(), None]) as volume_delete:
self.compute._cleanup_volumes(self.context, instance.uuid, bdms,
raise_exc=False)
calls = [mock.call(self.context, bdm.volume_id) for bdm in bdms]
self.assertEqual(calls, volume_delete.call_args_list)
def test_cleanup_volumes_exception_raise(self):
instance = fake_instance.fake_instance_obj(self.context)
bdm_dict1 = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id1', 'source_type': 'image',
'delete_on_termination': True})
bdm_dict2 = fake_block_device.FakeDbBlockDeviceDict(
{'volume_id': 'fake-id2', 'source_type': 'image',
'delete_on_termination': True})
bdms = block_device_obj.block_device_make_list(self.context,
[bdm_dict1, bdm_dict2])
with mock.patch.object(self.compute.volume_api,
'delete',
side_effect=[test.TestingException(), None]) as volume_delete:
self.assertRaises(test.TestingException,
self.compute._cleanup_volumes, self.context, instance.uuid,
bdms)
calls = [mock.call(self.context, bdm.volume_id) for bdm in bdms]
self.assertEqual(calls, volume_delete.call_args_list)
def test_start_building(self):
instance = fake_instance.fake_instance_obj(self.context)
with mock.patch.object(self.compute, '_instance_update') as update:
self.compute._start_building(self.context, instance)
update.assert_called_once_with(
self.context, instance.uuid, vm_state=vm_states.BUILDING,
task_state=None, expected_task_state=(task_states.SCHEDULING,
None))
def _test_prebuild_instance_build_abort_exception(self, exc):
instance = fake_instance.fake_instance_obj(self.context)
with contextlib.nested(
mock.patch.object(self.compute, '_check_instance_exists'),
mock.patch.object(self.compute, '_start_building',
side_effect=exc)
) as (
check, start
):
# run the code
self.assertRaises(exception.BuildAbortException,
self.compute._prebuild_instance,
self.context, instance)
# assert the calls
check.assert_called_once_with(self.context, instance)
start.assert_called_once_with(self.context, instance)
def test_prebuild_instance_instance_not_found(self):
self._test_prebuild_instance_build_abort_exception(
exception.InstanceNotFound(instance_id='fake'))
def test_prebuild_instance_unexpected_deleting_task_state_err(self):
self._test_prebuild_instance_build_abort_exception(
exception.UnexpectedDeletingTaskStateError(expected='foo',
actual='bar'))
def test_stop_instance_task_state_none_power_state_shutdown(self):
# Tests that stop_instance doesn't puke when the instance power_state
# is shutdown and the task_state is None.
instance = fake_instance.fake_instance_obj(
self.context, vm_state=vm_states.ACTIVE,
task_state=None, power_state=power_state.SHUTDOWN)
@mock.patch.object(self.compute, '_get_power_state',
return_value=power_state.SHUTDOWN)
@mock.patch.object(self.compute, '_notify_about_instance_usage')
@mock.patch.object(self.compute, '_power_off_instance')
@mock.patch.object(instance, 'save')
def do_test(save_mock, power_off_mock, notify_mock, get_state_mock):
# run the code
self.compute.stop_instance(self.context, instance)
# assert the calls
self.assertEqual(2, get_state_mock.call_count)
notify_mock.assert_has_calls([
mock.call(self.context, instance, 'power_off.start'),
mock.call(self.context, instance, 'power_off.end')
])
power_off_mock.assert_called_once_with(
self.context, instance, True)
save_mock.assert_called_once_with(
expected_task_state=[task_states.POWERING_OFF, None])
self.assertEqual(power_state.SHUTDOWN, instance.power_state)
self.assertIsNone(instance.task_state)
self.assertEqual(vm_states.STOPPED, instance.vm_state)
do_test()
def test_reset_network_driver_not_implemented(self):
instance = fake_instance.fake_instance_obj(self.context)
@mock.patch.object(self.compute.driver, 'reset_network',
side_effect=NotImplementedError())
@mock.patch.object(compute_utils, 'add_instance_fault_from_exc')
def do_test(mock_add_fault, mock_reset):
self.assertRaises(messaging.ExpectedException,
self.compute.reset_network,
self.context,
instance)
self.compute = utils.ExceptionHelper(self.compute)
self.assertRaises(NotImplementedError,
self.compute.reset_network,
self.context,
instance)
do_test()
def test_rebuild_default_impl(self):
def _detach(context, bdms):
pass
def _attach(context, instance, bdms, do_check_attach=True):
return {'block_device_mapping': 'shared_block_storage'}
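        # _spawn below verifies that the sentinel returned by _attach is
        # threaded through to the driver as block_device_info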
def _spawn(context, instance, image_meta, injected_files,
admin_password, network_info=None, block_device_info=None):
self.assertEqual(block_device_info['block_device_mapping'],
'shared_block_storage')
with contextlib.nested(
mock.patch.object(self.compute.driver, 'destroy',
return_value=None),
mock.patch.object(self.compute.driver, 'spawn',
side_effect=_spawn),
mock.patch.object(objects.Instance, 'save',
return_value=None)
        ) as (
mock_destroy,
mock_spawn,
mock_save
):
instance = fake_instance.fake_instance_obj(self.context)
instance.task_state = task_states.REBUILDING
instance.save(expected_task_state=[task_states.REBUILDING])
self.compute._rebuild_default_impl(self.context,
instance,
None,
[],
admin_password='new_pass',
bdms=[],
detach_block_devices=_detach,
attach_block_devices=_attach,
network_info=None,
recreate=True,
block_device_info=None,
preserve_ephemeral=False)
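            # recreate=True with shared block storage: the old instance must
            # not be destroyed, but save and spawn must still happen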
self.assertFalse(mock_destroy.called)
self.assertTrue(mock_save.called)
self.assertTrue(mock_spawn.called)
@mock.patch.object(utils, 'last_completed_audit_period',
return_value=(0, 0))
@mock.patch.object(time, 'time', side_effect=[10, 20, 21])
@mock.patch.object(objects.InstanceList, 'get_by_host', return_value=[])
@mock.patch.object(objects.BandwidthUsage, 'get_by_instance_uuid_and_mac')
@mock.patch.object(db, 'bw_usage_update')
def test_poll_bandwidth_usage(self, bw_usage_update, get_by_uuid_mac,
get_by_host, time, last_completed_audit):
bw_counters = [{'uuid': 'fake-uuid', 'mac_address': 'fake-mac',
'bw_in': 1, 'bw_out': 2}]
usage = objects.BandwidthUsage()
usage.bw_in = 3
usage.bw_out = 4
usage.last_ctr_in = 0
usage.last_ctr_out = 0
self.flags(bandwidth_poll_interval=1)
get_by_uuid_mac.return_value = usage
_time = timeutils.utcnow()
bw_usage_update.return_value = {'instance_uuid': '', 'mac': '',
'start_period': _time, 'last_refreshed': _time, 'bw_in': 0,
'bw_out': 0, 'last_ctr_in': 0, 'last_ctr_out': 0, 'deleted': 0,
'created_at': _time, 'updated_at': _time, 'deleted_at': _time}
with mock.patch.object(self.compute.driver,
'get_all_bw_counters', return_value=bw_counters):
self.compute._poll_bandwidth_usage(self.context)
get_by_uuid_mac.assert_called_once_with(self.context, 'fake-uuid',
'fake-mac', start_period=0, use_slave=True)
            # NOTE(sdague): bw_usage_update happens at some point in the
            # future, so the exact value of last_refreshed is irrelevant.
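            # the existing usage (3, 4) plus the polled counters (1, 2)
            # yields the (4, 6) passed to bw_usage_update below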
bw_usage_update.assert_called_once_with(self.context, 'fake-uuid',
'fake-mac', 0, 4, 6, 1, 2,
last_refreshed=mock.ANY,
update_cells=False)
class ComputeManagerBuildInstanceTestCase(test.NoDBTestCase):
def setUp(self):
super(ComputeManagerBuildInstanceTestCase, self).setUp()
self.compute = importutils.import_object(CONF.compute_manager)
self.context = context.RequestContext('fake', 'fake')
self.instance = fake_instance.fake_instance_obj(self.context,
vm_state=vm_states.ACTIVE,
expected_attrs=['metadata', 'system_metadata', 'info_cache'])
self.admin_pass = 'pass'
self.injected_files = []
self.image = {}
self.node = 'fake-node'
self.limits = {}
self.requested_networks = []
self.security_groups = []
self.block_device_mapping = []
self.filter_properties = {'retry': {'num_attempts': 1,
'hosts': [[self.compute.host,
'fake-node']]}}
def fake_network_info():
return network_model.NetworkInfo()
self.network_info = network_model.NetworkInfoAsyncWrapper(
fake_network_info)
        self.block_device_info = self.compute._prep_block_device(
            self.context, self.instance, self.block_device_mapping)
# override tracker with a version that doesn't need the database:
fake_rt = fake_resource_tracker.FakeResourceTracker(self.compute.host,
self.compute.driver, self.node)
self.compute._resource_tracker_dict[self.node] = fake_rt
def _do_build_instance_update(self, reschedule_update=False):
self.mox.StubOutWithMock(self.instance, 'save')
self.instance.save(
expected_task_state=(task_states.SCHEDULING, None)).AndReturn(
self.instance)
if reschedule_update:
self.instance.save().AndReturn(self.instance)
def _build_and_run_instance_update(self):
self.mox.StubOutWithMock(self.instance, 'save')
self._build_resources_instance_update(stub=False)
        self.instance.save(
            expected_task_state=task_states.BLOCK_DEVICE_MAPPING).AndReturn(
            self.instance)
def _build_resources_instance_update(self, stub=True):
if stub:
self.mox.StubOutWithMock(self.instance, 'save')
self.instance.save().AndReturn(self.instance)
def _notify_about_instance_usage(self, event, stub=True, **kwargs):
if stub:
self.mox.StubOutWithMock(self.compute,
'_notify_about_instance_usage')
self.compute._notify_about_instance_usage(self.context, self.instance,
event, **kwargs)
def _instance_action_events(self):
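        # expect the start/finish action-event pair that wraps every build,
        # whether it succeeds or fails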
self.mox.StubOutWithMock(objects.InstanceActionEvent, 'event_start')
self.mox.StubOutWithMock(objects.InstanceActionEvent,
'event_finish_with_failure')
objects.InstanceActionEvent.event_start(
self.context, self.instance.uuid, mox.IgnoreArg(),
want_result=False)
objects.InstanceActionEvent.event_finish_with_failure(
self.context, self.instance.uuid, mox.IgnoreArg(),
exc_val=mox.IgnoreArg(), exc_tb=mox.IgnoreArg(),
want_result=False)
@staticmethod
def _assert_build_instance_hook_called(mock_hooks, result):
# NOTE(coreywright): we want to test the return value of
# _do_build_and_run_instance, but it doesn't bubble all the way up, so
# mock the hooking, which allows us to test that too, though a little
# too intimately
mock_hooks.setdefault().run_post.assert_called_once_with(
'build_instance', result, mock.ANY, mock.ANY, f=None)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_build_and_run_instance_called_with_proper_args(self, mock_spawn,
mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
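        # make spawn_n synchronous so the build runs inline and the
        # assertions below observe its side effects immediately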
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self._do_build_instance_update()
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.ACTIVE)
    # This tests that, when an icehouse-compatible RPC call is sent to a
    # juno compute node, the NetworkRequest object can be loaded from a
    # three-item tuple.
@mock.patch('nova.objects.Instance.save')
@mock.patch('nova.compute.manager.ComputeManager._build_and_run_instance')
@mock.patch('nova.utils.spawn_n')
def test_build_and_run_instance_with_icehouse_requested_network(
self, mock_spawn, mock_build_and_run, mock_save):
fake_server_actions.stub_out_action_events(self.stubs)
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
mock_save.return_value = self.instance
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=[('fake_network_id', '10.0.0.1',
'fake_port_id')],
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
requested_network = mock_build_and_run.call_args[0][5][0]
self.assertEqual('fake_network_id', requested_network.network_id)
self.assertEqual('10.0.0.1', str(requested_network.address))
self.assertEqual('fake_port_id', requested_network.port_id)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_build_abort_exception(self, mock_spawn, mock_hooks):
def fake_spawn(f, *args, **kwargs):
# NOTE(danms): Simulate the detached nature of spawn so that
# we confirm that the inner task has the fault logic
try:
return f(*args, **kwargs)
except Exception:
pass
mock_spawn.side_effect = fake_spawn
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute, '_cleanup_volumes')
self.mox.StubOutWithMock(compute_utils, 'add_instance_fault_from_exc')
self.mox.StubOutWithMock(self.compute, '_set_instance_error_state')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self._do_build_instance_update()
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(
exception.BuildAbortException(reason='',
instance_uuid=self.instance.uuid))
self.compute._cleanup_allocated_networks(self.context, self.instance,
self.requested_networks)
self.compute._cleanup_volumes(self.context, self.instance.uuid,
self.block_device_mapping, raise_exc=False)
compute_utils.add_instance_fault_from_exc(self.context,
self.instance, mox.IgnoreArg(), mox.IgnoreArg())
self.compute._set_instance_error_state(self.context, self.instance)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.FAILED)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_rescheduled_exception(self, mock_spawn, mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute, '_set_instance_error_state')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self.mox.StubOutWithMock(self.compute.network_api,
'cleanup_instance_network_on_host')
self._do_build_instance_update(reschedule_update=True)
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(
exception.RescheduledException(reason='',
instance_uuid=self.instance.uuid))
self.compute.network_api.cleanup_instance_network_on_host(self.context,
self.instance, self.compute.host)
self.compute.compute_task_api.build_instances(self.context,
[self.instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.RESCHEDULED)
def test_rescheduled_exception_with_non_ascii_exception(self):
exc = exception.NovaException(u's\xe9quence')
self.mox.StubOutWithMock(self.compute.driver, 'spawn')
self.mox.StubOutWithMock(conductor_rpcapi.ConductorAPI,
'instance_update')
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
self._build_and_run_instance_update()
self.compute.driver.spawn(self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
network_info=self.network_info,
block_device_info=self.block_device_info,
flavor=None).AndRaise(exc)
self._notify_about_instance_usage('create.error',
fault=exc, stub=False)
conductor_rpcapi.ConductorAPI.instance_update(
self.context, self.instance['uuid'], mox.IgnoreArg(), 'conductor')
self.mox.ReplayAll()
self.assertRaises(exception.RescheduledException,
self.compute._build_and_run_instance, self.context,
self.instance, self.image, self.injected_files,
self.admin_pass, self.requested_networks, self.security_groups,
self.block_device_mapping, self.node,
self.limits, self.filter_properties)
@mock.patch.object(manager.ComputeManager, '_build_and_run_instance')
@mock.patch.object(conductor_api.ComputeTaskAPI, 'build_instances')
@mock.patch.object(network_api.API, 'cleanup_instance_network_on_host')
@mock.patch.object(objects.Instance, 'save')
@mock.patch.object(objects.InstanceActionEvent, 'event_start')
@mock.patch.object(objects.InstanceActionEvent,
'event_finish_with_failure')
@mock.patch.object(virt_driver.ComputeDriver, 'macs_for_instance')
def test_rescheduled_exception_with_network_allocated(self,
mock_macs_for_instance, mock_event_finish,
mock_event_start, mock_ins_save, mock_cleanup_network,
mock_build_ins, mock_build_and_run):
instance = fake_instance.fake_instance_obj(self.context,
vm_state=vm_states.ACTIVE,
system_metadata={'network_allocated': 'True'},
expected_attrs=['metadata', 'system_metadata', 'info_cache'])
mock_ins_save.return_value = instance
mock_macs_for_instance.return_value = []
mock_build_and_run.side_effect = exception.RescheduledException(
reason='', instance_uuid=self.instance.uuid)
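        # network_allocated=True in system_metadata means the reschedule
        # path should keep the allocation and only clean up the host binding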
self.compute._do_build_and_run_instance(self.context, instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
mock_build_and_run.assert_called_once_with(self.context,
instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties)
mock_cleanup_network.assert_called_once_with(
self.context, instance, self.compute.host)
mock_build_ins.assert_called_once_with(self.context,
[instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_rescheduled_exception_without_retry(self, mock_spawn, mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(compute_utils, 'add_instance_fault_from_exc')
self.mox.StubOutWithMock(self.compute, '_set_instance_error_state')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute, '_cleanup_volumes')
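        # empty filter_properties (no 'retry' key) means rescheduling is not
        # possible, so the build must fail and the instance go to ERROR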
self._do_build_instance_update()
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
{}).AndRaise(
exception.RescheduledException(reason='',
instance_uuid=self.instance.uuid))
self.compute._cleanup_allocated_networks(self.context, self.instance,
self.requested_networks)
compute_utils.add_instance_fault_from_exc(self.context, self.instance,
mox.IgnoreArg(), mox.IgnoreArg())
self.compute._set_instance_error_state(self.context,
self.instance)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties={},
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.FAILED)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_rescheduled_exception_do_not_deallocate_network(self, mock_spawn,
mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute.driver,
'deallocate_networks_on_reschedule')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self.mox.StubOutWithMock(self.compute.network_api,
'cleanup_instance_network_on_host')
self._do_build_instance_update(reschedule_update=True)
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(
exception.RescheduledException(reason='',
instance_uuid=self.instance.uuid))
self.compute.driver.deallocate_networks_on_reschedule(
self.instance).AndReturn(False)
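        # the driver declines a network teardown, so only the host binding
        # is cleaned up before the reschedule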
self.compute.network_api.cleanup_instance_network_on_host(
self.context, self.instance, self.compute.host)
self.compute.compute_task_api.build_instances(self.context,
[self.instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.RESCHEDULED)
@mock.patch('nova.hooks._HOOKS')
@mock.patch('nova.utils.spawn_n')
def test_rescheduled_exception_deallocate_network(self, mock_spawn,
mock_hooks):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute.driver,
'deallocate_networks_on_reschedule')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self._do_build_instance_update(reschedule_update=True)
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(
exception.RescheduledException(reason='',
instance_uuid=self.instance.uuid))
self.compute.driver.deallocate_networks_on_reschedule(
self.instance).AndReturn(True)
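        # the driver requests a network teardown, so the allocated networks
        # are cleaned up before the reschedule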
self.compute._cleanup_allocated_networks(self.context, self.instance,
self.requested_networks)
self.compute.compute_task_api.build_instances(self.context,
[self.instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.RESCHEDULED)
def _test_build_and_run_exceptions(self, exc, set_error=False,
cleanup_volumes=False):
self.mox.StubOutWithMock(self.compute, '_build_and_run_instance')
self.mox.StubOutWithMock(self.compute, '_cleanup_allocated_networks')
self.mox.StubOutWithMock(self.compute, '_cleanup_volumes')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self._do_build_instance_update()
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties).AndRaise(exc)
self.compute._cleanup_allocated_networks(self.context, self.instance,
self.requested_networks)
if cleanup_volumes:
self.compute._cleanup_volumes(self.context, self.instance.uuid,
self.block_device_mapping, raise_exc=False)
if set_error:
self.mox.StubOutWithMock(self.compute, '_set_instance_error_state')
self.mox.StubOutWithMock(compute_utils,
'add_instance_fault_from_exc')
compute_utils.add_instance_fault_from_exc(self.context,
self.instance, mox.IgnoreArg(), mox.IgnoreArg())
self.compute._set_instance_error_state(self.context, self.instance)
self._instance_action_events()
self.mox.ReplayAll()
with contextlib.nested(
mock.patch('nova.utils.spawn_n'),
mock.patch('nova.hooks._HOOKS')
) as (
mock_spawn,
mock_hooks
):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
self._assert_build_instance_hook_called(mock_hooks,
build_results.FAILED)
def test_build_and_run_notfound_exception(self):
self._test_build_and_run_exceptions(exception.InstanceNotFound(
instance_id=''))
def test_build_and_run_unexpecteddeleting_exception(self):
self._test_build_and_run_exceptions(
exception.UnexpectedDeletingTaskStateError(expected='',
actual=''))
def test_build_and_run_buildabort_exception(self):
self._test_build_and_run_exceptions(exception.BuildAbortException(
instance_uuid='', reason=''), set_error=True, cleanup_volumes=True)
def test_build_and_run_unhandled_exception(self):
self._test_build_and_run_exceptions(test.TestingException(),
set_error=True, cleanup_volumes=True)
def test_instance_not_found(self):
exc = exception.InstanceNotFound(instance_id=1)
self.mox.StubOutWithMock(self.compute.driver, 'spawn')
self.mox.StubOutWithMock(conductor_rpcapi.ConductorAPI,
'instance_update')
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
self._build_and_run_instance_update()
self.compute.driver.spawn(self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
network_info=self.network_info,
block_device_info=self.block_device_info,
flavor=None).AndRaise(exc)
self._notify_about_instance_usage('create.end',
fault=exc, stub=False)
conductor_rpcapi.ConductorAPI.instance_update(
self.context, self.instance.uuid, mox.IgnoreArg(), 'conductor')
self.mox.ReplayAll()
self.assertRaises(exception.InstanceNotFound,
self.compute._build_and_run_instance, self.context,
self.instance, self.image, self.injected_files,
self.admin_pass, self.requested_networks, self.security_groups,
self.block_device_mapping, self.node,
self.limits, self.filter_properties)
def test_reschedule_on_exception(self):
self.mox.StubOutWithMock(self.compute.driver, 'spawn')
self.mox.StubOutWithMock(conductor_rpcapi.ConductorAPI,
'instance_update')
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
self._build_and_run_instance_update()
exc = test.TestingException()
self.compute.driver.spawn(self.context, self.instance, self.image,
self.injected_files, self.admin_pass,
network_info=self.network_info,
block_device_info=self.block_device_info,
flavor=None).AndRaise(exc)
conductor_rpcapi.ConductorAPI.instance_update(
self.context, self.instance.uuid, mox.IgnoreArg(), 'conductor')
self._notify_about_instance_usage('create.error',
fault=exc, stub=False)
self.mox.ReplayAll()
self.assertRaises(exception.RescheduledException,
self.compute._build_and_run_instance, self.context,
self.instance, self.image, self.injected_files,
self.admin_pass, self.requested_networks, self.security_groups,
self.block_device_mapping, self.node,
self.limits, self.filter_properties)
def test_spawn_network_alloc_failure(self):
# Because network allocation is asynchronous, failures may not present
# themselves until the virt spawn method is called.
self._test_build_and_run_spawn_exceptions(exception.NoMoreNetworks())
def test_build_and_run_no_more_fixedips_exception(self):
self._test_build_and_run_spawn_exceptions(
            exception.NoMoreFixedIps("error message"))
def test_build_and_run_flavor_disk_too_small_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.FlavorDiskTooSmall())
def test_build_and_run_flavor_memory_too_small_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.FlavorMemoryTooSmall())
def test_build_and_run_image_not_active_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.ImageNotActive(image_id=self.image.get('id')))
def test_build_and_run_image_unacceptable_exception(self):
self._test_build_and_run_spawn_exceptions(
exception.ImageUnacceptable(image_id=self.image.get('id'),
reason=""))
def _test_build_and_run_spawn_exceptions(self, exc):
with contextlib.nested(
mock.patch.object(self.compute.driver, 'spawn',
side_effect=exc),
mock.patch.object(conductor_rpcapi.ConductorAPI,
'instance_update'),
mock.patch.object(self.instance, 'save',
side_effect=[self.instance, self.instance]),
mock.patch.object(self.compute,
'_build_networks_for_instance',
return_value=self.network_info),
mock.patch.object(self.compute,
'_notify_about_instance_usage'),
mock.patch.object(self.compute,
'_shutdown_instance'),
mock.patch.object(self.compute,
'_validate_instance_group_policy')
) as (spawn, instance_update, save,
_build_networks_for_instance, _notify_about_instance_usage,
_shutdown_instance, _validate_instance_group_policy):
self.assertRaises(exception.BuildAbortException,
self.compute._build_and_run_instance, self.context,
self.instance, self.image, self.injected_files,
self.admin_pass, self.requested_networks,
self.security_groups, self.block_device_mapping, self.node,
self.limits, self.filter_properties)
_validate_instance_group_policy.assert_called_once_with(
self.context, self.instance, self.filter_properties)
            _build_networks_for_instance.assert_has_calls([
                mock.call(self.context, self.instance,
                    self.requested_networks, self.security_groups)])
_notify_about_instance_usage.assert_has_calls([
mock.call(self.context, self.instance, 'create.start',
extra_usage_info={'image_name': self.image.get('name')}),
mock.call(self.context, self.instance, 'create.error',
fault=exc)])
save.assert_has_calls([
mock.call(),
mock.call(
expected_task_state=task_states.BLOCK_DEVICE_MAPPING)])
            spawn.assert_has_calls([mock.call(self.context, self.instance,
                self.image, self.injected_files, self.admin_pass,
                network_info=self.network_info,
                block_device_info=self.block_device_info,
                flavor=None)])
            instance_update.assert_has_calls([mock.call(self.context,
                self.instance.uuid, mock.ANY, 'conductor')])
_shutdown_instance.assert_called_once_with(self.context,
self.instance, self.block_device_mapping,
self.requested_networks, try_deallocate_networks=False)
@mock.patch('nova.compute.manager.ComputeManager._get_power_state')
def test_spawn_waits_for_network_and_saves_info_cache(self, gps):
inst = mock.MagicMock()
network_info = mock.MagicMock()
with mock.patch.object(self.compute, 'driver'):
self.compute._spawn(self.context, inst, {}, network_info, None,
None, None, flavor=None)
network_info.wait.assert_called_once_with(do_raise=True)
self.assertEqual(network_info, inst.info_cache.network_info)
inst.save.assert_called_with(expected_task_state=task_states.SPAWNING)
@mock.patch('nova.utils.spawn_n')
def test_reschedule_on_resources_unavailable(self, mock_spawn):
mock_spawn.side_effect = lambda f, *a, **k: f(*a, **k)
reason = 'resource unavailable'
exc = exception.ComputeResourcesUnavailable(reason=reason)
class FakeResourceTracker(object):
def instance_claim(self, context, instance, limits):
raise exc
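        # a failed resource claim should trigger a reschedule, not an error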
self.mox.StubOutWithMock(self.compute, '_get_resource_tracker')
self.mox.StubOutWithMock(self.compute.compute_task_api,
'build_instances')
self.mox.StubOutWithMock(self.compute.network_api,
'cleanup_instance_network_on_host')
self.compute._get_resource_tracker(self.node).AndReturn(
FakeResourceTracker())
self._do_build_instance_update(reschedule_update=True)
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
self._notify_about_instance_usage('create.error',
fault=exc, stub=False)
self.compute.network_api.cleanup_instance_network_on_host(
self.context, self.instance, self.compute.host)
self.compute.compute_task_api.build_instances(self.context,
[self.instance], self.image, self.filter_properties,
self.admin_pass, self.injected_files, self.requested_networks,
self.security_groups, self.block_device_mapping)
self._instance_action_events()
self.mox.ReplayAll()
self.compute.build_and_run_instance(self.context, self.instance,
self.image, request_spec={},
filter_properties=self.filter_properties,
injected_files=self.injected_files,
admin_password=self.admin_pass,
requested_networks=self.requested_networks,
security_groups=self.security_groups,
block_device_mapping=self.block_device_mapping, node=self.node,
limits=self.limits)
def test_build_resources_buildabort_reraise(self):
exc = exception.BuildAbortException(
instance_uuid=self.instance.uuid, reason='')
self.mox.StubOutWithMock(self.compute, '_build_resources')
self.mox.StubOutWithMock(conductor_rpcapi.ConductorAPI,
'instance_update')
conductor_rpcapi.ConductorAPI.instance_update(
self.context, self.instance.uuid, mox.IgnoreArg(), 'conductor')
self._notify_about_instance_usage('create.start',
extra_usage_info={'image_name': self.image.get('name')})
self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups, self.image,
self.block_device_mapping).AndRaise(exc)
self._notify_about_instance_usage('create.error',
fault=exc, stub=False)
self.mox.ReplayAll()
self.assertRaises(exception.BuildAbortException,
self.compute._build_and_run_instance, self.context,
self.instance, self.image, self.injected_files,
self.admin_pass, self.requested_networks,
self.security_groups, self.block_device_mapping, self.node,
self.limits, self.filter_properties)
def test_build_resources_reraises_on_failed_bdm_prep(self):
self.mox.StubOutWithMock(self.compute, '_prep_block_device')
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self._build_resources_instance_update()
self.compute._prep_block_device(self.context, self.instance,
self.block_device_mapping).AndRaise(test.TestingException())
self.mox.ReplayAll()
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
pass
except Exception as e:
self.assertIsInstance(e, exception.BuildAbortException)
def test_failed_bdm_prep_from_delete_raises_unexpected(self):
with contextlib.nested(
mock.patch.object(self.compute,
'_build_networks_for_instance',
return_value=self.network_info),
mock.patch.object(self.instance, 'save',
side_effect=exception.UnexpectedDeletingTaskStateError(
actual=task_states.DELETING, expected='None')),
) as (_build_networks_for_instance, save):
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
pass
except Exception as e:
self.assertIsInstance(e,
exception.UnexpectedDeletingTaskStateError)
            _build_networks_for_instance.assert_has_calls([
                mock.call(self.context, self.instance,
                    self.requested_networks, self.security_groups)])
            save.assert_has_calls([mock.call()])
def test_build_resources_aborts_on_failed_network_alloc(self):
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndRaise(
test.TestingException())
self.mox.ReplayAll()
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups, self.image,
self.block_device_mapping):
pass
except Exception as e:
self.assertIsInstance(e, exception.BuildAbortException)
def test_failed_network_alloc_from_delete_raises_unexpected(self):
with mock.patch.object(self.compute,
'_build_networks_for_instance') as _build_networks:
exc = exception.UnexpectedDeletingTaskStateError
_build_networks.side_effect = exc(actual=task_states.DELETING,
expected='None')
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
pass
except Exception as e:
self.assertIsInstance(e, exc)
            _build_networks.assert_has_calls([
                mock.call(self.context, self.instance,
                    self.requested_networks, self.security_groups)])
def test_build_resources_with_network_info_obj_on_spawn_failure(self):
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
network_model.NetworkInfo())
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._build_resources_instance_update()
self.mox.ReplayAll()
test_exception = test.TestingException()
def fake_spawn():
raise test_exception
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
fake_spawn()
except Exception as e:
self.assertEqual(test_exception, e)
def test_build_resources_cleans_up_and_reraises_on_spawn_failure(self):
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False)
self._build_resources_instance_update()
self.mox.ReplayAll()
test_exception = test.TestingException()
def fake_spawn():
raise test_exception
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
fake_spawn()
except Exception as e:
self.assertEqual(test_exception, e)
@mock.patch('nova.network.model.NetworkInfoAsyncWrapper.wait')
@mock.patch(
'nova.compute.manager.ComputeManager._build_networks_for_instance')
@mock.patch('nova.objects.Instance.save')
def test_build_resources_instance_not_found_before_yield(
self, mock_save, mock_build_network, mock_info_wait):
mock_build_network.return_value = self.network_info
expected_exc = exception.InstanceNotFound(
instance_id=self.instance.uuid)
mock_save.side_effect = expected_exc
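        # save() fails inside _build_resources before it yields, so the
        # bare `raise` in the body below is never reached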
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
raise
except Exception as e:
self.assertEqual(expected_exc, e)
mock_build_network.assert_called_once_with(self.context, self.instance,
self.requested_networks, self.security_groups)
mock_info_wait.assert_called_once_with(do_raise=False)
@mock.patch('nova.network.model.NetworkInfoAsyncWrapper.wait')
@mock.patch(
'nova.compute.manager.ComputeManager._build_networks_for_instance')
@mock.patch('nova.objects.Instance.save')
def test_build_resources_unexpected_task_error_before_yield(
self, mock_save, mock_build_network, mock_info_wait):
mock_build_network.return_value = self.network_info
mock_save.side_effect = exception.UnexpectedTaskStateError(
expected='', actual='')
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
raise
except exception.BuildAbortException:
pass
mock_build_network.assert_called_once_with(self.context, self.instance,
self.requested_networks, self.security_groups)
mock_info_wait.assert_called_once_with(do_raise=False)
@mock.patch('nova.network.model.NetworkInfoAsyncWrapper.wait')
@mock.patch(
'nova.compute.manager.ComputeManager._build_networks_for_instance')
@mock.patch('nova.objects.Instance.save')
def test_build_resources_exception_before_yield(
self, mock_save, mock_build_network, mock_info_wait):
mock_build_network.return_value = self.network_info
mock_save.side_effect = Exception()
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
raise
except exception.BuildAbortException:
pass
mock_build_network.assert_called_once_with(self.context, self.instance,
self.requested_networks, self.security_groups)
mock_info_wait.assert_called_once_with(do_raise=False)
def test_build_resources_aborts_on_cleanup_failure(self):
self.mox.StubOutWithMock(self.compute, '_build_networks_for_instance')
self.mox.StubOutWithMock(self.compute, '_shutdown_instance')
self.compute._build_networks_for_instance(self.context, self.instance,
self.requested_networks, self.security_groups).AndReturn(
self.network_info)
self.compute._shutdown_instance(self.context, self.instance,
self.block_device_mapping, self.requested_networks,
try_deallocate_networks=False).AndRaise(
test.TestingException())
self._build_resources_instance_update()
self.mox.ReplayAll()
def fake_spawn():
raise test.TestingException()
try:
with self.compute._build_resources(self.context, self.instance,
self.requested_networks, self.security_groups,
self.image, self.block_device_mapping):
fake_spawn()
except Exception as e:
self.assertIsInstance(e, exception.BuildAbortException)
@mock.patch('nova.objects.Instance.get_by_uuid')
def test_get_instance_nw_info_properly_queries_for_sysmeta(self,
mock_get):
instance = objects.Instance(uuid=uuid.uuid4().hex)
with mock.patch.object(self.compute, 'network_api'):
self.compute._get_instance_nw_info(self.context, instance)
mock_get.assert_called_once_with(self.context, instance.uuid,
expected_attrs=['system_metadata'],
use_slave=False)
def test_build_networks_if_not_allocated(self):
instance = fake_instance.fake_instance_obj(self.context,
system_metadata={},
expected_attrs=['system_metadata'])
self.mox.StubOutWithMock(self.compute, '_get_instance_nw_info')
self.mox.StubOutWithMock(self.compute, '_allocate_network')
self.compute._allocate_network(self.context, instance,
self.requested_networks, None, self.security_groups, None)
self.mox.ReplayAll()
self.compute._build_networks_for_instance(self.context, instance,
self.requested_networks, self.security_groups)
def test_build_networks_if_allocated_false(self):
instance = fake_instance.fake_instance_obj(self.context,
system_metadata=dict(network_allocated='False'),
expected_attrs=['system_metadata'])
self.mox.StubOutWithMock(self.compute, '_get_instance_nw_info')
self.mox.StubOutWithMock(self.compute, '_allocate_network')
self.compute._allocate_network(self.context, instance,
self.requested_networks, None, self.security_groups, None)
self.mox.ReplayAll()
self.compute._build_networks_for_instance(self.context, instance,
self.requested_networks, self.security_groups)
def test_return_networks_if_found(self):
instance = fake_instance.fake_instance_obj(self.context,
system_metadata=dict(network_allocated='True'),
expected_attrs=['system_metadata'])
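        # network_allocated=True: existing network info is fetched and the
        # host binding re-established instead of allocating again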
def fake_network_info():
return network_model.NetworkInfo([{'address': '123.123.123.123'}])
self.mox.StubOutWithMock(self.compute, '_get_instance_nw_info')
self.mox.StubOutWithMock(self.compute, '_allocate_network')
self.mox.StubOutWithMock(self.compute.network_api,
'setup_instance_network_on_host')
self.compute.network_api.setup_instance_network_on_host(
self.context, instance, instance.host)
self.compute._get_instance_nw_info(self.context, instance).AndReturn(
network_model.NetworkInfoAsyncWrapper(fake_network_info))
self.mox.ReplayAll()
self.compute._build_networks_for_instance(self.context, instance,
self.requested_networks, self.security_groups)
def test_cleanup_allocated_networks_instance_not_found(self):
with contextlib.nested(
mock.patch.object(self.compute, '_deallocate_network'),
mock.patch.object(self.instance, 'save',
side_effect=exception.InstanceNotFound(instance_id=''))
) as (_deallocate_network, save):
            # Testing that this doesn't raise an exception
self.compute._cleanup_allocated_networks(self.context,
self.instance, self.requested_networks)
save.assert_called_once_with()
self.assertEqual('False',
self.instance.system_metadata['network_allocated'])
@mock.patch.object(conductor_rpcapi.ConductorAPI, 'instance_update')
def test_launched_at_in_create_end_notification(self,
mock_instance_update):
def fake_notify(*args, **kwargs):
if args[2] == 'create.end':
# Check that launched_at is set on the instance
self.assertIsNotNone(args[1].launched_at)
with contextlib.nested(
mock.patch.object(self.compute.driver, 'spawn'),
mock.patch.object(self.compute,
'_build_networks_for_instance', return_value=[]),
mock.patch.object(self.instance, 'save'),
mock.patch.object(self.compute, '_notify_about_instance_usage',
side_effect=fake_notify)
) as (mock_spawn, mock_networks, mock_save, mock_notify):
self.compute._build_and_run_instance(self.context, self.instance,
self.image, self.injected_files, self.admin_pass,
self.requested_networks, self.security_groups,
self.block_device_mapping, self.node, self.limits,
self.filter_properties)
expected_call = mock.call(self.context, self.instance,
'create.end', extra_usage_info={'message': u'Success'},
network_info=[])
create_end_call = mock_notify.call_args_list[
mock_notify.call_count - 1]
self.assertEqual(expected_call, create_end_call)
@mock.patch.object(conductor_rpcapi.ConductorAPI, 'instance_update')
def test_create_end_on_instance_delete(self, mock_instance_update):
def fake_notify(*args, **kwargs):
if args[2] == 'create.end':
# Check that launched_at is set on the instance
self.assertIsNotNone(args[1].launched_at)
exc = exception.InstanceNotFound(instance_id='')
with contextlib.nested(
mock.patch.object(self.compute.driver, 'spawn'),
mock.patch.object(self.compute,
'_build_networks_for_instance', return_value=[]),
mock.patch.object(self.instance, 'save',
side_effect=[None, None, exc]),
mock.patch.object(self.compute, '_notify_about_instance_usage',
side_effect=fake_notify)
) as (mock_spawn, mock_networks, mock_save, mock_notify):
self.assertRaises(exception.InstanceNotFound,
self.compute._build_and_run_instance, self.context,
self.instance, self.image, self.injected_files,
self.admin_pass, self.requested_networks,
self.security_groups, self.block_device_mapping, self.node,
self.limits, self.filter_properties)
expected_call = mock.call(self.context, self.instance,
'create.end', fault=exc)
create_end_call = mock_notify.call_args_list[
mock_notify.call_count - 1]
self.assertEqual(expected_call, create_end_call)
class ComputeManagerMigrationTestCase(test.NoDBTestCase):
def setUp(self):
super(ComputeManagerMigrationTestCase, self).setUp()
self.compute = importutils.import_object(CONF.compute_manager)
self.context = context.RequestContext('fake', 'fake')
self.image = {}
self.instance = fake_instance.fake_instance_obj(self.context,
vm_state=vm_states.ACTIVE,
expected_attrs=['metadata', 'system_metadata', 'info_cache'])
self.migration = objects.Migration(context=self.context.elevated(),
new_instance_type_id=7)
self.migration.status = 'migrating'
fake_server_actions.stub_out_action_events(self.stubs)
def test_finish_resize_failure(self):
with contextlib.nested(
mock.patch.object(self.compute, '_finish_resize',
side_effect=exception.ResizeError(reason='')),
mock.patch.object(db, 'instance_fault_create'),
mock.patch.object(self.compute, '_instance_update'),
mock.patch.object(self.migration, 'save'),
mock.patch.object(self.migration, 'obj_as_admin',
return_value=mock.MagicMock())
) as (meth, fault_create, instance_update,
migration_save, migration_obj_as_admin):
fault_create.return_value = (
test_instance_fault.fake_faults['fake-uuid'][0])
self.assertRaises(
exception.ResizeError, self.compute.finish_resize,
context=self.context, disk_info=[], image=self.image,
instance=self.instance, reservations=[],
migration=self.migration
)
self.assertEqual("error", self.migration.status)
migration_save.assert_called_once_with()
migration_obj_as_admin.assert_called_once_with()
def test_resize_instance_failure(self):
self.migration.dest_host = None
with contextlib.nested(
mock.patch.object(self.compute.driver,
'migrate_disk_and_power_off',
side_effect=exception.ResizeError(reason='')),
mock.patch.object(db, 'instance_fault_create'),
mock.patch.object(self.compute, '_instance_update'),
mock.patch.object(self.migration, 'save'),
mock.patch.object(self.migration, 'obj_as_admin',
return_value=mock.MagicMock()),
mock.patch.object(self.compute, '_get_instance_nw_info',
return_value=None),
mock.patch.object(self.instance, 'save'),
mock.patch.object(self.compute, '_notify_about_instance_usage'),
mock.patch.object(self.compute,
'_get_instance_block_device_info',
return_value=None),
mock.patch.object(objects.BlockDeviceMappingList,
'get_by_instance_uuid',
return_value=None),
mock.patch.object(objects.Flavor,
'get_by_id',
return_value=None)
) as (meth, fault_create, instance_update,
migration_save, migration_obj_as_admin, nw_info, save_inst,
notify, vol_block_info, bdm, flavor):
fault_create.return_value = (
test_instance_fault.fake_faults['fake-uuid'][0])
self.assertRaises(
exception.ResizeError, self.compute.resize_instance,
context=self.context, instance=self.instance, image=self.image,
reservations=[], migration=self.migration, instance_type='type'
)
self.assertEqual("error", self.migration.status)
self.assertEqual([mock.call(), mock.call()],
migration_save.mock_calls)
self.assertEqual([mock.call(), mock.call()],
migration_obj_as_admin.mock_calls)
def test_revert_resize_instance_destroy_disks(self):
# This test asserts that _is_instance_storage_shared() is called from
# revert_resize() and the return value is passed to driver.destroy().
# Otherwise we could regress this.
@mock.patch.object(self.compute, '_get_instance_nw_info')
@mock.patch.object(self.compute, '_is_instance_storage_shared')
@mock.patch.object(self.compute, 'finish_revert_resize')
@mock.patch.object(self.compute, '_instance_update')
@mock.patch.object(self.compute, '_get_resource_tracker')
@mock.patch.object(self.compute.driver, 'destroy')
@mock.patch.object(self.compute.network_api, 'setup_networks_on_host')
@mock.patch.object(self.compute.network_api, 'migrate_instance_start')
@mock.patch.object(self.compute.conductor_api, 'notify_usage_exists')
@mock.patch.object(self.migration, 'save')
@mock.patch.object(objects.BlockDeviceMappingList,
'get_by_instance_uuid')
def do_test(get_by_instance_uuid,
migration_save,
notify_usage_exists,
migrate_instance_start,
setup_networks_on_host,
destroy,
_get_resource_tracker,
_instance_update,
finish_revert_resize,
_is_instance_storage_shared,
_get_instance_nw_info):
self.migration.source_compute = self.instance['host']
# inform compute that this instance uses shared storage
_is_instance_storage_shared.return_value = True
self.compute.revert_resize(context=self.context,
migration=self.migration,
instance=self.instance,
reservations=None)
_is_instance_storage_shared.assert_called_once_with(self.context,
self.instance)
# since shared storage is used, we should not be instructed to
# destroy disks here
destroy.assert_called_once_with(self.context, self.instance,
mock.ANY, mock.ANY, False)
do_test()
| apache-2.0 |
AudioGod/DTS-Eagle-Integration_CAF-Android-kernel | tools/perf/tests/attr.py | 3174 | 9441 | #! /usr/bin/python
import os
import sys
import glob
import optparse
import tempfile
import logging
import shutil
import ConfigParser
class Fail(Exception):
def __init__(self, test, msg):
self.msg = msg
self.test = test
def getMsg(self):
return '\'%s\' - %s' % (self.test.path, self.msg)
class Unsup(Exception):
def __init__(self, test):
self.test = test
def getMsg(self):
return '\'%s\'' % self.test.path
class Event(dict):
terms = [
'cpu',
'flags',
'type',
'size',
'config',
'sample_period',
'sample_type',
'read_format',
'disabled',
'inherit',
'pinned',
'exclusive',
'exclude_user',
'exclude_kernel',
'exclude_hv',
'exclude_idle',
'mmap',
'comm',
'freq',
'inherit_stat',
'enable_on_exec',
'task',
'watermark',
'precise_ip',
'mmap_data',
'sample_id_all',
'exclude_host',
'exclude_guest',
'exclude_callchain_kernel',
'exclude_callchain_user',
'wakeup_events',
'bp_type',
'config1',
'config2',
'branch_sample_type',
'sample_regs_user',
'sample_stack_user',
]
def add(self, data):
for key, val in data:
log.debug(" %s = %s" % (key, val))
self[key] = val
def __init__(self, name, data, base):
log.debug(" Event %s" % name);
self.name = name;
self.group = ''
self.add(base)
self.add(data)
def compare_data(self, a, b):
# Allow multiple values in assignment separated by '|'
a_list = a.split('|')
b_list = b.split('|')
for a_item in a_list:
for b_item in b_list:
if (a_item == b_item):
return True
elif (a_item == '*') or (b_item == '*'):
return True
return False
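    # e.g. (illustrative): compare_data('0|1', '1') -> True because each '|'
    # alternative is tried; compare_data('*', '13') -> True via the wildcard;
    # compare_data('0', '1') -> False.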
def equal(self, other):
for t in Event.terms:
log.debug(" [%s] %s %s" % (t, self[t], other[t]));
if not self.has_key(t) or not other.has_key(t):
return False
if not self.compare_data(self[t], other[t]):
return False
return True
def diff(self, other):
for t in Event.terms:
if not self.has_key(t) or not other.has_key(t):
continue
if not self.compare_data(self[t], other[t]):
log.warning("expected %s=%s, got %s" % (t, self[t], other[t]))
# Test file description needs to have following sections:
# [config]
# - just single instance in file
# - needs to specify:
# 'command' - perf command name
# 'args' - special command arguments
# 'ret' - expected command return value (0 by default)
#
# [eventX:base]
# - one or multiple instances in file
# - expected values assignments
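#
# As a concrete illustration (hypothetical values, not a test shipped with
# perf), a file following the schema above could look like:
#
#   [config]
#   command = record
#   args    = kill >/dev/null 2>&1
#   ret     = 1
#
#   [event:base-record]
#   fd=1
#   group_fd=-1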
class Test(object):
def __init__(self, path, options):
parser = ConfigParser.SafeConfigParser()
parser.read(path)
log.warning("running '%s'" % path)
self.path = path
self.test_dir = options.test_dir
self.perf = options.perf
self.command = parser.get('config', 'command')
self.args = parser.get('config', 'args')
try:
self.ret = parser.get('config', 'ret')
except:
self.ret = 0
self.expect = {}
self.result = {}
log.debug(" loading expected events");
self.load_events(path, self.expect)
def is_event(self, name):
if name.find("event") == -1:
return False
else:
return True
def load_events(self, path, events):
parser_event = ConfigParser.SafeConfigParser()
parser_event.read(path)
        # The event record section header contains the word 'event',
        # optionally followed by ':' to allow loading a 'parent
        # event' first as a base
for section in filter(self.is_event, parser_event.sections()):
parser_items = parser_event.items(section);
base_items = {}
# Read parent event if there's any
if (':' in section):
base = section[section.index(':') + 1:]
parser_base = ConfigParser.SafeConfigParser()
parser_base.read(self.test_dir + '/' + base)
base_items = parser_base.items('event')
e = Event(section, parser_items, base_items)
events[section] = e
def run_cmd(self, tempdir):
cmd = "PERF_TEST_ATTR=%s %s %s -o %s/perf.data %s" % (tempdir,
self.perf, self.command, tempdir, self.args)
ret = os.WEXITSTATUS(os.system(cmd))
log.info(" '%s' ret %d " % (cmd, ret))
if ret != int(self.ret):
raise Unsup(self)
def compare(self, expect, result):
match = {}
log.debug(" compare");
# For each expected event find all matching
# events in result. Fail if there's not any.
for exp_name, exp_event in expect.items():
exp_list = []
log.debug(" matching [%s]" % exp_name)
for res_name, res_event in result.items():
log.debug(" to [%s]" % res_name)
if (exp_event.equal(res_event)):
exp_list.append(res_name)
log.debug(" ->OK")
else:
log.debug(" ->FAIL");
log.debug(" match: [%s] matches %s" % (exp_name, str(exp_list)))
        # we did not find any matching event - fail
if (not exp_list):
exp_event.diff(res_event)
raise Fail(self, 'match failure');
match[exp_name] = exp_list
# For each defined group in the expected events
# check we match the same group in the result.
for exp_name, exp_event in expect.items():
group = exp_event.group
if (group == ''):
continue
for res_name in match[exp_name]:
res_group = result[res_name].group
if res_group not in match[group]:
raise Fail(self, 'group failure')
log.debug(" group: [%s] matches group leader %s" %
(exp_name, str(match[group])))
log.debug(" matched")
def resolve_groups(self, events):
for name, event in events.items():
group_fd = event['group_fd'];
if group_fd == '-1':
continue;
for iname, ievent in events.items():
if (ievent['fd'] == group_fd):
event.group = iname
log.debug('[%s] has group leader [%s]' % (name, iname))
break;
def run(self):
tempdir = tempfile.mkdtemp();
try:
# run the test script
self.run_cmd(tempdir);
            # load the result events produced by the test run
log.debug(" loading result events");
for f in glob.glob(tempdir + '/event*'):
self.load_events(f, self.result);
# resolve group_fd to event names
self.resolve_groups(self.expect);
self.resolve_groups(self.result);
# do the expectation - results matching - both ways
self.compare(self.expect, self.result)
self.compare(self.result, self.expect)
finally:
# cleanup
shutil.rmtree(tempdir)
def run_tests(options):
for f in glob.glob(options.test_dir + '/' + options.test):
try:
Test(f, options).run()
except Unsup, obj:
log.warning("unsupp %s" % obj.getMsg())
def setup_log(verbose):
global log
level = logging.CRITICAL
if verbose == 1:
level = logging.WARNING
if verbose == 2:
level = logging.INFO
if verbose >= 3:
level = logging.DEBUG
log = logging.getLogger('test')
log.setLevel(level)
ch = logging.StreamHandler()
ch.setLevel(level)
formatter = logging.Formatter('%(message)s')
ch.setFormatter(formatter)
log.addHandler(ch)
USAGE = '''%s [OPTIONS]
-d dir # tests dir
-p path # perf binary
-t test # single test
-v # verbose level
''' % sys.argv[0]
def main():
parser = optparse.OptionParser(usage=USAGE)
parser.add_option("-t", "--test",
action="store", type="string", dest="test")
parser.add_option("-d", "--test-dir",
action="store", type="string", dest="test_dir")
parser.add_option("-p", "--perf",
action="store", type="string", dest="perf")
parser.add_option("-v", "--verbose",
action="count", dest="verbose")
options, args = parser.parse_args()
if args:
parser.error('FAILED wrong arguments %s' % ' '.join(args))
return -1
setup_log(options.verbose)
if not options.test_dir:
print 'FAILED no -d option specified'
sys.exit(-1)
if not options.test:
options.test = 'test*'
try:
run_tests(options)
except Fail, obj:
print "FAILED %s" % obj.getMsg();
sys.exit(-1)
sys.exit(0)
if __name__ == '__main__':
main()
| gpl-2.0 |
pythonprobr/pythonpro-website | pythonpro/domain/tests/test_topic_interaction_aggregate.py | 1 | 3504 | from datetime import datetime
from typing import List
import pytest
import pytz
from django.urls import reverse
from django.utils import timezone
from freezegun import freeze_time
from model_bakery import baker
from pythonpro.dashboard.models import TopicInteraction
from pythonpro.dashboard.templatetags.dashboard_tags import duration
from pythonpro.django_assertions import dj_assert_contains
from pythonpro.modules.models import Topic
@pytest.fixture
def interactions(logged_user, topic):
with freeze_time("2019-07-22 00:00:00"):
first_interaction = baker.make(
TopicInteraction,
user=logged_user,
topic=topic,
topic_duration=125,
total_watched_time=125,
max_watched_time=95
)
with freeze_time("2019-07-22 01:00:00"):
second_interaction = baker.make(
TopicInteraction,
user=logged_user,
topic=topic,
topic_duration=125,
total_watched_time=34,
max_watched_time=14
)
with freeze_time("2019-07-22 00:30:00"):
third_interaction = baker.make(
TopicInteraction,
user=logged_user,
topic=topic,
topic_duration=125,
total_watched_time=64,
max_watched_time=34
)
return [
first_interaction,
second_interaction,
third_interaction,
]
@pytest.fixture
def resp(client_with_lead, interactions):
return client_with_lead.get(
reverse('dashboard:home'),
secure=True
)
def test_status_code(resp):
    # pytest ignores return values, so this must assert to actually check
    assert resp.status_code == 200
def test_topic_title_is_present(resp, topic):
dj_assert_contains(resp, topic.title)
def test_table_instructions(resp, topic):
dj_assert_contains(resp, 'Confira os dados consolidados por tópico')
def test_topic_url(resp, topic: Topic):
dj_assert_contains(resp, topic.get_absolute_url())
def test_module_table_row(resp, topic: Topic):
module = topic.find_module()
dj_assert_contains(resp, f'<a href="{module.get_absolute_url()}">{module.title}</a>')
def test_max_creation(resp, interactions):
tz = timezone.get_current_timezone()
last_interaction_utc = datetime(2019, 7, 22, 1, 0, 0, tzinfo=pytz.utc)
last_interaction_local = last_interaction_utc.astimezone(tz).strftime('%d/%m/%Y %H:%M:%S')
dj_assert_contains(resp, last_interaction_local)
def test_max_watched_time(resp, interactions: List[TopicInteraction]):
max_watched_time = max(interaction.max_watched_time for interaction in interactions)
max_watched_time_str = duration(max_watched_time)
dj_assert_contains(resp, max_watched_time_str)
def test_total_watched_time(resp, interactions: List[TopicInteraction]):
total_watched_time = sum(interaction.total_watched_time for interaction in interactions)
total_watched_time_str = duration(total_watched_time)
dj_assert_contains(resp, total_watched_time_str)
def test_interactions_count(resp, interactions: List[TopicInteraction]):
interactions_count = len(interactions)
dj_assert_contains(resp, f'<td>{interactions_count}</td>')
@pytest.fixture
def resp_without_interactions(client_with_lead):
return client_with_lead.get(
reverse('dashboard:home'),
secure=True
)
def test_not_existing_aggregation_msg_is_present(resp_without_interactions, topic):
dj_assert_contains(resp_without_interactions, "Ainda não existem dados agregados")
| agpl-3.0 |
mzizzi/aenea | server/osx/server_osx.py | 1 | 16816 | #!/usr/bin/python
# This file is part of Aenea
#
# Aenea is free software: you can redistribute it and/or modify it under
# the terms of version 3 of the GNU Lesser General Public License as
# published by the Free Software Foundation.
#
# Aenea is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with Aenea. If not, see <http://www.gnu.org/licenses/>.
#
# Copyright (2014) Alex Roper
# Alex Roper <[email protected]>
import os
import sys
import time
import re
import jsonrpclib
import jsonrpclib.SimpleJSONRPCServer
import config
import logging
# logging.basicConfig(level=logging.DEBUG)
import applescript
import objc  # pyobjc; needed below to recognize NSString instances
from Quartz.CoreGraphics import *
_MOUSE_BUTTONS = {
'left': 1,
'middle': 2,
'right': 3,
'wheelup': 4,
'wheeldown': 5
}
_MOUSE_CLICKS = {
'click': 'click',
'down': 'mousedown',
'up': 'mouseup'
}
_KEY_PRESSES = {
'press': '',
'up': 'up',
'down': 'down'
}
_MOUSE_MOVE_COMMANDS = {
'absolute': 'mousemove',
'relative': 'mousemove_relative',
'relative_active': 'mousemove_active'
}
_SERVER_INFO = {
'window_manager': 'osx',
'operating_system': 'darwin',
'platform': 'darwin',
'display': 'cocoa',
'server': 'aenea_reference',
'server_version': 1
}
_MOD_TRANSLATION = {
'alt': 'alt',
'shift': 'shift',
'control': 'control',
'super': 'command',
'command': 'command'
}
# The key maps are broken up into different sections because AppleScript has
# different semantics for keypress versus keycode
# these are used via keystroke, and need quoting.
_QUOTED_KEY_TRANSLATION = {
'ampersand': '&',
'apostrophe': "'",
'asterisk': '*',
'at': '@',
'backslash': '\\',
'backtick': '`',
'bar': '-',
'caret': '^',
'colon': ':',
'comma': ',',
'dollar': '$',
'dot': '.',
'dquote': '"',
'equal': '=',
'exclamation': '!',
'hash': '#',
'hyphen': '-',
'langle': '<',
'lbrace': '{',
'lbracket': '[',
'lparen': '(',
'minus': '-',
'percent': '%',
'plus': '+',
'question': '?',
'rangle': '>',
'rbrace': '}',
'rbracket': ']',
'rparen': ')',
'semicolon': ';',
'slash': '/',
'space': ' ',
'squote': "'",
'tilde': '~',
'underscore': '_'
}
_MODIFIER_KEY_DIRECT = {
# modifiers
'command': 'command',
'shift': 'shift',
'option': 'option',
'control': 'control',
'rightshift': 'rightshift',
'rightoption': 'rightoption',
'rightcontrol': 'rightcontrol',
'function': 'function'
}
# from /System/Library/Frameworks/Carbon.framework/Versions/A/Frameworks/ \
# HIToolbox.framework/Versions/A/Headers/Events.h
# these need applescript "key code " and a number to operate
_KEYCODE_TRANSLATION = {
# 'apps': 'Menu', ???
'a': 0,
's': 1,
'd': 2,
'f': 3,
'h': 4,
'g': 5,
'z': 6,
'x': 7,
'c': 8,
'v': 9,
'b': 11,
'q': 12,
'w': 13,
'e': 14,
'r': 15,
'y': 16,
't': 17,
'1': 18,
'2': 19,
'3': 20,
'4': 21,
'6': 22,
'5': 23,
'equal': 24,
'9': 25,
'7': 26,
'minus': 27,
'8': 28,
'0': 29,
'rbracket': 30, # rightbracket
'o': 31,
'u': 32,
'lbracket': 33, # leftbracket
'i': 34,
'p': 35,
'enter': 36, # return
'l': 37,
'j': 38,
'quote': 39,
'k': 40,
'semicolon': 41,
'backslash': 42,
'comma': 43,
'slash': 44,
'n': 45,
'm': 46,
'period': 47,
'tab': 48,
'space': 49,
'grave': 50,
'backspace': 51, # delete
'escape': 53,
'capslock': 57,
'f17': 64,
'npdecimal': 65,
'npmultiply': 67,
'npplus': 69,
'npclear': 71,
'volumeup': 72,
'volumedown': 73,
'mute': 74,
'npdivide': 75,
'npenter': 76,
'npminus': 78,
'f18': 79,
'f19': 80,
'keypadequals': 81,
'np0': 82, # np = numberpad
'np1': 83,
'np2': 84,
'np3': 85,
'np4': 86,
'np5': 87,
'np6': 88,
'np7': 89,
'f20': 90,
'np8': 91,
'np9': 92,
'jis_yen': 93,
'jis_underscore': 94,
'jis_keypadcomma': 95,
'f5': 96,
'f6': 97,
'f7': 98,
'f3': 99,
'f8': 100,
'f9': 101,
'jis_eisu': 102,
'f11': 103,
'jis_kana': 104,
'f13': 105,
'f16': 106,
'f14': 107,
'f10': 109,
'f12': 111,
'f15': 113,
'help': 114,
'home': 115,
'pgup': 116, # pageup
'del': 117, # forwarddelete
'f4': 118,
'end': 119,
'f2': 120,
'pgdown': 121, # pagedown
'f1': 122,
'left': 123, # leftarrow
'right': 124, # rightarrow
'down': 125, # downarrow
'up': 126 # uparrow
}
def write_command(message, arguments=' -f -', executable='???'):
print 'echo \'%s\' | %s %s' % (message, executable, arguments)
with os.popen('%s %s' % (executable, arguments), 'w') as fd:
fd.write(message)
def get_active_window():
'''Returns the window id and title of the active window.'''
script = applescript.AppleScript('''
global frontApp, frontAppName, windowTitle
set windowTitle to ""
tell application "System Events"
set frontApp to first application process whose frontmost is true
set frontAppName to name of frontApp
tell process frontAppName
tell (1st window whose value of attribute "AXMain" is true)
set windowTitle to value of attribute "AXTitle"
end tell
end tell
end tell
return {frontAppName, windowTitle}
''')
# window_id isn't really a unique id, instead it's just the app name -- but
# still useful for automating through applescript
window_id, window_title = script.run()
if window_id:
return window_id.encode('utf-8'), window_title.encode('utf-8')
else:
return None, None
def map_window_properties(properties):
p = {}
for key in properties:
short_key = re.match(r".*\('(.*)'\).*", str(key)) # is there a better
# way to access keys that are instances?
p[str(short_key.group(1))] = properties[key]
return p
def get_geometry(window_id=None):
p = get_window_properties(window_id)
frame = {'x': p['posn'][0],
'y': p['posn'][1],
'width': p['ptsz'][0],
'height': p['ptsz'][1]}
return frame # what to do about screen?
def get_window_properties(window_id=None):
if window_id is None:
window_id, _ = get_active_window()
cmd = '''tell application "System Events" to tell application process "%s"
try
get properties of window 1
on error errmess
log errmess
end try
end tell
''' % window_id
script = applescript.AppleScript(cmd)
properties = script.run()
p = map_window_properties(properties)
return p
def transform_relative_mouse_event(event):
geo = get_geometry()
dx, dy = map(int, map(float, event.split()))
return [('mousemove', '%i %i' % (geo['x'] + dx, geo['y'] + dy))]
def get_context():
'''return a dictionary of window properties for the currently active
window. it is fine to include platform specific information, but
at least include title and executable.'''
window_id, window_title = get_active_window()
properties = get_window_properties(window_id)
properties['id'] = window_id
properties['title'] = window_title
# Types in 'breaking' throw an exception in jsonrpclib, so
# they need to be converted to strings.
breaking = [objc.pyobjc_unicode, # NSString
applescript.aecodecs.AEType] # AppleEvent
for key in properties:
for c in breaking:
if isinstance(properties[key], c):
if hasattr(properties[key], 'encode') and \
callable(getattr(properties[key], 'encode')):
# pyobjc_unicode aren't accepted by 'str()', but need
# to be converted or jsonrpclib balks.
properties[key] = properties[key].encode('utf-8')
else:
# AEType doesn't respond to 'encode()'
properties[key] = str(properties[key])
break
logging.debug(properties)
return properties
def key_press(
key,
modifiers=(),
direction='press',
count=1,
count_delay=None
):
    '''press a key possibly modified by modifiers. direction may be
    'press', 'down', or 'up'. modifiers may contain 'alt', 'shift',
    'control', 'super' (the X11 reference server additionally accepts
    'hyper', 'meta', and 'flag' as aliases of super). count is the
    number of times to press it. count_delay is the delay in ms
    between presses.'''
    # .format() is applied to the whole concatenated message so that every
    # placeholder ({key}, {modifiers}, ...) is filled in
    logging.debug(("\nkey = {key} modifiers = {modifiers} "
                   "direction = {direction} "
                   "count = {count} count_delay = {count_delay}").format(
                       modifiers=modifiers,
                       direction=direction,
                       count=count,
                       count_delay=count_delay,
                       key=key))
    if count_delay is None or count < 2:
        delay = ''
    else:
        # %f keeps sub-second delays; integer formatting would truncate
        # anything under one second to 'delay 0'
        delay = 'delay %f ' % (count_delay / 1000.0)
if modifiers and hasattr(modifiers, 'lower'):
modifiers = [modifiers]
modifiers = [_MOD_TRANSLATION.get(mod, mod) for mod in modifiers]
logging.debug("modifiers = %s" % modifiers)
    command = None
    key_to_press = _MODIFIER_KEY_DIRECT.get(key.lower(), None)
    if key_to_press:
        if direction == 'down' or direction == 'up':
            command = '{key_to_press} key {direction}'.format(
                key_to_press=key_to_press, direction=direction)
    else:
        key_to_press = _QUOTED_KEY_TRANSLATION.get(key.lower(), None)
        if key_to_press:
            command = 'keystroke "{0}"'.format(key_to_press)
        else:
            # 'a' maps to key code 0, so compare against None, not truthiness
            key_to_press = _KEYCODE_TRANSLATION.get(key.lower(), None)
            if key_to_press is not None:
                command = 'key code {0}'.format(key_to_press)
    if command is None:
        raise RuntimeError("Don't know how to handle keystroke {0}".format(key))
if modifiers:
elems = map(lambda s: "%s down" % s, modifiers)
key_command = "%s using {%s} " % (command, ', '.join(elems))
else:
key_command = command
script = applescript.AppleScript('''
tell application "System Events"
try
repeat {count} times
{key_command}
{delay}
end repeat
on error
key up {{control, shift, option, command}}
end try
end tell
'''.format(key_command=key_command, count=count, delay=delay))
script.run()
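# Illustrative usage (hypothetical key chords; any entries from the
# translation tables above work the same way):
#   key_press('4', modifiers=['super', 'shift'])   # cmd+shift+4
#   key_press('left', count=3, count_delay=50)     # tap left arrow 3 times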
def write_text(text, paste=False):
'''send text formatted exactly as written to active window. will use
pbpaste clipboard to paste the text instead of typing it.'''
logging.debug("text = %s paste = %s" % (text, paste))
if text:
# copy the pasted text to the clipboard
write_command(text, arguments='', executable='pbcopy')
# paste
key_press('v', 'super')
def mouseEvent(type, posx, posy, clickCount=1):
theEvent = CGEventCreateMouseEvent(
None, type, (posx, posy), kCGMouseButtonLeft)
CGEventSetIntegerValueField(theEvent, kCGMouseEventClickState, clickCount)
CGEventPost(kCGHIDEventTap, theEvent)
CGEventSetType(theEvent, type)
def mousemove(posx, posy):
mouseEvent(kCGEventMouseMoved, posx, posy)
def trigger_mouseclick(button, direction, posx, posy, clickCount=1):
# button: number 1-5, direction {click, up, down}
click_mapping = {
1: [kCGEventLeftMouseDown, kCGEventLeftMouseUp],
2: [kCGEventOtherMouseDown, kCGEventOtherMouseUp],
3: [kCGEventRightMouseDown, kCGEventRightMouseUp]
}
if button == 4 or button == 5:
yScroll = -10 if button == 5 else 10 # wheeldown -, wheelup +
theEvent = CGEventCreateScrollWheelEvent(
None, kCGScrollEventUnitLine, 1, yScroll)
for _ in xrange(clickCount):
CGEventPost(kCGHIDEventTap, theEvent)
elif direction == 'click':
theEvent = CGEventCreateMouseEvent(
None, click_mapping[button][0], (posx, posy), kCGMouseButtonLeft)
for _ in xrange(clickCount):
CGEventSetType(theEvent, click_mapping[button][0])
CGEventSetIntegerValueField(
theEvent, kCGMouseEventClickState, clickCount)
CGEventPost(kCGHIDEventTap, theEvent)
CGEventSetType(theEvent, click_mapping[button][1])
CGEventPost(kCGHIDEventTap, theEvent)
    # else: direction 'up'/'down' for plain buttons is not implemented here
def click_mouse(
button,
direction='click',
count=1,
count_delay=None
):
'''click the mouse button specified. button maybe one of 'right',
'left', 'middle', 'wheeldown', 'wheelup'.'''
logging.debug("button = "+button)
if count_delay is None or count < 2:
delay = 0
else:
delay = count_delay
try:
button = _MOUSE_BUTTONS[button]
except KeyError:
button = int(button)
logging.debug('_MOUSE_CLICKS[direction]' + _MOUSE_CLICKS[direction])
ourEvent = CGEventCreate(None)
currentpos = CGEventGetLocation(ourEvent) # Save current mouse position
trigger_mouseclick(
button, _MOUSE_CLICKS[direction],
int(currentpos.x), int(currentpos.y), count)
def move_mouse(
x,
y,
reference='absolute',
proportional=False,
phantom=None
):
'''move the mouse to the specified coordinates. reference may be one
of 'absolute', 'relative', or 'relative_active'. if phantom is not
None, it is a button as click_mouse. If possible, click that
location without moving the mouse. If not, the server will move the
mouse there and click.'''
geo = get_geometry()
if proportional:
x = geo['width'] * x
y = geo['height'] * y
mousemove(x, y)
if phantom is not None:
trigger_mouseclick(1, 'click', x, y, 1)
def pause(amount):
'''pause amount in ms.'''
time.sleep(amount / 1000.)
def server_info():
return _SERVER_INFO
def list_rpc_commands():
_RPC_COMMANDS = {
'get_context': get_context,
'key_press': key_press,
'write_text': write_text,
'click_mouse': click_mouse,
'move_mouse': move_mouse,
'server_info': server_info,
'pause': pause,
}
return _RPC_COMMANDS
def multiple_actions(actions):
    '''execute multiple rpc commands, aborting on any error. will not
    return anything ever. actions is a sequence of (method, params,
    optional) triples, in the spirit of JSON-RPC multicall. Guaranteed
    to execute in specified order.'''
for (method, parameters, optional) in actions:
commands = list_rpc_commands()
if method in commands:
commands[method](*parameters, **optional)
else:
break
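# Illustrative call (hypothetical coordinates and keys) showing the
# (method, params, optional) triples multiple_actions() iterates over:
#   multiple_actions([
#       ('move_mouse', [100, 200], {}),
#       ('click_mouse', ['left'], {'count': 2}),
#       ('write_text', ['hello'], {}),
#   ])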
def setup_server(host, port):
print "started on host = %s port = %s " % (host, port)
server = jsonrpclib.SimpleJSONRPCServer.SimpleJSONRPCServer((host, port))
for command in list_rpc_commands():
server.register_function(globals()[command])
server.register_function(multiple_actions)
return server
if __name__ == '__main__':
if len(sys.argv) == 2 and sys.argv[-1] == 'getcontext':
ctx = get_context()
try:
import pprint
pprint.pprint(ctx)
except ImportError:
print ctx
else:
if '-d' in sys.argv or '--daemon' in sys.argv:
if os.fork() == 0:
os.setsid()
if os.fork() == 0:
os.chdir('/')
os.umask(0)
# Safe upper bound on number of fds we could
# possibly have opened.
for fd in range(64):
try:
os.close(fd)
except OSError:
pass
os.open(os.devnull, os.O_RDWR)
os.dup2(0, 1)
os.dup2(0, 2)
else:
os._exit(0)
else:
os._exit(0)
server = setup_server(config.HOST, config.PORT)
server.serve_forever()
| lgpl-3.0 |
CoDEmanX/ArangoDB | 3rdParty/V8-4.3.61/build/gyp/test/intermediate_dir/gyptest-intermediate-dir.py | 243 | 1398 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that targets have independent INTERMEDIATE_DIRs.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('test.gyp', chdir='src')
test.build('test.gyp', 'target1', chdir='src')
# Check stuff exists.
intermediate_file1 = test.read('src/outfile.txt')
test.must_contain(intermediate_file1, 'target1')
shared_intermediate_file1 = test.read('src/shared_outfile.txt')
test.must_contain(shared_intermediate_file1, 'shared_target1')
test.run_gyp('test2.gyp', chdir='src')
# Force the shared intermediate to be rebuilt.
test.sleep()
test.touch('src/shared_infile.txt')
test.build('test2.gyp', 'target2', chdir='src')
# Check INTERMEDIATE_DIR file didn't get overwritten but SHARED_INTERMEDIATE_DIR
# file did.
intermediate_file2 = test.read('src/outfile.txt')
test.must_contain(intermediate_file1, 'target1')
test.must_contain(intermediate_file2, 'target2')
shared_intermediate_file2 = test.read('src/shared_outfile.txt')
if shared_intermediate_file1 != shared_intermediate_file2:
test.fail_test(shared_intermediate_file1 + ' != ' + shared_intermediate_file2)
test.must_contain(shared_intermediate_file1, 'shared_target2')
test.must_contain(shared_intermediate_file2, 'shared_target2')
test.pass_test()
| apache-2.0 |
hortonworks/hortonworks-sandbox | desktop/core/ext-py/ctypes-1.0.2/ctypes/test/test_strings.py | 15 | 6775 | import unittest
from ctypes import *
class StringArrayTestCase(unittest.TestCase):
def test(self):
BUF = c_char * 4
buf = BUF("a", "b", "c")
self.failUnlessEqual(buf.value, "abc")
self.failUnlessEqual(buf.raw, "abc\000")
buf.value = "ABCD"
self.failUnlessEqual(buf.value, "ABCD")
self.failUnlessEqual(buf.raw, "ABCD")
buf.value = "x"
self.failUnlessEqual(buf.value, "x")
self.failUnlessEqual(buf.raw, "x\000CD")
buf[1] = "Z"
self.failUnlessEqual(buf.value, "xZCD")
self.failUnlessEqual(buf.raw, "xZCD")
self.assertRaises(ValueError, setattr, buf, "value", "aaaaaaaa")
self.assertRaises(TypeError, setattr, buf, "value", 42)
def test_c_buffer_value(self):
buf = c_buffer(32)
buf.value = "Hello, World"
self.failUnlessEqual(buf.value, "Hello, World")
self.failUnlessRaises(TypeError, setattr, buf, "value", buffer("Hello, World"))
self.assertRaises(TypeError, setattr, buf, "value", buffer("abc"))
self.assertRaises(ValueError, setattr, buf, "raw", buffer("x" * 100))
def test_c_buffer_raw(self):
buf = c_buffer(32)
buf.raw = buffer("Hello, World")
self.failUnlessEqual(buf.value, "Hello, World")
self.assertRaises(TypeError, setattr, buf, "value", buffer("abc"))
self.assertRaises(ValueError, setattr, buf, "raw", buffer("x" * 100))
def test_param_1(self):
BUF = c_char * 4
buf = BUF()
## print c_char_p.from_param(buf)
def test_param_2(self):
BUF = c_char * 4
buf = BUF()
## print BUF.from_param(c_char_p("python"))
## print BUF.from_param(BUF(*"pyth"))
try:
c_wchar
except NameError:
pass
else:
class WStringArrayTestCase(unittest.TestCase):
def test(self):
BUF = c_wchar * 4
buf = BUF(u"a", u"b", u"c")
self.failUnlessEqual(buf.value, u"abc")
buf.value = u"ABCD"
self.failUnlessEqual(buf.value, u"ABCD")
buf.value = u"x"
self.failUnlessEqual(buf.value, u"x")
buf[1] = u"Z"
self.failUnlessEqual(buf.value, u"xZCD")
class StringTestCase(unittest.TestCase):
def XX_test_basic_strings(self):
cs = c_string("abcdef")
# Cannot call len on a c_string any longer
self.assertRaises(TypeError, len, cs)
self.failUnlessEqual(sizeof(cs), 7)
# The value property is the string up to the first terminating NUL.
self.failUnlessEqual(cs.value, "abcdef")
self.failUnlessEqual(c_string("abc\000def").value, "abc")
# The raw property is the total buffer contents:
self.failUnlessEqual(cs.raw, "abcdef\000")
self.failUnlessEqual(c_string("abc\000def").raw, "abc\000def\000")
# We can change the value:
cs.value = "ab"
self.failUnlessEqual(cs.value, "ab")
self.failUnlessEqual(cs.raw, "ab\000\000\000\000\000")
cs.raw = "XY"
self.failUnlessEqual(cs.value, "XY")
self.failUnlessEqual(cs.raw, "XY\000\000\000\000\000")
self.assertRaises(TypeError, c_string, u"123")
def XX_test_sized_strings(self):
# New in releases later than 0.4.0:
self.assertRaises(TypeError, c_string, None)
# New in releases later than 0.4.0:
# c_string(number) returns an empty string of size number
self.failUnless(len(c_string(32).raw) == 32)
self.assertRaises(ValueError, c_string, -1)
self.assertRaises(ValueError, c_string, 0)
# These tests fail, because it is no longer initialized
## self.failUnless(c_string(2).value == "")
## self.failUnless(c_string(2).raw == "\000\000")
self.failUnless(c_string(2).raw[-1] == "\000")
self.failUnless(len(c_string(2).raw) == 2)
def XX_test_initialized_strings(self):
self.failUnless(c_string("ab", 4).raw[:2] == "ab")
self.failUnless(c_string("ab", 4).raw[-1] == "\000")
self.failUnless(c_string("ab", 2).raw == "a\000")
def XX_test_toolong(self):
cs = c_string("abcdef")
# Much too long string:
self.assertRaises(ValueError, setattr, cs, "value", "123456789012345")
# One char too long values:
self.assertRaises(ValueError, setattr, cs, "value", "1234567")
## def test_perf(self):
## check_perf()
try:
c_wchar
except NameError:
pass
else:
class WStringTestCase(unittest.TestCase):
def test_wchar(self):
c_wchar(u"x")
repr(byref(c_wchar(u"x")))
c_wchar("x")
def X_test_basic_wstrings(self):
cs = c_wstring(u"abcdef")
# XXX This behaviour is about to change:
# len returns the size of the internal buffer in bytes.
# This includes the terminating NUL character.
self.failUnless(sizeof(cs) == 14)
# The value property is the string up to the first terminating NUL.
self.failUnless(cs.value == u"abcdef")
self.failUnless(c_wstring(u"abc\000def").value == u"abc")
self.failUnless(c_wstring(u"abc\000def").value == u"abc")
# The raw property is the total buffer contents:
self.failUnless(cs.raw == u"abcdef\000")
self.failUnless(c_wstring(u"abc\000def").raw == u"abc\000def\000")
# We can change the value:
cs.value = u"ab"
self.failUnless(cs.value == u"ab")
self.failUnless(cs.raw == u"ab\000\000\000\000\000")
self.assertRaises(TypeError, c_wstring, "123")
self.assertRaises(ValueError, c_wstring, 0)
def X_test_toolong(self):
cs = c_wstring(u"abcdef")
# Much too long string:
self.assertRaises(ValueError, setattr, cs, "value", u"123456789012345")
# One char too long values:
self.assertRaises(ValueError, setattr, cs, "value", u"1234567")
def run_test(rep, msg, func, arg):
items = range(rep)
from time import clock
start = clock()
for i in items:
func(arg); func(arg); func(arg); func(arg); func(arg)
stop = clock()
print "%20s: %.2f us" % (msg, ((stop-start)*1e6/5/rep))
def check_perf():
# Construct 5 objects
REP = 200000
run_test(REP, "c_string(None)", c_string, None)
run_test(REP, "c_string('abc')", c_string, 'abc')
# Python 2.3 -OO, win2k, P4 700 MHz:
#
# c_string(None): 1.75 us
# c_string('abc'): 2.74 us
# Python 2.2 -OO, win2k, P4 700 MHz:
#
# c_string(None): 2.95 us
# c_string('abc'): 3.67 us
if __name__ == '__main__':
## check_perf()
unittest.main()
| apache-2.0 |
codeforberlin/wfs-downloader | wfs_downloader/download.py | 1 | 4730 | from __future__ import print_function
import argparse
import os
import yaml
import sys
try:
# py2
from urllib import urlretrieve
import urllib
except ImportError:
# py3
from urllib.request import urlretrieve
import urllib.request
from lxml import etree
def main():
parser = argparse.ArgumentParser(usage='Downloads GML files from a set of WFS service in a pseudo-paginated way using bounding boxes and combine them again to one file. The WFS services are specified in settings.py.')
parser.add_argument('config', help='config file')
parser.add_argument('--no-download', help='skip the download', action='store_true')
parser.add_argument('--no-combine', help='skip the combine', action='store_true')
args = parser.parse_args()
with open(args.config) as f:
config = yaml.load(f.read())
if not args.no_download:
download_files(config)
if not args.no_combine:
combine_files(config)
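# For reference, the config file is YAML using exactly the keys consumed in
# download_files()/combine_files(); the values here are illustrative only:
#
#   url: https://example.org/geoserver/wfs
#   layer: namespace:layer_name
#   projection: EPSG:25833
#   bbox:
#     west: 370000.0
#     east: 415000.0
#     south: 5800000.0
#     north: 5837000.0
#   size: 10000.0
#   outputfile: output.gml
#   tmpdir: tmp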
def download_files(config):
if (sys.version_info > (3, 0)):
# Python 3 code in this block
opener = urllib.request.build_opener()
opener.addheaders = [('User-agent', 'wfs-downloader/0.1')]
urllib.request.install_opener(opener)
else:
# Python 2 code in this block
urllib.URLopener.version = "wfs-downloader/0.1"
west_range = list(arange(config['bbox']['west'], config['bbox']['east'], config['size']))
south_range = list(arange(config['bbox']['south'], config['bbox']['north'], config['size']))
for west in west_range:
for south in south_range:
url = '%(url)s?service=WFS&request=GetFeature&version=2.0.0&typeNames=%(layer)s&srsName=%(srid)s&BBOX=%(west)f,%(south)f,%(east)f,%(north)f' % {
'url': config['url'],
'layer': config['layer'],
'srid': config['projection'],
'west': west,
'east': west + config['size'],
'south': south,
'north': south + config['size']
}
name, extension = os.path.splitext(config['outputfile'])
filename = os.path.join(config['tmpdir'], '%(name)s_%(west)s_%(south)s%(extension)s' % {
'name': name,
'west': west,
'south': south,
'extension': extension
})
print('fetching %s' % filename)
urlretrieve(url, filename)
def combine_files(config):
# read the first xml file
name, extension = os.path.splitext(config['outputfile'])
first_filename = os.path.join(config['tmpdir'], '%(name)s_%(west)s_%(south)s%(extension)s' % {
'name': name,
'west': config['bbox']['west'],
'south': config['bbox']['south'],
'extension': extension
})
first_xml = etree.parse(first_filename)
first_root = first_xml.getroot()
nsmap = first_root.nsmap
try:
number_matched = int(first_root.get('numberMatched'))
except ValueError:
number_matched = False
try:
number_returned = int(first_root.get('numberReturned'))
except ValueError:
number_returned = False
for filename in os.listdir(config['tmpdir']):
if filename.startswith(name):
abs_filename = os.path.join(config['tmpdir'], filename)
if abs_filename != first_filename:
print('merging', abs_filename)
xml = etree.parse(abs_filename)
root = xml.getroot()
if number_matched is not False:
number_matched += int(root.get('numberMatched'))
if number_returned is not False:
number_returned += int(root.get('numberReturned'))
for node in xml.xpath('.//wfs:member', namespaces=nsmap):
first_root.append(node)
# manipulate numberMatched numberReturned
if number_matched is not False:
first_root.set('numberMatched', str(number_matched))
if number_returned is not False:
first_root.set('numberReturned', str(number_returned))
    # manipulate the extent / bounding box: lowerCorner is west/south,
    # upperCorner is east/north
    first_root.xpath('.//wfs:boundedBy/gml:Envelope/gml:lowerCorner', namespaces=nsmap)[0].text = '%s %s' % (config['bbox']['west'], config['bbox']['south'])
    first_root.xpath('.//wfs:boundedBy/gml:Envelope/gml:upperCorner', namespaces=nsmap)[0].text = '%s %s' % (config['bbox']['east'], config['bbox']['north'])
with open(config['outputfile'], 'wb') as f:
f.write(etree.tostring(first_xml))
def arange(start, stop, step):
current = start
while current < stop:
yield current
current += step
if __name__ == "__main__":
main()
| mit |
ErikKringen/kafka | tests/kafkatest/services/zookeeper.py | 8 | 5816 | # Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import re
import time
from ducktape.services.service import Service
from ducktape.utils.util import wait_until
from ducktape.cluster.remoteaccount import RemoteCommandError
from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin
from kafkatest.services.security.security_config import SecurityConfig
from kafkatest.version import DEV_BRANCH
class ZookeeperService(KafkaPathResolverMixin, Service):
logs = {
"zk_log": {
"path": "/mnt/zk.log",
"collect_default": True},
"zk_data": {
"path": "/mnt/zookeeper",
"collect_default": False}
}
def __init__(self, context, num_nodes, zk_sasl = False):
"""
:type context
"""
self.kafka_opts = ""
self.zk_sasl = zk_sasl
super(ZookeeperService, self).__init__(context, num_nodes)
@property
def security_config(self):
return SecurityConfig(self.context, zk_sasl=self.zk_sasl)
@property
def security_system_properties(self):
return "-Dzookeeper.authProvider.1=org.apache.zookeeper.server.auth.SASLAuthenticationProvider " \
"-DjaasLoginRenew=3600000 " \
"-Djava.security.auth.login.config=%s " \
"-Djava.security.krb5.conf=%s " % (self.security_config.JAAS_CONF_PATH, self.security_config.KRB5CONF_PATH)
@property
def zk_principals(self):
return " zkclient " + ' '.join(['zookeeper/' + zk_node.account.hostname for zk_node in self.nodes])
def start_node(self, node):
idx = self.idx(node)
self.logger.info("Starting ZK node %d on %s", idx, node.account.hostname)
node.account.ssh("mkdir -p /mnt/zookeeper")
node.account.ssh("echo %d > /mnt/zookeeper/myid" % idx)
self.security_config.setup_node(node)
config_file = self.render('zookeeper.properties')
self.logger.info("zookeeper.properties:")
self.logger.info(config_file)
node.account.create_file("/mnt/zookeeper.properties", config_file)
start_cmd = "export KAFKA_OPTS=\"%s\";" % (self.kafka_opts + ' ' + self.security_system_properties) \
if self.security_config.zk_sasl else self.kafka_opts
start_cmd += "%s " % self.path.script("zookeeper-server-start.sh", node)
start_cmd += "/mnt/zookeeper.properties 1>> %(path)s 2>> %(path)s &" % self.logs["zk_log"]
node.account.ssh(start_cmd)
time.sleep(5) # give it some time to start
def pids(self, node):
try:
cmd = "ps ax | grep -i zookeeper | grep java | grep -v grep | awk '{print $1}'"
pid_arr = [pid for pid in node.account.ssh_capture(cmd, allow_fail=True, callback=int)]
return pid_arr
except (RemoteCommandError, ValueError) as e:
return []
def alive(self, node):
return len(self.pids(node)) > 0
def stop_node(self, node):
idx = self.idx(node)
self.logger.info("Stopping %s node %d on %s" % (type(self).__name__, idx, node.account.hostname))
node.account.kill_process("zookeeper", allow_fail=False)
wait_until(lambda: not self.alive(node), timeout_sec=5, err_msg="Timed out waiting for zookeeper to stop.")
def clean_node(self, node):
self.logger.info("Cleaning ZK node %d on %s", self.idx(node), node.account.hostname)
if self.alive(node):
self.logger.warn("%s %s was still alive at cleanup time. Killing forcefully..." %
(self.__class__.__name__, node.account))
node.account.kill_process("zookeeper", clean_shutdown=False, allow_fail=True)
node.account.ssh("rm -rf /mnt/zookeeper /mnt/zookeeper.properties /mnt/zk.log", allow_fail=False)
def connect_setting(self):
return ','.join([node.account.hostname + ':2181' for node in self.nodes])
#
# This call is used to simulate a rolling upgrade to enable/disable
# the use of ZooKeeper ACLs.
#
def zookeeper_migration(self, node, zk_acl):
la_migra_cmd = "%s --zookeeper.acl=%s --zookeeper.connect=%s" % \
(self.path.script("zookeeper-security-migration.sh", node), zk_acl, self.connect_setting())
node.account.ssh(la_migra_cmd)
def query(self, path):
"""
Queries zookeeper for data associated with 'path' and returns all fields in the schema
"""
kafka_run_class = self.path.script("kafka-run-class.sh", DEV_BRANCH)
cmd = "%s kafka.tools.ZooKeeperMainWrapper -server %s get %s" % \
(kafka_run_class, self.connect_setting(), path)
self.logger.debug(cmd)
node = self.nodes[0]
result = None
for line in node.account.ssh_capture(cmd):
# loop through all lines in the output, but only hold on to the first match
if result is None:
match = re.match("^({.+})$", line)
if match is not None:
result = match.groups()[0]
return result
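    # Illustrative use from a test (the /controller znode is a real Kafka
    # path; the JSON payload shown is hypothetical):
    #   zk.query('/controller')  # -> '{"version":1,"brokerid":1,...}'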
| apache-2.0 |
rossburton/yocto-autobuilder | lib/python2.7/site-packages/python_dateutil-1.5-py2.7.egg/dateutil/tzwin.py | 304 | 5828 | # This code was originally contributed by Jeffrey Harris.
import datetime
import struct
import _winreg
__author__ = "Jeffrey Harris & Gustavo Niemeyer <[email protected]>"
__all__ = ["tzwin", "tzwinlocal"]
ONEWEEK = datetime.timedelta(7)
TZKEYNAMENT = r"SOFTWARE\Microsoft\Windows NT\CurrentVersion\Time Zones"
TZKEYNAME9X = r"SOFTWARE\Microsoft\Windows\CurrentVersion\Time Zones"
TZLOCALKEYNAME = r"SYSTEM\CurrentControlSet\Control\TimeZoneInformation"
def _settzkeyname():
global TZKEYNAME
handle = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
try:
_winreg.OpenKey(handle, TZKEYNAMENT).Close()
TZKEYNAME = TZKEYNAMENT
except WindowsError:
TZKEYNAME = TZKEYNAME9X
handle.Close()
_settzkeyname()
class tzwinbase(datetime.tzinfo):
"""tzinfo class based on win32's timezones available in the registry."""
def utcoffset(self, dt):
if self._isdst(dt):
return datetime.timedelta(minutes=self._dstoffset)
else:
return datetime.timedelta(minutes=self._stdoffset)
def dst(self, dt):
if self._isdst(dt):
minutes = self._dstoffset - self._stdoffset
return datetime.timedelta(minutes=minutes)
else:
return datetime.timedelta(0)
def tzname(self, dt):
if self._isdst(dt):
return self._dstname
else:
return self._stdname
def list():
"""Return a list of all time zones known to the system."""
handle = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
tzkey = _winreg.OpenKey(handle, TZKEYNAME)
result = [_winreg.EnumKey(tzkey, i)
for i in range(_winreg.QueryInfoKey(tzkey)[0])]
tzkey.Close()
handle.Close()
return result
list = staticmethod(list)
def display(self):
return self._display
def _isdst(self, dt):
dston = picknthweekday(dt.year, self._dstmonth, self._dstdayofweek,
self._dsthour, self._dstminute,
self._dstweeknumber)
dstoff = picknthweekday(dt.year, self._stdmonth, self._stddayofweek,
self._stdhour, self._stdminute,
self._stdweeknumber)
if dston < dstoff:
return dston <= dt.replace(tzinfo=None) < dstoff
else:
return not dstoff <= dt.replace(tzinfo=None) < dston
class tzwin(tzwinbase):
def __init__(self, name):
self._name = name
handle = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
tzkey = _winreg.OpenKey(handle, "%s\%s" % (TZKEYNAME, name))
keydict = valuestodict(tzkey)
tzkey.Close()
handle.Close()
self._stdname = keydict["Std"].encode("iso-8859-1")
self._dstname = keydict["Dlt"].encode("iso-8859-1")
self._display = keydict["Display"]
        # See http://www.jsiinc.com/SUBA/tip0300/rh0398.htm
tup = struct.unpack("=3l16h", keydict["TZI"])
self._stdoffset = -tup[0]-tup[1] # Bias + StandardBias * -1
self._dstoffset = self._stdoffset-tup[2] # + DaylightBias * -1
(self._stdmonth,
self._stddayofweek, # Sunday = 0
self._stdweeknumber, # Last = 5
self._stdhour,
self._stdminute) = tup[4:9]
(self._dstmonth,
self._dstdayofweek, # Sunday = 0
self._dstweeknumber, # Last = 5
self._dsthour,
self._dstminute) = tup[12:17]
def __repr__(self):
return "tzwin(%s)" % repr(self._name)
def __reduce__(self):
return (self.__class__, (self._name,))
class tzwinlocal(tzwinbase):
def __init__(self):
handle = _winreg.ConnectRegistry(None, _winreg.HKEY_LOCAL_MACHINE)
tzlocalkey = _winreg.OpenKey(handle, TZLOCALKEYNAME)
keydict = valuestodict(tzlocalkey)
tzlocalkey.Close()
self._stdname = keydict["StandardName"].encode("iso-8859-1")
self._dstname = keydict["DaylightName"].encode("iso-8859-1")
try:
tzkey = _winreg.OpenKey(handle, "%s\%s"%(TZKEYNAME, self._stdname))
_keydict = valuestodict(tzkey)
self._display = _keydict["Display"]
tzkey.Close()
except OSError:
self._display = None
handle.Close()
self._stdoffset = -keydict["Bias"]-keydict["StandardBias"]
self._dstoffset = self._stdoffset-keydict["DaylightBias"]
        # See http://www.jsiinc.com/SUBA/tip0300/rh0398.htm
tup = struct.unpack("=8h", keydict["StandardStart"])
(self._stdmonth,
self._stddayofweek, # Sunday = 0
self._stdweeknumber, # Last = 5
self._stdhour,
self._stdminute) = tup[1:6]
tup = struct.unpack("=8h", keydict["DaylightStart"])
(self._dstmonth,
self._dstdayofweek, # Sunday = 0
self._dstweeknumber, # Last = 5
self._dsthour,
self._dstminute) = tup[1:6]
def __reduce__(self):
return (self.__class__, ())
def picknthweekday(year, month, dayofweek, hour, minute, whichweek):
    """dayofweek == 0 means Sunday, whichweek 5 means last instance"""
    first = datetime.datetime(year, month, 1, hour, minute)
    weekdayone = first.replace(day=((dayofweek-first.isoweekday())%7+1))
    for n in xrange(whichweek):
        # weekdayone is already the first occurrence, so the nth occurrence
        # is n-1 weeks later; fall back week by week so that whichweek=5
        # resolves to the last occurrence that is still in the month
        dt = weekdayone+(whichweek-n-1)*ONEWEEK
        if dt.month == month:
            return dt
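# Sanity example for the arithmetic above (illustrative, not part of the
# module): the second Sunday of March 2014 is March 9th, so
#   picknthweekday(2014, 3, 0, 2, 0, 2) == datetime.datetime(2014, 3, 9, 2, 0)
# while whichweek=5 steps back week by week to the last in-month occurrence.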
def valuestodict(key):
"""Convert a registry key's values to a dictionary."""
dict = {}
size = _winreg.QueryInfoKey(key)[1]
for i in range(size):
data = _winreg.EnumValue(key, i)
dict[data[0]] = data[1]
return dict
| gpl-2.0 |
capsid/capsid-pipeline | setup.py | 1 | 1980 | #!/usr/bin/env python
# Copyright 2011(c) The Ontario Institute for Cancer Reserach. All rights reserved.
#
# This program and the accompanying materials are made available under the
# terms of the GNU Public License v3.0.
#
# You should have received a copy of the GNU General Public License along with
# this program. If not, see <http://www.gnu.org/licenses/>.i
from setuptools import setup
import sys, os
if sys.version_info < (2, 6):
    sys.exit('Python 2.6 or later is required')
def read(*path):
return open(os.path.join(os.path.abspath(os.path.dirname(__file__)), *path)).read()
VERSION = '1.6.2'
README = read('README.rst')
NEWS = read('NEWS.rst')
install_requires = ['cython', 'numpy', 'pysam', 'pymongo==2.8', 'biopython', 'bx-python']
if sys.version_info < (2, 7):
install_requires.append('argparse')
# Get classifiers from http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers = """
Development Status :: 5 - Production/Stable
License :: OSI Approved :: GNU General Public License (GPL)
Environment :: Console
Intended Audience :: Science/Research
Intended Audience :: Developers
Programming Language :: Python :: 2.7
Topic :: Scientific/Engineering :: Bio-Informatics
Operating System :: Unix
"""
config = {
'name': 'capsid',
'version': VERSION,
'description': 'CaPSID: Computational Pathogen Sequence Identification',
'long_description': README + '\n\n' + NEWS,
'license': 'GNU General Public License, Version 3.0',
'author': 'Shane Wilson',
'author_email': '[email protected]',
'url': 'https://github.com/capsid/capsid',
'download_url': 'https://github.com/capsid/capsid',
'classifiers': filter(None, classifiers.split("\n")),
'scripts': ['bin/capsid'],
'packages': ['capsid'],
'zip_safe': True,
'install_requires': install_requires
}
def setup_package():
"""Setup Package"""
setup(**config)
if __name__ == '__main__':
setup_package()
| gpl-3.0 |
miaoski/stemtokstop | tinysegmenter.py | 2 | 23035 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# TinySegmenter 0.1 -- Super compact Japanese tokenizer in Javascript
# (c) 2008 Taku Kudo <[email protected]>
# TinySegmenter is freely distributable under the terms of a new BSD licence.
# For details, see http://lilyx.net/pages/tinysegmenter_licence.txt
# "TinySegmenter in Python" is written by Masato Hagiwara
# See http://lilyx.net/pages/tinysegmenterp.html
import re
class TinySegmenter():
def __init__(self):
self._patterns = {
u"[一二三四五六七八九十百千万億兆]":u"M",
u"[一-龠々〆ヵヶ]":u"H",
u"[ぁ-ん]":u"I",
u"[ァ-ヴーア-ン゙ー]":u"K",
u"[a-zA-Za-zA-Z]":u"A",
u"[0-90-9]":u"N"
}
self._chartype = []
for pat in self._patterns:
regexp = re.compile(pat)
self._chartype.append([regexp, self._patterns[pat]])
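        # e.g. (illustrative): u"東" matches [一-龠々〆ヵヶ] -> "H" (kanji),
        # u"こ" -> "I" (hiragana), u"3" -> "N" (digit); characters matching
        # no pattern are scored with the catch-all type "O" elsewhere.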
self._BIAS = -332
self._BC1 = {u"HH":6,u"II":2461,u"KH":406,u"OH":-1378}
self._BC2 = {u"AA":-3267,u"AI":2744,u"AN":-878,u"HH":-4070,u"HM":-1711,u"HN":4012,u"HO":3761,u"IA":1327,u"IH":-1184,u"II":-1332,u"IK":1721,u"IO":5492,u"KI":3831,u"KK":-8741,u"MH":-3132,u"MK":3334,u"OO":-2920}
self._BC3 = {u"HH":996,u"HI":626,u"HK":-721,u"HN":-1307,u"HO":-836,u"IH":-301,u"KK":2762,u"MK":1079,u"MM":4034,u"OA":-1652,u"OH":266}
self._BP1 = {u"BB":295,u"OB":304,u"OO":-125,u"UB":352}
self._BP2 = {u"BO":60,u"OO":-1762}
self._BQ1 = {u"BHH":1150,u"BHM":1521,u"BII":-1158,u"BIM":886,u"BMH":1208,u"BNH":449,u"BOH":-91,u"BOO":-2597,u"OHI":451,u"OIH":-296,u"OKA":1851,u"OKH":-1020,u"OKK":904,u"OOO":2965}
self._BQ2 = {u"BHH":118,u"BHI":-1159,u"BHM":466,u"BIH":-919,u"BKK":-1720,u"BKO":864,u"OHH":-1139,u"OHM":-181,u"OIH":153,u"UHI":-1146}
self._BQ3 = {u"BHH":-792,u"BHI":2664,u"BII":-299,u"BKI":419,u"BMH":937,u"BMM":8335,u"BNN":998,u"BOH":775,u"OHH":2174,u"OHM":439,u"OII":280,u"OKH":1798,u"OKI":-793,u"OKO":-2242,u"OMH":-2402,u"OOO":11699}
self._BQ4 = {u"BHH":-3895,u"BIH":3761,u"BII":-4654,u"BIK":1348,u"BKK":-1806,u"BMI":-3385,u"BOO":-12396,u"OAH":926,u"OHH":266,u"OHK":-2036,u"ONN":-973}
self._BW1 = {u",と":660,u",同":727,u"B1あ":1404,u"B1同":542,u"、と":660,u"、同":727,u"」と":1682,u"あっ":1505,u"いう":1743,u"いっ":-2055,u"いる":672,u"うし":-4817,u"うん":665,u"から":3472,u"がら":600,u"こう":-790,u"こと":2083,u"こん":-1262,u"さら":-4143,u"さん":4573,u"した":2641,u"して":1104,u"すで":-3399,u"そこ":1977,u"それ":-871,u"たち":1122,u"ため":601,u"った":3463,u"つい":-802,u"てい":805,u"てき":1249,u"でき":1127,u"です":3445,u"では":844,u"とい":-4915,u"とみ":1922,u"どこ":3887,u"ない":5713,u"なっ":3015,u"など":7379,u"なん":-1113,u"にし":2468,u"には":1498,u"にも":1671,u"に対":-912,u"の一":-501,u"の中":741,u"ませ":2448,u"まで":1711,u"まま":2600,u"まる":-2155,u"やむ":-1947,u"よっ":-2565,u"れた":2369,u"れで":-913,u"をし":1860,u"を見":731,u"亡く":-1886,u"京都":2558,u"取り":-2784,u"大き":-2604,u"大阪":1497,u"平方":-2314,u"引き":-1336,u"日本":-195,u"本当":-2423,u"毎日":-2113,u"目指":-724,u"B1あ":1404,u"B1同":542,u"」と":1682}
self._BW2 = {u"..":-11822,u"11":-669,u"――":-5730,u"−−":-13175,u"いう":-1609,u"うか":2490,u"かし":-1350,u"かも":-602,u"から":-7194,u"かれ":4612,u"がい":853,u"がら":-3198,u"きた":1941,u"くな":-1597,u"こと":-8392,u"この":-4193,u"させ":4533,u"され":13168,u"さん":-3977,u"しい":-1819,u"しか":-545,u"した":5078,u"して":972,u"しな":939,u"その":-3744,u"たい":-1253,u"たた":-662,u"ただ":-3857,u"たち":-786,u"たと":1224,u"たは":-939,u"った":4589,u"って":1647,u"っと":-2094,u"てい":6144,u"てき":3640,u"てく":2551,u"ては":-3110,u"ても":-3065,u"でい":2666,u"でき":-1528,u"でし":-3828,u"です":-4761,u"でも":-4203,u"とい":1890,u"とこ":-1746,u"とと":-2279,u"との":720,u"とみ":5168,u"とも":-3941,u"ない":-2488,u"なが":-1313,u"など":-6509,u"なの":2614,u"なん":3099,u"にお":-1615,u"にし":2748,u"にな":2454,u"によ":-7236,u"に対":-14943,u"に従":-4688,u"に関":-11388,u"のか":2093,u"ので":-7059,u"のに":-6041,u"のの":-6125,u"はい":1073,u"はが":-1033,u"はず":-2532,u"ばれ":1813,u"まし":-1316,u"まで":-6621,u"まれ":5409,u"めて":-3153,u"もい":2230,u"もの":-10713,u"らか":-944,u"らし":-1611,u"らに":-1897,u"りし":651,u"りま":1620,u"れた":4270,u"れて":849,u"れば":4114,u"ろう":6067,u"われ":7901,u"を通":-11877,u"んだ":728,u"んな":-4115,u"一人":602,u"一方":-1375,u"一日":970,u"一部":-1051,u"上が":-4479,u"会社":-1116,u"出て":2163,u"分の":-7758,u"同党":970,u"同日":-913,u"大阪":-2471,u"委員":-1250,u"少な":-1050,u"年度":-8669,u"年間":-1626,u"府県":-2363,u"手権":-1982,u"新聞":-4066,u"日新":-722,u"日本":-7068,u"日米":3372,u"曜日":-601,u"朝鮮":-2355,u"本人":-2697,u"東京":-1543,u"然と":-1384,u"社会":-1276,u"立て":-990,u"第に":-1612,u"米国":-4268,u"11":-669};
self._BW3 = {u"あた":-2194,u"あり":719,u"ある":3846,u"い.":-1185,u"い。":-1185,u"いい":5308,u"いえ":2079,u"いく":3029,u"いた":2056,u"いっ":1883,u"いる":5600,u"いわ":1527,u"うち":1117,u"うと":4798,u"えと":1454,u"か.":2857,u"か。":2857,u"かけ":-743,u"かっ":-4098,u"かに":-669,u"から":6520,u"かり":-2670,u"が,u":1816,u"が、":1816,u"がき":-4855,u"がけ":-1127,u"がっ":-913,u"がら":-4977,u"がり":-2064,u"きた":1645,u"けど":1374,u"こと":7397,u"この":1542,u"ころ":-2757,u"さい":-714,u"さを":976,u"し,u":1557,u"し、":1557,u"しい":-3714,u"した":3562,u"して":1449,u"しな":2608,u"しま":1200,u"す.":-1310,u"す。":-1310,u"する":6521,u"ず,u":3426,u"ず、":3426,u"ずに":841,u"そう":428,u"た.":8875,u"た。":8875,u"たい":-594,u"たの":812,u"たり":-1183,u"たる":-853,u"だ.":4098,u"だ。":4098,u"だっ":1004,u"った":-4748,u"って":300,u"てい":6240,u"てお":855,u"ても":302,u"です":1437,u"でに":-1482,u"では":2295,u"とう":-1387,u"とし":2266,u"との":541,u"とも":-3543,u"どう":4664,u"ない":1796,u"なく":-903,u"など":2135,u"に,u":-1021,u"に、":-1021,u"にし":1771,u"にな":1906,u"には":2644,u"の,u":-724,u"の、":-724,u"の子":-1000,u"は,u":1337,u"は、":1337,u"べき":2181,u"まし":1113,u"ます":6943,u"まっ":-1549,u"まで":6154,u"まれ":-793,u"らし":1479,u"られ":6820,u"るる":3818,u"れ,u":854,u"れ、":854,u"れた":1850,u"れて":1375,u"れば":-3246,u"れる":1091,u"われ":-605,u"んだ":606,u"んで":798,u"カ月":990,u"会議":860,u"入り":1232,u"大会":2217,u"始め":1681,u"市":965,u"新聞":-5055,u"日,u":974,u"日、":974,u"社会":2024,u"カ月":990};
self._TC1 = {u"AAA":1093,u"HHH":1029,u"HHM":580,u"HII":998,u"HOH":-390,u"HOM":-331,u"IHI":1169,u"IOH":-142,u"IOI":-1015,u"IOM":467,u"MMH":187,u"OOI":-1832};
self._TC2 = {u"HHO":2088,u"HII":-1023,u"HMM":-1154,u"IHI":-1965,u"KKH":703,u"OII":-2649};
self._TC3 = {u"AAA":-294,u"HHH":346,u"HHI":-341,u"HII":-1088,u"HIK":731,u"HOH":-1486,u"IHH":128,u"IHI":-3041,u"IHO":-1935,u"IIH":-825,u"IIM":-1035,u"IOI":-542,u"KHH":-1216,u"KKA":491,u"KKH":-1217,u"KOK":-1009,u"MHH":-2694,u"MHM":-457,u"MHO":123,u"MMH":-471,u"NNH":-1689,u"NNO":662,u"OHO":-3393};
self._TC4 = {u"HHH":-203,u"HHI":1344,u"HHK":365,u"HHM":-122,u"HHN":182,u"HHO":669,u"HIH":804,u"HII":679,u"HOH":446,u"IHH":695,u"IHO":-2324,u"IIH":321,u"III":1497,u"IIO":656,u"IOO":54,u"KAK":4845,u"KKA":3386,u"KKK":3065,u"MHH":-405,u"MHI":201,u"MMH":-241,u"MMM":661,u"MOM":841};
self._TQ1 = {u"BHHH":-227,u"BHHI":316,u"BHIH":-132,u"BIHH":60,u"BIII":1595,u"BNHH":-744,u"BOHH":225,u"BOOO":-908,u"OAKK":482,u"OHHH":281,u"OHIH":249,u"OIHI":200,u"OIIH":-68};
self._TQ2 = {u"BIHH":-1401,u"BIII":-1033,u"BKAK":-543,u"BOOO":-5591};
self._TQ3 = {u"BHHH":478,u"BHHM":-1073,u"BHIH":222,u"BHII":-504,u"BIIH":-116,u"BIII":-105,u"BMHI":-863,u"BMHM":-464,u"BOMH":620,u"OHHH":346,u"OHHI":1729,u"OHII":997,u"OHMH":481,u"OIHH":623,u"OIIH":1344,u"OKAK":2792,u"OKHH":587,u"OKKA":679,u"OOHH":110,u"OOII":-685};
self._TQ4 = {u"BHHH":-721,u"BHHM":-3604,u"BHII":-966,u"BIIH":-607,u"BIII":-2181,u"OAAA":-2763,u"OAKK":180,u"OHHH":-294,u"OHHI":2446,u"OHHO":480,u"OHIH":-1573,u"OIHH":1935,u"OIHI":-493,u"OIIH":626,u"OIII":-4007,u"OKAK":-8156};
self._TW1 = {u"につい":-4681,u"東京都":2026};
self._TW2 = {u"ある程":-2049,u"いった":-1256,u"ころが":-2434,u"しょう":3873,u"その後":-4430,u"だって":-1049,u"ていた":1833,u"として":-4657,u"ともに":-4517,u"もので":1882,u"一気に":-792,u"初めて":-1512,u"同時に":-8097,u"大きな":-1255,u"対して":-2721,u"社会党":-3216};
self._TW3 = {u"いただ":-1734,u"してい":1314,u"として":-4314,u"につい":-5483,u"にとっ":-5989,u"に当た":-6247,u"ので,":-727,u"ので、":-727,u"のもの":-600,u"れから":-3752,u"十二月":-2287};
self._TW4 = {u"いう.":8576,u"いう。":8576,u"からな":-2348,u"してい":2958,u"たが,":1516,u"たが、":1516,u"ている":1538,u"という":1349,u"ました":5543,u"ません":1097,u"ようと":-4258,u"よると":5865};
self._UC1 = {u"A":484,u"K":93,u"M":645,u"O":-505};
self._UC2 = {u"A":819,u"H":1059,u"I":409,u"M":3987,u"N":5775,u"O":646};
self._UC3 = {u"A":-1370,u"I":2311};
self._UC4 = {u"A":-2643,u"H":1809,u"I":-1032,u"K":-3450,u"M":3565,u"N":3876,u"O":6646};
self._UC5 = {u"H":313,u"I":-1238,u"K":-799,u"M":539,u"O":-831};
self._UC6 = {u"H":-506,u"I":-253,u"K":87,u"M":247,u"O":-387};
self._UP1 = {u"O":-214};
self._UP2 = {u"B":69,u"O":935};
self._UP3 = {u"B":189};
self._UQ1 = {u"BH":21,u"BI":-12,u"BK":-99,u"BN":142,u"BO":-56,u"OH":-95,u"OI":477,u"OK":410,u"OO":-2422};
self._UQ2 = {u"BH":216,u"BI":113,u"OK":1759};
self._UQ3 = {u"BA":-479,u"BH":42,u"BI":1913,u"BK":-7198,u"BM":3160,u"BN":6427,u"BO":14761,u"OI":-827,u"ON":-3212};
self._UW1 = {u",":156,u"、":156,u"「":-463,u"あ":-941,u"う":-127,u"が":-553,u"き":121,u"こ":505,u"で":-201,u"と":-547,u"ど":-123,u"に":-789,u"の":-185,u"は":-847,u"も":-466,u"や":-470,u"よ":182,u"ら":-292,u"り":208,u"れ":169,u"を":-446,u"ん":-137,u"・":-135,u"主":-402,u"京":-268,u"区":-912,u"午":871,u"国":-460,u"大":561,u"委":729,u"市":-411,u"日":-141,u"理":361,u"生":-408,u"県":-386,u"都":-718,u"｢":-463,u"･":-135};
self._UW2 = {u",":-829,u"、":-829,u"〇":892,u"「":-645,u"」":3145,u"あ":-538,u"い":505,u"う":134,u"お":-502,u"か":1454,u"が":-856,u"く":-412,u"こ":1141,u"さ":878,u"ざ":540,u"し":1529,u"す":-675,u"せ":300,u"そ":-1011,u"た":188,u"だ":1837,u"つ":-949,u"て":-291,u"で":-268,u"と":-981,u"ど":1273,u"な":1063,u"に":-1764,u"の":130,u"は":-409,u"ひ":-1273,u"べ":1261,u"ま":600,u"も":-1263,u"や":-402,u"よ":1639,u"り":-579,u"る":-694,u"れ":571,u"を":-2516,u"ん":2095,u"ア":-587,u"カ":306,u"キ":568,u"ッ":831,u"三":-758,u"不":-2150,u"世":-302,u"中":-968,u"主":-861,u"事":492,u"人":-123,u"会":978,u"保":362,u"入":548,u"初":-3025,u"副":-1566,u"北":-3414,u"区":-422,u"大":-1769,u"天":-865,u"太":-483,u"子":-1519,u"学":760,u"実":1023,u"小":-2009,u"市":-813,u"年":-1060,u"強":1067,u"手":-1519,u"揺":-1033,u"政":1522,u"文":-1355,u"新":-1682,u"日":-1815,u"明":-1462,u"最":-630,u"朝":-1843,u"本":-1650,u"東":-931,u"果":-665,u"次":-2378,u"民":-180,u"気":-1740,u"理":752,u"発":529,u"目":-1584,u"相":-242,u"県":-1165,u"立":-763,u"第":810,u"米":509,u"自":-1353,u"行":838,u"西":-744,u"見":-3874,u"調":1010,u"議":1198,u"込":3041,u"開":1758,u"間":-1257,u"｢":-645,u"｣":3145,u"ｯ":831,u"ｱ":-587,u"ｶ":306,u"ｷ":568};
self._UW3 = {u",":4889,u"1":-800,u"−":-1723,u"、":4889,u"々":-2311,u"〇":5827,u"」":2670,u"〓":-3573,u"あ":-2696,u"い":1006,u"う":2342,u"え":1983,u"お":-4864,u"か":-1163,u"が":3271,u"く":1004,u"け":388,u"げ":401,u"こ":-3552,u"ご":-3116,u"さ":-1058,u"し":-395,u"す":584,u"せ":3685,u"そ":-5228,u"た":842,u"ち":-521,u"っ":-1444,u"つ":-1081,u"て":6167,u"で":2318,u"と":1691,u"ど":-899,u"な":-2788,u"に":2745,u"の":4056,u"は":4555,u"ひ":-2171,u"ふ":-1798,u"へ":1199,u"ほ":-5516,u"ま":-4384,u"み":-120,u"め":1205,u"も":2323,u"や":-788,u"よ":-202,u"ら":727,u"り":649,u"る":5905,u"れ":2773,u"わ":-1207,u"を":6620,u"ん":-518,u"ア":551,u"グ":1319,u"ス":874,u"ッ":-1350,u"ト":521,u"ム":1109,u"ル":1591,u"ロ":2201,u"ン":278,u"・":-3794,u"一":-1619,u"下":-1759,u"世":-2087,u"両":3815,u"中":653,u"主":-758,u"予":-1193,u"二":974,u"人":2742,u"今":792,u"他":1889,u"以":-1368,u"低":811,u"何":4265,u"作":-361,u"保":-2439,u"元":4858,u"党":3593,u"全":1574,u"公":-3030,u"六":755,u"共":-1880,u"円":5807,u"再":3095,u"分":457,u"初":2475,u"別":1129,u"前":2286,u"副":4437,u"力":365,u"動":-949,u"務":-1872,u"化":1327,u"北":-1038,u"区":4646,u"千":-2309,u"午":-783,u"協":-1006,u"口":483,u"右":1233,u"各":3588,u"合":-241,u"同":3906,u"和":-837,u"員":4513,u"国":642,u"型":1389,u"場":1219,u"外":-241,u"妻":2016,u"学":-1356,u"安":-423,u"実":-1008,u"家":1078,u"小":-513,u"少":-3102,u"州":1155,u"市":3197,u"平":-1804,u"年":2416,u"広":-1030,u"府":1605,u"度":1452,u"建":-2352,u"当":-3885,u"得":1905,u"思":-1291,u"性":1822,u"戸":-488,u"指":-3973,u"政":-2013,u"教":-1479,u"数":3222,u"文":-1489,u"新":1764,u"日":2099,u"旧":5792,u"昨":-661,u"時":-1248,u"曜":-951,u"最":-937,u"月":4125,u"期":360,u"李":3094,u"村":364,u"東":-805,u"核":5156,u"森":2438,u"業":484,u"氏":2613,u"民":-1694,u"決":-1073,u"法":1868,u"海":-495,u"無":979,u"物":461,u"特":-3850,u"生":-273,u"用":914,u"町":1215,u"的":7313,u"直":-1835,u"省":792,u"県":6293,u"知":-1528,u"私":4231,u"税":401,u"立":-960,u"第":1201,u"米":7767,u"系":3066,u"約":3663,u"級":1384,u"統":-4229,u"総":1163,u"線":1255,u"者":6457,u"能":725,u"自":-2869,u"英":785,u"見":1044,u"調":-562,u"財":-733,u"費":1777,u"車":1835,u"軍":1375,u"込":-1504,u"通":-1136,u"選":-681,u"郎":1026,u"郡":4404,u"部":1200,u"金":2163,u"長":421,u"開":-1432,u"間":1302,u"関":-1282,u"雨":2009,u"電":-1045,u"非":2066,u"駅":1620,u"１":-800,u"｣":2670,u"･":-3794,u"ｯ":-1350,u"ｱ":551,u"ｸﾞ":1319,u"ｽ":874,u"ﾄ":521,u"ﾑ":1109,u"ﾙ":1591,u"ﾛ":2201,u"ﾝ":278};
self._UW4 = {u",":3930,u".":3508,u"―":-4841,u"、":3930,u"。":3508,u"〇":4999,u"「":1895,u"」":3798,u"〓":-5156,u"あ":4752,u"い":-3435,u"う":-640,u"え":-2514,u"お":2405,u"か":530,u"が":6006,u"き":-4482,u"ぎ":-3821,u"く":-3788,u"け":-4376,u"げ":-4734,u"こ":2255,u"ご":1979,u"さ":2864,u"し":-843,u"じ":-2506,u"す":-731,u"ず":1251,u"せ":181,u"そ":4091,u"た":5034,u"だ":5408,u"ち":-3654,u"っ":-5882,u"つ":-1659,u"て":3994,u"で":7410,u"と":4547,u"な":5433,u"に":6499,u"ぬ":1853,u"ね":1413,u"の":7396,u"は":8578,u"ば":1940,u"ひ":4249,u"び":-4134,u"ふ":1345,u"へ":6665,u"べ":-744,u"ほ":1464,u"ま":1051,u"み":-2082,u"む":-882,u"め":-5046,u"も":4169,u"ゃ":-2666,u"や":2795,u"ょ":-1544,u"よ":3351,u"ら":-2922,u"り":-9726,u"る":-14896,u"れ":-2613,u"ろ":-4570,u"わ":-1783,u"を":13150,u"ん":-2352,u"カ":2145,u"コ":1789,u"セ":1287,u"ッ":-724,u"ト":-403,u"メ":-1635,u"ラ":-881,u"リ":-541,u"ル":-856,u"ン":-3637,u"・":-4371,u"ー":-11870,u"一":-2069,u"中":2210,u"予":782,u"事":-190,u"井":-1768,u"人":1036,u"以":544,u"会":950,u"体":-1286,u"作":530,u"側":4292,u"先":601,u"党":-2006,u"共":-1212,u"内":584,u"円":788,u"初":1347,u"前":1623,u"副":3879,u"力":-302,u"動":-740,u"務":-2715,u"化":776,u"区":4517,u"協":1013,u"参":1555,u"合":-1834,u"和":-681,u"員":-910,u"器":-851,u"回":1500,u"国":-619,u"園":-1200,u"地":866,u"場":-1410,u"塁":-2094,u"士":-1413,u"多":1067,u"大":571,u"子":-4802,u"学":-1397,u"定":-1057,u"寺":-809,u"小":1910,u"屋":-1328,u"山":-1500,u"島":-2056,u"川":-2667,u"市":2771,u"年":374,u"庁":-4556,u"後":456,u"性":553,u"感":916,u"所":-1566,u"支":856,u"改":787,u"政":2182,u"教":704,u"文":522,u"方":-856,u"日":1798,u"時":1829,u"最":845,u"月":-9066,u"木":-485,u"来":-442,u"校":-360,u"業":-1043,u"氏":5388,u"民":-2716,u"気":-910,u"沢":-939,u"済":-543,u"物":-735,u"率":672,u"球":-1267,u"生":-1286,u"産":-1101,u"田":-2900,u"町":1826,u"的":2586,u"目":922,u"省":-3485,u"県":2997,u"空":-867,u"立":-2112,u"第":788,u"米":2937,u"系":786,u"約":2171,u"経":1146,u"統":-1169,u"総":940,u"線":-994,u"署":749,u"者":2145,u"能":-730,u"般":-852,u"行":-792,u"規":792,u"警":-1184,u"議":-244,u"谷":-1000,u"賞":730,u"車":-1481,u"軍":1158,u"輪":-1433,u"込":-3370,u"近":929,u"道":-1291,u"選":2596,u"郎":-4866,u"都":1192,u"野":-1100,u"銀":-2213,u"長":357,u"間":-2344,u"院":-2297,u"際":-2604,u"電":-878,u"領":-1659,u"題":-792,u"館":-1984,u"首":1749,u"高":2120,u"｢":1895,u"｣":3798,u"･":-4371,u"ｯ":-724,u"ｰ":-11870,u"ｶ":2145,u"ｺ":1789,u"ｾ":1287,u"ﾄ":-403,u"ﾒ":-1635,u"ﾗ":-881,u"ﾘ":-541,u"ﾙ":-856,u"ﾝ":-3637};
self._UW5 = {u",":465,u".":-299,u"1":-514,u"E2":-32768,u"]":-2762,u"、":465,u"。":-299,u"「":363,u"あ":1655,u"い":331,u"う":-503,u"え":1199,u"お":527,u"か":647,u"が":-421,u"き":1624,u"ぎ":1971,u"く":312,u"げ":-983,u"さ":-1537,u"し":-1371,u"す":-852,u"だ":-1186,u"ち":1093,u"っ":52,u"つ":921,u"て":-18,u"で":-850,u"と":-127,u"ど":1682,u"な":-787,u"に":-1224,u"の":-635,u"は":-578,u"べ":1001,u"み":502,u"め":865,u"ゃ":3350,u"ょ":854,u"り":-208,u"る":429,u"れ":504,u"わ":419,u"を":-1264,u"ん":327,u"イ":241,u"ル":451,u"ン":-343,u"中":-871,u"京":722,u"会":-1153,u"党":-654,u"務":3519,u"区":-901,u"告":848,u"員":2104,u"大":-1296,u"学":-548,u"定":1785,u"嵐":-1304,u"市":-2991,u"席":921,u"年":1763,u"思":872,u"所":-814,u"挙":1618,u"新":-1682,u"日":218,u"月":-4353,u"査":932,u"格":1356,u"機":-1508,u"氏":-1347,u"田":240,u"町":-3912,u"的":-3149,u"相":1319,u"省":-1052,u"県":-4003,u"研":-997,u"社":-278,u"空":-813,u"統":1955,u"者":-2233,u"表":663,u"語":-1073,u"議":1219,u"選":-1018,u"郎":-368,u"長":786,u"間":1191,u"題":2368,u"館":-689,u"１":-514,u"Ｅ２":-32768,u"｢":363,u"ｲ":241,u"ﾙ":451,u"ﾝ":-343};
self._UW6 = {u",":227,u".":808,u"1":-270,u"E1":306,u"、":227,u"。":808,u"あ":-307,u"う":189,u"か":241,u"が":-73,u"く":-121,u"こ":-200,u"じ":1782,u"す":383,u"た":-428,u"っ":573,u"て":-1014,u"で":101,u"と":-105,u"な":-253,u"に":-149,u"の":-417,u"は":-236,u"も":-206,u"り":187,u"る":-135,u"を":195,u"ル":-673,u"ン":-496,u"一":-277,u"中":201,u"件":-800,u"会":624,u"前":302,u"区":1792,u"員":-1212,u"委":798,u"学":-960,u"市":887,u"広":-695,u"後":535,u"業":-697,u"相":753,u"社":-507,u"福":974,u"空":-822,u"者":1811,u"連":463,u"郎":1082,u"１":-270,u"Ｅ１":306,u"ﾙ":-673,u"ﾝ":-496};
def _ts(self, table, key):
# look up a feature weight; unseen features contribute 0
return table.get(key, 0)
def _ctype(self, ch):
# map a character to its type code via the precompiled patterns
for regexp, label in self._chartype:
if regexp.match(ch):
return label
return u"O"
def tokenize(self, text):
if text == "":
return []
result = []
seg = [u"B3",u"B2",u"B1"]
ctype = [u"O",u"O",u"O"]
o = list(text)
for i in range(0, len(o)):
seg.append(o[i])
ctype.append(self._ctype(o[i]))
seg.append(u"E1")
seg.append(u"E2")
seg.append(u"E3")
ctype.append(u"O")
ctype.append(u"O")
ctype.append(u"O")
word = seg[3]
p1 = u"U"
p2 = u"U"
p3 = u"U"
for i in range(4, len(seg) - 3):
score = self._BIAS
w1 = seg[i-3]
w2 = seg[i-2]
w3 = seg[i-1]
w4 = seg[i]
w5 = seg[i+1]
w6 = seg[i+2]
c1 = ctype[i-3]
c2 = ctype[i-2]
c3 = ctype[i-1]
c4 = ctype[i]
c5 = ctype[i+1]
c6 = ctype[i+2]
score += self._ts(self._UP1, p1)
score += self._ts(self._UP2, p2)
score += self._ts(self._UP3, p3)
score += self._ts(self._BP1, p1 + p2)
score += self._ts(self._BP2, p2 + p3)
score += self._ts(self._UW1, w1)
score += self._ts(self._UW2, w2)
score += self._ts(self._UW3, w3)
score += self._ts(self._UW4, w4)
score += self._ts(self._UW5, w5)
score += self._ts(self._UW6, w6)
score += self._ts(self._BW1, w2 + w3)
score += self._ts(self._BW2, w3 + w4)
score += self._ts(self._BW3, w4 + w5)
score += self._ts(self._TW1, w1 + w2 + w3)
score += self._ts(self._TW2, w2 + w3 + w4)
score += self._ts(self._TW3, w3 + w4 + w5)
score += self._ts(self._TW4, w4 + w5 + w6)
score += self._ts(self._UC1, c1)
score += self._ts(self._UC2, c2)
score += self._ts(self._UC3, c3)
score += self._ts(self._UC4, c4)
score += self._ts(self._UC5, c5)
score += self._ts(self._UC6, c6)
score += self._ts(self._BC1, c2 + c3)
score += self._ts(self._BC2, c3 + c4)
score += self._ts(self._BC3, c4 + c5)
score += self._ts(self._TC1, c1 + c2 + c3)
score += self._ts(self._TC2, c2 + c3 + c4)
score += self._ts(self._TC3, c3 + c4 + c5)
score += self._ts(self._TC4, c4 + c5 + c6)
# score += self._ts(self._TC5, c4 + c5 + c6)
score += self._ts(self._UQ1, p1 + c1)
score += self._ts(self._UQ2, p2 + c2)
score += self._ts(self._UQ3, p3 + c3)
score += self._ts(self._BQ1, p2 + c2 + c3)
score += self._ts(self._BQ2, p2 + c3 + c4)
score += self._ts(self._BQ3, p3 + c2 + c3)
score += self._ts(self._BQ4, p3 + c3 + c4)
score += self._ts(self._TQ1, p2 + c1 + c2 + c3)
score += self._ts(self._TQ2, p2 + c2 + c3 + c4)
score += self._ts(self._TQ3, p3 + c1 + c2 + c3)
score += self._ts(self._TQ4, p3 + c2 + c3 + c4)
p = u"O"
if score > 0:
result.append(word)
word = ""
p = u"B"
p1 = p2
p2 = p3
p3 = p
word += seg[i]
result.append(word)
return result
def demo():
segmenter = TinySegmenter()
print ' | '.join(segmenter.tokenize(u"私の名前は中野です"))
if __name__ == '__main__':
demo()
| apache-2.0 |
MattFaus/CrowdTube-Connector | lib/gdata-2.0.18/tests/all_tests_cached.py | 41 | 1091 | #!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = '[email protected] (Jeff Scudder)'
import unittest
import all_tests
import gdata.test_config as conf
conf.options.set_value('runlive', 'true')
conf.options.set_value('savecache', 'true')
conf.options.set_value('clearcache', 'false')
def suite():
return unittest.TestSuite((all_tests.suite(),))
if __name__ == '__main__':
unittest.TextTestRunner().run(all_tests.suite())
| mit |
belokop-an/agenda-tools | code/MaKaC/webinterface/pages/links.py | 1 | 7493 | import MaKaC.webinterface.wcomponents as wcomponents
import MaKaC.webinterface.urlHandlers as urlHandlers
import MaKaC.webinterface.navigation as navigation
from MaKaC.webinterface.pages.conferences import WPConferenceBase,WPConferenceDefaultDisplayBase
class WPLinkBase( WPConferenceBase ):
def __init__( self, rh, file ):
self._file = file
WPConferenceBase.__init__( self, rh, self._file.getConference() )
class WPLinkDisplayBase( WPConferenceDefaultDisplayBase ):
def __init__(self, rh, link):
self._link = link
WPConferenceDefaultDisplayBase.__init__( self, rh, self._link.getConference() )
self._navigationTarget = link
class WPLinkModifBase( WPLinkBase ):
def _getHeader( self ):
"""
"""
wc = wcomponents.WManagementHeader( self._getAW().getUser(), self._getNavigationDrawer() )
return wc.getHTML( { "loginURL": self.getLoginURL(),\
"logoutURL": self.getLogoutURL() } )
def _getNavigationDrawer(self):
pars = { "categDisplayURLGen": urlHandlers.UHCategoryModification.getURL, \
"confDisplayURLGen": urlHandlers.UHConferenceModification.getURL, \
"conf": self._conf, "category": self._parentCateg }
return wcomponents.WConferenceNavigationDrawer( pars )
def _createTabCtrl( self ):
self._tabCtrl = wcomponents.TabControl()
self._tabMain = self._tabCtrl.newTab( "main", "Main", \
urlHandlers.UHLinkModification.getURL( self._file ) )
self._tabAC = self._tabCtrl.newTab( "ac", "Access Control", \
urlHandlers.UHLinkModifAC.getURL( self._file ) )
self._setActiveTab()
def _setActiveTab( self ):
pass
def _applyFrame( self, body ):
frame = wcomponents.WLinkModifFrame( self._file, self._getAW() )
p = { "categDisplayURLGen": urlHandlers.UHCategoryDisplay.getURL, \
"confDisplayURLGen": urlHandlers.UHConferenceDisplay.getURL, \
"confModifURLGen": urlHandlers.UHConferenceModification.getURL, \
"contribDisplayURLGen": urlHandlers.UHContributionDisplay.getURL, \
"contribModifURLGen": urlHandlers.UHContributionModification.getURL, \
"subContribDisplayURLGen": urlHandlers.UHSubContributionDisplay.getURL, \
"subContribModifURLGen": urlHandlers.UHSubContributionModification.getURL, \
"sessionDisplayURLGen": urlHandlers.UHSessionDisplay.getURL, \
"sessionModifURLGen": urlHandlers.UHSessionModification.getURL, \
"materialDisplayURLGen": urlHandlers.UHMaterialDisplay.getURL, \
"materialModifURLGen": urlHandlers.UHMaterialModification.getURL }
return frame.getHTML( body, **p )
def _getBody( self, params ):
self._createTabCtrl()
html = wcomponents.WTabControl( self._tabCtrl ).getHTML( self._getTabContent( params ) )
return self._applyFrame( html )
def _getTabContent( self, params ):
return "nothing"
class WLinkModifMain( wcomponents.WTemplated ):
def __init__( self, link ):
self._link = link
def getVars( self ):
vars = wcomponents.WTemplated.getVars( self )
vars["title"] = self._link.getName()
vars["description"] = self._link.getDescription()
vars["url"] = self._link.getURL()
return vars
class WPLinkModification( WPLinkModifBase ):
def _setActiveTab( self ):
self._tabMain.setActive()
def _getTabContent( self, params ):
wc = WLinkModifMain( self._file )
pars = { \
"modifyURL": urlHandlers.UHLinkModifyData.getURL( self._file ) }
return wc.getHTML( pars )
class WLinkDataModification(wcomponents.WResourceDataModification):
def getVars( self ):
vars = wcomponents.WResourceDataModification.getVars( self )
vars["url"] = self._resource.getURL()
return vars
class WPLinkDataModification( WPLinkModifBase ):
def _setActiveTab( self ):
self._tabMain.setActive()
def _getTabContent( self, params ):
wc = WLinkDataModification( self._file )
pars = { "postURL": urlHandlers.UHLinkPerformModifyData.getURL() }
return wc.getHTML( pars )
class WLinkModifAC( wcomponents.WTemplated ):
def __init__( self, link ):
self._link = link
def getVars( self ):
vars = wcomponents.WTemplated.getVars( self )
vars["accessControlFrame"] = wcomponents.WAccessControlFrame().getHTML(\
self._link,\
vars["setVisibilityURL"],\
vars["addAllowedURL"],\
vars["removeAllowedURL"] )
return vars
class WPLinkModifAC( WPLinkModifBase ):
def _setActiveTab( self ):
self._tabAC.setActive()
def _getTabContent( self, params ):
wc = WLinkModifAC( self._file )
pars = { \
"setVisibilityURL": urlHandlers.UHLinkSetVisibility.getURL() , \
"addAllowedURL": urlHandlers.UHLinkSelectAllowed.getURL(), \
"removeAllowedURL": urlHandlers.UHLinkRemoveAllowed.getURL() }
return wc.getHTML( pars )
class WPLinkSelectAllowed( WPLinkModifAC ):
def _getTabContent( self, params ):
wc = wcomponents.WPrincipalSelection( urlHandlers.UHLinkSelectAllowed.getURL() )
params["addURL"] = urlHandlers.UHLinkAddAllowed.getURL()
return wc.getHTML( params )
#class WPLinkDisplayModification( WPLinkDisplayBase ):
# navigationEntry = navigation.NELinkDisplayModification
#
# def _getBody( self, params ):
# wc = WLinkDisplayModification( self._link )
# pars = { \
# "modifyURL": urlHandlers.UHLinkDisplayDataModification.getURL( self._link ) }
# return wc.getHTML( pars )
#class WLinkDisplayModification(wcomponents.WTemplated):
#
# def __init__( self, link ):
# self._link = link
#
# def getVars( self ):
# vars = wcomponents.WTemplated.getVars( self )
# vars["title"] = self._link.getName()
# vars["description"] = self._link.getDescription()
# vars["url"] = self._link.getURL()
# return vars
#class WPLinkDisplayDataModification( WPLinkDisplayBase ):
# navigationEntry = navigation.NELinkDisplayModification
#
# def _getBody( self, params ):
# wc = WLinkDisplayDataModification( self._link )
# pars = { "postURL": urlHandlers.UHLinkDisplayPerformDataModification.getURL() }
# return wc.getHTML( pars )
#class WLinkDisplayDataModification( wcomponents.WTemplated ):
#
# def __init__( self, resource ):
# self._resource = resource
#
# def getHTML(self, params):
# str = """
# <form action="%s" method="POST" enctype="multipart/form-data">
# %s
# %s
# </form>
# """%(params["postURL"],\
# self._resource.getLocator().getWebForm(),\
# wcomponents.WTemplated.getHTML( self, params ) )
# return str
#
# def getVars( self ):
# vars = wcomponents.WTemplated.getVars( self )
# vars["title"] = self._resource.getName()
# vars["description"] = self._resource.getDescription()
# vars["url"] = self._resource.getURL()
# return vars
| gpl-2.0 |
OpenWhere/scrapy | scrapy/middleware.py | 68 | 2901 | import logging
from collections import defaultdict
from scrapy.exceptions import NotConfigured
from scrapy.utils.misc import load_object
from scrapy.utils.defer import process_parallel, process_chain, process_chain_both
logger = logging.getLogger(__name__)
class MiddlewareManager(object):
"""Base class for implementing middleware managers"""
component_name = 'foo middleware'
def __init__(self, *middlewares):
self.middlewares = middlewares
self.methods = defaultdict(list)
for mw in middlewares:
self._add_middleware(mw)
@classmethod
def _get_mwlist_from_settings(cls, settings):
raise NotImplementedError
@classmethod
def from_settings(cls, settings, crawler=None):
mwlist = cls._get_mwlist_from_settings(settings)
middlewares = []
for clspath in mwlist:
try:
mwcls = load_object(clspath)
if crawler and hasattr(mwcls, 'from_crawler'):
mw = mwcls.from_crawler(crawler)
elif hasattr(mwcls, 'from_settings'):
mw = mwcls.from_settings(settings)
else:
mw = mwcls()
middlewares.append(mw)
except NotConfigured as e:
if e.args:
clsname = clspath.split('.')[-1]
logger.warning("Disabled %(clsname)s: %(eargs)s",
{'clsname': clsname, 'eargs': e.args[0]},
extra={'crawler': crawler})
enabled = [x.__class__.__name__ for x in middlewares]
logger.info("Enabled %(componentname)ss: %(enabledlist)s",
{'componentname': cls.component_name,
'enabledlist': ', '.join(enabled)},
extra={'crawler': crawler})
return cls(*middlewares)
@classmethod
def from_crawler(cls, crawler):
return cls.from_settings(crawler.settings, crawler)
def _add_middleware(self, mw):
if hasattr(mw, 'open_spider'):
self.methods['open_spider'].append(mw.open_spider)
if hasattr(mw, 'close_spider'):
self.methods['close_spider'].insert(0, mw.close_spider)
def _process_parallel(self, methodname, obj, *args):
return process_parallel(self.methods[methodname], obj, *args)
def _process_chain(self, methodname, obj, *args):
return process_chain(self.methods[methodname], obj, *args)
def _process_chain_both(self, cb_methodname, eb_methodname, obj, *args):
return process_chain_both(self.methods[cb_methodname], \
self.methods[eb_methodname], obj, *args)
def open_spider(self, spider):
return self._process_parallel('open_spider', spider)
def close_spider(self, spider):
return self._process_parallel('close_spider', spider)
| bsd-3-clause |
GeorgEncinas/backtraking | path/generation_data.py | 1 | 2555 | import numpy as np
from dfs.vertex import Vertex
from dfs.edge import Edge
from dfs.graph import Graph
class Generator_Data():
def __init__(self):
self.graph = Graph()
self.come_back = False
def generate_node(self, data):
'''
data_sensor = data["distance_sensor"]
time = data["time"]
end = data["end"]'''
data_sensor = data[0]
end = data[1]
if not end:
vertex = Vertex(data_sensor)
vertex = self.graph.insert_vertex(vertex, self.come_back)
#gets the direction the car has to move in, and the position it has to return to if no path exists
direction_movement = self.dfs(vertex)
else:
direction_movement = None
return direction_movement
def dfs(self, vertex):
direction_movement = None
size_list_vertex = self.graph.get_size_list_vertex()
if (size_list_vertex >= 1):
direction_movement = vertex.get_direction_movement()
self.come_back = direction_movement is None
if self.come_back:
direction_movement = vertex.get_opposite(vertex.direction)
return direction_movement
'''
def look(self, vertex_1, vertex_2):
direction_movement = vertex_2.get_direction_movement()
#compare direction_movement: if None -> go back, else keep moving forward
position = None
if (direction_movement is None):
direction_movement = vertex_2.get_opposite_direction(vertex_2.get_direction)
position = vertex_1.get_position()
if (vertex_1 is not vertex_2):
list_conn_vertex = self.get_connected_vertexs(vertex_1)
# print list_conn_vertex
pos = self.list_vertex.index(vertex_1)
# print pos
if (len(list_conn_vertex) == 0):
self.j += 1
self.list_vertex[pos].set_post_visit(self.j)
stack.pop()
else:
for i in range(0, len(list_conn_vertex)):
self.j += 1
self.look(self.list_vertex[list_conn_vertex[i]], stack, vertex_2)
self.j += 1
self.list_vertex[pos].set_post_visit(self.j)
else:
print "end"
self.print_list(stack)
# print "imprimiendo"
# self.print_list(self.list_vertex)
return direction_movement, position'''
| mit |
heat-extras/heat-lib | lib/boot-config/elements/heat-config-cfn-init/install.d/hook-cfn-init.py | 11 | 2527 | #!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import logging
import os
import subprocess
import sys
# Ideally this path would be /var/lib/heat-cfntools/cfn-init-data
# but this is where all boot metadata is stored
LAST_METADATA_DIR = os.environ.get('HEAT_CFN_INIT_LAST_METADATA_DIR',
'/var/cache/heat-cfntools')
CFN_INIT_CMD = os.environ.get('HEAT_CFN_INIT_CMD',
'cfn-init')
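# For orientation only -- a hypothetical input/output pair, not a spec:
# stdin receives a JSON document such as
#   {"config": {"packages": {"yum": {"httpd": []}}}}
# whose "config" value is wrapped under "AWS::CloudFormation::Init" and
# written to last_metadata; after cfn-init runs, stdout receives
#   {"deploy_stdout": "...", "deploy_stderr": "...", "deploy_status_code": 0}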
def main(argv=sys.argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
log = logging.getLogger('heat-config')
handler = logging.StreamHandler(stderr)
handler.setFormatter(
logging.Formatter(
'[%(asctime)s] (%(name)s) [%(levelname)s] %(message)s'))
log.addHandler(handler)
log.setLevel('DEBUG')
c = json.load(stdin)
config = c.get('config', {})
if not isinstance(config, dict):
config = json.loads(config)
meta = {'AWS::CloudFormation::Init': config}
if not os.path.isdir(LAST_METADATA_DIR):
os.makedirs(LAST_METADATA_DIR, 0o700)
fn = os.path.join(LAST_METADATA_DIR, 'last_metadata')
with os.fdopen(os.open(fn, os.O_CREAT | os.O_WRONLY | os.O_TRUNC, 0o700),
'w') as f:
json.dump(meta, f)
log.debug('Running %s' % CFN_INIT_CMD)
subproc = subprocess.Popen([CFN_INIT_CMD], stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
cstdout, cstderr = subproc.communicate()
if cstdout:
log.info(cstdout)
if cstderr:
log.info(cstderr)
if subproc.returncode:
log.error("Error running %s. [%s]\n" % (
CFN_INIT_CMD, subproc.returncode))
else:
log.info('Completed %s' % CFN_INIT_CMD)
response = {
'deploy_stdout': cstdout,
'deploy_stderr': cstderr,
'deploy_status_code': subproc.returncode,
}
json.dump(response, stdout)
if __name__ == '__main__':
sys.exit(main())
| apache-2.0 |
zstackio/zstack-utility | bm-instance-agent/bm_instance_agent/api/controllers/v2/__init__.py | 1 | 1632 | from pecan import expose
from pecan.rest import RestController
from bm_instance_agent.api.controllers.v2 import console
from bm_instance_agent.api.controllers.v2 import default_route
from bm_instance_agent.api.controllers.v2 import nic
from bm_instance_agent.api.controllers.v2 import password
from bm_instance_agent.api.controllers.v2 import ping
from bm_instance_agent.api.controllers.v2 import reboot
from bm_instance_agent.api.controllers.v2 import stop
from bm_instance_agent.api.controllers.v2 import volume
class Controller(RestController):
_subcontroller_map = {
'console': console.ConsoleController,
'defaultRoute': default_route.DefaultRouteController,
'nic': nic.NicController,
'password': password.PasswordController,
'ping': ping.PingController,
'reboot': reboot.RebootController,
'stop': stop.StopController,
'volume': volume.VolumeController
}
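# Illustrative routing: a GET for .../v2/ping reaches _lookup with
# remainder == ('ping',), which returns (PingController(), ()) so pecan
# dispatches the remainder of the request to that controller.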
# @expose()
# def _lookup(self, bm_uuid, *remainder):
# if not remainder:
# return
# subcontroller = self._subcontroller_map.get(remainder[0])
# if subcontroller:
# return subcontroller(bm_uuid=bm_uuid), remainder[1:]
@expose()
def _lookup(self, *remainder):
if not remainder:
return
subcontroller = self._subcontroller_map.get(remainder[0])
if subcontroller:
return subcontroller(), remainder[1:]
@expose(template='json')
def get(self):
return {
'name': 'Baremetal instance agent v2',
'description': ''
}
__all__ = ('Controller',)
| apache-2.0 |
popazerty/bnigma2 | lib/python/Screens/Screen.py | 14 | 4499 | from Tools.Profile import profile
profile("LOAD:GUISkin")
from Components.GUISkin import GUISkin
profile("LOAD:Source")
from Components.Sources.Source import Source
profile("LOAD:GUIComponent")
from Components.GUIComponent import GUIComponent
profile("LOAD:eRCInput")
from enigma import eRCInput
class Screen(dict, GUISkin):
SUSPEND_NONE, SUSPEND_STOPS, SUSPEND_PAUSES = range(3)
ALLOW_SUSPEND = SUSPEND_NONE
global_screen = None
def __init__(self, session, parent = None):
dict.__init__(self)
self.skinName = self.__class__.__name__
self.session = session
self.parent = parent
GUISkin.__init__(self)
self.onClose = [ ]
self.onFirstExecBegin = [ ]
self.onExecBegin = [ ]
self.onShown = [ ]
self.onShow = [ ]
self.onHide = [ ]
self.execing = False
self.shown = True
# already shown is false until the screen is really shown (after creation)
self.already_shown = False
self.renderer = [ ]
# in order to support screens *without* a help,
# we need the list in every screen. how ironic.
self.helpList = [ ]
self.close_on_next_exec = None
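# holds the return values of a close() issued while the screen was not
# executing; execBegin() picks them up and performs the deferred close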
# stand alone screens (for example web screens)
# don't care about having or not having focus.
self.stand_alone = False
self.keyboardMode = None
def saveKeyboardMode(self):
rcinput = eRCInput.getInstance()
self.keyboardMode = rcinput.getKeyboardMode()
def setKeyboardModeAscii(self):
rcinput = eRCInput.getInstance()
rcinput.setKeyboardMode(rcinput.kmAscii)
def setKeyboardModeNone(self):
rcinput = eRCInput.getInstance()
rcinput.setKeyboardMode(rcinput.kmNone)
def restoreKeyboardMode(self):
rcinput = eRCInput.getInstance()
if self.keyboardMode is not None:
rcinput.setKeyboardMode(self.keyboardMode)
def execBegin(self):
self.active_components = [ ]
if self.close_on_next_exec is not None:
tmp = self.close_on_next_exec
self.close_on_next_exec = None
self.execing = True
self.close(*tmp)
else:
single = self.onFirstExecBegin
self.onFirstExecBegin = []
for x in self.onExecBegin + single:
x()
if not self.stand_alone and self.session.current_dialog != self:
return
# assert self.session == None, "a screen can only exec once per time"
# self.session = session
for val in self.values() + self.renderer:
val.execBegin()
if not self.stand_alone and self.session.current_dialog != self:
return
self.active_components.append(val)
self.execing = True
for x in self.onShown:
x()
def execEnd(self):
active_components = self.active_components
# for (name, val) in self.items():
self.active_components = None
for val in active_components:
val.execEnd()
# assert self.session != None, "execEnd on non-execing screen!"
# self.session = None
self.execing = False
# never call this directly - it will be called from the session!
def doClose(self):
self.hide()
for x in self.onClose:
x()
# fixup circular references
del self.helpList
GUISkin.close(self)
# first disconnect all render from their sources.
# we might split this out into a "unskin"-call,
# but currently we destroy the screen afterwards
# anyway.
for val in self.renderer:
val.disconnectAll() # disconnected converter/sources and probably destroy them. Sources will not be destroyed.
del self.session
for (name, val) in self.items():
val.destroy()
del self[name]
self.renderer = [ ]
# really delete all elements now
self.__dict__.clear()
def close(self, *retval):
if not self.execing:
self.close_on_next_exec = retval
else:
self.session.close(self, *retval)
def setFocus(self, o):
self.instance.setFocus(o.instance)
def show(self):
if (self.shown and self.already_shown) or not self.instance:
return
self.shown = True
self.already_shown = True
self.instance.show()
for x in self.onShow:
x()
for val in self.values() + self.renderer:
if isinstance(val, GUIComponent) or isinstance(val, Source):
val.onShow()
def hide(self):
if not self.shown or not self.instance:
return
self.shown = False
self.instance.hide()
for x in self.onHide:
x()
for val in self.values() + self.renderer:
if isinstance(val, GUIComponent) or isinstance(val, Source):
val.onHide()
def __repr__(self):
return str(type(self))
def getRelatedScreen(self, name):
if name == "session":
return self.session.screen
elif name == "parent":
return self.parent
elif name == "global":
return self.global_screen
else:
return None
| gpl-2.0 |
EricNeedham/assignment-1 | venv/lib/python2.7/site-packages/jinja2/testsuite/security.py | 415 | 6204 | # -*- coding: utf-8 -*-
"""
jinja2.testsuite.security
~~~~~~~~~~~~~~~~~~~~~~~~~
Checks the sandbox and other security features.
:copyright: (c) 2010 by the Jinja Team.
:license: BSD, see LICENSE for more details.
"""
import unittest
from jinja2.testsuite import JinjaTestCase
from jinja2 import Environment
from jinja2.sandbox import SandboxedEnvironment, \
ImmutableSandboxedEnvironment, unsafe
from jinja2 import Markup, escape
from jinja2.exceptions import SecurityError, TemplateSyntaxError, \
TemplateRuntimeError
from jinja2._compat import text_type
class PrivateStuff(object):
def bar(self):
return 23
@unsafe
def foo(self):
return 42
def __repr__(self):
return 'PrivateStuff'
class PublicStuff(object):
bar = lambda self: 23
_foo = lambda self: 42
def __repr__(self):
return 'PublicStuff'
class SandboxTestCase(JinjaTestCase):
def test_unsafe(self):
env = SandboxedEnvironment()
self.assert_raises(SecurityError, env.from_string("{{ foo.foo() }}").render,
foo=PrivateStuff())
self.assert_equal(env.from_string("{{ foo.bar() }}").render(foo=PrivateStuff()), '23')
self.assert_raises(SecurityError, env.from_string("{{ foo._foo() }}").render,
foo=PublicStuff())
self.assert_equal(env.from_string("{{ foo.bar() }}").render(foo=PublicStuff()), '23')
self.assert_equal(env.from_string("{{ foo.__class__ }}").render(foo=42), '')
self.assert_equal(env.from_string("{{ foo.func_code }}").render(foo=lambda:None), '')
# security error comes from __class__ already.
self.assert_raises(SecurityError, env.from_string(
"{{ foo.__class__.__subclasses__() }}").render, foo=42)
def test_immutable_environment(self):
env = ImmutableSandboxedEnvironment()
self.assert_raises(SecurityError, env.from_string(
'{{ [].append(23) }}').render)
self.assert_raises(SecurityError, env.from_string(
'{{ {1:2}.clear() }}').render)
def test_restricted(self):
env = SandboxedEnvironment()
self.assert_raises(TemplateSyntaxError, env.from_string,
"{% for item.attribute in seq %}...{% endfor %}")
self.assert_raises(TemplateSyntaxError, env.from_string,
"{% for foo, bar.baz in seq %}...{% endfor %}")
def test_markup_operations(self):
# adding two strings should escape the unsafe one
unsafe = '<script type="application/x-some-script">alert("foo");</script>'
safe = Markup('<em>username</em>')
assert unsafe + safe == text_type(escape(unsafe)) + text_type(safe)
# string interpolations are safe to use too
assert Markup('<em>%s</em>') % '<bad user>' == \
'<em>&lt;bad user&gt;</em>'
assert Markup('<em>%(username)s</em>') % {
'username': '<bad user>'
} == '<em>&lt;bad user&gt;</em>'
# an escaped object is markup too
assert type(Markup('foo') + 'bar') is Markup
# and it implements __html__ by returning itself
x = Markup("foo")
assert x.__html__() is x
# it also knows how to treat __html__ objects
class Foo(object):
def __html__(self):
return '<em>awesome</em>'
def __unicode__(self):
return 'awesome'
assert Markup(Foo()) == '<em>awesome</em>'
assert Markup('<strong>%s</strong>') % Foo() == \
'<strong><em>awesome</em></strong>'
# escaping and unescaping
assert escape('"<>&\'') == '"<>&''
assert Markup("<em>Foo & Bar</em>").striptags() == "Foo & Bar"
assert Markup("<test>").unescape() == "<test>"
def test_template_data(self):
env = Environment(autoescape=True)
t = env.from_string('{% macro say_hello(name) %}'
'<p>Hello {{ name }}!</p>{% endmacro %}'
'{{ say_hello("<blink>foo</blink>") }}')
escaped_out = '<p>Hello &lt;blink&gt;foo&lt;/blink&gt;!</p>'
assert t.render() == escaped_out
assert text_type(t.module) == escaped_out
assert escape(t.module) == escaped_out
assert t.module.say_hello('<blink>foo</blink>') == escaped_out
assert escape(t.module.say_hello('<blink>foo</blink>')) == escaped_out
def test_attr_filter(self):
env = SandboxedEnvironment()
tmpl = env.from_string('{{ cls|attr("__subclasses__")() }}')
self.assert_raises(SecurityError, tmpl.render, cls=int)
def test_binary_operator_intercepting(self):
def disable_op(left, right):
raise TemplateRuntimeError('that operator so does not work')
for expr, ctx, rv in ('1 + 2', {}, '3'), ('a + 2', {'a': 2}, '4'):
env = SandboxedEnvironment()
env.binop_table['+'] = disable_op
t = env.from_string('{{ %s }}' % expr)
assert t.render(ctx) == rv
env.intercepted_binops = frozenset(['+'])
t = env.from_string('{{ %s }}' % expr)
try:
t.render(ctx)
except TemplateRuntimeError as e:
pass
else:
self.fail('expected runtime error')
def test_unary_operator_intercepting(self):
def disable_op(arg):
raise TemplateRuntimeError('that operator so does not work')
for expr, ctx, rv in ('-1', {}, '-1'), ('-a', {'a': 2}, '-2'):
env = SandboxedEnvironment()
env.unop_table['-'] = disable_op
t = env.from_string('{{ %s }}' % expr)
assert t.render(ctx) == rv
env.intercepted_unops = frozenset(['-'])
t = env.from_string('{{ %s }}' % expr)
try:
t.render(ctx)
except TemplateRuntimeError as e:
pass
else:
self.fail('expected runtime error')
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(SandboxTestCase))
return suite
| mit |
tensorflow/neural-structured-learning | research/carls/dynamic_embedding_ops_test.py | 1 | 10884 | # Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for neural_structured_learning.research.carls.dynamic_embedding_ops."""
import itertools
from absl.testing import parameterized
from research.carls import context
from research.carls import dynamic_embedding_ops as de_ops
from research.carls.testing import test_util
import numpy as np
import tensorflow as tf
class DynamicEmbeddingOpsTest(tf.test.TestCase, parameterized.TestCase):
def setUp(self):
super(DynamicEmbeddingOpsTest, self).setUp()
self._config = test_util.default_de_config(2)
self._service_server = test_util.start_kbs_server()
self._kbs_address = 'localhost:%d' % self._service_server.port()
context.clear_all_collection()
def tearDown(self):
self._service_server.Terminate()
super(DynamicEmbeddingOpsTest, self).tearDown()
@parameterized.parameters(itertools.product((True, False), (1000, -1)))
def testLookup_1DInput(self, skip_gradient, timeout_ms):
init = self._config.knowledge_bank_config.initializer
init.default_embedding.value.append(1)
init.default_embedding.value.append(2)
embedding = de_ops.dynamic_embedding_lookup(
['first'],
self._config,
'emb',
service_address=self._kbs_address,
skip_gradient_update=skip_gradient,
timeout_ms=timeout_ms)
self.assertAllClose(embedding.numpy(), [[1, 2]])
embedding = de_ops.dynamic_embedding_lookup(
['first', 'second', ''],
self._config,
'emb',
service_address=self._kbs_address,
skip_gradient_update=skip_gradient)
self.assertAllClose(embedding.numpy(), [[1, 2], [1, 2], [0, 0]])
@parameterized.parameters({True, False})
def testLookup_2DInput(self, skip_gradient):
init = self._config.knowledge_bank_config.initializer
init.default_embedding.value.append(1)
init.default_embedding.value.append(2)
embedding = de_ops.dynamic_embedding_lookup(
[['first', 'second'], ['third', '']],
self._config,
'emb',
service_address=self._kbs_address,
skip_gradient_update=skip_gradient)
self.assertAllClose(embedding.numpy(), [[[1, 2], [1, 2]], [[1, 2], [0, 0]]])
@parameterized.parameters((True, True), (True, False), (False, False),
(False, True))
def testUpdate_1DInput(self, use_kbs_address, skip_gradient):
init = self._config.knowledge_bank_config.initializer
init.default_embedding.value.append(1)
init.default_embedding.value.append(2)
embedding = de_ops.dynamic_embedding_lookup(
['first'],
self._config,
'emb',
service_address=self._kbs_address,
skip_gradient_update=skip_gradient)
self.assertAllClose(embedding.numpy(), [[1, 2]])
update_res = de_ops.dynamic_embedding_update(
['first'],
tf.constant([[2.0, 4.0]]),
self._config,
'emb',
service_address=self._kbs_address,
)
self.assertAllClose(update_res.numpy(), [[2, 4]])
embedding = de_ops.dynamic_embedding_lookup(
['first'],
self._config,
'emb',
service_address=self._kbs_address,
skip_gradient_update=skip_gradient)
self.assertAllClose(embedding.numpy(), [[2, 4]])
# Allows keys' shape to be [N, 1] and values shape to be [N, D].
update_res = de_ops.dynamic_embedding_update(
[['first']],
tf.constant([[4.0, 5.0]]),
self._config,
'emb',
service_address=self._kbs_address)
self.assertAllClose(update_res.numpy(), [[4, 5]])
embedding = de_ops.dynamic_embedding_lookup(
['first'],
self._config,
'emb',
service_address=self._kbs_address,
skip_gradient_update=skip_gradient)
self.assertAllClose(embedding.numpy(), [[4, 5]])
@parameterized.parameters({True, False})
def testUpdate_2DInput(self, skip_gradient):
init = self._config.knowledge_bank_config.initializer
init.default_embedding.value.append(1)
init.default_embedding.value.append(2)
embedding = de_ops.dynamic_embedding_lookup(
[['first', 'second'], ['third', '']],
self._config,
'emb',
service_address=self._kbs_address,
skip_gradient_update=skip_gradient)
self.assertAllClose(embedding.numpy(), [[[1, 2], [1, 2]], [[1, 2], [0, 0]]])
# The values for an empty key should be ignored.
update_res = de_ops.dynamic_embedding_update(
[['first', 'second'], ['third', '']],
tf.constant([[[2.0, 4.0], [4.0, 8.0]], [[8.0, 16.0], [16.0, 32.0]]]),
self._config,
'emb',
service_address=self._kbs_address,
)
self.assertAllClose(update_res.numpy(),
[[[2, 4], [4, 8]], [[8, 16], [0, 0]]])
embedding = de_ops.dynamic_embedding_lookup(
[['first', 'second'], ['third', '']],
self._config,
'emb',
service_address=self._kbs_address,
skip_gradient_update=skip_gradient)
self.assertAllClose(embedding.numpy(),
[[[2, 4], [4, 8]], [[8, 16], [0, 0]]])
# Allows keys' shape to be [N1, N2, 1] and values shape to be [N1, N2, D].
update_res = de_ops.dynamic_embedding_update(
[[['first'], ['second']], [['third'], ['']]],
tf.constant([[[3.0, 5.0], [5.0, 9.0]], [[9.0, 17.0], [17.0, 33.0]]]),
self._config,
'emb',
service_address=self._kbs_address,
)
self.assertAllClose(update_res.numpy(),
[[[3, 5], [5, 9]], [[9, 17], [0, 0]]])
embedding = de_ops.dynamic_embedding_lookup(
[['first', 'second'], ['third', '']],
self._config,
'emb',
service_address=self._kbs_address,
skip_gradient_update=skip_gradient)
self.assertAllClose(embedding.numpy(),
[[[3, 5], [5, 9]], [[9, 17], [0, 0]]])
def testWrongAddress(self):
init = self._config.knowledge_bank_config.initializer
init.default_embedding.value.append(1)
init.default_embedding.value.append(2)
with self.assertRaisesRegex(Exception, 'DynamicEmbeddingManager is NULL.'):
de_ops.dynamic_embedding_lookup(['first', 'second', ''],
self._config,
'emb',
'wrongaddress',
timeout_ms=10)
def testTrainingLogistic(self):
embedding_dimension = 5
self._config.embedding_dimension = embedding_dimension
# Set initial embedding to be all zero's.
init = self._config.knowledge_bank_config.initializer
for _ in range(embedding_dimension):
init.default_embedding.value.append(0)
# Create variables.
initializer = tf.ones_initializer()
w = tf.Variable(
initializer(shape=[embedding_dimension, 1], dtype=tf.float32))
b = tf.Variable(0.0)
trainable_variables = [w, b]
# Create an optimizer.
optimizer = tf.keras.optimizers.SGD(learning_rate=0.1)
# Conducts one step of gradient descent.
ids = np.array(['yes', 'no', 'good', 'bad'])
y = np.array([[1], [0], [1], [0]])
with tf.GradientTape() as tape:
embedding = de_ops.dynamic_embedding_lookup(
ids,
self._config,
'emb',
service_address=self._kbs_address,
skip_gradient_update=False)
logit = tf.linalg.matmul(embedding, w) + b
pred = 1 / (1 + tf.exp(-logit))
loss = y * tf.math.log(pred) + (1 - y) * tf.math.log(1 - pred)
grads = tape.gradient(loss, trainable_variables)
# Update the trainable variables w.r.t. the logistic loss
optimizer.apply_gradients(zip(grads, trainable_variables))
# Checks that the embeddings are updated.
new_embedding = de_ops.dynamic_embedding_lookup(
ids,
self._config,
'emb',
service_address=self._kbs_address,
skip_gradient_update=False)
distance = np.sum((new_embedding.numpy() - embedding.numpy())**2)
self.assertGreater(distance, 0)
# Checks that the new loss is smaller.
new_logit = tf.linalg.matmul(new_embedding, w) + b
new_pred = 1 / (1 + tf.exp(-new_logit))
new_loss = y * tf.math.log(new_pred) + (1 - y) * tf.math.log(1 - new_pred)
for old, new in zip(loss.numpy(), new_loss.numpy()):
self.assertLess(new[0], old[0])
def _create_dataset(self):
"""Returns a tf.data.Dataset with dynamic embedding as input."""
dataset = tf.data.Dataset.range(100)
dataset = dataset.batch(batch_size=4, drop_remainder=True)
def _parse(example):
string_ids = tf.strings.as_string(example)
input_embed = de_ops.dynamic_embedding_lookup(
string_ids,
self._config,
'input_embed',
service_address=self._kbs_address,
skip_gradient_update=True)
return input_embed
dataset = dataset.map(_parse, num_parallel_calls=2)
return dataset
def testDynamicEmbeddingTfDataset(self):
"""Test DynamicEmbedding's compatibility with tf.data.Dataset API."""
dataset = self._create_dataset()
for data in dataset:
self.assertAllEqual([4, 2], data.shape)
def testDynamicEmbeddingKerasInterface_KerasLayer(self):
de_layer = de_ops.DynamicEmbeddingLookup(
self._config, 'embed', service_address=self._kbs_address)
# 1D case.
embed = de_layer(np.array(['key1', 'key2', 'key3']))
self.assertEqual((3, 2), embed.shape)
# 2D case.
embed = de_layer(np.array([['key1', 'key2'], ['key3', '']]))
self.assertEqual((2, 2, 2), embed.shape)
def testDynamicEmbeddingKerasInterface_KerasModel(self):
"""A simple Logistic Regression Keras model."""
string_ids = np.array([['yes'], ['no'], ['good'], ['bad']])
y_train = np.array([[[1, 0]], [[0, 1]], [[1, 0]], [[0, 1]]])
model = tf.keras.models.Sequential([
de_ops.DynamicEmbeddingLookup(
self._config, 'embed', service_address=self._kbs_address),
tf.keras.layers.Dense(2, activation='softmax')
])
model.compile(
optimizer='sgd', loss='categorical_crossentropy', metrics=['accuracy'])
history = model.fit(string_ids, y_train, epochs=10)
# Checks that the loss is decreased.
self.assertLess(history.history['loss'][-1], history.history['loss'][0])
if __name__ == '__main__':
tf.test.main()
| apache-2.0 |
rlindner81/pyload | module/plugins/hoster/PromptfileCom.py | 1 | 1896 | # -*- coding: utf-8 -*-
import re
from module.plugins.internal.SimpleHoster import SimpleHoster
class PromptfileCom(SimpleHoster):
__name__ = "PromptfileCom"
__type__ = "hoster"
__version__ = "0.18"
__status__ = "testing"
__pattern__ = r'https?://(?:www\.)?promptfile\.com/'
__config__ = [("activated", "bool", "Activated", True),
("use_premium", "bool", "Use premium account if available", True),
("fallback", "bool",
"Fallback to free download if premium fails", True),
("chk_filesize", "bool", "Check file size", True),
("max_wait", "int", "Reconnect if waiting time is greater than minutes", 10)]
__description__ = """Promptfile.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("igel", "[email protected]"),
("ondrej", "[email protected]")]
INFO_PATTERN = r'<span style=".+?" title=".+?">(?P<N>.*?) \((?P<S>[\d.,]+) (?P<U>[\w^_]+)\)</span>'
OFFLINE_PATTERN = r'<span style=".+?" title="File Not Found">File Not Found</span>'
CHASH_PATTERN = r'input.+"([a-z\d]{10,})".+"([a-z\d]{10,})"'
MODIFY_PATTERN = r'\$\("#chash"\)\.val\("(.+)"\+\$\("#chash"\)'
LINK_FREE_PATTERN = r'<a href="(http://www\.promptfile\.com/file/[^"]+)'
def handle_free(self, pyfile):
#: STAGE 1: get link to continue
m = re.search(self.CHASH_PATTERN, self.data)
if m is None:
self.error(_("CHASH_PATTERN not found"))
mod = re.search(self.MODIFY_PATTERN, self.data)
if mod is None:
self.error(_("MODIFY_PATTERN not found"))
payload = {
m.group(1): mod.group(1) + m.group(2)
}
self.log_debug("Read chash: " + str(payload))
#: Continue to stage2
self.data = self.load(pyfile.url, post=payload)
#: STAGE 2: get the direct link
return super(PromptfileCom, self).handle_free(pyfile)
| gpl-3.0 |
wangd/rhythmbox | plugins/jamendo/JamendoConfigureDialog.py | 1 | 1924 | # -*- coding: utf-8 -*-
# JamendoConfigureDialog.py
#
# Copyright (C) 2007 - Guillaume Desmottes
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import rb
from gi.repository import Gtk, Gio, GObject, PeasGtk
format_list = ['ogg3', 'mp32']
class JamendoConfigureDialog (GObject.Object, PeasGtk.Configurable):
__gtype_name__ = 'JamendoConfigureDialog'
object = GObject.property(type=GObject.Object)
def __init__(self):
GObject.Object.__init__(self)
self.settings = Gio.Settings("org.gnome.rhythmbox.plugins.jamendo")
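# keep a reference to ourselves alive (apparently a workaround for the
# wrapper object being garbage-collected while the dialog is in use)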
self.hate = self
def do_create_configure_widget(self):
builder = Gtk.Builder()
builder.add_from_file(rb.find_plugin_file(self, "jamendo-prefs.ui"))
self.config = builder.get_object('config')
self.audio_combobox = builder.get_object("audio_combobox")
# probably should just bind this, but too lazy
format_text = self.settings['format']
if not format_text:
format_text = "ogg3"
try:
format = format_list.index(format_text)
except ValueError:
format = 0
self.audio_combobox.set_active(format)
self.audio_combobox.connect("changed", self.audio_combobox_changed)
return self.config
def audio_combobox_changed (self, combobox):
format = self.audio_combobox.get_active()
self.settings['format'] = format_list[format]
| gpl-2.0 |
CoolCloud/python-oauth2 | docs/examples/base_server.py | 5 | 2404 | from wsgiref.simple_server import make_server
import oauth2
import oauth2.grant
import oauth2.error
import oauth2.store.memory
import oauth2.tokengenerator
import oauth2.web.wsgi
# Create a SiteAdapter to interact with the user.
# This can be used to display confirmation dialogs and the like.
class ExampleSiteAdapter(oauth2.web.AuthorizationCodeGrantSiteAdapter,
oauth2.web.ImplicitGrantSiteAdapter):
def authenticate(self, request, environ, scopes, client):
# Check if the user has granted access
if request.post_param("confirm") == "confirm":
return {}
raise oauth2.error.UserNotAuthenticated
def render_auth_page(self, request, response, environ, scopes, client):
response.body = '''
<html>
<body>
<form method="POST" name="confirmation_form">
<input type="submit" name="confirm" value="confirm" />
<input type="submit" name="deny" value="deny" />
</form>
</body>
</html>'''
return response
def user_has_denied_access(self, request):
# Check if the user has denied access
if request.post_param("deny") == "deny":
return True
return False
# Create an in-memory storage to store your client apps.
client_store = oauth2.store.memory.ClientStore()
# Add a client
client_store.add_client(client_id="abc", client_secret="xyz",
redirect_uris=["http://localhost/callback"])
site_adapter = ExampleSiteAdapter()
# Create an in-memory storage to store issued tokens.
# LocalTokenStore can store access and auth tokens
token_store = oauth2.store.memory.TokenStore()
# Create the controller.
provider = oauth2.Provider(
access_token_store=token_store,
auth_code_store=token_store,
client_store=client_store,
token_generator=oauth2.tokengenerator.Uuid4()
)
# Add Grants you want to support
provider.add_grant(oauth2.grant.AuthorizationCodeGrant(site_adapter=site_adapter))
provider.add_grant(oauth2.grant.ImplicitGrant(site_adapter=site_adapter))
# Add refresh token capability and set expiration time of access tokens
# to 30 days
provider.add_grant(oauth2.grant.RefreshToken(expires_in=2592000))
# Wrap the controller with the Wsgi adapter
app = oauth2.web.wsgi.Application(provider=provider)
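# A hypothetical manual walk-through, assuming the library's default
# endpoint paths /authorize and /token (client data matches the store above):
# 1. GET http://localhost:8080/authorize?response_type=code&client_id=abc
#    &redirect_uri=http%3A%2F%2Flocalhost%2Fcallback
#    then submit the "confirm" button; the redirect carries ?code=...
# 2. POST http://localhost:8080/token with grant_type=authorization_code,
#    code, client_id=abc, client_secret=xyz and the same redirect_uri to
#    exchange the code for an access token (and a refresh token).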
if __name__ == "__main__":
httpd = make_server('', 8080, app)
httpd.serve_forever()
| mit |
kamijawa/ogc_server | soap_server.py | 2 | 4194 | # -*- coding: utf-8 -*-
import os, sys
import json
import datetime
from gevent import pywsgi
import gevent
from pysimplesoap.server import SoapDispatcher, WSGISOAPHandler
import exceptions
import time
import socket
import urllib, urllib2, urlparse
from socket import error
import errno
import cgi
import configobj
from lxml import etree
ENCODING = 'utf-8'
CONFIGFILE = 'ogc-config.ini'
gConfig = configobj.ConfigObj(CONFIGFILE, encoding='UTF8')
def webservice_login(username, password):
return 'OK'
#in0 (start time) and in1 (end time) are passed as strings (e.g. "20070808200800", "20070808200810")
#times must be accurate to the second, in the format YYYYMMddHHmmss
def webservice_GetFlashofDate(in0, in1):
#1. Data exists for this period:
#<?xml version="1.0" encoding="UTF-8"?>
#<Flashs>
#<Flash>
#<Time>2007-01-01 16:00:06</Time>
#<Lat>26.860467</Lat>
#<Long>119.207484</Long>
#<Current>-11.8</Current>
#<Mult>1</Mult>
#<Tdf>3</Tdf>
#</Flash>
#</Flashs>
#2. No data for this period:
#<?xml version="1.0" encoding="UTF-8"?>
#<Flashs>
#</Flashs>
root = etree.Element("Flashs")#, xmlns=gConfig['webservice']['namespace'], version="1.0.0")
for i in range(500):
Flash = etree.SubElement(root, "Flash")
Time = etree.SubElement(Flash, "Time").text = time.strftime('%Y-%m-%d %H:%M:%S')
Lat = etree.SubElement(Flash, "Lat").text = '26.860467'
Long = etree.SubElement(Flash, "Long").text = '119.207484'
Current = etree.SubElement(Flash, "Current").text = '-11.8'
Mult = etree.SubElement(Flash, "Mult").text = '1'
Tdf = etree.SubElement(Flash, "Tdf").text = '3'
ret = etree.tostring(root, pretty_print=True, xml_declaration=True, encoding=ENCODING)
#time.sleep(4)
return ret
#in0 - startTime, in1 - endTime, in2 - longitude (west), in3 - longitude (east), in4 - latitude (north), in5 - latitude (south)
def webservice_GetFlashofEnvelope(in0, in1, in2, in3, in4, in5):
root = etree.Element("Flashs")
for i in range(500):
Flash = etree.SubElement(root, "Flash")
Time = etree.SubElement(Flash, "Time").text = time.strftime('%Y-%m-%d %H:%M:%S')
Lat = etree.SubElement(Flash, "Lat").text = '26.860467'
Long = etree.SubElement(Flash, "Long").text = '119.207484'
Current = etree.SubElement(Flash, "Current").text = '-11.8'
Mult = etree.SubElement(Flash, "Mult").text = '1'
Tdf = etree.SubElement(Flash, "Tdf").text = '3'
ret = etree.tostring(root, pretty_print=True, xml_declaration=True, encoding=ENCODING)
return ret
def get_wsapplication():
dispatcher = SoapDispatcher(
'thunder_counter_dispatcher',
location = str(gConfig['webservice']['location']),
action = str(gConfig['webservice']['action']),
namespace = str(gConfig['webservice']['namespace']),
prefix = str(gConfig['webservice']['prefix']),
trace = True,
ns = True)
dispatcher.register_function('login',
webservice_login,
returns={'Result': str},
args={'username': str, 'password': str})
dispatcher.register_function('GetFlashofDate',
webservice_GetFlashofDate,
returns={'Result': str},
args={'in0': str, 'in1': str})
dispatcher.register_function('GetFlashofEnvelope',
webservice_GetFlashofEnvelope,
returns={'Result': str},
args={'in0': str, 'in1': str, 'in2': str,'in3': str, 'in4': str, 'in5': str})
wsapplication = WSGISOAPHandler(dispatcher)
return wsapplication
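#--- Illustrative client sketch (an addition, not part of the original
#module). Assuming the service is running and ogc-config.ini points
#location/action/namespace at it, a pysimplesoap call could look like this
#(method and parameter names follow the registrations above):
def call_get_flash_of_date(start, end):
    from pysimplesoap.client import SoapClient
    client = SoapClient(location=str(gConfig['webservice']['location']),
                        action=str(gConfig['webservice']['action']),
                        namespace=str(gConfig['webservice']['namespace']),
                        soap_ns='soap')
    return client.GetFlashofDate(in0=start, in1=end)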
if __name__=="__main__":
if gConfig['webservice']['enable'] in [u'true', u'TRUE']:
h, p = gConfig['webservice']['host'], int(gConfig['webservice']['port'])
print('listening webservice at http://%s:%d/webservice' % (h, p))
server = pywsgi.WSGIServer((h, p), get_wsapplication())
server.start()
server.serve_forever()
| mit |
twobob/buildroot-kindle | output/build/host-python-2.7.2/Lib/sets.py | 262 | 19050 | """Classes to represent arbitrary sets (including sets of sets).
This module implements sets using dictionaries whose values are
ignored. The usual operations (union, intersection, deletion, etc.)
are provided as both methods and operators.
Important: sets are not sequences! While they support 'x in s',
'len(s)', and 'for x in s', none of those operations are unique for
sequences; for example, mappings support all three as well. The
characteristic operation for sequences is subscripting with small
integers: s[i], for i in range(len(s)). Sets don't support
subscripting at all. Also, sequences allow multiple occurrences and
their elements have a definite order; sets on the other hand don't
record multiple occurrences and don't remember the order of element
insertion (which is why they don't support s[i]).
The following classes are provided:
BaseSet -- All the operations common to both mutable and immutable
sets. This is an abstract class, not meant to be directly
instantiated.
Set -- Mutable sets, subclass of BaseSet; not hashable.
ImmutableSet -- Immutable sets, subclass of BaseSet; hashable.
An iterable argument is mandatory to create an ImmutableSet.
_TemporarilyImmutableSet -- A wrapper around a Set, hashable,
giving the same hash value as the immutable set equivalent
would have. Do not use this class directly.
Only hashable objects can be added to a Set. In particular, you cannot
really add a Set as an element to another Set; if you try, what is
actually added is an ImmutableSet built from it (it compares equal to
the one you tried adding).
When you ask if `x in y' where x is a Set and y is a Set or
ImmutableSet, x is wrapped into a _TemporarilyImmutableSet z, and
what's tested is actually `z in y'.
"""
# Code history:
#
# - Greg V. Wilson wrote the first version, using a different approach
# to the mutable/immutable problem, and inheriting from dict.
#
# - Alex Martelli modified Greg's version to implement the current
# Set/ImmutableSet approach, and make the data an attribute.
#
# - Guido van Rossum rewrote much of the code, made some API changes,
# and cleaned up the docstrings.
#
# - Raymond Hettinger added a number of speedups and other
# improvements.
from itertools import ifilter, ifilterfalse
__all__ = ['BaseSet', 'Set', 'ImmutableSet']
import warnings
warnings.warn("the sets module is deprecated", DeprecationWarning,
stacklevel=2)
class BaseSet(object):
"""Common base class for mutable and immutable sets."""
__slots__ = ['_data']
# Constructor
def __init__(self):
"""This is an abstract class."""
# Don't call this from a concrete subclass!
if self.__class__ is BaseSet:
raise TypeError, ("BaseSet is an abstract class. "
"Use Set or ImmutableSet.")
# Standard protocols: __len__, __repr__, __str__, __iter__
def __len__(self):
"""Return the number of elements of a set."""
return len(self._data)
def __repr__(self):
"""Return string representation of a set.
This looks like 'Set([<list of elements>])'.
"""
return self._repr()
# __str__ is the same as __repr__
__str__ = __repr__
def _repr(self, sorted=False):
elements = self._data.keys()
if sorted:
elements.sort()
return '%s(%r)' % (self.__class__.__name__, elements)
def __iter__(self):
"""Return an iterator over the elements or a set.
This is the keys iterator for the underlying dict.
"""
return self._data.iterkeys()
# Three-way comparison is not supported. However, because __eq__ is
# tried before __cmp__, if Set x == Set y, x.__eq__(y) returns True and
# then cmp(x, y) returns 0 (Python doesn't actually call __cmp__ in this
# case).
def __cmp__(self, other):
raise TypeError, "can't compare sets using cmp()"
# Equality comparisons using the underlying dicts. Mixed-type comparisons
# are allowed here, where Set == z for non-Set z always returns False,
# and Set != z always True. This allows expressions like "x in y" to
# give the expected result when y is a sequence of mixed types, not
# raising a pointless TypeError just because y contains a Set, or x is
    # a Set and y contains a non-set ("in" invokes only __eq__).
# Subtle: it would be nicer if __eq__ and __ne__ could return
# NotImplemented instead of True or False. Then the other comparand
# would get a chance to determine the result, and if the other comparand
# also returned NotImplemented then it would fall back to object address
# comparison (which would always return False for __eq__ and always
# True for __ne__). However, that doesn't work, because this type
# *also* implements __cmp__: if, e.g., __eq__ returns NotImplemented,
# Python tries __cmp__ next, and the __cmp__ here then raises TypeError.
def __eq__(self, other):
if isinstance(other, BaseSet):
return self._data == other._data
else:
return False
def __ne__(self, other):
if isinstance(other, BaseSet):
return self._data != other._data
else:
return True
# Copying operations
def copy(self):
"""Return a shallow copy of a set."""
result = self.__class__()
result._data.update(self._data)
return result
__copy__ = copy # For the copy module
def __deepcopy__(self, memo):
"""Return a deep copy of a set; used by copy module."""
# This pre-creates the result and inserts it in the memo
# early, in case the deep copy recurses into another reference
# to this same set. A set can't be an element of itself, but
# it can certainly contain an object that has a reference to
# itself.
from copy import deepcopy
result = self.__class__()
memo[id(self)] = result
data = result._data
value = True
for elt in self:
data[deepcopy(elt, memo)] = value
return result
# Standard set operations: union, intersection, both differences.
# Each has an operator version (e.g. __or__, invoked with |) and a
# method version (e.g. union).
# Subtle: Each pair requires distinct code so that the outcome is
# correct when the type of other isn't suitable. For example, if
# we did "union = __or__" instead, then Set().union(3) would return
# NotImplemented instead of raising TypeError (albeit that *why* it
# raises TypeError as-is is also a bit subtle).
def __or__(self, other):
"""Return the union of two sets as a new set.
(I.e. all elements that are in either set.)
"""
if not isinstance(other, BaseSet):
return NotImplemented
return self.union(other)
def union(self, other):
"""Return the union of two sets as a new set.
(I.e. all elements that are in either set.)
"""
result = self.__class__(self)
result._update(other)
return result
def __and__(self, other):
"""Return the intersection of two sets as a new set.
(I.e. all elements that are in both sets.)
"""
if not isinstance(other, BaseSet):
return NotImplemented
return self.intersection(other)
def intersection(self, other):
"""Return the intersection of two sets as a new set.
(I.e. all elements that are in both sets.)
"""
if not isinstance(other, BaseSet):
other = Set(other)
if len(self) <= len(other):
little, big = self, other
else:
little, big = other, self
common = ifilter(big._data.__contains__, little)
return self.__class__(common)
def __xor__(self, other):
"""Return the symmetric difference of two sets as a new set.
(I.e. all elements that are in exactly one of the sets.)
"""
if not isinstance(other, BaseSet):
return NotImplemented
return self.symmetric_difference(other)
def symmetric_difference(self, other):
"""Return the symmetric difference of two sets as a new set.
(I.e. all elements that are in exactly one of the sets.)
"""
result = self.__class__()
data = result._data
value = True
selfdata = self._data
try:
otherdata = other._data
except AttributeError:
otherdata = Set(other)._data
for elt in ifilterfalse(otherdata.__contains__, selfdata):
data[elt] = value
for elt in ifilterfalse(selfdata.__contains__, otherdata):
data[elt] = value
return result
def __sub__(self, other):
"""Return the difference of two sets as a new Set.
(I.e. all elements that are in this set and not in the other.)
"""
if not isinstance(other, BaseSet):
return NotImplemented
return self.difference(other)
def difference(self, other):
"""Return the difference of two sets as a new Set.
(I.e. all elements that are in this set and not in the other.)
"""
result = self.__class__()
data = result._data
try:
otherdata = other._data
except AttributeError:
otherdata = Set(other)._data
value = True
for elt in ifilterfalse(otherdata.__contains__, self):
data[elt] = value
return result
# Membership test
def __contains__(self, element):
"""Report whether an element is a member of a set.
(Called in response to the expression `element in self'.)
"""
try:
return element in self._data
except TypeError:
transform = getattr(element, "__as_temporarily_immutable__", None)
if transform is None:
raise # re-raise the TypeError exception we caught
return transform() in self._data
# Subset and superset test
def issubset(self, other):
"""Report whether another set contains this set."""
self._binary_sanity_check(other)
if len(self) > len(other): # Fast check for obvious cases
return False
for elt in ifilterfalse(other._data.__contains__, self):
return False
return True
def issuperset(self, other):
"""Report whether this set contains another set."""
self._binary_sanity_check(other)
if len(self) < len(other): # Fast check for obvious cases
return False
for elt in ifilterfalse(self._data.__contains__, other):
return False
return True
# Inequality comparisons using the is-subset relation.
__le__ = issubset
__ge__ = issuperset
def __lt__(self, other):
self._binary_sanity_check(other)
return len(self) < len(other) and self.issubset(other)
def __gt__(self, other):
self._binary_sanity_check(other)
return len(self) > len(other) and self.issuperset(other)
# We inherit object.__hash__, so we must deny this explicitly
__hash__ = None
# Assorted helpers
def _binary_sanity_check(self, other):
# Check that the other argument to a binary operation is also
# a set, raising a TypeError otherwise.
if not isinstance(other, BaseSet):
raise TypeError, "Binary operation only permitted between sets"
def _compute_hash(self):
# Calculate hash code for a set by xor'ing the hash codes of
# the elements. This ensures that the hash code does not depend
# on the order in which elements are added to the set. This is
# not called __hash__ because a BaseSet should not be hashable;
# only an ImmutableSet is hashable.
result = 0
for elt in self:
result ^= hash(elt)
return result
def _update(self, iterable):
# The main loop for update() and the subclass __init__() methods.
data = self._data
# Use the fast update() method when a dictionary is available.
if isinstance(iterable, BaseSet):
data.update(iterable._data)
return
value = True
if type(iterable) in (list, tuple, xrange):
# Optimized: we know that __iter__() and next() can't
# raise TypeError, so we can move 'try:' out of the loop.
it = iter(iterable)
while True:
try:
for element in it:
data[element] = value
return
except TypeError:
transform = getattr(element, "__as_immutable__", None)
if transform is None:
raise # re-raise the TypeError exception we caught
data[transform()] = value
else:
# Safe: only catch TypeError where intended
for element in iterable:
try:
data[element] = value
except TypeError:
transform = getattr(element, "__as_immutable__", None)
if transform is None:
raise # re-raise the TypeError exception we caught
data[transform()] = value
class ImmutableSet(BaseSet):
"""Immutable set class."""
__slots__ = ['_hashcode']
# BaseSet + hashing
def __init__(self, iterable=None):
"""Construct an immutable set from an optional iterable."""
self._hashcode = None
self._data = {}
if iterable is not None:
self._update(iterable)
def __hash__(self):
if self._hashcode is None:
self._hashcode = self._compute_hash()
return self._hashcode
def __getstate__(self):
return self._data, self._hashcode
def __setstate__(self, state):
self._data, self._hashcode = state
class Set(BaseSet):
""" Mutable set class."""
__slots__ = []
# BaseSet + operations requiring mutability; no hashing
def __init__(self, iterable=None):
"""Construct a set from an optional iterable."""
self._data = {}
if iterable is not None:
self._update(iterable)
def __getstate__(self):
# getstate's results are ignored if it is not
return self._data,
def __setstate__(self, data):
self._data, = data
# In-place union, intersection, differences.
# Subtle: The xyz_update() functions deliberately return None,
# as do all mutating operations on built-in container types.
# The __xyz__ spellings have to return self, though.
def __ior__(self, other):
"""Update a set with the union of itself and another."""
self._binary_sanity_check(other)
self._data.update(other._data)
return self
def union_update(self, other):
"""Update a set with the union of itself and another."""
self._update(other)
def __iand__(self, other):
"""Update a set with the intersection of itself and another."""
self._binary_sanity_check(other)
self._data = (self & other)._data
return self
def intersection_update(self, other):
"""Update a set with the intersection of itself and another."""
if isinstance(other, BaseSet):
self &= other
else:
self._data = (self.intersection(other))._data
def __ixor__(self, other):
"""Update a set with the symmetric difference of itself and another."""
self._binary_sanity_check(other)
self.symmetric_difference_update(other)
return self
def symmetric_difference_update(self, other):
"""Update a set with the symmetric difference of itself and another."""
data = self._data
value = True
if not isinstance(other, BaseSet):
other = Set(other)
if self is other:
self.clear()
for elt in other:
if elt in data:
del data[elt]
else:
data[elt] = value
def __isub__(self, other):
"""Remove all elements of another set from this set."""
self._binary_sanity_check(other)
self.difference_update(other)
return self
def difference_update(self, other):
"""Remove all elements of another set from this set."""
data = self._data
if not isinstance(other, BaseSet):
other = Set(other)
if self is other:
self.clear()
for elt in ifilter(data.__contains__, other):
del data[elt]
# Python dict-like mass mutations: update, clear
def update(self, iterable):
"""Add all values from an iterable (such as a list or file)."""
self._update(iterable)
def clear(self):
"""Remove all elements from this set."""
self._data.clear()
# Single-element mutations: add, remove, discard
def add(self, element):
"""Add an element to a set.
This has no effect if the element is already present.
"""
try:
self._data[element] = True
except TypeError:
transform = getattr(element, "__as_immutable__", None)
if transform is None:
raise # re-raise the TypeError exception we caught
self._data[transform()] = True
def remove(self, element):
"""Remove an element from a set; it must be a member.
If the element is not a member, raise a KeyError.
"""
try:
del self._data[element]
except TypeError:
transform = getattr(element, "__as_temporarily_immutable__", None)
if transform is None:
raise # re-raise the TypeError exception we caught
del self._data[transform()]
def discard(self, element):
"""Remove an element from a set if it is a member.
If the element is not a member, do nothing.
"""
try:
self.remove(element)
except KeyError:
pass
def pop(self):
"""Remove and return an arbitrary set element."""
return self._data.popitem()[0]
def __as_immutable__(self):
# Return a copy of self as an immutable set
return ImmutableSet(self)
def __as_temporarily_immutable__(self):
# Return self wrapped in a temporarily immutable set
return _TemporarilyImmutableSet(self)
class _TemporarilyImmutableSet(BaseSet):
# Wrap a mutable set as if it was temporarily immutable.
# This only supplies hashing and equality comparisons.
def __init__(self, set):
self._set = set
self._data = set._data # Needed by ImmutableSet.__eq__()
def __hash__(self):
return self._set._compute_hash()
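if __name__ == '__main__':
    # Illustrative demo (an addition; the original module ends above). It
    # exercises the documented behaviour: operator/method forms and the
    # automatic wrapping of a mutable Set added to another Set.
    a = Set([1, 2, 3])
    b = Set([3, 4])
    assert a | b == Set([1, 2, 3, 4])
    assert a.union(b) == a | b
    a.add(b)                      # stored as an ImmutableSet copy
    assert ImmutableSet(b) in a
    print 'sets.py demo passed'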
| gpl-2.0 |
mozilla/MozDef | alerts/lib/celery_scheduler/periodic_task.py | 3 | 4871 | # Copyright 2013 Regents of the University of Michigan
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at http://www.apache.org/licenses/LICENSE-2.0
import datetime
import celery.schedules
from bson.objectid import ObjectId
from mozdef_util.utilities.toUTC import toUTC
class Interval():
def __init__(self, every, period):
self.every = every
self.period = period
@property
def schedule(self):
return celery.schedules.schedule(datetime.timedelta(**{self.period: self.every}))
@property
def period_singular(self):
return self.period[:-1]
def to_dict(self):
return {
"every": self.every,
"period": self.period
}
def to_str(self):
return "{0} {1}".format(
self.every,
self.period
)
class Crontab():
def __init__(self, minute, hour, day_of_week, day_of_month, month_of_year):
self.minute = minute
self.hour = hour
self.day_of_week = day_of_week
self.day_of_month = day_of_month
self.month_of_year = month_of_year
@property
def schedule(self):
return celery.schedules.crontab(
minute=self.minute,
hour=self.hour,
day_of_week=self.day_of_week,
day_of_month=self.day_of_month,
month_of_year=self.month_of_year
)
def to_dict(self):
return {
"minute": self.minute,
"hour": self.hour,
"day_of_week": self.day_of_week,
"day_of_month": self.day_of_month,
"month_of_year": self.month_of_year,
}
def to_str(self):
return "{0} {1} {2} {3} {4}".format(
self.minute,
self.hour,
self.day_of_week,
self.day_of_month,
self.month_of_year
)
class PeriodicTask():
def __init__(
self,
name,
task,
enabled,
_id=None,
_cls='PeriodicTask',
args=[],
kwargs={},
celery_schedule=None,
schedule_type=None,
schedule_str=None,
expires=None,
queue=None,
exchange=None,
routing_key=None,
last_run_at=None,
run_immediately=False,
total_run_count=0,
modifiedat=None,
modifiedby=None):
if _id is None:
_id = str(ObjectId())
self._id = _id
self._cls = _cls
self.name = name
self.task = task
self.args = args
self.kwargs = kwargs
self.enabled = enabled
self.expires = expires
self.queue = queue
self.exchange = exchange
self.routing_key = routing_key
if last_run_at is not None:
last_run_at = toUTC(last_run_at)
self.last_run_at = last_run_at
self.run_immediately = run_immediately
self.total_run_count = total_run_count
self.set_schedule(schedule_type, celery_schedule)
def set_schedule(self, schedule_type, celery_schedule):
self.schedule_type = schedule_type
if self.schedule_type == 'interval':
self.celery_schedule = Interval(**celery_schedule)
elif self.schedule_type == 'crontab':
self.celery_schedule = Crontab(**celery_schedule)
else:
raise Exception("must define interval or crontab schedule")
self.schedule_str = self.celery_schedule.to_str()
@property
def schedule(self):
if self.schedule_type == 'interval':
return self.celery_schedule.schedule
elif self.schedule_type == 'crontab':
return self.celery_schedule.schedule
else:
raise Exception("must define interval or crontab schedule")
def to_dict(self):
last_run_at = self.last_run_at
if isinstance(self.last_run_at, datetime.datetime):
last_run_at = last_run_at.isoformat()
return {
"name": self.name,
"task": self.task,
"enabled": self.enabled,
"_id": self._id,
"_cls": self._cls,
"args": self.args,
"kwargs": self.kwargs,
"celery_schedule": self.celery_schedule.to_dict(),
"schedule_str": self.schedule_str,
"schedule_type": self.schedule_type,
"expires": self.expires,
"queue": self.queue,
"exchange": self.exchange,
"routing_key": self.routing_key,
"last_run_at": last_run_at,
"run_immediately": self.run_immediately,
"total_run_count": self.total_run_count,
}
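if __name__ == "__main__":
    # Illustrative usage sketch (an addition, not part of the original
    # module). It assumes celery, bson and mozdef_util are importable; the
    # task path "alerts.example" is a hypothetical placeholder.
    task = PeriodicTask(
        name="example",
        task="alerts.example",
        enabled=True,
        schedule_type="interval",
        celery_schedule={"every": 5, "period": "minutes"},
    )
    print(task.schedule_str)                  # "5 minutes"
    print(task.to_dict()["celery_schedule"])  # {"every": 5, "period": "minutes"}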
| mpl-2.0 |
mpasternak/pyglet-fix-issue-552 | pyglet/gl/glu.py | 45 | 25679 | # ----------------------------------------------------------------------------
# pyglet
# Copyright (c) 2006-2008 Alex Holkner
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# * Neither the name of pyglet nor the names of its
# contributors may be used to endorse or promote products
# derived from this software without specific prior written
# permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# ----------------------------------------------------------------------------
'''Wrapper for /usr/include/GL/glu.h
Generated by tools/gengl.py.
Do not modify this file.
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
from ctypes import *
from pyglet.gl.lib import link_GLU as _link_function
from pyglet.gl.lib import c_ptrdiff_t
# BEGIN GENERATED CONTENT (do not edit below this line)
# This content is generated by tools/gengl.py.
# Wrapper for /usr/include/GL/glu.h
GLU_EXT_object_space_tess = 1 # /usr/include/GL/glu.h:71
GLU_EXT_nurbs_tessellator = 1 # /usr/include/GL/glu.h:72
GLU_FALSE = 0 # /usr/include/GL/glu.h:75
GLU_TRUE = 1 # /usr/include/GL/glu.h:76
GLU_VERSION_1_1 = 1 # /usr/include/GL/glu.h:79
GLU_VERSION_1_2 = 1 # /usr/include/GL/glu.h:80
GLU_VERSION_1_3 = 1 # /usr/include/GL/glu.h:81
GLU_VERSION = 100800 # /usr/include/GL/glu.h:84
GLU_EXTENSIONS = 100801 # /usr/include/GL/glu.h:85
GLU_INVALID_ENUM = 100900 # /usr/include/GL/glu.h:88
GLU_INVALID_VALUE = 100901 # /usr/include/GL/glu.h:89
GLU_OUT_OF_MEMORY = 100902 # /usr/include/GL/glu.h:90
GLU_INCOMPATIBLE_GL_VERSION = 100903 # /usr/include/GL/glu.h:91
GLU_INVALID_OPERATION = 100904 # /usr/include/GL/glu.h:92
GLU_OUTLINE_POLYGON = 100240 # /usr/include/GL/glu.h:96
GLU_OUTLINE_PATCH = 100241 # /usr/include/GL/glu.h:97
GLU_NURBS_ERROR = 100103 # /usr/include/GL/glu.h:100
GLU_ERROR = 100103 # /usr/include/GL/glu.h:101
GLU_NURBS_BEGIN = 100164 # /usr/include/GL/glu.h:102
GLU_NURBS_BEGIN_EXT = 100164 # /usr/include/GL/glu.h:103
GLU_NURBS_VERTEX = 100165 # /usr/include/GL/glu.h:104
GLU_NURBS_VERTEX_EXT = 100165 # /usr/include/GL/glu.h:105
GLU_NURBS_NORMAL = 100166 # /usr/include/GL/glu.h:106
GLU_NURBS_NORMAL_EXT = 100166 # /usr/include/GL/glu.h:107
GLU_NURBS_COLOR = 100167 # /usr/include/GL/glu.h:108
GLU_NURBS_COLOR_EXT = 100167 # /usr/include/GL/glu.h:109
GLU_NURBS_TEXTURE_COORD = 100168 # /usr/include/GL/glu.h:110
GLU_NURBS_TEX_COORD_EXT = 100168 # /usr/include/GL/glu.h:111
GLU_NURBS_END = 100169 # /usr/include/GL/glu.h:112
GLU_NURBS_END_EXT = 100169 # /usr/include/GL/glu.h:113
GLU_NURBS_BEGIN_DATA = 100170 # /usr/include/GL/glu.h:114
GLU_NURBS_BEGIN_DATA_EXT = 100170 # /usr/include/GL/glu.h:115
GLU_NURBS_VERTEX_DATA = 100171 # /usr/include/GL/glu.h:116
GLU_NURBS_VERTEX_DATA_EXT = 100171 # /usr/include/GL/glu.h:117
GLU_NURBS_NORMAL_DATA = 100172 # /usr/include/GL/glu.h:118
GLU_NURBS_NORMAL_DATA_EXT = 100172 # /usr/include/GL/glu.h:119
GLU_NURBS_COLOR_DATA = 100173 # /usr/include/GL/glu.h:120
GLU_NURBS_COLOR_DATA_EXT = 100173 # /usr/include/GL/glu.h:121
GLU_NURBS_TEXTURE_COORD_DATA = 100174 # /usr/include/GL/glu.h:122
GLU_NURBS_TEX_COORD_DATA_EXT = 100174 # /usr/include/GL/glu.h:123
GLU_NURBS_END_DATA = 100175 # /usr/include/GL/glu.h:124
GLU_NURBS_END_DATA_EXT = 100175 # /usr/include/GL/glu.h:125
GLU_NURBS_ERROR1 = 100251 # /usr/include/GL/glu.h:128
GLU_NURBS_ERROR2 = 100252 # /usr/include/GL/glu.h:129
GLU_NURBS_ERROR3 = 100253 # /usr/include/GL/glu.h:130
GLU_NURBS_ERROR4 = 100254 # /usr/include/GL/glu.h:131
GLU_NURBS_ERROR5 = 100255 # /usr/include/GL/glu.h:132
GLU_NURBS_ERROR6 = 100256 # /usr/include/GL/glu.h:133
GLU_NURBS_ERROR7 = 100257 # /usr/include/GL/glu.h:134
GLU_NURBS_ERROR8 = 100258 # /usr/include/GL/glu.h:135
GLU_NURBS_ERROR9 = 100259 # /usr/include/GL/glu.h:136
GLU_NURBS_ERROR10 = 100260 # /usr/include/GL/glu.h:137
GLU_NURBS_ERROR11 = 100261 # /usr/include/GL/glu.h:138
GLU_NURBS_ERROR12 = 100262 # /usr/include/GL/glu.h:139
GLU_NURBS_ERROR13 = 100263 # /usr/include/GL/glu.h:140
GLU_NURBS_ERROR14 = 100264 # /usr/include/GL/glu.h:141
GLU_NURBS_ERROR15 = 100265 # /usr/include/GL/glu.h:142
GLU_NURBS_ERROR16 = 100266 # /usr/include/GL/glu.h:143
GLU_NURBS_ERROR17 = 100267 # /usr/include/GL/glu.h:144
GLU_NURBS_ERROR18 = 100268 # /usr/include/GL/glu.h:145
GLU_NURBS_ERROR19 = 100269 # /usr/include/GL/glu.h:146
GLU_NURBS_ERROR20 = 100270 # /usr/include/GL/glu.h:147
GLU_NURBS_ERROR21 = 100271 # /usr/include/GL/glu.h:148
GLU_NURBS_ERROR22 = 100272 # /usr/include/GL/glu.h:149
GLU_NURBS_ERROR23 = 100273 # /usr/include/GL/glu.h:150
GLU_NURBS_ERROR24 = 100274 # /usr/include/GL/glu.h:151
GLU_NURBS_ERROR25 = 100275 # /usr/include/GL/glu.h:152
GLU_NURBS_ERROR26 = 100276 # /usr/include/GL/glu.h:153
GLU_NURBS_ERROR27 = 100277 # /usr/include/GL/glu.h:154
GLU_NURBS_ERROR28 = 100278 # /usr/include/GL/glu.h:155
GLU_NURBS_ERROR29 = 100279 # /usr/include/GL/glu.h:156
GLU_NURBS_ERROR30 = 100280 # /usr/include/GL/glu.h:157
GLU_NURBS_ERROR31 = 100281 # /usr/include/GL/glu.h:158
GLU_NURBS_ERROR32 = 100282 # /usr/include/GL/glu.h:159
GLU_NURBS_ERROR33 = 100283 # /usr/include/GL/glu.h:160
GLU_NURBS_ERROR34 = 100284 # /usr/include/GL/glu.h:161
GLU_NURBS_ERROR35 = 100285 # /usr/include/GL/glu.h:162
GLU_NURBS_ERROR36 = 100286 # /usr/include/GL/glu.h:163
GLU_NURBS_ERROR37 = 100287 # /usr/include/GL/glu.h:164
GLU_AUTO_LOAD_MATRIX = 100200 # /usr/include/GL/glu.h:167
GLU_CULLING = 100201 # /usr/include/GL/glu.h:168
GLU_SAMPLING_TOLERANCE = 100203 # /usr/include/GL/glu.h:169
GLU_DISPLAY_MODE = 100204 # /usr/include/GL/glu.h:170
GLU_PARAMETRIC_TOLERANCE = 100202 # /usr/include/GL/glu.h:171
GLU_SAMPLING_METHOD = 100205 # /usr/include/GL/glu.h:172
GLU_U_STEP = 100206 # /usr/include/GL/glu.h:173
GLU_V_STEP = 100207 # /usr/include/GL/glu.h:174
GLU_NURBS_MODE = 100160 # /usr/include/GL/glu.h:175
GLU_NURBS_MODE_EXT = 100160 # /usr/include/GL/glu.h:176
GLU_NURBS_TESSELLATOR = 100161 # /usr/include/GL/glu.h:177
GLU_NURBS_TESSELLATOR_EXT = 100161 # /usr/include/GL/glu.h:178
GLU_NURBS_RENDERER = 100162 # /usr/include/GL/glu.h:179
GLU_NURBS_RENDERER_EXT = 100162 # /usr/include/GL/glu.h:180
GLU_OBJECT_PARAMETRIC_ERROR = 100208 # /usr/include/GL/glu.h:183
GLU_OBJECT_PARAMETRIC_ERROR_EXT = 100208 # /usr/include/GL/glu.h:184
GLU_OBJECT_PATH_LENGTH = 100209 # /usr/include/GL/glu.h:185
GLU_OBJECT_PATH_LENGTH_EXT = 100209 # /usr/include/GL/glu.h:186
GLU_PATH_LENGTH = 100215 # /usr/include/GL/glu.h:187
GLU_PARAMETRIC_ERROR = 100216 # /usr/include/GL/glu.h:188
GLU_DOMAIN_DISTANCE = 100217 # /usr/include/GL/glu.h:189
GLU_MAP1_TRIM_2 = 100210 # /usr/include/GL/glu.h:192
GLU_MAP1_TRIM_3 = 100211 # /usr/include/GL/glu.h:193
GLU_POINT = 100010 # /usr/include/GL/glu.h:196
GLU_LINE = 100011 # /usr/include/GL/glu.h:197
GLU_FILL = 100012 # /usr/include/GL/glu.h:198
GLU_SILHOUETTE = 100013 # /usr/include/GL/glu.h:199
GLU_SMOOTH = 100000 # /usr/include/GL/glu.h:205
GLU_FLAT = 100001 # /usr/include/GL/glu.h:206
GLU_NONE = 100002 # /usr/include/GL/glu.h:207
GLU_OUTSIDE = 100020 # /usr/include/GL/glu.h:210
GLU_INSIDE = 100021 # /usr/include/GL/glu.h:211
GLU_TESS_BEGIN = 100100 # /usr/include/GL/glu.h:214
GLU_BEGIN = 100100 # /usr/include/GL/glu.h:215
GLU_TESS_VERTEX = 100101 # /usr/include/GL/glu.h:216
GLU_VERTEX = 100101 # /usr/include/GL/glu.h:217
GLU_TESS_END = 100102 # /usr/include/GL/glu.h:218
GLU_END = 100102 # /usr/include/GL/glu.h:219
GLU_TESS_ERROR = 100103 # /usr/include/GL/glu.h:220
GLU_TESS_EDGE_FLAG = 100104 # /usr/include/GL/glu.h:221
GLU_EDGE_FLAG = 100104 # /usr/include/GL/glu.h:222
GLU_TESS_COMBINE = 100105 # /usr/include/GL/glu.h:223
GLU_TESS_BEGIN_DATA = 100106 # /usr/include/GL/glu.h:224
GLU_TESS_VERTEX_DATA = 100107 # /usr/include/GL/glu.h:225
GLU_TESS_END_DATA = 100108 # /usr/include/GL/glu.h:226
GLU_TESS_ERROR_DATA = 100109 # /usr/include/GL/glu.h:227
GLU_TESS_EDGE_FLAG_DATA = 100110 # /usr/include/GL/glu.h:228
GLU_TESS_COMBINE_DATA = 100111 # /usr/include/GL/glu.h:229
GLU_CW = 100120 # /usr/include/GL/glu.h:232
GLU_CCW = 100121 # /usr/include/GL/glu.h:233
GLU_INTERIOR = 100122 # /usr/include/GL/glu.h:234
GLU_EXTERIOR = 100123 # /usr/include/GL/glu.h:235
GLU_UNKNOWN = 100124 # /usr/include/GL/glu.h:236
GLU_TESS_WINDING_RULE = 100140 # /usr/include/GL/glu.h:239
GLU_TESS_BOUNDARY_ONLY = 100141 # /usr/include/GL/glu.h:240
GLU_TESS_TOLERANCE = 100142 # /usr/include/GL/glu.h:241
GLU_TESS_ERROR1 = 100151 # /usr/include/GL/glu.h:244
GLU_TESS_ERROR2 = 100152 # /usr/include/GL/glu.h:245
GLU_TESS_ERROR3 = 100153 # /usr/include/GL/glu.h:246
GLU_TESS_ERROR4 = 100154 # /usr/include/GL/glu.h:247
GLU_TESS_ERROR5 = 100155 # /usr/include/GL/glu.h:248
GLU_TESS_ERROR6 = 100156 # /usr/include/GL/glu.h:249
GLU_TESS_ERROR7 = 100157 # /usr/include/GL/glu.h:250
GLU_TESS_ERROR8 = 100158 # /usr/include/GL/glu.h:251
GLU_TESS_MISSING_BEGIN_POLYGON = 100151 # /usr/include/GL/glu.h:252
GLU_TESS_MISSING_BEGIN_CONTOUR = 100152 # /usr/include/GL/glu.h:253
GLU_TESS_MISSING_END_POLYGON = 100153 # /usr/include/GL/glu.h:254
GLU_TESS_MISSING_END_CONTOUR = 100154 # /usr/include/GL/glu.h:255
GLU_TESS_COORD_TOO_LARGE = 100155 # /usr/include/GL/glu.h:256
GLU_TESS_NEED_COMBINE_CALLBACK = 100156 # /usr/include/GL/glu.h:257
GLU_TESS_WINDING_ODD = 100130 # /usr/include/GL/glu.h:260
GLU_TESS_WINDING_NONZERO = 100131 # /usr/include/GL/glu.h:261
GLU_TESS_WINDING_POSITIVE = 100132 # /usr/include/GL/glu.h:262
GLU_TESS_WINDING_NEGATIVE = 100133 # /usr/include/GL/glu.h:263
GLU_TESS_WINDING_ABS_GEQ_TWO = 100134 # /usr/include/GL/glu.h:264
class struct_GLUnurbs(Structure):
__slots__ = [
]
struct_GLUnurbs._fields_ = [
('_opaque_struct', c_int)
]
class struct_GLUnurbs(Structure):
__slots__ = [
]
struct_GLUnurbs._fields_ = [
('_opaque_struct', c_int)
]
GLUnurbs = struct_GLUnurbs # /usr/include/GL/glu.h:274
class struct_GLUquadric(Structure):
__slots__ = [
]
struct_GLUquadric._fields_ = [
('_opaque_struct', c_int)
]
class struct_GLUquadric(Structure):
__slots__ = [
]
struct_GLUquadric._fields_ = [
('_opaque_struct', c_int)
]
GLUquadric = struct_GLUquadric # /usr/include/GL/glu.h:275
class struct_GLUtesselator(Structure):
__slots__ = [
]
struct_GLUtesselator._fields_ = [
('_opaque_struct', c_int)
]
class struct_GLUtesselator(Structure):
__slots__ = [
]
struct_GLUtesselator._fields_ = [
('_opaque_struct', c_int)
]
GLUtesselator = struct_GLUtesselator # /usr/include/GL/glu.h:276
GLUnurbsObj = GLUnurbs # /usr/include/GL/glu.h:279
GLUquadricObj = GLUquadric # /usr/include/GL/glu.h:280
GLUtesselatorObj = GLUtesselator # /usr/include/GL/glu.h:281
GLUtriangulatorObj = GLUtesselator # /usr/include/GL/glu.h:282
GLU_TESS_MAX_COORD = 9.9999999999999998e+149 # /usr/include/GL/glu.h:284
_GLUfuncptr = CFUNCTYPE(None) # /usr/include/GL/glu.h:287
# /usr/include/GL/glu.h:289
gluBeginCurve = _link_function('gluBeginCurve', None, [POINTER(GLUnurbs)], None)
# /usr/include/GL/glu.h:290
gluBeginPolygon = _link_function('gluBeginPolygon', None, [POINTER(GLUtesselator)], None)
# /usr/include/GL/glu.h:291
gluBeginSurface = _link_function('gluBeginSurface', None, [POINTER(GLUnurbs)], None)
# /usr/include/GL/glu.h:292
gluBeginTrim = _link_function('gluBeginTrim', None, [POINTER(GLUnurbs)], None)
GLint = c_int # /usr/include/GL/gl.h:159
GLenum = c_uint # /usr/include/GL/gl.h:153
GLsizei = c_int # /usr/include/GL/gl.h:163
# /usr/include/GL/glu.h:293
gluBuild1DMipmapLevels = _link_function('gluBuild1DMipmapLevels', GLint, [GLenum, GLint, GLsizei, GLenum, GLenum, GLint, GLint, GLint, POINTER(None)], None)
# /usr/include/GL/glu.h:294
gluBuild1DMipmaps = _link_function('gluBuild1DMipmaps', GLint, [GLenum, GLint, GLsizei, GLenum, GLenum, POINTER(None)], None)
# /usr/include/GL/glu.h:295
gluBuild2DMipmapLevels = _link_function('gluBuild2DMipmapLevels', GLint, [GLenum, GLint, GLsizei, GLsizei, GLenum, GLenum, GLint, GLint, GLint, POINTER(None)], None)
# /usr/include/GL/glu.h:296
gluBuild2DMipmaps = _link_function('gluBuild2DMipmaps', GLint, [GLenum, GLint, GLsizei, GLsizei, GLenum, GLenum, POINTER(None)], None)
# /usr/include/GL/glu.h:297
gluBuild3DMipmapLevels = _link_function('gluBuild3DMipmapLevels', GLint, [GLenum, GLint, GLsizei, GLsizei, GLsizei, GLenum, GLenum, GLint, GLint, GLint, POINTER(None)], None)
# /usr/include/GL/glu.h:298
gluBuild3DMipmaps = _link_function('gluBuild3DMipmaps', GLint, [GLenum, GLint, GLsizei, GLsizei, GLsizei, GLenum, GLenum, POINTER(None)], None)
GLboolean = c_ubyte # /usr/include/GL/gl.h:154
GLubyte = c_ubyte # /usr/include/GL/gl.h:160
# /usr/include/GL/glu.h:299
gluCheckExtension = _link_function('gluCheckExtension', GLboolean, [POINTER(GLubyte), POINTER(GLubyte)], None)
GLdouble = c_double # /usr/include/GL/gl.h:166
# /usr/include/GL/glu.h:300
gluCylinder = _link_function('gluCylinder', None, [POINTER(GLUquadric), GLdouble, GLdouble, GLdouble, GLint, GLint], None)
# /usr/include/GL/glu.h:301
gluDeleteNurbsRenderer = _link_function('gluDeleteNurbsRenderer', None, [POINTER(GLUnurbs)], None)
# /usr/include/GL/glu.h:302
gluDeleteQuadric = _link_function('gluDeleteQuadric', None, [POINTER(GLUquadric)], None)
# /usr/include/GL/glu.h:303
gluDeleteTess = _link_function('gluDeleteTess', None, [POINTER(GLUtesselator)], None)
# /usr/include/GL/glu.h:304
gluDisk = _link_function('gluDisk', None, [POINTER(GLUquadric), GLdouble, GLdouble, GLint, GLint], None)
# /usr/include/GL/glu.h:305
gluEndCurve = _link_function('gluEndCurve', None, [POINTER(GLUnurbs)], None)
# /usr/include/GL/glu.h:306
gluEndPolygon = _link_function('gluEndPolygon', None, [POINTER(GLUtesselator)], None)
# /usr/include/GL/glu.h:307
gluEndSurface = _link_function('gluEndSurface', None, [POINTER(GLUnurbs)], None)
# /usr/include/GL/glu.h:308
gluEndTrim = _link_function('gluEndTrim', None, [POINTER(GLUnurbs)], None)
# /usr/include/GL/glu.h:309
gluErrorString = _link_function('gluErrorString', POINTER(GLubyte), [GLenum], None)
GLfloat = c_float # /usr/include/GL/gl.h:164
# /usr/include/GL/glu.h:310
gluGetNurbsProperty = _link_function('gluGetNurbsProperty', None, [POINTER(GLUnurbs), GLenum, POINTER(GLfloat)], None)
# /usr/include/GL/glu.h:311
gluGetString = _link_function('gluGetString', POINTER(GLubyte), [GLenum], None)
# /usr/include/GL/glu.h:312
gluGetTessProperty = _link_function('gluGetTessProperty', None, [POINTER(GLUtesselator), GLenum, POINTER(GLdouble)], None)
# /usr/include/GL/glu.h:313
gluLoadSamplingMatrices = _link_function('gluLoadSamplingMatrices', None, [POINTER(GLUnurbs), POINTER(GLfloat), POINTER(GLfloat), POINTER(GLint)], None)
# /usr/include/GL/glu.h:314
gluLookAt = _link_function('gluLookAt', None, [GLdouble, GLdouble, GLdouble, GLdouble, GLdouble, GLdouble, GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/glu.h:315
gluNewNurbsRenderer = _link_function('gluNewNurbsRenderer', POINTER(GLUnurbs), [], None)
# /usr/include/GL/glu.h:316
gluNewQuadric = _link_function('gluNewQuadric', POINTER(GLUquadric), [], None)
# /usr/include/GL/glu.h:317
gluNewTess = _link_function('gluNewTess', POINTER(GLUtesselator), [], None)
# /usr/include/GL/glu.h:318
gluNextContour = _link_function('gluNextContour', None, [POINTER(GLUtesselator), GLenum], None)
# /usr/include/GL/glu.h:319
gluNurbsCallback = _link_function('gluNurbsCallback', None, [POINTER(GLUnurbs), GLenum, _GLUfuncptr], None)
GLvoid = None # /usr/include/GL/gl.h:156
# /usr/include/GL/glu.h:320
gluNurbsCallbackData = _link_function('gluNurbsCallbackData', None, [POINTER(GLUnurbs), POINTER(GLvoid)], None)
# /usr/include/GL/glu.h:321
gluNurbsCallbackDataEXT = _link_function('gluNurbsCallbackDataEXT', None, [POINTER(GLUnurbs), POINTER(GLvoid)], None)
# /usr/include/GL/glu.h:322
gluNurbsCurve = _link_function('gluNurbsCurve', None, [POINTER(GLUnurbs), GLint, POINTER(GLfloat), GLint, POINTER(GLfloat), GLint, GLenum], None)
# /usr/include/GL/glu.h:323
gluNurbsProperty = _link_function('gluNurbsProperty', None, [POINTER(GLUnurbs), GLenum, GLfloat], None)
# /usr/include/GL/glu.h:324
gluNurbsSurface = _link_function('gluNurbsSurface', None, [POINTER(GLUnurbs), GLint, POINTER(GLfloat), GLint, POINTER(GLfloat), GLint, GLint, POINTER(GLfloat), GLint, GLint, GLenum], None)
# /usr/include/GL/glu.h:325
gluOrtho2D = _link_function('gluOrtho2D', None, [GLdouble, GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/glu.h:326
gluPartialDisk = _link_function('gluPartialDisk', None, [POINTER(GLUquadric), GLdouble, GLdouble, GLint, GLint, GLdouble, GLdouble], None)
# /usr/include/GL/glu.h:327
gluPerspective = _link_function('gluPerspective', None, [GLdouble, GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/glu.h:328
gluPickMatrix = _link_function('gluPickMatrix', None, [GLdouble, GLdouble, GLdouble, GLdouble, POINTER(GLint)], None)
# /usr/include/GL/glu.h:329
gluProject = _link_function('gluProject', GLint, [GLdouble, GLdouble, GLdouble, POINTER(GLdouble), POINTER(GLdouble), POINTER(GLint), POINTER(GLdouble), POINTER(GLdouble), POINTER(GLdouble)], None)
# /usr/include/GL/glu.h:330
gluPwlCurve = _link_function('gluPwlCurve', None, [POINTER(GLUnurbs), GLint, POINTER(GLfloat), GLint, GLenum], None)
# /usr/include/GL/glu.h:331
gluQuadricCallback = _link_function('gluQuadricCallback', None, [POINTER(GLUquadric), GLenum, _GLUfuncptr], None)
# /usr/include/GL/glu.h:332
gluQuadricDrawStyle = _link_function('gluQuadricDrawStyle', None, [POINTER(GLUquadric), GLenum], None)
# /usr/include/GL/glu.h:333
gluQuadricNormals = _link_function('gluQuadricNormals', None, [POINTER(GLUquadric), GLenum], None)
# /usr/include/GL/glu.h:334
gluQuadricOrientation = _link_function('gluQuadricOrientation', None, [POINTER(GLUquadric), GLenum], None)
# /usr/include/GL/glu.h:335
gluQuadricTexture = _link_function('gluQuadricTexture', None, [POINTER(GLUquadric), GLboolean], None)
# /usr/include/GL/glu.h:336
gluScaleImage = _link_function('gluScaleImage', GLint, [GLenum, GLsizei, GLsizei, GLenum, POINTER(None), GLsizei, GLsizei, GLenum, POINTER(GLvoid)], None)
# /usr/include/GL/glu.h:337
gluSphere = _link_function('gluSphere', None, [POINTER(GLUquadric), GLdouble, GLint, GLint], None)
# /usr/include/GL/glu.h:338
gluTessBeginContour = _link_function('gluTessBeginContour', None, [POINTER(GLUtesselator)], None)
# /usr/include/GL/glu.h:339
gluTessBeginPolygon = _link_function('gluTessBeginPolygon', None, [POINTER(GLUtesselator), POINTER(GLvoid)], None)
# /usr/include/GL/glu.h:340
gluTessCallback = _link_function('gluTessCallback', None, [POINTER(GLUtesselator), GLenum, _GLUfuncptr], None)
# /usr/include/GL/glu.h:341
gluTessEndContour = _link_function('gluTessEndContour', None, [POINTER(GLUtesselator)], None)
# /usr/include/GL/glu.h:342
gluTessEndPolygon = _link_function('gluTessEndPolygon', None, [POINTER(GLUtesselator)], None)
# /usr/include/GL/glu.h:343
gluTessNormal = _link_function('gluTessNormal', None, [POINTER(GLUtesselator), GLdouble, GLdouble, GLdouble], None)
# /usr/include/GL/glu.h:344
gluTessProperty = _link_function('gluTessProperty', None, [POINTER(GLUtesselator), GLenum, GLdouble], None)
# /usr/include/GL/glu.h:345
gluTessVertex = _link_function('gluTessVertex', None, [POINTER(GLUtesselator), POINTER(GLdouble), POINTER(GLvoid)], None)
# /usr/include/GL/glu.h:346
gluUnProject = _link_function('gluUnProject', GLint, [GLdouble, GLdouble, GLdouble, POINTER(GLdouble), POINTER(GLdouble), POINTER(GLint), POINTER(GLdouble), POINTER(GLdouble), POINTER(GLdouble)], None)
# /usr/include/GL/glu.h:347
gluUnProject4 = _link_function('gluUnProject4', GLint, [GLdouble, GLdouble, GLdouble, GLdouble, POINTER(GLdouble), POINTER(GLdouble), POINTER(GLint), GLdouble, GLdouble, POINTER(GLdouble), POINTER(GLdouble), POINTER(GLdouble), POINTER(GLdouble)], None)
__all__ = ['GLU_EXT_object_space_tess', 'GLU_EXT_nurbs_tessellator',
'GLU_FALSE', 'GLU_TRUE', 'GLU_VERSION_1_1', 'GLU_VERSION_1_2',
'GLU_VERSION_1_3', 'GLU_VERSION', 'GLU_EXTENSIONS', 'GLU_INVALID_ENUM',
'GLU_INVALID_VALUE', 'GLU_OUT_OF_MEMORY', 'GLU_INCOMPATIBLE_GL_VERSION',
'GLU_INVALID_OPERATION', 'GLU_OUTLINE_POLYGON', 'GLU_OUTLINE_PATCH',
'GLU_NURBS_ERROR', 'GLU_ERROR', 'GLU_NURBS_BEGIN', 'GLU_NURBS_BEGIN_EXT',
'GLU_NURBS_VERTEX', 'GLU_NURBS_VERTEX_EXT', 'GLU_NURBS_NORMAL',
'GLU_NURBS_NORMAL_EXT', 'GLU_NURBS_COLOR', 'GLU_NURBS_COLOR_EXT',
'GLU_NURBS_TEXTURE_COORD', 'GLU_NURBS_TEX_COORD_EXT', 'GLU_NURBS_END',
'GLU_NURBS_END_EXT', 'GLU_NURBS_BEGIN_DATA', 'GLU_NURBS_BEGIN_DATA_EXT',
'GLU_NURBS_VERTEX_DATA', 'GLU_NURBS_VERTEX_DATA_EXT', 'GLU_NURBS_NORMAL_DATA',
'GLU_NURBS_NORMAL_DATA_EXT', 'GLU_NURBS_COLOR_DATA',
'GLU_NURBS_COLOR_DATA_EXT', 'GLU_NURBS_TEXTURE_COORD_DATA',
'GLU_NURBS_TEX_COORD_DATA_EXT', 'GLU_NURBS_END_DATA',
'GLU_NURBS_END_DATA_EXT', 'GLU_NURBS_ERROR1', 'GLU_NURBS_ERROR2',
'GLU_NURBS_ERROR3', 'GLU_NURBS_ERROR4', 'GLU_NURBS_ERROR5',
'GLU_NURBS_ERROR6', 'GLU_NURBS_ERROR7', 'GLU_NURBS_ERROR8',
'GLU_NURBS_ERROR9', 'GLU_NURBS_ERROR10', 'GLU_NURBS_ERROR11',
'GLU_NURBS_ERROR12', 'GLU_NURBS_ERROR13', 'GLU_NURBS_ERROR14',
'GLU_NURBS_ERROR15', 'GLU_NURBS_ERROR16', 'GLU_NURBS_ERROR17',
'GLU_NURBS_ERROR18', 'GLU_NURBS_ERROR19', 'GLU_NURBS_ERROR20',
'GLU_NURBS_ERROR21', 'GLU_NURBS_ERROR22', 'GLU_NURBS_ERROR23',
'GLU_NURBS_ERROR24', 'GLU_NURBS_ERROR25', 'GLU_NURBS_ERROR26',
'GLU_NURBS_ERROR27', 'GLU_NURBS_ERROR28', 'GLU_NURBS_ERROR29',
'GLU_NURBS_ERROR30', 'GLU_NURBS_ERROR31', 'GLU_NURBS_ERROR32',
'GLU_NURBS_ERROR33', 'GLU_NURBS_ERROR34', 'GLU_NURBS_ERROR35',
'GLU_NURBS_ERROR36', 'GLU_NURBS_ERROR37', 'GLU_AUTO_LOAD_MATRIX',
'GLU_CULLING', 'GLU_SAMPLING_TOLERANCE', 'GLU_DISPLAY_MODE',
'GLU_PARAMETRIC_TOLERANCE', 'GLU_SAMPLING_METHOD', 'GLU_U_STEP', 'GLU_V_STEP',
'GLU_NURBS_MODE', 'GLU_NURBS_MODE_EXT', 'GLU_NURBS_TESSELLATOR',
'GLU_NURBS_TESSELLATOR_EXT', 'GLU_NURBS_RENDERER', 'GLU_NURBS_RENDERER_EXT',
'GLU_OBJECT_PARAMETRIC_ERROR', 'GLU_OBJECT_PARAMETRIC_ERROR_EXT',
'GLU_OBJECT_PATH_LENGTH', 'GLU_OBJECT_PATH_LENGTH_EXT', 'GLU_PATH_LENGTH',
'GLU_PARAMETRIC_ERROR', 'GLU_DOMAIN_DISTANCE', 'GLU_MAP1_TRIM_2',
'GLU_MAP1_TRIM_3', 'GLU_POINT', 'GLU_LINE', 'GLU_FILL', 'GLU_SILHOUETTE',
'GLU_SMOOTH', 'GLU_FLAT', 'GLU_NONE', 'GLU_OUTSIDE', 'GLU_INSIDE',
'GLU_TESS_BEGIN', 'GLU_BEGIN', 'GLU_TESS_VERTEX', 'GLU_VERTEX',
'GLU_TESS_END', 'GLU_END', 'GLU_TESS_ERROR', 'GLU_TESS_EDGE_FLAG',
'GLU_EDGE_FLAG', 'GLU_TESS_COMBINE', 'GLU_TESS_BEGIN_DATA',
'GLU_TESS_VERTEX_DATA', 'GLU_TESS_END_DATA', 'GLU_TESS_ERROR_DATA',
'GLU_TESS_EDGE_FLAG_DATA', 'GLU_TESS_COMBINE_DATA', 'GLU_CW', 'GLU_CCW',
'GLU_INTERIOR', 'GLU_EXTERIOR', 'GLU_UNKNOWN', 'GLU_TESS_WINDING_RULE',
'GLU_TESS_BOUNDARY_ONLY', 'GLU_TESS_TOLERANCE', 'GLU_TESS_ERROR1',
'GLU_TESS_ERROR2', 'GLU_TESS_ERROR3', 'GLU_TESS_ERROR4', 'GLU_TESS_ERROR5',
'GLU_TESS_ERROR6', 'GLU_TESS_ERROR7', 'GLU_TESS_ERROR8',
'GLU_TESS_MISSING_BEGIN_POLYGON', 'GLU_TESS_MISSING_BEGIN_CONTOUR',
'GLU_TESS_MISSING_END_POLYGON', 'GLU_TESS_MISSING_END_CONTOUR',
'GLU_TESS_COORD_TOO_LARGE', 'GLU_TESS_NEED_COMBINE_CALLBACK',
'GLU_TESS_WINDING_ODD', 'GLU_TESS_WINDING_NONZERO',
'GLU_TESS_WINDING_POSITIVE', 'GLU_TESS_WINDING_NEGATIVE',
'GLU_TESS_WINDING_ABS_GEQ_TWO', 'GLUnurbs', 'GLUquadric', 'GLUtesselator',
'GLUnurbsObj', 'GLUquadricObj', 'GLUtesselatorObj', 'GLUtriangulatorObj',
'GLU_TESS_MAX_COORD', '_GLUfuncptr', 'gluBeginCurve', 'gluBeginPolygon',
'gluBeginSurface', 'gluBeginTrim', 'gluBuild1DMipmapLevels',
'gluBuild1DMipmaps', 'gluBuild2DMipmapLevels', 'gluBuild2DMipmaps',
'gluBuild3DMipmapLevels', 'gluBuild3DMipmaps', 'gluCheckExtension',
'gluCylinder', 'gluDeleteNurbsRenderer', 'gluDeleteQuadric', 'gluDeleteTess',
'gluDisk', 'gluEndCurve', 'gluEndPolygon', 'gluEndSurface', 'gluEndTrim',
'gluErrorString', 'gluGetNurbsProperty', 'gluGetString', 'gluGetTessProperty',
'gluLoadSamplingMatrices', 'gluLookAt', 'gluNewNurbsRenderer',
'gluNewQuadric', 'gluNewTess', 'gluNextContour', 'gluNurbsCallback',
'gluNurbsCallbackData', 'gluNurbsCallbackDataEXT', 'gluNurbsCurve',
'gluNurbsProperty', 'gluNurbsSurface', 'gluOrtho2D', 'gluPartialDisk',
'gluPerspective', 'gluPickMatrix', 'gluProject', 'gluPwlCurve',
'gluQuadricCallback', 'gluQuadricDrawStyle', 'gluQuadricNormals',
'gluQuadricOrientation', 'gluQuadricTexture', 'gluScaleImage', 'gluSphere',
'gluTessBeginContour', 'gluTessBeginPolygon', 'gluTessCallback',
'gluTessEndContour', 'gluTessEndPolygon', 'gluTessNormal', 'gluTessProperty',
'gluTessVertex', 'gluUnProject', 'gluUnProject4']
# END GENERATED CONTENT (do not edit above this line)
| bsd-3-clause |
Donkyhotay/MoonPy | zope/i18n/locales/tests/test_docstrings.py | 1 | 1459 | ##############################################################################
#
# Copyright (c) 2004 Zope Corporation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Tests for the ZCML Documentation Module
$Id: test_docstrings.py 38178 2005-08-30 21:50:19Z mj $
"""
import unittest
from zope.testing.doctestunit import DocTestSuite
from zope.i18n.locales.inheritance import AttributeInheritance, NoParentException
class LocaleInheritanceStub(AttributeInheritance):
def __init__(self, nextLocale=None):
self.__nextLocale__ = nextLocale
def getInheritedSelf(self):
if self.__nextLocale__ is None:
raise NoParentException('No parent was specified.')
return self.__nextLocale__
def test_suite():
return unittest.TestSuite((
DocTestSuite('zope.i18n.locales'),
DocTestSuite('zope.i18n.locales.inheritance'),
DocTestSuite('zope.i18n.locales.xmlfactory'),
))
if __name__ == '__main__':
unittest.main()
| gpl-3.0 |
apollo13/ansible | test/integration/targets/old_style_cache_plugins/plugins/cache/redis.py | 83 | 4391 | # (c) 2014, Brian Coca, Josh Drake, et al
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = '''
cache: redis
short_description: Use Redis DB for cache
description:
- This cache uses JSON formatted, per host records saved in Redis.
version_added: "1.9"
requirements:
- redis>=2.4.5 (python lib)
options:
_uri:
description:
- A colon separated string of connection information for Redis.
required: True
env:
- name: ANSIBLE_CACHE_PLUGIN_CONNECTION
ini:
- key: fact_caching_connection
section: defaults
_prefix:
description: User defined prefix to use when creating the DB entries
env:
- name: ANSIBLE_CACHE_PLUGIN_PREFIX
ini:
- key: fact_caching_prefix
section: defaults
_timeout:
default: 86400
description: Expiration timeout for the cache plugin data
env:
- name: ANSIBLE_CACHE_PLUGIN_TIMEOUT
ini:
- key: fact_caching_timeout
section: defaults
type: integer
'''
import time
import json
from ansible import constants as C
from ansible.errors import AnsibleError
from ansible.plugins.cache import BaseCacheModule
try:
from redis import StrictRedis, VERSION
except ImportError:
raise AnsibleError("The 'redis' python module (version 2.4.5 or newer) is required for the redis fact cache, 'pip install redis'")
class CacheModule(BaseCacheModule):
"""
A caching module backed by redis.
Keys are maintained in a zset with their score being the timestamp
when they are inserted. This allows for the usage of 'zremrangebyscore'
    to expire keys. This mechanism is used instead of a pattern-matched
    'scan' for performance.
"""
def __init__(self, *args, **kwargs):
if C.CACHE_PLUGIN_CONNECTION:
connection = C.CACHE_PLUGIN_CONNECTION.split(':')
else:
connection = []
self._timeout = float(C.CACHE_PLUGIN_TIMEOUT)
self._prefix = C.CACHE_PLUGIN_PREFIX
self._cache = {}
self._db = StrictRedis(*connection)
self._keys_set = 'ansible_cache_keys'
def _make_key(self, key):
return self._prefix + key
def get(self, key):
if key not in self._cache:
value = self._db.get(self._make_key(key))
# guard against the key not being removed from the zset;
# this could happen in cases where the timeout value is changed
# between invocations
if value is None:
self.delete(key)
raise KeyError
self._cache[key] = json.loads(value)
return self._cache.get(key)
def set(self, key, value):
value2 = json.dumps(value)
if self._timeout > 0: # a timeout of 0 is handled as meaning 'never expire'
self._db.setex(self._make_key(key), int(self._timeout), value2)
else:
self._db.set(self._make_key(key), value2)
if VERSION[0] == 2:
self._db.zadd(self._keys_set, time.time(), key)
else:
self._db.zadd(self._keys_set, {key: time.time()})
self._cache[key] = value
def _expire_keys(self):
if self._timeout > 0:
expiry_age = time.time() - self._timeout
self._db.zremrangebyscore(self._keys_set, 0, expiry_age)
def keys(self):
self._expire_keys()
return self._db.zrange(self._keys_set, 0, -1)
def contains(self, key):
self._expire_keys()
return (self._db.zrank(self._keys_set, key) is not None)
def delete(self, key):
if key in self._cache:
del self._cache[key]
self._db.delete(self._make_key(key))
self._db.zrem(self._keys_set, key)
def flush(self):
for key in self.keys():
self.delete(key)
def copy(self):
# TODO: there is probably a better way to do this in redis
ret = dict()
for key in self.keys():
ret[key] = self.get(key)
return ret
def __getstate__(self):
return dict()
def __setstate__(self, data):
self.__init__()
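if __name__ == '__main__':
    # Illustrative usage sketch (an addition, not part of the plugin). It
    # assumes a reachable Redis (ANSIBLE_CACHE_PLUGIN_CONNECTION, falling
    # back to localhost:6379) and uses an arbitrary key name.
    cache = CacheModule()
    cache.set('host1', {'ansible_facts': {'os': 'Linux'}})
    print(cache.contains('host1'))  # True until _timeout expires
    print(cache.get('host1'))
    cache.flush()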
| gpl-3.0 |
coberger/DIRAC | Workflow/Modules/UploadOutputs.py | 5 | 3457 | # ##WARNING###WARNING###WARNING###WARNING###WARNING###WARNING###WARNING###WARNING###WARNING###WARNING###WARNING #
# Under development #
# ##WARNING###WARNING###WARNING###WARNING###WARNING###WARNING###WARNING###WARNING###WARNING###WARNING###WARNING #
""" Module to upload specified job output files according to the parameters
defined in the production workflow.
"""
from DIRAC import gLogger
from DIRAC.Workflow.Modules.ModuleBase import ModuleBase, GracefulTermination
class UploadOutputs( ModuleBase ):
#############################################################################
def __init__( self ):
""" c'tor
"""
self.log = gLogger.getSubLogger( "UploadOutputs" )
super( UploadOutputs, self ).__init__( self.log )
self.outputDataStep = ''
self.outputData = []
self.outputList = []
#############################################################################
def _resolveInputVariables( self ):
""" The module parameters are resolved here.
"""
super( UploadOutputs, self )._resolveInputVariables()
# this comes from Job().setOutputData(). Typical for user jobs
if self.workflow_commons.has_key( 'OutputData' ):
self.outputData = self.workflow_commons['OutputData']
if not isinstance( self.outputData, list ): # type( userOutputData ) == type( [] ):
self.outputData = [ i.strip() for i in self.outputData.split( ';' ) ]
# if not present, we use the outputList, which is instead incrementally created based on the single step outputs
# This is more typical for production jobs, that can have many steps linked one after the other
elif self.workflow_commons.has_key( 'outputList' ):
self.outputList = self.workflow_commons['outputList']
else:
raise GracefulTermination( 'Nothing to upload' )
# in case you want to put a mask on the steps
# TODO: add it to the DIRAC API
if self.workflow_commons.has_key( 'outputDataStep' ):
self.outputDataStep = self.workflow_commons['outputDataStep']
# this comes from Job().setOutputData(). Typical for user jobs
if self.workflow_commons.has_key( 'OutputSE' ):
specifiedSE = self.workflow_commons['OutputSE']
      if not isinstance( specifiedSE, list ):
        self.outputSE = [ i.strip() for i in specifiedSE.split( ';' ) ]
      else:
        self.outputSE = specifiedSE
else:
self.log.verbose( 'No OutputSE specified, using default value: %s' % ( ', '.join( self.defaultOutputSE ) ) )
self.outputSE = []
# this comes from Job().setOutputData(). Typical for user jobs
if self.workflow_commons.has_key( 'OutputPath' ):
self.outputPath = self.workflow_commons['OutputPath']
def _initialize( self ):
""" gets the files to upload, check if to upload
"""
# lfnsList = self.__getOutputLFNs( self.outputData ) or outputList?
if not self._checkWFAndStepStatus():
raise GracefulTermination( 'No output data upload attempted' )
def __getOuputLFNs( self, outputList, *args ):
""" This is really VO-specific.
It should be replaced by each VO. Setting an LFN here just as an idea, and for testing purposes.
"""
lfnList = []
for outputFile in outputList:
lfnList.append( '/'.join( [str( x ) for x in args] ) + outputFile )
return lfnList
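  # Example (hypothetical values): __getOuputLFNs( ['/1234_out.txt'],
  # 'lhcb', 'MC', '2016' ) would return [ 'lhcb/MC/2016/1234_out.txt' ].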
def _execute( self ):
""" uploads the files
"""
pass
| gpl-3.0 |
omefire/bitcoin | qa/rpc-tests/rpcbind_test.py | 120 | 5610 | #!/usr/bin/env python2
# Copyright (c) 2014 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
# Test for -rpcbind, as well as -rpcallowip and -rpcconnect
# Add python-bitcoinrpc to module search path:
import os
import sys
import json
import shutil
import subprocess
import tempfile
import traceback
from test_framework.util import *
from test_framework.netutil import *
def run_bind_test(tmpdir, allow_ips, connect_to, addresses, expected):
'''
Start a node with requested rpcallowip and rpcbind parameters,
then try to connect, and check if the set of bound addresses
matches the expected set.
'''
expected = [(addr_to_hex(addr), port) for (addr, port) in expected]
base_args = ['-disablewallet', '-nolisten']
if allow_ips:
base_args += ['-rpcallowip=' + x for x in allow_ips]
binds = ['-rpcbind='+addr for addr in addresses]
nodes = start_nodes(1, tmpdir, [base_args + binds], connect_to)
try:
pid = bitcoind_processes[0].pid
assert_equal(set(get_bind_addrs(pid)), set(expected))
finally:
stop_nodes(nodes)
wait_bitcoinds()
def run_allowip_test(tmpdir, allow_ips, rpchost, rpcport):
'''
    Start a node with rpcallowip, and request getinfo
at a non-localhost IP.
'''
base_args = ['-disablewallet', '-nolisten'] + ['-rpcallowip='+x for x in allow_ips]
nodes = start_nodes(1, tmpdir, [base_args])
try:
# connect to node through non-loopback interface
url = "http://rt:rt@%s:%d" % (rpchost, rpcport,)
node = AuthServiceProxy(url)
node.getinfo()
finally:
node = None # make sure connection will be garbage collected and closed
stop_nodes(nodes)
wait_bitcoinds()
def run_test(tmpdir):
assert(sys.platform == 'linux2') # due to OS-specific network stats queries, this test works only on Linux
# find the first non-loopback interface for testing
non_loopback_ip = None
for name,ip in all_interfaces():
if ip != '127.0.0.1':
non_loopback_ip = ip
break
if non_loopback_ip is None:
assert(not 'This test requires at least one non-loopback IPv4 interface')
print("Using interface %s for testing" % non_loopback_ip)
defaultport = rpc_port(0)
# check default without rpcallowip (IPv4 and IPv6 localhost)
run_bind_test(tmpdir, None, '127.0.0.1', [],
[('127.0.0.1', defaultport), ('::1', defaultport)])
# check default with rpcallowip (IPv6 any)
run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1', [],
[('::0', defaultport)])
# check only IPv4 localhost (explicit)
run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1', ['127.0.0.1'],
[('127.0.0.1', defaultport)])
# check only IPv4 localhost (explicit) with alternative port
run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171'],
[('127.0.0.1', 32171)])
# check only IPv4 localhost (explicit) with multiple alternative ports on same host
run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1:32171', ['127.0.0.1:32171', '127.0.0.1:32172'],
[('127.0.0.1', 32171), ('127.0.0.1', 32172)])
# check only IPv6 localhost (explicit)
run_bind_test(tmpdir, ['[::1]'], '[::1]', ['[::1]'],
[('::1', defaultport)])
# check both IPv4 and IPv6 localhost (explicit)
run_bind_test(tmpdir, ['127.0.0.1'], '127.0.0.1', ['127.0.0.1', '[::1]'],
[('127.0.0.1', defaultport), ('::1', defaultport)])
# check only non-loopback interface
run_bind_test(tmpdir, [non_loopback_ip], non_loopback_ip, [non_loopback_ip],
[(non_loopback_ip, defaultport)])
# Check that with invalid rpcallowip, we are denied
run_allowip_test(tmpdir, [non_loopback_ip], non_loopback_ip, defaultport)
try:
run_allowip_test(tmpdir, ['1.1.1.1'], non_loopback_ip, defaultport)
assert(not 'Connection not denied by rpcallowip as expected')
except ValueError:
pass
def main():
import optparse
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--nocleanup", dest="nocleanup", default=False, action="store_true",
help="Leave bitcoinds and test.* datadir on exit or error")
parser.add_option("--srcdir", dest="srcdir", default="../../src",
help="Source directory containing bitcoind/bitcoin-cli (default: %default%)")
parser.add_option("--tmpdir", dest="tmpdir", default=tempfile.mkdtemp(prefix="test"),
help="Root directory for datadirs")
(options, args) = parser.parse_args()
os.environ['PATH'] = options.srcdir+":"+os.environ['PATH']
check_json_precision()
success = False
nodes = []
try:
print("Initializing test directory "+options.tmpdir)
if not os.path.isdir(options.tmpdir):
os.makedirs(options.tmpdir)
initialize_chain(options.tmpdir)
run_test(options.tmpdir)
success = True
except AssertionError as e:
print("Assertion failed: "+e.message)
except Exception as e:
print("Unexpected exception caught during testing: "+str(e))
traceback.print_tb(sys.exc_info()[2])
if not options.nocleanup:
print("Cleaning up")
wait_bitcoinds()
shutil.rmtree(options.tmpdir)
if success:
print("Tests successful")
sys.exit(0)
else:
print("Failed")
sys.exit(1)
if __name__ == '__main__':
main()
| mit |
xbmc/atv2 | xbmc/lib/libPython/Python/Lib/plat-irix6/AL.py | 16 | 1478 | RATE_48000 = 48000
RATE_44100 = 44100
RATE_32000 = 32000
RATE_22050 = 22050
RATE_16000 = 16000
RATE_11025 = 11025
RATE_8000 = 8000
SAMPFMT_TWOSCOMP= 1
SAMPFMT_FLOAT = 32
SAMPFMT_DOUBLE = 64
SAMPLE_8 = 1
SAMPLE_16 = 2
# SAMPLE_24 is the low 24 bits of a long, sign extended to 32 bits
SAMPLE_24 = 4
MONO = 1
STEREO = 2
QUADRO = 4 # 4CHANNEL is not a legal Python name
INPUT_LINE = 0
INPUT_MIC = 1
INPUT_DIGITAL = 2
MONITOR_OFF = 0
MONITOR_ON = 1
ERROR_NUMBER = 0
ERROR_TYPE = 1
ERROR_LOCATION_LSP = 2
ERROR_LOCATION_MSP = 3
ERROR_LENGTH = 4
ERROR_INPUT_UNDERFLOW = 0
ERROR_OUTPUT_OVERFLOW = 1
# These seem to be not supported anymore:
##HOLD, RELEASE = 0, 1
##ATTAIL, ATHEAD, ATMARK, ATTIME = 0, 1, 2, 3
DEFAULT_DEVICE = 1
INPUT_SOURCE = 0
LEFT_INPUT_ATTEN = 1
RIGHT_INPUT_ATTEN = 2
INPUT_RATE = 3
OUTPUT_RATE = 4
LEFT_SPEAKER_GAIN = 5
RIGHT_SPEAKER_GAIN = 6
INPUT_COUNT = 7
OUTPUT_COUNT = 8
UNUSED_COUNT = 9
SYNC_INPUT_TO_AES = 10
SYNC_OUTPUT_TO_AES = 11
MONITOR_CTL = 12
LEFT_MONITOR_ATTEN = 13
RIGHT_MONITOR_ATTEN = 14
ENUM_VALUE = 0 # only certain values are valid
RANGE_VALUE = 1 # any value in range is valid
| gpl-2.0 |
flavour/cedarbluff | modules/facebook.py | 16 | 20141 | #!/usr/bin/env python
#
# Copyright 2010 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Python client library for the Facebook Platform.
This client library is designed to support the Graph API and the official
Facebook JavaScript SDK, which is the canonical way to implement
Facebook authentication. Read more about the Graph API at
http://developers.facebook.com/docs/api. You can download the Facebook
JavaScript SDK at http://github.com/facebook/connect-js/.
If your application is using Google AppEngine's webapp framework, your
usage of this module might look like this:
user = facebook.get_user_from_cookie(self.request.cookies, key, secret)
if user:
graph = facebook.GraphAPI(user["access_token"])
profile = graph.get_object("me")
friends = graph.get_connections("me", "friends")
"""
import cgi
import time
import urllib
import urllib2
import hashlib
import hmac
import base64
import logging
# Find a JSON parser
try:
import simplejson as json
except ImportError:
try:
from gluon.contrib import simplejson as json
except ImportError:
import json
_parse_json = json.loads
# Find a query string parser
try:
from urlparse import parse_qs
except ImportError:
from cgi import parse_qs
class GraphAPI(object):
"""A client for the Facebook Graph API.
See http://developers.facebook.com/docs/api for complete documentation
for the API.
The Graph API is made up of the objects in Facebook (e.g., people, pages,
events, photos) and the connections between them (e.g., friends,
photo tags, and event RSVPs). This client provides access to those
primitive types in a generic way. For example, given an OAuth access
token, this will fetch the profile of the active user and the list
of the user's friends:
graph = facebook.GraphAPI(access_token)
user = graph.get_object("me")
friends = graph.get_connections(user["id"], "friends")
You can see a list of all of the objects and connections supported
by the API at http://developers.facebook.com/docs/reference/api/.
You can obtain an access token via OAuth or by using the Facebook
JavaScript SDK. See http://developers.facebook.com/docs/authentication/
for details.
If you are using the JavaScript SDK, you can use the
get_user_from_cookie() method below to get the OAuth access token
for the active user from the cookie saved by the SDK.
"""
def __init__(self, access_token=None):
self.access_token = access_token
def get_object(self, id, **args):
"""Fetchs the given object from the graph."""
return self.request(id, args)
def get_object_c(self, id, **args):
"""
Fetches the given object from the graph.
- custom function added by Movuca
"""
return self.request(id, {'fields': 'id,name,email,first_name,last_name,birthday,username,location,bio,gender,website,albums,family,friends,picture'})
def get_objects(self, ids, **args):
"""Fetchs all of the given object from the graph.
We return a map from ID to object. If any of the IDs are invalid,
we raise an exception.
"""
args["ids"] = ",".join(ids)
return self.request("", args)
def get_connections(self, id, connection_name, **args):
"""Fetchs the connections for given object."""
return self.request(id + "/" + connection_name, args)
def put_object(self, parent_object, connection_name, **data):
"""Writes the given object to the graph, connected to the given parent.
For example,
graph.put_object("me", "feed", message="Hello, world")
writes "Hello, world" to the active user's wall. Likewise, this
will comment on the first post of the active user's feed:
feed = graph.get_connections("me", "feed")
post = feed["data"][0]
graph.put_object(post["id"], "comments", message="First!")
See http://developers.facebook.com/docs/api#publishing for all of
the supported writeable objects.
Most write operations require extended permissions. For example,
publishing wall posts requires the "publish_stream" permission. See
http://developers.facebook.com/docs/authentication/ for details about
extended permissions.
"""
assert self.access_token, "Write operations require an access token"
return self.request(parent_object + "/" + connection_name,
post_args=data)
def put_wall_post(self, message, attachment={}, profile_id="me"):
"""Writes a wall post to the given profile's wall.
We default to writing to the authenticated user's wall if no
profile_id is specified.
attachment adds a structured attachment to the status message being
posted to the Wall. It should be a dictionary of the form:
{"name": "Link name"
"link": "http://www.example.com/",
"caption": "{*actor*} posted a new review",
"description": "This is a longer description of the attachment",
"picture": "http://www.example.com/thumbnail.jpg"}
"""
return self.put_object(profile_id, "feed", message=message,
**attachment)
def put_comment(self, object_id, message):
"""Writes the given comment on the given post."""
return self.put_object(object_id, "comments", message=message)
def put_like(self, object_id):
"""Likes the given post."""
return self.put_object(object_id, "likes")
def delete_object(self, id):
"""Deletes the object with the given ID from the graph."""
self.request(id, post_args={"method": "delete"})
def put_photo(self, image, message=None, album_id=None, **kwargs):
"""Uploads an image using multipart/form-data
image=File like object for the image
message=Caption for your image
album_id=None posts to /me/photos which uses or creates and uses
an album for your application.
"""
object_id = album_id or "me"
#it would have been nice to reuse self.request;
#but multipart is messy in urllib
post_args = {
'access_token': self.access_token,
'source': image,
'message': message
}
post_args.update(kwargs)
content_type, body = self._encode_multipart_form(post_args)
req = urllib2.Request(("https://graph.facebook.com/%s/photos" %
object_id),
data=body)
req.add_header('Content-Type', content_type)
try:
data = urllib2.urlopen(req).read()
#For Python 3 use this:
#except urllib2.HTTPError as e:
except urllib2.HTTPError, e:
data = e.read() # Facebook sends OAuth errors as 400, and urllib2
# throws an exception, we want a GraphAPIError
try:
response = _parse_json(data)
# Raise an error if we got one, but not if Facebook just
# gave us a Bool value
if (response and isinstance(response, dict) and
response.get("error")):
raise GraphAPIError(response)
except ValueError:
response = data
return response
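# Editor's sketch (illustrative; the file name is hypothetical): uploading
# an image with the method above.
#   image = open("photo.jpg", "rb")
#   graph.put_photo(image, message="A caption")
#   image.close()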
# based on: http://code.activestate.com/recipes/146306/
def _encode_multipart_form(self, fields):
"""Fields are a dict of form name-> value
For files, value should be a file object.
Other file-like objects might work and a fake name will be chosen.
Return (content_type, body) ready for httplib.HTTP instance
"""
BOUNDARY = '----------ThIs_Is_tHe_bouNdaRY_$'
CRLF = '\r\n'
L = []
for (key, value) in fields.items():
logging.debug("Encoding %s, (%s)%s" % (key, type(value), value))
if not value:
continue
L.append('--' + BOUNDARY)
if hasattr(value, 'read') and callable(value.read):
filename = getattr(value, 'name', '%s.jpg' % key)
L.append(('Content-Disposition: form-data;'
'name="%s";'
'filename="%s"') % (key, filename))
L.append('Content-Type: image/jpeg')
value = value.read()
logging.debug(type(value))
else:
L.append('Content-Disposition: form-data; name="%s"' % key)
L.append('')
if isinstance(value, unicode):
logging.debug("Convert to ascii")
value = value.encode('ascii')
L.append(value)
L.append('--' + BOUNDARY + '--')
L.append('')
body = CRLF.join(L)
content_type = 'multipart/form-data; boundary=%s' % BOUNDARY
return content_type, body
def request(self, path, args=None, post_args=None):
"""Fetches the given path in the Graph API.
We translate args to a valid query string. If post_args is given,
we send a POST request to the given path with the given arguments.
"""
args = args or {}
if self.access_token:
if post_args is not None:
post_args["access_token"] = self.access_token
else:
args["access_token"] = self.access_token
post_data = None if post_args is None else urllib.urlencode(post_args)
try:
file = urllib2.urlopen("https://graph.facebook.com/" + path + "?" +
urllib.urlencode(args), post_data)
except urllib2.HTTPError, e:
response = _parse_json(e.read())
raise GraphAPIError(response)
try:
fileInfo = file.info()
if fileInfo.maintype == 'text':
response = _parse_json(file.read())
elif fileInfo.maintype == 'image':
mimetype = fileInfo['content-type']
response = {
"data": file.read(),
"mime-type": mimetype,
"url": file.url,
}
else:
raise GraphAPIError('Maintype was not text or image')
finally:
file.close()
if response and isinstance(response, dict) and response.get("error"):
raise GraphAPIError(response["error"]["type"],
response["error"]["message"])
return response
def api_request(self, path, args=None, post_args=None):
"""Fetches the given path in the Graph API.
We translate args to a valid query string. If post_args is given,
we send a POST request to the given path with the given arguments.
"""
args = args or {}
if self.access_token:
if post_args is not None:
post_args["access_token"] = self.access_token
else:
args["access_token"] = self.access_token
# api_key is never assigned in __init__, so testing the attribute
# directly would raise AttributeError; use getattr with a default.
if getattr(self, 'api_key', None):
if post_args is not None:
post_args["api_key"] = self.api_key
else:
args["api_key"] = self.api_key
if post_args is not None:
post_args["format"] = "json-strings"
else:
args["format"] = "json-strings"
post_data = None if post_args is None else urllib.urlencode(post_args)
file = urllib.urlopen("https://api.facebook.com/method/" + path + "?" +
urllib.urlencode(args), post_data)
try:
response = _parse_json(file.read())
finally:
file.close()
if response and response.get("error"):
raise GraphAPIError(response)
return response
def fql(self, query, args=None, post_args=None):
"""FQL query.
Two reasons to have this method:
1. Graph api does not expose some info fields of a user, e.g.
a user's networks/affiliations, we have to fall back to old api.
2. FQL is a strong tool.
Example query: "SELECT affiliations FROM user WHERE uid = me()"
"""
args = args or {}
if self.access_token:
if post_args is not None:
post_args["access_token"] = self.access_token
else:
args["access_token"] = self.access_token
post_data = None if post_args is None else urllib.urlencode(post_args)
"""Check if query is a dict and
use the multiquery method
else use single query
"""
if not isinstance(query, basestring):
args["queries"] = query
fql_method = 'fql.multiquery'
else:
args["query"] = query
fql_method = 'fql.query'
args["format"] = "json"
file = urllib2.urlopen("https://api.facebook.com/method/" +
fql_method + "?" + urllib.urlencode(args),
post_data)
try:
content = file.read()
response = _parse_json(content)
# Returns a list on success, a dictionary on failure
if type(response) is dict and "error_code" in response:
raise GraphAPIError(response)
except Exception, e:
raise e
finally:
file.close()
return response
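# Editor's sketch (illustrative; the exact multiquery payload format is
# an assumption): a single query and a multiquery call to the method
# above, which switches on whether query is a string.
#   graph.fql("SELECT affiliations FROM user WHERE uid = me()")
#   graph.fql({"q1": "SELECT uid2 FROM friend WHERE uid1 = me()"})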
def extend_access_token(self, app_id, app_secret):
"""
Extends the expiration time of a valid OAuth access token. See
<https://developers.facebook.com/roadmap/offline-access-removal/
#extend_token>
"""
args = {
"client_id": app_id,
"client_secret": app_secret,
"grant_type": "fb_exchange_token",
"fb_exchange_token": self.access_token,
}
response = urllib.urlopen("https://graph.facebook.com/oauth/"
"access_token?" + urllib.urlencode(args)).read()
query_str = parse_qs(response)
if "access_token" in query_str:
result = {"access_token": query_str["access_token"][0]}
if "expires" in query_str:
result["expires"] = query_str["expires"][0]
return result
else:
response = json.loads(response)
raise GraphAPIError(response)
class GraphAPIError(Exception):
def __init__(self, result):
#Exception.__init__(self, message)
#self.type = type
self.result = result
try:
self.type = result["error_code"]
except:
self.type = ""
# OAuth 2.0 Draft 10
try:
self.message = result["error_description"]
except:
# OAuth 2.0 Draft 00
try:
self.message = result["error"]["message"]
except:
# REST server style
try:
self.message = result["error_msg"]
except:
self.message = "Unknown Error. Check result."
Exception.__init__(self, self.message)
def get_user_from_cookie(cookies, app_id, app_secret):
"""Parses the cookie set by the official Facebook JavaScript SDK.
cookies should be a dictionary-like object mapping cookie names to
cookie values.
If the user is logged in via Facebook, we return a dictionary with
the keys "uid" and "access_token". The former is the user's
Facebook ID, and the latter can be used to make authenticated
requests to the Graph API. If the user is not logged in, we
return None.
Download the official Facebook JavaScript SDK at
http://github.com/facebook/connect-js/. Read more about Facebook
authentication at http://developers.facebook.com/docs/authentication/.
"""
cookie = cookies.get("fbsr_" + app_id, "")
if not cookie:
return None
parsed_request = parse_signed_request(cookie, app_secret)
try:
result = get_access_token_from_code(parsed_request["code"], "",
app_id, app_secret)
except GraphAPIError:
return None
result["uid"] = parsed_request["user_id"]
return result
def parse_signed_request(signed_request, app_secret):
""" Return dictionary with signed request data.
We return a dictionary containing the information in the
signed_request. This includes a user_id if the user has authorised
your application, as well as any information requested.
If the signed_request is malformed or corrupted, False is returned.
"""
try:
l = signed_request.split('.', 2)
encoded_sig = str(l[0])
payload = str(l[1])
sig = base64.urlsafe_b64decode(encoded_sig + "=" *
((4 - len(encoded_sig) % 4) % 4))
data = base64.urlsafe_b64decode(payload + "=" *
((4 - len(payload) % 4) % 4))
except IndexError:
# Signed request was malformed.
return False
except TypeError:
# Signed request had a corrupted payload.
return False
data = _parse_json(data)
if data.get('algorithm', '').upper() != 'HMAC-SHA256':
return False
expected_sig = hmac.new(app_secret,
msg=payload,
digestmod=hashlib.sha256).digest()
if sig != expected_sig:
return False
return data
def auth_url(app_id, canvas_url, perms=None):
url = "https://www.facebook.com/dialog/oauth?"
kvps = {'client_id': app_id, 'redirect_uri': canvas_url}
if perms:
kvps['scope'] = ",".join(perms)
return url + urllib.urlencode(kvps)
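# Editor's sketch (hypothetical app id and redirect URI): building a
# login dialog URL with the helper above.
#   auth_url("123456", "https://example.com/canvas/", perms=["email"])
#   -> "https://www.facebook.com/dialog/oauth?client_id=123456&..."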
def get_access_token_from_code(code, redirect_uri, app_id, app_secret):
"""
Get a user-specific access token from the "code" returned from a Facebook
OAuth dialog. Returns a dict containing the access token and its expiration
date (if applicable).
"""
args = {
"code": code,
"redirect_uri": redirect_uri,
"client_id": app_id,
"client_secret": app_secret,
}
# We would use GraphAPI.request() here, except for the fact that the
# response is a key-value pair, and not JSON.
response = urllib.urlopen("https://graph.facebook.com/oauth/access_token" +
"?" + urllib.urlencode(args)).read()
query_str = parse_qs(response)
if "access_token" in query_str:
result = {"access_token": query_str["access_token"][0]}
if "expires" in query_str:
result["expires"] = query_str["expires"][0]
return result
else:
response = json.loads(response)
raise GraphAPIError(response)
def get_app_access_token(app_id, app_secret):
"""
Get the access_token for the app that can be used for insights and
creating test users.
app_id = retrieved from the developer page
app_secret = retrieved from the developer page
returns the application access_token
"""
# Get an app access token
args = {'grant_type': 'client_credentials',
'client_id': app_id,
'client_secret': app_secret}
file = urllib2.urlopen("https://graph.facebook.com/oauth/access_token?" +
urllib.urlencode(args))
try:
result = file.read().split("=")[1]
finally:
file.close()
return result
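# Editor's sketch (hypothetical credentials): fetching an application
# access token with the helper above.
#   token = get_app_access_token("123456", "app_secret_value")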
| mit |
prefetchnta/questlab | bin/x64bin/python/36/Lib/unittest/case.py | 1 | 58415 | """Test case implementation"""
import sys
import functools
import difflib
import logging
import pprint
import re
import warnings
import collections
import contextlib
import traceback
from . import result
from .util import (strclass, safe_repr, _count_diff_all_purpose,
_count_diff_hashable, _common_shorten_repr)
__unittest = True
_subtest_msg_sentinel = object()
DIFF_OMITTED = ('\nDiff is %s characters long. '
'Set self.maxDiff to None to see it.')
class SkipTest(Exception):
"""
Raise this exception in a test to skip it.
Usually you can use TestCase.skipTest() or one of the skipping decorators
instead of raising this directly.
"""
class _ShouldStop(Exception):
"""
The test should stop.
"""
class _UnexpectedSuccess(Exception):
"""
The test was supposed to fail, but it didn't!
"""
class _Outcome(object):
def __init__(self, result=None):
self.expecting_failure = False
self.result = result
self.result_supports_subtests = hasattr(result, "addSubTest")
self.success = True
self.skipped = []
self.expectedFailure = None
self.errors = []
@contextlib.contextmanager
def testPartExecutor(self, test_case, isTest=False):
old_success = self.success
self.success = True
try:
yield
except KeyboardInterrupt:
raise
except SkipTest as e:
self.success = False
self.skipped.append((test_case, str(e)))
except _ShouldStop:
pass
except:
exc_info = sys.exc_info()
if self.expecting_failure:
self.expectedFailure = exc_info
else:
self.success = False
self.errors.append((test_case, exc_info))
# explicitly break a reference cycle:
# exc_info -> frame -> exc_info
exc_info = None
else:
if self.result_supports_subtests and self.success:
self.errors.append((test_case, None))
finally:
self.success = self.success and old_success
def _id(obj):
return obj
def skip(reason):
"""
Unconditionally skip a test.
"""
def decorator(test_item):
if not isinstance(test_item, type):
@functools.wraps(test_item)
def skip_wrapper(*args, **kwargs):
raise SkipTest(reason)
test_item = skip_wrapper
test_item.__unittest_skip__ = True
test_item.__unittest_skip_why__ = reason
return test_item
return decorator
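# Editor's sketch (illustrative; class and method names are hypothetical):
# applying the skipping decorators defined in this module.
#   class MyTests(TestCase):
#       @skip("demonstrating skipping")
#       def test_nothing(self):
#           self.fail("never runs")
#       @skipUnless(sys.platform.startswith("linux"), "requires Linux")
#       def test_linux_only(self):
#           pass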
def skipIf(condition, reason):
"""
Skip a test if the condition is true.
"""
if condition:
return skip(reason)
return _id
def skipUnless(condition, reason):
"""
Skip a test unless the condition is true.
"""
if not condition:
return skip(reason)
return _id
def expectedFailure(test_item):
test_item.__unittest_expecting_failure__ = True
return test_item
def _is_subtype(expected, basetype):
if isinstance(expected, tuple):
return all(_is_subtype(e, basetype) for e in expected)
return isinstance(expected, type) and issubclass(expected, basetype)
class _BaseTestCaseContext:
def __init__(self, test_case):
self.test_case = test_case
def _raiseFailure(self, standardMsg):
msg = self.test_case._formatMessage(self.msg, standardMsg)
raise self.test_case.failureException(msg)
class _AssertRaisesBaseContext(_BaseTestCaseContext):
def __init__(self, expected, test_case, expected_regex=None):
_BaseTestCaseContext.__init__(self, test_case)
self.expected = expected
self.test_case = test_case
if expected_regex is not None:
expected_regex = re.compile(expected_regex)
self.expected_regex = expected_regex
self.obj_name = None
self.msg = None
def handle(self, name, args, kwargs):
"""
If args is empty, assertRaises/Warns is being used as a
context manager, so check for a 'msg' kwarg and return self.
If args is not empty, call a callable passing positional and keyword
arguments.
"""
try:
if not _is_subtype(self.expected, self._base_type):
raise TypeError('%s() arg 1 must be %s' %
(name, self._base_type_str))
if args and args[0] is None:
warnings.warn("callable is None",
DeprecationWarning, 3)
args = ()
if not args:
self.msg = kwargs.pop('msg', None)
if kwargs:
warnings.warn('%r is an invalid keyword argument for '
'this function' % next(iter(kwargs)),
DeprecationWarning, 3)
return self
callable_obj, *args = args
try:
self.obj_name = callable_obj.__name__
except AttributeError:
self.obj_name = str(callable_obj)
with self:
callable_obj(*args, **kwargs)
finally:
# bpo-23890: manually break a reference cycle
self = None
class _AssertRaisesContext(_AssertRaisesBaseContext):
"""A context manager used to implement TestCase.assertRaises* methods."""
_base_type = BaseException
_base_type_str = 'an exception type or tuple of exception types'
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, tb):
if exc_type is None:
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
if self.obj_name:
self._raiseFailure("{} not raised by {}".format(exc_name,
self.obj_name))
else:
self._raiseFailure("{} not raised".format(exc_name))
else:
traceback.clear_frames(tb)
if not issubclass(exc_type, self.expected):
# let unexpected exceptions pass through
return False
# store exception, without traceback, for later retrieval
self.exception = exc_value.with_traceback(None)
if self.expected_regex is None:
return True
expected_regex = self.expected_regex
if not expected_regex.search(str(exc_value)):
self._raiseFailure('"{}" does not match "{}"'.format(
expected_regex.pattern, str(exc_value)))
return True
class _AssertWarnsContext(_AssertRaisesBaseContext):
"""A context manager used to implement TestCase.assertWarns* methods."""
_base_type = Warning
_base_type_str = 'a warning type or tuple of warning types'
def __enter__(self):
# The __warningregistry__'s need to be in a pristine state for tests
# to work properly.
for v in sys.modules.values():
if getattr(v, '__warningregistry__', None):
v.__warningregistry__ = {}
self.warnings_manager = warnings.catch_warnings(record=True)
self.warnings = self.warnings_manager.__enter__()
warnings.simplefilter("always", self.expected)
return self
def __exit__(self, exc_type, exc_value, tb):
self.warnings_manager.__exit__(exc_type, exc_value, tb)
if exc_type is not None:
# let unexpected exceptions pass through
return
try:
exc_name = self.expected.__name__
except AttributeError:
exc_name = str(self.expected)
first_matching = None
for m in self.warnings:
w = m.message
if not isinstance(w, self.expected):
continue
if first_matching is None:
first_matching = w
if (self.expected_regex is not None and
not self.expected_regex.search(str(w))):
continue
# store warning for later retrieval
self.warning = w
self.filename = m.filename
self.lineno = m.lineno
return
# Now we simply try to choose a helpful failure message
if first_matching is not None:
self._raiseFailure('"{}" does not match "{}"'.format(
self.expected_regex.pattern, str(first_matching)))
if self.obj_name:
self._raiseFailure("{} not triggered by {}".format(exc_name,
self.obj_name))
else:
self._raiseFailure("{} not triggered".format(exc_name))
_LoggingWatcher = collections.namedtuple("_LoggingWatcher",
["records", "output"])
class _CapturingHandler(logging.Handler):
"""
A logging handler capturing all (raw and formatted) logging output.
"""
def __init__(self):
logging.Handler.__init__(self)
self.watcher = _LoggingWatcher([], [])
def flush(self):
pass
def emit(self, record):
self.watcher.records.append(record)
msg = self.format(record)
self.watcher.output.append(msg)
class _AssertLogsContext(_BaseTestCaseContext):
"""A context manager used to implement TestCase.assertLogs()."""
LOGGING_FORMAT = "%(levelname)s:%(name)s:%(message)s"
def __init__(self, test_case, logger_name, level):
_BaseTestCaseContext.__init__(self, test_case)
self.logger_name = logger_name
if level:
self.level = logging._nameToLevel.get(level, level)
else:
self.level = logging.INFO
self.msg = None
def __enter__(self):
if isinstance(self.logger_name, logging.Logger):
logger = self.logger = self.logger_name
else:
logger = self.logger = logging.getLogger(self.logger_name)
formatter = logging.Formatter(self.LOGGING_FORMAT)
handler = _CapturingHandler()
handler.setFormatter(formatter)
self.watcher = handler.watcher
self.old_handlers = logger.handlers[:]
self.old_level = logger.level
self.old_propagate = logger.propagate
logger.handlers = [handler]
logger.setLevel(self.level)
logger.propagate = False
return handler.watcher
def __exit__(self, exc_type, exc_value, tb):
self.logger.handlers = self.old_handlers
self.logger.propagate = self.old_propagate
self.logger.setLevel(self.old_level)
if exc_type is not None:
# let unexpected exceptions pass through
return False
if len(self.watcher.records) == 0:
self._raiseFailure(
"no logs of level {} or higher triggered on {}"
.format(logging.getLevelName(self.level), self.logger.name))
class TestCase(object):
"""A class whose instances are single test cases.
By default, the test code itself should be placed in a method named
'runTest'.
If the fixture may be used for many test cases, create as
many test methods as are needed. When instantiating such a TestCase
subclass, specify in the constructor arguments the name of the test method
that the instance is to execute.
Test authors should subclass TestCase for their own tests. Construction
and deconstruction of the test's environment ('fixture') can be
implemented by overriding the 'setUp' and 'tearDown' methods respectively.
If it is necessary to override the __init__ method, the base class
__init__ method must always be called. It is important that subclasses
should not change the signature of their __init__ method, since instances
of the classes are instantiated automatically by parts of the framework
in order to be run.
When subclassing TestCase, you can set these attributes:
* failureException: determines which exception will be raised when
the instance's assertion methods fail; test methods raising this
exception will be deemed to have 'failed' rather than 'errored'.
* longMessage: determines whether long messages (including repr of
objects used in assert methods) will be printed on failure in *addition*
to any explicit message passed.
* maxDiff: sets the maximum length of a diff in failure messages
by assert methods using difflib. It is looked up as an instance
attribute so can be configured by individual tests if required.
"""
failureException = AssertionError
longMessage = True
maxDiff = 80*8
# If a string is longer than _diffThreshold, use normal comparison instead
# of difflib. See #11763.
_diffThreshold = 2**16
# Attribute used by TestSuite for classSetUp
_classSetupFailed = False
def __init__(self, methodName='runTest'):
"""Create an instance of the class that will use the named test
method when executed. Raises a ValueError if the instance does
not have a method with the specified name.
"""
self._testMethodName = methodName
self._outcome = None
self._testMethodDoc = 'No test'
try:
testMethod = getattr(self, methodName)
except AttributeError:
if methodName != 'runTest':
# we allow instantiation with no explicit method name
# but not an *incorrect* or missing method name
raise ValueError("no such test method in %s: %s" %
(self.__class__, methodName))
else:
self._testMethodDoc = testMethod.__doc__
self._cleanups = []
self._subtest = None
# Map types to custom assertEqual functions that will compare
# instances of said type in more detail to generate a more useful
# error message.
self._type_equality_funcs = {}
self.addTypeEqualityFunc(dict, 'assertDictEqual')
self.addTypeEqualityFunc(list, 'assertListEqual')
self.addTypeEqualityFunc(tuple, 'assertTupleEqual')
self.addTypeEqualityFunc(set, 'assertSetEqual')
self.addTypeEqualityFunc(frozenset, 'assertSetEqual')
self.addTypeEqualityFunc(str, 'assertMultiLineEqual')
def addTypeEqualityFunc(self, typeobj, function):
"""Add a type specific assertEqual style function to compare a type.
This method is for use by TestCase subclasses that need to register
their own type equality functions to provide nicer error messages.
Args:
typeobj: The data type to call this function on when both values
are of the same type in assertEqual().
function: The callable taking two arguments and an optional
msg= argument that raises self.failureException with a
useful error message when the two arguments are not equal.
"""
self._type_equality_funcs[typeobj] = function
def addCleanup(self, function, *args, **kwargs):
"""Add a function, with arguments, to be called when the test is
completed. Functions added are called on a LIFO basis and are
called after tearDown on test failure or success.
Cleanup items are called even if setUp fails (unlike tearDown)."""
self._cleanups.append((function, args, kwargs))
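# Editor's sketch (illustrative; assumes tempfile and shutil imports):
# cleanups run LIFO after tearDown, even when setUp fails part-way.
#   def setUp(self):
#       self.workdir = tempfile.mkdtemp()
#       self.addCleanup(shutil.rmtree, self.workdir)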
def setUp(self):
"Hook method for setting up the test fixture before exercising it."
pass
def tearDown(self):
"Hook method for deconstructing the test fixture after testing it."
pass
@classmethod
def setUpClass(cls):
"Hook method for setting up class fixture before running tests in the class."
@classmethod
def tearDownClass(cls):
"Hook method for deconstructing the class fixture after running all tests in the class."
def countTestCases(self):
return 1
def defaultTestResult(self):
return result.TestResult()
def shortDescription(self):
"""Returns a one-line description of the test, or None if no
description has been provided.
The default implementation of this method returns the first line of
the specified test method's docstring.
"""
doc = self._testMethodDoc
return doc and doc.split("\n")[0].strip() or None
def id(self):
return "%s.%s" % (strclass(self.__class__), self._testMethodName)
def __eq__(self, other):
if type(self) is not type(other):
return NotImplemented
return self._testMethodName == other._testMethodName
def __hash__(self):
return hash((type(self), self._testMethodName))
def __str__(self):
return "%s (%s)" % (self._testMethodName, strclass(self.__class__))
def __repr__(self):
return "<%s testMethod=%s>" % \
(strclass(self.__class__), self._testMethodName)
def _addSkip(self, result, test_case, reason):
addSkip = getattr(result, 'addSkip', None)
if addSkip is not None:
addSkip(test_case, reason)
else:
warnings.warn("TestResult has no addSkip method, skips not reported",
RuntimeWarning, 2)
result.addSuccess(test_case)
@contextlib.contextmanager
def subTest(self, msg=_subtest_msg_sentinel, **params):
"""Return a context manager that will return the enclosed block
of code in a subtest identified by the optional message and
keyword parameters. A failure in the subtest marks the test
case as failed but resumes execution at the end of the enclosed
block, allowing further test code to be executed.
"""
if self._outcome is None or not self._outcome.result_supports_subtests:
yield
return
parent = self._subtest
if parent is None:
params_map = collections.ChainMap(params)
else:
params_map = parent.params.new_child(params)
self._subtest = _SubTest(self, msg, params_map)
try:
with self._outcome.testPartExecutor(self._subtest, isTest=True):
yield
if not self._outcome.success:
result = self._outcome.result
if result is not None and result.failfast:
raise _ShouldStop
elif self._outcome.expectedFailure:
# If the test is expecting a failure, we really want to
# stop now and register the expected failure.
raise _ShouldStop
finally:
self._subtest = parent
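# Editor's sketch (illustrative): parameterised checks with the context
# manager above; each failing parameter is recorded without stopping the
# loop.
#   def test_even(self):
#       for i in range(6):
#           with self.subTest(i=i):
#               self.assertEqual(i % 2, 0)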
def _feedErrorsToResult(self, result, errors):
for test, exc_info in errors:
if isinstance(test, _SubTest):
result.addSubTest(test.test_case, test, exc_info)
elif exc_info is not None:
if issubclass(exc_info[0], self.failureException):
result.addFailure(test, exc_info)
else:
result.addError(test, exc_info)
def _addExpectedFailure(self, result, exc_info):
try:
addExpectedFailure = result.addExpectedFailure
except AttributeError:
warnings.warn("TestResult has no addExpectedFailure method, reporting as passes",
RuntimeWarning)
result.addSuccess(self)
else:
addExpectedFailure(self, exc_info)
def _addUnexpectedSuccess(self, result):
try:
addUnexpectedSuccess = result.addUnexpectedSuccess
except AttributeError:
warnings.warn("TestResult has no addUnexpectedSuccess method, reporting as failure",
RuntimeWarning)
# We need to pass an actual exception and traceback to addFailure,
# otherwise the legacy result can choke.
try:
raise _UnexpectedSuccess from None
except _UnexpectedSuccess:
result.addFailure(self, sys.exc_info())
else:
addUnexpectedSuccess(self)
def run(self, result=None):
orig_result = result
if result is None:
result = self.defaultTestResult()
startTestRun = getattr(result, 'startTestRun', None)
if startTestRun is not None:
startTestRun()
result.startTest(self)
testMethod = getattr(self, self._testMethodName)
if (getattr(self.__class__, "__unittest_skip__", False) or
getattr(testMethod, "__unittest_skip__", False)):
# If the class or method was skipped.
try:
skip_why = (getattr(self.__class__, '__unittest_skip_why__', '')
or getattr(testMethod, '__unittest_skip_why__', ''))
self._addSkip(result, self, skip_why)
finally:
result.stopTest(self)
return
expecting_failure_method = getattr(testMethod,
"__unittest_expecting_failure__", False)
expecting_failure_class = getattr(self,
"__unittest_expecting_failure__", False)
expecting_failure = expecting_failure_class or expecting_failure_method
outcome = _Outcome(result)
try:
self._outcome = outcome
with outcome.testPartExecutor(self):
self.setUp()
if outcome.success:
outcome.expecting_failure = expecting_failure
with outcome.testPartExecutor(self, isTest=True):
testMethod()
outcome.expecting_failure = False
with outcome.testPartExecutor(self):
self.tearDown()
self.doCleanups()
for test, reason in outcome.skipped:
self._addSkip(result, test, reason)
self._feedErrorsToResult(result, outcome.errors)
if outcome.success:
if expecting_failure:
if outcome.expectedFailure:
self._addExpectedFailure(result, outcome.expectedFailure)
else:
self._addUnexpectedSuccess(result)
else:
result.addSuccess(self)
return result
finally:
result.stopTest(self)
if orig_result is None:
stopTestRun = getattr(result, 'stopTestRun', None)
if stopTestRun is not None:
stopTestRun()
# explicitly break reference cycles:
# outcome.errors -> frame -> outcome -> outcome.errors
# outcome.expectedFailure -> frame -> outcome -> outcome.expectedFailure
outcome.errors.clear()
outcome.expectedFailure = None
# clear the outcome, no more needed
self._outcome = None
def doCleanups(self):
"""Execute all cleanup functions. Normally called for you after
tearDown."""
outcome = self._outcome or _Outcome()
while self._cleanups:
function, args, kwargs = self._cleanups.pop()
with outcome.testPartExecutor(self):
function(*args, **kwargs)
# return this for backwards compatibility
# even though we no longer use it internally
return outcome.success
def __call__(self, *args, **kwds):
return self.run(*args, **kwds)
def debug(self):
"""Run the test without collecting errors in a TestResult"""
self.setUp()
getattr(self, self._testMethodName)()
self.tearDown()
while self._cleanups:
function, args, kwargs = self._cleanups.pop(-1)
function(*args, **kwargs)
def skipTest(self, reason):
"""Skip this test."""
raise SkipTest(reason)
def fail(self, msg=None):
"""Fail immediately, with the given message."""
raise self.failureException(msg)
def assertFalse(self, expr, msg=None):
"""Check that the expression is false."""
if expr:
msg = self._formatMessage(msg, "%s is not false" % safe_repr(expr))
raise self.failureException(msg)
def assertTrue(self, expr, msg=None):
"""Check that the expression is true."""
if not expr:
msg = self._formatMessage(msg, "%s is not true" % safe_repr(expr))
raise self.failureException(msg)
def _formatMessage(self, msg, standardMsg):
"""Honour the longMessage attribute when generating failure messages.
If longMessage is False this means:
* Use only an explicit message if it is provided
* Otherwise use the standard message for the assert
If longMessage is True:
* Use the standard message
* If an explicit message is provided, plus ' : ' and the explicit message
"""
if not self.longMessage:
return msg or standardMsg
if msg is None:
return standardMsg
try:
# don't switch to '{}' formatting in Python 2.X
# it changes the way unicode input is handled
return '%s : %s' % (standardMsg, msg)
except UnicodeDecodeError:
return '%s : %s' % (safe_repr(standardMsg), safe_repr(msg))
def assertRaises(self, expected_exception, *args, **kwargs):
"""Fail unless an exception of class expected_exception is raised
by the callable when invoked with specified positional and
keyword arguments. If a different type of exception is
raised, it will not be caught, and the test case will be
deemed to have suffered an error, exactly as for an
unexpected exception.
If called with the callable and arguments omitted, will return a
context object used like this::
with self.assertRaises(SomeException):
do_something()
An optional keyword argument 'msg' can be provided when assertRaises
is used as a context object.
The context manager keeps a reference to the exception as
the 'exception' attribute. This allows you to inspect the
exception after the assertion::
with self.assertRaises(SomeException) as cm:
do_something()
the_exception = cm.exception
self.assertEqual(the_exception.error_code, 3)
"""
context = _AssertRaisesContext(expected_exception, self)
try:
return context.handle('assertRaises', args, kwargs)
finally:
# bpo-23890: manually break a reference cycle
context = None
def assertWarns(self, expected_warning, *args, **kwargs):
"""Fail unless a warning of class warnClass is triggered
by the callable when invoked with specified positional and
keyword arguments. If a different type of warning is
triggered, it will not be handled: depending on the other
warning filtering rules in effect, it might be silenced, printed
out, or raised as an exception.
If called with the callable and arguments omitted, will return a
context object used like this::
with self.assertWarns(SomeWarning):
do_something()
An optional keyword argument 'msg' can be provided when assertWarns
is used as a context object.
The context manager keeps a reference to the first matching
warning as the 'warning' attribute; similarly, the 'filename'
and 'lineno' attributes give you information about the line
of Python code from which the warning was triggered.
This allows you to inspect the warning after the assertion::
with self.assertWarns(SomeWarning) as cm:
do_something()
the_warning = cm.warning
self.assertEqual(the_warning.some_attribute, 147)
"""
context = _AssertWarnsContext(expected_warning, self)
return context.handle('assertWarns', args, kwargs)
def assertLogs(self, logger=None, level=None):
"""Fail unless a log message of level *level* or higher is emitted
on *logger_name* or its children. If omitted, *level* defaults to
INFO and *logger* defaults to the root logger.
This method must be used as a context manager, and will yield
a recording object with two attributes: `output` and `records`.
At the end of the context manager, the `output` attribute will
be a list of the matching formatted log messages and the
`records` attribute will be a list of the corresponding LogRecord
objects.
Example::
with self.assertLogs('foo', level='INFO') as cm:
logging.getLogger('foo').info('first message')
logging.getLogger('foo.bar').error('second message')
self.assertEqual(cm.output, ['INFO:foo:first message',
'ERROR:foo.bar:second message'])
"""
return _AssertLogsContext(self, logger, level)
def _getAssertEqualityFunc(self, first, second):
"""Get a detailed comparison function for the types of the two args.
Returns: A callable accepting (first, second, msg=None) that will
raise a failure exception if first != second with a useful human
readable error message for those types.
"""
#
# NOTE(gregory.p.smith): I considered isinstance(first, type(second))
# and vice versa. I opted for the conservative approach in case
# subclasses are not intended to be compared in detail to their super
# class instances using a type equality func. This means testing
# subtypes won't automagically use the detailed comparison. Callers
# should use their type specific assertSpamEqual method to compare
# subclasses if the detailed comparison is desired and appropriate.
# See the discussion in http://bugs.python.org/issue2578.
#
if type(first) is type(second):
asserter = self._type_equality_funcs.get(type(first))
if asserter is not None:
if isinstance(asserter, str):
asserter = getattr(self, asserter)
return asserter
return self._baseAssertEqual
def _baseAssertEqual(self, first, second, msg=None):
"""The default assertEqual implementation, not type specific."""
if not first == second:
standardMsg = '%s != %s' % _common_shorten_repr(first, second)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertEqual(self, first, second, msg=None):
"""Fail if the two objects are unequal as determined by the '=='
operator.
"""
assertion_func = self._getAssertEqualityFunc(first, second)
assertion_func(first, second, msg=msg)
def assertNotEqual(self, first, second, msg=None):
"""Fail if the two objects are equal as determined by the '!='
operator.
"""
if not first != second:
msg = self._formatMessage(msg, '%s == %s' % (safe_repr(first),
safe_repr(second)))
raise self.failureException(msg)
def assertAlmostEqual(self, first, second, places=None, msg=None,
delta=None):
"""Fail if the two objects are unequal as determined by their
difference rounded to the given number of decimal places
(default 7) and comparing to zero, or by comparing that the
difference between the two objects is more than the given
delta.
Note that decimal places (from zero) are usually not the same
as significant digits (measured from the most significant digit).
If the two objects compare equal then they will automatically
compare almost equal.
"""
if first == second:
# shortcut
return
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if abs(first - second) <= delta:
return
standardMsg = '%s != %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if round(abs(second-first), places) == 0:
return
standardMsg = '%s != %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
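# Editor's sketch (worked examples of the places/delta semantics above):
#   self.assertAlmostEqual(1.00000001, 1.0)      # passes: rounds to 0 at 7 places
#   self.assertAlmostEqual(1.1, 1.0, delta=0.5)  # passes: |1.1 - 1.0| <= 0.5
#   self.assertAlmostEqual(1.1, 1.0, places=1)   # fails: round(0.1, 1) != 0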
def assertNotAlmostEqual(self, first, second, places=None, msg=None,
delta=None):
"""Fail if the two objects are equal as determined by their
difference rounded to the given number of decimal places
(default 7) and comparing to zero, or by comparing that the
difference between the two objects is less than the given delta.
Note that decimal places (from zero) are usually not the same
as significant digits (measured from the most significant digit).
Objects that are equal automatically fail.
"""
if delta is not None and places is not None:
raise TypeError("specify delta or places not both")
if delta is not None:
if not (first == second) and abs(first - second) > delta:
return
standardMsg = '%s == %s within %s delta' % (safe_repr(first),
safe_repr(second),
safe_repr(delta))
else:
if places is None:
places = 7
if not (first == second) and round(abs(second-first), places) != 0:
return
standardMsg = '%s == %s within %r places' % (safe_repr(first),
safe_repr(second),
places)
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertSequenceEqual(self, seq1, seq2, msg=None, seq_type=None):
"""An equality assertion for ordered sequences (like lists and tuples).
For the purposes of this function, a valid ordered sequence type is one
which can be indexed, has a length, and has an equality operator.
Args:
seq1: The first sequence to compare.
seq2: The second sequence to compare.
seq_type: The expected datatype of the sequences, or None if no
datatype should be enforced.
msg: Optional message to use on failure instead of a list of
differences.
"""
if seq_type is not None:
seq_type_name = seq_type.__name__
if not isinstance(seq1, seq_type):
raise self.failureException('First sequence is not a %s: %s'
% (seq_type_name, safe_repr(seq1)))
if not isinstance(seq2, seq_type):
raise self.failureException('Second sequence is not a %s: %s'
% (seq_type_name, safe_repr(seq2)))
else:
seq_type_name = "sequence"
differing = None
try:
len1 = len(seq1)
except (TypeError, NotImplementedError):
differing = 'First %s has no length. Non-sequence?' % (
seq_type_name)
if differing is None:
try:
len2 = len(seq2)
except (TypeError, NotImplementedError):
differing = 'Second %s has no length. Non-sequence?' % (
seq_type_name)
if differing is None:
if seq1 == seq2:
return
differing = '%ss differ: %s != %s\n' % (
(seq_type_name.capitalize(),) +
_common_shorten_repr(seq1, seq2))
for i in range(min(len1, len2)):
try:
item1 = seq1[i]
except (TypeError, IndexError, NotImplementedError):
differing += ('\nUnable to index element %d of first %s\n' %
(i, seq_type_name))
break
try:
item2 = seq2[i]
except (TypeError, IndexError, NotImplementedError):
differing += ('\nUnable to index element %d of second %s\n' %
(i, seq_type_name))
break
if item1 != item2:
differing += ('\nFirst differing element %d:\n%s\n%s\n' %
((i,) + _common_shorten_repr(item1, item2)))
break
else:
if (len1 == len2 and seq_type is None and
type(seq1) != type(seq2)):
# The sequences are the same, but have differing types.
return
if len1 > len2:
differing += ('\nFirst %s contains %d additional '
'elements.\n' % (seq_type_name, len1 - len2))
try:
differing += ('First extra element %d:\n%s\n' %
(len2, safe_repr(seq1[len2])))
except (TypeError, IndexError, NotImplementedError):
differing += ('Unable to index element %d '
'of first %s\n' % (len2, seq_type_name))
elif len1 < len2:
differing += ('\nSecond %s contains %d additional '
'elements.\n' % (seq_type_name, len2 - len1))
try:
differing += ('First extra element %d:\n%s\n' %
(len1, safe_repr(seq2[len1])))
except (TypeError, IndexError, NotImplementedError):
differing += ('Unable to index element %d '
'of second %s\n' % (len1, seq_type_name))
standardMsg = differing
diffMsg = '\n' + '\n'.join(
difflib.ndiff(pprint.pformat(seq1).splitlines(),
pprint.pformat(seq2).splitlines()))
standardMsg = self._truncateMessage(standardMsg, diffMsg)
msg = self._formatMessage(msg, standardMsg)
self.fail(msg)
def _truncateMessage(self, message, diff):
max_diff = self.maxDiff
if max_diff is None or len(diff) <= max_diff:
return message + diff
return message + (DIFF_OMITTED % len(diff))
def assertListEqual(self, list1, list2, msg=None):
"""A list-specific equality assertion.
Args:
list1: The first list to compare.
list2: The second list to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(list1, list2, msg, seq_type=list)
def assertTupleEqual(self, tuple1, tuple2, msg=None):
"""A tuple-specific equality assertion.
Args:
tuple1: The first tuple to compare.
tuple2: The second tuple to compare.
msg: Optional message to use on failure instead of a list of
differences.
"""
self.assertSequenceEqual(tuple1, tuple2, msg, seq_type=tuple)
def assertSetEqual(self, set1, set2, msg=None):
"""A set-specific equality assertion.
Args:
set1: The first set to compare.
set2: The second set to compare.
msg: Optional message to use on failure instead of a list of
differences.
assertSetEqual uses ducktyping to support different types of sets, and
is optimized for sets specifically (parameters must support a
difference method).
"""
try:
difference1 = set1.difference(set2)
except TypeError as e:
self.fail('invalid type when attempting set difference: %s' % e)
except AttributeError as e:
self.fail('first argument does not support set difference: %s' % e)
try:
difference2 = set2.difference(set1)
except TypeError as e:
self.fail('invalid type when attempting set difference: %s' % e)
except AttributeError as e:
self.fail('second argument does not support set difference: %s' % e)
if not (difference1 or difference2):
return
lines = []
if difference1:
lines.append('Items in the first set but not the second:')
for item in difference1:
lines.append(repr(item))
if difference2:
lines.append('Items in the second set but not the first:')
for item in difference2:
lines.append(repr(item))
standardMsg = '\n'.join(lines)
self.fail(self._formatMessage(msg, standardMsg))
def assertIn(self, member, container, msg=None):
"""Just like self.assertTrue(a in b), but with a nicer default message."""
if member not in container:
standardMsg = '%s not found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIn(self, member, container, msg=None):
"""Just like self.assertTrue(a not in b), but with a nicer default message."""
if member in container:
standardMsg = '%s unexpectedly found in %s' % (safe_repr(member),
safe_repr(container))
self.fail(self._formatMessage(msg, standardMsg))
def assertIs(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is b), but with a nicer default message."""
if expr1 is not expr2:
standardMsg = '%s is not %s' % (safe_repr(expr1),
safe_repr(expr2))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNot(self, expr1, expr2, msg=None):
"""Just like self.assertTrue(a is not b), but with a nicer default message."""
if expr1 is expr2:
standardMsg = 'unexpectedly identical: %s' % (safe_repr(expr1),)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictEqual(self, d1, d2, msg=None):
self.assertIsInstance(d1, dict, 'First argument is not a dictionary')
self.assertIsInstance(d2, dict, 'Second argument is not a dictionary')
if d1 != d2:
standardMsg = '%s != %s' % _common_shorten_repr(d1, d2)
diff = ('\n' + '\n'.join(difflib.ndiff(
pprint.pformat(d1).splitlines(),
pprint.pformat(d2).splitlines())))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertDictContainsSubset(self, subset, dictionary, msg=None):
"""Checks whether dictionary is a superset of subset."""
warnings.warn('assertDictContainsSubset is deprecated',
DeprecationWarning)
missing = []
mismatched = []
for key, value in subset.items():
if key not in dictionary:
missing.append(key)
elif value != dictionary[key]:
mismatched.append('%s, expected: %s, actual: %s' %
(safe_repr(key), safe_repr(value),
safe_repr(dictionary[key])))
if not (missing or mismatched):
return
standardMsg = ''
if missing:
standardMsg = 'Missing: %s' % ','.join(safe_repr(m) for m in
missing)
if mismatched:
if standardMsg:
standardMsg += '; '
standardMsg += 'Mismatched values: %s' % ','.join(mismatched)
self.fail(self._formatMessage(msg, standardMsg))
def assertCountEqual(self, first, second, msg=None):
"""An unordered sequence comparison asserting that the same elements,
regardless of order. If the same element occurs more than once,
it verifies that the elements occur the same number of times.
self.assertEqual(Counter(list(first)),
Counter(list(second)))
Example:
- [0, 1, 1] and [1, 0, 1] compare equal.
- [0, 0, 1] and [0, 1] compare unequal.
"""
first_seq, second_seq = list(first), list(second)
try:
first = collections.Counter(first_seq)
second = collections.Counter(second_seq)
except TypeError:
# Handle case with unhashable elements
differences = _count_diff_all_purpose(first_seq, second_seq)
else:
if first == second:
return
differences = _count_diff_hashable(first_seq, second_seq)
if differences:
standardMsg = 'Element counts were not equal:\n'
lines = ['First has %d, Second has %d: %r' % diff for diff in differences]
diffMsg = '\n'.join(lines)
standardMsg = self._truncateMessage(standardMsg, diffMsg)
msg = self._formatMessage(msg, standardMsg)
self.fail(msg)
def assertMultiLineEqual(self, first, second, msg=None):
"""Assert that two multi-line strings are equal."""
self.assertIsInstance(first, str, 'First argument is not a string')
self.assertIsInstance(second, str, 'Second argument is not a string')
if first != second:
# don't use difflib if the strings are too long
if (len(first) > self._diffThreshold or
len(second) > self._diffThreshold):
self._baseAssertEqual(first, second, msg)
firstlines = first.splitlines(keepends=True)
secondlines = second.splitlines(keepends=True)
if len(firstlines) == 1 and first.strip('\r\n') == first:
firstlines = [first + '\n']
secondlines = [second + '\n']
standardMsg = '%s != %s' % _common_shorten_repr(first, second)
diff = '\n' + ''.join(difflib.ndiff(firstlines, secondlines))
standardMsg = self._truncateMessage(standardMsg, diff)
self.fail(self._formatMessage(msg, standardMsg))
def assertLess(self, a, b, msg=None):
"""Just like self.assertTrue(a < b), but with a nicer default message."""
if not a < b:
standardMsg = '%s not less than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertLessEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a <= b), but with a nicer default message."""
if not a <= b:
standardMsg = '%s not less than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreater(self, a, b, msg=None):
"""Just like self.assertTrue(a > b), but with a nicer default message."""
if not a > b:
standardMsg = '%s not greater than %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertGreaterEqual(self, a, b, msg=None):
"""Just like self.assertTrue(a >= b), but with a nicer default message."""
if not a >= b:
standardMsg = '%s not greater than or equal to %s' % (safe_repr(a), safe_repr(b))
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNone(self, obj, msg=None):
"""Same as self.assertTrue(obj is None), with a nicer default message."""
if obj is not None:
standardMsg = '%s is not None' % (safe_repr(obj),)
self.fail(self._formatMessage(msg, standardMsg))
def assertIsNotNone(self, obj, msg=None):
"""Included for symmetry with assertIsNone."""
if obj is None:
standardMsg = 'unexpectedly None'
self.fail(self._formatMessage(msg, standardMsg))
def assertIsInstance(self, obj, cls, msg=None):
"""Same as self.assertTrue(isinstance(obj, cls)), with a nicer
default message."""
if not isinstance(obj, cls):
standardMsg = '%s is not an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertNotIsInstance(self, obj, cls, msg=None):
"""Included for symmetry with assertIsInstance."""
if isinstance(obj, cls):
standardMsg = '%s is an instance of %r' % (safe_repr(obj), cls)
self.fail(self._formatMessage(msg, standardMsg))
def assertRaisesRegex(self, expected_exception, expected_regex,
*args, **kwargs):
"""Asserts that the message in a raised exception matches a regex.
Args:
expected_exception: Exception class expected to be raised.
expected_regex: Regex (re pattern object or string) expected
to be found in error message.
args: Function to be called and extra positional args.
kwargs: Extra kwargs.
msg: Optional message used in case of failure. Can only be used
when assertRaisesRegex is used as a context manager.
"""
context = _AssertRaisesContext(expected_exception, self, expected_regex)
return context.handle('assertRaisesRegex', args, kwargs)
def assertWarnsRegex(self, expected_warning, expected_regex,
*args, **kwargs):
"""Asserts that the message in a triggered warning matches a regexp.
Basic functioning is similar to assertWarns() with the addition
that only warnings whose messages also match the regular expression
are considered successful matches.
Args:
expected_warning: Warning class expected to be triggered.
expected_regex: Regex (re pattern object or string) expected
to be found in error message.
args: Function to be called and extra positional args.
kwargs: Extra kwargs.
msg: Optional message used in case of failure. Can only be used
when assertWarnsRegex is used as a context manager.
"""
context = _AssertWarnsContext(expected_warning, self, expected_regex)
return context.handle('assertWarnsRegex', args, kwargs)
def assertRegex(self, text, expected_regex, msg=None):
"""Fail the test unless the text matches the regular expression."""
if isinstance(expected_regex, (str, bytes)):
assert expected_regex, "expected_regex must not be empty."
expected_regex = re.compile(expected_regex)
if not expected_regex.search(text):
standardMsg = "Regex didn't match: %r not found in %r" % (
expected_regex.pattern, text)
# _formatMessage ensures the longMessage option is respected
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def assertNotRegex(self, text, unexpected_regex, msg=None):
"""Fail the test if the text matches the regular expression."""
if isinstance(unexpected_regex, (str, bytes)):
unexpected_regex = re.compile(unexpected_regex)
match = unexpected_regex.search(text)
if match:
standardMsg = 'Regex matched: %r matches %r in %r' % (
text[match.start() : match.end()],
unexpected_regex.pattern,
text)
# _formatMessage ensures the longMessage option is respected
msg = self._formatMessage(msg, standardMsg)
raise self.failureException(msg)
def _deprecate(original_func):
def deprecated_func(*args, **kwargs):
warnings.warn(
'Please use {0} instead.'.format(original_func.__name__),
DeprecationWarning, 2)
return original_func(*args, **kwargs)
return deprecated_func
# see #9424
failUnlessEqual = assertEquals = _deprecate(assertEqual)
failIfEqual = assertNotEquals = _deprecate(assertNotEqual)
failUnlessAlmostEqual = assertAlmostEquals = _deprecate(assertAlmostEqual)
failIfAlmostEqual = assertNotAlmostEquals = _deprecate(assertNotAlmostEqual)
failUnless = assert_ = _deprecate(assertTrue)
failUnlessRaises = _deprecate(assertRaises)
failIf = _deprecate(assertFalse)
assertRaisesRegexp = _deprecate(assertRaisesRegex)
assertRegexpMatches = _deprecate(assertRegex)
assertNotRegexpMatches = _deprecate(assertNotRegex)
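    # Calling any alias above emits a DeprecationWarning naming the preferred
    # method before delegating to it, e.g. (illustrative only):
    #   self.failUnless(x)  # warns "Please use assertTrue instead." and then
    #                       # behaves exactly like self.assertTrue(x)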
class FunctionTestCase(TestCase):
"""A test case that wraps a test function.
This is useful for slipping pre-existing test functions into the
unittest framework. Optionally, set-up and tidy-up functions can be
supplied. As with TestCase, the tidy-up ('tearDown') function will
always be called if the set-up ('setUp') function ran successfully.
"""
def __init__(self, testFunc, setUp=None, tearDown=None, description=None):
super(FunctionTestCase, self).__init__()
self._setUpFunc = setUp
self._tearDownFunc = tearDown
self._testFunc = testFunc
self._description = description
def setUp(self):
if self._setUpFunc is not None:
self._setUpFunc()
def tearDown(self):
if self._tearDownFunc is not None:
self._tearDownFunc()
def runTest(self):
self._testFunc()
def id(self):
return self._testFunc.__name__
def __eq__(self, other):
if not isinstance(other, self.__class__):
return NotImplemented
return self._setUpFunc == other._setUpFunc and \
self._tearDownFunc == other._tearDownFunc and \
self._testFunc == other._testFunc and \
self._description == other._description
def __hash__(self):
return hash((type(self), self._setUpFunc, self._tearDownFunc,
self._testFunc, self._description))
def __str__(self):
return "%s (%s)" % (strclass(self.__class__),
self._testFunc.__name__)
def __repr__(self):
return "<%s tec=%s>" % (strclass(self.__class__),
self._testFunc)
def shortDescription(self):
if self._description is not None:
return self._description
doc = self._testFunc.__doc__
return doc and doc.split("\n")[0].strip() or None
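# Illustrative usage of FunctionTestCase (not part of the original module):
# wrapping a plain function so that a standard unittest runner can execute it.
#
#   def my_check():
#       assert 1 + 1 == 2
#
#   case = FunctionTestCase(my_check, description="basic arithmetic")
#   unittest.TextTestRunner().run(case)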
class _SubTest(TestCase):
def __init__(self, test_case, message, params):
super().__init__()
self._message = message
self.test_case = test_case
self.params = params
self.failureException = test_case.failureException
def runTest(self):
raise NotImplementedError("subtests cannot be run directly")
def _subDescription(self):
parts = []
if self._message is not _subtest_msg_sentinel:
parts.append("[{}]".format(self._message))
if self.params:
params_desc = ', '.join(
"{}={!r}".format(k, v)
for (k, v) in sorted(self.params.items()))
parts.append("({})".format(params_desc))
return " ".join(parts) or '(<subtest>)'
def id(self):
return "{} {}".format(self.test_case.id(), self._subDescription())
def shortDescription(self):
"""Returns a one-line description of the subtest, or None if no
description has been provided.
"""
return self.test_case.shortDescription()
def __str__(self):
return "{} {}".format(self.test_case, self._subDescription())
| lgpl-2.1 |
lamby/pkg-python-django-debug-toolbar | debug_toolbar/panels/redirects.py | 9 | 1482 | from __future__ import absolute_import, unicode_literals
from django.core.handlers.wsgi import STATUS_CODE_TEXT
from django.shortcuts import render
from django.utils.translation import ugettext_lazy as _
from debug_toolbar.panels import Panel
class RedirectsPanel(Panel):
"""
Panel that intercepts redirects and displays a page with debug info.
"""
@property
def enabled(self):
default = 'on' if self.toolbar.config['INTERCEPT_REDIRECTS'] else 'off'
return self.toolbar.request.COOKIES.get('djdt' + self.panel_id, default) == 'on'
has_content = False
nav_title = _("Intercept redirects")
def process_response(self, request, response):
if 300 <= int(response.status_code) < 400:
redirect_to = response.get('Location', None)
if redirect_to:
try: # Django >= 1.6
reason_phrase = response.reason_phrase
except AttributeError:
reason_phrase = STATUS_CODE_TEXT.get(response.status_code,
'UNKNOWN STATUS CODE')
status_line = '%s %s' % (response.status_code, reason_phrase)
cookies = response.cookies
context = {'redirect_to': redirect_to, 'status_line': status_line}
response = render(request, 'debug_toolbar/redirect.html', context)
response.cookies = cookies
return response
| bsd-3-clause |
JFriel/honours_project | venv/lib/python2.7/site-packages/nltk/metrics/paice.py | 7 | 14427 | # Natural Language Toolkit: Agreement Metrics
#
# Copyright (C) 2001-2016 NLTK Project
# Author: Lauri Hallila <[email protected]>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
#
"""Counts Paice's performance statistics for evaluating stemming algorithms.
What is required:
- A dictionary of words grouped by their real lemmas
- A dictionary of words grouped by stems from a stemming algorithm
When these are given, Understemming Index (UI), Overstemming Index (OI),
Stemming Weight (SW) and Error-rate relative to truncation (ERRT) are counted.
References:
Chris D. Paice (1994). An evaluation method for stemming algorithms.
In Proceedings of SIGIR, 42--50.
"""
from math import sqrt
def get_words_from_dictionary(lemmas):
'''
Get original set of words used for analysis.
:param lemmas: A dictionary where keys are lemmas and values are sets
or lists of words corresponding to that lemma.
:type lemmas: dict(str): list(str)
:return: Set of words that exist as values in the dictionary
:rtype: set(str)
'''
words = set()
for lemma in lemmas:
words.update(set(lemmas[lemma]))
return words
def _truncate(words, cutlength):
'''Group words by stems defined by truncating them at given length.
:param words: Set of words used for analysis
:param cutlength: Words are stemmed by cutting at this length.
:type words: set(str) or list(str)
:type cutlength: int
:return: Dictionary where keys are stems and values are sets of words
corresponding to that stem.
:rtype: dict(str): set(str)
'''
stems = {}
for word in words:
stem = word[:cutlength]
try:
stems[stem].update([word])
except KeyError:
stems[stem] = set([word])
return stems
# Reference: http://en.wikipedia.org/wiki/Line-line_intersection
def _count_intersection(l1, l2):
'''Count intersection between two line segments defined by coordinate pairs.
:param l1: Tuple of two coordinate pairs defining the first line segment
:param l2: Tuple of two coordinate pairs defining the second line segment
    :type l1: tuple(tuple(float, float), tuple(float, float))
    :type l2: tuple(tuple(float, float), tuple(float, float))
:return: Coordinates of the intersection
:rtype: tuple(float, float)
'''
x1, y1 = l1[0]
x2, y2 = l1[1]
x3, y3 = l2[0]
x4, y4 = l2[1]
denominator = (x1 - x2) * (y3 - y4) - (y1 - y2) * (x3 - x4)
if denominator == 0.0: # lines are parallel
if x1 == x2 == x3 == x4 == 0.0:
# When lines are parallel, they must be on the y-axis.
# We can ignore x-axis because we stop counting the
# truncation line when we get there.
# There are no other options as UI (x-axis) grows and
# OI (y-axis) diminishes when we go along the truncation line.
            return (0.0, y4)
        # Parallel segments not lying on the y-axis never intersect; raise a
        # clear error instead of falling through to a division by zero.
        raise ValueError('line segments are parallel and do not intersect')
x = ((x1 * y2 - y1 * x2) * (x3 - x4) - (x1 - x2) * (x3 * y4 - y3 * x4)) / denominator
y = ((x1 * y2 - y1 * x2) * (y3 - y4) - (y1 - y2) * (x3 * y4 - y3 * x4)) / denominator
return (x, y)
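# Worked example (illustrative, not part of the original module): the segment
# from (0, 0) to (2, 2) and the segment from (0, 2) to (2, 0) cross at (1, 1):
#   _count_intersection(((0.0, 0.0), (2.0, 2.0)), ((0.0, 2.0), (2.0, 0.0)))
#   -> (1.0, 1.0)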
def _get_derivative(coordinates):
'''Get derivative of the line from (0,0) to given coordinates.
:param coordinates: A coordinate pair
:type coordinates: tuple(float, float)
:return: Derivative; inf if x is zero
:rtype: float
'''
try:
return coordinates[1] / coordinates[0]
except ZeroDivisionError:
return float('inf')
def _calculate_cut(lemmawords, stems):
'''Count understemmed and overstemmed pairs for (lemma, stem) pair with common words.
:param lemmawords: Set or list of words corresponding to certain lemma.
:param stems: A dictionary where keys are stems and values are sets
or lists of words corresponding to that stem.
:type lemmawords: set(str) or list(str)
:type stems: dict(str): set(str)
    :return: Number of understemmed and overstemmed pairs contributed by words
existing in both lemmawords and stems.
:rtype: tuple(float, float)
'''
umt, wmt = 0.0, 0.0
for stem in stems:
cut = set(lemmawords) & set(stems[stem])
if cut:
cutcount = len(cut)
stemcount = len(stems[stem])
# Unachieved merge total
umt += cutcount * (len(lemmawords) - cutcount)
# Wrongly merged total
wmt += cutcount * (stemcount - cutcount)
return (umt, wmt)
def _calculate(lemmas, stems):
'''Calculate actual and maximum possible amounts of understemmed and overstemmed word pairs.
:param lemmas: A dictionary where keys are lemmas and values are sets
or lists of words corresponding to that lemma.
:param stems: A dictionary where keys are stems and values are sets
or lists of words corresponding to that stem.
:type lemmas: dict(str): list(str)
:type stems: dict(str): set(str)
:return: Global unachieved merge total (gumt),
global desired merge total (gdmt),
global wrongly merged total (gwmt) and
global desired non-merge total (gdnt).
:rtype: tuple(float, float, float, float)
'''
n = sum(len(lemmas[word]) for word in lemmas)
gdmt, gdnt, gumt, gwmt = (0.0, 0.0, 0.0, 0.0)
for lemma in lemmas:
lemmacount = len(lemmas[lemma])
# Desired merge total
gdmt += lemmacount * (lemmacount - 1)
# Desired non-merge total
gdnt += lemmacount * (n - lemmacount)
# For each (lemma, stem) pair with common words, count how many
# pairs are understemmed and overstemmed.
umt, wmt = _calculate_cut(lemmas[lemma], stems)
        # Add to global unachieved and wrongly-merged totals
gumt += umt
gwmt += wmt
# Each object is counted twice, so divide by two
return (gumt / 2, gdmt / 2, gwmt / 2, gdnt / 2)
def _indexes(gumt, gdmt, gwmt, gdnt):
'''Count Understemming Index (UI), Overstemming Index (OI) and Stemming Weight (SW).
:param gumt, gdmt, gwmt, gdnt: Global unachieved merge total (gumt),
global desired merge total (gdmt),
global wrongly merged total (gwmt) and
global desired non-merge total (gdnt).
:type gumt, gdmt, gwmt, gdnt: float
:return: Understemming Index (UI),
Overstemming Index (OI) and
Stemming Weight (SW).
:rtype: tuple(float, float, float)
'''
# Calculate Understemming Index (UI),
# Overstemming Index (OI) and Stemming Weight (SW)
try:
ui = gumt / gdmt
except ZeroDivisionError:
# If GDMT (max merge total) is 0, define UI as 0
ui = 0.0
try:
oi = gwmt / gdnt
except ZeroDivisionError:
# IF GDNT (max non-merge total) is 0, define OI as 0
oi = 0.0
try:
sw = oi / ui
except ZeroDivisionError:
if oi == 0.0:
# OI and UI are 0, define SW as 'not a number'
sw = float('nan')
else:
# UI is 0, define SW as infinity
sw = float('inf')
return (ui, oi, sw)
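# Quick numeric check (illustrative, not part of the original module):
#   _indexes(1.0, 4.0, 2.0, 8.0) -> UI = 1/4 = 0.25, OI = 2/8 = 0.25,
#   SW = OI / UI = 1.0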
class Paice(object):
'''Class for storing lemmas, stems and evaluation metrics.'''
def __init__(self, lemmas, stems):
'''
:param lemmas: A dictionary where keys are lemmas and values are sets
or lists of words corresponding to that lemma.
:param stems: A dictionary where keys are stems and values are sets
or lists of words corresponding to that stem.
:type lemmas: dict(str): list(str)
:type stems: dict(str): set(str)
'''
self.lemmas = lemmas
self.stems = stems
self.coords = []
self.gumt, self.gdmt, self.gwmt, self.gdnt = (None, None, None, None)
self.ui, self.oi, self.sw = (None, None, None)
self.errt = None
self.update()
def __str__(self):
text = ['Global Unachieved Merge Total (GUMT): %s\n' % self.gumt]
text.append('Global Desired Merge Total (GDMT): %s\n' % self.gdmt)
text.append('Global Wrongly-Merged Total (GWMT): %s\n' % self.gwmt)
text.append('Global Desired Non-merge Total (GDNT): %s\n' % self.gdnt)
text.append('Understemming Index (GUMT / GDMT): %s\n' % self.ui)
text.append('Overstemming Index (GWMT / GDNT): %s\n' % self.oi)
text.append('Stemming Weight (OI / UI): %s\n' % self.sw)
text.append('Error-Rate Relative to Truncation (ERRT): %s\r\n' % self.errt)
coordinates = ' '.join(['(%s, %s)' % item for item in self.coords])
text.append('Truncation line: %s' % coordinates)
return ''.join(text)
def _get_truncation_indexes(self, words, cutlength):
'''Count (UI, OI) when stemming is done by truncating words at \'cutlength\'.
:param words: Words used for the analysis
:param cutlength: Words are stemmed by cutting them at this length
:type words: set(str) or list(str)
:type cutlength: int
:return: Understemming and overstemming indexes
        :rtype: tuple(float, float)
'''
truncated = _truncate(words, cutlength)
gumt, gdmt, gwmt, gdnt = _calculate(self.lemmas, truncated)
ui, oi = _indexes(gumt, gdmt, gwmt, gdnt)[:2]
return (ui, oi)
def _get_truncation_coordinates(self, cutlength=0):
'''Count (UI, OI) pairs for truncation points until we find the segment where (ui, oi) crosses the truncation line.
:param cutlength: Optional parameter to start counting from (ui, oi)
coordinates gotten by stemming at this length. Useful for speeding up
the calculations when you know the approximate location of the
intersection.
:type cutlength: int
:return: List of coordinate pairs that define the truncation line
:rtype: list(tuple(float, float))
'''
words = get_words_from_dictionary(self.lemmas)
maxlength = max(len(word) for word in words)
# Truncate words from different points until (0, 0) - (ui, oi) segment crosses the truncation line
coords = []
while cutlength <= maxlength:
# Get (UI, OI) pair of current truncation point
pair = self._get_truncation_indexes(words, cutlength)
# Store only new coordinates so we'll have an actual
# line segment when counting the intersection point
if pair not in coords:
coords.append(pair)
if pair == (0.0, 0.0):
                # Stop counting if the truncation line goes through the origin;
                # the length from the origin to the truncation line is 0
return coords
if len(coords) >= 2 and pair[0] > 0.0:
derivative1 = _get_derivative(coords[-2])
derivative2 = _get_derivative(coords[-1])
# Derivative of the truncation line is a decreasing value;
# when it passes Stemming Weight, we've found the segment
# of truncation line intersecting with (0, 0) - (ui, oi) segment
if derivative1 >= self.sw >= derivative2:
return coords
cutlength += 1
return coords
def _errt(self):
'''Count Error-Rate Relative to Truncation (ERRT).
        :return: ERRT, the length of the line from the origin to (UI, OI)
        divided by the length of the same line extended until it meets the
        truncation line.
:rtype: float
'''
# Count (UI, OI) pairs for truncation points until we find the segment where (ui, oi) crosses the truncation line
self.coords = self._get_truncation_coordinates()
if (0.0, 0.0) in self.coords:
            # Truncation line goes through the origin, so ERRT cannot be counted
if (self.ui, self.oi) != (0.0, 0.0):
return float('inf')
else:
return float('nan')
if (self.ui, self.oi) == (0.0, 0.0):
            # (ui, oi) is the origin; define errt as 0.0
return 0.0
# Count the intersection point
# Note that (self.ui, self.oi) cannot be (0.0, 0.0) and self.coords has different coordinates
# so we have actual line segments instead of a line segment and a point
intersection = _count_intersection(((0, 0), (self.ui, self.oi)),
self.coords[-2:]
)
        # Count OP (length of the line from the origin to (ui, oi))
op = sqrt(self.ui ** 2 + self.oi ** 2)
        # Count OT (length of the line from the origin to the truncation line that goes through (ui, oi))
ot = sqrt(intersection[0] ** 2 + intersection[1] ** 2)
# OP / OT tells how well the stemming algorithm works compared to just truncating words
return op / ot
def update(self):
'''Update statistics after lemmas and stems have been set.'''
self.gumt, self.gdmt, self.gwmt, self.gdnt = _calculate(self.lemmas, self.stems)
self.ui, self.oi, self.sw = _indexes(self.gumt, self.gdmt, self.gwmt, self.gdnt)
self.errt = self._errt()
def demo():
'''Demonstration of the module.'''
# Some words with their real lemmas
lemmas = {'kneel': ['kneel', 'knelt'],
'range': ['range', 'ranged'],
'ring': ['ring', 'rang', 'rung']
}
# Same words with stems from a stemming algorithm
stems = {'kneel': ['kneel'],
'knelt': ['knelt'],
'rang': ['rang', 'range', 'ranged'],
'ring': ['ring'],
'rung': ['rung']
}
print('Words grouped by their lemmas:')
for lemma in sorted(lemmas):
print('%s => %s' % (lemma, ' '.join(lemmas[lemma])))
print()
print('Same words grouped by a stemming algorithm:')
for stem in sorted(stems):
print('%s => %s' % (stem, ' '.join(stems[stem])))
print()
p = Paice(lemmas, stems)
print(p)
print()
# Let's "change" results from a stemming algorithm
stems = {'kneel': ['kneel'],
'knelt': ['knelt'],
'rang': ['rang'],
'range': ['range', 'ranged'],
'ring': ['ring'],
'rung': ['rung']
}
print('Counting stats after changing stemming results:')
for stem in sorted(stems):
print('%s => %s' % (stem, ' '.join(stems[stem])))
print()
p.stems = stems
p.update()
print(p)
if __name__ == '__main__':
demo()
| gpl-3.0 |
avalentino/qtsix | qtsix/uic.py | 1 | 3628 | # -*- coding: utf-8 -*-
# Copyright (c) 2011-2015 Antonio Valentino <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of qtsix nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from . import qt_api
if qt_api == 'pyqt5':
from PyQt5.uic import *
elif qt_api == 'pyqt4':
from PyQt4.uic import *
elif qt_api == 'pyside':
from pysideuic import *
# Credit:
# http://stackoverflow.com/questions/7144313/loading-qtdesigners-ui-files-in-pyside
def loadUi(uifilename, parent=None):
'''Load a Qt Designer .ui file and return an instance of
the user interface.'''
from PySide import QtCore, QtUiTools
loader = QtUiTools.QUiLoader()
uifile = QtCore.QFile(uifilename)
uifile.open(QtCore.QFile.ReadOnly)
ui = loader.load(uifile, parent)
uifile.close()
return ui
# Credit:
# http://stackoverflow.com/questions/4442286/python-code-genration-with-pyside-uic/14195313#14195313
def loadUiType(uifile):
"""Load a Qt Designer .ui file and return the generated form
class and the Qt base class.
Pyside "loadUiType" command like PyQt4 has one, so we have to
convert the ui file to py code in-memory first and then
execute it in a special frame to retrieve the form_class.
"""
# @COMPATIBILITY: Python 2
import sys
if sys.version_info >= (3, 0):
from io import StringIO
else:
from io import BytesIO as StringIO
import xml.etree.ElementTree as xml
from PySide import QtGui # NOQA
parsed = xml.parse(uifile)
widget_class = parsed.find('widget').get('class')
form_class = parsed.find('class').text
with open(uifile, 'r') as f:
o = StringIO()
frame = {}
compileUi(f, o, indent=0)
pyc = compile(o.getvalue(), '<string>', 'exec')
exec(pyc, frame)
# Fetch the base_class and form class based on their type in the
# xml from designer
form_class = frame['Ui_%s' % form_class]
base_class = eval('QtGui.%s' % widget_class)
return form_class, base_class
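# Illustrative usage (not part of the original module; 'main_window.ui' and the
# QMainWindow top-level widget are hypothetical):
#
#   form_class, base_class = loadUiType('main_window.ui')
#
#   class MainWindow(base_class, form_class):
#       def __init__(self):
#           super(MainWindow, self).__init__()
#           self.setupUi(self)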
| bsd-3-clause |
jjo31/ATHAM-Fluidity | python/GFD_basisChange_tools.py | 6 | 16085 | # Copyright (C) 2012 Imperial College London and others.
#
# Please see the AUTHORS file in the main source directory for a full list
# of copyright holders.
#
# Prof. C Pain
# Applied Modelling and Computation Group
# Department of Earth Science and Engineering
# Imperial College London
#
# [email protected]
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation,
# version 2.1 of the License.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307
# USA
from math import sqrt, pi, sin, cos, atan2, acos
def cartesian_2_sphericalPolar(positionVectorCartesian):
'''Convert Cartesian coordinates to spherical-polar coordinates, in radians.
The origin of the Cartesian frame of reference is located at the centre of the sphere, the positive half of x-axis goes through 0 deg E, 0 deg N, the positive half of y-axis goes through 90 deg E, 0 deg N and the positive half of the z-axis goes through the North Pole equivalent.'''
#Extract the Cartesian vector components.
x = positionVectorCartesian[0]
y = positionVectorCartesian[1]
z = positionVectorCartesian[2]
#Calculate the radius.
radius = sqrt(x**2 + y**2 + z**2)
    #Calculate polar (theta) and azimuthal (phi) angles
theta = acos(z/radius)
phi = atan2(y,x)
return [radius, theta, phi]
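# Sanity check (illustrative, not part of the original module): a point on the
# positive y-axis lies on the equator at 90 deg E, so theta = pi/2 and
# phi = pi/2:
#   cartesian_2_sphericalPolar([0.0, 1.0, 0.0]) -> [1.0, pi/2, pi/2]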
def sphericalPolar_2_cartesian(positionVectorSphericalPolar):
'''Convert spherical-polar coordinates (in radians), into Cartesian coordinates.
The origin of the Cartesian frame of reference is located at the centre of the sphere, the positive half of x-axis goes through 0 deg E, 0 deg N, the positive half of y-axis goes through 90 deg E, 0 deg N and the positive half of the z-axis goes through the North Pole equivalent.'''
import numpy as np
[radius, theta, phi] = positionVectorSphericalPolar
x = radius*np.sin(theta)*np.cos(phi)
y = radius*np.sin(theta)*np.sin(phi)
z = radius*np.cos(theta)
return [x, y, z]
def cartesian_2_lonlatradius(positionVectorCartesian):
'''Convert Cartesian coordinates on a sphere to longitude-latitude-radius. Longitude and latitude are returned in degrees.
The origin of the Cartesian frame of reference is located at the centre of the sphere, the positive half of x-axis goes through 0 deg E, 0 deg N, the positive half of y-axis goes through 90 deg E, 0 deg N and the positive half of the z-axis goes through the North Pole equivalent.'''
#Calculate azimuthal (phi), polar (theta) angles and distance from origin.
[radius, theta, phi] = cartesian_2_sphericalPolar(positionVectorCartesian)
#Calculate longitude and latitude
lon = phi*180.0/pi
lat = (pi/2 - theta)*180.0/pi
positionVectorLonlat = [lon, lat, radius]
return positionVectorLonlat
def lonlatradius_2_cartesian(positionVectorLonLatRad):
'''Convert longitude-latitude-radial coordinates on the surface of the Earth (in degrees) to Cartesian coordinates. Longitude and latitude must be in degrees.
The origin of the Cartesian frame of reference is located at the centre of the sphere, the positive half of x-axis goes through 0 deg E, 0 deg N, the positive half of y-axis goes through 90 deg E, 0 deg N and the positive half of the z-axis goes through the North Pole equivalent.'''
#Calculate spherical-polar coordinates form longitude-latitude-radius.
[radius, theta, phi] = lonlatradius_2_sphericalPolar(positionVectorLonLatRad)
#Calculate Cartesian coordinates from spherical-polar coordinates.
[x, y, z] = sphericalPolar_2_cartesian([radius, theta, phi])
return [x, y, z]
def lonlatradius_2_sphericalPolar(positionVectorLonLatRad):
    '''Convert longitude-latitude-radial coordinates on the surface of the Earth (in degrees) to spherical-polar coordinates. Longitude and latitude must be in degrees; the polar and azimuthal angles are returned in radians.'''
[longitude, latitude, radius] = positionVectorLonLatRad
#Calculate azimuthal (phi), polar (theta) angles.
phi = longitude*pi/180.0
theta = pi/2 - latitude*pi/180.0
return [radius, theta, phi]
def transform_tensor_sphericalPolar_2_cartesian(positionVectorSpherical, tensor):
'''Function changing the basis of a tensor from zonal-meridional-radial basis to a Cartesian basis.
The origin of the Cartesian frame of reference is located at the centre of the sphere, the positive half of x-axis goes through 0 deg E, 0 deg N, the positive half of y-axis goes through 90 deg E, 0 deg N and the positive half of the z-axis goes through the North Pole equivalent.'''
import numpy as np
from numpy import linalg
#extract distance from origin, polar (theta) angles and azimuthal (phi) angles.
[radius, theta, phi] = positionVectorSpherical
#Evaluate components of rotation matrices.
transformationMatrix =\
np.array([\
[np.sin(theta)*np.cos(phi), np.cos(theta)*np.cos(phi), -np.sin(phi)],\
[np.sin(theta)*np.sin(phi), np.cos(theta)*np.sin(phi), np.cos(phi)],\
[np.cos(theta), -np.sin(theta), 0]])
transposedTransformationMatrix = transformationMatrix.transpose()
#Calculate the components of the tensor in the reference system.
transformed_Tensor = np.dot(transformationMatrix, np.array(tensor))
transformed_Tensor = np.dot(transformed_Tensor,transposedTransformationMatrix)
return transformed_Tensor
def transform_tensor_cartesian_2_sphericalPolar(positionVectorCartesian, tensor):
'''Function transforming the components of a tensor from a Cartesian basis to a zonal-meridional-radial basis.
The origin of the Cartesian frame of reference is located at the centre of the sphere, the positive half of x-axis goes through 0 deg E, 0 deg N, the positive half of y-axis goes through 90 deg E, 0 deg N and the positive half of the z-axis goes through the North Pole equivalent.'''
import numpy as np
from numpy import linalg
    #Calculate polar (theta) and azimuthal (phi) angles and distance from origin
[radius, theta, phi] = cartesian_2_sphericalPolar(positionVectorCartesian)
#Evaluate components of rotation matrices.
transformationMatrix =\
np.array([\
[np.sin(theta)*np.cos(phi), np.sin(theta)*np.sin(phi), np.cos(theta)],\
[np.cos(theta)*np.cos(phi), np.cos(theta)*np.sin(phi), -np.sin(theta)],\
[-np.sin(phi), np.cos(phi), 0]])
transposedTransformationMatrix = transformationMatrix.transpose()
#Calculate the components of the tensor in the reference system.
transformed_Tensor = np.dot(transformationMatrix, np.array(tensor))
transformed_Tensor = np.dot(transformed_Tensor,transposedTransformationMatrix)
return transformed_Tensor
def transform_tensor_sphericalPolar_2_lon_lat_rad(tensor):
'''Function transforming the components of a tensor from a spherical-polar basis to a zonal-meridional-radial basis.'''
import numpy as np
#The requested transformation is just a reflection followed by a change in the order
# of the components in order to get a right-handed system.
transformationMatrix = np.array([[ 0.0, 0.0, 1.0 ],\
[ 0.0,-1.0, 0.0 ],\
[ 1.0, 0.0, 0.0 ]])
transformed_Tensor = np.dot(np.dot(transformationMatrix, np.array(tensor)), transformationMatrix)
return transformed_Tensor
def transform_tensor_lon_lat_rad_2_sphericalPolar(tensor):
'''Function transforming the components of a tensor from a zonal-meridional-radial basis to a spherical-polar basis.'''
import numpy as np
#The requested transformation is just a reflection followed by a change in the order
# of the components in order to get a right-handed system.
transformationMatrix = np.array([[ 0.0, 0.0, 1.0 ],\
[ 0.0,-1.0, 0.0 ],\
[ 1.0, 0.0, 0.0 ]])
transformed_Tensor = np.dot(np.dot(transformationMatrix, np.array(tensor)), transformationMatrix)
return transformed_Tensor
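# Note (illustrative, not part of the original module): applied to a vector,
# the reflection matrix above maps (v_lon, v_lat, v_rad) to (v_r, v_theta,
# v_phi) as
#   v_r = v_rad, v_theta = -v_lat, v_phi = v_lon,
# the sign flip arising because the polar angle theta increases southwards
# while latitude increases northwards.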
def transform_tensor_cartesian_2_lon_lat_rad(positionVectorCartesian, tensor):
'''Function transforming the components of a tensor from a Cartesian basis to a zonal-meridional-radial basis.
The origin of the Cartesian frame of reference is located at the centre of the sphere, the positive half of x-axis goes through 0 deg E, 0 deg N, the positive half of y-axis goes through 90 deg E, 0 deg N and the positive half of the z-axis goes through the North Pole equivalent.'''
import numpy as np
#Transform from Cartesian into spherical-polar
transformed_Tensor = transform_tensor_cartesian_2_sphericalPolar(positionVectorCartesian, tensor)
#Transform from spherical-polar into longitude-latitude-radius.
transformed_Tensor = transform_tensor_sphericalPolar_2_lon_lat_rad(transformed_Tensor)
return transformed_Tensor
def transform_tensor_lon_lat_rad_2_cartesian(positionVectorLonLatRad, tensor):
'''Function transforming the components of a tensor from a Cartesian basis to a zonal-meridional-radial basis.
The origin of the Cartesian frame of reference is located at the centre of the sphere, the positive half of x-axis goes through 0 deg E, 0 deg N, the positive half of y-axis goes through 90 deg E, 0 deg N and the positive half of the z-axis goes through the North Pole equivalent.'''
import numpy as np
#Transform coordinates from lon-lat-rad into spherical-polar.
positionVectorSpericalPolar = lonlatradius_2_sphericalPolar(positionVectorLonLatRad)
#Transform tensor from lon-lat-rad into spherical-polar.
transformed_Tensor = transform_tensor_lon_lat_rad_2_sphericalPolar(tensor)
#Transform spherical-polar into Cartesian.
transformed_Tensor = transform_tensor_sphericalPolar_2_cartesian(positionVectorSpericalPolar, transformed_Tensor)
return transformed_Tensor
def transform_vector_sphericalPolar_2_cartesian(positionVectorSpherical, vector):
'''Function transforming the components of a vector from a spherical-polar basis to a Cartesian basis. The input position vector must be given as [radius, polar angle, azimuthal angle], all angles specified in radians.
The origin of the Cartesian frame of reference is located at the centre of the sphere, the positive half of x-axis goes through 0 deg E, 0 deg N, the positive half of y-axis goes through 90 deg E, 0 deg N and the positive half of the z-axis goes through the North Pole equivalent.'''
import numpy as np
#extract distance from origin, polar (theta) angles and azimuthal (phi) angles.
[radius, theta, phi] = positionVectorSpherical
#Evaluate components of rotation matrices.
transformationMatrix =\
np.array([\
[np.sin(theta)*np.cos(phi), np.cos(theta)*np.cos(phi), -np.sin(phi)],\
[np.sin(theta)*np.sin(phi), np.cos(theta)*np.sin(phi), np.cos(phi)],\
[np.cos(theta), -np.sin(theta), 0]])
#Calculate the components of the tensor in the reference system.
transformed_Vector = np.dot(transformationMatrix, np.array(vector))
return transformed_Vector
def transform_vector_cartesian_2_sphericalPolar(positionVectorCartesian, vector):
'''Function transforming the components of a vector from a Cartesian basis to a spherical-polar basis.
The origin of the Cartesian frame of reference is located at the centre of the sphere, the positive half of x-axis goes through 0 deg E, 0 deg N, the positive half of y-axis goes through 90 deg E, 0 deg N and the positive half of the z-axis goes through the North Pole equivalent.'''
import numpy as np
#Calculate distance from origin, polar (theta) angles and azimuthal (phi) angles.
[radius, theta, phi] = cartesian_2_sphericalPolar(positionVectorCartesian)
#Evaluate components of rotation matrices.
transformationMatrix =\
np.array([\
[np.sin(theta)*np.cos(phi), np.sin(theta)*np.sin(phi), np.cos(theta)],\
[np.cos(theta)*np.cos(phi), np.cos(theta)*np.sin(phi), -np.sin(theta)],\
[-np.sin(phi), np.cos(phi), 0]])
#Calculate the components of the tensor in the reference system.
transformed_Vector = np.dot(transformationMatrix, np.array(vector))
return transformed_Vector
def transform_vector_sphericalPolar_2_lon_lat_rad(vector):
'''Function transforming the components of a vector from a spherical-polar basis to a zonal-meridional-radial basis.'''
import numpy as np
#The requested transformation is just a reflection followed by a change in the order
# of the components in order to get a right-handed system.
transformationMatrix = np.array([[ 0.0, 0.0, 1.0 ],\
[ 0.0,-1.0, 0.0 ],\
[ 1.0, 0.0, 0.0 ]])
transformed_Vector = np.dot(transformationMatrix, np.array(vector))
return transformed_Vector
def transform_vector_lon_lat_rad_2_sphericalPolar(vector):
'''Function transforming the components of a vector from a zonal-meridional-radial basis to a spherical-polar basis.'''
import numpy as np
#The requested transformation is just a reflection followed by a change in the order
# of the components in order to get a right-handed system.
transformationMatrix = np.array([[ 0.0, 0.0, 1.0 ],\
[ 0.0,-1.0, 0.0 ],\
[ 1.0, 0.0, 0.0 ]])
transformed_Vector = np.dot(transformationMatrix, np.array(vector))
return transformed_Vector
def transform_vector_cartesian_2_lon_lat_rad(positionVectorCartesian, vector):
'''Function transforming the components of a vector from a Cartesian basis to a zonal-meridional-radial basis.
The origin of the Cartesian frame of reference is located at the centre of the sphere, the positive half of x-axis goes through 0 deg E, 0 deg N, the positive half of y-axis goes through 90 deg E, 0 deg N and the positive half of the z-axis goes through the North Pole equivalent.'''
#Calculate spherical-polar components of the vector.
transformed_Vector = transform_vector_cartesian_2_sphericalPolar(positionVectorCartesian, vector)
#Calculate zonal, meridional and radial components of the vector.
transformed_Vector = transform_vector_sphericalPolar_2_lon_lat_rad(transformed_Vector)
return transformed_Vector
def transform_vector_lon_lat_rad_2_cartesian(positionVectorRadLonLat, vector):
'''Function transforming the components of a vector from a zonal-meridional-radial basis into a Cartesian basis.
The origin of the Cartesian frame of reference is located at the centre of the sphere, the positive half of x-axis goes through 0 deg E, 0 deg N, the positive half of y-axis goes through 90 deg E, 0 deg N and the positive half of the z-axis goes through the North Pole equivalent.'''
#Transform coordinates from longitude-latitude-radius into spherical-polar
positionVectorSpherical = lonlatradius_2_sphericalPolar(positionVectorRadLonLat)
#Transform vector from longitude-latitude-radius into spherical-polar
transformed_Vector = transform_vector_lon_lat_rad_2_sphericalPolar(vector)
#Transform from spherical-polar into Cartesian
    transformed_Vector = transform_vector_sphericalPolar_2_cartesian(positionVectorSpherical, transformed_Vector)
return transformed_Vector
| lgpl-2.1 |
adrienbrault/home-assistant | homeassistant/components/hangouts/const.py | 7 | 2415 | """Constants for Google Hangouts Component."""
import voluptuous as vol
from homeassistant.components.notify import ATTR_DATA, ATTR_MESSAGE, ATTR_TARGET
import homeassistant.helpers.config_validation as cv
DOMAIN = "hangouts"
CONF_2FA = "2fa"
CONF_AUTH_CODE = "authorization_code"
CONF_REFRESH_TOKEN = "refresh_token"
CONF_BOT = "bot"
CONF_CONVERSATIONS = "conversations"
CONF_DEFAULT_CONVERSATIONS = "default_conversations"
CONF_ERROR_SUPPRESSED_CONVERSATIONS = "error_suppressed_conversations"
CONF_INTENTS = "intents"
CONF_INTENT_TYPE = "intent_type"
CONF_SENTENCES = "sentences"
CONF_MATCHERS = "matchers"
INTENT_HELP = "HangoutsHelp"
EVENT_HANGOUTS_CONNECTED = "hangouts_connected"
EVENT_HANGOUTS_DISCONNECTED = "hangouts_disconnected"
EVENT_HANGOUTS_USERS_CHANGED = "hangouts_users_changed"
EVENT_HANGOUTS_CONVERSATIONS_CHANGED = "hangouts_conversations_changed"
EVENT_HANGOUTS_CONVERSATIONS_RESOLVED = "hangouts_conversations_resolved"
EVENT_HANGOUTS_MESSAGE_RECEIVED = "hangouts_message_received"
CONF_CONVERSATION_ID = "id"
CONF_CONVERSATION_NAME = "name"
SERVICE_SEND_MESSAGE = "send_message"
SERVICE_UPDATE = "update"
SERVICE_RECONNECT = "reconnect"
TARGETS_SCHEMA = vol.All(
vol.Schema(
{
vol.Exclusive(CONF_CONVERSATION_ID, "id or name"): cv.string,
vol.Exclusive(CONF_CONVERSATION_NAME, "id or name"): cv.string,
}
),
cv.has_at_least_one_key(CONF_CONVERSATION_ID, CONF_CONVERSATION_NAME),
)
MESSAGE_SEGMENT_SCHEMA = vol.Schema(
{
vol.Required("text"): cv.string,
vol.Optional("is_bold"): cv.boolean,
vol.Optional("is_italic"): cv.boolean,
vol.Optional("is_strikethrough"): cv.boolean,
vol.Optional("is_underline"): cv.boolean,
vol.Optional("parse_str"): cv.boolean,
vol.Optional("link_target"): cv.string,
}
)
MESSAGE_DATA_SCHEMA = vol.Schema(
{vol.Optional("image_file"): cv.string, vol.Optional("image_url"): cv.string}
)
MESSAGE_SCHEMA = vol.Schema(
{
vol.Required(ATTR_TARGET): [TARGETS_SCHEMA],
vol.Required(ATTR_MESSAGE): [MESSAGE_SEGMENT_SCHEMA],
vol.Optional(ATTR_DATA): MESSAGE_DATA_SCHEMA,
}
)
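# Example payload accepted by MESSAGE_SCHEMA (illustrative; the conversation
# name is hypothetical):
#
#   MESSAGE_SCHEMA({
#       "target": [{"name": "Family chat"}],
#       "message": [{"text": "hello", "is_bold": True}],
#   })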
INTENT_SCHEMA = vol.All(
# Basic Schema
vol.Schema(
{
vol.Required(CONF_SENTENCES): vol.All(cv.ensure_list, [cv.string]),
vol.Optional(CONF_CONVERSATIONS): [TARGETS_SCHEMA],
}
)
)
| mit |
yigepodan/robotframework-selenium2library | test/lib/mockito/mockito.py | 70 | 3297 | #!/usr/bin/env python
# coding: utf-8
import verification
from mocking import mock, TestDouble
from mock_registry import mock_registry
from verification import VerificationError
__copyright__ = "Copyright 2008-2010, Mockito Contributors"
__license__ = "MIT"
__maintainer__ = "Mockito Maintainers"
__email__ = "[email protected]"
class ArgumentError(Exception):
pass
def _multiple_arguments_in_use(*args):
return len(filter(lambda x: x, args)) > 1
def _invalid_argument(value):
return (value is not None and value < 1) or value == 0
def _invalid_between(between):
if between is not None:
start, end = between
if start > end or start < 0:
return True
return False
def verify(obj, times=1, atleast=None, atmost=None, between=None, inorder=False):
if times < 0:
raise ArgumentError("""'times' argument has invalid value.
It should be at least 0. You wanted to set it to: %i""" % times)
if _multiple_arguments_in_use(atleast, atmost, between):
raise ArgumentError("""Sure you know what you are doing?
You can set only one of the arguments: 'atleast', 'atmost' or 'between'.""")
if _invalid_argument(atleast):
raise ArgumentError("""'atleast' argument has invalid value.
It should be at least 1. You wanted to set it to: %i""" % atleast)
if _invalid_argument(atmost):
raise ArgumentError("""'atmost' argument has invalid value.
It should be at least 1. You wanted to set it to: %i""" % atmost)
if _invalid_between(between):
raise ArgumentError("""'between' argument has invalid value.
It should consist of positive values with second number not greater than first
e.g. [1, 4] or [0, 3] or [2, 2]
You wanted to set it to: %s""" % between)
if isinstance(obj, TestDouble):
mocked_object = obj
else:
mocked_object = mock_registry.mock_for(obj)
if atleast:
mocked_object.verification = verification.AtLeast(atleast)
elif atmost:
mocked_object.verification = verification.AtMost(atmost)
elif between:
mocked_object.verification = verification.Between(*between)
else:
mocked_object.verification = verification.Times(times)
if inorder:
mocked_object.verification = verification.InOrder(mocked_object.verification)
return mocked_object
def when(obj, strict=True):
if isinstance(obj, mock):
theMock = obj
else:
theMock = mock_registry.mock_for(obj)
if theMock is None:
theMock = mock(obj, strict=strict)
    # If we call when() on something that is not a TestDouble, we are stubbing
    # a real object (class, module etc.). Not to be confused with generating
    # stubs from real classes.
theMock.stubbing_real_object = True
theMock.expect_stubbing()
return theMock
def unstub():
"""Unstubs all stubbed methods and functions"""
mock_registry.unstub_all()
def verifyNoMoreInteractions(*mocks):
for mock in mocks:
for i in mock.invocations:
if not i.verified:
raise VerificationError("\nUnwanted interaction: " + str(i))
def verifyZeroInteractions(*mocks):
verifyNoMoreInteractions(*mocks)
| apache-2.0 |
npadmana/filehandler | ndfilehandler.py | 1 | 2625 | #
# Python code to read and write "filehander files", which are
# simply directories with files in them for each "keyword".
#
from __future__ import print_function
__author__ = "Martin White"
__version__ = "1.0"
__email__ = "[email protected]"
import numpy as N
import glob
import re
import os
def read_file_helper(fn,objt):
ff = open(fn,"r")
ndim = N.fromfile(ff,count=1,dtype='<i4')
dims = N.fromfile(ff,count=ndim[0],dtype='<i8')
nobj = N.prod(dims)
ret=N.fromfile(ff,count=nobj,dtype=objt)
ff.close()
ret=ret.reshape(dims)
return(ret)
def read_file(fname,keys=None):
"""
read_file(fname,keys=None):
Reads a specificed list of "keys" in a "file" of name fname,
or all of the keys if keys==None. Returns a dictionary of
NumPy arrays.
Does minimal checking, assuming you know what you're doing.
"""
# Get a list of all of the "fields" in the "file".
flist = glob.glob(fname+"/*.nd.[fi][48]")
# and start filling in my dictionary.
ret = {}
for fn in flist:
# Get the field name and type.
mm = re.search(fname+"/"+r"(\w*)\.nd\.([fi][48])",fn)
if mm==None:
raise RuntimeError("Unable to parse file "+fn)
else:
key = mm.group(1)
objt= mm.group(2)
objt = '<'+objt # Enforce little endian.
# and add it to the dictionary
if keys==None: # Need to do it this way since can't iterate None.
ret[key] = read_file_helper(fn,objt)
elif key in keys:
ret[key] = read_file_helper(fn,objt)
# Now check we got everything.
if keys!=None:
for key in keys:
if key not in ret.keys():
raise RuntimeError("Unable to find "+key+" in "+fname)
return(ret)
#
def write_file(fname,data):
"""
write_file(fname,data):
Writes the dictionary, data, which is meant to contain only
NumPy arrays, to a "file" of name fname.
Does minimal checking, assuming you know what you're doing.
"""
# Put in a little object type converter.
suffix={}
suffix['int32']='i4'
suffix['int64']='i8'
suffix['float32']='f4'
suffix['float64']='f8'
if not os.path.exists(fname):
os.mkdir(fname)
for key in data.keys():
dt = suffix[data[key].dtype.name]
ff = open(fname+"/"+key+".nd."+dt,"w")
shape =data[key].shape
ndim = N.array(len(shape),dtype='<i4')
dims = N.array(shape,dtype='<i8')
ndim.tofile(ff)
dims.tofile(ff)
data[key].astype('<'+dt).tofile(ff)
ff.close()
#
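# Illustrative round trip (not part of the original module; 'example_fh' is a
# hypothetical directory name):
#
#   data = {'pos': N.zeros((4, 3), dtype='f4'), 'ids': N.arange(4)}
#   write_file('example_fh', data)
#   back = read_file('example_fh', keys=['pos'])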
| mit |
aseemmittal/LibreHatti | src/librehatti/prints/helper.py | 5 | 1184 | #To convert digits into words
def num2eng(n):
"""
    Convert an integer into English words using the Indian numbering
    system (Hundred, Thousand, Lakh, Crore).
    argument: number (int)
    return: words (str)
"""
words = ''
units = ['', 'One', 'Two', 'Three', 'Four', 'Five', 'Six', 'Seven',
'Eight', 'Nine','Ten', 'Eleven', 'Twelve', 'Thirteen', 'Fourteen',
'Fifteen', 'Sixteen', 'Seventeen', 'Eighteen', 'Nineteen']
tens = ['','Ten', 'Twenty', 'Thirty', 'Forty', 'Fifty', 'Sixty', 'Seventy',
'Eighty', 'Ninety']
for group in ['', 'Hundred', 'Thousand', 'Lakh', 'Crore']:
if group in ['', 'Thousand', 'Lakh']:
n, digits = n // 100, n % 100
elif group == 'Hundred':
n, digits = n // 10, n % 10
else:
digits = n
if digits in range (1, 20):
words = units [digits] + ' ' + group + ' ' + words
elif digits in range (20, 100):
ten_digit, unit_digit = digits // 10, digits % 10
words = tens [ten_digit] + ' ' + units [
unit_digit] + ' ' + group + ' ' + words
elif digits >= 100:
words = num2eng (digits) + ' crore ' + words
return words
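# Example outputs (illustrative; the function groups digits in the Indian
# numbering system and leaves trailing spaces):
#   num2eng(56)     -> 'Fifty Six'
#   num2eng(123456) -> 'One Lakh Twenty Three Thousand Four Hundred Fifty Six'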
| gpl-2.0 |
gtko/Sick-Beard | lib/requests/packages/urllib3/exceptions.py | 65 | 1979 | # urllib3/exceptions.py
# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
## Base Exceptions
class HTTPError(Exception):
"Base exception used by this module."
pass
class PoolError(HTTPError):
"Base exception for errors caused within a pool."
def __init__(self, pool, message):
self.pool = pool
HTTPError.__init__(self, "%s: %s" % (pool, message))
class SSLError(HTTPError):
"Raised when SSL certificate fails in an HTTPS connection."
pass
class DecodeError(HTTPError):
"Raised when automatic decoding based on Content-Type fails."
pass
## Leaf Exceptions
class MaxRetryError(PoolError):
"Raised when the maximum number of retries is exceeded."
def __init__(self, pool, url):
message = "Max retries exceeded with url: %s" % url
PoolError.__init__(self, pool, message)
self.url = url
class HostChangedError(PoolError):
"Raised when an existing pool gets a request for a foreign host."
def __init__(self, pool, url, retries=3):
message = "Tried to open a foreign host with url: %s" % url
PoolError.__init__(self, pool, message)
self.url = url
self.retries = retries
class TimeoutError(PoolError):
"Raised when a socket timeout occurs."
pass
class EmptyPoolError(PoolError):
"Raised when a pool runs out of connections and no more are allowed."
pass
class ClosedPoolError(PoolError):
"Raised when a request enters a pool after the pool has been closed."
pass
class LocationParseError(ValueError, HTTPError):
"Raised when get_host or similar fails to parse the URL input."
def __init__(self, location):
message = "Failed to parse: %s" % location
        super(LocationParseError, self).__init__(message)
self.location = location
| gpl-3.0 |
jiankers/weevely3 | modules/net/curl.py | 12 | 5302 | from core.vectors import PhpCode, ShellCmd, ModuleExec, PhpFile, Os
from core.module import Module
from core import modules
from core import messages
from core.loggers import log
import os
class Curl(Module):
"""Perform a curl-like HTTP request."""
aliases = [ 'curl' ]
def init(self):
self.register_info(
{
'author': [
'Emilio Pinna'
],
'license': 'GPLv3'
}
)
self.register_vectors(
[
PhpFile(
payload_path = os.path.join(self.folder, 'php_context.tpl'),
name = 'file_get_contents',
),
PhpFile(
payload_path = os.path.join(self.folder, 'php_context.tpl'),
name = 'fopen_stream_get_contents',
),
PhpFile(
payload_path = os.path.join(self.folder, 'php_context.tpl'),
name = 'fopen_fread',
),
PhpFile(
payload_path = os.path.join(self.folder, 'php_curl.tpl'),
name = 'php_curl',
),
PhpFile(
payload_path = os.path.join(self.folder, 'php_httprequest1.tpl'),
name = 'php_httprequest1',
),
ShellCmd(
payload = """curl -s -i ${ "-A '%s'" % user_agent if user_agent else "" } ${ '--connect-timeout %i' % connect_timeout } ${ '-X %s' % request if (not data and request) else '' } ${ " ".join([ "-H '%s'" % h for h in header ]) } ${ "-b '%s'" % cookie if cookie else '' } ${ ' '.join([ "-d '%s'" % d for d in data ]) } '${ url }'""",
name = 'sh_curl'
)
]
)
self.register_arguments([
{ 'name' : 'url' },
{ 'name' : '--header', 'dest' : 'header', 'action' : 'append', 'default' : [] },
{ 'name' : '-H', 'dest' : 'header', 'action' : 'append', 'default' : [] },
{ 'name' : '--cookie', 'dest' : 'cookie' },
{ 'name' : '-b', 'dest' : 'cookie' },
{ 'name' : '--data', 'dest' : 'data', 'action' : 'append', 'default' : [] },
{ 'name' : '-d', 'dest' : 'data', 'action' : 'append', 'default' : [] },
{ 'name' : '--user-agent', 'dest' : 'user_agent' },
{ 'name' : '-A', 'dest' : 'user_agent' },
            { 'name' : '--connect-timeout', 'type' : int, 'default' : 5, 'help' : 'Default: 5' },
{ 'name' : '--request', 'dest' : 'request', 'choices' : ( 'GET', 'HEAD', 'POST', 'PUT' ), 'default' : 'GET' },
{ 'name' : '-X', 'dest' : 'request', 'choices' : ( 'GET', 'HEAD', 'POST', 'PUT' ), 'default' : 'GET' },
{ 'name' : '--output', 'dest' : 'output' },
{ 'name' : '-o', 'dest' : 'output' },
{ 'name' : '-i', 'dest' : 'include_headers', 'help' : 'Include response headers', 'action' : 'store_true', 'default' : False },
{ 'name' : '-local', 'action' : 'store_true', 'default' : False, 'help' : 'Save file locally with -o|--output' },
{ 'name' : '-vector', 'choices' : self.vectors.get_names(), 'default' : 'file_get_contents' }
])
def run(self):
headers = []
saved = None
vector_name, result = self.vectors.find_first_result(
names = [ self.args.get('vector') ],
format_args = self.args,
condition = lambda r: r if r and r.strip() else None
)
# Print error and exit with no response or no headers
if not (vector_name and result):
log.warn(messages.module_net_curl.unexpected_response)
return None, headers, saved
elif not '\r\n'*2 in result:
            # If something is returned but there is no '\r\n'*2, we consider
            # everything as headers. This happens with 204 No Content
            # responses that end with '\r\n\r'.
headers = result
result = ''
else:
headers, result = result.split('\r\n'*2, 1)
headers = (
[
h.rstrip() for h
in headers.split('\r\n')
] if '\r\n' in headers
else headers
)
output_path = self.args.get('output')
if output_path:
# If response must be saved, it's anyway safer to save it
# within additional requests
if not self.args.get('local'):
saved = ModuleExec('file_upload', [ '-content', result, output_path ]).run()
else:
try:
open(output_path, 'wb').write(result)
except Exception as e:
log.warning(
messages.generic.error_loading_file_s_s % (output_path, str(e)))
saved = False
else:
saved = True
return result, headers, saved
def print_result(self, result):
result, headers, saved = result
# If is saved, we do not want output
if self.args.get('output'):
log.info(saved)
return
# Print headers if requested
if self.args.get('include_headers'):
log.info( '\r\n'.join(headers) + '\r\n')
if result:
log.info(result)
| gpl-3.0 |
arunsingh/selenium | py/selenium/webdriver/remote/webelement.py | 20 | 17448 | # Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import hashlib
import os
import zipfile
try:
from StringIO import StringIO as IOStream
except ImportError: # 3+
from io import BytesIO as IOStream
import base64
from .command import Command
from selenium.common.exceptions import WebDriverException
from selenium.common.exceptions import InvalidSelectorException
from selenium.webdriver.common.by import By
from selenium.webdriver.common.keys import Keys
try:
str = basestring
except NameError:
pass
class WebElement(object):
"""Represents a DOM element.
Generally, all interesting operations that interact with a document will be
performed through this interface.
All method calls will do a freshness check to ensure that the element
reference is still valid. This essentially determines whether or not the
element is still attached to the DOM. If this test fails, then an
``StaleElementReferenceException`` is thrown, and all future calls to this
instance will fail."""
def __init__(self, parent, id_, w3c=False):
self._parent = parent
self._id = id_
self._w3c = w3c
def __repr__(self):
return '<{0.__module__}.{0.__name__} (session="{1}", element="{2}")>'.format(
type(self), self._parent.session_id, self._id)
@property
def tag_name(self):
"""This element's ``tagName`` property."""
return self._execute(Command.GET_ELEMENT_TAG_NAME)['value']
@property
def text(self):
"""The text of the element."""
return self._execute(Command.GET_ELEMENT_TEXT)['value']
def click(self):
"""Clicks the element."""
self._execute(Command.CLICK_ELEMENT)
def submit(self):
"""Submits a form."""
self._execute(Command.SUBMIT_ELEMENT)
def clear(self):
"""Clears the text if it's a text entry element."""
self._execute(Command.CLEAR_ELEMENT)
def get_attribute(self, name):
"""Gets the given attribute or property of the element.
This method will first try to return the value of a property with the
given name. If a property with that name doesn't exist, it returns the
value of the attribute with the same name. If there's no attribute with
that name, ``None`` is returned.
        Values which are considered truthy, that is equal to "true" or "false",
        are returned as the lowercase strings "true" and "false". All other
        non-``None`` values are returned
as strings. For attributes or properties which do not exist, ``None``
is returned.
:Args:
- name - Name of the attribute/property to retrieve.
Example::
# Check if the "active" CSS class is applied to an element.
is_active = "active" in target_element.get_attribute("class")
"""
resp = self._execute(Command.GET_ELEMENT_ATTRIBUTE, {'name': name})
attributeValue = ''
if resp['value'] is None:
attributeValue = None
else:
attributeValue = resp['value']
if name != 'value' and attributeValue.lower() in ('true', 'false'):
attributeValue = attributeValue.lower()
return attributeValue
def is_selected(self):
"""Returns whether the element is selected.
Can be used to check if a checkbox or radio button is selected.
"""
return self._execute(Command.IS_ELEMENT_SELECTED)['value']
def is_enabled(self):
"""Returns whether the element is enabled."""
return self._execute(Command.IS_ELEMENT_ENABLED)['value']
def find_element_by_id(self, id_):
"""Finds element within this element's children by ID.
:Args:
- id_ - ID of child element to locate.
"""
return self.find_element(by=By.ID, value=id_)
def find_elements_by_id(self, id_):
"""Finds a list of elements within this element's children by ID.
:Args:
- id_ - Id of child element to find.
"""
return self.find_elements(by=By.ID, value=id_)
def find_element_by_name(self, name):
"""Finds element within this element's children by name.
:Args:
- name - name property of the element to find.
"""
return self.find_element(by=By.NAME, value=name)
def find_elements_by_name(self, name):
"""Finds a list of elements within this element's children by name.
:Args:
- name - name property to search for.
"""
return self.find_elements(by=By.NAME, value=name)
def find_element_by_link_text(self, link_text):
"""Finds element within this element's children by visible link text.
:Args:
- link_text - Link text string to search for.
"""
return self.find_element(by=By.LINK_TEXT, value=link_text)
def find_elements_by_link_text(self, link_text):
"""Finds a list of elements within this element's children by visible link text.
:Args:
- link_text - Link text string to search for.
"""
return self.find_elements(by=By.LINK_TEXT, value=link_text)
def find_element_by_partial_link_text(self, link_text):
"""Finds element within this element's children by partially visible link text.
:Args:
- link_text - Link text string to search for.
"""
return self.find_element(by=By.PARTIAL_LINK_TEXT, value=link_text)
def find_elements_by_partial_link_text(self, link_text):
"""Finds a list of elements within this element's children by link text.
:Args:
- link_text - Link text string to search for.
"""
return self.find_elements(by=By.PARTIAL_LINK_TEXT, value=link_text)
def find_element_by_tag_name(self, name):
"""Finds element within this element's children by tag name.
:Args:
- name - name of html tag (eg: h1, a, span)
"""
return self.find_element(by=By.TAG_NAME, value=name)
def find_elements_by_tag_name(self, name):
"""Finds a list of elements within this element's children by tag name.
:Args:
- name - name of html tag (eg: h1, a, span)
"""
return self.find_elements(by=By.TAG_NAME, value=name)
def find_element_by_xpath(self, xpath):
"""Finds element by xpath.
:Args:
xpath - xpath of element to locate. "//input[@class='myelement']"
Note: The base path will be relative to this element's location.
        This will select the first link under this element.
        ::
            myelement.find_element_by_xpath(".//a")
        However, this will select the first link on the page.
        ::
            myelement.find_element_by_xpath("//a")
"""
return self.find_element(by=By.XPATH, value=xpath)
def find_elements_by_xpath(self, xpath):
"""Finds elements within the element by xpath.
:Args:
- xpath - xpath locator string.
Note: The base path will be relative to this element's location.
This will select all links under this element.
::
myelement.find_elements_by_xpath(".//a")
However, this will select all links in the page itself.
::
myelement.find_elements_by_xpath("//a")
"""
return self.find_elements(by=By.XPATH, value=xpath)
def find_element_by_class_name(self, name):
"""Finds element within this element's children by class name.
:Args:
- name - class name to search for.
"""
return self.find_element(by=By.CLASS_NAME, value=name)
def find_elements_by_class_name(self, name):
"""Finds a list of elements within this element's children by class name.
:Args:
- name - class name to search for.
"""
return self.find_elements(by=By.CLASS_NAME, value=name)
def find_element_by_css_selector(self, css_selector):
"""Finds element within this element's children by CSS selector.
:Args:
        - css_selector - CSS selector string, ex: 'a.nav#home'
"""
return self.find_element(by=By.CSS_SELECTOR, value=css_selector)
def find_elements_by_css_selector(self, css_selector):
"""Finds a list of elements within this element's children by CSS selector.
:Args:
        - css_selector - CSS selector string, ex: 'a.nav#home'
"""
return self.find_elements(by=By.CSS_SELECTOR, value=css_selector)
def send_keys(self, *value):
"""Simulates typing into the element.
:Args:
- value - A string for typing, or setting form fields. For setting
file inputs, this could be a local file path.
Use this to send simple key events or to fill out form fields::
form_textfield = driver.find_element_by_name('username')
form_textfield.send_keys("admin")
This can also be used to set file inputs.
::
file_input = driver.find_element_by_name('profilePic')
file_input.send_keys("path/to/profilepic.gif")
# Generally it's better to wrap the file path in one of the methods
# in os.path to return the actual path to support cross OS testing.
# file_input.send_keys(os.path.abspath("path/to/profilepic.gif"))
"""
# transfer file to another machine only if remote driver is used
# the same behaviour as for java binding
if self.parent._is_remote:
local_file = self.parent.file_detector.is_local_file(*value)
if local_file is not None:
value = self._upload(local_file)
        typing = []
        for val in value:
            if isinstance(val, Keys):
                typing.append(val)
            elif isinstance(val, int):
                # Integers are stringified and sent character by character.
                typing.extend(str(val))
            else:
                typing.extend(val)
self._execute(Command.SEND_KEYS_TO_ELEMENT, {'value': typing})
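    # Illustrative use of send_keys() with special keys (assumes a live
    # session; the element name is hypothetical; not part of the original
    # source):
    #
    #     from selenium.webdriver.common.keys import Keys
    #     field = driver.find_element_by_name("q")
    #     field.send_keys("selenium", Keys.ENTER)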
# RenderedWebElement Items
def is_displayed(self):
"""Whether the element is visible to a user."""
return self._execute(Command.IS_ELEMENT_DISPLAYED)['value']
@property
def location_once_scrolled_into_view(self):
"""THIS PROPERTY MAY CHANGE WITHOUT WARNING. Use this to discover
where on the screen an element is so that we can click it. This method
should cause the element to be scrolled into view.
Returns the top lefthand corner location on the screen, or ``None`` if
the element is not visible.
"""
return self._execute(Command.GET_ELEMENT_LOCATION_ONCE_SCROLLED_INTO_VIEW)['value']
@property
def size(self):
"""The size of the element."""
size = self._execute(Command.GET_ELEMENT_SIZE)['value']
new_size = {}
new_size["height"] = size["height"]
new_size["width"] = size["width"]
return new_size
def value_of_css_property(self, property_name):
"""The value of a CSS property."""
return self._execute(Command.GET_ELEMENT_VALUE_OF_CSS_PROPERTY,
{'propertyName': property_name})['value']
@property
def location(self):
"""The location of the element in the renderable canvas."""
old_loc = self._execute(Command.GET_ELEMENT_LOCATION)['value']
new_loc = {"x": old_loc['x'],
"y": old_loc['y']}
return new_loc
@property
def rect(self):
"""A dictionary with the size and location of the element."""
return self._execute(Command.GET_ELEMENT_RECT)['value']
@property
def screenshot_as_base64(self):
"""
Gets the screenshot of the current element as a base64 encoded string.
:Usage:
img_b64 = element.screenshot_as_base64
"""
return self._execute(Command.ELEMENT_SCREENSHOT)['value']
@property
def screenshot_as_png(self):
"""
        Gets the screenshot of the current element as binary data.
:Usage:
element_png = element.screenshot_as_png
"""
return base64.b64decode(self.screenshot_as_base64.encode('ascii'))
def screenshot(self, filename):
"""
Gets the screenshot of the current element. Returns False if there is
any IOError, else returns True. Use full paths in your filename.
:Args:
- filename: The full path you wish to save your screenshot to.
:Usage:
element.screenshot('/Screenshots/foo.png')
"""
png = self.screenshot_as_png
try:
with open(filename, 'wb') as f:
f.write(png)
except IOError:
return False
finally:
del png
return True
@property
def parent(self):
"""Internal reference to the WebDriver instance this element was found from."""
return self._parent
@property
def id(self):
"""Internal ID used by selenium.
        This is mainly for internal use. Simple use cases such as checking if
        two webelements refer to the same element can be done using ``==``::
if element1 == element2:
print("These 2 are equal")
"""
return self._id
def __eq__(self, element):
return hasattr(element, 'id') and self._id == element.id
def __ne__(self, element):
return not self.__eq__(element)
# Private Methods
def _execute(self, command, params=None):
"""Executes a command against the underlying HTML element.
Args:
command: The name of the command to _execute as a string.
params: A dictionary of named parameters to send with the command.
Returns:
The command's JSON response loaded into a dictionary object.
"""
if not params:
params = {}
params['id'] = self._id
return self._parent.execute(command, params)
def find_element(self, by=By.ID, value=None):
if not By.is_valid(by) or not isinstance(value, str):
raise InvalidSelectorException("Invalid locator values passed in")
if self._w3c:
if by == By.ID:
by = By.CSS_SELECTOR
value = '[id="%s"]' % value
elif by == By.TAG_NAME:
by = By.CSS_SELECTOR
elif by == By.CLASS_NAME:
by = By.CSS_SELECTOR
value = ".%s" % value
elif by == By.NAME:
by = By.CSS_SELECTOR
value = '[name="%s"]' % value
return self._execute(Command.FIND_CHILD_ELEMENT,
{"using": by, "value": value})['value']
def find_elements(self, by=By.ID, value=None):
if not By.is_valid(by) or not isinstance(value, str):
raise InvalidSelectorException("Invalid locator values passed in")
if self._w3c:
if by == By.ID:
by = By.CSS_SELECTOR
value = '[id="%s"]' % value
elif by == By.TAG_NAME:
by = By.CSS_SELECTOR
elif by == By.CLASS_NAME:
by = By.CSS_SELECTOR
value = ".%s" % value
elif by == By.NAME:
by = By.CSS_SELECTOR
value = '[name="%s"]' % value
return self._execute(Command.FIND_CHILD_ELEMENTS,
{"using": by, "value": value})['value']
def __hash__(self):
return int(hashlib.md5(self._id.encode('utf-8')).hexdigest(), 16)
def _upload(self, filename):
fp = IOStream()
zipped = zipfile.ZipFile(fp, 'w', zipfile.ZIP_DEFLATED)
zipped.write(filename, os.path.split(filename)[1])
zipped.close()
        # base64.encodestring() is deprecated (removed in Python 3.9);
        # prefer encodebytes() when it exists.
        encode = getattr(base64, 'encodebytes', None) or base64.encodestring
        content = encode(fp.getvalue())
if not isinstance(content, str):
content = content.decode('utf-8')
try:
return self._execute(Command.UPLOAD_FILE,
{'file': content})['value']
        except WebDriverException as e:
            message = str(e)
            if ("Unrecognized command: POST" in message
                    or "Command not found: POST " in message
                    or '{"status":405,"value":["GET","HEAD","DELETE"]}' in message):
                # The remote end does not support file uploads; fall back to
                # passing the local path through unchanged.
                return filename
            raise
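# Minimal standalone sketch of the payload _upload() builds (the file path
# is hypothetical; not part of the original source):
#
#     import base64, zipfile
#     from io import BytesIO
#     fp = BytesIO()
#     zipped = zipfile.ZipFile(fp, 'w', zipfile.ZIP_DEFLATED)
#     zipped.write('/tmp/pic.gif', 'pic.gif')
#     zipped.close()
#     payload = base64.b64encode(fp.getvalue()).decode('ascii')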
| apache-2.0 |
mfalesni/cfme_tests | cfme/tests/physical_infrastructure/ui/test_physical_server_list.py | 2 | 2062 | # -*- coding: utf-8 -*-
import pytest
from cfme.utils.appliance.implementations.ui import navigate_to
from cfme.physical.provider.lenovo import LenovoProvider
pytestmark = [pytest.mark.tier(3), pytest.mark.provider([LenovoProvider], scope="module")]
@pytest.fixture(scope="module")
def physical_server_collection(appliance, provider, setup_provider_modscope):
# Get and return the physical server collection
yield appliance.collections.physical_servers
def test_physical_servers_view_displayed(physical_server_collection):
"""Navigate to the physical servers page and verify that servers are displayed"""
physical_servers_view = navigate_to(physical_server_collection, 'All')
assert physical_servers_view.is_displayed
def test_physical_servers_view_dropdowns(physical_server_collection):
"""Navigate to the physical servers page and verify that the dropdown menus are present"""
physical_servers_view = navigate_to(physical_server_collection, 'All')
configuration_items = physical_servers_view.toolbar.configuration.items
assert "Refresh Relationships and Power States" in configuration_items
assert "Remove Physical Servers from Inventory" in configuration_items
power_items = physical_servers_view.toolbar.power.items
assert "Power On" in power_items
assert "Power Off" in power_items
assert "Power Off Immediately" in power_items
assert "Restart" in power_items
assert "Restart Immediately" in power_items
assert "Restart to System Setup" in power_items
assert "Restart Management Controller" in power_items
identify_items = physical_servers_view.toolbar.identify.items
assert "Blink LED" in identify_items
assert "Turn On LED" in identify_items
assert "Turn Off LED" in identify_items
policy_items = physical_servers_view.toolbar.policy.items
assert "Manage Policies" in policy_items
assert "Edit Tags" in policy_items
lifecycle_items = physical_servers_view.toolbar.lifecycle.items
assert "Provision Physical Server" in lifecycle_items
| gpl-2.0 |
simod/geonode | geonode/base/migrations/0029_auto_20171114_0341.py | 7 | 3099 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('base', '0028_resourcebase_is_approved'),
]
operations = [
migrations.AlterField(
model_name='resourcebase',
name='language',
field=models.CharField(default=b'eng', help_text='language used within the dataset', max_length=3, verbose_name='language', choices=[(b'abk', b'Abkhazian'), (b'aar', b'Afar'), (b'afr', b'Afrikaans'), (b'amh', b'Amharic'), (b'ara', b'Arabic'), (b'asm', b'Assamese'), (b'aym', b'Aymara'), (b'aze', b'Azerbaijani'), (b'bak', b'Bashkir'), (b'ben', b'Bengali'), (b'bih', b'Bihari'), (b'bis', b'Bislama'), (b'bre', b'Breton'), (b'bul', b'Bulgarian'), (b'bel', b'Byelorussian'), (b'cat', b'Catalan'), (b'cos', b'Corsican'), (b'dan', b'Danish'), (b'dzo', b'Dzongkha'), (b'eng', b'English'), (b'fra', b'French'), (b'epo', b'Esperanto'), (b'est', b'Estonian'), (b'fao', b'Faroese'), (b'fij', b'Fijian'), (b'fin', b'Finnish'), (b'fry', b'Frisian'), (b'glg', b'Gallegan'), (b'ger', b'German'), (b'gre', b'Greek'), (b'kal', b'Greenlandic'), (b'grn', b'Guarani'), (b'guj', b'Gujarati'), (b'hau', b'Hausa'), (b'heb', b'Hebrew'), (b'hin', b'Hindi'), (b'hun', b'Hungarian'), (b'ind', b'Indonesian'), (b'ina', b'Interlingua (International Auxiliary language Association)'), (b'iku', b'Inuktitut'), (b'ipk', b'Inupiak'), (b'ita', b'Italian'), (b'jpn', b'Japanese'), (b'kan', b'Kannada'), (b'kas', b'Kashmiri'), (b'kaz', b'Kazakh'), (b'khm', b'Khmer'), (b'kin', b'Kinyarwanda'), (b'kir', b'Kirghiz'), (b'kor', b'Korean'), (b'kur', b'Kurdish'), (b'oci', b"Langue d 'Oc (post 1500)"), (b'lao', b'Lao'), (b'lat', b'Latin'), (b'lav', b'Latvian'), (b'lin', b'Lingala'), (b'lit', b'Lithuanian'), (b'mlg', b'Malagasy'), (b'mlt', b'Maltese'), (b'mar', b'Marathi'), (b'mol', b'Moldavian'), (b'mon', b'Mongolian'), (b'nau', b'Nauru'), (b'nep', b'Nepali'), (b'nor', b'Norwegian'), (b'ori', b'Oriya'), (b'orm', b'Oromo'), (b'pan', b'Panjabi'), (b'pol', b'Polish'), (b'por', b'Portuguese'), (b'pus', b'Pushto'), (b'que', b'Quechua'), (b'roh', b'Rhaeto-Romance'), (b'run', b'Rundi'), (b'rus', b'Russian'), (b'smo', b'Samoan'), (b'sag', b'Sango'), (b'san', b'Sanskrit'), (b'scr', b'Serbo-Croatian'), (b'sna', b'Shona'), (b'snd', b'Sindhi'), (b'sin', b'Singhalese'), (b'ssw', b'Siswant'), (b'slv', b'Slovenian'), (b'som', b'Somali'), (b'sot', b'Sotho'), (b'spa', b'Spanish'), (b'sun', b'Sudanese'), (b'swa', b'Swahili'), (b'tgl', b'Tagalog'), (b'tgk', b'Tajik'), (b'tam', b'Tamil'), (b'tat', b'Tatar'), (b'tel', b'Telugu'), (b'tha', b'Thai'), (b'tir', b'Tigrinya'), (b'tog', b'Tonga (Nyasa)'), (b'tso', b'Tsonga'), (b'tsn', b'Tswana'), (b'tur', b'Turkish'), (b'tuk', b'Turkmen'), (b'twi', b'Twi'), (b'uig', b'Uighur'), (b'ukr', b'Ukrainian'), (b'urd', b'Urdu'), (b'uzb', b'Uzbek'), (b'vie', b'Vietnamese'), (b'vol', b'Volap\xc3\xbck'), (b'wol', b'Wolof'), (b'xho', b'Xhosa'), (b'yid', b'Yiddish'), (b'yor', b'Yoruba'), (b'zha', b'Zhuang'), (b'zul', b'Zulu')]),
),
]
| gpl-3.0 |
gabeharms/firestorm | indra/llcorehttp/tests/test_llcorehttp_peer.py | 2 | 14000 | #!/usr/bin/env python
"""\
@file test_llsdmessage_peer.py
@author Nat Goodspeed
@date 2008-10-09
@brief This script asynchronously runs the executable (with args) specified on
the command line, returning its result code. While that executable is
running, we provide dummy local services for use by C++ tests.
$LicenseInfo:firstyear=2008&license=viewerlgpl$
Second Life Viewer Source Code
Copyright (C) 2012-2013, Linden Research, Inc.
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation;
version 2.1 of the License only.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
Linden Research, Inc., 945 Battery Street, San Francisco, CA 94111 USA
$/LicenseInfo$
"""
import os
import sys
import time
import select
import getopt
from threading import Thread
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from SocketServer import ThreadingMixIn
mydir = os.path.dirname(__file__) # expected to be .../indra/llcorehttp/tests/
sys.path.insert(0, os.path.join(mydir, os.pardir, os.pardir, "lib", "python"))
from indra.util.fastest_elementtree import parse as xml_parse
from indra.base import llsd
from testrunner import freeport, run, debug, VERBOSE
class TestHTTPRequestHandler(BaseHTTPRequestHandler):
"""This subclass of BaseHTTPRequestHandler is to receive and echo
LLSD-flavored messages sent by the C++ LLHTTPClient.
Target URLs are fairly free-form and are assembled by
concatinating fragments. Currently defined fragments
are:
- '/reflect/' Request headers are bounced back to caller
after prefixing with 'X-Reflect-'
- '/fail/' Body of request can contain LLSD with
'reason' string and 'status' integer
which will become response header.
- '/bug2295/' 206 response, no data in body:
-- '/bug2295/0/' "Content-Range: bytes 0-75/2983"
-- '/bug2295/1/' "Content-Range: bytes 0-75/*"
-- '/bug2295/2/' "Content-Range: bytes 0-75/2983",
"Content-Length: 0"
                     -- '/bug2295/00000012/0/'    Generates PARTIAL_FILE (18) error in libcurl.
"Content-Range: bytes 0-75/2983",
"Content-Length: 76"
-- '/bug2295/inv_cont_range/0/' Generates HE_INVALID_CONTENT_RANGE error in llcorehttp.
- '/503/' Generate 503 responses with various kinds
of 'retry-after' headers
-- '/503/0/' "Retry-After: 2"
-- '/503/1/' "Retry-After: Thu, 31 Dec 2043 23:59:59 GMT"
-- '/503/2/' "Retry-After: Fri, 31 Dec 1999 23:59:59 GMT"
-- '/503/3/' "Retry-After: "
-- '/503/4/' "Retry-After: (*#*(@*(@(")"
-- '/503/5/' "Retry-After: aklsjflajfaklsfaklfasfklasdfklasdgahsdhgasdiogaioshdgo"
-- '/503/6/' "Retry-After: 1 2 3 4 5 6 7 8 9 10"
    Some combinations make no sense; there's no effort to protect
    you from that.
"""
ignore_exceptions = (Exception,)
def read(self):
# The following logic is adapted from the library module
# SimpleXMLRPCServer.py.
# Get arguments by reading body of request.
# We read this in chunks to avoid straining
# socket.read(); around the 10 or 15Mb mark, some platforms
# begin to have problems (bug #792570).
try:
size_remaining = int(self.headers["content-length"])
except (KeyError, ValueError):
return ""
max_chunk_size = 10*1024*1024
L = []
while size_remaining:
chunk_size = min(size_remaining, max_chunk_size)
chunk = self.rfile.read(chunk_size)
L.append(chunk)
size_remaining -= len(chunk)
return ''.join(L)
# end of swiped read() logic
def read_xml(self):
# This approach reads the entire POST data into memory first
return llsd.parse(self.read())
## # This approach attempts to stream in the LLSD XML from self.rfile,
## # assuming that the underlying XML parser reads its input file
## # incrementally. Unfortunately I haven't been able to make it work.
## tree = xml_parse(self.rfile)
## debug("Finished raw parse")
## debug("parsed XML tree %s", tree)
## debug("parsed root node %s", tree.getroot())
## debug("root node tag %s", tree.getroot().tag)
## return llsd.to_python(tree.getroot())
def do_HEAD(self):
self.do_GET(withdata=False)
def do_GET(self, withdata=True):
# Of course, don't attempt to read data.
try:
self.answer(dict(reply="success", status=200,
reason="Your GET operation worked"))
except self.ignore_exceptions, e:
print >> sys.stderr, "Exception during GET (ignoring): %s" % str(e)
def do_POST(self):
# Read the provided POST data.
# self.answer(self.read())
try:
self.answer(dict(reply="success", status=200,
reason=self.read()))
except self.ignore_exceptions, e:
print >> sys.stderr, "Exception during POST (ignoring): %s" % str(e)
def do_PUT(self):
# Read the provided PUT data.
# self.answer(self.read())
try:
self.answer(dict(reply="success", status=200,
reason=self.read()))
except self.ignore_exceptions, e:
print >> sys.stderr, "Exception during PUT (ignoring): %s" % str(e)
def answer(self, data, withdata=True):
debug("%s.answer(%s): self.path = %r", self.__class__.__name__, data, self.path)
if "/sleep/" in self.path:
time.sleep(30)
if "/503/" in self.path:
# Tests for various kinds of 'Retry-After' header parsing
body = None
if "/503/0/" in self.path:
self.send_response(503)
self.send_header("retry-after", "2")
elif "/503/1/" in self.path:
self.send_response(503)
self.send_header("retry-after", "Thu, 31 Dec 2043 23:59:59 GMT")
elif "/503/2/" in self.path:
self.send_response(503)
self.send_header("retry-after", "Fri, 31 Dec 1999 23:59:59 GMT")
elif "/503/3/" in self.path:
self.send_response(503)
self.send_header("retry-after", "")
elif "/503/4/" in self.path:
self.send_response(503)
self.send_header("retry-after", "(*#*(@*(@(")
elif "/503/5/" in self.path:
self.send_response(503)
self.send_header("retry-after", "aklsjflajfaklsfaklfasfklasdfklasdgahsdhgasdiogaioshdgo")
elif "/503/6/" in self.path:
self.send_response(503)
self.send_header("retry-after", "1 2 3 4 5 6 7 8 9 10")
else:
# Unknown request
self.send_response(400)
body = "Unknown /503/ path in server"
if "/reflect/" in self.path:
self.reflect_headers()
self.send_header("Content-type", "text/plain")
self.end_headers()
if body:
self.wfile.write(body)
elif "/bug2295/" in self.path:
# Test for https://jira.secondlife.com/browse/BUG-2295
#
# Client can receive a header indicating data should
# appear in the body without actually getting the body.
# Library needs to defend against this case.
#
body = None
if "/bug2295/0/" in self.path:
self.send_response(206)
self.send_header("Content-Range", "bytes 0-75/2983")
elif "/bug2295/1/" in self.path:
self.send_response(206)
self.send_header("Content-Range", "bytes 0-75/*")
elif "/bug2295/2/" in self.path:
self.send_response(206)
self.send_header("Content-Range", "bytes 0-75/2983")
self.send_header("Content-Length", "0")
elif "/bug2295/00000012/0/" in self.path:
self.send_response(206)
self.send_header("Content-Range", "bytes 0-75/2983")
self.send_header("Content-Length", "76")
elif "/bug2295/inv_cont_range/0/" in self.path:
self.send_response(206)
self.send_header("Content-Range", "bytes 0-75/2983")
body = "Some text, but not enough."
else:
# Unknown request
self.send_response(400)
if "/reflect/" in self.path:
self.reflect_headers()
self.send_header("Content-type", "text/plain")
self.end_headers()
if body:
self.wfile.write(body)
elif "fail" not in self.path:
data = data.copy() # we're going to modify
# Ensure there's a "reply" key in data, even if there wasn't before
data["reply"] = data.get("reply", llsd.LLSD("success"))
response = llsd.format_xml(data)
debug("success: %s", response)
self.send_response(200)
if "/reflect/" in self.path:
self.reflect_headers()
self.send_header("Content-type", "application/llsd+xml")
self.send_header("Content-Length", str(len(response)))
self.send_header("X-LL-Special", "Mememememe");
self.end_headers()
if withdata:
self.wfile.write(response)
else: # fail requested
status = data.get("status", 500)
# self.responses maps an int status to a (short, long) pair of
# strings. We want the longer string. That's why we pass a string
# pair to get(): the [1] will select the second string, whether it
# came from self.responses or from our default pair.
reason = data.get("reason",
self.responses.get(status,
("fail requested",
"Your request specified failure status %s "
"without providing a reason" % status))[1])
debug("fail requested: %s: %r", status, reason)
self.send_error(status, reason)
if "/reflect/" in self.path:
self.reflect_headers()
self.end_headers()
def reflect_headers(self):
for name in self.headers.keys():
# print "Header: %s: %s" % (name, self.headers[name])
self.send_header("X-Reflect-" + name, self.headers[name])
if not VERBOSE:
# When VERBOSE is set, skip both these overrides because they exist to
# suppress output.
def log_request(self, code, size=None):
# For present purposes, we don't want the request splattered onto
# stderr, as it would upset devs watching the test run
pass
def log_error(self, format, *args):
# Suppress error output as well
pass
class Server(ThreadingMixIn, HTTPServer):
# This pernicious flag is on by default in HTTPServer. But proper
# operation of freeport() absolutely depends on it being off.
allow_reuse_address = False
# Override of BaseServer.handle_error(). Not too interested
# in errors and the default handler emits a scary traceback
# to stderr which annoys some. Disable this override to get
# default behavior which *shouldn't* cause the program to return
# a failure status.
def handle_error(self, request, client_address):
print '-'*40
print 'Ignoring exception during processing of request from',
print client_address
print '-'*40
if __name__ == "__main__":
do_valgrind = False
path_search = False
options, args = getopt.getopt(sys.argv[1:], "V", ["valgrind"])
for option, value in options:
if option == "-V" or option == "--valgrind":
do_valgrind = True
# Instantiate a Server(TestHTTPRequestHandler) on the first free port
# in the specified port range. Doing this inline is better than in a
# daemon thread: if it blows up here, we'll get a traceback. If it blew up
# in some other thread, the traceback would get eaten and we'd run the
# subject test program anyway.
httpd, port = freeport(xrange(8000, 8020),
lambda port: Server(('127.0.0.1', port), TestHTTPRequestHandler))
# Pass the selected port number to the subject test program via the
# environment. We don't want to impose requirements on the test program's
# command-line parsing -- and anyway, for C++ integration tests, that's
# performed in TUT code rather than our own.
os.environ["LL_TEST_PORT"] = str(port)
debug("$LL_TEST_PORT = %s", port)
if do_valgrind:
args = ["valgrind", "--log-file=./valgrind.log"] + args
path_search = True
sys.exit(run(server=Thread(name="httpd", target=httpd.serve_forever), use_path=path_search, *args))
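# Illustrative invocations (the test binary name is hypothetical; not part
# of the original source):
#
#     python test_llcorehttp_peer.py ./llcorehttp_test
#     python test_llcorehttp_peer.py --valgrind ./llcorehttp_test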
| lgpl-2.1 |
dermute/ansible | lib/ansible/plugins/lookup/env.py | 251 | 1071 | # (c) 2012, Jan-Piet Mens <jpmens(at)gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
ret = []
for term in terms:
var = term.split()[0]
ret.append(os.getenv(var, ''))
return ret
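# Illustrative playbook usage of this lookup (not part of the original
# source). Each term's first whitespace-separated token names the
# environment variable, and unset variables yield an empty string:
#
#     - debug: msg="{{ lookup('env', 'HOME') }}"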
| gpl-3.0 |
neumerance/cloudloon2 | horizon/test/helpers.py | 9 | 7377 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import os
import socket
from django.contrib.auth.middleware import AuthenticationMiddleware # noqa
from django.contrib.auth.models import Permission # noqa
from django.contrib.auth.models import User # noqa
from django.contrib.contenttypes.models import ContentType # noqa
from django.contrib.messages.storage import default_storage # noqa
from django.core.handlers import wsgi
from django import http
from django import test as django_test
from django.test.client import RequestFactory # noqa
from django.utils import unittest
LOG = logging.getLogger(__name__)
try:
from selenium.webdriver.firefox.webdriver import WebDriver # noqa
from selenium.webdriver.support import ui as selenium_ui
except ImportError as e:
    # NOTE(saschpe): Several distributions can't ship selenium due to its
    # non-free license, so they have to patch it out of test-requirements.txt.
    # Avoid import failure and force not running selenium tests.
LOG.warning("{0}, force WITH_SELENIUM=False".format(str(e)))
os.environ['WITH_SELENIUM'] = ''
import mox
from horizon import middleware
# Makes output of failing mox tests much easier to read.
wsgi.WSGIRequest.__repr__ = lambda self: "<class 'django.http.HttpRequest'>"
class RequestFactoryWithMessages(RequestFactory):
def get(self, *args, **kwargs):
req = super(RequestFactoryWithMessages, self).get(*args, **kwargs)
req.user = User()
req.session = {}
req._messages = default_storage(req)
return req
def post(self, *args, **kwargs):
req = super(RequestFactoryWithMessages, self).post(*args, **kwargs)
req.user = User()
req.session = {}
req._messages = default_storage(req)
return req
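# Illustrative usage (not part of the original source): requests built this
# way can carry contrib.messages without a full client round-trip.
#
#     from django.contrib import messages
#     factory = RequestFactoryWithMessages()
#     request = factory.get('/')
#     messages.info(request, "hello")   # works because _messages is attached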
@unittest.skipIf(os.environ.get('SKIP_UNITTESTS', False),
"The SKIP_UNITTESTS env variable is set.")
class TestCase(django_test.TestCase):
"""
Specialized base test case class for Horizon which gives access to
numerous additional features:
* The ``mox`` mocking framework via ``self.mox``.
* A ``RequestFactory`` class which supports Django's ``contrib.messages``
framework via ``self.factory``.
* A ready-to-go request object via ``self.request``.
"""
def setUp(self):
self.mox = mox.Mox()
self.factory = RequestFactoryWithMessages()
self.user = User.objects.create_user(username='test', password='test')
self.assertTrue(self.client.login(username="test", password="test"))
self.request = http.HttpRequest()
self.request.session = self.client._session()
middleware.HorizonMiddleware().process_request(self.request)
AuthenticationMiddleware().process_request(self.request)
os.environ["HORIZON_TEST_RUN"] = "True"
def tearDown(self):
self.mox.UnsetStubs()
self.mox.VerifyAll()
del os.environ["HORIZON_TEST_RUN"]
def set_permissions(self, permissions=None):
perm_ids = Permission.objects.values_list('id', flat=True)
self.user.user_permissions.remove(*perm_ids)
for name in permissions:
ct, create = ContentType.objects.get_or_create(model=name,
app_label='horizon')
perm, create = Permission.objects.get_or_create(codename=name,
content_type=ct,
name=name)
self.user.user_permissions.add(perm)
if hasattr(self.user, "_perm_cache"):
del self.user._perm_cache
def assertNoMessages(self, response=None):
"""
Asserts that no messages have been attached by the ``contrib.messages``
framework.
"""
self.assertMessageCount(response, success=0, warn=0, info=0, error=0)
def assertMessageCount(self, response=None, **kwargs):
"""
Asserts that the specified number of messages have been attached
for various message types. Usage would look like
``self.assertMessageCount(success=1)``.
"""
temp_req = self.client.request(**{'wsgi.input': None})
temp_req.COOKIES = self.client.cookies
storage = default_storage(temp_req)
messages = []
if response is None:
# To gain early access to the messages we have to decode the
# cookie on the test client.
if 'messages' in self.client.cookies:
message_cookie = self.client.cookies['messages'].value
messages = storage._decode(message_cookie)
# Check for messages in the context
elif hasattr(response, "context") and "messages" in response.context:
messages = response.context["messages"]
# Check for messages attached to the request on a TemplateResponse
elif hasattr(response, "_request") and hasattr(response._request,
"_messages"):
messages = response._request._messages._queued_messages
# If we don't have messages and we don't expect messages, we're done.
if not any(kwargs.values()) and not messages:
return
# If we expected messages and have none, that's a problem.
if any(kwargs.values()) and not messages:
error_msg = "Messages were expected, but none were set."
assert 0 == sum(kwargs.values()), error_msg
# Otherwise, make sure we got the expected messages.
for msg_type, count in kwargs.items():
msgs = [m.message for m in messages if msg_type in m.tags]
assert len(msgs) == count, \
"%s messages not as expected: %s" % (msg_type.title(),
", ".join(msgs))
@unittest.skipUnless(os.environ.get('WITH_SELENIUM', False),
"The WITH_SELENIUM env variable is not set.")
class SeleniumTestCase(django_test.LiveServerTestCase):
@classmethod
def setUpClass(cls):
if os.environ.get('WITH_SELENIUM', False):
cls.selenium = WebDriver()
super(SeleniumTestCase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
if os.environ.get('WITH_SELENIUM', False):
cls.selenium.quit()
super(SeleniumTestCase, cls).tearDownClass()
def setUp(self):
socket.setdefaulttimeout(10)
self.ui = selenium_ui
super(SeleniumTestCase, self).setUp()
| apache-2.0 |
jotes/boto | tests/unit/cloudformation/test_stack.py | 114 | 9168 | #!/usr/bin/env python
import datetime
import xml.sax
import unittest
import boto.handler
import boto.resultset
import boto.cloudformation
SAMPLE_XML = b"""
<DescribeStacksResponse xmlns="http://cloudformation.amazonaws.com/doc/2010-05-15/">
<DescribeStacksResult>
<Stacks>
<member>
<Tags>
<member>
<Value>value0</Value>
<Key>key0</Key>
</member>
<member>
<Key>key1</Key>
<Value>value1</Value>
</member>
</Tags>
<StackId>arn:aws:cloudformation:ap-southeast-1:100:stack/Name/id</StackId>
<StackStatus>CREATE_COMPLETE</StackStatus>
<StackName>Name</StackName>
<StackStatusReason/>
<Description/>
<NotificationARNs>
<member>arn:aws:sns:ap-southeast-1:100:name</member>
</NotificationARNs>
<CreationTime>2013-01-10T05:04:56Z</CreationTime>
<DisableRollback>false</DisableRollback>
<Outputs>
<member>
<OutputValue>value0</OutputValue>
<Description>output0</Description>
<OutputKey>key0</OutputKey>
</member>
<member>
<OutputValue>value1</OutputValue>
<Description>output1</Description>
<OutputKey>key1</OutputKey>
</member>
</Outputs>
</member>
</Stacks>
</DescribeStacksResult>
<ResponseMetadata>
<RequestId>1</RequestId>
</ResponseMetadata>
</DescribeStacksResponse>
"""
DESCRIBE_STACK_RESOURCE_XML = b"""
<DescribeStackResourcesResult>
<StackResources>
<member>
<StackId>arn:aws:cloudformation:us-east-1:123456789:stack/MyStack/aaf549a0-a413-11df-adb3-5081b3858e83</StackId>
<StackName>MyStack</StackName>
<LogicalResourceId>MyDBInstance</LogicalResourceId>
<PhysicalResourceId>MyStack_DB1</PhysicalResourceId>
<ResourceType>AWS::DBInstance</ResourceType>
<Timestamp>2010-07-27T22:27:28Z</Timestamp>
<ResourceStatus>CREATE_COMPLETE</ResourceStatus>
</member>
<member>
<StackId>arn:aws:cloudformation:us-east-1:123456789:stack/MyStack/aaf549a0-a413-11df-adb3-5081b3858e83</StackId>
<StackName>MyStack</StackName>
<LogicalResourceId>MyAutoScalingGroup</LogicalResourceId>
<PhysicalResourceId>MyStack_ASG1</PhysicalResourceId>
<ResourceType>AWS::AutoScalingGroup</ResourceType>
<Timestamp>2010-07-27T22:28:28.123456Z</Timestamp>
<ResourceStatus>CREATE_IN_PROGRESS</ResourceStatus>
</member>
</StackResources>
</DescribeStackResourcesResult>
"""
LIST_STACKS_XML = b"""
<ListStacksResponse>
<ListStacksResult>
<StackSummaries>
<member>
<StackId>
arn:aws:cloudformation:us-east-1:1234567:stack/TestCreate1/aaaaa
</StackId>
<StackStatus>CREATE_IN_PROGRESS</StackStatus>
<StackName>vpc1</StackName>
<CreationTime>2011-05-23T15:47:44Z</CreationTime>
<TemplateDescription>
Creates one EC2 instance and a load balancer.
</TemplateDescription>
</member>
<member>
<StackId>
arn:aws:cloudformation:us-east-1:1234567:stack/TestDelete2/bbbbb
</StackId>
<StackStatus>DELETE_COMPLETE</StackStatus>
<DeletionTime>2011-03-10T16:20:51.575757Z</DeletionTime>
<StackName>WP1</StackName>
<CreationTime>2011-03-05T19:57:58.161616Z</CreationTime>
<TemplateDescription>
A simple basic Cloudformation Template.
</TemplateDescription>
</member>
</StackSummaries>
</ListStacksResult>
</ListStacksResponse>
"""
LIST_STACK_RESOURCES_XML = b"""
<ListStackResourcesResponse>
<ListStackResourcesResult>
<StackResourceSummaries>
<member>
<ResourceStatus>CREATE_COMPLETE</ResourceStatus>
<LogicalResourceId>DBSecurityGroup</LogicalResourceId>
<LastUpdatedTime>2011-06-21T20:15:58Z</LastUpdatedTime>
<PhysicalResourceId>gmarcteststack-dbsecuritygroup-1s5m0ez5lkk6w</PhysicalResourceId>
<ResourceType>AWS::RDS::DBSecurityGroup</ResourceType>
</member>
<member>
<ResourceStatus>CREATE_COMPLETE</ResourceStatus>
<LogicalResourceId>SampleDB</LogicalResourceId>
<LastUpdatedTime>2011-06-21T20:25:57.875643Z</LastUpdatedTime>
<PhysicalResourceId>MyStack-sampledb-ycwhk1v830lx</PhysicalResourceId>
<ResourceType>AWS::RDS::DBInstance</ResourceType>
</member>
</StackResourceSummaries>
</ListStackResourcesResult>
<ResponseMetadata>
<RequestId>2d06e36c-ac1d-11e0-a958-f9382b6eb86b</RequestId>
</ResponseMetadata>
</ListStackResourcesResponse>
"""
class TestStackParse(unittest.TestCase):
def test_parse_tags(self):
rs = boto.resultset.ResultSet([
('member', boto.cloudformation.stack.Stack)
])
h = boto.handler.XmlHandler(rs, None)
xml.sax.parseString(SAMPLE_XML, h)
tags = rs[0].tags
self.assertEqual(tags, {u'key0': u'value0', u'key1': u'value1'})
def test_event_creation_time_with_millis(self):
millis_xml = SAMPLE_XML.replace(
b"<CreationTime>2013-01-10T05:04:56Z</CreationTime>",
b"<CreationTime>2013-01-10T05:04:56.102342Z</CreationTime>"
)
rs = boto.resultset.ResultSet([
('member', boto.cloudformation.stack.Stack)
])
h = boto.handler.XmlHandler(rs, None)
xml.sax.parseString(millis_xml, h)
creation_time = rs[0].creation_time
self.assertEqual(
creation_time,
datetime.datetime(2013, 1, 10, 5, 4, 56, 102342)
)
def test_resource_time_with_millis(self):
rs = boto.resultset.ResultSet([
('member', boto.cloudformation.stack.StackResource)
])
h = boto.handler.XmlHandler(rs, None)
xml.sax.parseString(DESCRIBE_STACK_RESOURCE_XML, h)
timestamp_1 = rs[0].timestamp
self.assertEqual(
timestamp_1,
datetime.datetime(2010, 7, 27, 22, 27, 28)
)
timestamp_2 = rs[1].timestamp
self.assertEqual(
timestamp_2,
datetime.datetime(2010, 7, 27, 22, 28, 28, 123456)
)
def test_list_stacks_time_with_millis(self):
rs = boto.resultset.ResultSet([
('member', boto.cloudformation.stack.StackSummary)
])
h = boto.handler.XmlHandler(rs, None)
xml.sax.parseString(LIST_STACKS_XML, h)
timestamp_1 = rs[0].creation_time
self.assertEqual(
timestamp_1,
datetime.datetime(2011, 5, 23, 15, 47, 44)
)
timestamp_2 = rs[1].creation_time
self.assertEqual(
timestamp_2,
datetime.datetime(2011, 3, 5, 19, 57, 58, 161616)
)
timestamp_3 = rs[1].deletion_time
self.assertEqual(
timestamp_3,
datetime.datetime(2011, 3, 10, 16, 20, 51, 575757)
)
    def test_list_stack_resources_time_with_millis(self):
rs = boto.resultset.ResultSet([
('member', boto.cloudformation.stack.StackResourceSummary)
])
h = boto.handler.XmlHandler(rs, None)
xml.sax.parseString(LIST_STACK_RESOURCES_XML, h)
timestamp_1 = rs[0].last_updated_time
self.assertEqual(
timestamp_1,
datetime.datetime(2011, 6, 21, 20, 15, 58)
)
timestamp_2 = rs[1].last_updated_time
self.assertEqual(
timestamp_2,
datetime.datetime(2011, 6, 21, 20, 25, 57, 875643)
)
def test_disable_rollback_false(self):
# SAMPLE_XML defines DisableRollback=="false"
rs = boto.resultset.ResultSet([('member', boto.cloudformation.stack.Stack)])
h = boto.handler.XmlHandler(rs, None)
xml.sax.parseString(SAMPLE_XML, h)
disable_rollback = rs[0].disable_rollback
self.assertFalse(disable_rollback)
def test_disable_rollback_false_upper(self):
# Should also handle "False"
rs = boto.resultset.ResultSet([('member', boto.cloudformation.stack.Stack)])
h = boto.handler.XmlHandler(rs, None)
sample_xml_upper = SAMPLE_XML.replace(b'false', b'False')
xml.sax.parseString(sample_xml_upper, h)
disable_rollback = rs[0].disable_rollback
self.assertFalse(disable_rollback)
def test_disable_rollback_true(self):
rs = boto.resultset.ResultSet([('member', boto.cloudformation.stack.Stack)])
h = boto.handler.XmlHandler(rs, None)
sample_xml_upper = SAMPLE_XML.replace(b'false', b'true')
xml.sax.parseString(sample_xml_upper, h)
disable_rollback = rs[0].disable_rollback
self.assertTrue(disable_rollback)
def test_disable_rollback_true_upper(self):
rs = boto.resultset.ResultSet([('member', boto.cloudformation.stack.Stack)])
h = boto.handler.XmlHandler(rs, None)
sample_xml_upper = SAMPLE_XML.replace(b'false', b'True')
xml.sax.parseString(sample_xml_upper, h)
disable_rollback = rs[0].disable_rollback
self.assertTrue(disable_rollback)
if __name__ == '__main__':
unittest.main()
| mit |
programadorjc/django | tests/gis_tests/geos_tests/test_geos_mutation.py | 183 | 5388 | # Copyright (c) 2008-2009 Aryeh Leib Taurog, all rights reserved.
# Modified from original contribution by Aryeh Leib Taurog, which was
# released under the New BSD license.
import unittest
from unittest import skipUnless
from django.contrib.gis.geos import (
HAS_GEOS, LinearRing, LineString, MultiPoint, Point, Polygon, fromstr,
)
def api_get_distance(x):
return x.distance(Point(-200, -200))
def api_get_buffer(x):
return x.buffer(10)
def api_get_geom_typeid(x):
return x.geom_typeid
def api_get_num_coords(x):
return x.num_coords
def api_get_centroid(x):
return x.centroid
def api_get_empty(x):
return x.empty
def api_get_valid(x):
return x.valid
def api_get_simple(x):
return x.simple
def api_get_ring(x):
return x.ring
def api_get_boundary(x):
return x.boundary
def api_get_convex_hull(x):
return x.convex_hull
def api_get_extent(x):
return x.extent
def api_get_area(x):
return x.area
def api_get_length(x):
return x.length
geos_function_tests = [val for name, val in vars().items()
if hasattr(val, '__call__')
and name.startswith('api_get_')]
@skipUnless(HAS_GEOS, "Geos is required.")
class GEOSMutationTest(unittest.TestCase):
"""
Tests Pythonic Mutability of Python GEOS geometry wrappers
get/set/delitem on a slice, normal list methods
"""
def test00_GEOSIndexException(self):
'Testing Geometry IndexError'
p = Point(1, 2)
for i in range(-2, 2):
p._checkindex(i)
self.assertRaises(IndexError, p._checkindex, 2)
self.assertRaises(IndexError, p._checkindex, -3)
def test01_PointMutations(self):
'Testing Point mutations'
for p in (Point(1, 2, 3), fromstr('POINT (1 2 3)')):
self.assertEqual(p._get_single_external(1), 2.0, 'Point _get_single_external')
# _set_single
p._set_single(0, 100)
self.assertEqual(p.coords, (100.0, 2.0, 3.0), 'Point _set_single')
# _set_list
p._set_list(2, (50, 3141))
self.assertEqual(p.coords, (50.0, 3141.0), 'Point _set_list')
def test02_PointExceptions(self):
'Testing Point exceptions'
self.assertRaises(TypeError, Point, range(1))
self.assertRaises(TypeError, Point, range(4))
def test03_PointApi(self):
'Testing Point API'
q = Point(4, 5, 3)
for p in (Point(1, 2, 3), fromstr('POINT (1 2 3)')):
p[0:2] = [4, 5]
for f in geos_function_tests:
self.assertEqual(f(q), f(p), 'Point ' + f.__name__)
def test04_LineStringMutations(self):
'Testing LineString mutations'
for ls in (LineString((1, 0), (4, 1), (6, -1)),
fromstr('LINESTRING (1 0,4 1,6 -1)')):
self.assertEqual(ls._get_single_external(1), (4.0, 1.0), 'LineString _get_single_external')
# _set_single
ls._set_single(0, (-50, 25))
self.assertEqual(ls.coords, ((-50.0, 25.0), (4.0, 1.0), (6.0, -1.0)), 'LineString _set_single')
# _set_list
ls._set_list(2, ((-50.0, 25.0), (6.0, -1.0)))
self.assertEqual(ls.coords, ((-50.0, 25.0), (6.0, -1.0)), 'LineString _set_list')
lsa = LineString(ls.coords)
for f in geos_function_tests:
self.assertEqual(f(lsa), f(ls), 'LineString ' + f.__name__)
def test05_Polygon(self):
'Testing Polygon mutations'
for pg in (Polygon(((1, 0), (4, 1), (6, -1), (8, 10), (1, 0)),
((5, 4), (6, 4), (6, 3), (5, 4))),
fromstr('POLYGON ((1 0,4 1,6 -1,8 10,1 0),(5 4,6 4,6 3,5 4))')):
self.assertEqual(pg._get_single_external(0),
LinearRing((1, 0), (4, 1), (6, -1), (8, 10), (1, 0)),
'Polygon _get_single_external(0)')
self.assertEqual(pg._get_single_external(1),
LinearRing((5, 4), (6, 4), (6, 3), (5, 4)),
'Polygon _get_single_external(1)')
# _set_list
pg._set_list(2, (((1, 2), (10, 0), (12, 9), (-1, 15), (1, 2)),
((4, 2), (5, 2), (5, 3), (4, 2))))
self.assertEqual(
pg.coords,
(((1.0, 2.0), (10.0, 0.0), (12.0, 9.0), (-1.0, 15.0), (1.0, 2.0)),
((4.0, 2.0), (5.0, 2.0), (5.0, 3.0), (4.0, 2.0))),
'Polygon _set_list')
lsa = Polygon(*pg.coords)
for f in geos_function_tests:
self.assertEqual(f(lsa), f(pg), 'Polygon ' + f.__name__)
def test06_Collection(self):
'Testing Collection mutations'
for mp in (MultiPoint(*map(Point, ((3, 4), (-1, 2), (5, -4), (2, 8)))),
fromstr('MULTIPOINT (3 4,-1 2,5 -4,2 8)')):
self.assertEqual(mp._get_single_external(2), Point(5, -4), 'Collection _get_single_external')
mp._set_list(3, map(Point, ((5, 5), (3, -2), (8, 1))))
self.assertEqual(mp.coords, ((5.0, 5.0), (3.0, -2.0), (8.0, 1.0)), 'Collection _set_list')
lsa = MultiPoint(*map(Point, ((5, 5), (3, -2), (8, 1))))
for f in geos_function_tests:
self.assertEqual(f(lsa), f(mp), 'MultiPoint ' + f.__name__)
| bsd-3-clause |
libvirt/autotest | client/common_lib/enum.py | 12 | 1928 | """\
Generic enumeration support.
"""
__author__ = '[email protected] (Steve Howard)'
class Enum(object):
"""\
Utility class to implement Enum-like functionality.
>>> e = Enum('String one', 'String two')
>>> e.STRING_ONE
0
>>> e.STRING_TWO
1
>>> e.choices()
[(0, 'String one'), (1, 'String two')]
>>> e.get_value('String one')
0
>>> e.get_string(0)
'String one'
>>> e = Enum('Hello', 'Goodbye', string_values=True)
>>> e.HELLO, e.GOODBYE
('Hello', 'Goodbye')
>>> e = Enum('One', 'Two', start_value=1)
>>> e.ONE
1
>>> e.TWO
2
"""
def __init__(self, *names, **kwargs):
self.string_values = kwargs.get('string_values')
start_value = kwargs.get('start_value', 0)
self.names = names
self.values = []
for i, name in enumerate(names):
if self.string_values:
value = name
else:
value = i + start_value
self.values.append(value)
setattr(self, self.get_attr_name(name), value)
@staticmethod
def get_attr_name(string):
return string.upper().replace(' ', '_')
def choices(self):
'Return choice list suitable for Django model choices.'
return zip(self.values, self.names)
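    # Illustrative Django model usage (hypothetical model; not part of the
    # original source):
    #
    #     from django.db import models
    #     Status = Enum('Queued', 'Running', 'Completed')
    #     class Job(models.Model):
    #         status = models.SmallIntegerField(choices=Status.choices(),
    #                                           default=Status.QUEUED)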
def get_value(self, name):
"""\
        Convert a string name to its corresponding value. If a value
is passed in, it is returned.
"""
if isinstance(name, int) and not self.string_values:
# name is already a value
return name
return getattr(self, self.get_attr_name(name))
def get_string(self, value):
        'Given a value, get the string name for it.'
if value not in self.values:
raise ValueError('Value %s not in this enum' % value)
index = self.values.index(value)
return self.names[index]
| gpl-2.0 |