repo_name | path | copies | size | content | license
---|---|---|---|---|---|
cuboxi/android_external_chromium_org | native_client_sdk/src/build_tools/build_updater.py | 44 | 6348 | #!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Build script to generate a new sdk_tools bundle.
This script packages the files necessary to generate the SDK updater -- the
tool users run to download new bundles, update existing bundles, etc.
"""
import buildbot_common
import build_version
import glob
import optparse
import os
import sys
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
SDK_SRC_DIR = os.path.dirname(SCRIPT_DIR)
SDK_DIR = os.path.dirname(SDK_SRC_DIR)
SRC_DIR = os.path.dirname(SDK_DIR)
NACL_DIR = os.path.join(SRC_DIR, 'native_client')
CYGTAR = os.path.join(NACL_DIR, 'build', 'cygtar.py')
sys.path.append(os.path.join(SDK_SRC_DIR, 'tools'))
import oshelpers
UPDATER_FILES = [
# launch scripts
('build_tools/naclsdk', 'nacl_sdk/naclsdk'),
('build_tools/naclsdk.bat', 'nacl_sdk/naclsdk.bat'),
# base manifest
('build_tools/json/naclsdk_manifest0.json',
'nacl_sdk/sdk_cache/naclsdk_manifest2.json'),
# SDK tools
('build_tools/sdk_tools/cacerts.txt', 'nacl_sdk/sdk_tools/cacerts.txt'),
('build_tools/sdk_tools/*.py', 'nacl_sdk/sdk_tools/'),
('build_tools/sdk_tools/command/*.py', 'nacl_sdk/sdk_tools/command/'),
('build_tools/sdk_tools/third_party/*.py', 'nacl_sdk/sdk_tools/third_party/'),
('build_tools/sdk_tools/third_party/fancy_urllib/*.py',
'nacl_sdk/sdk_tools/third_party/fancy_urllib/'),
('build_tools/sdk_tools/third_party/fancy_urllib/README.chromium',
'nacl_sdk/sdk_tools/third_party/fancy_urllib/README.chromium'),
('build_tools/manifest_util.py', 'nacl_sdk/sdk_tools/manifest_util.py'),
('LICENSE', 'nacl_sdk/sdk_tools/LICENSE'),
(CYGTAR, 'nacl_sdk/sdk_tools/cygtar.py'),
]
def MakeUpdaterFilesAbsolute(out_dir):
"""Return the result of changing all relative paths in UPDATER_FILES to
absolute paths.
Args:
out_dir: The output directory.
Returns:
A list of 2-tuples. The first element in each tuple is the source path and
the second is the destination path.
"""
assert os.path.isabs(out_dir)
result = []
for in_file, out_file in UPDATER_FILES:
if not os.path.isabs(in_file):
in_file = os.path.join(SDK_SRC_DIR, in_file)
out_file = os.path.join(out_dir, out_file)
result.append((in_file, out_file))
return result
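# Illustrative result (not part of the original file): a relative entry such
# as ('build_tools/naclsdk', 'nacl_sdk/naclsdk') becomes
#   (os.path.join(SDK_SRC_DIR, 'build_tools/naclsdk'),
#    os.path.join(out_dir, 'nacl_sdk/naclsdk'))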
def GlobFiles(files):
"""Expand wildcards for 2-tuples of sources/destinations.
This function will also convert destinations from directories into filenames.
For example:
('foo/*.py', 'bar/') => [('foo/a.py', 'bar/a.py'), ('foo/b.py', 'bar/b.py')]
Args:
files: A list of 2-tuples of (source, dest) paths.
Returns:
A new list of 2-tuples, after the sources have been wildcard-expanded, and
the destinations have been changed from directories to filenames.
"""
result = []
for in_file_glob, out_file in files:
if out_file.endswith('/'):
for in_file in glob.glob(in_file_glob):
result.append((in_file,
os.path.join(out_file, os.path.basename(in_file))))
else:
result.append((in_file_glob, out_file))
return result
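# Illustrative usage (hypothetical paths, not part of the original file): a
# destination ending in '/' triggers wildcard expansion, anything else is
# passed through unchanged.
#   GlobFiles([('foo/*.py', 'bar/'), ('LICENSE', 'out/LICENSE')])
#   # => [('foo/a.py', 'bar/a.py'), ('foo/b.py', 'bar/b.py'),
#   #     ('LICENSE', 'out/LICENSE')]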
def CopyFiles(files):
"""Given a list of 2-tuples (source, dest), copy each source file to a dest
file.
Args:
files: A list of 2-tuples."""
for in_file, out_file in files:
buildbot_common.MakeDir(os.path.dirname(out_file))
buildbot_common.CopyFile(in_file, out_file)
def UpdateRevisionNumber(out_dir, revision_number):
"""Update the sdk_tools bundle to have the given revision number.
This function finds all occurrences of the string "{REVISION}" in
sdk_update_main.py and replaces them with |revision_number|. The only
observable effect of this change should be that the output of running:
naclsdk -v
contains the new |revision_number|.
Args:
out_dir: The output directory containing the scripts to update.
revision_number: The revision number as an integer, or None to use the
current Chrome revision (as retrieved through svn/git).
"""
if revision_number is None:
revision_number = build_version.ChromeRevision()
SDK_UPDATE_MAIN = os.path.join(out_dir,
'nacl_sdk/sdk_tools/sdk_update_main.py')
contents = open(SDK_UPDATE_MAIN, 'r').read().replace(
'{REVISION}', str(revision_number))
open(SDK_UPDATE_MAIN, 'w').write(contents)
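# Sketch of the substitution above (hypothetical line contents): a line in
# sdk_update_main.py such as
#   VERSION = 'revision {REVISION}'
# is rewritten by UpdateRevisionNumber(out_dir, 12345) to
#   VERSION = 'revision 12345'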
def BuildUpdater(out_dir, revision_number=None):
"""Build naclsdk.zip and sdk_tools.tgz in |out_dir|.
Args:
out_dir: The output directory.
revision_number: The revision number of this updater, as an integer. Or
None, to use the current Chrome revision."""
buildbot_common.BuildStep('Create Updater')
out_dir = os.path.abspath(out_dir)
# Build SDK directory
buildbot_common.RemoveDir(os.path.join(out_dir, 'nacl_sdk'))
updater_files = MakeUpdaterFilesAbsolute(out_dir)
updater_files = GlobFiles(updater_files)
CopyFiles(updater_files)
UpdateRevisionNumber(out_dir, revision_number)
out_files = [os.path.relpath(out_file, out_dir)
for _, out_file in updater_files]
# Make zip
buildbot_common.RemoveFile(os.path.join(out_dir, 'nacl_sdk.zip'))
buildbot_common.Run([sys.executable, oshelpers.__file__, 'zip',
'nacl_sdk.zip'] + out_files,
cwd=out_dir)
# Tar of all files under nacl_sdk/sdk_tools
sdktoolsdir = os.path.join('nacl_sdk', 'sdk_tools')
tarname = os.path.join(out_dir, 'sdk_tools.tgz')
files_to_tar = [os.path.relpath(out_file, sdktoolsdir)
for out_file in out_files if out_file.startswith(sdktoolsdir)]
buildbot_common.RemoveFile(tarname)
buildbot_common.Run([sys.executable, CYGTAR, '-C',
os.path.join(out_dir, sdktoolsdir), '-czf', tarname] + files_to_tar)
sys.stdout.write('\n')
def main(args):
parser = optparse.OptionParser()
parser.add_option('-o', '--out', help='output directory',
dest='out_dir', default=os.path.join(SRC_DIR, 'out'))
parser.add_option('-r', '--revision', help='revision number of this updater',
dest='revision', default=None)
options, args = parser.parse_args(args[1:])
if options.revision:
options.revision = int(options.revision)
BuildUpdater(options.out_dir, options.revision)
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause |
richlanc/KaraKara | website/karakara/auth/__init__.py | 1 | 1672 | from externals.lib.social._login import IUserStore
from sqlalchemy.orm.exc import NoResultFound
from ..model import DBSession, commit
from ..model.model_comunity import ComunityUser, SocialToken
from ..templates import helpers as h
class ComunityUserStore(IUserStore):
def get_user_from_token(self, provider_token):
try:
return DBSession.query(ComunityUser).join(SocialToken).filter(
SocialToken.provider == provider_token.provider,
SocialToken.token == provider_token.token,
).one()
except NoResultFound:
return None
def create_user(self, provider_token, name=None, email=None, **user_data):
user = ComunityUser()
user.name = name
user.email = email
user.tokens.append(SocialToken(
token=provider_token.token,
provider=provider_token.provider,
data=user_data,
))
#user.name = '{first_name} {last_name}'.format(**user_data)
DBSession.add(user)
commit()
def user_to_session_dict(self, user):
return {
'username': user.name,
'email': user.email,
#'provider' : provider_token.provider,
'avatar_url': user.tokens[0].data.get('avatar_url'),
'approved': user.approved,
}
class NullComunityUserStore(IUserStore):
def get_user_from_token(self, provider_token):
return True
def user_to_session_dict(self, user):
return {
'username': 'developer',
'approved': True,
'avatar': '{0}{1}'.format(h.path.static, 'dev_avatar.png'),
}
| gpl-3.0 |
SpectraLogic/samba | third_party/dnspython/dns/namedict.py | 99 | 2106 | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS name dictionary"""
import dns.name
class NameDict(dict):
"""A dictionary whose keys are dns.name.Name objects.
@ivar max_depth: the maximum depth of the keys that have ever been
added to the dictionary.
@type max_depth: int
"""
def __init__(self, *args, **kwargs):
super(NameDict, self).__init__(*args, **kwargs)
self.max_depth = 0
def __setitem__(self, key, value):
if not isinstance(key, dns.name.Name):
raise ValueError('NameDict key must be a name')
depth = len(key)
if depth > self.max_depth:
self.max_depth = depth
super(NameDict, self).__setitem__(key, value)
def get_deepest_match(self, name):
"""Find the deepest match to I{name} in the dictionary.
The deepest match is the longest name in the dictionary which is
a superdomain of I{name}.
@param name: the name
@type name: dns.name.Name object
@rtype: (key, value) tuple
"""
depth = len(name)
if depth > self.max_depth:
depth = self.max_depth
for i in xrange(-depth, 0):
n = dns.name.Name(name[i:])
if self.has_key(n):
return (n, self[n])
v = self[dns.name.empty]
return (dns.name.empty, v)
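# Illustrative usage (not part of the original file): the longest stored
# superdomain of the queried name wins.
#   d = NameDict()
#   d[dns.name.from_text('example.com.')] = 1
#   d[dns.name.empty] = 0
#   d.get_deepest_match(dns.name.from_text('www.example.com.'))
#   # => (dns.name.from_text('example.com.'), 1)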
| gpl-3.0 |
octocoin-project/octocoin | qa/rpc-tests/invalidateblock.py | 2 | 3176 | #!/usr/bin/env python2
# Copyright (c) 2014 The Octocoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test InvalidateBlock code
#
from test_framework import BitcoinTestFramework
from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from util import *
class InvalidateTest(BitcoinTestFramework):
def setup_chain(self):
print("Initializing test directory "+self.options.tmpdir)
initialize_chain_clean(self.options.tmpdir, 3)
def setup_network(self):
self.nodes = []
self.is_network_split = False
self.nodes.append(start_node(0, self.options.tmpdir, ["-debug"]))
self.nodes.append(start_node(1, self.options.tmpdir, ["-debug"]))
self.nodes.append(start_node(2, self.options.tmpdir, ["-debug"]))
def run_test(self):
print "Make sure we repopulate setBlockIndexCandidates after InvalidateBlock:"
print "Mine 4 blocks on Node 0"
self.nodes[0].setgenerate(True, 4)
assert(self.nodes[0].getblockcount() == 4)
besthash = self.nodes[0].getbestblockhash()
print "Mine competing 6 blocks on Node 1"
self.nodes[1].setgenerate(True, 6)
assert(self.nodes[1].getblockcount() == 6)
print "Connect nodes to force a reorg"
connect_nodes_bi(self.nodes,0,1)
sync_blocks(self.nodes[0:2])
assert(self.nodes[0].getblockcount() == 6)
badhash = self.nodes[1].getblockhash(2)
print "Invalidate block 2 on node 0 and verify we reorg to node 0's original chain"
self.nodes[0].invalidateblock(badhash)
newheight = self.nodes[0].getblockcount()
newhash = self.nodes[0].getbestblockhash()
if (newheight != 4 or newhash != besthash):
raise AssertionError("Wrong tip for node0, hash %s, height %d"%(newhash,newheight))
print "\nMake sure we won't reorg to a lower work chain:"
connect_nodes_bi(self.nodes,1,2)
print "Sync node 2 to node 1 so both have 6 blocks"
sync_blocks(self.nodes[1:3])
assert(self.nodes[2].getblockcount() == 6)
print "Invalidate block 5 on node 1 so its tip is now at 4"
self.nodes[1].invalidateblock(self.nodes[1].getblockhash(5))
assert(self.nodes[1].getblockcount() == 4)
print "Invalidate block 3 on node 2, so its tip is now 2"
self.nodes[2].invalidateblock(self.nodes[2].getblockhash(3))
assert(self.nodes[2].getblockcount() == 2)
print "..and then mine a block"
self.nodes[2].setgenerate(True, 1)
print "Verify all nodes are at the right height"
time.sleep(5)
for i in xrange(3):
print i,self.nodes[i].getblockcount()
assert(self.nodes[2].getblockcount() == 3)
assert(self.nodes[0].getblockcount() == 4)
node1height = self.nodes[1].getblockcount()
if node1height < 4:
raise AssertionError("Node 1 reorged to a lower height: %d"%node1height)
if __name__ == '__main__':
InvalidateTest().main()
| mit |
javierTerry/odoo | openerp/cli/start.py | 240 | 2748 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import glob
import os
import sys
from . import Command
from .server import main
from openerp.modules.module import get_module_root, MANIFEST
from openerp.service.db import _create_empty_database, DatabaseExists
class Start(Command):
"""Quick start the Odoo server for your project"""
def get_module_list(self, path):
mods = glob.glob(os.path.join(path, '*/%s' % MANIFEST))
return [mod.split(os.path.sep)[-2] for mod in mods]
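# Illustrative result (hypothetical layout): given path/my_module/<MANIFEST>,
# get_module_list(path) returns ['my_module'] -- the second-to-last component
# of each matching manifest path.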
def run(self, cmdargs):
parser = argparse.ArgumentParser(
prog="%s start" % sys.argv[0].split(os.path.sep)[-1],
description=self.__doc__
)
parser.add_argument('--path', default=".",
help="Directory where your project's modules are stored (will autodetect from current dir)")
parser.add_argument("-d", "--database", dest="db_name", default=None,
help="Specify the database name (default to project's directory name")
args, unknown = parser.parse_known_args(args=cmdargs)
project_path = os.path.abspath(os.path.expanduser(os.path.expandvars(args.path)))
module_root = get_module_root(project_path)
db_name = None
if module_root:
# started in a module so we choose this module name for database
db_name = project_path.split(os.path.sep)[-1]
# go to the parent's directory of the module root
project_path = os.path.abspath(os.path.join(project_path, os.pardir))
# check if one of the subfolders has at least one module
mods = self.get_module_list(project_path)
if mods and '--addons-path' not in cmdargs:
cmdargs.append('--addons-path=%s' % project_path)
if not args.db_name:
args.db_name = db_name or project_path.split(os.path.sep)[-1]
cmdargs.extend(('-d', args.db_name))
# TODO: forbid some database names? e.g. template1, ...
try:
_create_empty_database(args.db_name)
except DatabaseExists, e:
pass
except Exception, e:
die("Could not create database `%s`. (%s)" % (args.db_name, e))
if '--db-filter' not in cmdargs:
cmdargs.append('--db-filter=^%s$' % args.db_name)
# Remove --path /-p options from the command arguments
def to_remove(i, l):
return l[i] == '-p' or l[i].startswith('--path') or \
(i > 0 and l[i-1] in ['-p', '--path'])
cmdargs = [v for i, v in enumerate(cmdargs)
if not to_remove(i, cmdargs)]
main(cmdargs)
def die(message, code=1):
print >>sys.stderr, message
sys.exit(code)
| agpl-3.0 |
bootswithdefer/ansible | plugins/inventory/spacewalk.py | 137 | 4999 | #!/bin/env python
"""
Spacewalk external inventory script
=================================
Ansible has a feature where instead of reading from /etc/ansible/hosts
as a text file, it can query external programs to obtain the list
of hosts, groups the hosts are in, and even variables to assign to each host.
To use this, copy this file over /etc/ansible/hosts and chmod +x the file.
This, more or less, allows you to keep one central database containing
info about all of your managed instances.
This script is dependent upon the spacewalk-reports package being installed
on the same machine. It is basically a CSV-to-JSON converter from the
output of "spacewalk-report system-groups-systems|inventory".
Tested with Ansible 1.1
"""
#
# Author:: Jon Miller <[email protected]>
# Copyright:: Copyright (c) 2013, Jon Miller
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or (at
# your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import sys
import os
import time
from optparse import OptionParser
import subprocess
try:
import json
except:
import simplejson as json
base_dir = os.path.dirname(os.path.realpath(__file__))
SW_REPORT = '/usr/bin/spacewalk-report'
CACHE_DIR = os.path.join(base_dir, ".spacewalk_reports")
CACHE_AGE = 300 # 5min
# Sanity check
if not os.path.exists(SW_REPORT):
print >> sys.stderr, 'Error: %s is required for operation.' % (SW_REPORT)
sys.exit(1)
# Pre-startup work
if not os.path.exists(CACHE_DIR):
os.mkdir(CACHE_DIR)
os.chmod(CACHE_DIR, 02775)  # octal permissions (setgid, rwxrwxr-x)
# Helper functions
#------------------------------
def spacewalk_report(name):
"""Yield a dictionary form of each CSV output produced by the specified
spacewalk-report
"""
cache_filename = os.path.join(CACHE_DIR, name)
if not os.path.exists(cache_filename) or \
(time.time() - os.stat(cache_filename).st_mtime) > CACHE_AGE:
# Update the cache
fh = open(cache_filename, 'w')
p = subprocess.Popen([SW_REPORT, name], stdout=fh)
p.wait()
fh.close()
lines = open(cache_filename, 'r').readlines()
keys = lines[0].strip().split(',')
for line in lines[1:]:
values = line.strip().split(',')
if len(keys) == len(values):
yield dict(zip(keys, values))
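# Illustrative yield (hypothetical report contents): for a cached CSV of
#   server_name,group_name
#   web01,webservers
# spacewalk_report('system-groups-systems') yields
#   {'server_name': 'web01', 'group_name': 'webservers'}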
# Options
#------------------------------
parser = OptionParser(usage="%prog [options] --list | --host <machine>")
parser.add_option('--list', default=False, dest="list", action="store_true",
help="Produce a JSON consumable grouping of servers for Ansible")
parser.add_option('--host', default=None, dest="host",
help="Generate additional host specific details for given host for Ansible")
parser.add_option('-H', '--human', dest="human",
default=False, action="store_true",
help="Produce a friendlier version of either server list or host detail")
(options, args) = parser.parse_args()
# List out the known server from Spacewalk
#------------------------------
if options.list:
groups = {}
try:
for system in spacewalk_report('system-groups-systems'):
if system['group_name'] not in groups:
groups[system['group_name']] = set()
groups[system['group_name']].add(system['server_name'])
except (OSError), e:
print >> sys.stderr, 'Problem executing the command "%s system-groups-systems": %s' % \
(SW_REPORT, str(e))
sys.exit(2)
if options.human:
for group, systems in groups.iteritems():
print '[%s]\n%s\n' % (group, '\n'.join(systems))
else:
print json.dumps(dict([ (k, list(s)) for k, s in groups.iteritems() ]))
sys.exit(0)
# Return a details information concerning the spacewalk server
#------------------------------
elif options.host:
host_details = {}
try:
for system in spacewalk_report('inventory'):
if system['hostname'] == options.host:
host_details = system
break
except (OSError), e:
print >> sys.stderr, 'Problem executing the command "%s inventory": %s' % \
(SW_REPORT, str(e))
sys.exit(2)
if options.human:
print 'Host: %s' % options.host
for k, v in host_details.iteritems():
print ' %s: %s' % (k, '\n '.join(v.split(';')))
else:
print json.dumps(host_details)
sys.exit(0)
else:
parser.print_help()
sys.exit(1)
| gpl-3.0 |
glomex/gcdt | tests/resources/simple_cloudformation_stack_with_ec2/cloudformation.py | 1 | 3689 | #!/usr/bin/env python
# Converted from RDS template located at:
# https://github.com/cloudtools/troposphere/blob/master/examples/RDS_with_DBParameterGroup.py
import os
import troposphere
from troposphere import Base64, Join, Parameter, Output, Ref, Template, Tags
from troposphere.rds import DBInstance, DBParameterGroup
from troposphere.ec2 import SecurityGroupRule as SGR
from gcdt.iam import IAMRoleAndPolicies
SERVICE_NAME = os.getenv('SERVICE_NAME', 'gcdtSampleStackWithEc2Instance')
t = Template()
t.add_description(
"AWS CloudFormation Sample Template S3_Bucket: template showing "
"how to create a publicly accessible S3 bucket."
)
param_vpc_id = t.add_parameter(troposphere.Parameter(
'VPCId',
Type="String",
Description="ID of glomex default VPC",
))
param_instance_type = t.add_parameter(troposphere.Parameter(
'InstanceType',
Description='Type of EC2 instance',
Type='String',
Default='t2.micro',
))
param_hosted_zone = t.add_parameter(troposphere.Parameter(
'HostedZone',
Description='Name of the hosted Zone (without trailing dot)',
Type='String'
))
param_launch_subnet_id = t.add_parameter(troposphere.Parameter(
'EC2SubnetId',
Description='ID of the VPN access security group',
Type='String',
))
param_instance_policy_arn = t.add_parameter(troposphere.Parameter(
'DefaultInstancePolicyARN',
Description='A base policy ARN you could attach to all of your instances when required. This handles several default use cases.',
Type='String'
))
param_base_ami_id = t.add_parameter(troposphere.Parameter(
'BaseAMIID',
Description='The ami-id of the glomex base image',
Type='String'
))
################# Security Groups Section ################################
sg_ec2_instance = troposphere.ec2.SecurityGroup(
'%sFrontendEC2' % SERVICE_NAME,
GroupDescription="%sEC2SecurityGroup" % SERVICE_NAME,
VpcId=Ref(param_vpc_id),
SecurityGroupIngress=[
SGR(CidrIp='192.168.225.0/24', FromPort=80, ToPort=80, IpProtocol='tcp'),
SGR(CidrIp='192.168.225.0/24', FromPort=443, ToPort=443, IpProtocol='tcp')
],
)
t.add_resource(sg_ec2_instance)
# Instantiate helper
iam = IAMRoleAndPolicies(t, 'instance-role-',
['ec2.amazonaws.com'], '/ec2/')
role_name = "infra-%s-instance" % SERVICE_NAME
role_infra_instance_role = iam.build_role(
role_name, [Ref(param_instance_policy_arn)]
)
# instance profile_name
instance_role_profile = t.add_resource(troposphere.iam.InstanceProfile(
"InstanceRoleinfraCms%s" % SERVICE_NAME,
Roles=[
troposphere.Ref(role_infra_instance_role)
]
))
################# Instance Section ############################
EC2Instance = t.add_resource(troposphere.ec2.Instance(
"EC2Instance",
ImageId=Ref(param_base_ami_id),
IamInstanceProfile=Ref(instance_role_profile),
#SecurityGroupIds=[Ref(sg_ec2_instance), Ref(param_vpn_sg)],
SecurityGroupIds=[Ref(sg_ec2_instance)],
InstanceType=Ref(param_instance_type),
BlockDeviceMappings=[
troposphere.ec2.BlockDeviceMapping(
Ebs=troposphere.ec2.EBSBlockDevice(
DeleteOnTermination=True,
VolumeSize=16,
VolumeType='gp2'
),
DeviceName='/dev/xvda')
],
Tags=Tags(DeploymentGroup=SERVICE_NAME, Name=SERVICE_NAME),
SubnetId=Ref(param_launch_subnet_id),
AvailabilityZone='eu-west-1b'
))
t.add_output(Output('UsedBaseAMI', Description='ami ID of the given base image', Value=Ref(param_base_ami_id)))
################# End Instance Section ########################
def generate_template():
return t.to_json()
| mit |
Denisolt/Tensorflow_Chat_Bot | local/lib/python2.7/site-packages/tensorflow/python/debug/cli/tensor_format.py | 13 | 15691 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Format tensors (ndarrays) for screen display and navigation."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import re
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.debug.cli import debugger_cli_common
_NUMPY_OMISSION = "...,"
_NUMPY_DEFAULT_EDGE_ITEMS = 3
_NUMBER_REGEX = re.compile(r"[-+]?([0-9][-+0-9eE\.]+|nan|inf)(\s|,|\])")
BEGIN_INDICES_KEY = "i0"
OMITTED_INDICES_KEY = "omitted"
DEFAULT_TENSOR_ELEMENT_HIGHLIGHT_FONT_ATTR = "bold"
class HighlightOptions(object):
"""Options for highlighting elements of a tensor."""
def __init__(self,
criterion,
description=None,
font_attr=DEFAULT_TENSOR_ELEMENT_HIGHLIGHT_FONT_ATTR):
"""Constructor of HighlightOptions.
Args:
criterion: (callable) A callable of the following signature:
def to_highlight(X):
# Args:
# X: The tensor to highlight elements in.
#
# Returns:
# (boolean ndarray) A boolean ndarray of the same shape as X
# indicating which elements are to be highlighted (iff True).
This callable will be used as the argument of np.argwhere() to
determine which elements of the tensor are to be highlighted.
description: (str) Description of the highlight criterion embodied by
criterion.
font_attr: (str) Font attribute to be applied to the
highlighted elements.
"""
self.criterion = criterion
self.description = description
self.font_attr = font_attr
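# Illustrative construction (tensor values are hypothetical): highlight every
# element greater than 0.5 in bold and summarize the matches.
#   opts = HighlightOptions(lambda x: x > 0.5, description="> 0.5")
#   formatted = format_tensor(my_tensor, "my_tensor", highlight_options=opts)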
def format_tensor(tensor,
tensor_name,
include_metadata=False,
np_printoptions=None,
highlight_options=None):
"""Generate a RichTextLines object showing a tensor in formatted style.
Args:
tensor: The tensor to be displayed, as a numpy ndarray or other
appropriate format (e.g., None representing uninitialized tensors).
tensor_name: Name of the tensor, as a str. If set to None, will suppress
the tensor name line in the return value.
include_metadata: Whether metadata such as dtype and shape are to be
included in the formatted text.
np_printoptions: A dictionary of keyword arguments that are passed to a
call of np.set_printoptions() to set the text format for displaying numpy
ndarrays.
highlight_options: (HighlightOptions) options for highlighting elements
of the tensor.
Returns:
A RichTextLines object. Its annotation field has line-by-line markups to
indicate which indices in the array the first element of each line
corresponds to.
"""
lines = []
if tensor_name is not None:
lines.append("Tensor \"%s\":" % tensor_name)
if tensor is None:
if lines:
lines.append("")
lines.append("Uninitialized tensor")
return debugger_cli_common.RichTextLines(lines)
elif not isinstance(tensor, np.ndarray):
# If tensor is not a np.ndarray, return simple text-line representation of
# the object without annotations.
if lines:
lines.append("")
lines.extend(repr(tensor).split("\n"))
return debugger_cli_common.RichTextLines(lines)
if include_metadata:
lines.append(" dtype: %s" % str(tensor.dtype))
lines.append(" shape: %s" % str(tensor.shape))
if lines:
lines.append("")
hlines = len(lines)
# Apply custom string formatting options for numpy ndarray.
if np_printoptions is not None:
np.set_printoptions(**np_printoptions)
array_lines = repr(tensor).split("\n")
lines.extend(array_lines)
if tensor.dtype.type is not np.string_:
# Parse array lines to get beginning indices for each line.
# TODO(cais): Currently, we do not annotate string-type tensors due to
# difficulty in escaping sequences. Address this issue.
annotations = _annotate_ndarray_lines(
array_lines, tensor, np_printoptions=np_printoptions, offset=hlines)
formatted = debugger_cli_common.RichTextLines(lines, annotations=annotations)
# Perform optional highlighting.
if highlight_options is not None:
indices_list = list(np.argwhere(highlight_options.criterion(tensor)))
total_elements = np.size(tensor)
highlight_summary = "Highlighted%s: %d of %d element(s) (%.2f%%)" % (
"(%s)" % highlight_options.description if highlight_options.description
else "", len(indices_list), total_elements,
len(indices_list) / float(total_elements) * 100.0)
formatted.lines[0] += " " + highlight_summary
if indices_list:
indices_list = [list(indices) for indices in indices_list]
are_omitted, rows, start_cols, end_cols = locate_tensor_element(
formatted, indices_list)
for is_omitted, row, start_col, end_col in zip(are_omitted, rows,
start_cols, end_cols):
if is_omitted or start_col is None or end_col is None:
continue
if row in formatted.font_attr_segs:
formatted.font_attr_segs[row].append(
(start_col, end_col, highlight_options.font_attr))
else:
formatted.font_attr_segs[row] = [(start_col, end_col,
highlight_options.font_attr)]
return formatted
def _annotate_ndarray_lines(
array_lines, tensor, np_printoptions=None, offset=0):
"""Generate annotations for line-by-line begin indices of tensor text.
Parse the numpy-generated text representation of a numpy ndarray to
determine the indices of the first element of each text line (if any
element is present in the line).
For example, given the following multi-line ndarray text representation:
["array([[ 0. , 0.0625, 0.125 , 0.1875],",
" [ 0.25 , 0.3125, 0.375 , 0.4375],",
" [ 0.5 , 0.5625, 0.625 , 0.6875],",
" [ 0.75 , 0.8125, 0.875 , 0.9375]])"]
the generate annotation will be:
{0: {BEGIN_INDICES_KEY: [0, 0]},
1: {BEGIN_INDICES_KEY: [1, 0]},
2: {BEGIN_INDICES_KEY: [2, 0]},
3: {BEGIN_INDICES_KEY: [3, 0]}}
Args:
array_lines: Text lines representing the tensor, as a list of str.
tensor: The tensor being formatted as string.
np_printoptions: A dictionary of keyword arguments that are passed to a
call of np.set_printoptions().
offset: Line number offset applied to the line indices in the returned
annotation.
Returns:
An annotation as a dict.
"""
if np_printoptions and "edgeitems" in np_printoptions:
edge_items = np_printoptions["edgeitems"]
else:
edge_items = _NUMPY_DEFAULT_EDGE_ITEMS
annotations = {}
# Put metadata about the tensor in the annotations["tensor_metadata"].
annotations["tensor_metadata"] = {
"dtype": tensor.dtype, "shape": tensor.shape}
dims = np.shape(tensor)
ndims = len(dims)
if ndims == 0:
# No indices for a 0D tensor.
return annotations
curr_indices = [0] * len(dims)
curr_dim = 0
for i in xrange(len(array_lines)):
line = array_lines[i].strip()
if not line:
# Skip empty lines, which can appear for >= 3D arrays.
continue
if line == _NUMPY_OMISSION:
annotations[offset + i] = {OMITTED_INDICES_KEY: copy.copy(curr_indices)}
curr_indices[curr_dim - 1] = dims[curr_dim - 1] - edge_items
else:
num_lbrackets = line.count("[") # TODO(cais): String array escaping.
num_rbrackets = line.count("]")
curr_dim += num_lbrackets - num_rbrackets
annotations[offset + i] = {BEGIN_INDICES_KEY: copy.copy(curr_indices)}
if num_rbrackets == 0:
line_content = line[line.rfind("[") + 1:]
num_elements = line_content.count(",")
curr_indices[curr_dim - 1] += num_elements
else:
if curr_dim > 0:
curr_indices[curr_dim - 1] += 1
for k in xrange(curr_dim, ndims):
curr_indices[k] = 0
return annotations
def locate_tensor_element(formatted, indices):
"""Locate a tensor element in formatted text lines, given element indices.
Given a RichTextLines object representing a tensor and indices of the sought
element, return the row number at which the element is located (if exists).
Args:
formatted: A RichTextLines object containing formatted text lines
representing the tensor.
indices: Indices of the sought element, as a list of int or a list of list
of int. The former case is for a single set of indices to look up,
whereas the latter case is for looking up a batch of indices sets at once.
In the latter case, the indices must be in ascending order, or a
ValueError will be raised.
Returns:
1) A boolean indicating whether the element falls into an omitted line.
2) Row index.
3) Column start index, i.e., the first column in which the representation
of the specified tensor starts, if it can be determined. If it cannot
be determined (e.g., due to ellipsis), None.
4) Column end index, i.e., the column right after the last column that
represents the specified tensor. If it cannot be determined, None.
The return values described above are based on a single set of indices to
look up. In the case of batch mode (multiple sets of indices), the return
values will be lists of the types described above.
Raises:
AttributeError: If:
Input argument "formatted" does not have the required annotations.
ValueError: If:
1) Indices do not match the dimensions of the tensor, or
2) Indices exceed sizes of the tensor, or
3) Indices contain negative value(s).
4) If in batch mode, and if not all sets of indices are in ascending
order.
"""
if isinstance(indices[0], list):
indices_list = indices
input_batch = True
else:
indices_list = [indices]
input_batch = False
# Check that tensor_metadata is available.
if "tensor_metadata" not in formatted.annotations:
raise AttributeError("tensor_metadata is not available in annotations.")
# Sanity check on input argument.
_validate_indices_list(indices_list, formatted)
dims = formatted.annotations["tensor_metadata"]["shape"]
batch_size = len(indices_list)
lines = formatted.lines
annot = formatted.annotations
prev_r = 0
prev_line = ""
prev_indices = [0] * len(dims)
# Initialize return values
are_omitted = [None] * batch_size
row_indices = [None] * batch_size
start_columns = [None] * batch_size
end_columns = [None] * batch_size
batch_pos = 0 # Current position in the batch.
for r in xrange(len(lines)):
if r not in annot:
continue
if BEGIN_INDICES_KEY in annot[r]:
indices_key = BEGIN_INDICES_KEY
elif OMITTED_INDICES_KEY in annot[r]:
indices_key = OMITTED_INDICES_KEY
matching_indices_list = [
ind for ind in indices_list[batch_pos:]
if prev_indices <= ind < annot[r][indices_key]
]
if matching_indices_list:
num_matches = len(matching_indices_list)
match_start_columns, match_end_columns = _locate_elements_in_line(
prev_line, matching_indices_list, prev_indices)
start_columns[batch_pos:batch_pos + num_matches] = match_start_columns
end_columns[batch_pos:batch_pos + num_matches] = match_end_columns
are_omitted[batch_pos:batch_pos + num_matches] = [
OMITTED_INDICES_KEY in annot[prev_r]
] * num_matches
row_indices[batch_pos:batch_pos + num_matches] = [prev_r] * num_matches
batch_pos += num_matches
if batch_pos >= batch_size:
break
prev_r = r
prev_line = lines[r]
prev_indices = annot[r][indices_key]
if batch_pos < batch_size:
matching_indices_list = indices_list[batch_pos:]
num_matches = len(matching_indices_list)
match_start_columns, match_end_columns = _locate_elements_in_line(
prev_line, matching_indices_list, prev_indices)
start_columns[batch_pos:batch_pos + num_matches] = match_start_columns
end_columns[batch_pos:batch_pos + num_matches] = match_end_columns
are_omitted[batch_pos:batch_pos + num_matches] = [
OMITTED_INDICES_KEY in annot[prev_r]
] * num_matches
row_indices[batch_pos:batch_pos + num_matches] = [prev_r] * num_matches
if input_batch:
return are_omitted, row_indices, start_columns, end_columns
else:
return are_omitted[0], row_indices[0], start_columns[0], end_columns[0]
def _validate_indices_list(indices_list, formatted):
prev_ind = None
for ind in indices_list:
# Check indices match tensor dimensions.
dims = formatted.annotations["tensor_metadata"]["shape"]
if len(ind) != len(dims):
raise ValueError("Dimensions mismatch: requested: %d; actual: %d" %
(len(ind), len(dims)))
# Check indices is within size limits.
for req_idx, siz in zip(ind, dims):
if req_idx >= siz:
raise ValueError("Indices exceed tensor dimensions.")
if req_idx < 0:
raise ValueError("Indices contain negative value(s).")
# Check indices are in ascending order.
if prev_ind and ind < prev_ind:
raise ValueError("Input indices sets are not in ascending order.")
prev_ind = ind
def _locate_elements_in_line(line, indices_list, ref_indices):
"""Determine the start and end indices of an element in a line.
Args:
line: (str) the line in which the element is to be sought.
indices_list: (list of list of int) list of indices of the element to
search for. Assumes that the indices in the batch are unique and sorted
in ascending order.
ref_indices: (list of int) reference indices, i.e., the indices of the
first element represented in the line.
Returns:
start_columns: (list of int) start column indices, if found. If not found,
None.
end_columns: (list of int) end column indices, if found. If not found,
None.
If found, the element is represented in the left-closed-right-open interval
[start_column, end_column].
"""
batch_size = len(indices_list)
offsets = [indices[-1] - ref_indices[-1] for indices in indices_list]
start_columns = [None] * batch_size
end_columns = [None] * batch_size
if _NUMPY_OMISSION in line:
ellipsis_index = line.find(_NUMPY_OMISSION)
else:
ellipsis_index = len(line)
matches_iter = re.finditer(_NUMBER_REGEX, line)
batch_pos = 0
offset_counter = 0
for match in matches_iter:
if match.start() > ellipsis_index:
# Do not attempt to search beyond ellipsis.
break
if offset_counter == offsets[batch_pos]:
start_columns[batch_pos] = match.start()
# Remove the final comma, right bracket, or whitespace.
end_columns[batch_pos] = match.end() - 1
batch_pos += 1
if batch_pos >= batch_size:
break
offset_counter += 1
return start_columns, end_columns
| gpl-3.0 |
tinkerinestudio/Tinkerine-Suite | TinkerineSuite/python/Lib/comtypes/test/test_msscript.py | 1 | 2790 | import unittest
from ctypes import POINTER
from comtypes.automation import IDispatch
from comtypes.client import CreateObject
from comtypes import GUID
##from test import test_support
##from comtypes.unittests import support
try:
GUID.from_progid("MSScriptControl.ScriptControl")
except WindowsError:
# doesn't exist on Windows CE
pass
else:
class Test(unittest.TestCase):
def test_jscript(self):
engine = CreateObject("MSScriptControl.ScriptControl")
engine.Language = "JScript"
# strange.
#
# engine.Eval returns a VARIANT containing a dispatch pointer.
#
# The dispatch pointer exposes this typeinfo (the number of
# dispproperties varies, depending on the length of the list we pass
# to Eval):
#
#class JScriptTypeInfo(comtypes.gen._00020430_0000_0000_C000_000000000046_0_2_0.IDispatch):
# 'JScript Type Info'
# _iid_ = GUID('{C59C6B12-F6C1-11CF-8835-00A0C911E8B2}')
# _idlflags_ = []
# _methods_ = []
#JScriptTypeInfo._disp_methods_ = [
# DISPPROPERTY([dispid(9522932)], VARIANT, '0'),
# DISPPROPERTY([dispid(9522976)], VARIANT, '1'),
#]
#
# Although the exact interface members vary, the guid stays
# the same. Don't think that's allowed by COM standards - is
# this a bug in the MSScriptControl?
#
# What's even more strange is that the returned dispatch
# pointer can't be QI'd for this interface! So it seems the
# typeinfo is really a temporary thing.
res = engine.Eval("[1, 2, 3, 4]")._comobj
# comtypes.client works around this bug, by not trying to
# high-level wrap the dispatch pointer because QI for the real
# interface fails.
self.failUnlessEqual(type(res), POINTER(IDispatch))
tinfo_1 = engine.Eval("[1, 2, 3]")._comobj.GetTypeInfo(0)
tinfo_2 = engine.Eval("[1, 2, 3, 4]")._comobj.GetTypeInfo(0)
tinfo_3 = engine.Eval("[1, 2, 3, 4, 5]")._comobj.GetTypeInfo(0)
self.failUnlessEqual(tinfo_1.GetTypeAttr().cVars, 3)
self.failUnlessEqual(tinfo_2.GetTypeAttr().cVars, 4)
self.failUnlessEqual(tinfo_3.GetTypeAttr().cVars, 5)
# These tests simply describe the current behaviour ;-)
self.failUnlessEqual(tinfo_1.GetTypeAttr().guid,
tinfo_1.GetTypeAttr().guid)
## print (res[0], res[1], res[2])
## print len(res)
engine.Reset()
if __name__ == "__main__":
unittest.main()
| agpl-3.0 |
jaruba/chromium.src | third_party/pycoverage/coverage/phystokens.py | 160 | 7401 | """Better tokenizing for coverage.py."""
import codecs, keyword, re, sys, token, tokenize
from coverage.backward import set # pylint: disable=W0622
from coverage.parser import generate_tokens
def phys_tokens(toks):
"""Return all physical tokens, even line continuations.
tokenize.generate_tokens() doesn't return a token for the backslash that
continues lines. This wrapper provides those tokens so that we can
re-create a faithful representation of the original source.
Returns the same values as generate_tokens()
"""
last_line = None
last_lineno = -1
last_ttype = None
for ttype, ttext, (slineno, scol), (elineno, ecol), ltext in toks:
if last_lineno != elineno:
if last_line and last_line.endswith("\\\n"):
# We are at the beginning of a new line, and the last line
# ended with a backslash. We probably have to inject a
# backslash token into the stream. Unfortunately, there's more
# to figure out. This code::
#
# usage = """\
# HEY THERE
# """
#
# triggers this condition, but the token text is::
#
# '"""\\\nHEY THERE\n"""'
#
# so we need to figure out if the backslash is already in the
# string token or not.
inject_backslash = True
if last_ttype == tokenize.COMMENT:
# Comments like this \
# should never result in a new token.
inject_backslash = False
elif ttype == token.STRING:
if "\n" in ttext and ttext.split('\n', 1)[0][-1] == '\\':
# It's a multiline string and the first line ends with
# a backslash, so we don't need to inject another.
inject_backslash = False
if inject_backslash:
# Figure out what column the backslash is in.
ccol = len(last_line.split("\n")[-2]) - 1
# Yield the token, with a fake token type.
yield (
99999, "\\\n",
(slineno, ccol), (slineno, ccol+2),
last_line
)
last_line = ltext
last_ttype = ttype
yield ttype, ttext, (slineno, scol), (elineno, ecol), ltext
last_lineno = elineno
def source_token_lines(source):
"""Generate a series of lines, one for each line in `source`.
Each line is a list of pairs, each pair is a token::
[('key', 'def'), ('ws', ' '), ('nam', 'hello'), ('op', '('), ... ]
Each pair has a token class, and the token text.
If you concatenate all the token texts, and then join them with newlines,
you should have your original `source` back, with two differences:
trailing whitespace is not preserved, and a final line with no newline
is indistinguishable from a final line with a newline.
"""
ws_tokens = set([token.INDENT, token.DEDENT, token.NEWLINE, tokenize.NL])
line = []
col = 0
source = source.expandtabs(8).replace('\r\n', '\n')
tokgen = generate_tokens(source)
for ttype, ttext, (_, scol), (_, ecol), _ in phys_tokens(tokgen):
mark_start = True
for part in re.split('(\n)', ttext):
if part == '\n':
yield line
line = []
col = 0
mark_end = False
elif part == '':
mark_end = False
elif ttype in ws_tokens:
mark_end = False
else:
if mark_start and scol > col:
line.append(("ws", " " * (scol - col)))
mark_start = False
tok_class = tokenize.tok_name.get(ttype, 'xx').lower()[:3]
if ttype == token.NAME and keyword.iskeyword(ttext):
tok_class = "key"
line.append((tok_class, part))
mark_end = True
scol = 0
if mark_end:
col = ecol
if line:
yield line
def source_encoding(source):
"""Determine the encoding for `source` (a string), according to PEP 263.
Returns a string, the name of the encoding.
"""
# Note: this function should never be called on Python 3, since py3 has
# built-in tools to do this.
assert sys.version_info < (3, 0)
# This is mostly code adapted from Py3.2's tokenize module.
cookie_re = re.compile(r"coding[:=]\s*([-\w.]+)")
# Do this so the detect_encode code we copied will work.
readline = iter(source.splitlines(True)).next
def _get_normal_name(orig_enc):
"""Imitates get_normal_name in tokenizer.c."""
# Only care about the first 12 characters.
enc = orig_enc[:12].lower().replace("_", "-")
if re.match(r"^utf-8($|-)", enc):
return "utf-8"
if re.match(r"^(latin-1|iso-8859-1|iso-latin-1)($|-)", enc):
return "iso-8859-1"
return orig_enc
# From detect_encode():
# It detects the encoding from the presence of a utf-8 bom or an encoding
# cookie as specified in pep-0263. If both a bom and a cookie are present,
# but disagree, a SyntaxError will be raised. If the encoding cookie is an
# invalid charset, raise a SyntaxError. Note that if a utf-8 bom is found,
# 'utf-8-sig' is returned.
# If no encoding is specified, then the default will be returned. The
# default varied with version.
if sys.version_info <= (2, 4):
default = 'iso-8859-1'
else:
default = 'ascii'
bom_found = False
encoding = None
def read_or_stop():
"""Get the next source line, or ''."""
try:
return readline()
except StopIteration:
return ''
def find_cookie(line):
"""Find an encoding cookie in `line`."""
try:
line_string = line.decode('ascii')
except UnicodeDecodeError:
return None
matches = cookie_re.findall(line_string)
if not matches:
return None
encoding = _get_normal_name(matches[0])
try:
codec = codecs.lookup(encoding)
except LookupError:
# This behaviour mimics the Python interpreter
raise SyntaxError("unknown encoding: " + encoding)
if bom_found:
# codecs in 2.3 were raw tuples of functions, assume the best.
codec_name = getattr(codec, 'name', encoding)
if codec_name != 'utf-8':
# This behaviour mimics the Python interpreter
raise SyntaxError('encoding problem: utf-8')
encoding += '-sig'
return encoding
first = read_or_stop()
if first.startswith(codecs.BOM_UTF8):
bom_found = True
first = first[3:]
default = 'utf-8-sig'
if not first:
return default
encoding = find_cookie(first)
if encoding:
return encoding
second = read_or_stop()
if not second:
return default
encoding = find_cookie(second)
if encoding:
return encoding
return default
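# Illustrative behaviour (hypothetical source text): a file starting with
#   # -*- coding: utf-8 -*-
# makes source_encoding() return 'utf-8'; with no cookie and no BOM the
# version-dependent default ('iso-8859-1' or 'ascii') is returned.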
| bsd-3-clause |
Xeralux/tensorflow | tensorflow/contrib/distributions/python/kernel_tests/geometric_test.py | 68 | 9868 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for the Geometric distribution."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from scipy import stats
from tensorflow.contrib.distributions.python.ops import geometric
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.platform import test
# In all tests that follow, we use scipy.stats.geom, which
# represents the "Shifted" Geometric distribution. Hence, loc=-1 is passed
# in to each scipy function for testing.
class GeometricTest(test.TestCase):
def testGeometricShape(self):
with self.test_session():
probs = constant_op.constant([.1] * 5)
geom = geometric.Geometric(probs=probs)
self.assertEqual([5,], geom.batch_shape_tensor().eval())
self.assertAllEqual([], geom.event_shape_tensor().eval())
self.assertEqual(tensor_shape.TensorShape([5]), geom.batch_shape)
self.assertEqual(tensor_shape.TensorShape([]), geom.event_shape)
def testInvalidP(self):
invalid_ps = [-.01, -0.01, -2.]
with self.test_session():
with self.assertRaisesOpError("Condition x >= 0"):
geom = geometric.Geometric(probs=invalid_ps, validate_args=True)
geom.probs.eval()
invalid_ps = [1.1, 3., 5.]
with self.test_session():
with self.assertRaisesOpError("Condition x <= y"):
geom = geometric.Geometric(probs=invalid_ps, validate_args=True)
geom.probs.eval()
def testGeomLogPmf(self):
with self.test_session():
batch_size = 6
probs = constant_op.constant([.2] * batch_size)
probs_v = .2
x = np.array([2., 3., 4., 5., 6., 7.], dtype=np.float32)
geom = geometric.Geometric(probs=probs)
expected_log_prob = stats.geom.logpmf(x, probs_v, loc=-1)
log_prob = geom.log_prob(x)
self.assertEqual([6,], log_prob.get_shape())
self.assertAllClose(expected_log_prob, log_prob.eval())
pmf = geom.prob(x)
self.assertEqual([6,], pmf.get_shape())
self.assertAllClose(np.exp(expected_log_prob), pmf.eval())
def testGeometricLogPmf_validate_args(self):
with self.test_session():
batch_size = 6
probs = constant_op.constant([.9] * batch_size)
x = array_ops.placeholder(dtypes.float32, shape=[6])
feed_dict = {x: [2.5, 3.2, 4.3, 5.1, 6., 7.]}
geom = geometric.Geometric(probs=probs, validate_args=True)
with self.assertRaisesOpError("Condition x == y"):
log_prob = geom.log_prob(x)
log_prob.eval(feed_dict=feed_dict)
with self.assertRaisesOpError("Condition x >= 0"):
log_prob = geom.log_prob(np.array([-1.], dtype=np.float32))
log_prob.eval()
geom = geometric.Geometric(probs=probs)
log_prob = geom.log_prob(x)
self.assertEqual([6,], log_prob.get_shape())
pmf = geom.prob(x)
self.assertEqual([6,], pmf.get_shape())
def testGeometricLogPmfMultidimensional(self):
with self.test_session():
batch_size = 6
probs = constant_op.constant([[.2, .3, .5]] * batch_size)
probs_v = np.array([.2, .3, .5])
x = np.array([[2., 3., 4., 5., 6., 7.]], dtype=np.float32).T
geom = geometric.Geometric(probs=probs)
expected_log_prob = stats.geom.logpmf(x, probs_v, loc=-1)
log_prob = geom.log_prob(x)
log_prob_values = log_prob.eval()
self.assertEqual([6, 3], log_prob.get_shape())
self.assertAllClose(expected_log_prob, log_prob_values)
pmf = geom.prob(x)
pmf_values = pmf.eval()
self.assertEqual([6, 3], pmf.get_shape())
self.assertAllClose(np.exp(expected_log_prob), pmf_values)
def testGeometricCDF(self):
with self.test_session():
batch_size = 6
probs = constant_op.constant([[.2, .4, .5]] * batch_size)
probs_v = np.array([.2, .4, .5])
x = np.array([[2., 3., 4., 5.5, 6., 7.]], dtype=np.float32).T
geom = geometric.Geometric(probs=probs)
expected_cdf = stats.geom.cdf(x, probs_v, loc=-1)
cdf = geom.cdf(x)
self.assertEqual([6, 3], cdf.get_shape())
self.assertAllClose(expected_cdf, cdf.eval())
def testGeometricEntropy(self):
with self.test_session():
probs_v = np.array([.1, .3, .25], dtype=np.float32)
geom = geometric.Geometric(probs=probs_v)
expected_entropy = stats.geom.entropy(probs_v, loc=-1)
self.assertEqual([3], geom.entropy().get_shape())
self.assertAllClose(expected_entropy, geom.entropy().eval())
def testGeometricMean(self):
with self.test_session():
probs_v = np.array([.1, .3, .25])
geom = geometric.Geometric(probs=probs_v)
expected_means = stats.geom.mean(probs_v, loc=-1)
self.assertEqual([3], geom.mean().get_shape())
self.assertAllClose(expected_means, geom.mean().eval())
def testGeometricVariance(self):
with self.test_session():
probs_v = np.array([.1, .3, .25])
geom = geometric.Geometric(probs=probs_v)
expected_vars = stats.geom.var(probs_v, loc=-1)
self.assertEqual([3], geom.variance().get_shape())
self.assertAllClose(expected_vars, geom.variance().eval())
def testGeometricStddev(self):
with self.test_session():
probs_v = np.array([.1, .3, .25])
geom = geometric.Geometric(probs=probs_v)
expected_stddevs = stats.geom.std(probs_v, loc=-1)
self.assertEqual([3], geom.stddev().get_shape())
self.assertAllClose(geom.stddev().eval(), expected_stddevs)
def testGeometricMode(self):
with self.test_session():
probs_v = np.array([.1, .3, .25])
geom = geometric.Geometric(probs=probs_v)
self.assertEqual([3,], geom.mode().get_shape())
self.assertAllClose([0.] * 3, geom.mode().eval())
def testGeometricSample(self):
with self.test_session():
probs_v = [.3, .9]
probs = constant_op.constant(probs_v)
n = constant_op.constant(100000)
geom = geometric.Geometric(probs=probs)
samples = geom.sample(n, seed=12345)
self.assertEqual([100000, 2], samples.get_shape())
sample_values = samples.eval()
self.assertFalse(np.any(sample_values < 0.0))
for i in range(2):
self.assertAllClose(sample_values[:, i].mean(),
stats.geom.mean(probs_v[i], loc=-1),
rtol=.02)
self.assertAllClose(sample_values[:, i].var(),
stats.geom.var(probs_v[i], loc=-1),
rtol=.02)
def testGeometricSampleMultiDimensional(self):
with self.test_session():
batch_size = 2
probs_v = [.3, .9]
probs = constant_op.constant([probs_v] * batch_size)
geom = geometric.Geometric(probs=probs)
n = 400000
samples = geom.sample(n, seed=12345)
self.assertEqual([n, batch_size, 2], samples.get_shape())
sample_values = samples.eval()
self.assertFalse(np.any(sample_values < 0.0))
for i in range(2):
self.assertAllClose(sample_values[:, 0, i].mean(),
stats.geom.mean(probs_v[i], loc=-1),
rtol=.02)
self.assertAllClose(sample_values[:, 0, i].var(),
stats.geom.var(probs_v[i], loc=-1),
rtol=.02)
self.assertAllClose(sample_values[:, 1, i].mean(),
stats.geom.mean(probs_v[i], loc=-1),
rtol=.02)
self.assertAllClose(sample_values[:, 1, i].var(),
stats.geom.var(probs_v[i], loc=-1),
rtol=.02)
def testGeometricAtBoundary(self):
with self.test_session():
geom = geometric.Geometric(probs=1., validate_args=True)
x = np.array([0., 2., 3., 4., 5., 6., 7.], dtype=np.float32)
expected_log_prob = stats.geom.logpmf(x, [1.], loc=-1)
# Scipy incorrectly returns nan.
expected_log_prob[np.isnan(expected_log_prob)] = 0.
log_prob = geom.log_prob(x)
self.assertEqual([7,], log_prob.get_shape())
self.assertAllClose(expected_log_prob, log_prob.eval())
pmf = geom.prob(x)
self.assertEqual([7,], pmf.get_shape())
self.assertAllClose(np.exp(expected_log_prob), pmf.eval())
expected_log_cdf = stats.geom.logcdf(x, 1., loc=-1)
log_cdf = geom.log_cdf(x)
self.assertEqual([7,], log_cdf.get_shape())
self.assertAllClose(expected_log_cdf, log_cdf.eval())
cdf = geom.cdf(x)
self.assertEqual([7,], cdf.get_shape())
self.assertAllClose(np.exp(expected_log_cdf), cdf.eval())
expected_mean = stats.geom.mean(1., loc=-1)
self.assertEqual([], geom.mean().get_shape())
self.assertAllClose(expected_mean, geom.mean().eval())
expected_variance = stats.geom.var(1., loc=-1)
self.assertEqual([], geom.variance().get_shape())
self.assertAllClose(expected_variance, geom.variance().eval())
with self.assertRaisesOpError("Entropy is undefined"):
geom.entropy().eval()
if __name__ == "__main__":
test.main()
| apache-2.0 |
xiaotdl/ansible | contrib/inventory/zone.py | 138 | 1466 | #!/usr/bin/env python
# (c) 2015, Dagobert Michelsen <[email protected]>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from subprocess import Popen,PIPE
import sys
import json
result = {}
result['all'] = {}
pipe = Popen(['zoneadm', 'list', '-ip'], stdout=PIPE, universal_newlines=True)
result['all']['hosts'] = []
for l in pipe.stdout.readlines():
# 1:work:running:/zones/work:3126dc59-9a07-4829-cde9-a816e4c5040e:native:shared
s = l.split(':')
if s[1] != 'global':
result['all']['hosts'].append(s[1])
result['all']['vars'] = {}
result['all']['vars']['ansible_connection'] = 'zone'
if len(sys.argv) == 2 and sys.argv[1] == '--list':
print(json.dumps(result))
elif len(sys.argv) == 3 and sys.argv[1] == '--host':
print(json.dumps({'ansible_connection': 'zone'}))
else:
print("Need an argument, either --list or --host <host>")
| gpl-3.0 |
krisys/django | django/contrib/auth/migrations/0004_alter_user_username_opts.py | 200 | 1040 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import django.core.validators
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('auth', '0003_alter_user_email_max_length'),
]
# No database changes; modifies validators and error_messages (#13147).
operations = [
migrations.AlterField(
model_name='user',
name='username',
field=models.CharField(
error_messages={'unique': 'A user with that username already exists.'}, max_length=30,
validators=[django.core.validators.RegexValidator(
'^[\\w.@+-]+$',
'Enter a valid username. This value may contain only letters, numbers and @/./+/-/_ characters.',
'invalid'
)],
help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.',
unique=True, verbose_name='username'
),
),
]
| bsd-3-clause |
timothsp/where2ate | venv/lib/python3.3/site-packages/werkzeug/contrib/fixers.py | 148 | 10197 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.fixers
~~~~~~~~~~~~~~~~~~~~~~~
.. versionadded:: 0.5
This module includes various helpers that fix bugs in web servers. They may
be necessary for some versions of a buggy web server but not others. We try
to stay up to date with the status of the bugs as well as possible, but you
have to verify whether they fix the problem you encounter.
If you notice bugs in webservers not fixed in this module consider
contributing a patch.
:copyright: Copyright 2009 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
try:
from urllib import unquote
except ImportError:
from urllib.parse import unquote
from werkzeug.http import parse_options_header, parse_cache_control_header, \
parse_set_header
from werkzeug.useragents import UserAgent
from werkzeug.datastructures import Headers, ResponseCacheControl
class CGIRootFix(object):
"""Wrap the application in this middleware if you are using FastCGI or CGI
and you have problems with your app root being set to the cgi script's path
instead of the path users are going to visit
.. versionchanged:: 0.9
Added `app_root` parameter and renamed from `LighttpdCGIRootFix`.
:param app: the WSGI application
:param app_root: Defaulting to ``'/'``, you can set this to something else
if your app is mounted somewhere else.
"""
def __init__(self, app, app_root='/'):
self.app = app
self.app_root = app_root
def __call__(self, environ, start_response):
# only set PATH_INFO for older versions of Lighty or if no
# server software is provided. That's because the test was
# added in newer Werkzeug versions and we don't want to break
# people's code if they are using this fixer in a test that
# does not set the SERVER_SOFTWARE key.
if 'SERVER_SOFTWARE' not in environ or \
environ['SERVER_SOFTWARE'] < 'lighttpd/1.4.28':
environ['PATH_INFO'] = environ.get('SCRIPT_NAME', '') + \
environ.get('PATH_INFO', '')
environ['SCRIPT_NAME'] = self.app_root.strip('/')
return self.app(environ, start_response)
# backwards compatibility
LighttpdCGIRootFix = CGIRootFix
class PathInfoFromRequestUriFix(object):
"""On windows environment variables are limited to the system charset
which makes it impossible to store the `PATH_INFO` variable in the
environment without loss of information on some systems.
This is for example a problem for CGI scripts on a Windows Apache.
This fixer works by recreating the `PATH_INFO` from `REQUEST_URI`,
`REQUEST_URL`, or `UNENCODED_URL` (whatever is available). Thus the
fix can only be applied if the webserver supports either of these
variables.
:param app: the WSGI application
"""
def __init__(self, app):
self.app = app
def __call__(self, environ, start_response):
for key in 'REQUEST_URL', 'REQUEST_URI', 'UNENCODED_URL':
if key not in environ:
continue
request_uri = unquote(environ[key])
script_name = unquote(environ.get('SCRIPT_NAME', ''))
if request_uri.startswith(script_name):
environ['PATH_INFO'] = request_uri[len(script_name):] \
.split('?', 1)[0]
break
return self.app(environ, start_response)
class ProxyFix(object):
"""This middleware can be applied to add HTTP proxy support to an
application that was not designed with HTTP proxies in mind. It
    sets `REMOTE_ADDR` and `HTTP_HOST` from `X-Forwarded` headers. While
Werkzeug-based applications already can use
:py:func:`werkzeug.wsgi.get_host` to retrieve the current host even if
behind proxy setups, this middleware can be used for applications which
access the WSGI environment directly.
If you have more than one proxy server in front of your app, set
`num_proxies` accordingly.
Do not use this middleware in non-proxy setups for security reasons.
The original values of `REMOTE_ADDR` and `HTTP_HOST` are stored in
the WSGI environment as `werkzeug.proxy_fix.orig_remote_addr` and
`werkzeug.proxy_fix.orig_http_host`.
:param app: the WSGI application
:param num_proxies: the number of proxy servers in front of the app.
"""
def __init__(self, app, num_proxies=1):
self.app = app
self.num_proxies = num_proxies
def get_remote_addr(self, forwarded_for):
"""Selects the new remote addr from the given list of ips in
X-Forwarded-For. By default it picks the one that the `num_proxies`
proxy server provides. Before 0.9 it would always pick the first.
.. versionadded:: 0.8
"""
if len(forwarded_for) >= self.num_proxies:
return forwarded_for[-1 * self.num_proxies]
def __call__(self, environ, start_response):
getter = environ.get
forwarded_proto = getter('HTTP_X_FORWARDED_PROTO', '')
forwarded_for = getter('HTTP_X_FORWARDED_FOR', '').split(',')
forwarded_host = getter('HTTP_X_FORWARDED_HOST', '')
environ.update({
'werkzeug.proxy_fix.orig_wsgi_url_scheme': getter('wsgi.url_scheme'),
'werkzeug.proxy_fix.orig_remote_addr': getter('REMOTE_ADDR'),
'werkzeug.proxy_fix.orig_http_host': getter('HTTP_HOST')
})
forwarded_for = [x for x in [x.strip() for x in forwarded_for] if x]
remote_addr = self.get_remote_addr(forwarded_for)
if remote_addr is not None:
environ['REMOTE_ADDR'] = remote_addr
if forwarded_host:
environ['HTTP_HOST'] = forwarded_host
if forwarded_proto:
environ['wsgi.url_scheme'] = forwarded_proto
return self.app(environ, start_response)
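# Hedged usage sketch (not part of the original module): wrapping a WSGI app
# that sits behind a single reverse proxy. `application` stands in for any
# WSGI callable.
#
#     from werkzeug.contrib.fixers import ProxyFix
#     application = ProxyFix(application, num_proxies=1)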
class HeaderRewriterFix(object):
"""This middleware can remove response headers and add others. This
is for example useful to remove the `Date` header from responses if you
are using a server that adds that header, no matter if it's present or
not or to add `X-Powered-By` headers::
app = HeaderRewriterFix(app, remove_headers=['Date'],
add_headers=[('X-Powered-By', 'WSGI')])
:param app: the WSGI application
:param remove_headers: a sequence of header keys that should be
removed.
:param add_headers: a sequence of ``(key, value)`` tuples that should
be added.
"""
def __init__(self, app, remove_headers=None, add_headers=None):
self.app = app
self.remove_headers = set(x.lower() for x in (remove_headers or ()))
self.add_headers = list(add_headers or ())
def __call__(self, environ, start_response):
def rewriting_start_response(status, headers, exc_info=None):
new_headers = []
for key, value in headers:
if key.lower() not in self.remove_headers:
new_headers.append((key, value))
new_headers += self.add_headers
return start_response(status, new_headers, exc_info)
return self.app(environ, rewriting_start_response)
class InternetExplorerFix(object):
"""This middleware fixes a couple of bugs with Microsoft Internet
Explorer. Currently the following fixes are applied:
    - removal of `Vary` headers for unsupported mimetypes, which
      causes trouble with caching. Can be disabled by passing
      ``fix_vary=False`` to the constructor.
see: http://support.microsoft.com/kb/824847/en-us
- removes offending headers to work around caching bugs in
Internet Explorer if `Content-Disposition` is set. Can be
disabled by passing ``fix_attach=False`` to the constructor.
If it does not detect affected Internet Explorer versions it won't touch
the request / response.
"""
# This code was inspired by Django fixers for the same bugs. The
# fix_vary and fix_attach fixers were originally implemented in Django
    # by Michael Axiak and are available as part of the Django project:
# http://code.djangoproject.com/ticket/4148
def __init__(self, app, fix_vary=True, fix_attach=True):
self.app = app
self.fix_vary = fix_vary
self.fix_attach = fix_attach
def fix_headers(self, environ, headers, status=None):
if self.fix_vary:
header = headers.get('content-type', '')
mimetype, options = parse_options_header(header)
if mimetype not in ('text/html', 'text/plain', 'text/sgml'):
headers.pop('vary', None)
if self.fix_attach and 'content-disposition' in headers:
pragma = parse_set_header(headers.get('pragma', ''))
pragma.discard('no-cache')
header = pragma.to_header()
if not header:
headers.pop('pragma', '')
else:
headers['Pragma'] = header
header = headers.get('cache-control', '')
if header:
cc = parse_cache_control_header(header,
cls=ResponseCacheControl)
cc.no_cache = None
cc.no_store = False
header = cc.to_header()
if not header:
headers.pop('cache-control', '')
else:
headers['Cache-Control'] = header
def run_fixed(self, environ, start_response):
def fixing_start_response(status, headers, exc_info=None):
headers = Headers(headers)
self.fix_headers(environ, headers, status)
return start_response(status, headers.to_wsgi_list(), exc_info)
return self.app(environ, fixing_start_response)
def __call__(self, environ, start_response):
ua = UserAgent(environ)
if ua.browser != 'msie':
return self.app(environ, start_response)
return self.run_fixed(environ, start_response)
| cc0-1.0 |
anjakisek/Mlin | racunalnik_igralec.py | 1 | 1948 | import threading
from igra import *
from minimax import *
import time
class Racunalnik():
def __init__(self, gui, algoritem):
self.gui = gui
        self.algoritem = algoritem  # selected algorithm, minimax or alphabeta
        self.mislec = None  # thread used for thinking
self.je_treba_prekiniti = False
def igraj(self):
        '''Start thinking about a new move in a parallel thread.'''
        # Trigger the thinking
self.mislec = threading.Thread(
target=lambda: self.algoritem.izracunaj_potezo(
self.gui.igra.kopija()))
        # Start the thread
self.mislec.start()
        self.zacni_meriti_cas = time.time()
        # Check every 100 ms whether the thinker has finished thinking
self.gui.plosca.after(100, self.preveri_potezo)
def preveri_potezo(self):
        '''After a set amount of time, check whether the thread has already
        found the optimal move, and make the move with a suitable delay.'''
        # Every 100 ms, check whether the thinker has already finished
if self.algoritem.poteza is not None:
self.pretekli_cas = time.time() - self.zacni_meriti_cas
if self.pretekli_cas < 1/2:
                # Make sure the computer does not play its move too quickly,
                # since such moves are hard to follow on the board
time.sleep(2/3)
            # If the game was interrupted in the meantime, do not make the move
if not self.je_treba_prekiniti:
self.gui.naredi_potezo(self.algoritem.poteza)
else:
            # If the move has not been computed yet, check again after a while
self.gui.plosca.after(100, self.preveri_potezo)
def prekini(self):
self.je_treba_prekiniti = True
if self.mislec:
            # Tell the algorithm to stop
self.algoritem.prekini()
            # Wait for the thinker to stop
self.mislec.join()
self.mislec = None
def klik(self, p):
pass
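# Hedged usage sketch (not part of the original module; `gui` and `algoritem`
# are hypothetical objects exposing the attributes used above, i.e.
# gui.igra/gui.plosca/gui.naredi_potezo and
# algoritem.izracunaj_potezo/prekini/poteza):
#
#     igralec = Racunalnik(gui, algoritem)
#     igralec.igraj()    # computes a move in a background thread
#     igralec.prekini()  # aborts the computation, e.g. when the game is reset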
| gpl-3.0 |
yeyanchao/calibre | src/routes/route.py | 41 | 29396 | import re
import sys
import urllib
if sys.version < '2.4':
from sets import ImmutableSet as frozenset
from routes.util import _url_quote as url_quote, _str_encode
class Route(object):
"""The Route object holds a route recognition and generation
routine.
See Route.__init__ docs for usage.
"""
# reserved keys that don't count
reserved_keys = ['requirements']
# special chars to indicate a natural split in the URL
done_chars = ('/', ',', ';', '.', '#')
def __init__(self, name, routepath, **kargs):
"""Initialize a route, with a given routepath for
matching/generation
The set of keyword args will be used as defaults.
Usage::
>>> from routes.base import Route
>>> newroute = Route(None, ':controller/:action/:id')
>>> sorted(newroute.defaults.items())
[('action', 'index'), ('id', None)]
>>> newroute = Route(None, 'date/:year/:month/:day',
... controller="blog", action="view")
>>> newroute = Route(None, 'archives/:page', controller="blog",
... action="by_page", requirements = { 'page':'\d{1,2}' })
>>> newroute.reqs
{'page': '\\\d{1,2}'}
.. Note::
Route is generally not called directly, a Mapper instance
connect method should be used to add routes.
"""
self.routepath = routepath
self.sub_domains = False
self.prior = None
self.redirect = False
self.name = name
self._kargs = kargs
self.minimization = kargs.pop('_minimize', False)
self.encoding = kargs.pop('_encoding', 'utf-8')
self.reqs = kargs.get('requirements', {})
self.decode_errors = 'replace'
        # Don't bother forming stuff we don't need if it's a static route
self.static = kargs.pop('_static', False)
self.filter = kargs.pop('_filter', None)
self.absolute = kargs.pop('_absolute', False)
# Pull out the member/collection name if present, this applies only to
# map.resource
self.member_name = kargs.pop('_member_name', None)
self.collection_name = kargs.pop('_collection_name', None)
self.parent_resource = kargs.pop('_parent_resource', None)
# Pull out route conditions
self.conditions = kargs.pop('conditions', None)
# Determine if explicit behavior should be used
self.explicit = kargs.pop('_explicit', False)
# Since static need to be generated exactly, treat them as
# non-minimized
if self.static:
self.external = '://' in self.routepath
self.minimization = False
# Strip preceding '/' if present, and not minimizing
if routepath.startswith('/') and self.minimization:
self.routepath = routepath[1:]
self._setup_route()
def _setup_route(self):
# Build our routelist, and the keys used in the route
self.routelist = routelist = self._pathkeys(self.routepath)
routekeys = frozenset([key['name'] for key in routelist
if isinstance(key, dict)])
self.dotkeys = frozenset([key['name'] for key in routelist
if isinstance(key, dict) and
key['type'] == '.'])
if not self.minimization:
self.make_full_route()
# Build a req list with all the regexp requirements for our args
self.req_regs = {}
for key, val in self.reqs.iteritems():
self.req_regs[key] = re.compile('^' + val + '$')
# Update our defaults and set new default keys if needed. defaults
# needs to be saved
(self.defaults, defaultkeys) = self._defaults(routekeys,
self.reserved_keys,
self._kargs.copy())
# Save the maximum keys we could utilize
self.maxkeys = defaultkeys | routekeys
# Populate our minimum keys, and save a copy of our backward keys for
# quicker generation later
(self.minkeys, self.routebackwards) = self._minkeys(routelist[:])
# Populate our hardcoded keys, these are ones that are set and don't
# exist in the route
self.hardcoded = frozenset([key for key in self.maxkeys \
if key not in routekeys and self.defaults[key] is not None])
# Cache our default keys
self._default_keys = frozenset(self.defaults.keys())
def make_full_route(self):
"""Make a full routelist string for use with non-minimized
generation"""
regpath = ''
for part in self.routelist:
if isinstance(part, dict):
regpath += '%(' + part['name'] + ')s'
else:
regpath += part
self.regpath = regpath
def make_unicode(self, s):
"""Transform the given argument into a unicode string."""
if isinstance(s, unicode):
return s
elif isinstance(s, str):
return s.decode(self.encoding)
elif callable(s):
return s
else:
return unicode(s)
def _pathkeys(self, routepath):
"""Utility function to walk the route, and pull out the valid
dynamic/wildcard keys."""
collecting = False
current = ''
done_on = ''
var_type = ''
just_started = False
routelist = []
for char in routepath:
if char in [':', '*', '{'] and not collecting and not self.static \
or char in ['{'] and not collecting:
just_started = True
collecting = True
var_type = char
if char == '{':
done_on = '}'
just_started = False
if len(current) > 0:
routelist.append(current)
current = ''
elif collecting and just_started:
just_started = False
if char == '(':
done_on = ')'
else:
current = char
done_on = self.done_chars + ('-',)
elif collecting and char not in done_on:
current += char
elif collecting:
collecting = False
if var_type == '{':
if current[0] == '.':
var_type = '.'
current = current[1:]
else:
var_type = ':'
opts = current.split(':')
if len(opts) > 1:
current = opts[0]
self.reqs[current] = opts[1]
routelist.append(dict(type=var_type, name=current))
if char in self.done_chars:
routelist.append(char)
done_on = var_type = current = ''
else:
current += char
if collecting:
routelist.append(dict(type=var_type, name=current))
elif current:
routelist.append(current)
return routelist
def _minkeys(self, routelist):
"""Utility function to walk the route backwards
Will also determine the minimum keys we can handle to generate
a working route.
routelist is a list of the '/' split route path
defaults is a dict of all the defaults provided for the route
"""
minkeys = []
backcheck = routelist[:]
# If we don't honor minimization, we need all the keys in the
# route path
if not self.minimization:
for part in backcheck:
if isinstance(part, dict):
minkeys.append(part['name'])
return (frozenset(minkeys), backcheck)
gaps = False
backcheck.reverse()
for part in backcheck:
if not isinstance(part, dict) and part not in self.done_chars:
gaps = True
continue
elif not isinstance(part, dict):
continue
key = part['name']
if self.defaults.has_key(key) and not gaps:
continue
minkeys.append(key)
gaps = True
return (frozenset(minkeys), backcheck)
def _defaults(self, routekeys, reserved_keys, kargs):
"""Creates default set with values stringified
Put together our list of defaults, stringify non-None values
and add in our action/id default if they use it and didn't
specify it.
defaultkeys is a list of the currently assumed default keys
routekeys is a list of the keys found in the route path
        reserved_keys is a list of keys that are not counted as defaults
"""
defaults = {}
# Add in a controller/action default if they don't exist
if 'controller' not in routekeys and 'controller' not in kargs \
and not self.explicit:
kargs['controller'] = 'content'
if 'action' not in routekeys and 'action' not in kargs \
and not self.explicit:
kargs['action'] = 'index'
defaultkeys = frozenset([key for key in kargs.keys() \
if key not in reserved_keys])
for key in defaultkeys:
if kargs[key] is not None:
defaults[key] = self.make_unicode(kargs[key])
else:
defaults[key] = None
if 'action' in routekeys and not defaults.has_key('action') \
and not self.explicit:
defaults['action'] = 'index'
if 'id' in routekeys and not defaults.has_key('id') \
and not self.explicit:
defaults['id'] = None
newdefaultkeys = frozenset([key for key in defaults.keys() \
if key not in reserved_keys])
return (defaults, newdefaultkeys)
def makeregexp(self, clist, include_names=True):
"""Create a regular expression for matching purposes
Note: This MUST be called before match can function properly.
clist should be a list of valid controller strings that can be
matched, for this reason makeregexp should be called by the web
framework after it knows all available controllers that can be
utilized.
include_names indicates whether this should be a match regexp
assigned to itself using regexp grouping names, or if names
should be excluded for use in a single larger regexp to
determine if any routes match
"""
if self.minimization:
reg = self.buildnextreg(self.routelist, clist, include_names)[0]
if not reg:
reg = '/'
reg = reg + '/?' + '$'
if not reg.startswith('/'):
reg = '/' + reg
else:
reg = self.buildfullreg(clist, include_names)
reg = '^' + reg
if not include_names:
return reg
self.regexp = reg
self.regmatch = re.compile(reg)
def buildfullreg(self, clist, include_names=True):
"""Build the regexp by iterating through the routelist and
replacing dicts with the appropriate regexp match"""
regparts = []
for part in self.routelist:
if isinstance(part, dict):
var = part['name']
if var == 'controller':
partmatch = '|'.join(map(re.escape, clist))
elif part['type'] == ':':
partmatch = self.reqs.get(var) or '[^/]+?'
elif part['type'] == '.':
partmatch = self.reqs.get(var) or '[^/.]+?'
else:
partmatch = self.reqs.get(var) or '.+?'
if include_names:
regpart = '(?P<%s>%s)' % (var, partmatch)
else:
regpart = '(?:%s)' % partmatch
if part['type'] == '.':
regparts.append('(?:\.%s)??' % regpart)
else:
regparts.append(regpart)
else:
regparts.append(re.escape(part))
regexp = ''.join(regparts) + '$'
return regexp
def buildnextreg(self, path, clist, include_names=True):
"""Recursively build our regexp given a path, and a controller
list.
Returns the regular expression string, and two booleans that
can be ignored as they're only used internally by buildnextreg.
"""
if path:
part = path[0]
else:
part = ''
reg = ''
# noreqs will remember whether the remainder has either a string
# match, or a non-defaulted regexp match on a key, allblank remembers
        # if the rest could possibly be completely empty
(rest, noreqs, allblank) = ('', True, True)
if len(path[1:]) > 0:
self.prior = part
(rest, noreqs, allblank) = self.buildnextreg(path[1:], clist, include_names)
if isinstance(part, dict) and part['type'] in (':', '.'):
var = part['name']
typ = part['type']
partreg = ''
# First we plug in the proper part matcher
if self.reqs.has_key(var):
if include_names:
partreg = '(?P<%s>%s)' % (var, self.reqs[var])
else:
partreg = '(?:%s)' % self.reqs[var]
if typ == '.':
partreg = '(?:\.%s)??' % partreg
elif var == 'controller':
if include_names:
partreg = '(?P<%s>%s)' % (var, '|'.join(map(re.escape, clist)))
else:
partreg = '(?:%s)' % '|'.join(map(re.escape, clist))
elif self.prior in ['/', '#']:
if include_names:
partreg = '(?P<' + var + '>[^' + self.prior + ']+?)'
else:
partreg = '(?:[^' + self.prior + ']+?)'
else:
if not rest:
if typ == '.':
exclude_chars = '/.'
else:
exclude_chars = '/'
if include_names:
partreg = '(?P<%s>[^%s]+?)' % (var, exclude_chars)
else:
partreg = '(?:[^%s]+?)' % exclude_chars
if typ == '.':
partreg = '(?:\.%s)??' % partreg
else:
end = ''.join(self.done_chars)
rem = rest
if rem[0] == '\\' and len(rem) > 1:
rem = rem[1]
elif rem.startswith('(\\') and len(rem) > 2:
rem = rem[2]
else:
rem = end
rem = frozenset(rem) | frozenset(['/'])
if include_names:
partreg = '(?P<%s>[^%s]+?)' % (var, ''.join(rem))
else:
partreg = '(?:[^%s]+?)' % ''.join(rem)
if self.reqs.has_key(var):
noreqs = False
if not self.defaults.has_key(var):
allblank = False
noreqs = False
# Now we determine if its optional, or required. This changes
# depending on what is in the rest of the match. If noreqs is
        # true, then it's possible the entire thing is optional as there's
# no reqs or string matches.
if noreqs:
# The rest is optional, but now we have an optional with a
# regexp. Wrap to ensure that if we match anything, we match
# our regexp first. It's still possible we could be completely
# blank as we have a default
if self.reqs.has_key(var) and self.defaults.has_key(var):
reg = '(' + partreg + rest + ')?'
# Or we have a regexp match with no default, so now being
# completely blank form here on out isn't possible
elif self.reqs.has_key(var):
allblank = False
reg = partreg + rest
# If the character before this is a special char, it has to be
# followed by this
elif self.defaults.has_key(var) and \
self.prior in (',', ';', '.'):
reg = partreg + rest
# Or we have a default with no regexp, don't touch the allblank
elif self.defaults.has_key(var):
reg = partreg + '?' + rest
# Or we have a key with no default, and no reqs. Not possible
# to be all blank from here
else:
allblank = False
reg = partreg + rest
# In this case, we have something dangling that might need to be
# matched
else:
# If they can all be blank, and we have a default here, we know
# its safe to make everything from here optional. Since
# something else in the chain does have req's though, we have
# to make the partreg here required to continue matching
if allblank and self.defaults.has_key(var):
reg = '(' + partreg + rest + ')?'
# Same as before, but they can't all be blank, so we have to
# require it all to ensure our matches line up right
else:
reg = partreg + rest
elif isinstance(part, dict) and part['type'] == '*':
var = part['name']
if noreqs:
if include_names:
reg = '(?P<%s>.*)' % var + rest
else:
reg = '(?:.*)' + rest
if not self.defaults.has_key(var):
allblank = False
noreqs = False
else:
if allblank and self.defaults.has_key(var):
if include_names:
reg = '(?P<%s>.*)' % var + rest
else:
reg = '(?:.*)' + rest
elif self.defaults.has_key(var):
if include_names:
reg = '(?P<%s>.*)' % var + rest
else:
reg = '(?:.*)' + rest
else:
if include_names:
reg = '(?P<%s>.*)' % var + rest
else:
reg = '(?:.*)' + rest
allblank = False
noreqs = False
elif part and part[-1] in self.done_chars:
if allblank:
reg = re.escape(part[:-1]) + '(' + re.escape(part[-1]) + rest
reg += ')?'
else:
allblank = False
reg = re.escape(part) + rest
# We have a normal string here, this is a req, and it prevents us from
# being all blank
else:
noreqs = False
allblank = False
reg = re.escape(part) + rest
return (reg, noreqs, allblank)
def match(self, url, environ=None, sub_domains=False,
sub_domains_ignore=None, domain_match=''):
"""Match a url to our regexp.
        While the regexp might match, this operation isn't
        guaranteed, as there are other factors that can cause a match to
        fail even though the regexp succeeds (a default that was relied
        on wasn't given, a requirement regexp doesn't pass, etc.).
        Therefore the calling function shouldn't assume this will
        return a valid dict; the other possible return value is False if a
        match doesn't work out.
"""
# Static routes don't match, they generate only
if self.static:
return False
match = self.regmatch.match(url)
if not match:
return False
sub_domain = None
if sub_domains and environ and 'HTTP_HOST' in environ:
host = environ['HTTP_HOST'].split(':')[0]
sub_match = re.compile('^(.+?)\.%s$' % domain_match)
subdomain = re.sub(sub_match, r'\1', host)
if subdomain not in sub_domains_ignore and host != subdomain:
sub_domain = subdomain
if self.conditions:
if 'method' in self.conditions and environ and \
environ['REQUEST_METHOD'] not in self.conditions['method']:
return False
# Check sub-domains?
use_sd = self.conditions.get('sub_domain')
if use_sd and not sub_domain:
return False
elif not use_sd and 'sub_domain' in self.conditions and sub_domain:
return False
if isinstance(use_sd, list) and sub_domain not in use_sd:
return False
matchdict = match.groupdict()
result = {}
extras = self._default_keys - frozenset(matchdict.keys())
for key, val in matchdict.iteritems():
if key != 'path_info' and self.encoding:
# change back into python unicode objects from the URL
# representation
try:
val = val and val.decode(self.encoding, self.decode_errors)
except UnicodeDecodeError:
return False
if not val and key in self.defaults and self.defaults[key]:
result[key] = self.defaults[key]
else:
result[key] = val
for key in extras:
result[key] = self.defaults[key]
# Add the sub-domain if there is one
if sub_domains:
result['sub_domain'] = sub_domain
# If there's a function, call it with environ and expire if it
# returns False
if self.conditions and 'function' in self.conditions and \
not self.conditions['function'](environ, result):
return False
return result
def generate_non_minimized(self, kargs):
"""Generate a non-minimal version of the URL"""
# Iterate through the keys that are defaults, and NOT in the route
        # path. If it's not in kargs, or doesn't match, or is None, this
# route won't work
for k in self.maxkeys - self.minkeys:
if k not in kargs:
return False
elif self.make_unicode(kargs[k]) != \
self.make_unicode(self.defaults[k]):
return False
# Ensure that all the args in the route path are present and not None
for arg in self.minkeys:
if arg not in kargs or kargs[arg] is None:
if arg in self.dotkeys:
kargs[arg] = ''
else:
return False
# Encode all the argument that the regpath can use
for k in kargs:
if k in self.maxkeys:
if k in self.dotkeys:
if kargs[k]:
kargs[k] = url_quote('.' + kargs[k], self.encoding)
else:
kargs[k] = url_quote(kargs[k], self.encoding)
return self.regpath % kargs
def generate_minimized(self, kargs):
"""Generate a minimized version of the URL"""
routelist = self.routebackwards
urllist = []
gaps = False
for part in routelist:
if isinstance(part, dict) and part['type'] in (':', '.'):
arg = part['name']
# For efficiency, check these just once
has_arg = kargs.has_key(arg)
has_default = self.defaults.has_key(arg)
# Determine if we can leave this part off
# First check if the default exists and wasn't provided in the
# call (also no gaps)
if has_default and not has_arg and not gaps:
continue
# Now check to see if there's a default and it matches the
# incoming call arg
if (has_default and has_arg) and self.make_unicode(kargs[arg]) == \
self.make_unicode(self.defaults[arg]) and not gaps:
continue
# We need to pull the value to append, if the arg is None and
# we have a default, use that
if has_arg and kargs[arg] is None and has_default and not gaps:
continue
# Otherwise if we do have an arg, use that
elif has_arg:
val = kargs[arg]
elif has_default and self.defaults[arg] is not None:
val = self.defaults[arg]
# Optional format parameter?
elif part['type'] == '.':
continue
# No arg at all? This won't work
else:
return False
urllist.append(url_quote(val, self.encoding))
if part['type'] == '.':
urllist.append('.')
if has_arg:
del kargs[arg]
gaps = True
elif isinstance(part, dict) and part['type'] == '*':
arg = part['name']
kar = kargs.get(arg)
if kar is not None:
urllist.append(url_quote(kar, self.encoding))
gaps = True
elif part and part[-1] in self.done_chars:
if not gaps and part in self.done_chars:
continue
elif not gaps:
urllist.append(part[:-1])
gaps = True
else:
gaps = True
urllist.append(part)
else:
gaps = True
urllist.append(part)
urllist.reverse()
url = ''.join(urllist)
return url
def generate(self, _ignore_req_list=False, _append_slash=False, **kargs):
"""Generate a URL from ourself given a set of keyword arguments
Toss an exception if this
set of keywords would cause a gap in the url.
"""
# Verify that our args pass any regexp requirements
if not _ignore_req_list:
for key in self.reqs.keys():
val = kargs.get(key)
if val and not self.req_regs[key].match(self.make_unicode(val)):
return False
# Verify that if we have a method arg, its in the method accept list.
# Also, method will be changed to _method for route generation
meth = kargs.get('method')
if meth:
if self.conditions and 'method' in self.conditions \
and meth.upper() not in self.conditions['method']:
return False
kargs.pop('method')
if self.minimization:
url = self.generate_minimized(kargs)
else:
url = self.generate_non_minimized(kargs)
if url is False:
return url
if not url.startswith('/') and not self.static:
url = '/' + url
extras = frozenset(kargs.keys()) - self.maxkeys
if extras:
if _append_slash and not url.endswith('/'):
url += '/'
fragments = []
# don't assume the 'extras' set preserves order: iterate
# through the ordered kargs instead
for key in kargs:
if key not in extras:
continue
if key == 'action' or key == 'controller':
continue
val = kargs[key]
if isinstance(val, (tuple, list)):
for value in val:
fragments.append((key, _str_encode(value, self.encoding)))
else:
fragments.append((key, _str_encode(val, self.encoding)))
if fragments:
url += '?'
url += urllib.urlencode(fragments)
elif _append_slash and not url.endswith('/'):
url += '/'
return url
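# Hedged usage sketch (not part of the original module; it follows the
# makeregexp()/match() contract documented above -- makeregexp() must be
# called with the list of known controllers before match() can work):
#
#     route = Route(None, '/:controller/:action/:id')
#     route.makeregexp(['blog', 'content'])
#     route.match('/blog/view/3')
#     # -> {'controller': u'blog', 'action': u'view', 'id': u'3'},
#     # or False when the URL does not match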
| gpl-3.0 |
LittleLama/Sick-Beard-BoxCar2 | sickbeard/clients/requests/packages/charade/__init__.py | 122 | 1327 | ######################## BEGIN LICENSE BLOCK ########################
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
__version__ = "1.0.3"
from sys import version_info
def detect(aBuf):
if ((version_info < (3, 0) and isinstance(aBuf, unicode)) or
(version_info >= (3, 0) and not isinstance(aBuf, bytes))):
raise ValueError('Expected a bytes object, not a unicode object')
from . import universaldetector
u = universaldetector.UniversalDetector()
u.reset()
u.feed(aBuf)
u.close()
return u.result
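# Hedged usage sketch (not part of the original module; assumes the package is
# importable as `charade`). detect() expects bytes and returns the
# UniversalDetector result, a dict carrying the guessed encoding and a
# confidence value:
#
#     from charade import detect
#     guess = detect(b'\xc3\xa9chantillon')
#     # e.g. {'encoding': 'utf-8', 'confidence': ...}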
| gpl-3.0 |
zauonlok/server | server/server/server.py | 1 | 5551 | import logging
import socket
import sys
from ..common import Message, monitor
from .connection import Connection, ConnectionBroken, ConnectionClosed
from .multiplexer import EpollMultiplexer, SelectMultiplexer
class Server:
def __init__(self, host='', port=0, backlog=socket.SOMAXCONN):
if sys.platform.startswith('linux'): # linux box
self._multiplexer = EpollMultiplexer()
logging.info('using epoll multiplexer')
        else:  # any other platform falls back to select
self._multiplexer = SelectMultiplexer()
logging.info('using select multiplexer')
self._sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self._sock.bind((host, port))
self._sock.listen(backlog)
self._multiplexer.register(self._sock.fileno(), for_read=True)
logging.info('running at {}'.format(self._sock.getsockname()))
self._connections = {}
self._addr2fileno = {}
def connect_application(self, to_app_queue, from_app_queue):
self._to_app_queue = to_app_queue
self._from_app_queue = from_app_queue
self._multiplexer.register(self._from_app_queue.fileno(), for_read=True)
logging.info('application connected')
def start(self):
while True:
read_ready, write_ready = self._multiplexer.poll()
for fileno in read_ready:
if fileno == self._sock.fileno(): # server socket
self._handle_accept()
elif fileno == self._from_app_queue.fileno(): # app msg queue
self._handle_message()
else: # client socket
self._handle_read(fileno)
for fileno in write_ready:
self._handle_write(fileno)
@monitor
def _handle_accept(self):
sock, addr = self._sock.accept()
logging.info('accept: fileno: {}, addr: {}'.format(sock.fileno(), addr))
sock.setblocking(0)
self._multiplexer.register(sock.fileno(), for_read=True)
self._connections[sock.fileno()] = Connection(sock, addr)
self._addr2fileno[addr] = sock.fileno()
@monitor
def _handle_message(self):
message = self._from_app_queue.get()
addr, data = message
if addr not in self._addr2fileno: # already closed
logging.info('message: addr: {}, already closed'.format(addr))
return
fileno = self._addr2fileno[addr]
if data is None: # ask to close
self._handle_close(fileno)
logging.info('message: addr: {}, fileno: {}, asked to close'.format(
addr, fileno))
elif len(data) > 0: # ask to send
conn = self._connections[fileno]
conn.submit_data(data)
self._multiplexer.modify(fileno, for_read=True, for_write=True)
logging.info('message: addr: {}, fileno: {}, datalen: {}'.format(
addr, fileno, len(data)))
else: # empty
pass
@monitor
def _handle_read(self, fileno):
if fileno not in self._connections: # already closed
logging.info('read: fileno: {}, already closed'.format(fileno))
return
conn = self._connections[fileno]
addr = conn.get_address()
try:
contents = conn.receive_data()
except (ConnectionBroken, ConnectionClosed) as e: # abnormal close
self._handle_close(fileno)
self._notify_closed(addr) # notify the app
logging.warning('read: fileno: {}, abnormal close'.format(fileno))
else:
for content in contents:
message = Message(addr, content)
self._to_app_queue.put(message)
logging.info('read: fileno: {}, datalen: {}'.format(
fileno, len(content)))
@monitor
def _handle_write(self, fileno):
if fileno not in self._connections: # already closed
logging.info('write: fileno: {}, already closed'.format(fileno))
return
conn = self._connections[fileno]
addr = conn.get_address()
try:
remaining = conn.send_data()
except (ConnectionBroken, ConnectionClosed) as e: # abnormal close
self._handle_close(fileno)
self._notify_closed(addr) # notify the app
logging.warning('write: fileno: {}, abnormal close'.format(fileno))
else:
if remaining == 0: # no more
self._multiplexer.modify(fileno, for_read=True, for_write=False)
logging.info('write: fileno: {}, remaining: {}'.format(
fileno, remaining))
def _handle_close(self, fileno):
self._multiplexer.unregister(fileno)
conn = self._connections[fileno]
addr = conn.get_address()
conn.close()
del self._connections[fileno]
del self._addr2fileno[addr]
def _notify_closed(self, addr):
message = Message(addr, None)
self._to_app_queue.put(message)
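# Hedged usage sketch (not part of the original module): the two queue objects
# are assumed to expose fileno(), get() and put(), matching how
# connect_application() and the poll loop use them above.
#
#     server = Server(host='0.0.0.0', port=9000)
#     server.connect_application(to_app_queue, from_app_queue)
#     server.start()  # blocks, multiplexing client sockets and app messages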
| mit |
jonathanverner/brython | www/gallery/sudoku.py | 16 | 1493 | # Boris Borcic 2006
# Quick and concise Python 2.5 sudoku solver
#
# Adapted for Brython by Pierre Quentel
# load pre-computed tables
import json
data = json.loads(open('sudoku.json').read())
w2q = data['w2q']
q2w = data['q2w']
w2q2w = data['w2q2w']
class Completed(Exception) : pass
def sudoku99(problem) :
givens = list(9*j+int(k)-1 for j,k in enumerate(problem[:81]) if '0'<k)
try :
search(givens,[9]*len(q2w),set(),set())
except Completed as ws :
return ''.join(str(w%9+1) for w in sorted(ws.args[0]))
def search(w0s,q2nw,takens,ws) :
while 1 :
i = 0
while w0s:
w0 = w0s.pop()
takens.add(w0)
ws.add(w0)
for q in w2q[w0] : q2nw[q]+=100
for w in set(w2q2w[w0]) - takens :
takens.add(w)
for q in w2q[w] :
n = q2nw[q] = q2nw[q]-1
if n<2 :
w0s.append((set(q2w[q])-takens).pop())
if len(ws)>80 :
raise Completed(ws)
w1,w0 = set(q2w[q2nw.index(2)])-takens
try : search([w1],q2nw[:],takens.copy(),ws.copy())
except KeyError :
w0s.append(w0)
if __name__=='__main__':
#print(sudoku99('530070000600195000098000060800060003400803001700020006060000280000419005000080079'))
data = '004050003'+'906400000'+'130006000'+'020310000'+'090000080'+'000047050'+\
'000070038'+'000002709'+'600090100'
print(sudoku99(data))
| bsd-3-clause |
Bysmyyr/chromium-crosswalk | tools/telemetry/third_party/typ/typ/tests/main_test.py | 33 | 26960 | # Copyright 2014 Dirk Pranke. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import json
import os
import sys
import textwrap
from typ import main
from typ import test_case
from typ import Host
from typ import VERSION
from typ.fakes import test_result_server_fake
is_python3 = bool(sys.version_info.major == 3)
if is_python3: # pragma: python3
# pylint: disable=redefined-builtin,invalid-name
unicode = str
d = textwrap.dedent
PASS_TEST_PY = """
import unittest
class PassingTest(unittest.TestCase):
def test_pass(self):
pass
"""
PASS_TEST_FILES = {'pass_test.py': PASS_TEST_PY}
FAIL_TEST_PY = """
import unittest
class FailingTest(unittest.TestCase):
def test_fail(self):
self.fail()
"""
FAIL_TEST_FILES = {'fail_test.py': FAIL_TEST_PY}
OUTPUT_TEST_PY = """
import sys
import unittest
class PassTest(unittest.TestCase):
def test_out(self):
sys.stdout.write("hello on stdout\\n")
sys.stdout.flush()
def test_err(self):
sys.stderr.write("hello on stderr\\n")
class FailTest(unittest.TestCase):
def test_out_err_fail(self):
sys.stdout.write("hello on stdout\\n")
sys.stdout.flush()
sys.stderr.write("hello on stderr\\n")
self.fail()
"""
OUTPUT_TEST_FILES = {'output_test.py': OUTPUT_TEST_PY}
SF_TEST_PY = """
import sys
import unittest
class SkipMethods(unittest.TestCase):
@unittest.skip('reason')
def test_reason(self):
self.fail()
@unittest.skipIf(True, 'reason')
def test_skip_if_true(self):
self.fail()
@unittest.skipIf(False, 'reason')
def test_skip_if_false(self):
self.fail()
class SkipSetup(unittest.TestCase):
def setUp(self):
self.skipTest('setup failed')
def test_notrun(self):
self.fail()
@unittest.skip('skip class')
class SkipClass(unittest.TestCase):
def test_method(self):
self.fail()
class SetupClass(unittest.TestCase):
@classmethod
def setUpClass(cls):
sys.stdout.write('in setupClass\\n')
sys.stdout.flush()
assert False, 'setupClass failed'
def test_method1(self):
pass
def test_method2(self):
pass
class ExpectedFailures(unittest.TestCase):
@unittest.expectedFailure
def test_fail(self):
self.fail()
@unittest.expectedFailure
def test_pass(self):
pass
"""
SF_TEST_FILES = {'sf_test.py': SF_TEST_PY}
LOAD_TEST_PY = """
import unittest
def load_tests(_, _2, _3):
class BaseTest(unittest.TestCase):
pass
def method_fail(self):
self.fail()
def method_pass(self):
pass
setattr(BaseTest, "test_fail", method_fail)
setattr(BaseTest, "test_pass", method_pass)
suite = unittest.TestSuite()
suite.addTest(BaseTest("test_fail"))
suite.addTest(BaseTest("test_pass"))
return suite
"""
LOAD_TEST_FILES = {'load_test.py': LOAD_TEST_PY}
path_to_main = os.path.join(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
'runner.py')
class TestCli(test_case.MainTestCase):
prog = [sys.executable, path_to_main]
files_to_ignore = ['*.pyc']
def test_bad_arg(self):
self.check(['--bad-arg'], ret=2, out='',
rerr='.*: error: unrecognized arguments: --bad-arg\n')
self.check(['-help'], ret=2, out='',
rerr=(".*: error: argument -h/--help: "
"ignored explicit argument 'elp'\n"))
def test_bad_metadata(self):
self.check(['--metadata', 'foo'], ret=2, err='',
out='Error: malformed --metadata "foo"\n')
def test_basic(self):
self.check([], files=PASS_TEST_FILES,
ret=0,
out=('[1/1] pass_test.PassingTest.test_pass passed\n'
'1 test run, 0 failures.\n'), err='')
def test_coverage(self):
try:
import coverage # pylint: disable=W0612
files = {
'pass_test.py': PASS_TEST_PY,
'fail_test.py': FAIL_TEST_PY,
}
self.check(['-c', 'pass_test'], files=files, ret=0, err='',
out=d("""\
[1/1] pass_test.PassingTest.test_pass passed
1 test run, 0 failures.
Name Stmts Miss Cover
-------------------------------
fail_test 4 4 0%
pass_test 4 0 100%
-------------------------------
TOTAL 8 4 50%
"""))
except ImportError: # pragma: no cover
# We can never cover this line, since running coverage means
# that import will succeed.
self.check(['-c'], files=PASS_TEST_FILES, ret=1,
out='Error: coverage is not installed\n', err='')
def test_debugger(self):
if sys.version_info.major == 3: # pragma: python3
return
else: # pragma: python2
_, out, _, _ = self.check(['-d'], stdin='quit()\n',
files=PASS_TEST_FILES, ret=0, err='')
self.assertIn('(Pdb) ', out)
def test_dryrun(self):
self.check(['-n'], files=PASS_TEST_FILES, ret=0, err='',
out=d("""\
[1/1] pass_test.PassingTest.test_pass passed
1 test run, 0 failures.
"""))
def test_error(self):
files = {'err_test.py': d("""\
import unittest
class ErrTest(unittest.TestCase):
def test_err(self):
foo = bar
""")}
_, out, _, _ = self.check([''], files=files, ret=1, err='')
self.assertIn('[1/1] err_test.ErrTest.test_err failed unexpectedly',
out)
self.assertIn('1 test run, 1 failure', out)
def test_fail(self):
_, out, _, _ = self.check([], files=FAIL_TEST_FILES, ret=1, err='')
self.assertIn('fail_test.FailingTest.test_fail failed unexpectedly',
out)
def test_fail_then_pass(self):
files = {'fail_then_pass_test.py': d("""\
import unittest
count = 0
class FPTest(unittest.TestCase):
def test_count(self):
global count
count += 1
if count == 1:
self.fail()
""")}
_, out, _, files = self.check(['--retry-limit', '3',
'--write-full-results-to',
'full_results.json'],
files=files, ret=0, err='')
self.assertIn('Retrying failed tests (attempt #1 of 3)', out)
self.assertNotIn('Retrying failed tests (attempt #2 of 3)', out)
self.assertIn('1 test run, 0 failures.\n', out)
results = json.loads(files['full_results.json'])
self.assertEqual(
results['tests'][
'fail_then_pass_test']['FPTest']['test_count']['actual'],
'FAIL PASS')
def test_failures_are_not_elided(self):
_, out, _, _ = self.check(['--terminal-width=20'],
files=FAIL_TEST_FILES, ret=1, err='')
self.assertIn('[1/1] fail_test.FailingTest.test_fail failed '
'unexpectedly:\n', out)
def test_file_list(self):
files = PASS_TEST_FILES
self.check(['-f', '-'], files=files, stdin='pass_test\n', ret=0)
self.check(['-f', '-'], files=files, stdin='pass_test.PassingTest\n',
ret=0)
self.check(['-f', '-'], files=files,
stdin='pass_test.PassingTest.test_pass\n',
ret=0)
files = {'pass_test.py': PASS_TEST_PY,
'test_list.txt': 'pass_test.PassingTest.test_pass\n'}
self.check(['-f', 'test_list.txt'], files=files, ret=0)
def test_find(self):
files = PASS_TEST_FILES
self.check(['-l'], files=files, ret=0,
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', 'pass_test'], files=files, ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', 'pass_test.py'], files=files, ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', './pass_test.py'], files=files, ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', '.'], files=files, ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', 'pass_test.PassingTest.test_pass'], files=files,
ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
self.check(['-l', '.'], files=files, ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
def test_find_from_subdirs(self):
files = {
'foo/__init__.py': '',
'foo/pass_test.py': PASS_TEST_PY,
'bar/__init__.py': '',
'bar/tmp': '',
}
self.check(['-l', '../foo/pass_test.py'], files=files, cwd='bar',
ret=0, err='',
out='foo.pass_test.PassingTest.test_pass\n')
self.check(['-l', 'foo'], files=files, cwd='bar',
ret=0, err='',
out='foo.pass_test.PassingTest.test_pass\n')
self.check(['-l', '--path', '../foo', 'pass_test'],
files=files, cwd='bar', ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
def test_help(self):
self.check(['--help'], ret=0, rout='.*', err='')
def test_import_failure_missing_file(self):
self.check(['-l', 'foo'], ret=1, err='',
rout='Failed to load "foo".*')
def test_import_failure_missing_package(self):
files = {'foo.py': d("""\
import unittest
import package_that_does_not_exist
class ImportFailureTest(unittest.TestCase):
def test_case(self):
pass
""")}
self.check(['-l', 'foo.py'], files=files, ret=1, err='',
rout=('Failed to load "foo.py": No module named '
'\'?package_that_does_not_exist\'?\n'))
def test_import_failure_no_tests(self):
files = {'foo.py': 'import unittest'}
self.check(['-l', 'foo.bar'], files=files, ret=1, err='',
rout='Failed to load "foo.bar":.*')
def test_import_failure_syntax_error(self):
files = {'syn_test.py': d("""\
import unittest
class SyntaxErrorTest(unittest.TestCase):
def test_syntax_error_in_test(self):
syntax error
""")}
_, out, _, _ = self.check([], files=files, ret=1, err='')
self.assertIn('Failed to import test module: syn_test', out)
self.assertIn((' syntax error\n'
' ^\n'
'SyntaxError: invalid syntax\n'), out)
def test_interrupt(self):
files = {'interrupt_test.py': d("""\
import unittest
class Foo(unittest.TestCase):
def test_interrupt(self):
raise KeyboardInterrupt()
""")}
self.check(['-j', '1'], files=files, ret=130, out='',
err='interrupted, exiting\n')
def test_isolate(self):
self.check(['--isolate', '*test_pass*'], files=PASS_TEST_FILES, ret=0,
out=('[1/1] pass_test.PassingTest.test_pass passed\n'
'1 test run, 0 failures.\n'), err='')
def test_load_tests_failure(self):
files = {'foo_test.py': d("""\
import unittest
def load_tests(_, _2, _3):
raise ValueError('this should fail')
""")}
self.check([], files=files, ret=1, err='',
out=('foo_test.load_tests() failed: this should fail\n'))
def test_load_tests_single_worker(self):
files = LOAD_TEST_FILES
_, out, _, _ = self.check(['-j', '1', '-v'], files=files, ret=1,
err='')
self.assertIn('[1/2] load_test.BaseTest.test_fail failed', out)
self.assertIn('[2/2] load_test.BaseTest.test_pass passed', out)
self.assertIn('2 tests run, 1 failure.\n', out)
def test_load_tests_multiple_workers(self):
_, out, _, _ = self.check([], files=LOAD_TEST_FILES, ret=1, err='')
# The output for this test is nondeterministic since we may run
# two tests in parallel. So, we just test that some of the substrings
# we care about are present.
self.assertIn('test_pass passed', out)
self.assertIn('test_fail failed', out)
self.assertIn('2 tests run, 1 failure.\n', out)
def test_missing_builder_name(self):
self.check(['--test-results-server', 'localhost'], ret=2,
out=('Error: --builder-name must be specified '
'along with --test-result-server\n'
'Error: --master-name must be specified '
'along with --test-result-server\n'
'Error: --test-type must be specified '
'along with --test-result-server\n'), err='')
def test_ninja_status_env(self):
self.check(['-v', 'output_test.PassTest.test_out'],
files=OUTPUT_TEST_FILES, aenv={'NINJA_STATUS': 'ns: '},
out=d("""\
ns: output_test.PassTest.test_out passed
1 test run, 0 failures.
"""), err='')
def test_output_for_failures(self):
_, out, _, _ = self.check(['output_test.FailTest'],
files=OUTPUT_TEST_FILES,
ret=1, err='')
self.assertIn('[1/1] output_test.FailTest.test_out_err_fail '
'failed unexpectedly:\n'
' hello on stdout\n'
' hello on stderr\n', out)
def test_quiet(self):
self.check(['-q'], files=PASS_TEST_FILES, ret=0, err='', out='')
def test_retry_limit(self):
_, out, _, _ = self.check(['--retry-limit', '2'],
files=FAIL_TEST_FILES, ret=1, err='')
self.assertIn('Retrying failed tests', out)
lines = out.splitlines()
self.assertEqual(len([l for l in lines
if 'test_fail failed unexpectedly:' in l]),
3)
def test_skip(self):
self.check(['--skip', '*test_fail*'], files=FAIL_TEST_FILES, ret=1,
out='No tests to run.\n', err='')
files = {'fail_test.py': FAIL_TEST_PY,
'pass_test.py': PASS_TEST_PY}
self.check(['-j', '1', '--skip', '*test_fail*'], files=files, ret=0,
out=('[1/2] fail_test.FailingTest.test_fail was skipped\n'
'[2/2] pass_test.PassingTest.test_pass passed\n'
'2 tests run, 0 failures.\n'), err='')
# This tests that we print test_started updates for skipped tests
# properly. It also tests how overwriting works.
_, out, _, _ = self.check(['-j', '1', '--overwrite', '--skip',
'*test_fail*'], files=files, ret=0,
err='', universal_newlines=False)
# We test this string separately and call out.strip() to
# avoid the trailing \r\n we get on windows, while keeping
# the \r's elsewhere in the string.
self.assertMultiLineEqual(
out.strip(),
('[0/2] fail_test.FailingTest.test_fail\r'
' \r'
'[1/2] fail_test.FailingTest.test_fail was skipped\r'
' \r'
'[1/2] pass_test.PassingTest.test_pass\r'
' \r'
'[2/2] pass_test.PassingTest.test_pass passed\r'
' \r'
'2 tests run, 0 failures.'))
def test_skips_and_failures(self):
_, out, _, _ = self.check(['-j', '1', '-v', '-v'], files=SF_TEST_FILES,
ret=1, err='')
# We do a bunch of assertIn()'s to work around the non-portable
# tracebacks.
self.assertIn(('[1/9] sf_test.ExpectedFailures.test_fail failed:\n'
' Traceback '), out)
self.assertIn(('[2/9] sf_test.ExpectedFailures.test_pass '
'passed unexpectedly'), out)
self.assertIn(('[3/9] sf_test.SetupClass.test_method1 '
'failed unexpectedly:\n'
' in setupClass\n'), out)
self.assertIn(('[4/9] sf_test.SetupClass.test_method2 '
'failed unexpectedly:\n'
' in setupClass\n'), out)
self.assertIn(('[5/9] sf_test.SkipClass.test_method was skipped:\n'
' skip class\n'), out)
self.assertIn(('[6/9] sf_test.SkipMethods.test_reason was skipped:\n'
' reason\n'), out)
self.assertIn(('[7/9] sf_test.SkipMethods.test_skip_if_false '
'failed unexpectedly:\n'
' Traceback'), out)
self.assertIn(('[8/9] sf_test.SkipMethods.test_skip_if_true '
'was skipped:\n'
' reason\n'
'[9/9] sf_test.SkipSetup.test_notrun was skipped:\n'
' setup failed\n'
'9 tests run, 4 failures.\n'), out)
def test_skip_and_all(self):
# --all should override --skip
self.check(['-l', '--skip', '*test_pass'],
files=PASS_TEST_FILES, ret=1, err='',
out='No tests to run.\n')
self.check(['-l', '--all', '--skip', '*test_pass'],
files=PASS_TEST_FILES, ret=0, err='',
out='pass_test.PassingTest.test_pass\n')
def test_skip_decorators_and_all(self):
_, out, _, _ = self.check(['--all', '-j', '1', '-v', '-v'],
files=SF_TEST_FILES, ret=1, err='')
self.assertIn('sf_test.SkipClass.test_method failed', out)
self.assertIn('sf_test.SkipMethods.test_reason failed', out)
self.assertIn('sf_test.SkipMethods.test_skip_if_true failed', out)
self.assertIn('sf_test.SkipMethods.test_skip_if_false failed', out)
# --all does not override explicit calls to skipTest(), only
# the decorators.
self.assertIn('sf_test.SkipSetup.test_notrun was skipped', out)
def test_subdir(self):
files = {
'foo/__init__.py': '',
'foo/bar/__init__.py': '',
'foo/bar/pass_test.py': PASS_TEST_PY
}
self.check(['foo/bar'], files=files, ret=0, err='',
out=d("""\
[1/1] foo.bar.pass_test.PassingTest.test_pass passed
1 test run, 0 failures.
"""))
def test_timing(self):
self.check(['-t'], files=PASS_TEST_FILES, ret=0, err='',
rout=(r'\[1/1\] pass_test.PassingTest.test_pass passed '
r'\d+.\d+s\n'
r'1 test run in \d+.\d+s, 0 failures.'))
def test_test_results_server(self):
server = test_result_server_fake.start()
self.assertNotEqual(server, None, 'could not start fake server')
try:
self.check(['--test-results-server',
'%s:%d' % server.server_address,
'--master-name', 'fake_master',
'--builder-name', 'fake_builder',
'--test-type', 'typ_tests',
'--metadata', 'foo=bar'],
files=PASS_TEST_FILES, ret=0, err='',
out=('[1/1] pass_test.PassingTest.test_pass passed\n'
'1 test run, 0 failures.\n'))
finally:
posts = server.stop()
self.assertEqual(len(posts), 1)
payload = posts[0][2].decode('utf8')
self.assertIn('"test_pass": {"expected": "PASS", "actual": "PASS"}',
payload)
self.assertTrue(payload.endswith('--\r\n'))
self.assertNotEqual(server.log.getvalue(), '')
def test_test_results_server_error(self):
server = test_result_server_fake.start(code=500)
self.assertNotEqual(server, None, 'could not start fake server')
try:
self.check(['--test-results-server',
'%s:%d' % server.server_address,
'--master-name', 'fake_master',
'--builder-name', 'fake_builder',
'--test-type', 'typ_tests',
'--metadata', 'foo=bar'],
files=PASS_TEST_FILES, ret=1, err='',
out=('[1/1] pass_test.PassingTest.test_pass passed\n'
'1 test run, 0 failures.\n'
'Uploading the JSON results raised '
'"HTTP Error 500: Internal Server Error"\n'))
finally:
_ = server.stop()
def test_test_results_server_not_running(self):
self.check(['--test-results-server', 'localhost:99999',
'--master-name', 'fake_master',
'--builder-name', 'fake_builder',
'--test-type', 'typ_tests',
'--metadata', 'foo=bar'],
files=PASS_TEST_FILES, ret=1, err='',
rout=(r'\[1/1\] pass_test.PassingTest.test_pass passed\n'
'1 test run, 0 failures.\n'
'Uploading the JSON results raised .*\n'))
def test_verbose_2(self):
self.check(['-vv', '-j', '1', 'output_test.PassTest'],
files=OUTPUT_TEST_FILES, ret=0,
out=d("""\
[1/2] output_test.PassTest.test_err passed:
hello on stderr
[2/2] output_test.PassTest.test_out passed:
hello on stdout
2 tests run, 0 failures.
"""), err='')
def test_verbose_3(self):
self.check(['-vvv', '-j', '1', 'output_test.PassTest'],
files=OUTPUT_TEST_FILES, ret=0,
out=d("""\
[0/2] output_test.PassTest.test_err queued
[1/2] output_test.PassTest.test_err passed:
hello on stderr
[1/2] output_test.PassTest.test_out queued
[2/2] output_test.PassTest.test_out passed:
hello on stdout
2 tests run, 0 failures.
"""), err='')
def test_version(self):
self.check('--version', ret=0, out=(VERSION + '\n'))
def test_write_full_results_to(self):
_, _, _, files = self.check(['--write-full-results-to',
'results.json'], files=PASS_TEST_FILES)
self.assertIn('results.json', files)
results = json.loads(files['results.json'])
self.assertEqual(results['interrupted'], False)
self.assertEqual(results['path_delimiter'], '.')
self.assertEqual(results['tests'],
{u'pass_test': {
u'PassingTest': {
u'test_pass': {
u'actual': u'PASS',
u'expected': u'PASS',
}
}
}})
def test_write_trace_to(self):
_, _, _, files = self.check(['--write-trace-to', 'trace.json'],
files=PASS_TEST_FILES)
self.assertIn('trace.json', files)
trace_obj = json.loads(files['trace.json'])
self.assertEqual(trace_obj['otherData'], {})
self.assertEqual(len(trace_obj['traceEvents']), 5)
event = trace_obj['traceEvents'][0]
self.assertEqual(event['name'], 'pass_test.PassingTest.test_pass')
self.assertEqual(event['ph'], 'X')
self.assertEqual(event['tid'], 1)
self.assertEqual(event['args']['expected'], ['Pass'])
self.assertEqual(event['args']['actual'], 'Pass')
class TestMain(TestCli):
prog = []
def make_host(self):
return Host()
def call(self, host, argv, stdin, env):
stdin = unicode(stdin)
host.stdin = io.StringIO(stdin)
if env:
host.getenv = env.get
host.capture_output()
orig_sys_path = sys.path[:]
orig_sys_modules = list(sys.modules.keys())
try:
ret = main(argv + ['-j', '1'], host)
finally:
out, err = host.restore_output()
modules_to_unload = []
for k in sys.modules:
if k not in orig_sys_modules:
modules_to_unload.append(k)
for k in modules_to_unload:
del sys.modules[k]
sys.path = orig_sys_path
return ret, out, err
def test_debugger(self):
# TODO: this test seems to hang under coverage.
pass
| bsd-3-clause |
JackonYang/shadowsocks | shadowsocks/daemon.py | 694 | 5602 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2015 clowwindy
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import, division, print_function, \
with_statement
import os
import sys
import logging
import signal
import time
from shadowsocks import common, shell
# this module is ported from ShadowVPN daemon.c
def daemon_exec(config):
if 'daemon' in config:
if os.name != 'posix':
raise Exception('daemon mode is only supported on Unix')
command = config['daemon']
if not command:
command = 'start'
pid_file = config['pid-file']
log_file = config['log-file']
if command == 'start':
daemon_start(pid_file, log_file)
elif command == 'stop':
daemon_stop(pid_file)
# always exit after daemon_stop
sys.exit(0)
elif command == 'restart':
daemon_stop(pid_file)
daemon_start(pid_file, log_file)
else:
raise Exception('unsupported daemon command %s' % command)
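# Hedged usage sketch (not part of the original module; the config keys are
# inferred from daemon_exec() above, the paths are illustrative, and daemon
# mode only works on POSIX systems):
#
#     daemon_exec({
#         'daemon': 'start',
#         'pid-file': '/var/run/shadowsocks.pid',
#         'log-file': '/var/log/shadowsocks.log',
#     })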
def write_pid_file(pid_file, pid):
import fcntl
import stat
try:
fd = os.open(pid_file, os.O_RDWR | os.O_CREAT,
stat.S_IRUSR | stat.S_IWUSR)
except OSError as e:
shell.print_exception(e)
return -1
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
assert flags != -1
flags |= fcntl.FD_CLOEXEC
r = fcntl.fcntl(fd, fcntl.F_SETFD, flags)
assert r != -1
# There is no platform independent way to implement fcntl(fd, F_SETLK, &fl)
# via fcntl.fcntl. So use lockf instead
try:
fcntl.lockf(fd, fcntl.LOCK_EX | fcntl.LOCK_NB, 0, 0, os.SEEK_SET)
except IOError:
r = os.read(fd, 32)
if r:
logging.error('already started at pid %s' % common.to_str(r))
else:
logging.error('already started')
os.close(fd)
return -1
os.ftruncate(fd, 0)
os.write(fd, common.to_bytes(str(pid)))
return 0
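# Illustrative sketch (not part of the original module): a typical caller
# writes its own pid and treats a non-zero return as "already running";
# the pid-file path below is a hypothetical example.
def _example_write_pid_file(pid_file='/tmp/example-daemon.pid'):
    if write_pid_file(pid_file, os.getpid()) != 0:
        sys.exit(1)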
def freopen(f, mode, stream):
oldf = open(f, mode)
oldfd = oldf.fileno()
newfd = stream.fileno()
os.close(newfd)
os.dup2(oldfd, newfd)
def daemon_start(pid_file, log_file):
def handle_exit(signum, _):
if signum == signal.SIGTERM:
sys.exit(0)
sys.exit(1)
signal.signal(signal.SIGINT, handle_exit)
signal.signal(signal.SIGTERM, handle_exit)
# fork only once because we are sure parent will exit
pid = os.fork()
assert pid != -1
if pid > 0:
# parent waits for its child
time.sleep(5)
sys.exit(0)
# child signals its parent to exit
ppid = os.getppid()
pid = os.getpid()
if write_pid_file(pid_file, pid) != 0:
os.kill(ppid, signal.SIGINT)
sys.exit(1)
os.setsid()
signal.signal(signal.SIGHUP, signal.SIG_IGN)
print('started')
os.kill(ppid, signal.SIGTERM)
sys.stdin.close()
try:
freopen(log_file, 'a', sys.stdout)
freopen(log_file, 'a', sys.stderr)
except IOError as e:
shell.print_exception(e)
sys.exit(1)
def daemon_stop(pid_file):
import errno
try:
        with open(pid_file) as f:
            buf = f.read()
            pid = common.to_str(buf)
            if not buf:
                # an empty pid file means the daemon is not running;
                # return before int(pid) below raises ValueError
                logging.error('not running')
                return
except IOError as e:
shell.print_exception(e)
if e.errno == errno.ENOENT:
# always exit 0 if we are sure daemon is not running
logging.error('not running')
return
sys.exit(1)
pid = int(pid)
if pid > 0:
try:
os.kill(pid, signal.SIGTERM)
except OSError as e:
if e.errno == errno.ESRCH:
logging.error('not running')
# always exit 0 if we are sure daemon is not running
return
shell.print_exception(e)
sys.exit(1)
else:
logging.error('pid is not positive: %d', pid)
# sleep for maximum 10s
for i in range(0, 200):
try:
# query for the pid
os.kill(pid, 0)
except OSError as e:
if e.errno == errno.ESRCH:
break
time.sleep(0.05)
else:
logging.error('timed out when stopping pid %d', pid)
sys.exit(1)
print('stopped')
os.unlink(pid_file)
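# Illustrative sketch (not in the original source): daemon_exec() reads these
# three keys from its config dict; the values shown are hypothetical examples.
def _example_daemon_config():
    return {
        'daemon': 'start',  # or 'stop' / 'restart'
        'pid-file': '/var/run/example.pid',
        'log-file': '/var/log/example.log',
    }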
def set_user(username):
if username is None:
return
import pwd
import grp
try:
pwrec = pwd.getpwnam(username)
except KeyError:
logging.error('user not found: %s' % username)
raise
user = pwrec[0]
uid = pwrec[2]
gid = pwrec[3]
cur_uid = os.getuid()
if uid == cur_uid:
return
if cur_uid != 0:
logging.error('can not set user as nonroot user')
# will raise later
# inspired by supervisor
if hasattr(os, 'setgroups'):
groups = [grprec[2] for grprec in grp.getgrall() if user in grprec[3]]
groups.insert(0, gid)
os.setgroups(groups)
os.setgid(gid)
os.setuid(uid)
| apache-2.0 |
uzh/vm-mad | vmmad/test/test_util.py | 1 | 1527 | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""
Run tests for the `vmmad.util` module.
"""
# Copyright (C) 2011, 2012 ETH Zurich and University of Zurich. All rights reserved.
#
# Authors:
# Riccardo Murri <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
__docformat__ = 'reStructuredText'
# stdlib imports
import unittest
# local imports
import vmmad.util
class TestRandomPassword(unittest.TestCase):
def test_random_password(self):
r1 = vmmad.util.random_password()
r2 = vmmad.util.random_password()
self.assertNotEqual(r1, r2)
def test_random_password_length(self):
r1 = vmmad.util.random_password()
self.assertEqual(len(r1), 24)
def test_random_password_letters(self):
letters = 'abcde'
r1 = vmmad.util.random_password(letters=letters)
used_letters = set(r1)
self.assertTrue(used_letters.issubset(letters))
## main: run tests
if __name__ == "__main__":
# tests defined here
unittest.main()
| apache-2.0 |
pmarques/ansible | test/lib/ansible_test/_internal/commands/sanity/pslint.py | 13 | 3265 | """Sanity test using PSScriptAnalyzer."""
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import os
import re
from ... import types as t
from . import (
SanityVersionNeutral,
SanityMessage,
SanityFailure,
SanitySuccess,
SanitySkipped,
SANITY_ROOT,
)
from ...target import (
TestTarget,
)
from ...util import (
SubprocessError,
find_executable,
ANSIBLE_TEST_DATA_ROOT,
)
from ...util_common import (
run_command,
)
from ...config import (
SanityConfig,
)
from ...data import (
data_context,
)
class PslintTest(SanityVersionNeutral):
"""Sanity test using PSScriptAnalyzer."""
@property
def error_code(self): # type: () -> t.Optional[str]
"""Error code for ansible-test matching the format used by the underlying test program, or None if the program does not use error codes."""
return 'AnsibleTest'
def filter_targets(self, targets): # type: (t.List[TestTarget]) -> t.List[TestTarget]
"""Return the given list of test targets, filtered to include only those relevant for the test."""
return [target for target in targets if os.path.splitext(target.path)[1] in ('.ps1', '.psm1', '.psd1')]
def test(self, args, targets):
"""
:type args: SanityConfig
:type targets: SanityTargets
:rtype: TestResult
"""
settings = self.load_processor(args)
paths = [target.path for target in targets.include]
if not find_executable('pwsh', required='warning'):
return SanitySkipped(self.name)
cmds = []
if args.requirements:
cmds.append([os.path.join(ANSIBLE_TEST_DATA_ROOT, 'requirements', 'sanity.ps1')])
cmds.append([os.path.join(SANITY_ROOT, 'pslint', 'pslint.ps1')] + paths)
stdout = ''
for cmd in cmds:
try:
stdout, stderr = run_command(args, cmd, capture=True)
status = 0
except SubprocessError as ex:
stdout = ex.stdout
stderr = ex.stderr
status = ex.status
if stderr:
raise SubprocessError(cmd=cmd, status=status, stderr=stderr, stdout=stdout)
if args.explain:
return SanitySuccess(self.name)
severity = [
'Information',
'Warning',
'Error',
'ParseError',
]
cwd = data_context().content.root + '/'
# replace unicode smart quotes and ellipsis with ascii versions
stdout = re.sub(u'[\u2018\u2019]', "'", stdout)
stdout = re.sub(u'[\u201c\u201d]', '"', stdout)
stdout = re.sub(u'[\u2026]', '...', stdout)
messages = json.loads(stdout)
errors = [SanityMessage(
code=m['RuleName'],
message=m['Message'],
path=m['ScriptPath'].replace(cwd, ''),
line=m['Line'] or 0,
column=m['Column'] or 0,
level=severity[m['Severity']],
) for m in messages]
errors = settings.process_errors(errors, paths)
if errors:
return SanityFailure(self.name, messages=errors)
return SanitySuccess(self.name)
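# Illustrative sketch (not in the original file): the same smart-quote
# normalisation the test applies before json.loads(), shown on a
# hypothetical sample string.
def _example_normalize_quotes(stdout=u'\u201cbad\u201d \u2018quote\u2019\u2026'):
    stdout = re.sub(u'[\u2018\u2019]', "'", stdout)
    stdout = re.sub(u'[\u201c\u201d]', '"', stdout)
    stdout = re.sub(u'[\u2026]', '...', stdout)
    return stdout  # -> "bad" 'quote'...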
| gpl-3.0 |
looooo/paraBEM | examples/plots/lifting_line.py | 1 | 1404 | from __future__ import division
import numpy as np
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import paraBEM
from paraBEM.liftingline import LiftingLine
from paraBEM.utils import check_path
# WingGeometry
spw = 2
numpos = 50
z_fac_1 = -0.3
z_fac_2 = -0.7
y = np.sin(np.linspace(0, np.pi/2, numpos)) * spw/2
x = [0. for _ in y]
z = [i**2 * z_fac_1 + i**6 * z_fac_2 for i in y]
mirror = lambda xyz: [xyz[0], -xyz[1], xyz[2]]
wing = list(zip(x, y, z))
wing = list(map(mirror, wing))[::-1] + list(wing)[1:]
wing = [paraBEM.Vector3(*i) for i in wing]
# LiftingLine
lifting_line = LiftingLine(wing)
lifting_line.v_inf = paraBEM.Vector3(1, 0, 0)
lifting_line.solve_for_best_gamma(1)
gamma = [i.best_gamma for i in lifting_line.segments]
gamma_max = max(gamma)
# Plot
gamma_el = lambda y: gamma_max * (1 - (y / spw * 2)**2)**(1 / 2)
mids = [[i.mids.x, i.mids.y, i.mids.z] for i in lifting_line.segments]
x, y, z = zip(*mids)
fig = plt.figure()
ax1 = fig.add_subplot(3, 1, 1)
ax1.plot(y, z)
ax2 = fig.add_subplot(3, 1, 2)
ax2.plot(y, x, marker="x")
ax3 = fig.add_subplot(3, 1, 3)
y_el = np.linspace(-1, 1, 400)
ax3.plot([-spw/2] + list(y) + [spw/2], [0] + gamma + [0], marker="x")
ax3.plot(y_el, list(map(gamma_el, y_el)))
plt.savefig(check_path("results/2d/liftingline.png"))
total = 0
for i in lifting_line.segments:
total += i.lift_factor * i.best_gamma
print(total)
| gpl-3.0 |
glebb/mopidy-appletv | mopidy_appletv/frontend.py | 1 | 6287 | # -*- coding: utf-8 -*-
import pykka
import socket
import select
import sys
import pybonjour
import time
from mopidy import core, utils
from threading import Thread
import netifaces
import logging
import traceback
logger = logging.getLogger(__name__)
class AppleTvFrontend(pykka.ThreadingActor, core.CoreListener):
def __init__(self, config, core):
logger.info('appletv init')
super(AppleTvFrontend, self).__init__()
self.core = core
self.socket = None
self.running = False
self.public_ip = netifaces.ifaddresses('wlan0')[netifaces.AF_INET][0]['addr']
self.resolved = []
self.queried = []
self.host = None
self.timeout = 5
self._setup_appletv()
self.socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self._connect_socket()
def _setup_appletv(self):
regtype = "_airplay._tcp"
browse_sdRef = pybonjour.DNSServiceBrowse(regtype = regtype,
callBack = self._browse_callback)
try:
try:
while not self.host:
ready = select.select([browse_sdRef], [], [])
if browse_sdRef in ready[0]:
pybonjour.DNSServiceProcessResult(browse_sdRef)
except KeyboardInterrupt:
pass
finally:
browse_sdRef.close()
# Gets the IP from selected device
def _query_record_callback(self, sdRef, flags, interfaceIndex, errorCode, fullname, rrtype, rrclass, rdata, ttl):
if errorCode == pybonjour.kDNSServiceErr_NoError:
self.host.ip = socket.inet_ntoa(rdata)
self.queried.append(True)
def _resolve_callback(self, sdRef, flags, interfaceIndex, errorCode, fullname,
hosttarget, port, txtRecord):
if errorCode == pybonjour.kDNSServiceErr_NoError:
print 'Resolved service:'
print ' fullname =', fullname
print ' hosttarget =', hosttarget
print ' port =', port
self.host = AirPlayDevice(interfaceIndex, fullname, hosttarget, port)
self.resolved.append(True)
def _browse_callback(self, sdRef, flags, interfaceIndex, errorCode, serviceName,
regtype, replyDomain):
print "browse callback"
if errorCode != pybonjour.kDNSServiceErr_NoError:
return
if not (flags & pybonjour.kDNSServiceFlagsAdd):
print 'Service removed'
return
print 'Service added; resolving'
resolve_sdRef = pybonjour.DNSServiceResolve(0,
interfaceIndex,
serviceName,
regtype,
replyDomain,
self._resolve_callback)
try:
while not self.resolved:
ready = select.select([resolve_sdRef], [], [], self.timeout)
if resolve_sdRef not in ready[0]:
print 'Resolve timed out'
break
pybonjour.DNSServiceProcessResult(resolve_sdRef)
else:
self.resolved.pop()
finally:
resolve_sdRef.close()
####
query_sdRef = pybonjour.DNSServiceQueryRecord(interfaceIndex = self.host.interfaceIndex,
fullname = self.host.hosttarget,
rrtype = pybonjour.kDNSServiceType_A,
callBack = self._query_record_callback)
try:
while not self.queried:
ready = select.select([query_sdRef], [], [], self.timeout)
if query_sdRef not in ready[0]:
print "Query not in record"
break
pybonjour.DNSServiceProcessResult(query_sdRef)
else:
self.queried.pop()
finally:
query_sdRef.close()
def _post_message(self, action, uri):
#if not uri.startswith("mplayer:"):
# uri = 'http://'+self.public_ip+':8000/mopidy.mp3'
body = "Content-Location: %s\nStart-Position: 0\n\n" % (uri)
return "POST /"+action+" HTTP/1.1\n" \
"Content-Length: %d\n" \
"User-Agent: MediaControl/1.0\n\n%s" % (len(body), body)
def track_playback_started(self, tl_track):
self.socket.send(self._post_message("play", tl_track.track.uri))
def track_playback_resumed(self, tl_track, time_position):
self.socket.send(self._post_message("rate?value=1.000000", tl_track.track.uri))
def track_playback_paused(self, tl_track, time_position):
self.socket.send(self._post_message("rate?value=0.000000", tl_track.track.uri))
def track_playback_ended(self, tl_track, time_position):
pass
#self.socket.send(self._post_message("stop"))
def _connect_socket(self):
self.socket.connect((self.host.ip, self.host.port))
def start_thread(self):
while self.running:
try:
self.socket.send("\0")
except:
logger.info("Connection to AppleTv lost. Trying to reconnect")
self._connect_socket()
time.sleep(2)
utils.process.exit_process()
def on_start(self):
try:
self.running = True
thread = Thread(target=self.start_thread)
thread.daemon = True
thread.start()
except:
traceback.print_exc()
def on_stop(self):
self.running = False
class AirPlayDevice:
def __init__(self, interfaceIndex, fullname, hosttarget, port):
self.interfaceIndex = interfaceIndex
self.fullname = fullname
self.hosttarget = hosttarget
        self.port = port
self.displayname = hosttarget.replace(".local.", "")
self.ip = 0
| apache-2.0 |
nclsHart/glances | glances/core/glances_client.py | 1 | 8024 | # -*- coding: utf-8 -*-
#
# This file is part of Glances.
#
# Copyright (C) 2015 Nicolargo <[email protected]>
#
# Glances is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Glances is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Manage the Glances client."""
# Import system libs
import json
import socket
import sys
try:
from xmlrpc.client import Transport, ServerProxy, ProtocolError, Fault
except ImportError:
# Python 2
from xmlrpclib import Transport, ServerProxy, ProtocolError, Fault
try:
import http.client as httplib
except ImportError:
# Python 2
import httplib
# Import Glances libs
from glances.core.glances_globals import version
from glances.core.glances_logging import logger
from glances.core.glances_stats import GlancesStatsClient
from glances.outputs.glances_curses import GlancesCursesClient
class GlancesClientTransport(Transport):
"""This class overwrite the default XML-RPC transport and manage timeout"""
def set_timeout(self, timeout):
self.timeout = timeout
class GlancesClient(object):
"""This class creates and manages the TCP client."""
def __init__(self, config=None, args=None, timeout=7, return_to_browser=False):
# Store the arg/config
self.args = args
self.config = config
# Client mode:
self.set_mode()
# Return to browser or exit
self.return_to_browser = return_to_browser
# Build the URI
if args.password != "":
uri = 'http://{0}:{1}@{2}:{3}'.format(args.username, args.password,
args.client, args.port)
else:
uri = 'http://{0}:{1}'.format(args.client, args.port)
logger.debug("Try to connect to {0}".format(uri))
# Try to connect to the URI
transport = GlancesClientTransport()
# Configure the server timeout
transport.set_timeout(timeout)
try:
self.client = ServerProxy(uri, transport=transport)
except Exception as e:
self.log_and_exit("Client couldn't create socket {0}: {1}".format(uri, e))
def log_and_exit(self, msg=''):
"""Log and (exit)"""
if not self.return_to_browser:
logger.critical(msg)
sys.exit(2)
else:
logger.error(msg)
def set_mode(self, mode='glances'):
"""Set the client mode.
- 'glances' = Glances server (default)
- 'snmp' = SNMP (fallback)
"""
self.mode = mode
return self.mode
def get_mode(self):
"""Get the client mode.
- 'glances' = Glances server (default)
- 'snmp' = SNMP (fallback)
"""
return self.mode
def login(self):
"""Logon to the server."""
ret = True
if not self.args.snmp_force:
# First of all, trying to connect to a Glances server
self.set_mode('glances')
client_version = None
try:
client_version = self.client.init()
except socket.error as err:
# Fallback to SNMP
logger.error("Connection to Glances server failed (%s)" % err)
self.set_mode('snmp')
fallbackmsg = _("Trying fallback to SNMP...")
if not self.return_to_browser:
print(fallbackmsg)
else:
logger.info(fallbackmsg)
except ProtocolError as err:
# Others errors
if str(err).find(" 401 ") > 0:
msg = "Connection to server failed (bad password)"
else:
msg = "Connection to server failed ({0})".format(err)
self.log_and_exit(msg)
return False
if self.get_mode() == 'glances' and version.split('.')[0] == client_version.split('.')[0]:
# Init stats
self.stats = GlancesStatsClient(config=self.config, args=self.args)
self.stats.set_plugins(json.loads(self.client.getAllPlugins()))
logger.debug(
"Client version: %s / Server version: %s" % (version, client_version))
elif self.get_mode() == 'glances':
self.log_and_exit("Client and server not compatible: Client version: %s / Server version: %s" % (version, client_version))
return False
else:
self.set_mode('snmp')
if self.get_mode() == 'snmp':
logger.info("Trying to grab stats by SNMP...")
# Fallback to SNMP if needed
from glances.core.glances_stats import GlancesStatsClientSNMP
# Init stats
self.stats = GlancesStatsClientSNMP(config=self.config, args=self.args)
if not self.stats.check_snmp():
self.log_and_exit("Connection to SNMP server failed")
return False
if ret:
# Load limits from the configuration file
# Each client can choose its owns limits
self.stats.load_limits(self.config)
# Init screen
self.screen = GlancesCursesClient(args=self.args)
# Return result
return ret
def update(self):
"""Update stats from Glances/SNMP server."""
if self.get_mode() == 'glances':
return self.update_glances()
elif self.get_mode() == 'snmp':
return self.update_snmp()
else:
self.end()
logger.critical("Unknown server mode: {0}".format(self.get_mode()))
sys.exit(2)
def update_glances(self):
"""Get stats from Glances server.
Return the client/server connection status:
- Connected: Connection OK
- Disconnected: Connection NOK
"""
# Update the stats
try:
server_stats = json.loads(self.client.getAll())
server_stats['monitor'] = json.loads(self.client.getAllMonitored())
except socket.error:
# Client cannot get server stats
return "Disconnected"
except Fault:
# Client cannot get server stats (issue #375)
return "Disconnected"
else:
# Put it in the internal dict
self.stats.update(server_stats)
return "Connected"
def update_snmp(self):
"""Get stats from SNMP server.
Return the client/server connection status:
- SNMP: Connection with SNMP server OK
- Disconnected: Connection NOK
"""
# Update the stats
try:
self.stats.update()
except Exception:
# Client cannot get SNMP server stats
return "Disconnected"
else:
# Grab success
return "SNMP"
def serve_forever(self):
"""Main client loop."""
exitkey = False
        while not exitkey:
# Update the stats
cs_status = self.update()
# Update the screen
exitkey = self.screen.update(self.stats,
cs_status=cs_status,
return_to_browser=self.return_to_browser)
# Export stats using export modules
self.stats.export(self.stats)
return self.get_mode()
def end(self):
"""End of the client session."""
self.screen.end()
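# Illustrative sketch (not in the original module): minimal driving loop for a
# GlancesClient instance constructed elsewhere with real args and config.
def _example_client_session(client):
    if client.login():
        try:
            client.serve_forever()
        finally:
            client.end()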
| lgpl-3.0 |
szlin/gitsome | gitsome/compat.py | 1 | 1032 | # -*- coding: utf-8 -*-
# Copyright 2015 Donne Martin. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You
# may not use this file except in compliance with the License. A copy of
# the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
# ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import sys
import urllib
try:
# Python 3
import configparser
from urllib.parse import urlparse
from urllib.request import urlretrieve
from urllib.error import URLError
except ImportError:
# Python 2
import ConfigParser as configparser
from urlparse import urlparse
from urllib import urlretrieve
from urllib2 import URLError
if sys.version_info < (3, 3):
import HTMLParser
else:
import html as HTMLParser
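# Illustrative sketch (not in the original module): the shimmed names above
# resolve identically under Python 2 and 3; the URL is a hypothetical example.
def _example_netloc(url='http://example.com/path'):
    return urlparse(url).netloc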
| gpl-3.0 |
jandecaluwe/myhdl | myhdl/test/conversion/general/test_hec.py | 4 | 3708 | import os
path = os.path
from random import randrange
import myhdl
from myhdl import *
COSET = 0x55
def calculateHecRef(header):
""" Return hec for an ATM header.
Reference version.
The hec polynomial is 1 + x + x**2 + x**8.
"""
hec = intbv(0)
for bit in header[32:]:
hec[8:] = concat(hec[7:2],
bit ^ hec[1] ^ hec[7],
bit ^ hec[0] ^ hec[7],
bit ^ hec[7]
)
return hec ^ COSET
def calculateHecFunc(header):
""" Return hec for an ATM header.
Translatable version.
The hec polynomial is 1 + x + x**2 + x**8.
"""
h = intbv(0)[8:]
for i in downrange(len(header)):
bit = header[i]
h[:] = concat(h[7:2],
bit ^ h[1] ^ h[7],
bit ^ h[0] ^ h[7],
bit ^ h[7]
)
h ^= COSET
return h
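# Illustrative check (not in the original file): the reference and the
# translatable versions should agree on any 32-bit header; the sample
# value below is a hypothetical example.
def calculateHecAgree(header_value=0x01234567):
    header = intbv(header_value)[32:]
    return calculateHecRef(header) == calculateHecFunc(header)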
def calculateHecTask(hec, header):
""" Calculate hec for an ATM header.
Translatable version.
The hec polynomial is 1 + x + x**2 + x**8.
"""
h = intbv(0)[8:]
for i in downrange(len(header)):
bit = header[i]
h[:] = concat(h[7:2],
bit ^ h[1] ^ h[7],
bit ^ h[0] ^ h[7],
bit ^ h[7]
)
h ^= COSET
hec[:] = h
@block
def HecCalculatorPlain(hec, header):
""" Hec calculation module.
Plain version.
"""
@instance
def logic():
h = intbv(0)[8:]
while 1:
yield header
h[:] = 0
for i in downrange(len(header)):
bit = header[i]
h[:] = concat(h[7:2],
bit ^ h[1] ^ h[7],
bit ^ h[0] ^ h[7],
bit ^ h[7]
)
hec.next = h ^ COSET
return logic
def HecCalculatorFunc(hec, header):
""" Hec calculation module.
Version with function call.
"""
h = intbv(0)[8:]
while 1:
yield header
hec.next = calculateHecFunc(header=header)
def HecCalculatorTask(hec, header):
""" Hec calculation module.
Version with task call.
"""
h = intbv(0)[8:]
while 1:
yield header
calculateHecTask(h, header)
hec.next = h
def HecCalculatorTask2(hec, header):
""" Hec calculation module.
Version with task call.
"""
h = intbv(0)[8:]
while 1:
yield header
calculateHecTask(header=header, hec=h)
hec.next = h
def HecCalculator_v(name, hec, header):
return setupCosimulation(**locals())
headers = [ 0x00000000,
0x01234567,
0xbac6f4ca
]
headers.extend([randrange(2**32-1) for i in range(10)])
headers = tuple(headers)
@block
def HecBench(HecCalculator):
hec = Signal(intbv(0)[8:])
hec_v = Signal(intbv(0)[8:])
header = Signal(intbv(-1)[32:])
heccalc_inst = HecCalculator(hec, header)
@instance
def stimulus():
for i in range(len(headers)):
header.next = headers[i]
yield delay(10)
print(hec)
return stimulus, heccalc_inst
## def testPlain(self):
## sim = self.bench(HecCalculatorPlain)
## Simulation(sim).run()
## def testFunc(self):
## sim = self.bench(HecCalculatorFunc)
## Simulation(sim).run()
## def testTask(self):
## sim = self.bench(HecCalculatorTask)
## Simulation(sim).run()
## def testTask2(self):
## sim = self.bench(HecCalculatorTask2)
## Simulation(sim).run()
def testPlain():
assert HecBench(HecCalculatorPlain).verify_convert() == 0
| lgpl-2.1 |
sanjaymeena/ProgrammingProblems | python/CZITest/CZI_Test_Problem1.py | 1 | 2325 |
# coding: utf-8
# ``
# Task 1: Identifying common words between documents
# For this task, you need to generate a matrix, where each entry contains the number of unique
# common tokens (words) between each pair of documents. The output should include no headers for
# rows or columns. The matrix should be symmetric, and follow the numbering conventions of the files.
# The diagonal entries would be the count of unique terms in each document.
# For example, the first number on the first line is the number of unique terms in 001.txt, the second
# number on the second line is the number of unique terms in 002.txt, etc.
# Similarly, the first element on the second line would be the number of unique terms that appear in
# both 001.txt and 002.txt, the 23rd number on the 16th line is the number of unique common terms
# between 023.txt and 016.txt, etc.
# ``
# In[52]:
import collections
import re
import sys
import glob
import os
import numpy as np
np.set_printoptions(linewidth=120)
# In[53]:
def tokenize(string):
return re.findall(r'\w+',string.lower())
# In[54]:
data_folder='./data/'
content_dict = {}
total_content=[]
for content in os.listdir(data_folder):  # iterate over the document files in the data folder
filepath=data_folder+ content
lines=[]
with open(filepath) as f:
lines=f.readlines()
f.close()
#print lines
#print content
#print len(lines)
tokenized=[]
for line in lines :
words=tokenize(line)
string=' '.join(w for w in words)
tokenized.append(string)
tokenset=set(tokenized)
string=' '.join(token for token in tokenset)
f=int(content.replace('.txt', ''))
#print f
content_dict[f]=string
# In[55]:
# w1=content_dict[20].split()
# w2=content_dict[21].split()
# intersection = set(w1) & set(w2)
# print len(intersection)
# In[56]:
rows, columns = 100, 100
matrix = [[0 for x in range(rows)] for y in range(columns)]
for i in range(0,rows):
for j in range(0,columns):
w1=content_dict[i+1].split()
w2=content_dict[j+1].split()
intersection = set(w1) & set(w2)
matrix[i][j]=len(intersection)
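# Sketch (not in the original notebook): precomputing the token sets once
# avoids re-splitting every document inside the double loop above; it builds
# the same matrix.
def build_matrix_fast():
    token_sets = {k: set(v.split()) for k, v in content_dict.items()}
    return [[len(token_sets[i + 1] & token_sets[j + 1])
             for j in range(columns)] for i in range(rows)]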
# In[58]:
np.set_printoptions(linewidth=120)
print(np.matrix(matrix))
# test case
# vals=set(content_dict[1].split())
# print len(vals)
# print matrix[0][0]
# In[ ]:
| apache-2.0 |
google/trax | trax/tf_numpy/public_symbol_test.py | 1 | 1130 | # coding=utf-8
# Copyright 2021 The Trax Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests different ways to use the public tf-numpy module."""
import numpy as onp
import tensorflow as tf
import tensorflow.experimental.numpy as np1
from tensorflow.experimental import numpy as np2 # pylint: disable=reimported
np3 = tf.experimental.numpy
class PublicSymbolTest(tf.test.TestCase):
def testSimple(self):
a = 0.1
b = 0.2
for op in [np1.add, np2.add, np3.add]:
self.assertAllClose(onp.add(a, b), op(a, b))
if __name__ == "__main__":
tf.compat.v1.enable_eager_execution()
tf.test.main()
| apache-2.0 |
jelmer/pydoctor | pydoctor/test/test_zopeinterface.py | 3 | 7725 | from pydoctor.zopeinterface import ZopeInterfaceSystem
from pydoctor.test.test_astbuilder import fromText
from pydoctor.test.test_packages import processPackage
# we set up the same situation using both implements and
# classImplements and run the same tests.
def test_implements():
src = '''
import zope.interface
class IFoo(zope.interface.Interface):
pass
class IBar(zope.interface.Interface):
pass
class Foo:
zope.interface.implements(IFoo)
class FooBar(Foo):
zope.interface.implements(IBar)
class OnlyBar(Foo):
zope.interface.implementsOnly(IBar)
'''
implements_test(src)
def test_classImplements():
src = '''
import zope.interface
class IFoo(zope.interface.Interface):
pass
class IBar(zope.interface.Interface):
pass
class Foo:
pass
class FooBar(Foo):
pass
class OnlyBar(Foo):
pass
zope.interface.classImplements(Foo, IFoo)
zope.interface.classImplements(FooBar, IBar)
zope.interface.classImplementsOnly(OnlyBar, IBar)
'''
implements_test(src)
def test_implementer():
src = '''
import zope.interface
class IFoo(zope.interface.Interface):
pass
class IBar(zope.interface.Interface):
pass
@zope.interface.implementer(IFoo)
class Foo:
pass
@zope.interface.implementer(IBar)
class FooBar(Foo):
pass
class OnlyBar(Foo):
zope.interface.implementsOnly(IBar)
'''
implements_test(src)
def implements_test(src):
mod = fromText(src, 'zi', systemcls=ZopeInterfaceSystem)
ifoo = mod.contents['IFoo']
ibar = mod.contents['IBar']
foo = mod.contents['Foo']
foobar = mod.contents['FooBar']
onlybar = mod.contents['OnlyBar']
assert ifoo.isinterface and ibar.isinterface
    assert not foo.isinterface and not foobar.isinterface and not onlybar.isinterface
assert not foo.implementsOnly and not foobar.implementsOnly
assert onlybar.implementsOnly
assert foo.implements_directly == ['zi.IFoo']
assert foo.allImplementedInterfaces == ['zi.IFoo']
assert foobar.implements_directly == ['zi.IBar']
assert foobar.allImplementedInterfaces == ['zi.IBar', 'zi.IFoo']
assert onlybar.implements_directly == ['zi.IBar']
assert onlybar.allImplementedInterfaces == ['zi.IBar']
assert ifoo.implementedby_directly == [foo]
assert ifoo.allImplementations == [foo, foobar]
assert ibar.implementedby_directly == [foobar, onlybar]
assert ibar.allImplementations == [foobar, onlybar]
def test_subclass_with_same_name():
src = '''
class A:
pass
class A(A):
pass
'''
fromText(src, 'zi', systemcls=ZopeInterfaceSystem)
def test_multiply_inheriting_interfaces():
src = '''
from zope.interface import Interface, implements
class IOne(Interface): pass
class ITwo(Interface): pass
class One: implements(IOne)
class Two: implements(ITwo)
class Both(One, Two): pass
'''
mod = fromText(src, 'zi', systemcls=ZopeInterfaceSystem)
assert len(mod.contents['Both'].allImplementedInterfaces) == 2
def test_attribute():
src = '''
import zope.interface as zi
class C:
attr = zi.Attribute("docstring")
'''
mod = fromText(src, systemcls=ZopeInterfaceSystem)
assert len(mod.contents['C'].contents) == 1
def test_interfaceclass():
system = processPackage('interfaceclass', systemcls=ZopeInterfaceSystem)
mod = system.allobjects['interfaceclass.mod']
assert mod.contents['AnInterface'].isinterface
def test_warnerproofing():
src = '''
from zope import interface
Interface = interface.Interface
class IMyInterface(Interface):
pass
'''
mod = fromText(src, systemcls=ZopeInterfaceSystem)
assert mod.contents['IMyInterface'].isinterface
def test_zopeschema():
src = '''
from zope import schema, interface
class IMyInterface(interface.Interface):
text = schema.TextLine(description="fun in a bun")
'''
mod = fromText(src, systemcls=ZopeInterfaceSystem)
text = mod.contents['IMyInterface'].contents['text']
assert text.docstring == 'fun in a bun'
assert text.kind == "TextLine"
def test_with_underscore():
src = '''
from zope import schema, interface
class IMyInterface(interface.Interface):
attribute = interface.Attribute(_("fun in a bun"))
text = schema.TextLine(description=_("fun in a bap"))
'''
mod = fromText(src, systemcls=ZopeInterfaceSystem)
text = mod.contents['IMyInterface'].contents['attribute']
assert text.docstring == 'fun in a bun'
assert text.kind == "Attribute"
text = mod.contents['IMyInterface'].contents['text']
assert text.docstring == 'fun in a bap'
assert text.kind == "TextLine"
def test_zopeschema_inheritance():
src = '''
from zope import schema, interface
from zope.schema import Int as INTEGERSCHMEMAFIELD
class MyTextLine(schema.TextLine):
pass
class MyOtherTextLine(MyTextLine):
pass
class IMyInterface(interface.Interface):
mytext = MyTextLine(description="fun in a bun")
myothertext = MyOtherTextLine(description="fun in another bun")
myint = INTEGERSCHMEMAFIELD(description="not as much fun")
'''
mod = fromText(src, systemcls=ZopeInterfaceSystem)
mytext = mod.contents['IMyInterface'].contents['mytext']
assert mytext.docstring == 'fun in a bun'
assert mytext.kind == "MyTextLine"
myothertext = mod.contents['IMyInterface'].contents['myothertext']
assert myothertext.docstring == 'fun in another bun'
assert myothertext.kind == "MyOtherTextLine"
myint = mod.contents['IMyInterface'].contents['myint']
assert myint.kind == "Int"
def test_docsources_includes_interface():
src = '''
from zope import interface
class IInterface(interface.Interface):
def method(self):
"""documentation"""
class Implementation:
interface.implements(IInterface)
def method(self):
pass
'''
mod = fromText(src, systemcls=ZopeInterfaceSystem)
imethod = mod.contents['IInterface'].contents['method']
method = mod.contents['Implementation'].contents['method']
assert imethod in method.docsources(), list(method.docsources())
def test_docsources_includes_baseinterface():
src = '''
from zope import interface
class IBase(interface.Interface):
def method(self):
"""documentation"""
class IExtended(IBase):
pass
class Implementation:
interface.implements(IExtended)
def method(self):
pass
'''
mod = fromText(src, systemcls=ZopeInterfaceSystem)
imethod = mod.contents['IBase'].contents['method']
method = mod.contents['Implementation'].contents['method']
assert imethod in method.docsources(), list(method.docsources())
def test_docsources_from_moduleprovides():
src = '''
from zope import interface
class IBase(interface.Interface):
def bar():
"""documentation"""
interface.moduleProvides(IBase)
def bar():
pass
'''
mod = fromText(src, systemcls=ZopeInterfaceSystem)
imethod = mod.contents['IBase'].contents['bar']
function = mod.contents['bar']
assert imethod in function.docsources(), list(function.docsources())
def test_interfaceallgames():
system = processPackage('interfaceallgames', systemcls=ZopeInterfaceSystem)
mod = system.allobjects['interfaceallgames.interface']
assert [o.fullName() for o in mod.contents['IAnInterface'].allImplementations] == ['interfaceallgames.implementation.Implementation']
| isc |
christer155/PTVS | Python/Tests/TestData/VirtualEnv/env/Lib/encodings/iso8859_6.py | 93 | 11396 | """ Python Character Mapping Codec iso8859_6 generated from 'MAPPINGS/ISO8859/8859-6.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='iso8859-6',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
u'\x00' # 0x00 -> NULL
u'\x01' # 0x01 -> START OF HEADING
u'\x02' # 0x02 -> START OF TEXT
u'\x03' # 0x03 -> END OF TEXT
u'\x04' # 0x04 -> END OF TRANSMISSION
u'\x05' # 0x05 -> ENQUIRY
u'\x06' # 0x06 -> ACKNOWLEDGE
u'\x07' # 0x07 -> BELL
u'\x08' # 0x08 -> BACKSPACE
u'\t' # 0x09 -> HORIZONTAL TABULATION
u'\n' # 0x0A -> LINE FEED
u'\x0b' # 0x0B -> VERTICAL TABULATION
u'\x0c' # 0x0C -> FORM FEED
u'\r' # 0x0D -> CARRIAGE RETURN
u'\x0e' # 0x0E -> SHIFT OUT
u'\x0f' # 0x0F -> SHIFT IN
u'\x10' # 0x10 -> DATA LINK ESCAPE
u'\x11' # 0x11 -> DEVICE CONTROL ONE
u'\x12' # 0x12 -> DEVICE CONTROL TWO
u'\x13' # 0x13 -> DEVICE CONTROL THREE
u'\x14' # 0x14 -> DEVICE CONTROL FOUR
u'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
u'\x16' # 0x16 -> SYNCHRONOUS IDLE
u'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
u'\x18' # 0x18 -> CANCEL
u'\x19' # 0x19 -> END OF MEDIUM
u'\x1a' # 0x1A -> SUBSTITUTE
u'\x1b' # 0x1B -> ESCAPE
u'\x1c' # 0x1C -> FILE SEPARATOR
u'\x1d' # 0x1D -> GROUP SEPARATOR
u'\x1e' # 0x1E -> RECORD SEPARATOR
u'\x1f' # 0x1F -> UNIT SEPARATOR
u' ' # 0x20 -> SPACE
u'!' # 0x21 -> EXCLAMATION MARK
u'"' # 0x22 -> QUOTATION MARK
u'#' # 0x23 -> NUMBER SIGN
u'$' # 0x24 -> DOLLAR SIGN
u'%' # 0x25 -> PERCENT SIGN
u'&' # 0x26 -> AMPERSAND
u"'" # 0x27 -> APOSTROPHE
u'(' # 0x28 -> LEFT PARENTHESIS
u')' # 0x29 -> RIGHT PARENTHESIS
u'*' # 0x2A -> ASTERISK
u'+' # 0x2B -> PLUS SIGN
u',' # 0x2C -> COMMA
u'-' # 0x2D -> HYPHEN-MINUS
u'.' # 0x2E -> FULL STOP
u'/' # 0x2F -> SOLIDUS
u'0' # 0x30 -> DIGIT ZERO
u'1' # 0x31 -> DIGIT ONE
u'2' # 0x32 -> DIGIT TWO
u'3' # 0x33 -> DIGIT THREE
u'4' # 0x34 -> DIGIT FOUR
u'5' # 0x35 -> DIGIT FIVE
u'6' # 0x36 -> DIGIT SIX
u'7' # 0x37 -> DIGIT SEVEN
u'8' # 0x38 -> DIGIT EIGHT
u'9' # 0x39 -> DIGIT NINE
u':' # 0x3A -> COLON
u';' # 0x3B -> SEMICOLON
u'<' # 0x3C -> LESS-THAN SIGN
u'=' # 0x3D -> EQUALS SIGN
u'>' # 0x3E -> GREATER-THAN SIGN
u'?' # 0x3F -> QUESTION MARK
u'@' # 0x40 -> COMMERCIAL AT
u'A' # 0x41 -> LATIN CAPITAL LETTER A
u'B' # 0x42 -> LATIN CAPITAL LETTER B
u'C' # 0x43 -> LATIN CAPITAL LETTER C
u'D' # 0x44 -> LATIN CAPITAL LETTER D
u'E' # 0x45 -> LATIN CAPITAL LETTER E
u'F' # 0x46 -> LATIN CAPITAL LETTER F
u'G' # 0x47 -> LATIN CAPITAL LETTER G
u'H' # 0x48 -> LATIN CAPITAL LETTER H
u'I' # 0x49 -> LATIN CAPITAL LETTER I
u'J' # 0x4A -> LATIN CAPITAL LETTER J
u'K' # 0x4B -> LATIN CAPITAL LETTER K
u'L' # 0x4C -> LATIN CAPITAL LETTER L
u'M' # 0x4D -> LATIN CAPITAL LETTER M
u'N' # 0x4E -> LATIN CAPITAL LETTER N
u'O' # 0x4F -> LATIN CAPITAL LETTER O
u'P' # 0x50 -> LATIN CAPITAL LETTER P
u'Q' # 0x51 -> LATIN CAPITAL LETTER Q
u'R' # 0x52 -> LATIN CAPITAL LETTER R
u'S' # 0x53 -> LATIN CAPITAL LETTER S
u'T' # 0x54 -> LATIN CAPITAL LETTER T
u'U' # 0x55 -> LATIN CAPITAL LETTER U
u'V' # 0x56 -> LATIN CAPITAL LETTER V
u'W' # 0x57 -> LATIN CAPITAL LETTER W
u'X' # 0x58 -> LATIN CAPITAL LETTER X
u'Y' # 0x59 -> LATIN CAPITAL LETTER Y
u'Z' # 0x5A -> LATIN CAPITAL LETTER Z
u'[' # 0x5B -> LEFT SQUARE BRACKET
u'\\' # 0x5C -> REVERSE SOLIDUS
u']' # 0x5D -> RIGHT SQUARE BRACKET
u'^' # 0x5E -> CIRCUMFLEX ACCENT
u'_' # 0x5F -> LOW LINE
u'`' # 0x60 -> GRAVE ACCENT
u'a' # 0x61 -> LATIN SMALL LETTER A
u'b' # 0x62 -> LATIN SMALL LETTER B
u'c' # 0x63 -> LATIN SMALL LETTER C
u'd' # 0x64 -> LATIN SMALL LETTER D
u'e' # 0x65 -> LATIN SMALL LETTER E
u'f' # 0x66 -> LATIN SMALL LETTER F
u'g' # 0x67 -> LATIN SMALL LETTER G
u'h' # 0x68 -> LATIN SMALL LETTER H
u'i' # 0x69 -> LATIN SMALL LETTER I
u'j' # 0x6A -> LATIN SMALL LETTER J
u'k' # 0x6B -> LATIN SMALL LETTER K
u'l' # 0x6C -> LATIN SMALL LETTER L
u'm' # 0x6D -> LATIN SMALL LETTER M
u'n' # 0x6E -> LATIN SMALL LETTER N
u'o' # 0x6F -> LATIN SMALL LETTER O
u'p' # 0x70 -> LATIN SMALL LETTER P
u'q' # 0x71 -> LATIN SMALL LETTER Q
u'r' # 0x72 -> LATIN SMALL LETTER R
u's' # 0x73 -> LATIN SMALL LETTER S
u't' # 0x74 -> LATIN SMALL LETTER T
u'u' # 0x75 -> LATIN SMALL LETTER U
u'v' # 0x76 -> LATIN SMALL LETTER V
u'w' # 0x77 -> LATIN SMALL LETTER W
u'x' # 0x78 -> LATIN SMALL LETTER X
u'y' # 0x79 -> LATIN SMALL LETTER Y
u'z' # 0x7A -> LATIN SMALL LETTER Z
u'{' # 0x7B -> LEFT CURLY BRACKET
u'|' # 0x7C -> VERTICAL LINE
u'}' # 0x7D -> RIGHT CURLY BRACKET
u'~' # 0x7E -> TILDE
u'\x7f' # 0x7F -> DELETE
u'\x80' # 0x80 -> <control>
u'\x81' # 0x81 -> <control>
u'\x82' # 0x82 -> <control>
u'\x83' # 0x83 -> <control>
u'\x84' # 0x84 -> <control>
u'\x85' # 0x85 -> <control>
u'\x86' # 0x86 -> <control>
u'\x87' # 0x87 -> <control>
u'\x88' # 0x88 -> <control>
u'\x89' # 0x89 -> <control>
u'\x8a' # 0x8A -> <control>
u'\x8b' # 0x8B -> <control>
u'\x8c' # 0x8C -> <control>
u'\x8d' # 0x8D -> <control>
u'\x8e' # 0x8E -> <control>
u'\x8f' # 0x8F -> <control>
u'\x90' # 0x90 -> <control>
u'\x91' # 0x91 -> <control>
u'\x92' # 0x92 -> <control>
u'\x93' # 0x93 -> <control>
u'\x94' # 0x94 -> <control>
u'\x95' # 0x95 -> <control>
u'\x96' # 0x96 -> <control>
u'\x97' # 0x97 -> <control>
u'\x98' # 0x98 -> <control>
u'\x99' # 0x99 -> <control>
u'\x9a' # 0x9A -> <control>
u'\x9b' # 0x9B -> <control>
u'\x9c' # 0x9C -> <control>
u'\x9d' # 0x9D -> <control>
u'\x9e' # 0x9E -> <control>
u'\x9f' # 0x9F -> <control>
u'\xa0' # 0xA0 -> NO-BREAK SPACE
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\xa4' # 0xA4 -> CURRENCY SIGN
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\u060c' # 0xAC -> ARABIC COMMA
u'\xad' # 0xAD -> SOFT HYPHEN
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\u061b' # 0xBB -> ARABIC SEMICOLON
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\u061f' # 0xBF -> ARABIC QUESTION MARK
u'\ufffe'
u'\u0621' # 0xC1 -> ARABIC LETTER HAMZA
u'\u0622' # 0xC2 -> ARABIC LETTER ALEF WITH MADDA ABOVE
u'\u0623' # 0xC3 -> ARABIC LETTER ALEF WITH HAMZA ABOVE
u'\u0624' # 0xC4 -> ARABIC LETTER WAW WITH HAMZA ABOVE
u'\u0625' # 0xC5 -> ARABIC LETTER ALEF WITH HAMZA BELOW
u'\u0626' # 0xC6 -> ARABIC LETTER YEH WITH HAMZA ABOVE
u'\u0627' # 0xC7 -> ARABIC LETTER ALEF
u'\u0628' # 0xC8 -> ARABIC LETTER BEH
u'\u0629' # 0xC9 -> ARABIC LETTER TEH MARBUTA
u'\u062a' # 0xCA -> ARABIC LETTER TEH
u'\u062b' # 0xCB -> ARABIC LETTER THEH
u'\u062c' # 0xCC -> ARABIC LETTER JEEM
u'\u062d' # 0xCD -> ARABIC LETTER HAH
u'\u062e' # 0xCE -> ARABIC LETTER KHAH
u'\u062f' # 0xCF -> ARABIC LETTER DAL
u'\u0630' # 0xD0 -> ARABIC LETTER THAL
u'\u0631' # 0xD1 -> ARABIC LETTER REH
u'\u0632' # 0xD2 -> ARABIC LETTER ZAIN
u'\u0633' # 0xD3 -> ARABIC LETTER SEEN
u'\u0634' # 0xD4 -> ARABIC LETTER SHEEN
u'\u0635' # 0xD5 -> ARABIC LETTER SAD
u'\u0636' # 0xD6 -> ARABIC LETTER DAD
u'\u0637' # 0xD7 -> ARABIC LETTER TAH
u'\u0638' # 0xD8 -> ARABIC LETTER ZAH
u'\u0639' # 0xD9 -> ARABIC LETTER AIN
u'\u063a' # 0xDA -> ARABIC LETTER GHAIN
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\u0640' # 0xE0 -> ARABIC TATWEEL
u'\u0641' # 0xE1 -> ARABIC LETTER FEH
u'\u0642' # 0xE2 -> ARABIC LETTER QAF
u'\u0643' # 0xE3 -> ARABIC LETTER KAF
u'\u0644' # 0xE4 -> ARABIC LETTER LAM
u'\u0645' # 0xE5 -> ARABIC LETTER MEEM
u'\u0646' # 0xE6 -> ARABIC LETTER NOON
u'\u0647' # 0xE7 -> ARABIC LETTER HEH
u'\u0648' # 0xE8 -> ARABIC LETTER WAW
u'\u0649' # 0xE9 -> ARABIC LETTER ALEF MAKSURA
u'\u064a' # 0xEA -> ARABIC LETTER YEH
u'\u064b' # 0xEB -> ARABIC FATHATAN
u'\u064c' # 0xEC -> ARABIC DAMMATAN
u'\u064d' # 0xED -> ARABIC KASRATAN
u'\u064e' # 0xEE -> ARABIC FATHA
u'\u064f' # 0xEF -> ARABIC DAMMA
u'\u0650' # 0xF0 -> ARABIC KASRA
u'\u0651' # 0xF1 -> ARABIC SHADDA
u'\u0652' # 0xF2 -> ARABIC SUKUN
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
u'\ufffe'
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
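# Illustrative sketch (not part of the generated file): round-tripping one
# byte through the tables above; the sample byte is a hypothetical example.
def _example_roundtrip(byte='\xc7'):  # 0xC7 -> ARABIC LETTER ALEF
    text, _ = Codec().decode(byte)
    data, _ = Codec().encode(text)
    return data == byte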
| apache-2.0 |
jemekite/youtube-dl | youtube_dl/extractor/cbsnews.py | 96 | 3204 | # encoding: utf-8
from __future__ import unicode_literals
import re
import json
from .common import InfoExtractor
class CBSNewsIE(InfoExtractor):
IE_DESC = 'CBS News'
_VALID_URL = r'http://(?:www\.)?cbsnews\.com/(?:[^/]+/)+(?P<id>[\da-z_-]+)'
_TESTS = [
{
'url': 'http://www.cbsnews.com/news/tesla-and-spacex-elon-musks-industrial-empire/',
'info_dict': {
'id': 'tesla-and-spacex-elon-musks-industrial-empire',
'ext': 'flv',
'title': 'Tesla and SpaceX: Elon Musk\'s industrial empire',
'thumbnail': 'http://beta.img.cbsnews.com/i/2014/03/30/60147937-2f53-4565-ad64-1bdd6eb64679/60-0330-pelley-640x360.jpg',
'duration': 791,
},
'params': {
# rtmp download
'skip_download': True,
},
},
{
'url': 'http://www.cbsnews.com/videos/fort-hood-shooting-army-downplays-mental-illness-as-cause-of-attack/',
'info_dict': {
'id': 'fort-hood-shooting-army-downplays-mental-illness-as-cause-of-attack',
'ext': 'flv',
'title': 'Fort Hood shooting: Army downplays mental illness as cause of attack',
'thumbnail': 're:^https?://.*\.jpg$',
'duration': 205,
},
'params': {
# rtmp download
'skip_download': True,
},
},
]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
webpage = self._download_webpage(url, video_id)
video_info = json.loads(self._html_search_regex(
r'(?:<ul class="media-list items" id="media-related-items"><li data-video-info|<div id="cbsNewsVideoPlayer" data-video-player-options)=\'({.+?})\'',
webpage, 'video JSON info'))
item = video_info['item'] if 'item' in video_info else video_info
title = item.get('articleTitle') or item.get('hed')
duration = item.get('duration')
thumbnail = item.get('mediaImage') or item.get('thumbnail')
formats = []
for format_id in ['RtmpMobileLow', 'RtmpMobileHigh', 'Hls', 'RtmpDesktop']:
uri = item.get('media' + format_id + 'URI')
if not uri:
continue
fmt = {
'url': uri,
'format_id': format_id,
}
if uri.startswith('rtmp'):
fmt.update({
'app': 'ondemand?auth=cbs',
'play_path': 'mp4:' + uri.split('<break>')[-1],
'player_url': 'http://www.cbsnews.com/[[IMPORT]]/vidtech.cbsinteractive.com/player/3_3_0/CBSI_PLAYER_HD.swf',
'page_url': 'http://www.cbsnews.com',
'ext': 'flv',
})
elif uri.endswith('.m3u8'):
fmt['ext'] = 'mp4'
formats.append(fmt)
return {
'id': video_id,
'title': title,
'thumbnail': thumbnail,
'duration': duration,
'formats': formats,
}
| unlicense |
jnewbery/bitcoin | contrib/devtools/circular-dependencies.py | 29 | 3219 | #!/usr/bin/env python3
# Copyright (c) 2018-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import sys
import re
from typing import Dict, List, Set
MAPPING = {
'core_read.cpp': 'core_io.cpp',
'core_write.cpp': 'core_io.cpp',
}
# Directories with header-based modules, where the assumption that .cpp files
# define functions and variables declared in corresponding .h files is
# incorrect.
HEADER_MODULE_PATHS = [
'interfaces/'
]
def module_name(path):
if path in MAPPING:
path = MAPPING[path]
if any(path.startswith(dirpath) for dirpath in HEADER_MODULE_PATHS):
return path
if path.endswith(".h"):
return path[:-2]
if path.endswith(".c"):
return path[:-2]
if path.endswith(".cpp"):
return path[:-4]
return None
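# Illustrative examples (not in the original script): how module_name
# collapses paths; the file names below are hypothetical.
def _example_module_names():
    return [module_name(p) for p in
            ('validation.cpp', 'validation.h', 'interfaces/node.h', 'README.md')]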
files = dict()
deps: Dict[str, Set[str]] = dict()
RE = re.compile("^#include <(.*)>")
# Iterate over files, and create list of modules
for arg in sys.argv[1:]:
module = module_name(arg)
if module is None:
print("Ignoring file %s (does not constitute module)\n" % arg)
else:
files[arg] = module
deps[module] = set()
# Iterate again, and build list of direct dependencies for each module
# TODO: implement support for multiple include directories
for arg in sorted(files.keys()):
module = files[arg]
with open(arg, 'r', encoding="utf8") as f:
for line in f:
match = RE.match(line)
if match:
include = match.group(1)
included_module = module_name(include)
if included_module is not None and included_module in deps and included_module != module:
deps[module].add(included_module)
# Loop to find the shortest (remaining) circular dependency
have_cycle: bool = False
while True:
shortest_cycle = None
for module in sorted(deps.keys()):
# Build the transitive closure of dependencies of module
closure: Dict[str, List[str]] = dict()
for dep in deps[module]:
closure[dep] = []
while True:
old_size = len(closure)
old_closure_keys = sorted(closure.keys())
for src in old_closure_keys:
for dep in deps[src]:
if dep not in closure:
closure[dep] = closure[src] + [src]
if len(closure) == old_size:
break
# If module is in its own transitive closure, it's a circular dependency; check if it is the shortest
if module in closure and (shortest_cycle is None or len(closure[module]) + 1 < len(shortest_cycle)):
shortest_cycle = [module] + closure[module]
if shortest_cycle is None:
break
# We have the shortest circular dependency; report it
module = shortest_cycle[0]
print("Circular dependency: %s" % (" -> ".join(shortest_cycle + [module])))
# And then break the dependency to avoid repeating in other cycles
deps[shortest_cycle[-1]] = deps[shortest_cycle[-1]] - set([module])
have_cycle = True
sys.exit(1 if have_cycle else 0)
| mit |
ondrokrc/gramps | gramps/gui/views/treemodels/familymodel.py | 1 | 6542 | #
# Gramps - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2007 Donald N. Allingham
# Copyright (C) 2010 Nick Hall
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# python modules
#
#-------------------------------------------------------------------------
import logging
log = logging.getLogger(".")
#-------------------------------------------------------------------------
#
# GNOME/GTK modules
#
#-------------------------------------------------------------------------
from gi.repository import Gtk
#-------------------------------------------------------------------------
#
# GRAMPS modules
#
#-------------------------------------------------------------------------
from gramps.gen.datehandler import displayer, format_time, get_date_valid
from gramps.gen.display.name import displayer as name_displayer
from gramps.gen.lib import EventRoleType, FamilyRelType
from .flatbasemodel import FlatBaseModel
from gramps.gen.utils.db import get_marriage_or_fallback
from gramps.gen.config import config
from gramps.gen.const import GRAMPS_LOCALE as glocale
invalid_date_format = config.get('preferences.invalid-date-format')
#-------------------------------------------------------------------------
#
# FamilyModel
#
#-------------------------------------------------------------------------
class FamilyModel(FlatBaseModel):
def __init__(self, db, scol=0, order=Gtk.SortType.ASCENDING, search=None,
skip=set(), sort_map=None):
self.gen_cursor = db.get_family_cursor
self.map = db.get_raw_family_data
self.fmap = [
self.column_id,
self.column_father,
self.column_mother,
self.column_type,
self.column_marriage,
self.column_private,
self.column_tags,
self.column_change,
self.column_tag_color,
]
self.smap = [
self.column_id,
self.sort_father,
self.sort_mother,
self.column_type,
self.sort_marriage,
self.column_private,
self.column_tags,
self.sort_change,
self.column_tag_color,
]
FlatBaseModel.__init__(self, db, scol, order, search=search, skip=skip,
sort_map=sort_map)
def destroy(self):
"""
Unset all elements that can prevent garbage collection
"""
self.db = None
self.gen_cursor = None
self.map = None
self.fmap = None
self.smap = None
FlatBaseModel.destroy(self)
def color_column(self):
"""
Return the color column.
"""
return 8
def on_get_n_columns(self):
return len(self.fmap)+1
def column_father(self, data):
if data[2]:
person = self.db.get_person_from_handle(data[2])
return name_displayer.display_name(person.primary_name)
else:
return ""
def sort_father(self, data):
if data[2]:
person = self.db.get_person_from_handle(data[2])
return name_displayer.sorted_name(person.primary_name)
else:
return ""
def column_mother(self, data):
if data[3]:
person = self.db.get_person_from_handle(data[3])
return name_displayer.display_name(person.primary_name)
else:
return ""
def sort_mother(self, data):
if data[3]:
person = self.db.get_person_from_handle(data[3])
return name_displayer.sorted_name(person.primary_name)
else:
return ""
def column_type(self, data):
return str(FamilyRelType(data[5]))
def column_marriage(self, data):
family = self.db.get_family_from_handle(data[0])
event = get_marriage_or_fallback(self.db, family, "<i>%s</i>")
if event:
if event.date.format:
return event.date.format % displayer.display(event.date)
elif not get_date_valid(event):
return invalid_date_format % displayer.display(event.date)
else:
return "%s" % displayer.display(event.date)
else:
return ''
def sort_marriage(self, data):
family = self.db.get_family_from_handle(data[0])
event = get_marriage_or_fallback(self.db, family)
if event:
return "%09d" % event.date.get_sort_value()
else:
return ''
def column_id(self, data):
return str(data[1])
def column_private(self, data):
if data[14]:
return 'gramps-lock'
else:
# There is a problem returning None here.
return ''
def sort_change(self, data):
return "%012x" % data[12]
def column_change(self, data):
return format_time(data[12])
def get_tag_name(self, tag_handle):
"""
Return the tag name from the given tag handle.
"""
return self.db.get_tag_from_handle(tag_handle).get_name()
def column_tag_color(self, data):
"""
Return the tag color.
"""
tag_color = "#000000000000"
tag_priority = None
for handle in data[13]:
tag = self.db.get_tag_from_handle(handle)
this_priority = tag.get_priority()
if tag_priority is None or this_priority < tag_priority:
tag_color = tag.get_color()
tag_priority = this_priority
return tag_color
def column_tags(self, data):
"""
Return the sorted list of tags.
"""
tag_list = list(map(self.get_tag_name, data[13]))
return ', '.join(sorted(tag_list, key=glocale.sort_key))
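# Illustrative sketch (not in the original module): fmap and smap are parallel
# per-column accessor lists, so resolving a display cell looks roughly like
# this (model and data are hypothetical placeholders).
def _example_cell(model, data, col):
    return model.fmap[col](data)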
| gpl-2.0 |
teamfx/openjfx-10-dev-rt | modules/javafx.web/src/main/native/Tools/Scripts/webkitpy/tool/steps/obsoletepatches.py | 3 | 2318 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
from webkitpy.tool.grammar import pluralize
from webkitpy.tool.steps.abstractstep import AbstractStep
from webkitpy.tool.steps.options import Options
_log = logging.getLogger(__name__)
class ObsoletePatches(AbstractStep):
@classmethod
def options(cls):
return AbstractStep.options() + [
Options.obsolete_patches,
]
def run(self, state):
if not self._options.obsolete_patches:
return
bug_id = state["bug_id"]
patches = self._tool.bugs.fetch_bug(bug_id).patches()
if not patches:
return
_log.info("Obsoleting %s on bug %s" % (pluralize(len(patches), "old patch"), bug_id))
for patch in patches:
self._tool.bugs.obsolete_attachment(patch.id())
| gpl-2.0 |
luisgg/iteexe | exe/engine/configparser.py | 4 | 15800 | """A more user friendly configParser
Copyright 2005-2006 Matthew Sherborne.
Copyright 2005-2007 eXe Project, New Zealand Tertiary Education Commisssion
Released under the GPL2 license found at
http://www.fsf.org/licensing/licenses/gpl.txt
"""
import re, os
import codecs
exSection = re.compile('\[(?P<sectionname>(\w|\s)+)\]\s*')
exOption = re.compile("""\s* # Ignore white space at the beginning
(?P<optionname>
(\s*(\w|[\_\-])+)+) # This matches multiple words
#(\s*\w+)+) # This matches multiple words
(?P<operator>\s*[:=]\s*) # = or : with optional white space around it
(?P<value>.*?) # Multiple words
(\s*)(?=$) # White space at end ignored
""", re.VERBOSE)
# Constants
# A default parameter value that lets us use a dynamic default for returning
# non-existing values
UseDefault = object()
# Set ConfigParser.defaultValue to this if you want no default value but to
# raise value error when trying to get non existant option values
RaiseValueError = object()
class ConfigParser(object):
"""For parsing and writing config files"""
# The default char to put between option names and vals
optionMiddle = ' = '
# Set this to a default val for options that don't exist
defaultValue = RaiseValueError
# Set this to write after each attribute change
autoWrite = False
def __init__(self, onWrite=None):
"""
set 'onWrite' to a method that will be called passing the parser as the
only parameter, just before we write the file.
"""
self._sections = {}
self._originalFile = None
self._onWrite = onWrite
def __getattr__(self, attr):
"""
Allows access to section objects
through attributes
"""
if '.' in attr:
# The dotted format is supported here only for
# hasattr use
section, option = attr.split('.', 1)
try:
return self.get(section, option)
except ValueError:
raise AttributeError('%s instance has no attribute %s' %
(self.__class__.__name__, attr))
else:
raise AttributeError('%s instance has no attribute %s' %
(self.__class__.__name__, attr))
def __setattr__(self, attr, value):
"""
Allows creation of sections by attributes
"""
if '.' in attr:
section, option = attr.split('.', 1)
section = self.set(section, option, value)
else:
if attr.startswith('_'):
self.__dict__[attr] = value
elif attr in self.__dict__ or attr in self.__class__.__dict__:
# Existing sections may only be replaced by other section objects
if attr in self._sections:
assert isinstance(value, Section)
self.__dict__[attr] = value
else:
# Create a new section on the fly
Section(attr, self)
def __delattr__(self, attr):
"""
Allows deletion of a section
by deleting its attribute
"""
if self.has_section(attr):
self.delete(attr)
elif attr in self.__dict__:
del self.__dict__[attr]
else:
raise AttributeError('%s instance has no attribute %s' %
(self.__class__.__name__, attr))
def __contains__(self, name):
"""
Allows checking for existence of sections
"""
if '.' in name:
section, option = name.split('.', 1)
return self.has_option(section, option)
else:
return self.has_section(name)
def addSection(self, name):
"""
Adds and returns a section object
"""
if not self.has_section(name):
Section(name, self)
return self.__dict__[name]
def read(self, file_):
"""Reads in a config file. 'file_' can be a file object
or a string"""
self._originalFile = file_
if isinstance(file_, basestring):
file_ = open(file_)
# Apparently in files encoded with utf8, readlines works fine because \n
# is still the new line character in utf8.
# However, in some other less popular encodings, line ending chars are
# different
lines = file_.readlines()
# the codecs utf_8_sig decoder is only available in Python 2.5+,
# so process by hand
if len(lines) > 0 and lines[0].startswith(codecs.BOM_UTF8):
lines[0] = lines[0][3:]
# Store each line as a unicode string internally
for i, line in enumerate(lines):
if not isinstance(line, unicode):
lines[i] = unicode(line, 'utf8')
# Init state
self._sections = {}
# Parse each line
section = None
sectionName = None
for line in lines:
if not line:
continue
match = exSection.match(line)
if match:
# New Section starting
sectionName = match.group('sectionname')
section = Section(sectionName, self)
elif section is not None:
match = exOption.match(line)
if match:
                # Reading a value into the current section
opName = match.group('optionname')
opValue = match.group('value')
section[opName] = opValue
def write(self, file_=None):
"""Writes the options to the file_"""
        # Let any interested parties make last-minute alterations
if self._onWrite:
self._onWrite(self)
# Use default file if none specified
if not file_:
file_ = self._originalFile
else:
# Store the file for later writes
self._originalFile = file_
        # If 'file_' is a string, it's a file name
if isinstance(file_, basestring):
if os.path.exists(file_):
file_ = open(file_, 'r+')
else:
file_ = open(file_, 'w+')
# Read the lines of the file
try:
lines = file_.readlines()
except IOError:
lines = []
# the codecs utf_8_sig decoder is only available in Python 2.5+,
# so process by hand, silently stripping the signature
if len(lines) > 0 and lines[0].startswith(codecs.BOM_UTF8):
lines[0] = lines[0][3:]
section = None
sectionName = None
sectionOffsets = {}
# Change the options that are already in the file
for i in range(len(lines)):
line = lines[i]
match = exSection.match(line)
if match:
sectionName = match.group('sectionname')
optionOffsets = {}
sectionOffsets[sectionName] = (line, optionOffsets, i)
elif sectionName:
match = exOption.match(line)
if match:
opName = match.group('optionname')
if self.has_option(sectionName, opName):
opNewVal = self.get(sectionName, opName)
lines[i] = exOption.sub(r'\1\4', line) + opNewVal
optionOffsets[opName] = i
else: lines[i] = None
# Add new options
linesToAdd, lastSectionLines = \
self.addNewOptions(lines, section, sectionOffsets)
self._writeFile(file_, lines, linesToAdd, lastSectionLines)
def addNewOptions(self, lines, section, sectionOffsets):
"""
Adds the new options to the file
"""
linesToAdd = {}
lastSectionLines = []
linesToAdd[len(lines)] = lastSectionLines
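        # linesToAdd maps a source line index to the list of new lines that
        # will be inserted after that line; the index len(lines) collects
        # the trailing block used for sections that do not exist in the
        # file yet.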
existingSections = sectionOffsets.keys()
for section in self._sections:
if section not in existingSections:
# Just append the section on the end
linesToInsert = lastSectionLines
if lines:
linesToInsert.append('')
linesToInsert.append('[%s]' % section)
newOpts = [(name, val)
for name, (val)
in self._sections[section].items()]
else:
# Get a list of the "not already updated" options
offsets = sectionOffsets[section][1]
existingOptions = offsets.keys()
newOpts = [(name, val)
for name, val
in self._sections[section].items()
if name not in existingOptions]
# Append new options on the end of the section,
# in the order they were added to 'self'
if offsets:
lineToAppendAt = max(offsets.values())
else:
# Use the line of the start of the section
lineToAppendAt = sectionOffsets[section][2]
linesToInsert = []
linesToAdd[lineToAppendAt] = linesToInsert
# Now append/insert the options
newOpts.sort() # Put them in order
for name, val in newOpts:
linesToInsert.append('%s%s%s' % (name, self.optionMiddle, val))
return linesToAdd, lastSectionLines
def _writeFile(self, file_, lines, linesToAdd, lastSectionLines):
"""
Actually writes the new file
"""
# Finally compile the lines and write the file
newLines = []
for i in range(len(lines)):
line = lines[i]
if line is not None:
newLines.append(line.replace('\n', ''))
toAdd = linesToAdd.get(i, None)
if toAdd:
newLines += toAdd
# Also append some lines at the end
newLines += lastSectionLines
# Write the file
file_.seek(0)
data = '\n'.join(newLines)
file_.write(data.encode('utf8'))
file_.truncate()
def has_option(self, sectionName, optionName):
"""Returns 1 if we know about this setting"""
if self.has_section(sectionName):
return optionName in self._sections[sectionName].keys()
else: return 0
def has_section(self, sectionName):
"""Returns 1 if this section has been defined"""
return sectionName in self._sections.keys()
def get(self, sectionName, optionName, default=UseDefault):
"""Returns the option or 'default' if it doesn't exist"""
if self.has_option(sectionName, optionName):
return self._sections[sectionName][optionName]
elif default is not UseDefault:
return default
elif self.defaultValue is RaiseValueError:
raise ValueError("Option %s.%s doesn't exist" %
(sectionName, optionName))
elif callable(self.defaultValue):
return self.defaultValue(sectionName, optionName)
else:
return self.defaultValue
def set(self, sectionName, optionName, value):
"""Set's an option in a section to value,
can be used for new options, new sections and pre-existing ones"""
sec = Section(sectionName, self) # This creates or gets a section
if not isinstance(value, unicode):
# Convert ints and floats to str before encoding to unicode
if not isinstance(value, str):
value = str(value)
value = unicode(value, 'utf8')
if sec.get(optionName, None) != value:
sec[optionName] = value
if self.autoWrite and self._originalFile is not None:
# Move the original file to the beginning if we can
if hasattr(self._originalFile, 'seek') and \
callable(self._originalFile.seek):
self._originalFile.seek(0)
# Can't use autowrite with append, writeonly or readonly files
if hasattr(self._originalFile, 'mode'):
# Must have r+ or rb+ or w+
if '+' not in self._originalFile.mode:
return
# Write using self._originalFile
self.write()
def setdefault(self, sectionName, optionName, value):
"""
        If 'sectionName' and 'optionName' exist, returns the existing value;
        otherwise, sets the option and returns the newly set value.
        It works like setdefault on 'dict' instances.
"""
if self.has_section(sectionName) and \
self.has_option(sectionName, optionName):
return self.get(sectionName, optionName)
else:
self.set(sectionName, optionName, value)
return value
def delete(self, sectionName, optionName=None):
"""Remove a section or optionName. Set optionName to None
to remove the whole section"""
if self._sections.has_key(sectionName):
if optionName is None:
del self._sections[sectionName]
delattr(self, sectionName)
if self.autoWrite:
self.write()
else:
sec = self._sections[sectionName]
if sec.has_key(optionName):
del sec[optionName]
if self.autoWrite:
self.write()
class Section(dict):
"""Represents a single section"""
def __new__(cls, name, parent):
"""
Utility func that will either make a new
or return an existing Section instance
"""
if parent.has_section(name):
return parent._sections[name]
else:
return dict.__new__(cls, name, parent)
def __init__(self, name, parent):
"""Setup"""
dict.__init__(self, {})
self.__name = name
self.__parent = parent
self.__parent._sections[name] = self
dct = self.__parent.__dict__
if name not in dct:
dct[name] = self
def has_option(self, optionName):
"""Returns 1 if we know about this setting"""
return self.__parent.has_option(self.__name, optionName)
def get(self, optionName, default=UseDefault):
"""Returns the option name"""
return self.__parent.get(self.__name, optionName, default)
def set(self, optionName, value):
"""Sets an option"""
self.__parent.set(self.__name, optionName, value)
def setdefault(self, optionName, value):
"""
        If 'optionName' exists, returns the existing value;
        otherwise, sets the option and returns the newly set value.
        It works like setdefault on 'dict' instances.
"""
return self.__parent.setdefault(self.__name, optionName, value)
def __getattr__(self, attr):
try:
return self.__parent.get(self.__name, attr)
except ValueError:
raise AttributeError('%s instance has no attribute %s' %
(self.__class__.__name__, attr))
def __setattr__(self, attr, value):
if attr.startswith('_'):
self.__dict__[attr] = value
else:
self.__parent.set(self.__name, attr, value)
def __delattr__(self, attr):
self.__parent.delete(self.__name, attr)
def __contains__(self, name):
return self.__parent.has_option(self.__name, name)
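# Minimal usage sketch (assumed file name and values; illustrative only,
# not part of the original module):
if __name__ == '__main__':
    cfg = ConfigParser()
    web = cfg.addSection('web')
    web.port = '8080'                    # attribute access creates options
    assert cfg.get('web', 'port') == '8080'
    assert 'web.port' in cfg             # dotted containment check
    assert cfg.get('web', 'host', default='localhost') == 'localhost'
    cfg.write(open('demo.conf', 'w+'))   # persists [web] / port = 8080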
| gpl-2.0 |
eerorika/ansible | lib/ansible/template/vars.py | 2 | 3756 | # (c) 2012, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.compat.six import iteritems
from jinja2.utils import missing
from ansible.module_utils._text import to_native
__all__ = ['AnsibleJ2Vars']
class AnsibleJ2Vars:
'''
    Helper class to template all variable content before jinja2 sees it. This is
    done by hijacking the variable storage that jinja2 uses, and overriding __contains__
    and __getitem__ to look like a dict. An added bonus is avoiding duplication of the
    large hashes that inject tends to create.
To facilitate using builtin jinja2 things like range, globals are also handled here.
'''
def __init__(self, templar, globals, locals=None, *extras):
'''
Initializes this object with a valid Templar() object, as
well as several dictionaries of variables representing
different scopes (in jinja2 terminology).
'''
self._templar = templar
self._globals = globals
self._extras = extras
self._locals = dict()
if isinstance(locals, dict):
for key, val in iteritems(locals):
if key[:2] == 'l_' and val is not missing:
self._locals[key[2:]] = val
def __contains__(self, k):
if k in self._templar._available_variables:
return True
if k in self._locals:
return True
for i in self._extras:
if k in i:
return True
if k in self._globals:
return True
return False
def __getitem__(self, varname):
if varname not in self._templar._available_variables:
if varname in self._locals:
return self._locals[varname]
for i in self._extras:
if varname in i:
return i[varname]
if varname in self._globals:
return self._globals[varname]
else:
raise KeyError("undefined variable: %s" % varname)
variable = self._templar._available_variables[varname]
# HostVars is special, return it as-is, as is the special variable
# 'vars', which contains the vars structure
from ansible.vars.hostvars import HostVars
if isinstance(variable, dict) and varname == "vars" or isinstance(variable, HostVars) or hasattr(variable, '__UNSAFE__'):
return variable
else:
value = None
try:
value = self._templar.template(variable)
except Exception as e:
raise type(e)(to_native(variable) + ': ' + e.message)
return value
def add_locals(self, locals):
'''
If locals are provided, create a copy of self containing those
locals in addition to what is already in this variable proxy.
'''
if locals is None:
return self
return AnsibleJ2Vars(self._templar, self._globals, locals=locals, *self._extras)
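# Lookup order implemented above (sketch with assumed toy values): a name is
# resolved first against the templar's available variables (templated on
# access), then the l_-prefixed locals, then each extras dict, then globals;
# e.g. with locals {'item': 1} the expression vars['item'] yields 1, while
# vars['range'] falls through to jinja2's builtin range in globals.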
| gpl-3.0 |
rizumu/django | tests/forms_tests/widget_tests/test_splitdatetimewidget.py | 202 | 1943 | from datetime import date, datetime, time
from django.forms import SplitDateTimeWidget
from .base import WidgetTest
class SplitDateTimeWidgetTest(WidgetTest):
widget = SplitDateTimeWidget()
def test_render_empty(self):
self.check_html(self.widget, 'date', '', html=(
'<input type="text" name="date_0" /><input type="text" name="date_1" />'
))
def test_render_none(self):
self.check_html(self.widget, 'date', None, html=(
'<input type="text" name="date_0" /><input type="text" name="date_1" />'
))
def test_render_datetime(self):
self.check_html(self.widget, 'date', datetime(2006, 1, 10, 7, 30), html=(
'<input type="text" name="date_0" value="2006-01-10" />'
'<input type="text" name="date_1" value="07:30:00" />'
))
def test_render_date_and_time(self):
self.check_html(self.widget, 'date', [date(2006, 1, 10), time(7, 30)], html=(
'<input type="text" name="date_0" value="2006-01-10" />'
'<input type="text" name="date_1" value="07:30:00" />'
))
def test_constructor_attrs(self):
widget = SplitDateTimeWidget(attrs={'class': 'pretty'})
self.check_html(widget, 'date', datetime(2006, 1, 10, 7, 30), html=(
'<input type="text" class="pretty" value="2006-01-10" name="date_0" />'
'<input type="text" class="pretty" value="07:30:00" name="date_1" />'
))
def test_formatting(self):
"""
Use 'date_format' and 'time_format' to change the way a value is
displayed.
"""
widget = SplitDateTimeWidget(
date_format='%d/%m/%Y', time_format='%H:%M',
)
self.check_html(widget, 'date', datetime(2006, 1, 10, 7, 30), html=(
'<input type="text" name="date_0" value="10/01/2006" />'
'<input type="text" name="date_1" value="07:30" />'
))
| bsd-3-clause |
akretion/openerp-server | openerp/workflow/workitem.py | 61 | 9046 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#
# TODO:
# cr.execute('delete from wkf_triggers where model=%s and res_id=%s', (res_type,res_id))
#
import instance
import wkf_expr
import wkf_logs
def create(cr, act_datas, inst_id, ident, stack):
for act in act_datas:
cr.execute("select nextval('wkf_workitem_id_seq')")
id_new = cr.fetchone()[0]
cr.execute("insert into wkf_workitem (id,act_id,inst_id,state) values (%s,%s,%s,'active')", (id_new, act['id'], inst_id))
cr.execute('select * from wkf_workitem where id=%s',(id_new,))
res = cr.dictfetchone()
wkf_logs.log(cr,ident,act['id'],'active')
process(cr, res, ident, stack=stack)
def process(cr, workitem, ident, signal=None, force_running=False, stack=None):
if stack is None:
        raise Exception('Error !!!')
result = True
cr.execute('select * from wkf_activity where id=%s', (workitem['act_id'],))
activity = cr.dictfetchone()
triggers = False
if workitem['state']=='active':
triggers = True
result = _execute(cr, workitem, activity, ident, stack)
if not result:
return False
if workitem['state']=='running':
pass
if workitem['state']=='complete' or force_running:
ok = _split_test(cr, workitem, activity['split_mode'], ident, signal, stack)
triggers = triggers and not ok
if triggers:
cr.execute('select * from wkf_transition where act_from=%s', (workitem['act_id'],))
alltrans = cr.dictfetchall()
for trans in alltrans:
if trans['trigger_model']:
ids = wkf_expr._eval_expr(cr,ident,workitem,trans['trigger_expr_id'])
for res_id in ids:
cr.execute('select nextval(\'wkf_triggers_id_seq\')')
id =cr.fetchone()[0]
cr.execute('insert into wkf_triggers (model,res_id,instance_id,workitem_id,id) values (%s,%s,%s,%s,%s)', (trans['trigger_model'],res_id,workitem['inst_id'], workitem['id'], id))
return result
# ---------------------- PRIVATE FUNCS --------------------------------
def _state_set(cr, workitem, activity, state, ident):
cr.execute('update wkf_workitem set state=%s where id=%s', (state,workitem['id']))
workitem['state'] = state
wkf_logs.log(cr,ident,activity['id'],state)
def _execute(cr, workitem, activity, ident, stack):
result = True
#
# send a signal to parent workflow (signal: subflow.signal_name)
#
signal_todo = []
if (workitem['state']=='active') and activity['signal_send']:
cr.execute("select i.id,w.osv,i.res_id from wkf_instance i left join wkf w on (i.wkf_id=w.id) where i.id IN (select inst_id from wkf_workitem where subflow_id=%s)", (workitem['inst_id'],))
for i in cr.fetchall():
signal_todo.append((i[0], (ident[0],i[1],i[2]), activity['signal_send']))
if activity['kind']=='dummy':
if workitem['state']=='active':
_state_set(cr, workitem, activity, 'complete', ident)
if activity['action_id']:
res2 = wkf_expr.execute_action(cr, ident, workitem, activity)
if res2:
stack.append(res2)
result=res2
elif activity['kind']=='function':
if workitem['state']=='active':
_state_set(cr, workitem, activity, 'running', ident)
returned_action = wkf_expr.execute(cr, ident, workitem, activity)
if type(returned_action) in (dict,):
stack.append(returned_action)
if activity['action_id']:
res2 = wkf_expr.execute_action(cr, ident, workitem, activity)
# A client action has been returned
if res2:
stack.append(res2)
result=res2
_state_set(cr, workitem, activity, 'complete', ident)
elif activity['kind']=='stopall':
if workitem['state']=='active':
_state_set(cr, workitem, activity, 'running', ident)
cr.execute('delete from wkf_workitem where inst_id=%s and id<>%s', (workitem['inst_id'], workitem['id']))
if activity['action']:
wkf_expr.execute(cr, ident, workitem, activity)
_state_set(cr, workitem, activity, 'complete', ident)
elif activity['kind']=='subflow':
if workitem['state']=='active':
_state_set(cr, workitem, activity, 'running', ident)
if activity.get('action', False):
id_new = wkf_expr.execute(cr, ident, workitem, activity)
if not id_new:
cr.execute('delete from wkf_workitem where id=%s', (workitem['id'],))
return False
assert type(id_new)==type(1) or type(id_new)==type(1L), 'Wrong return value: '+str(id_new)+' '+str(type(id_new))
cr.execute('select id from wkf_instance where res_id=%s and wkf_id=%s', (id_new,activity['subflow_id']))
id_new = cr.fetchone()[0]
else:
id_new = instance.create(cr, ident, activity['subflow_id'])
cr.execute('update wkf_workitem set subflow_id=%s where id=%s', (id_new, workitem['id']))
workitem['subflow_id'] = id_new
if workitem['state']=='running':
cr.execute("select state from wkf_instance where id=%s", (workitem['subflow_id'],))
state= cr.fetchone()[0]
if state=='complete':
_state_set(cr, workitem, activity, 'complete', ident)
for t in signal_todo:
instance.validate(cr, t[0], t[1], t[2], force_running=True)
return result
def _split_test(cr, workitem, split_mode, ident, signal=None, stack=None):
if stack is None:
        raise Exception('Error !!!')
cr.execute('select * from wkf_transition where act_from=%s', (workitem['act_id'],))
test = False
transitions = []
alltrans = cr.dictfetchall()
if split_mode=='XOR' or split_mode=='OR':
for transition in alltrans:
if wkf_expr.check(cr, workitem, ident, transition,signal):
test = True
transitions.append((transition['id'], workitem['inst_id']))
if split_mode=='XOR':
break
else:
test = True
for transition in alltrans:
if not wkf_expr.check(cr, workitem, ident, transition,signal):
test = False
break
cr.execute('select count(*) from wkf_witm_trans where trans_id=%s and inst_id=%s', (transition['id'], workitem['inst_id']))
if not cr.fetchone()[0]:
transitions.append((transition['id'], workitem['inst_id']))
if test and len(transitions):
cr.executemany('insert into wkf_witm_trans (trans_id,inst_id) values (%s,%s)', transitions)
cr.execute('delete from wkf_workitem where id=%s', (workitem['id'],))
for t in transitions:
_join_test(cr, t[0], t[1], ident, stack)
return True
return False
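# Split semantics implemented above (summary): with split_mode 'XOR' only the
# first outgoing transition whose condition holds is taken; with 'OR' every
# passing transition is taken; any other mode behaves as AND, so the workitem
# advances only when all outgoing conditions hold at once.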
def _join_test(cr, trans_id, inst_id, ident, stack):
cr.execute('select * from wkf_activity where id=(select act_to from wkf_transition where id=%s)', (trans_id,))
activity = cr.dictfetchone()
if activity['join_mode']=='XOR':
create(cr,[activity], inst_id, ident, stack)
cr.execute('delete from wkf_witm_trans where inst_id=%s and trans_id=%s', (inst_id,trans_id))
else:
cr.execute('select id from wkf_transition where act_to=%s', (activity['id'],))
trans_ids = cr.fetchall()
ok = True
for (id,) in trans_ids:
cr.execute('select count(*) from wkf_witm_trans where trans_id=%s and inst_id=%s', (id,inst_id))
res = cr.fetchone()[0]
if not res:
ok = False
break
if ok:
for (id,) in trans_ids:
cr.execute('delete from wkf_witm_trans where trans_id=%s and inst_id=%s', (id,inst_id))
create(cr, [activity], inst_id, ident, stack)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
GheRivero/ansible | lib/ansible/modules/network/cloudengine/ce_aaa_server_host.py | 43 | 105166 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ce_aaa_server_host
version_added: "2.4"
short_description: Manages AAA server host configuration on HUAWEI CloudEngine switches.
description:
- Manages AAA server host configuration on HUAWEI CloudEngine switches.
author:
- wangdezhuang (@CloudEngine-Ansible)
options:
state:
description:
- Specify desired state of the resource.
default: present
choices: ['present', 'absent']
local_user_name:
description:
- Name of a local user.
The value is a string of 1 to 253 characters.
local_password:
description:
- Login password of a user. The password can contain letters, numbers, and special characters.
The value is a string of 1 to 255 characters.
local_service_type:
description:
- Service types through which the local user can log in, such as ftp, ssh, snmp, and telnet.
local_ftp_dir:
description:
- FTP user directory.
The value is a string of 1 to 255 characters.
local_user_level:
description:
- Login level of a local user.
The value is an integer ranging from 0 to 15.
local_user_group:
description:
- Name of the user group where the user belongs. The user inherits all the rights of the user group.
The value is a string of 1 to 32 characters.
radius_group_name:
description:
- RADIUS server group's name.
The value is a string of 1 to 32 case-insensitive characters.
radius_server_type:
description:
- Type of Radius Server.
choices: ['Authentication', 'Accounting']
radius_server_ip:
description:
- IPv4 address of configured server.
The value is a string of 0 to 255 characters, in dotted decimal notation.
radius_server_ipv6:
description:
- IPv6 address of configured server.
The total length is 128 bits.
radius_server_port:
description:
- Configured server port for a particular server.
The value is an integer ranging from 1 to 65535.
radius_server_mode:
description:
- Configured primary or secondary server for a particular server.
choices: ['Secondary-server', 'Primary-server']
radius_vpn_name:
description:
- Set VPN instance.
The value is a string of 1 to 31 case-sensitive characters.
radius_server_name:
description:
- Hostname of configured server.
The value is a string of 0 to 255 case-sensitive characters.
hwtacacs_template:
description:
- Name of a HWTACACS template.
The value is a string of 1 to 32 case-insensitive characters.
hwtacacs_server_ip:
description:
- Server IPv4 address. Must be a valid unicast IP address.
The value is a string of 0 to 255 characters, in dotted decimal notation.
hwtacacs_server_ipv6:
description:
- Server IPv6 address. Must be a valid unicast IP address.
The total length is 128 bits.
hwtacacs_server_type:
description:
- Hwtacacs server type.
choices: ['Authentication', 'Authorization', 'Accounting', 'Common']
hwtacacs_is_secondary_server:
description:
- Whether the server is secondary.
type: bool
default: 'no'
hwtacacs_vpn_name:
description:
- VPN instance name.
hwtacacs_is_public_net:
description:
- Set the public-net.
type: bool
default: 'no'
hwtacacs_server_host_name:
description:
- Hwtacacs server host name.
'''
EXAMPLES = '''
- name: AAA server host test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: "Config local user when use local scheme"
ce_aaa_server_host:
state: present
local_user_name: user1
local_password: 123456
provider: "{{ cli }}"
- name: "Undo local user when use local scheme"
ce_aaa_server_host:
state: absent
local_user_name: user1
local_password: 123456
provider: "{{ cli }}"
- name: "Config radius server ip"
ce_aaa_server_host:
state: present
radius_group_name: group1
radius_server_type: Authentication
radius_server_ip: 10.1.10.1
radius_server_port: 2000
radius_server_mode: Primary-server
radius_vpn_name: _public_
provider: "{{ cli }}"
- name: "Undo radius server ip"
ce_aaa_server_host:
state: absent
radius_group_name: group1
radius_server_type: Authentication
radius_server_ip: 10.1.10.1
radius_server_port: 2000
radius_server_mode: Primary-server
radius_vpn_name: _public_
provider: "{{ cli }}"
- name: "Config hwtacacs server ip"
ce_aaa_server_host:
state: present
hwtacacs_template: template
hwtacacs_server_ip: 10.10.10.10
hwtacacs_server_type: Authorization
hwtacacs_vpn_name: _public_
provider: "{{ cli }}"
- name: "Undo hwtacacs server ip"
ce_aaa_server_host:
state: absent
hwtacacs_template: template
hwtacacs_server_ip: 10.10.10.10
hwtacacs_server_type: Authorization
hwtacacs_vpn_name: _public_
provider: "{{ cli }}"
'''
RETURN = '''
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"hwtacacs_is_public_net": "false",
"hwtacacs_is_secondary_server": "false",
"hwtacacs_server_ip": "10.135.182.157",
"hwtacacs_server_type": "Authorization",
"hwtacacs_template": "wdz",
"hwtacacs_vpn_name": "_public_",
"local_password": "******",
"state": "present"}
existing:
description: k/v pairs of existing aaa server host
returned: always
type: dict
sample: {"radius server ipv4": []}
end_state:
description: k/v pairs of aaa params after module execution
returned: always
type: dict
sample: {"radius server ipv4": [
[
"10.1.10.1",
"Authentication",
"2000",
"Primary-server",
"_public_"
]
]}
updates:
description: command sent to the device
returned: always
type: list
sample: ["hwtacacs server template test",
"hwtacacs server authorization 10.135.182.157 vpn-instance test_vpn public-net"]
'''
from xml.etree import ElementTree
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.cloudengine.ce import get_nc_config, set_nc_config, ce_argument_spec, check_ip_addr
SUCCESS = """success"""
FAILED = """failed"""
INVALID_USER_NAME_CHAR = [' ', '/', '\\',
':', '*', '?', '"', '\'', '<', '>', '%']
# get local user name
CE_GET_LOCAL_USER_INFO_HEADER = """
<filter type="subtree">
<aaa xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<lam>
<users>
<user>
<userName></userName>
<password></password>
"""
CE_GET_LOCAL_USER_INFO_TAIL = """
</user>
</users>
</lam>
</aaa>
</filter>
"""
# merge local user name
CE_MERGE_LOCAL_USER_INFO_HEADER = """
<config>
<aaa xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<lam>
<users>
<user operation="merge">
<userName>%s</userName>
"""
CE_MERGE_LOCAL_USER_INFO_TAIL = """
</user>
</users>
</lam>
</aaa>
</config>
"""
# delete local user name
CE_DELETE_LOCAL_USER_INFO_HEADER = """
<config>
<aaa xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<lam>
<users>
<user operation="delete">
<userName>%s</userName>
"""
CE_DELETE_LOCAL_USER_INFO_TAIL = """
</user>
</users>
</lam>
</aaa>
</config>
"""
# get radius server config ipv4
CE_GET_RADIUS_SERVER_CFG_IPV4 = """
<filter type="subtree">
<radius xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<rdsTemplates>
<rdsTemplate>
<groupName>%s</groupName>
<rdsServerIPV4s>
<rdsServerIPV4>
<serverType></serverType>
<serverIPAddress></serverIPAddress>
<serverPort></serverPort>
<serverMode></serverMode>
<vpnName></vpnName>
</rdsServerIPV4>
</rdsServerIPV4s>
</rdsTemplate>
</rdsTemplates>
</radius>
</filter>
"""
# merge radius server config ipv4
CE_MERGE_RADIUS_SERVER_CFG_IPV4 = """
<config>
<radius xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<rdsTemplates>
<rdsTemplate>
<groupName>%s</groupName>
<rdsServerIPV4s>
<rdsServerIPV4 operation="merge">
<serverType>%s</serverType>
<serverIPAddress>%s</serverIPAddress>
<serverPort>%s</serverPort>
<serverMode>%s</serverMode>
<vpnName>%s</vpnName>
</rdsServerIPV4>
</rdsServerIPV4s>
</rdsTemplate>
</rdsTemplates>
</radius>
</config>
"""
# delete radius server config ipv4
CE_DELETE_RADIUS_SERVER_CFG_IPV4 = """
<config>
<radius xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<rdsTemplates>
<rdsTemplate>
<groupName>%s</groupName>
<rdsServerIPV4s>
<rdsServerIPV4 operation="delete">
<serverType>%s</serverType>
<serverIPAddress>%s</serverIPAddress>
<serverPort>%s</serverPort>
<serverMode>%s</serverMode>
<vpnName>%s</vpnName>
</rdsServerIPV4>
</rdsServerIPV4s>
</rdsTemplate>
</rdsTemplates>
</radius>
</config>
"""
# get radius server config ipv6
CE_GET_RADIUS_SERVER_CFG_IPV6 = """
<filter type="subtree">
<radius xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<rdsTemplates>
<rdsTemplate>
<groupName>%s</groupName>
<rdsServerIPV6s>
<rdsServerIPV6>
<serverType></serverType>
<serverIPAddress></serverIPAddress>
<serverPort></serverPort>
<serverMode></serverMode>
</rdsServerIPV6>
</rdsServerIPV6s>
</rdsTemplate>
</rdsTemplates>
</radius>
</filter>
"""
# merge radius server config ipv6
CE_MERGE_RADIUS_SERVER_CFG_IPV6 = """
<config>
<radius xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<rdsTemplates>
<rdsTemplate>
<groupName>%s</groupName>
<rdsServerIPV6s>
<rdsServerIPV6 operation="merge">
<serverType>%s</serverType>
<serverIPAddress>%s</serverIPAddress>
<serverPort>%s</serverPort>
<serverMode>%s</serverMode>
</rdsServerIPV6>
</rdsServerIPV6s>
</rdsTemplate>
</rdsTemplates>
</radius>
</config>
"""
# delete radius server config ipv6
CE_DELETE_RADIUS_SERVER_CFG_IPV6 = """
<config>
<radius xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<rdsTemplates>
<rdsTemplate>
<groupName>%s</groupName>
<rdsServerIPV6s>
<rdsServerIPV6 operation="delete">
<serverType>%s</serverType>
<serverIPAddress>%s</serverIPAddress>
<serverPort>%s</serverPort>
<serverMode>%s</serverMode>
</rdsServerIPV6>
</rdsServerIPV6s>
</rdsTemplate>
</rdsTemplates>
</radius>
</config>
"""
# get radius server name
CE_GET_RADIUS_SERVER_NAME = """
<filter type="subtree">
<radius xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<rdsTemplates>
<rdsTemplate>
<groupName>%s</groupName>
<rdsServerNames>
<rdsServerName>
<serverType></serverType>
<serverName></serverName>
<serverPort></serverPort>
<serverMode></serverMode>
<vpnName></vpnName>
</rdsServerName>
</rdsServerNames>
</rdsTemplate>
</rdsTemplates>
</radius>
</filter>
"""
# merge radius server name
CE_MERGE_RADIUS_SERVER_NAME = """
<config>
<radius xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<rdsTemplates>
<rdsTemplate>
<groupName>%s</groupName>
<rdsServerNames>
<rdsServerName operation="merge">
<serverType>%s</serverType>
<serverName>%s</serverName>
<serverPort>%s</serverPort>
<serverMode>%s</serverMode>
<vpnName>%s</vpnName>
</rdsServerName>
</rdsServerNames>
</rdsTemplate>
</rdsTemplates>
</radius>
</config>
"""
# delete radius server name
CE_DELETE_RADIUS_SERVER_NAME = """
<config>
<radius xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<rdsTemplates>
<rdsTemplate>
<groupName>%s</groupName>
<rdsServerNames>
<rdsServerName operation="delete">
<serverType>%s</serverType>
<serverName>%s</serverName>
<serverPort>%s</serverPort>
<serverMode>%s</serverMode>
<vpnName>%s</vpnName>
</rdsServerName>
</rdsServerNames>
</rdsTemplate>
</rdsTemplates>
</radius>
</config>
"""
# get hwtacacs server config ipv4
CE_GET_HWTACACS_SERVER_CFG_IPV4 = """
<filter type="subtree">
<hwtacacs xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<hwTacTempCfgs>
<hwTacTempCfg>
<templateName>%s</templateName>
<hwTacSrvCfgs>
<hwTacSrvCfg>
<serverIpAddress></serverIpAddress>
<serverType></serverType>
<isSecondaryServer></isSecondaryServer>
<vpnName></vpnName>
<isPublicNet></isPublicNet>
</hwTacSrvCfg>
</hwTacSrvCfgs>
</hwTacTempCfg>
</hwTacTempCfgs>
</hwtacacs>
</filter>
"""
# merge hwtacacs server config ipv4
CE_MERGE_HWTACACS_SERVER_CFG_IPV4 = """
<config>
<hwtacacs xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<hwTacTempCfgs>
<hwTacTempCfg>
<templateName>%s</templateName>
<hwTacSrvCfgs>
<hwTacSrvCfg operation="merge">
<serverIpAddress>%s</serverIpAddress>
<serverType>%s</serverType>
<isSecondaryServer>%s</isSecondaryServer>
<vpnName>%s</vpnName>
<isPublicNet>%s</isPublicNet>
</hwTacSrvCfg>
</hwTacSrvCfgs>
</hwTacTempCfg>
</hwTacTempCfgs>
</hwtacacs>
</config>
"""
# delete hwtacacs server config ipv4
CE_DELETE_HWTACACS_SERVER_CFG_IPV4 = """
<config>
<hwtacacs xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<hwTacTempCfgs>
<hwTacTempCfg>
<templateName>%s</templateName>
<hwTacSrvCfgs>
<hwTacSrvCfg operation="delete">
<serverIpAddress>%s</serverIpAddress>
<serverType>%s</serverType>
<isSecondaryServer>%s</isSecondaryServer>
<vpnName>%s</vpnName>
<isPublicNet>%s</isPublicNet>
</hwTacSrvCfg>
</hwTacSrvCfgs>
</hwTacTempCfg>
</hwTacTempCfgs>
</hwtacacs>
</config>
"""
# get hwtacacs server config ipv6
CE_GET_HWTACACS_SERVER_CFG_IPV6 = """
<filter type="subtree">
<hwtacacs xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<hwTacTempCfgs>
<hwTacTempCfg>
<templateName>%s</templateName>
<hwTacIpv6SrvCfgs>
<hwTacIpv6SrvCfg>
<serverIpAddress></serverIpAddress>
<serverType></serverType>
<isSecondaryServer></isSecondaryServer>
<vpnName></vpnName>
</hwTacIpv6SrvCfg>
</hwTacIpv6SrvCfgs>
</hwTacTempCfg>
</hwTacTempCfgs>
</hwtacacs>
</filter>
"""
# merge hwtacacs server config ipv6
CE_MERGE_HWTACACS_SERVER_CFG_IPV6 = """
<config>
<hwtacacs xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<hwTacTempCfgs>
<hwTacTempCfg>
<templateName>%s</templateName>
<hwTacIpv6SrvCfgs>
<hwTacIpv6SrvCfg operation="merge">
<serverIpAddress>%s</serverIpAddress>
<serverType>%s</serverType>
<isSecondaryServer>%s</isSecondaryServer>
<vpnName>%s</vpnName>
</hwTacIpv6SrvCfg>
</hwTacIpv6SrvCfgs>
</hwTacTempCfg>
</hwTacTempCfgs>
</hwtacacs>
</config>
"""
# delete hwtacacs server config ipv6
CE_DELETE_HWTACACS_SERVER_CFG_IPV6 = """
<config>
<hwtacacs xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<hwTacTempCfgs>
<hwTacTempCfg>
<templateName>%s</templateName>
<hwTacIpv6SrvCfgs>
<hwTacIpv6SrvCfg operation="delete">
<serverIpAddress>%s</serverIpAddress>
<serverType>%s</serverType>
<isSecondaryServer>%s</isSecondaryServer>
<vpnName>%s</vpnName>
</hwTacIpv6SrvCfg>
</hwTacIpv6SrvCfgs>
</hwTacTempCfg>
</hwTacTempCfgs>
</hwtacacs>
</config>
"""
# get hwtacacs host server config
CE_GET_HWTACACS_HOST_SERVER_CFG = """
<filter type="subtree">
<hwtacacs xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<hwTacTempCfgs>
<hwTacTempCfg>
<templateName>%s</templateName>
<hwTacHostSrvCfgs>
<hwTacHostSrvCfg>
<serverHostName></serverHostName>
<serverType></serverType>
<isSecondaryServer></isSecondaryServer>
<vpnName></vpnName>
<isPublicNet></isPublicNet>
</hwTacHostSrvCfg>
</hwTacHostSrvCfgs>
</hwTacTempCfg>
</hwTacTempCfgs>
</hwtacacs>
</filter>
"""
# merge hwtacacs host server config
CE_MERGE_HWTACACS_HOST_SERVER_CFG = """
<config>
<hwtacacs xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<hwTacTempCfgs>
<hwTacTempCfg>
<templateName>%s</templateName>
<hwTacHostSrvCfgs>
<hwTacHostSrvCfg operation="merge">
<serverHostName>%s</serverHostName>
<serverType>%s</serverType>
<isSecondaryServer>%s</isSecondaryServer>
<vpnName>%s</vpnName>
<isPublicNet>%s</isPublicNet>
</hwTacHostSrvCfg>
</hwTacHostSrvCfgs>
</hwTacTempCfg>
</hwTacTempCfgs>
</hwtacacs>
</config>
"""
# delete hwtacacs host server config
CE_DELETE_HWTACACS_HOST_SERVER_CFG = """
<config>
<hwtacacs xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
<hwTacTempCfgs>
<hwTacTempCfg>
<templateName>%s</templateName>
<hwTacHostSrvCfgs>
<hwTacHostSrvCfg operation="delete">
<serverHostName>%s</serverHostName>
<serverType>%s</serverType>
<isSecondaryServer>%s</isSecondaryServer>
<vpnName>%s</vpnName>
<isPublicNet>%s</isPublicNet>
</hwTacHostSrvCfg>
</hwTacHostSrvCfgs>
</hwTacTempCfg>
</hwTacTempCfgs>
</hwtacacs>
</config>
"""
class AaaServerHost(object):
""" Manages aaa server host configuration """
def netconf_get_config(self, **kwargs):
""" Get configure by netconf """
module = kwargs["module"]
conf_str = kwargs["conf_str"]
xml_str = get_nc_config(module, conf_str)
return xml_str
def netconf_set_config(self, **kwargs):
""" Set configure by netconf """
module = kwargs["module"]
conf_str = kwargs["conf_str"]
recv_xml = set_nc_config(module, conf_str)
return recv_xml
def get_local_user_info(self, **kwargs):
""" Get local user information """
module = kwargs["module"]
local_user_name = module.params['local_user_name']
local_service_type = module.params['local_service_type']
local_ftp_dir = module.params['local_ftp_dir']
local_user_level = module.params['local_user_level']
local_user_group = module.params['local_user_group']
state = module.params['state']
result = dict()
result["local_user_info"] = []
need_cfg = False
conf_str = CE_GET_LOCAL_USER_INFO_HEADER
if local_service_type:
if local_service_type == "none":
conf_str += "<serviceTerminal></serviceTerminal>"
conf_str += "<serviceTelnet></serviceTelnet>"
conf_str += "<serviceFtp></serviceFtp>"
conf_str += "<serviceSsh></serviceSsh>"
conf_str += "<serviceSnmp></serviceSnmp>"
conf_str += "<serviceDot1x></serviceDot1x>"
elif local_service_type == "dot1x":
conf_str += "<serviceDot1x></serviceDot1x>"
else:
option = local_service_type.split(" ")
for tmp in option:
if tmp == "dot1x":
module.fail_json(
msg='Error: Do not input dot1x with other service type.')
elif tmp == "none":
module.fail_json(
msg='Error: Do not input none with other service type.')
elif tmp == "ftp":
conf_str += "<serviceFtp></serviceFtp>"
elif tmp == "snmp":
conf_str += "<serviceSnmp></serviceSnmp>"
elif tmp == "ssh":
conf_str += "<serviceSsh></serviceSsh>"
elif tmp == "telnet":
conf_str += "<serviceTelnet></serviceTelnet>"
elif tmp == "terminal":
conf_str += "<serviceTerminal></serviceTerminal>"
else:
module.fail_json(
                            msg='Error: Unsupported service type [%s].' % tmp)
if local_ftp_dir:
conf_str += "<ftpDir></ftpDir>"
if local_user_level:
conf_str += "<userLevel></userLevel>"
if local_user_group:
conf_str += "<userGroupName></userGroupName>"
conf_str += CE_GET_LOCAL_USER_INFO_TAIL
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
if state == "present":
need_cfg = True
else:
xml_str = recv_xml.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
local_user_info = root.findall("data/aaa/lam/users/user")
if local_user_info:
for tmp in local_user_info:
tmp_dict = dict()
for site in tmp:
if site.tag in ["userName", "password", "userLevel", "ftpDir", "userGroupName",
"serviceTerminal", "serviceTelnet", "serviceFtp", "serviceSsh",
"serviceSnmp", "serviceDot1x"]:
tmp_dict[site.tag] = site.text
result["local_user_info"].append(tmp_dict)
if state == "present":
need_cfg = True
else:
if result["local_user_info"]:
for tmp in result["local_user_info"]:
if "userName" in tmp.keys():
if tmp["userName"] == local_user_name:
if not local_service_type and not local_user_level \
and not local_ftp_dir and not local_user_group:
need_cfg = True
if local_service_type:
if local_service_type == "none":
if tmp.get("serviceTerminal") == "true" or \
tmp.get("serviceTelnet") == "true" or \
tmp.get("serviceFtp") == "true" or \
tmp.get("serviceSsh") == "true" or \
tmp.get("serviceSnmp") == "true" or \
tmp.get("serviceDot1x") == "true":
need_cfg = True
elif local_service_type == "dot1x":
if tmp.get("serviceDot1x") == "true":
need_cfg = True
elif tmp == "ftp":
if tmp.get("serviceFtp") == "true":
need_cfg = True
elif tmp == "snmp":
if tmp.get("serviceSnmp") == "true":
need_cfg = True
elif tmp == "ssh":
if tmp.get("serviceSsh") == "true":
need_cfg = True
elif tmp == "telnet":
if tmp.get("serviceTelnet") == "true":
need_cfg = True
elif tmp == "terminal":
if tmp.get("serviceTerminal") == "true":
need_cfg = True
if local_user_level:
if tmp.get("userLevel") == local_user_level:
need_cfg = True
if local_ftp_dir:
if tmp.get("ftpDir") == local_ftp_dir:
need_cfg = True
if local_user_group:
if tmp.get("userGroupName") == local_user_group:
need_cfg = True
break
result["need_cfg"] = need_cfg
return result
def merge_local_user_info(self, **kwargs):
""" Merge local user information by netconf """
module = kwargs["module"]
local_user_name = module.params['local_user_name']
local_password = module.params['local_password']
local_service_type = module.params['local_service_type']
local_ftp_dir = module.params['local_ftp_dir']
local_user_level = module.params['local_user_level']
local_user_group = module.params['local_user_group']
state = module.params['state']
cmds = []
conf_str = CE_MERGE_LOCAL_USER_INFO_HEADER % local_user_name
if local_password:
conf_str += "<password>%s</password>" % local_password
if state == "present":
cmd = "local-user %s password cipher %s" % (
local_user_name, local_password)
cmds.append(cmd)
if local_service_type:
if local_service_type == "none":
conf_str += "<serviceTerminal>false</serviceTerminal>"
conf_str += "<serviceTelnet>false</serviceTelnet>"
conf_str += "<serviceFtp>false</serviceFtp>"
conf_str += "<serviceSsh>false</serviceSsh>"
conf_str += "<serviceSnmp>false</serviceSnmp>"
conf_str += "<serviceDot1x>false</serviceDot1x>"
cmd = "local-user %s service-type none" % local_user_name
cmds.append(cmd)
elif local_service_type == "dot1x":
if state == "present":
conf_str += "<serviceDot1x>true</serviceDot1x>"
cmd = "local-user %s service-type dot1x" % local_user_name
else:
conf_str += "<serviceDot1x>false</serviceDot1x>"
cmd = "undo local-user %s service-type" % local_user_name
cmds.append(cmd)
else:
option = local_service_type.split(" ")
for tmp in option:
if tmp == "dot1x":
module.fail_json(
msg='Error: Do not input dot1x with other service type.')
if tmp == "none":
module.fail_json(
msg='Error: Do not input none with other service type.')
if state == "present":
if tmp == "ftp":
conf_str += "<serviceFtp>true</serviceFtp>"
cmd = "local-user %s service-type ftp" % local_user_name
elif tmp == "snmp":
conf_str += "<serviceSnmp>true</serviceSnmp>"
cmd = "local-user %s service-type snmp" % local_user_name
elif tmp == "ssh":
conf_str += "<serviceSsh>true</serviceSsh>"
cmd = "local-user %s service-type ssh" % local_user_name
elif tmp == "telnet":
conf_str += "<serviceTelnet>true</serviceTelnet>"
cmd = "local-user %s service-type telnet" % local_user_name
elif tmp == "terminal":
conf_str += "<serviceTerminal>true</serviceTerminal>"
cmd = "local-user %s service-type terminal" % local_user_name
cmds.append(cmd)
else:
if tmp == "ftp":
conf_str += "<serviceFtp>false</serviceFtp>"
elif tmp == "snmp":
conf_str += "<serviceSnmp>false</serviceSnmp>"
elif tmp == "ssh":
conf_str += "<serviceSsh>false</serviceSsh>"
elif tmp == "telnet":
conf_str += "<serviceTelnet>false</serviceTelnet>"
elif tmp == "terminal":
conf_str += "<serviceTerminal>false</serviceTerminal>"
if state == "absent":
cmd = "undo local-user %s service-type" % local_user_name
cmds.append(cmd)
if local_ftp_dir:
if state == "present":
conf_str += "<ftpDir>%s</ftpDir>" % local_ftp_dir
cmd = "local-user %s ftp-directory %s" % (
local_user_name, local_ftp_dir)
cmds.append(cmd)
else:
conf_str += "<ftpDir></ftpDir>"
cmd = "undo local-user %s ftp-directory" % local_user_name
cmds.append(cmd)
if local_user_level:
if state == "present":
conf_str += "<userLevel>%s</userLevel>" % local_user_level
cmd = "local-user %s level %s" % (
local_user_name, local_user_level)
cmds.append(cmd)
else:
conf_str += "<userLevel></userLevel>"
cmd = "undo local-user %s level" % local_user_name
cmds.append(cmd)
if local_user_group:
if state == "present":
conf_str += "<userGroupName>%s</userGroupName>" % local_user_group
cmd = "local-user %s user-group %s" % (
local_user_name, local_user_group)
cmds.append(cmd)
else:
conf_str += "<userGroupName></userGroupName>"
cmd = "undo local-user %s user-group" % local_user_name
cmds.append(cmd)
conf_str += CE_MERGE_LOCAL_USER_INFO_TAIL
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(msg='Error: Merge local user info failed.')
return cmds
def delete_local_user_info(self, **kwargs):
""" Delete local user information by netconf """
module = kwargs["module"]
local_user_name = module.params['local_user_name']
conf_str = CE_DELETE_LOCAL_USER_INFO_HEADER % local_user_name
conf_str += CE_DELETE_LOCAL_USER_INFO_TAIL
cmds = []
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(msg='Error: Delete local user info failed.')
cmd = "undo local-user %s" % local_user_name
cmds.append(cmd)
return cmds
def get_radius_server_cfg_ipv4(self, **kwargs):
""" Get radius server configure ipv4 """
module = kwargs["module"]
radius_group_name = module.params['radius_group_name']
radius_server_type = module.params['radius_server_type']
radius_server_ip = module.params['radius_server_ip']
radius_server_port = module.params['radius_server_port']
radius_server_mode = module.params['radius_server_mode']
radius_vpn_name = module.params['radius_vpn_name']
state = module.params['state']
result = dict()
result["radius_server_ip_v4"] = []
need_cfg = False
conf_str = CE_GET_RADIUS_SERVER_CFG_IPV4 % radius_group_name
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
if state == "present":
need_cfg = True
else:
xml_str = recv_xml.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
radius_server_ip_v4 = root.findall(
"data/radius/rdsTemplates/rdsTemplate/rdsServerIPV4s/rdsServerIPV4")
if radius_server_ip_v4:
for tmp in radius_server_ip_v4:
tmp_dict = dict()
for site in tmp:
if site.tag in ["serverType", "serverIPAddress", "serverPort", "serverMode", "vpnName"]:
tmp_dict[site.tag] = site.text
result["radius_server_ip_v4"].append(tmp_dict)
if result["radius_server_ip_v4"]:
for tmp in result["radius_server_ip_v4"]:
if "serverType" in tmp.keys():
if state == "present":
if tmp["serverType"] != radius_server_type:
need_cfg = True
else:
if tmp["serverType"] == radius_server_type:
need_cfg = True
if "serverIPAddress" in tmp.keys():
if state == "present":
if tmp["serverIPAddress"] != radius_server_ip:
need_cfg = True
else:
if tmp["serverIPAddress"] == radius_server_ip:
need_cfg = True
if "serverPort" in tmp.keys():
if state == "present":
if tmp["serverPort"] != radius_server_port:
need_cfg = True
else:
if tmp["serverPort"] == radius_server_port:
need_cfg = True
if "serverMode" in tmp.keys():
if state == "present":
if tmp["serverMode"] != radius_server_mode:
need_cfg = True
else:
if tmp["serverMode"] == radius_server_mode:
need_cfg = True
if "vpnName" in tmp.keys():
if state == "present":
if tmp["vpnName"] != radius_vpn_name:
need_cfg = True
else:
if tmp["vpnName"] == radius_vpn_name:
need_cfg = True
result["need_cfg"] = need_cfg
return result
def merge_radius_server_cfg_ipv4(self, **kwargs):
""" Merge radius server configure ipv4 """
module = kwargs["module"]
radius_group_name = module.params['radius_group_name']
radius_server_type = module.params['radius_server_type']
radius_server_ip = module.params['radius_server_ip']
radius_server_port = module.params['radius_server_port']
radius_server_mode = module.params['radius_server_mode']
radius_vpn_name = module.params['radius_vpn_name']
conf_str = CE_MERGE_RADIUS_SERVER_CFG_IPV4 % (
radius_group_name, radius_server_type,
radius_server_ip, radius_server_port,
radius_server_mode, radius_vpn_name)
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(
msg='Error: Merge radius server config ipv4 failed.')
cmds = []
cmd = "radius server group %s" % radius_group_name
cmds.append(cmd)
if radius_server_type == "Authentication":
cmd = "radius server authentication %s %s" % (
radius_server_ip, radius_server_port)
if radius_vpn_name and radius_vpn_name != "_public_":
cmd += " vpn-instance %s" % radius_vpn_name
if radius_server_mode == "Secondary-server":
cmd += " secondary"
else:
cmd = "radius server accounting %s %s" % (
radius_server_ip, radius_server_port)
if radius_vpn_name and radius_vpn_name != "_public_":
cmd += " vpn-instance %s" % radius_vpn_name
if radius_server_mode == "Secondary-server":
cmd += " secondary"
cmds.append(cmd)
return cmds
def delete_radius_server_cfg_ipv4(self, **kwargs):
""" Delete radius server configure ipv4 """
module = kwargs["module"]
radius_group_name = module.params['radius_group_name']
radius_server_type = module.params['radius_server_type']
radius_server_ip = module.params['radius_server_ip']
radius_server_port = module.params['radius_server_port']
radius_server_mode = module.params['radius_server_mode']
radius_vpn_name = module.params['radius_vpn_name']
conf_str = CE_DELETE_RADIUS_SERVER_CFG_IPV4 % (
radius_group_name, radius_server_type,
radius_server_ip, radius_server_port,
radius_server_mode, radius_vpn_name)
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(
                msg='Error: Delete radius server config ipv4 failed.')
cmds = []
cmd = "radius server group %s" % radius_group_name
cmds.append(cmd)
if radius_server_type == "Authentication":
cmd = "undo radius server authentication %s %s" % (
radius_server_ip, radius_server_port)
if radius_vpn_name and radius_vpn_name != "_public_":
cmd += " vpn-instance %s" % radius_vpn_name
if radius_server_mode == "Secondary-server":
cmd += " secondary"
else:
cmd = "undo radius server accounting %s %s" % (
radius_server_ip, radius_server_port)
if radius_vpn_name and radius_vpn_name != "_public_":
cmd += " vpn-instance %s" % radius_vpn_name
if radius_server_mode == "Secondary-server":
cmd += " secondary"
cmds.append(cmd)
return cmds
def get_radius_server_cfg_ipv6(self, **kwargs):
""" Get radius server configure ipv6 """
module = kwargs["module"]
radius_group_name = module.params['radius_group_name']
radius_server_type = module.params['radius_server_type']
radius_server_ipv6 = module.params['radius_server_ipv6']
radius_server_port = module.params['radius_server_port']
radius_server_mode = module.params['radius_server_mode']
state = module.params['state']
result = dict()
result["radius_server_ip_v6"] = []
need_cfg = False
conf_str = CE_GET_RADIUS_SERVER_CFG_IPV6 % radius_group_name
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
if state == "present":
need_cfg = True
else:
xml_str = recv_xml.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
radius_server_ip_v6 = root.findall(
"data/radius/rdsTemplates/rdsTemplate/rdsServerIPV6s/rdsServerIPV6")
if radius_server_ip_v6:
for tmp in radius_server_ip_v6:
tmp_dict = dict()
for site in tmp:
if site.tag in ["serverType", "serverIPAddress", "serverPort", "serverMode"]:
tmp_dict[site.tag] = site.text
result["radius_server_ip_v6"].append(tmp_dict)
if result["radius_server_ip_v6"]:
for tmp in result["radius_server_ip_v6"]:
if "serverType" in tmp.keys():
if state == "present":
if tmp["serverType"] != radius_server_type:
need_cfg = True
else:
if tmp["serverType"] == radius_server_type:
need_cfg = True
if "serverIPAddress" in tmp.keys():
if state == "present":
if tmp["serverIPAddress"] != radius_server_ipv6:
need_cfg = True
else:
if tmp["serverIPAddress"] == radius_server_ipv6:
need_cfg = True
if "serverPort" in tmp.keys():
if state == "present":
if tmp["serverPort"] != radius_server_port:
need_cfg = True
else:
if tmp["serverPort"] == radius_server_port:
need_cfg = True
if "serverMode" in tmp.keys():
if state == "present":
if tmp["serverMode"] != radius_server_mode:
need_cfg = True
else:
if tmp["serverMode"] == radius_server_mode:
need_cfg = True
result["need_cfg"] = need_cfg
return result
def merge_radius_server_cfg_ipv6(self, **kwargs):
""" Merge radius server configure ipv6 """
module = kwargs["module"]
radius_group_name = module.params['radius_group_name']
radius_server_type = module.params['radius_server_type']
radius_server_ipv6 = module.params['radius_server_ipv6']
radius_server_port = module.params['radius_server_port']
radius_server_mode = module.params['radius_server_mode']
conf_str = CE_MERGE_RADIUS_SERVER_CFG_IPV6 % (
radius_group_name, radius_server_type,
radius_server_ipv6, radius_server_port,
radius_server_mode)
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(
msg='Error: Merge radius server config ipv6 failed.')
cmds = []
cmd = "radius server group %s" % radius_group_name
cmds.append(cmd)
if radius_server_type == "Authentication":
cmd = "radius server authentication %s %s" % (
radius_server_ipv6, radius_server_port)
if radius_server_mode == "Secondary-server":
cmd += " secondary"
else:
cmd = "radius server accounting %s %s" % (
radius_server_ipv6, radius_server_port)
if radius_server_mode == "Secondary-server":
cmd += " secondary"
cmds.append(cmd)
return cmds
def delete_radius_server_cfg_ipv6(self, **kwargs):
""" Delete radius server configure ipv6 """
module = kwargs["module"]
radius_group_name = module.params['radius_group_name']
radius_server_type = module.params['radius_server_type']
radius_server_ipv6 = module.params['radius_server_ipv6']
radius_server_port = module.params['radius_server_port']
radius_server_mode = module.params['radius_server_mode']
conf_str = CE_DELETE_RADIUS_SERVER_CFG_IPV6 % (
radius_group_name, radius_server_type,
radius_server_ipv6, radius_server_port,
radius_server_mode)
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
            module.fail_json(
                msg='Error: Delete radius server config ipv6 failed.')
cmds = []
cmd = "radius server group %s" % radius_group_name
cmds.append(cmd)
if radius_server_type == "Authentication":
cmd = "undo radius server authentication %s %s" % (
radius_server_ipv6, radius_server_port)
if radius_server_mode == "Secondary-server":
cmd += " secondary"
else:
cmd = "undo radius server accounting %s %s" % (
radius_server_ipv6, radius_server_port)
if radius_server_mode == "Secondary-server":
cmd += " secondary"
cmds.append(cmd)
return cmds
def get_radius_server_name(self, **kwargs):
""" Get radius server name """
module = kwargs["module"]
radius_group_name = module.params['radius_group_name']
radius_server_type = module.params['radius_server_type']
radius_server_name = module.params['radius_server_name']
radius_server_port = module.params['radius_server_port']
radius_server_mode = module.params['radius_server_mode']
radius_vpn_name = module.params['radius_vpn_name']
state = module.params['state']
result = dict()
result["radius_server_name_cfg"] = []
need_cfg = False
conf_str = CE_GET_RADIUS_SERVER_NAME % radius_group_name
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
if state == "present":
need_cfg = True
else:
xml_str = recv_xml.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
radius_server_name_cfg = root.findall(
"data/radius/rdsTemplates/rdsTemplate/rdsServerNames/rdsServerName")
if radius_server_name_cfg:
for tmp in radius_server_name_cfg:
tmp_dict = dict()
for site in tmp:
if site.tag in ["serverType", "serverName", "serverPort", "serverMode", "vpnName"]:
tmp_dict[site.tag] = site.text
result["radius_server_name_cfg"].append(tmp_dict)
if result["radius_server_name_cfg"]:
for tmp in result["radius_server_name_cfg"]:
if "serverType" in tmp.keys():
if state == "present":
if tmp["serverType"] != radius_server_type:
need_cfg = True
else:
if tmp["serverType"] == radius_server_type:
need_cfg = True
if "serverName" in tmp.keys():
if state == "present":
if tmp["serverName"] != radius_server_name:
need_cfg = True
else:
if tmp["serverName"] == radius_server_name:
need_cfg = True
if "serverPort" in tmp.keys():
if state == "present":
if tmp["serverPort"] != radius_server_port:
need_cfg = True
else:
if tmp["serverPort"] == radius_server_port:
need_cfg = True
if "serverMode" in tmp.keys():
if state == "present":
if tmp["serverMode"] != radius_server_mode:
need_cfg = True
else:
if tmp["serverMode"] == radius_server_mode:
need_cfg = True
if "vpnName" in tmp.keys():
if state == "present":
if tmp["vpnName"] != radius_vpn_name:
need_cfg = True
else:
if tmp["vpnName"] == radius_vpn_name:
need_cfg = True
result["need_cfg"] = need_cfg
return result
def merge_radius_server_name(self, **kwargs):
""" Merge radius server name """
module = kwargs["module"]
radius_group_name = module.params['radius_group_name']
radius_server_type = module.params['radius_server_type']
radius_server_name = module.params['radius_server_name']
radius_server_port = module.params['radius_server_port']
radius_server_mode = module.params['radius_server_mode']
radius_vpn_name = module.params['radius_vpn_name']
conf_str = CE_MERGE_RADIUS_SERVER_NAME % (
radius_group_name, radius_server_type,
radius_server_name, radius_server_port,
radius_server_mode, radius_vpn_name)
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(msg='Error: Merge radius server name failed.')
cmds = []
cmd = "radius server group %s" % radius_group_name
cmds.append(cmd)
if radius_server_type == "Authentication":
cmd = "radius server authentication hostname %s %s" % (
radius_server_name, radius_server_port)
if radius_vpn_name and radius_vpn_name != "_public_":
cmd += " vpn-instance %s" % radius_vpn_name
if radius_server_mode == "Secondary-server":
cmd += " secondary"
else:
cmd = "radius server accounting hostname %s %s" % (
radius_server_name, radius_server_port)
if radius_vpn_name and radius_vpn_name != "_public_":
cmd += " vpn-instance %s" % radius_vpn_name
if radius_server_mode == "Secondary-server":
cmd += " secondary"
cmds.append(cmd)
return cmds
def delete_radius_server_name(self, **kwargs):
""" Delete radius server name """
module = kwargs["module"]
radius_group_name = module.params['radius_group_name']
radius_server_type = module.params['radius_server_type']
radius_server_name = module.params['radius_server_name']
radius_server_port = module.params['radius_server_port']
radius_server_mode = module.params['radius_server_mode']
radius_vpn_name = module.params['radius_vpn_name']
conf_str = CE_DELETE_RADIUS_SERVER_NAME % (
radius_group_name, radius_server_type,
radius_server_name, radius_server_port,
radius_server_mode, radius_vpn_name)
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
            module.fail_json(msg='Error: Delete radius server name failed.')
cmds = []
cmd = "radius server group %s" % radius_group_name
cmds.append(cmd)
if radius_server_type == "Authentication":
cmd = "undo radius server authentication hostname %s %s" % (
radius_server_name, radius_server_port)
if radius_vpn_name and radius_vpn_name != "_public_":
cmd += " vpn-instance %s" % radius_vpn_name
if radius_server_mode == "Secondary-server":
cmd += " secondary"
else:
cmd = "undo radius server accounting hostname %s %s" % (
radius_server_name, radius_server_port)
if radius_vpn_name and radius_vpn_name != "_public_":
cmd += " vpn-instance %s" % radius_vpn_name
if radius_server_mode == "Secondary-server":
cmd += " secondary"
cmds.append(cmd)
return cmds
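    # Editorial note (sketch, not original source): in the get_* methods
    # above, need_cfg is computed by comparing each field retrieved over
    # NETCONF against the module parameters -- for state == "present" any
    # mismatch requests a merge, while for state == "absent" any match
    # requests a delete.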
def get_hwtacacs_server_cfg_ipv4(self, **kwargs):
""" Get hwtacacs server configure ipv4 """
module = kwargs["module"]
hwtacacs_template = module.params["hwtacacs_template"]
hwtacacs_server_ip = module.params["hwtacacs_server_ip"]
hwtacacs_server_type = module.params["hwtacacs_server_type"]
hwtacacs_is_secondary_server = module.params[
"hwtacacs_is_secondary_server"]
hwtacacs_vpn_name = module.params["hwtacacs_vpn_name"]
hwtacacs_is_public_net = module.params["hwtacacs_is_public_net"]
state = module.params["state"]
result = dict()
result["hwtacacs_server_cfg_ipv4"] = []
need_cfg = False
conf_str = CE_GET_HWTACACS_SERVER_CFG_IPV4 % hwtacacs_template
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
if state == "present":
need_cfg = True
else:
xml_str = recv_xml.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
hwtacacs_server_cfg_ipv4 = root.findall(
"data/hwtacacs/hwTacTempCfgs/hwTacTempCfg/hwTacSrvCfgs/hwTacSrvCfg")
if hwtacacs_server_cfg_ipv4:
for tmp in hwtacacs_server_cfg_ipv4:
tmp_dict = dict()
for site in tmp:
if site.tag in ["serverIpAddress", "serverType", "isSecondaryServer", "isPublicNet", "vpnName"]:
tmp_dict[site.tag] = site.text
result["hwtacacs_server_cfg_ipv4"].append(tmp_dict)
if result["hwtacacs_server_cfg_ipv4"]:
for tmp in result["hwtacacs_server_cfg_ipv4"]:
if "serverIpAddress" in tmp.keys():
if state == "present":
if tmp["serverIpAddress"] != hwtacacs_server_ip:
need_cfg = True
else:
if tmp["serverIpAddress"] == hwtacacs_server_ip:
need_cfg = True
if "serverType" in tmp.keys():
if state == "present":
if tmp["serverType"] != hwtacacs_server_type:
need_cfg = True
else:
if tmp["serverType"] == hwtacacs_server_type:
need_cfg = True
if "isSecondaryServer" in tmp.keys():
if state == "present":
if tmp["isSecondaryServer"] != str(hwtacacs_is_secondary_server).lower():
need_cfg = True
else:
if tmp["isSecondaryServer"] == str(hwtacacs_is_secondary_server).lower():
need_cfg = True
if "isPublicNet" in tmp.keys():
if state == "present":
if tmp["isPublicNet"] != str(hwtacacs_is_public_net).lower():
need_cfg = True
else:
if tmp["isPublicNet"] == str(hwtacacs_is_public_net).lower():
need_cfg = True
if "vpnName" in tmp.keys():
if state == "present":
if tmp["vpnName"] != hwtacacs_vpn_name:
need_cfg = True
else:
if tmp["vpnName"] == hwtacacs_vpn_name:
need_cfg = True
result["need_cfg"] = need_cfg
return result
def merge_hwtacacs_server_cfg_ipv4(self, **kwargs):
""" Merge hwtacacs server configure ipv4 """
module = kwargs["module"]
hwtacacs_template = module.params["hwtacacs_template"]
hwtacacs_server_ip = module.params["hwtacacs_server_ip"]
hwtacacs_server_type = module.params["hwtacacs_server_type"]
hwtacacs_is_secondary_server = module.params[
"hwtacacs_is_secondary_server"]
hwtacacs_vpn_name = module.params["hwtacacs_vpn_name"]
hwtacacs_is_public_net = module.params["hwtacacs_is_public_net"]
conf_str = CE_MERGE_HWTACACS_SERVER_CFG_IPV4 % (
hwtacacs_template, hwtacacs_server_ip,
hwtacacs_server_type, str(hwtacacs_is_secondary_server).lower(),
hwtacacs_vpn_name, str(hwtacacs_is_public_net).lower())
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(
msg='Error: Merge hwtacacs server config ipv4 failed.')
cmds = []
cmd = "hwtacacs server template %s" % hwtacacs_template
cmds.append(cmd)
if hwtacacs_server_type == "Authentication":
cmd = "hwtacacs server authentication %s" % hwtacacs_server_ip
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_public_net:
cmd += " public-net"
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Authorization":
cmd = "hwtacacs server authorization %s" % hwtacacs_server_ip
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_public_net:
cmd += " public-net"
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Accounting":
cmd = "hwtacacs server accounting %s" % hwtacacs_server_ip
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_public_net:
cmd += " public-net"
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Common":
cmd = "hwtacacs server %s" % hwtacacs_server_ip
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_public_net:
cmd += " public-net"
if hwtacacs_is_secondary_server:
cmd += " secondary"
cmds.append(cmd)
return cmds
def delete_hwtacacs_server_cfg_ipv4(self, **kwargs):
""" Delete hwtacacs server configure ipv4 """
module = kwargs["module"]
hwtacacs_template = module.params["hwtacacs_template"]
hwtacacs_server_ip = module.params["hwtacacs_server_ip"]
hwtacacs_server_type = module.params["hwtacacs_server_type"]
hwtacacs_is_secondary_server = module.params[
"hwtacacs_is_secondary_server"]
hwtacacs_vpn_name = module.params["hwtacacs_vpn_name"]
hwtacacs_is_public_net = module.params["hwtacacs_is_public_net"]
conf_str = CE_DELETE_HWTACACS_SERVER_CFG_IPV4 % (
hwtacacs_template, hwtacacs_server_ip,
hwtacacs_server_type, str(hwtacacs_is_secondary_server).lower(),
hwtacacs_vpn_name, str(hwtacacs_is_public_net).lower())
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(
msg='Error: Delete hwtacacs server config ipv4 failed.')
cmds = []
cmd = "hwtacacs server template %s" % hwtacacs_template
cmds.append(cmd)
if hwtacacs_server_type == "Authentication":
cmd = "undo hwtacacs server authentication %s" % hwtacacs_server_ip
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_public_net:
cmd += " public-net"
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Authorization":
cmd = "undo hwtacacs server authorization %s" % hwtacacs_server_ip
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_public_net:
cmd += " public-net"
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Accounting":
cmd = "undo hwtacacs server accounting %s" % hwtacacs_server_ip
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_public_net:
cmd += " public-net"
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Common":
cmd = "undo hwtacacs server %s" % hwtacacs_server_ip
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_public_net:
cmd += " public-net"
if hwtacacs_is_secondary_server:
cmd += " secondary"
cmds.append(cmd)
return cmds
def get_hwtacacs_server_cfg_ipv6(self, **kwargs):
""" Get hwtacacs server configure ipv6 """
module = kwargs["module"]
hwtacacs_template = module.params["hwtacacs_template"]
hwtacacs_server_ipv6 = module.params["hwtacacs_server_ipv6"]
hwtacacs_server_type = module.params["hwtacacs_server_type"]
hwtacacs_is_secondary_server = module.params[
"hwtacacs_is_secondary_server"]
hwtacacs_vpn_name = module.params["hwtacacs_vpn_name"]
state = module.params["state"]
result = dict()
result["hwtacacs_server_cfg_ipv6"] = []
need_cfg = False
conf_str = CE_GET_HWTACACS_SERVER_CFG_IPV6 % hwtacacs_template
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
if state == "present":
need_cfg = True
else:
xml_str = recv_xml.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
hwtacacs_server_cfg_ipv6 = root.findall(
"data/hwtacacs/hwTacTempCfgs/hwTacTempCfg/hwTacIpv6SrvCfgs/hwTacIpv6SrvCfg")
if hwtacacs_server_cfg_ipv6:
for tmp in hwtacacs_server_cfg_ipv6:
tmp_dict = dict()
for site in tmp:
if site.tag in ["serverIpAddress", "serverType", "isSecondaryServer", "vpnName"]:
tmp_dict[site.tag] = site.text
result["hwtacacs_server_cfg_ipv6"].append(tmp_dict)
if result["hwtacacs_server_cfg_ipv6"]:
for tmp in result["hwtacacs_server_cfg_ipv6"]:
if "serverIpAddress" in tmp.keys():
if state == "present":
if tmp["serverIpAddress"] != hwtacacs_server_ipv6:
need_cfg = True
else:
if tmp["serverIpAddress"] == hwtacacs_server_ipv6:
need_cfg = True
if "serverType" in tmp.keys():
if state == "present":
if tmp["serverType"] != hwtacacs_server_type:
need_cfg = True
else:
if tmp["serverType"] == hwtacacs_server_type:
need_cfg = True
if "isSecondaryServer" in tmp.keys():
if state == "present":
if tmp["isSecondaryServer"] != str(hwtacacs_is_secondary_server).lower():
need_cfg = True
else:
if tmp["isSecondaryServer"] == str(hwtacacs_is_secondary_server).lower():
need_cfg = True
if "vpnName" in tmp.keys():
if state == "present":
if tmp["vpnName"] != hwtacacs_vpn_name:
need_cfg = True
else:
if tmp["vpnName"] == hwtacacs_vpn_name:
need_cfg = True
result["need_cfg"] = need_cfg
return result
def merge_hwtacacs_server_cfg_ipv6(self, **kwargs):
""" Merge hwtacacs server configure ipv6 """
module = kwargs["module"]
hwtacacs_template = module.params["hwtacacs_template"]
hwtacacs_server_ipv6 = module.params["hwtacacs_server_ipv6"]
hwtacacs_server_type = module.params["hwtacacs_server_type"]
hwtacacs_is_secondary_server = module.params[
"hwtacacs_is_secondary_server"]
hwtacacs_vpn_name = module.params["hwtacacs_vpn_name"]
conf_str = CE_MERGE_HWTACACS_SERVER_CFG_IPV6 % (
hwtacacs_template, hwtacacs_server_ipv6,
hwtacacs_server_type, str(hwtacacs_is_secondary_server).lower(),
hwtacacs_vpn_name)
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(
msg='Error: Merge hwtacacs server config ipv6 failed.')
cmds = []
cmd = "hwtacacs server template %s" % hwtacacs_template
cmds.append(cmd)
if hwtacacs_server_type == "Authentication":
cmd = "hwtacacs server authentication %s" % hwtacacs_server_ipv6
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Authorization":
cmd = "hwtacacs server authorization %s" % hwtacacs_server_ipv6
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Accounting":
cmd = "hwtacacs server accounting %s" % hwtacacs_server_ipv6
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Common":
cmd = "hwtacacs server %s" % hwtacacs_server_ipv6
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_secondary_server:
cmd += " secondary"
cmds.append(cmd)
return cmds
def delete_hwtacacs_server_cfg_ipv6(self, **kwargs):
""" Delete hwtacacs server configure ipv6 """
module = kwargs["module"]
hwtacacs_template = module.params["hwtacacs_template"]
hwtacacs_server_ipv6 = module.params["hwtacacs_server_ipv6"]
hwtacacs_server_type = module.params["hwtacacs_server_type"]
hwtacacs_is_secondary_server = module.params[
"hwtacacs_is_secondary_server"]
hwtacacs_vpn_name = module.params["hwtacacs_vpn_name"]
conf_str = CE_DELETE_HWTACACS_SERVER_CFG_IPV6 % (
hwtacacs_template, hwtacacs_server_ipv6,
hwtacacs_server_type, str(hwtacacs_is_secondary_server).lower(),
hwtacacs_vpn_name)
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(
msg='Error: Delete hwtacacs server config ipv6 failed.')
cmds = []
cmd = "hwtacacs server template %s" % hwtacacs_template
cmds.append(cmd)
if hwtacacs_server_type == "Authentication":
cmd = "undo hwtacacs server authentication %s" % hwtacacs_server_ipv6
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Authorization":
cmd = "undo hwtacacs server authorization %s" % hwtacacs_server_ipv6
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Accounting":
cmd = "undo hwtacacs server accounting %s" % hwtacacs_server_ipv6
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Common":
cmd = "undo hwtacacs server %s" % hwtacacs_server_ipv6
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_secondary_server:
cmd += " secondary"
cmds.append(cmd)
return cmds
def get_hwtacacs_host_server_cfg(self, **kwargs):
""" Get hwtacacs host server configure """
module = kwargs["module"]
hwtacacs_template = module.params["hwtacacs_template"]
hwtacacs_server_host_name = module.params["hwtacacs_server_host_name"]
hwtacacs_server_type = module.params["hwtacacs_server_type"]
hwtacacs_is_secondary_server = module.params[
"hwtacacs_is_secondary_server"]
hwtacacs_vpn_name = module.params["hwtacacs_vpn_name"]
hwtacacs_is_public_net = module.params["hwtacacs_is_public_net"]
state = module.params["state"]
result = dict()
result["hwtacacs_server_name_cfg"] = []
need_cfg = False
conf_str = CE_GET_HWTACACS_HOST_SERVER_CFG % hwtacacs_template
recv_xml = self.netconf_get_config(module=module, conf_str=conf_str)
if "<data/>" in recv_xml:
if state == "present":
need_cfg = True
else:
xml_str = recv_xml.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
hwtacacs_server_name_cfg = root.findall(
"data/hwtacacs/hwTacTempCfgs/hwTacTempCfg/hwTacHostSrvCfgs/hwTacHostSrvCfg")
if hwtacacs_server_name_cfg:
for tmp in hwtacacs_server_name_cfg:
tmp_dict = dict()
for site in tmp:
if site.tag in ["serverHostName", "serverType", "isSecondaryServer", "isPublicNet", "vpnName"]:
tmp_dict[site.tag] = site.text
result["hwtacacs_server_name_cfg"].append(tmp_dict)
if result["hwtacacs_server_name_cfg"]:
for tmp in result["hwtacacs_server_name_cfg"]:
if "serverHostName" in tmp.keys():
if state == "present":
if tmp["serverHostName"] != hwtacacs_server_host_name:
need_cfg = True
else:
if tmp["serverHostName"] == hwtacacs_server_host_name:
need_cfg = True
if "serverType" in tmp.keys():
if state == "present":
if tmp["serverType"] != hwtacacs_server_type:
need_cfg = True
else:
if tmp["serverType"] == hwtacacs_server_type:
need_cfg = True
if "isSecondaryServer" in tmp.keys():
if state == "present":
if tmp["isSecondaryServer"] != str(hwtacacs_is_secondary_server).lower():
need_cfg = True
else:
if tmp["isSecondaryServer"] == str(hwtacacs_is_secondary_server).lower():
need_cfg = True
if "isPublicNet" in tmp.keys():
if state == "present":
if tmp["isPublicNet"] != str(hwtacacs_is_public_net).lower():
need_cfg = True
else:
if tmp["isPublicNet"] == str(hwtacacs_is_public_net).lower():
need_cfg = True
if "vpnName" in tmp.keys():
if state == "present":
if tmp["vpnName"] != hwtacacs_vpn_name:
need_cfg = True
else:
if tmp["vpnName"] == hwtacacs_vpn_name:
need_cfg = True
result["need_cfg"] = need_cfg
return result
def merge_hwtacacs_host_server_cfg(self, **kwargs):
""" Merge hwtacacs host server configure """
module = kwargs["module"]
hwtacacs_template = module.params["hwtacacs_template"]
hwtacacs_server_host_name = module.params["hwtacacs_server_host_name"]
hwtacacs_server_type = module.params["hwtacacs_server_type"]
hwtacacs_is_secondary_server = module.params[
"hwtacacs_is_secondary_server"]
hwtacacs_vpn_name = module.params["hwtacacs_vpn_name"]
hwtacacs_is_public_net = module.params["hwtacacs_is_public_net"]
conf_str = CE_MERGE_HWTACACS_HOST_SERVER_CFG % (
hwtacacs_template, hwtacacs_server_host_name,
hwtacacs_server_type, str(hwtacacs_is_secondary_server).lower(),
hwtacacs_vpn_name, str(hwtacacs_is_public_net).lower())
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(
msg='Error: Merge hwtacacs host server config failed.')
cmds = []
if hwtacacs_server_type == "Authentication":
cmd = "hwtacacs server authentication host host-name %s" % hwtacacs_server_host_name
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_public_net:
cmd += " public-net"
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Authorization":
cmd = "hwtacacs server authorization host host-name %s" % hwtacacs_server_host_name
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_public_net:
cmd += " public-net"
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Accounting":
cmd = "hwtacacs server accounting host host-name %s" % hwtacacs_server_host_name
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_public_net:
cmd += " public-net"
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Common":
cmd = "hwtacacs server host host-name %s" % hwtacacs_server_host_name
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_public_net:
cmd += " public-net"
if hwtacacs_is_secondary_server:
cmd += " secondary"
cmds.append(cmd)
return cmds
def delete_hwtacacs_host_server_cfg(self, **kwargs):
""" Delete hwtacacs host server configure """
module = kwargs["module"]
hwtacacs_template = module.params["hwtacacs_template"]
hwtacacs_server_host_name = module.params["hwtacacs_server_host_name"]
hwtacacs_server_type = module.params["hwtacacs_server_type"]
hwtacacs_is_secondary_server = module.params[
"hwtacacs_is_secondary_server"]
hwtacacs_vpn_name = module.params["hwtacacs_vpn_name"]
hwtacacs_is_public_net = module.params["hwtacacs_is_public_net"]
conf_str = CE_DELETE_HWTACACS_HOST_SERVER_CFG % (
hwtacacs_template, hwtacacs_server_host_name,
hwtacacs_server_type, str(hwtacacs_is_secondary_server).lower(),
hwtacacs_vpn_name, str(hwtacacs_is_public_net).lower())
recv_xml = self.netconf_set_config(module=module, conf_str=conf_str)
if "<ok/>" not in recv_xml:
module.fail_json(
msg='Error: Delete hwtacacs host server config failed.')
cmds = []
if hwtacacs_server_type == "Authentication":
cmd = "undo hwtacacs server authentication host host-name %s" % hwtacacs_server_host_name
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_public_net:
cmd += " public-net"
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Authorization":
cmd = "undo hwtacacs server authorization host host-name %s" % hwtacacs_server_host_name
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_public_net:
cmd += " public-net"
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Accounting":
cmd = "undo hwtacacs server accounting host host-name %s" % hwtacacs_server_host_name
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_public_net:
cmd += " public-net"
if hwtacacs_is_secondary_server:
cmd += " secondary"
elif hwtacacs_server_type == "Common":
cmd = "undo hwtacacs server host host-name %s" % hwtacacs_server_host_name
if hwtacacs_vpn_name and hwtacacs_vpn_name != "_public_":
cmd += " vpn-instance %s" % hwtacacs_vpn_name
if hwtacacs_is_public_net:
cmd += " public-net"
if hwtacacs_is_secondary_server:
cmd += " secondary"
cmds.append(cmd)
return cmds
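# Illustrative driver sketch (not part of the original module; main() below
# performs the real orchestration). Each get/merge/delete trio in the class
# above is intended to be chained like this, assuming `host` is an
# AaaServerHost instance and `module` an AnsibleModule:
#
#     result = host.get_hwtacacs_host_server_cfg(module=module)
#     if result["need_cfg"]:
#         updates.append(host.merge_hwtacacs_host_server_cfg(module=module))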
def check_name(**kwargs):
""" Check invalid name """
module = kwargs["module"]
name = kwargs["name"]
invalid_char = kwargs["invalid_char"]
for item in invalid_char:
if item in name:
            module.fail_json(
                msg='Error: Invalid character %s in the name %s.' % (item, name))
def check_module_argument(**kwargs):
""" Check module argument """
module = kwargs["module"]
# local para
local_user_name = module.params['local_user_name']
local_password = module.params['local_password']
local_ftp_dir = module.params['local_ftp_dir']
local_user_level = module.params['local_user_level']
local_user_group = module.params['local_user_group']
# radius para
radius_group_name = module.params['radius_group_name']
radius_server_ip = module.params['radius_server_ip']
radius_server_port = module.params['radius_server_port']
radius_vpn_name = module.params['radius_vpn_name']
radius_server_name = module.params['radius_server_name']
# hwtacacs para
hwtacacs_template = module.params['hwtacacs_template']
hwtacacs_server_ip = module.params['hwtacacs_server_ip']
hwtacacs_vpn_name = module.params['hwtacacs_vpn_name']
hwtacacs_server_host_name = module.params['hwtacacs_server_host_name']
    if local_user_name:
        if len(local_user_name) > 253:
            module.fail_json(
                msg='Error: The local_user_name %s is longer than 253 characters.' % local_user_name)
        check_name(module=module, name=local_user_name,
                   invalid_char=INVALID_USER_NAME_CHAR)
    if local_password and len(local_password) > 255:
        module.fail_json(
            msg='Error: The local_password is longer than 255 characters.')
    if local_user_level:
        if int(local_user_level) > 15 or int(local_user_level) < 0:
            module.fail_json(
                msg='Error: The local_user_level %s is out of [0 - 15].' % local_user_level)
    if local_ftp_dir:
        if len(local_ftp_dir) > 255:
            module.fail_json(
                msg='Error: The local_ftp_dir %s is longer than 255 characters.' % local_ftp_dir)
    if local_user_group:
        if len(local_user_group) > 32 or len(local_user_group) < 1:
            module.fail_json(
                msg='Error: The length of local_user_group %s is out of [1 - 32].' % local_user_group)
    if radius_group_name and len(radius_group_name) > 32:
        module.fail_json(
            msg='Error: The radius_group_name %s is longer than 32 characters.' % radius_group_name)
    if radius_server_ip and not check_ip_addr(radius_server_ip):
        module.fail_json(
            msg='Error: The radius_server_ip %s is invalid.' % radius_server_ip)
    if radius_server_port and not radius_server_port.isdigit():
        module.fail_json(
            msg='Error: The radius_server_port %s is invalid.' % radius_server_port)
    if radius_vpn_name:
        if len(radius_vpn_name) > 31:
            module.fail_json(
                msg='Error: The radius_vpn_name %s is longer than 31 characters.' % radius_vpn_name)
        if ' ' in radius_vpn_name:
            module.fail_json(
                msg='Error: The radius_vpn_name %s includes a space.' % radius_vpn_name)
    if radius_server_name:
        if len(radius_server_name) > 255:
            module.fail_json(
                msg='Error: The radius_server_name %s is longer than 255 characters.' % radius_server_name)
        if ' ' in radius_server_name:
            module.fail_json(
                msg='Error: The radius_server_name %s includes a space.' % radius_server_name)
    if hwtacacs_template and len(hwtacacs_template) > 32:
        module.fail_json(
            msg='Error: The hwtacacs_template %s is longer than 32 characters.' % hwtacacs_template)
    if hwtacacs_server_ip and not check_ip_addr(hwtacacs_server_ip):
        module.fail_json(
            msg='Error: The hwtacacs_server_ip %s is invalid.' % hwtacacs_server_ip)
    if hwtacacs_vpn_name:
        if len(hwtacacs_vpn_name) > 31:
            module.fail_json(
                msg='Error: The hwtacacs_vpn_name %s is longer than 31 characters.' % hwtacacs_vpn_name)
        if ' ' in hwtacacs_vpn_name:
            module.fail_json(
                msg='Error: The hwtacacs_vpn_name %s includes a space.' % hwtacacs_vpn_name)
    if hwtacacs_server_host_name:
        if len(hwtacacs_server_host_name) > 255:
            module.fail_json(
                msg='Error: The hwtacacs_server_host_name %s is longer than 255 characters.' % hwtacacs_server_host_name)
        if ' ' in hwtacacs_server_host_name:
            module.fail_json(
                msg='Error: The hwtacacs_server_host_name %s includes a space.' % hwtacacs_server_host_name)
def main():
""" Module main """
argument_spec = dict(
state=dict(choices=['present', 'absent'], default='present'),
local_user_name=dict(type='str'),
local_password=dict(type='str', no_log=True),
local_service_type=dict(type='str'),
local_ftp_dir=dict(type='str'),
local_user_level=dict(type='str'),
local_user_group=dict(type='str'),
radius_group_name=dict(type='str'),
radius_server_type=dict(choices=['Authentication', 'Accounting']),
radius_server_ip=dict(type='str'),
radius_server_ipv6=dict(type='str'),
radius_server_port=dict(type='str'),
radius_server_mode=dict(
choices=['Secondary-server', 'Primary-server']),
radius_vpn_name=dict(type='str'),
radius_server_name=dict(type='str'),
hwtacacs_template=dict(type='str'),
hwtacacs_server_ip=dict(type='str'),
hwtacacs_server_ipv6=dict(type='str'),
hwtacacs_server_type=dict(
choices=['Authentication', 'Authorization', 'Accounting', 'Common']),
hwtacacs_is_secondary_server=dict(
required=False, default=False, type='bool'),
hwtacacs_vpn_name=dict(type='str'),
hwtacacs_is_public_net=dict(
required=False, default=False, type='bool'),
hwtacacs_server_host_name=dict(type='str')
)
argument_spec.update(ce_argument_spec)
module = AnsibleModule(argument_spec=argument_spec,
supports_check_mode=True)
check_module_argument(module=module)
changed = False
proposed = dict()
existing = dict()
end_state = dict()
updates = []
# common para
state = module.params['state']
# local para
local_user_name = module.params['local_user_name']
local_password = module.params['local_password']
local_service_type = module.params['local_service_type']
local_ftp_dir = module.params['local_ftp_dir']
local_user_level = module.params['local_user_level']
local_user_group = module.params['local_user_group']
# radius para
radius_group_name = module.params['radius_group_name']
radius_server_type = module.params['radius_server_type']
radius_server_ip = module.params['radius_server_ip']
radius_server_ipv6 = module.params['radius_server_ipv6']
radius_server_port = module.params['radius_server_port']
radius_server_mode = module.params['radius_server_mode']
radius_vpn_name = module.params['radius_vpn_name']
radius_server_name = module.params['radius_server_name']
# hwtacacs para
hwtacacs_template = module.params['hwtacacs_template']
hwtacacs_server_ip = module.params['hwtacacs_server_ip']
hwtacacs_server_ipv6 = module.params['hwtacacs_server_ipv6']
hwtacacs_server_type = module.params['hwtacacs_server_type']
hwtacacs_is_secondary_server = module.params[
'hwtacacs_is_secondary_server']
hwtacacs_vpn_name = module.params['hwtacacs_vpn_name']
hwtacacs_is_public_net = module.params['hwtacacs_is_public_net']
hwtacacs_server_host_name = module.params['hwtacacs_server_host_name']
ce_aaa_server_host = AaaServerHost()
if not ce_aaa_server_host:
        module.fail_json(msg='Error: Construct ce_aaa_server_host failed.')
# get proposed
proposed["state"] = state
if local_user_name:
proposed["local_user_name"] = local_user_name
if local_password:
proposed["local_password"] = "******"
if local_service_type:
proposed["local_service_type"] = local_service_type
if local_ftp_dir:
proposed["local_ftp_dir"] = local_ftp_dir
if local_user_level:
proposed["local_user_level"] = local_user_level
if local_user_group:
proposed["local_user_group"] = local_user_group
if radius_group_name:
proposed["radius_group_name"] = radius_group_name
if radius_server_type:
proposed["radius_server_type"] = radius_server_type
if radius_server_ip:
proposed["radius_server_ip"] = radius_server_ip
if radius_server_ipv6:
proposed["radius_server_ipv6"] = radius_server_ipv6
if radius_server_port:
proposed["radius_server_port"] = radius_server_port
if radius_server_mode:
proposed["radius_server_mode"] = radius_server_mode
if radius_vpn_name:
proposed["radius_vpn_name"] = radius_vpn_name
if radius_server_name:
proposed["radius_server_name"] = radius_server_name
if hwtacacs_template:
proposed["hwtacacs_template"] = hwtacacs_template
if hwtacacs_server_ip:
proposed["hwtacacs_server_ip"] = hwtacacs_server_ip
if hwtacacs_server_ipv6:
proposed["hwtacacs_server_ipv6"] = hwtacacs_server_ipv6
if hwtacacs_server_type:
proposed["hwtacacs_server_type"] = hwtacacs_server_type
proposed["hwtacacs_is_secondary_server"] = hwtacacs_is_secondary_server
if hwtacacs_vpn_name:
proposed["hwtacacs_vpn_name"] = hwtacacs_vpn_name
proposed["hwtacacs_is_public_net"] = hwtacacs_is_public_net
if hwtacacs_server_host_name:
proposed["hwtacacs_server_host_name"] = hwtacacs_server_host_name
if local_user_name:
if state == "present" and not local_password:
module.fail_json(
                msg='Error: Please input local_password when configuring a local user.')
local_user_result = ce_aaa_server_host.get_local_user_info(
module=module)
existing["local user name"] = local_user_result["local_user_info"]
if state == "present":
# present local user
if local_user_result["need_cfg"]:
cmd = ce_aaa_server_host.merge_local_user_info(module=module)
changed = True
updates.append(cmd)
else:
# absent local user
if local_user_result["need_cfg"]:
if not local_service_type and not local_ftp_dir and not local_user_level and not local_user_group:
cmd = ce_aaa_server_host.delete_local_user_info(
module=module)
else:
cmd = ce_aaa_server_host.merge_local_user_info(
module=module)
changed = True
updates.append(cmd)
local_user_result = ce_aaa_server_host.get_local_user_info(
module=module)
end_state["local user name"] = local_user_result["local_user_info"]
if radius_group_name:
if not radius_server_ip and not radius_server_ipv6 and not radius_server_name:
module.fail_json(
msg='Error: Please input radius_server_ip or radius_server_ipv6 or radius_server_name.')
if radius_server_ip and radius_server_ipv6:
module.fail_json(
msg='Error: Please do not input radius_server_ip and radius_server_ipv6 at the same time.')
if not radius_server_type or not radius_server_port or not radius_server_mode or not radius_vpn_name:
module.fail_json(
                msg='Error: Please input radius_server_type, radius_server_port, radius_server_mode and radius_vpn_name.')
if radius_server_ip:
rds_server_ipv4_result = ce_aaa_server_host.get_radius_server_cfg_ipv4(
module=module)
if radius_server_ipv6:
rds_server_ipv6_result = ce_aaa_server_host.get_radius_server_cfg_ipv6(
module=module)
if radius_server_name:
rds_server_name_result = ce_aaa_server_host.get_radius_server_name(
module=module)
if radius_server_ip and rds_server_ipv4_result["radius_server_ip_v4"]:
existing["radius server ipv4"] = rds_server_ipv4_result[
"radius_server_ip_v4"]
if radius_server_ipv6 and rds_server_ipv6_result["radius_server_ip_v6"]:
existing["radius server ipv6"] = rds_server_ipv6_result[
"radius_server_ip_v6"]
if radius_server_name and rds_server_name_result["radius_server_name_cfg"]:
existing["radius server name cfg"] = rds_server_name_result[
"radius_server_name_cfg"]
if state == "present":
if radius_server_ip and rds_server_ipv4_result["need_cfg"]:
cmd = ce_aaa_server_host.merge_radius_server_cfg_ipv4(
module=module)
changed = True
updates.append(cmd)
if radius_server_ipv6 and rds_server_ipv6_result["need_cfg"]:
cmd = ce_aaa_server_host.merge_radius_server_cfg_ipv6(
module=module)
changed = True
updates.append(cmd)
if radius_server_name and rds_server_name_result["need_cfg"]:
cmd = ce_aaa_server_host.merge_radius_server_name(
module=module)
changed = True
updates.append(cmd)
else:
if radius_server_ip and rds_server_ipv4_result["need_cfg"]:
cmd = ce_aaa_server_host.delete_radius_server_cfg_ipv4(
module=module)
changed = True
updates.append(cmd)
if radius_server_ipv6 and rds_server_ipv6_result["need_cfg"]:
cmd = ce_aaa_server_host.delete_radius_server_cfg_ipv6(
module=module)
changed = True
updates.append(cmd)
if radius_server_name and rds_server_name_result["need_cfg"]:
cmd = ce_aaa_server_host.delete_radius_server_name(
module=module)
changed = True
updates.append(cmd)
if radius_server_ip:
rds_server_ipv4_result = ce_aaa_server_host.get_radius_server_cfg_ipv4(
module=module)
if radius_server_ipv6:
rds_server_ipv6_result = ce_aaa_server_host.get_radius_server_cfg_ipv6(
module=module)
if radius_server_name:
rds_server_name_result = ce_aaa_server_host.get_radius_server_name(
module=module)
if radius_server_ip and rds_server_ipv4_result["radius_server_ip_v4"]:
end_state["radius server ipv4"] = rds_server_ipv4_result[
"radius_server_ip_v4"]
if radius_server_ipv6 and rds_server_ipv6_result["radius_server_ip_v6"]:
end_state["radius server ipv6"] = rds_server_ipv6_result[
"radius_server_ip_v6"]
if radius_server_name and rds_server_name_result["radius_server_name_cfg"]:
end_state["radius server name cfg"] = rds_server_name_result[
"radius_server_name_cfg"]
if hwtacacs_template:
if not hwtacacs_server_ip and not hwtacacs_server_ipv6 and not hwtacacs_server_host_name:
module.fail_json(
msg='Error: Please input hwtacacs_server_ip or hwtacacs_server_ipv6 or hwtacacs_server_host_name.')
if not hwtacacs_server_type or not hwtacacs_vpn_name:
module.fail_json(
                msg='Error: Please input hwtacacs_server_type and hwtacacs_vpn_name.')
if hwtacacs_server_ip and hwtacacs_server_ipv6:
module.fail_json(
msg='Error: Please do not set hwtacacs_server_ip and hwtacacs_server_ipv6 at the same time.')
if hwtacacs_vpn_name and hwtacacs_is_public_net:
module.fail_json(
msg='Error: Please do not set vpn and public net at the same time.')
if hwtacacs_server_ip:
hwtacacs_server_ipv4_result = ce_aaa_server_host.get_hwtacacs_server_cfg_ipv4(
module=module)
if hwtacacs_server_ipv6:
hwtacacs_server_ipv6_result = ce_aaa_server_host.get_hwtacacs_server_cfg_ipv6(
module=module)
if hwtacacs_server_host_name:
hwtacacs_host_name_result = ce_aaa_server_host.get_hwtacacs_host_server_cfg(
module=module)
if hwtacacs_server_ip and hwtacacs_server_ipv4_result["hwtacacs_server_cfg_ipv4"]:
existing["hwtacacs server cfg ipv4"] = hwtacacs_server_ipv4_result[
"hwtacacs_server_cfg_ipv4"]
if hwtacacs_server_ipv6 and hwtacacs_server_ipv6_result["hwtacacs_server_cfg_ipv6"]:
existing["hwtacacs server cfg ipv6"] = hwtacacs_server_ipv6_result[
"hwtacacs_server_cfg_ipv6"]
if hwtacacs_server_host_name and hwtacacs_host_name_result["hwtacacs_server_name_cfg"]:
existing["hwtacacs server name cfg"] = hwtacacs_host_name_result[
"hwtacacs_server_name_cfg"]
if state == "present":
if hwtacacs_server_ip and hwtacacs_server_ipv4_result["need_cfg"]:
cmd = ce_aaa_server_host.merge_hwtacacs_server_cfg_ipv4(
module=module)
changed = True
updates.append(cmd)
if hwtacacs_server_ipv6 and hwtacacs_server_ipv6_result["need_cfg"]:
cmd = ce_aaa_server_host.merge_hwtacacs_server_cfg_ipv6(
module=module)
changed = True
updates.append(cmd)
if hwtacacs_server_host_name and hwtacacs_host_name_result["need_cfg"]:
cmd = ce_aaa_server_host.merge_hwtacacs_host_server_cfg(
module=module)
changed = True
updates.append(cmd)
else:
if hwtacacs_server_ip and hwtacacs_server_ipv4_result["need_cfg"]:
cmd = ce_aaa_server_host.delete_hwtacacs_server_cfg_ipv4(
module=module)
changed = True
updates.append(cmd)
if hwtacacs_server_ipv6 and hwtacacs_server_ipv6_result["need_cfg"]:
cmd = ce_aaa_server_host.delete_hwtacacs_server_cfg_ipv6(
module=module)
changed = True
updates.append(cmd)
if hwtacacs_server_host_name and hwtacacs_host_name_result["need_cfg"]:
cmd = ce_aaa_server_host.delete_hwtacacs_host_server_cfg(
module=module)
changed = True
updates.append(cmd)
if hwtacacs_server_ip:
hwtacacs_server_ipv4_result = ce_aaa_server_host.get_hwtacacs_server_cfg_ipv4(
module=module)
if hwtacacs_server_ipv6:
hwtacacs_server_ipv6_result = ce_aaa_server_host.get_hwtacacs_server_cfg_ipv6(
module=module)
if hwtacacs_server_host_name:
hwtacacs_host_name_result = ce_aaa_server_host.get_hwtacacs_host_server_cfg(
module=module)
if hwtacacs_server_ip and hwtacacs_server_ipv4_result["hwtacacs_server_cfg_ipv4"]:
end_state["hwtacacs server cfg ipv4"] = hwtacacs_server_ipv4_result[
"hwtacacs_server_cfg_ipv4"]
if hwtacacs_server_ipv6 and hwtacacs_server_ipv6_result["hwtacacs_server_cfg_ipv6"]:
end_state["hwtacacs server cfg ipv6"] = hwtacacs_server_ipv6_result[
"hwtacacs_server_cfg_ipv6"]
if hwtacacs_server_host_name and hwtacacs_host_name_result["hwtacacs_server_name_cfg"]:
end_state["hwtacacs server name cfg"] = hwtacacs_host_name_result[
"hwtacacs_server_name_cfg"]
results = dict()
results['proposed'] = proposed
results['existing'] = existing
results['changed'] = changed
results['end_state'] = end_state
results['updates'] = updates
module.exit_json(**results)
if __name__ == '__main__':
main()
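# Usage sketch (hypothetical values; the module's own EXAMPLES documentation,
# not shown in this excerpt, is authoritative). A playbook task merging an
# IPv4 RADIUS server into a group might set:
#     radius_group_name='aaa_group', radius_server_type='Authentication',
#     radius_server_ip='192.168.20.154', radius_server_port='1812',
#     radius_server_mode='Primary-server', radius_vpn_name='_public_',
#     state='present'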
| gpl-3.0 |
ltsimps/Midterm | vendor/googletest/googlemock/test/gmock_test_utils.py | 769 | 3684 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test utilities for Google C++ Mocking Framework."""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import sys
# Determines path to gtest_test_utils and imports it.
SCRIPT_DIR = os.path.dirname(__file__) or '.'
# isdir resolves symbolic links.
gtest_tests_util_dir = os.path.join(SCRIPT_DIR, '../gtest/test')
if os.path.isdir(gtest_tests_util_dir):
GTEST_TESTS_UTIL_DIR = gtest_tests_util_dir
else:
GTEST_TESTS_UTIL_DIR = os.path.join(SCRIPT_DIR, '../../gtest/test')
sys.path.append(GTEST_TESTS_UTIL_DIR)
import gtest_test_utils # pylint: disable-msg=C6204
def GetSourceDir():
"""Returns the absolute path of the directory where the .py files are."""
return gtest_test_utils.GetSourceDir()
def GetTestExecutablePath(executable_name):
"""Returns the absolute path of the test binary given its name.
The function will print a message and abort the program if the resulting file
doesn't exist.
Args:
executable_name: name of the test binary that the test script runs.
Returns:
The absolute path of the test binary.
"""
return gtest_test_utils.GetTestExecutablePath(executable_name)
def GetExitStatus(exit_code):
"""Returns the argument to exit(), or -1 if exit() wasn't called.
Args:
exit_code: the result value of os.system(command).
"""
if os.name == 'nt':
# On Windows, os.WEXITSTATUS() doesn't work and os.system() returns
# the argument to exit() directly.
return exit_code
else:
# On Unix, os.WEXITSTATUS() must be used to extract the exit status
# from the result of os.system().
if os.WIFEXITED(exit_code):
return os.WEXITSTATUS(exit_code)
else:
return -1
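# Usage sketch (assumed command): GetExitStatus(os.system('exit 3')) yields 3
# on both Windows and Unix, while a process killed by a signal on Unix (where
# os.WIFEXITED() is false) yields -1.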
# Suppresses the "Invalid const name" lint complaint
# pylint: disable-msg=C6409
# Exposes utilities from gtest_test_utils.
Subprocess = gtest_test_utils.Subprocess
TestCase = gtest_test_utils.TestCase
environ = gtest_test_utils.environ
SetEnvVar = gtest_test_utils.SetEnvVar
PREMATURE_EXIT_FILE_ENV_VAR = gtest_test_utils.PREMATURE_EXIT_FILE_ENV_VAR
# pylint: enable-msg=C6409
def Main():
"""Runs the unit test."""
gtest_test_utils.Main()
| mit |
tdhopper/scikit-learn | examples/svm/plot_svm_scale_c.py | 223 | 5375 | """
==============================================
Scaling the regularization parameter for SVCs
==============================================
The following example illustrates the effect of scaling the
regularization parameter when using :ref:`svm` for
:ref:`classification <svm_classification>`.
For SVC classification, we are interested in a risk minimization for the
equation:
.. math::
    C \sum_{i=1}^{n} \mathcal{L} (f(x_i), y_i) + \Omega (w)
where
- :math:`C` is used to set the amount of regularization
- :math:`\mathcal{L}` is a `loss` function of our samples
and our model parameters.
- :math:`\Omega` is a `penalty` function of our model parameters
If we consider the loss function to be the individual error per
sample, then the data-fit term, or the sum of the error for each sample, will
increase as we add more samples. The penalization term, however, will not
increase.
When using, for example, :ref:`cross validation <cross_validation>`, to
set the amount of regularization with `C`, there will be a
different number of samples between the main problem and the smaller problems
within the folds of the cross validation.
Since our loss function is dependent on the number of samples, the latter
will influence the selected value of `C`.
The question that arises is `How do we optimally adjust C to
account for the different number of training samples?`
The figures below are used to illustrate the effect of scaling our
`C` to compensate for the change in the number of samples, in the
case of using an `l1` penalty, as well as the `l2` penalty.
l1-penalty case
-----------------
In the `l1` case, theory says that prediction consistency
(i.e. that under a given hypothesis the estimator
learned predicts as well as a model knowing the true distribution)
is not possible because of the bias of the `l1` penalty. It does say, however,
that model consistency, in terms of finding the right set of non-zero
parameters as well as their signs, can be achieved by scaling
`C`.
l2-penalty case
-----------------
The theory says that in order to achieve prediction consistency, the
penalty parameter should be kept constant
as the number of samples grow.
Simulations
------------
The two figures below plot the values of `C` on the `x-axis` and the
corresponding cross-validation scores on the `y-axis`, for several different
fractions of a generated data-set.
In the `l1` penalty case, the cross-validation-error correlates best with
the test-error, when scaling our `C` with the number of samples, `n`,
which can be seen in the first figure.
For the `l2` penalty case, the best result comes from the case where `C`
is not scaled.
.. topic:: Note:
Two separate datasets are used for the two different plots. The reason
behind this is the `l1` case works better on sparse data, while `l2`
is better suited to the non-sparse case.
"""
print(__doc__)
# Author: Andreas Mueller <[email protected]>
# Jaques Grobler <[email protected]>
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from sklearn.svm import LinearSVC
from sklearn.cross_validation import ShuffleSplit
from sklearn.grid_search import GridSearchCV
from sklearn.utils import check_random_state
from sklearn import datasets
rnd = check_random_state(1)
# set up dataset
n_samples = 100
n_features = 300
# l1 data (only 5 informative features)
X_1, y_1 = datasets.make_classification(n_samples=n_samples,
n_features=n_features, n_informative=5,
random_state=1)
# l2 data: non sparse, but less features
y_2 = np.sign(.5 - rnd.rand(n_samples))
X_2 = rnd.randn(n_samples, n_features // 5) + y_2[:, np.newaxis]
X_2 += 5 * rnd.randn(n_samples, n_features // 5)
clf_sets = [(LinearSVC(penalty='l1', loss='squared_hinge', dual=False,
tol=1e-3),
np.logspace(-2.3, -1.3, 10), X_1, y_1),
(LinearSVC(penalty='l2', loss='squared_hinge', dual=True,
tol=1e-4),
np.logspace(-4.5, -2, 10), X_2, y_2)]
colors = ['b', 'g', 'r', 'c']
for fignum, (clf, cs, X, y) in enumerate(clf_sets):
# set up the plot for each regressor
plt.figure(fignum, figsize=(9, 10))
for k, train_size in enumerate(np.linspace(0.3, 0.7, 3)[::-1]):
param_grid = dict(C=cs)
# To get nice curve, we need a large number of iterations to
# reduce the variance
grid = GridSearchCV(clf, refit=False, param_grid=param_grid,
cv=ShuffleSplit(n=n_samples, train_size=train_size,
n_iter=250, random_state=1))
grid.fit(X, y)
scores = [x[1] for x in grid.grid_scores_]
scales = [(1, 'No scaling'),
((n_samples * train_size), '1/n_samples'),
]
for subplotnum, (scaler, name) in enumerate(scales):
plt.subplot(2, 1, subplotnum + 1)
plt.xlabel('C')
plt.ylabel('CV Score')
grid_cs = cs * float(scaler) # scale the C's
plt.semilogx(grid_cs, scores, label="fraction %.2f" %
train_size)
plt.title('scaling=%s, penalty=%s, loss=%s' %
(name, clf.penalty, clf.loss))
plt.legend(loc="best")
plt.show()
| bsd-3-clause |
peterjoel/servo | tests/wpt/web-platform-tests/webdriver/tests/take_element_screenshot/screenshot.py | 8 | 1221 | from tests.support.asserts import assert_error, assert_png, assert_success
from tests.support.image import png_dimensions
from tests.support.inline import inline
from . import element_rect
def take_element_screenshot(session, element_id):
return session.transport.send(
"GET",
"session/{session_id}/element/{element_id}/screenshot".format(
session_id=session.session_id,
element_id=element_id,
)
)
def test_no_browsing_context(session, closed_window):
response = take_element_screenshot(session, "foo")
assert_error(response, "no such window")
def test_stale(session):
session.url = inline("<input>")
element = session.find.css("input", all=False)
session.refresh()
result = take_element_screenshot(session, element.id)
assert_error(result, "stale element reference")
def test_format_and_dimensions(session):
session.url = inline("<input>")
element = session.find.css("input", all=False)
rect = element_rect(session, element)
response = take_element_screenshot(session, element.id)
value = assert_success(response)
assert_png(value)
assert png_dimensions(value) == (rect["width"], rect["height"])
| mpl-2.0 |
sahilshekhawat/sympy | sympy/printing/tests/test_mathml.py | 62 | 16917 | from sympy import diff, Integral, Limit, sin, Symbol, Integer, Rational, cos, \
tan, asin, acos, atan, sinh, cosh, tanh, asinh, acosh, atanh, E, I, oo, \
pi, GoldenRatio, EulerGamma, Sum, Eq, Ne, Ge, Lt, Float
from sympy.core.compatibility import u
from sympy.printing.mathml import mathml, MathMLPrinter
from sympy.utilities.pytest import raises
x = Symbol('x')
y = Symbol('y')
mp = MathMLPrinter()
def test_printmethod():
assert mp.doprint(1 + x) == '<apply><plus/><ci>x</ci><cn>1</cn></apply>'
def test_mathml_core():
mml_1 = mp._print(1 + x)
assert mml_1.nodeName == 'apply'
nodes = mml_1.childNodes
assert len(nodes) == 3
assert nodes[0].nodeName == 'plus'
assert nodes[0].hasChildNodes() is False
assert nodes[0].nodeValue is None
assert nodes[1].nodeName in ['cn', 'ci']
if nodes[1].nodeName == 'cn':
assert nodes[1].childNodes[0].nodeValue == '1'
assert nodes[2].childNodes[0].nodeValue == 'x'
else:
assert nodes[1].childNodes[0].nodeValue == 'x'
assert nodes[2].childNodes[0].nodeValue == '1'
mml_2 = mp._print(x**2)
assert mml_2.nodeName == 'apply'
nodes = mml_2.childNodes
assert nodes[1].childNodes[0].nodeValue == 'x'
assert nodes[2].childNodes[0].nodeValue == '2'
mml_3 = mp._print(2*x)
assert mml_3.nodeName == 'apply'
nodes = mml_3.childNodes
assert nodes[0].nodeName == 'times'
assert nodes[1].childNodes[0].nodeValue == '2'
assert nodes[2].childNodes[0].nodeValue == 'x'
mml = mp._print(Float(1.0, 2)*x)
assert mml.nodeName == 'apply'
nodes = mml.childNodes
assert nodes[0].nodeName == 'times'
assert nodes[1].childNodes[0].nodeValue == '1.0'
assert nodes[2].childNodes[0].nodeValue == 'x'
def test_mathml_functions():
mml_1 = mp._print(sin(x))
assert mml_1.nodeName == 'apply'
assert mml_1.childNodes[0].nodeName == 'sin'
assert mml_1.childNodes[1].nodeName == 'ci'
mml_2 = mp._print(diff(sin(x), x, evaluate=False))
assert mml_2.nodeName == 'apply'
assert mml_2.childNodes[0].nodeName == 'diff'
assert mml_2.childNodes[1].nodeName == 'bvar'
    assert mml_2.childNodes[1].childNodes[
        0].nodeName == 'ci'  # below bvar there's <ci>x</ci>
mml_3 = mp._print(diff(cos(x*y), x, evaluate=False))
assert mml_3.nodeName == 'apply'
assert mml_3.childNodes[0].nodeName == 'partialdiff'
assert mml_3.childNodes[1].nodeName == 'bvar'
    assert mml_3.childNodes[1].childNodes[
        0].nodeName == 'ci'  # below bvar there's <ci>x</ci>
def test_mathml_limits():
# XXX No unevaluated limits
lim_fun = sin(x)/x
mml_1 = mp._print(Limit(lim_fun, x, 0))
assert mml_1.childNodes[0].nodeName == 'limit'
assert mml_1.childNodes[1].nodeName == 'bvar'
assert mml_1.childNodes[2].nodeName == 'lowlimit'
assert mml_1.childNodes[3].toxml() == mp._print(lim_fun).toxml()
def test_mathml_integrals():
integrand = x
mml_1 = mp._print(Integral(integrand, (x, 0, 1)))
assert mml_1.childNodes[0].nodeName == 'int'
assert mml_1.childNodes[1].nodeName == 'bvar'
assert mml_1.childNodes[2].nodeName == 'lowlimit'
assert mml_1.childNodes[3].nodeName == 'uplimit'
assert mml_1.childNodes[4].toxml() == mp._print(integrand).toxml()
def test_mathml_sums():
summand = x
mml_1 = mp._print(Sum(summand, (x, 1, 10)))
assert mml_1.childNodes[0].nodeName == 'sum'
assert mml_1.childNodes[1].nodeName == 'bvar'
assert mml_1.childNodes[2].nodeName == 'lowlimit'
assert mml_1.childNodes[3].nodeName == 'uplimit'
assert mml_1.childNodes[4].toxml() == mp._print(summand).toxml()
def test_mathml_tuples():
mml_1 = mp._print([2])
assert mml_1.nodeName == 'list'
assert mml_1.childNodes[0].nodeName == 'cn'
assert len(mml_1.childNodes) == 1
mml_2 = mp._print([2, Integer(1)])
assert mml_2.nodeName == 'list'
assert mml_2.childNodes[0].nodeName == 'cn'
assert mml_2.childNodes[1].nodeName == 'cn'
assert len(mml_2.childNodes) == 2
def test_mathml_add():
mml = mp._print(x**5 - x**4 + x)
assert mml.childNodes[0].nodeName == 'plus'
assert mml.childNodes[1].childNodes[0].nodeName == 'minus'
assert mml.childNodes[1].childNodes[1].nodeName == 'apply'
def test_mathml_Rational():
mml_1 = mp._print(Rational(1, 1))
"""should just return a number"""
assert mml_1.nodeName == 'cn'
mml_2 = mp._print(Rational(2, 5))
assert mml_2.childNodes[0].nodeName == 'divide'
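# For reference (editorial sketch): the content markup asserted above
# serializes as, e.g., mp._print(Rational(2, 5)).toxml() ==
# '<apply><divide/><cn>2</cn><cn>5</cn></apply>'.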
def test_mathml_constants():
mml = mp._print(I)
assert mml.nodeName == 'imaginaryi'
mml = mp._print(E)
assert mml.nodeName == 'exponentiale'
mml = mp._print(oo)
assert mml.nodeName == 'infinity'
mml = mp._print(pi)
assert mml.nodeName == 'pi'
assert mathml(GoldenRatio) == '<cn>φ</cn>'
mml = mathml(EulerGamma)
assert mml == '<eulergamma/>'
def test_mathml_trig():
mml = mp._print(sin(x))
assert mml.childNodes[0].nodeName == 'sin'
mml = mp._print(cos(x))
assert mml.childNodes[0].nodeName == 'cos'
mml = mp._print(tan(x))
assert mml.childNodes[0].nodeName == 'tan'
mml = mp._print(asin(x))
assert mml.childNodes[0].nodeName == 'arcsin'
mml = mp._print(acos(x))
assert mml.childNodes[0].nodeName == 'arccos'
mml = mp._print(atan(x))
assert mml.childNodes[0].nodeName == 'arctan'
mml = mp._print(sinh(x))
assert mml.childNodes[0].nodeName == 'sinh'
mml = mp._print(cosh(x))
assert mml.childNodes[0].nodeName == 'cosh'
mml = mp._print(tanh(x))
assert mml.childNodes[0].nodeName == 'tanh'
mml = mp._print(asinh(x))
assert mml.childNodes[0].nodeName == 'arcsinh'
mml = mp._print(atanh(x))
assert mml.childNodes[0].nodeName == 'arctanh'
mml = mp._print(acosh(x))
assert mml.childNodes[0].nodeName == 'arccosh'
def test_mathml_relational():
mml_1 = mp._print(Eq(x, 1))
assert mml_1.nodeName == 'apply'
assert mml_1.childNodes[0].nodeName == 'eq'
assert mml_1.childNodes[1].nodeName == 'ci'
assert mml_1.childNodes[1].childNodes[0].nodeValue == 'x'
assert mml_1.childNodes[2].nodeName == 'cn'
assert mml_1.childNodes[2].childNodes[0].nodeValue == '1'
mml_2 = mp._print(Ne(1, x))
assert mml_2.nodeName == 'apply'
assert mml_2.childNodes[0].nodeName == 'neq'
assert mml_2.childNodes[1].nodeName == 'cn'
assert mml_2.childNodes[1].childNodes[0].nodeValue == '1'
assert mml_2.childNodes[2].nodeName == 'ci'
assert mml_2.childNodes[2].childNodes[0].nodeValue == 'x'
mml_3 = mp._print(Ge(1, x))
assert mml_3.nodeName == 'apply'
assert mml_3.childNodes[0].nodeName == 'geq'
assert mml_3.childNodes[1].nodeName == 'cn'
assert mml_3.childNodes[1].childNodes[0].nodeValue == '1'
assert mml_3.childNodes[2].nodeName == 'ci'
assert mml_3.childNodes[2].childNodes[0].nodeValue == 'x'
mml_4 = mp._print(Lt(1, x))
assert mml_4.nodeName == 'apply'
assert mml_4.childNodes[0].nodeName == 'lt'
assert mml_4.childNodes[1].nodeName == 'cn'
assert mml_4.childNodes[1].childNodes[0].nodeValue == '1'
assert mml_4.childNodes[2].nodeName == 'ci'
assert mml_4.childNodes[2].childNodes[0].nodeValue == 'x'
def test_symbol():
mml = mp._print(Symbol("x"))
assert mml.nodeName == 'ci'
assert mml.childNodes[0].nodeValue == 'x'
del mml
mml = mp._print(Symbol("x^2"))
assert mml.nodeName == 'ci'
assert mml.childNodes[0].nodeName == 'mml:msup'
assert mml.childNodes[0].childNodes[0].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[0].childNodes[0].nodeValue == 'x'
assert mml.childNodes[0].childNodes[1].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[1].childNodes[0].nodeValue == '2'
del mml
mml = mp._print(Symbol("x__2"))
assert mml.nodeName == 'ci'
assert mml.childNodes[0].nodeName == 'mml:msup'
assert mml.childNodes[0].childNodes[0].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[0].childNodes[0].nodeValue == 'x'
assert mml.childNodes[0].childNodes[1].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[1].childNodes[0].nodeValue == '2'
del mml
mml = mp._print(Symbol("x_2"))
assert mml.nodeName == 'ci'
assert mml.childNodes[0].nodeName == 'mml:msub'
assert mml.childNodes[0].childNodes[0].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[0].childNodes[0].nodeValue == 'x'
assert mml.childNodes[0].childNodes[1].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[1].childNodes[0].nodeValue == '2'
del mml
mml = mp._print(Symbol("x^3_2"))
assert mml.nodeName == 'ci'
assert mml.childNodes[0].nodeName == 'mml:msubsup'
assert mml.childNodes[0].childNodes[0].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[0].childNodes[0].nodeValue == 'x'
assert mml.childNodes[0].childNodes[1].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[1].childNodes[0].nodeValue == '2'
assert mml.childNodes[0].childNodes[2].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[2].childNodes[0].nodeValue == '3'
del mml
mml = mp._print(Symbol("x__3_2"))
assert mml.nodeName == 'ci'
assert mml.childNodes[0].nodeName == 'mml:msubsup'
assert mml.childNodes[0].childNodes[0].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[0].childNodes[0].nodeValue == 'x'
assert mml.childNodes[0].childNodes[1].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[1].childNodes[0].nodeValue == '2'
assert mml.childNodes[0].childNodes[2].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[2].childNodes[0].nodeValue == '3'
del mml
mml = mp._print(Symbol("x_2_a"))
assert mml.nodeName == 'ci'
assert mml.childNodes[0].nodeName == 'mml:msub'
assert mml.childNodes[0].childNodes[0].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[0].childNodes[0].nodeValue == 'x'
assert mml.childNodes[0].childNodes[1].nodeName == 'mml:mrow'
assert mml.childNodes[0].childNodes[1].childNodes[0].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[1].childNodes[0].childNodes[
0].nodeValue == '2'
assert mml.childNodes[0].childNodes[1].childNodes[1].nodeName == 'mml:mo'
assert mml.childNodes[0].childNodes[1].childNodes[1].childNodes[
0].nodeValue == ' '
assert mml.childNodes[0].childNodes[1].childNodes[2].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[1].childNodes[2].childNodes[
0].nodeValue == 'a'
del mml
mml = mp._print(Symbol("x^2^a"))
assert mml.nodeName == 'ci'
assert mml.childNodes[0].nodeName == 'mml:msup'
assert mml.childNodes[0].childNodes[0].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[0].childNodes[0].nodeValue == 'x'
assert mml.childNodes[0].childNodes[1].nodeName == 'mml:mrow'
assert mml.childNodes[0].childNodes[1].childNodes[0].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[1].childNodes[0].childNodes[
0].nodeValue == '2'
assert mml.childNodes[0].childNodes[1].childNodes[1].nodeName == 'mml:mo'
assert mml.childNodes[0].childNodes[1].childNodes[1].childNodes[
0].nodeValue == ' '
assert mml.childNodes[0].childNodes[1].childNodes[2].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[1].childNodes[2].childNodes[
0].nodeValue == 'a'
del mml
mml = mp._print(Symbol("x__2__a"))
assert mml.nodeName == 'ci'
assert mml.childNodes[0].nodeName == 'mml:msup'
assert mml.childNodes[0].childNodes[0].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[0].childNodes[0].nodeValue == 'x'
assert mml.childNodes[0].childNodes[1].nodeName == 'mml:mrow'
assert mml.childNodes[0].childNodes[1].childNodes[0].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[1].childNodes[0].childNodes[
0].nodeValue == '2'
assert mml.childNodes[0].childNodes[1].childNodes[1].nodeName == 'mml:mo'
assert mml.childNodes[0].childNodes[1].childNodes[1].childNodes[
0].nodeValue == ' '
assert mml.childNodes[0].childNodes[1].childNodes[2].nodeName == 'mml:mi'
assert mml.childNodes[0].childNodes[1].childNodes[2].childNodes[
0].nodeValue == 'a'
del mml
def test_mathml_greek():
mml = mp._print(Symbol('alpha'))
assert mml.nodeName == 'ci'
assert mml.childNodes[0].nodeValue == u('\N{GREEK SMALL LETTER ALPHA}')
assert mp.doprint(Symbol('alpha')) == '<ci>α</ci>'
assert mp.doprint(Symbol('beta')) == '<ci>β</ci>'
assert mp.doprint(Symbol('gamma')) == '<ci>γ</ci>'
assert mp.doprint(Symbol('delta')) == '<ci>δ</ci>'
assert mp.doprint(Symbol('epsilon')) == '<ci>ε</ci>'
assert mp.doprint(Symbol('zeta')) == '<ci>ζ</ci>'
assert mp.doprint(Symbol('eta')) == '<ci>η</ci>'
assert mp.doprint(Symbol('theta')) == '<ci>θ</ci>'
assert mp.doprint(Symbol('iota')) == '<ci>ι</ci>'
assert mp.doprint(Symbol('kappa')) == '<ci>κ</ci>'
assert mp.doprint(Symbol('lambda')) == '<ci>λ</ci>'
assert mp.doprint(Symbol('mu')) == '<ci>μ</ci>'
assert mp.doprint(Symbol('nu')) == '<ci>ν</ci>'
assert mp.doprint(Symbol('xi')) == '<ci>ξ</ci>'
assert mp.doprint(Symbol('omicron')) == '<ci>ο</ci>'
assert mp.doprint(Symbol('pi')) == '<ci>π</ci>'
assert mp.doprint(Symbol('rho')) == '<ci>ρ</ci>'
assert mp.doprint(Symbol('varsigma')) == '<ci>ς</ci>', mp.doprint(Symbol('varsigma'))
assert mp.doprint(Symbol('sigma')) == '<ci>σ</ci>'
assert mp.doprint(Symbol('tau')) == '<ci>τ</ci>'
assert mp.doprint(Symbol('upsilon')) == '<ci>υ</ci>'
assert mp.doprint(Symbol('phi')) == '<ci>φ</ci>'
assert mp.doprint(Symbol('chi')) == '<ci>χ</ci>'
assert mp.doprint(Symbol('psi')) == '<ci>ψ</ci>'
assert mp.doprint(Symbol('omega')) == '<ci>ω</ci>'
assert mp.doprint(Symbol('Alpha')) == '<ci>Α</ci>'
assert mp.doprint(Symbol('Beta')) == '<ci>Β</ci>'
assert mp.doprint(Symbol('Gamma')) == '<ci>Γ</ci>'
assert mp.doprint(Symbol('Delta')) == '<ci>Δ</ci>'
assert mp.doprint(Symbol('Epsilon')) == '<ci>Ε</ci>'
assert mp.doprint(Symbol('Zeta')) == '<ci>Ζ</ci>'
assert mp.doprint(Symbol('Eta')) == '<ci>Η</ci>'
assert mp.doprint(Symbol('Theta')) == '<ci>Θ</ci>'
assert mp.doprint(Symbol('Iota')) == '<ci>Ι</ci>'
assert mp.doprint(Symbol('Kappa')) == '<ci>Κ</ci>'
assert mp.doprint(Symbol('Lambda')) == '<ci>Λ</ci>'
assert mp.doprint(Symbol('Mu')) == '<ci>Μ</ci>'
assert mp.doprint(Symbol('Nu')) == '<ci>Ν</ci>'
assert mp.doprint(Symbol('Xi')) == '<ci>Ξ</ci>'
assert mp.doprint(Symbol('Omicron')) == '<ci>Ο</ci>'
assert mp.doprint(Symbol('Pi')) == '<ci>Π</ci>'
assert mp.doprint(Symbol('Rho')) == '<ci>Ρ</ci>'
assert mp.doprint(Symbol('Sigma')) == '<ci>Σ</ci>'
assert mp.doprint(Symbol('Tau')) == '<ci>Τ</ci>'
assert mp.doprint(Symbol('Upsilon')) == '<ci>Υ</ci>'
assert mp.doprint(Symbol('Phi')) == '<ci>Φ</ci>'
assert mp.doprint(Symbol('Chi')) == '<ci>Χ</ci>'
assert mp.doprint(Symbol('Psi')) == '<ci>Ψ</ci>'
assert mp.doprint(Symbol('Omega')) == '<ci>Ω</ci>'
def test_mathml_order():
expr = x**3 + x**2*y + 3*x*y**3 + y**4
mp = MathMLPrinter({'order': 'lex'})
mml = mp._print(expr)
assert mml.childNodes[1].childNodes[0].nodeName == 'power'
assert mml.childNodes[1].childNodes[1].childNodes[0].data == 'x'
assert mml.childNodes[1].childNodes[2].childNodes[0].data == '3'
assert mml.childNodes[4].childNodes[0].nodeName == 'power'
assert mml.childNodes[4].childNodes[1].childNodes[0].data == 'y'
assert mml.childNodes[4].childNodes[2].childNodes[0].data == '4'
mp = MathMLPrinter({'order': 'rev-lex'})
mml = mp._print(expr)
assert mml.childNodes[1].childNodes[0].nodeName == 'power'
assert mml.childNodes[1].childNodes[1].childNodes[0].data == 'y'
assert mml.childNodes[1].childNodes[2].childNodes[0].data == '4'
assert mml.childNodes[4].childNodes[0].nodeName == 'power'
assert mml.childNodes[4].childNodes[1].childNodes[0].data == 'x'
assert mml.childNodes[4].childNodes[2].childNodes[0].data == '3'
def test_settings():
raises(TypeError, lambda: mathml(Symbol("x"), method="garbage"))
def test_toprettyxml_hooking():
# test that the patch doesn't influence the behavior of the standard library
import xml.dom.minidom
doc = xml.dom.minidom.parseString(
"<apply><plus/><ci>x</ci><cn>1</cn></apply>")
prettyxml_old = doc.toprettyxml()
mp.apply_patch()
mp.restore_patch()
assert prettyxml_old == doc.toprettyxml()
| bsd-3-clause |
janol77/flask-app | app/manage.py | 2 | 1082 | from flask_script import Manager
from werkzeug.security import generate_password_hash
from app.db import db
from flask import Flask, g
from app.modules.user.models import User
import os
def create_app(config="config.ini"):
app = Flask(__name__, static_url_path='/static')
app.config.from_object(__name__)
if os.path.exists(config):
app.config.from_pyfile(config)
else:
print("The app does not have a config.ini file")
    # Bind the database to the WSGI application object
db.init_app(app)
return app
app = create_app()
manager = Manager(app)
@manager.command
def init_db():
"""Inicializar la base de datos."""
user_admin = {
'password': 'admin',
'email': '[email protected]',
'name': 'Administrador',
'active': True,
'state': "confirmed",
'rol': 'admin',
'deleted': False
}
    User.objects.filter(email=user_admin['email']).delete()
u = User(**user_admin)
u.generate_password()
u.save()
print "Usuario Administrador creado."
if __name__ == "__main__":
manager.run()
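# Usage sketch (assumption: run from the project root; `runserver` is one of
# Flask-Script's built-in commands):
#   python manage.py init_db    # recreate the administrator account above
#   python manage.py runserver  # start the development server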
| gpl-3.0 |
vietch2612/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/config/committers_unittest.py | 121 | 20173 | # Copyright (C) 2009 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import unittest2 as unittest
from webkitpy.common.config.committers import CommitterList, Contributor, Committer, Reviewer
class CommittersTest(unittest.TestCase):
def test_committer_lookup(self):
committer = Committer('Test One', '[email protected]', 'one')
reviewer = Reviewer('Test Two', ['[email protected]', '[email protected]', '[email protected]'])
contributor = Contributor('Test Three', ['[email protected]'], 'three')
contributor_with_two_nicknames = Contributor('Other Four', ['[email protected]', '[email protected]'], ['four', 'otherfour'])
contributor_with_same_email_username = Contributor('Yet Another Four', ['[email protected]'], ['yetanotherfour'])
committer_list = CommitterList(committers=[committer], reviewers=[reviewer],
contributors=[contributor, contributor_with_two_nicknames, contributor_with_same_email_username])
# Test valid committer, reviewer and contributor lookup
self.assertEqual(committer_list.committer_by_email('[email protected]'), committer)
self.assertEqual(committer_list.reviewer_by_email('[email protected]'), reviewer)
self.assertEqual(committer_list.committer_by_email('[email protected]'), reviewer)
self.assertEqual(committer_list.committer_by_email('[email protected]'), reviewer)
self.assertEqual(committer_list.reviewer_by_email('[email protected]'), reviewer)
self.assertEqual(committer_list.contributor_by_email('[email protected]'), contributor)
        # Test valid committer, reviewer and contributor lookup by name
self.assertEqual(committer_list.committer_by_name("Test One"), committer)
self.assertEqual(committer_list.committer_by_name("Test Two"), reviewer)
self.assertIsNone(committer_list.committer_by_name("Test Three"))
self.assertEqual(committer_list.contributor_by_name("Test Three"), contributor)
self.assertEqual(committer_list.contributor_by_name("test one"), committer)
self.assertEqual(committer_list.contributor_by_name("test two"), reviewer)
self.assertEqual(committer_list.contributor_by_name("test three"), contributor)
# Test that the first email is assumed to be the Bugzilla email address (for now)
self.assertEqual(committer_list.committer_by_email('[email protected]').bugzilla_email(), '[email protected]')
# Test that a known committer is not returned during reviewer lookup
self.assertIsNone(committer_list.reviewer_by_email('[email protected]'))
self.assertIsNone(committer_list.reviewer_by_email('[email protected]'))
# and likewise that a known contributor is not returned for committer lookup.
self.assertIsNone(committer_list.committer_by_email('[email protected]'))
# Test that unknown email address fail both committer and reviewer lookup
self.assertIsNone(committer_list.committer_by_email('[email protected]'))
self.assertIsNone(committer_list.reviewer_by_email('[email protected]'))
# Test that emails returns a list.
self.assertEqual(committer.emails, ['[email protected]'])
self.assertEqual(committer.irc_nicknames, ['one'])
self.assertEqual(committer_list.contributor_by_irc_nickname('one'), committer)
self.assertEqual(committer_list.contributor_by_irc_nickname('three'), contributor)
self.assertEqual(committer_list.contributor_by_irc_nickname('four'), contributor_with_two_nicknames)
self.assertEqual(committer_list.contributor_by_irc_nickname('otherfour'), contributor_with_two_nicknames)
        # Test that the lists returned are as we expect them.
self.assertEqual(committer_list.contributors(), [contributor, contributor_with_two_nicknames, contributor_with_same_email_username, committer, reviewer])
self.assertEqual(committer_list.committers(), [committer, reviewer])
self.assertEqual(committer_list.reviewers(), [reviewer])
self.assertEqual(committer_list.contributors_by_search_string('test'), [contributor, committer, reviewer])
self.assertEqual(committer_list.contributors_by_search_string('rad'), [reviewer])
self.assertEqual(committer_list.contributors_by_search_string('Two'), [reviewer])
self.assertEqual(committer_list.contributors_by_search_string('otherfour'), [contributor_with_two_nicknames])
self.assertEqual(committer_list.contributors_by_search_string('*otherfour*'), [contributor_with_two_nicknames, contributor_with_same_email_username])
self.assertEqual(committer_list.contributors_by_email_username("one"), [committer])
self.assertEqual(committer_list.contributors_by_email_username("four"), [])
self.assertEqual(committer_list.contributors_by_email_username("otherfour"), [contributor_with_two_nicknames, contributor_with_same_email_username])
def _assert_fuzz_match(self, text, name_of_expected_contributor, expected_distance):
committers = CommitterList()
contributors, distance = committers.contributors_by_fuzzy_match(text)
if type(name_of_expected_contributor) is list:
expected_names = name_of_expected_contributor
else:
expected_names = [name_of_expected_contributor] if name_of_expected_contributor else []
self.assertEqual(([contributor.full_name for contributor in contributors], distance), (expected_names, expected_distance))
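    # Reference sketch (an illustrative assumption -- the real matcher lives in
    # CommitterList.contributors_by_fuzzy_match): the "distance" asserted by the
    # helper above is the classic Levenshtein edit distance, computable with
    # dynamic programming in O(len(a) * len(b)) time:
    @staticmethod
    def _levenshtein_distance(a, b):
        previous = list(range(len(b) + 1))  # distance from '' to each prefix of b
        for i, char_a in enumerate(a, 1):
            current = [i]  # deleting all i characters of a matches the empty prefix
            for j, char_b in enumerate(b, 1):
                current.append(min(previous[j] + 1,       # delete char_a
                                   current[j - 1] + 1,    # insert char_b
                                   previous[j - 1] + (char_a != char_b)))  # substitute
            previous = current
        return previous[-1]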
# Test that the string representation of a Contributor supports unicode
def test_contributor_encoding(self):
committer_encoding = Contributor(u'\u017dan M\u00fcller', '[email protected]', 'zmuller')
self.assertTrue(str(committer_encoding))
# Basic testing of the edit distance matching ...
def test_contributors_by_fuzzy_match(self):
self._assert_fuzz_match('Geoff Garen', 'Geoffrey Garen', 3)
self._assert_fuzz_match('Kenneth Christiansen', 'Kenneth Rohde Christiansen', 6)
self._assert_fuzz_match('Sam', 'Sam Weinig', 0)
self._assert_fuzz_match('me', None, 2)
# The remaining tests test that certain names are resolved in a specific way.
# We break this up into multiple tests so that each is faster and they can
# be run in parallel. Unfortunately each test scans the entire committers list,
# so these are inherently slow (see https://bugs.webkit.org/show_bug.cgi?id=79179).
#
# Commented out lines are test cases imported from the bug 26533 yet to pass.
def integration_test_contributors__none(self):
self._assert_fuzz_match('myself', None, 6)
self._assert_fuzz_match('others', None, 6)
self._assert_fuzz_match('BUILD FIX', None, 9)
def integration_test_contributors__none_2(self):
self._assert_fuzz_match('but Dan Bernstein also reviewed', None, 31)
self._assert_fuzz_match('asked thoughtful questions', None, 26)
self._assert_fuzz_match('build fix of mac', None, 16)
def integration_test_contributors__none_3(self):
self._assert_fuzz_match('a spell checker', None, 15)
self._assert_fuzz_match('nobody, build fix', None, 17)
self._assert_fuzz_match('NOBODY (chromium build fix)', None, 27)
def integration_test_contributors_ada_chan(self):
self._assert_fuzz_match('Ada', 'Ada Chan', 0)
def integration_test_contributors_adele_peterson(self):
self._assert_fuzz_match('adele', 'Adele Peterson', 0)
    def integration_test_contributors_adam_roben(self):
# self._assert_fuzz_match('Adam', 'Adam Roben', 0)
self._assert_fuzz_match('aroben', 'Adam Roben', 0)
def integration_test_contributors_alexey_proskuryakov(self):
# self._assert_fuzz_match('Alexey', 'Alexey Proskuryakov', 0)
self._assert_fuzz_match('ap', 'Alexey Proskuryakov', 0)
self._assert_fuzz_match('Alexey P', 'Alexey Proskuryakov', 0)
def integration_test_contributors_alice_liu(self):
# self._assert_fuzz_match('Alice', 'Alice Liu', 0)
self._assert_fuzz_match('aliu', 'Alice Liu', 0)
self._assert_fuzz_match('Liu', 'Alice Liu', 0)
def integration_test_contributors_alp_toker(self):
self._assert_fuzz_match('Alp', 'Alp Toker', 0)
def integration_test_contributors_anders_carlsson(self):
self._assert_fuzz_match('Anders', 'Anders Carlsson', 0)
self._assert_fuzz_match('andersca', 'Anders Carlsson', 0)
self._assert_fuzz_match('anders', 'Anders Carlsson', 0)
self._assert_fuzz_match('Andersca', 'Anders Carlsson', 0)
def integration_test_contributors_antti_koivisto(self):
self._assert_fuzz_match('Antti "printf" Koivisto', 'Antti Koivisto', 9)
self._assert_fuzz_match('Antti', 'Antti Koivisto', 0)
def integration_test_contributors_beth_dakin(self):
self._assert_fuzz_match('Beth', 'Beth Dakin', 0)
self._assert_fuzz_match('beth', 'Beth Dakin', 0)
self._assert_fuzz_match('bdakin', 'Beth Dakin', 0)
def integration_test_contributors_brady_eidson(self):
self._assert_fuzz_match('Brady', 'Brady Eidson', 0)
self._assert_fuzz_match('bradee-oh', 'Brady Eidson', 0)
self._assert_fuzz_match('Brady', 'Brady Eidson', 0)
def integration_test_contributors_cameron_zwarich(self):
pass # self._assert_fuzz_match('Cameron', 'Cameron Zwarich', 0)
# self._assert_fuzz_match('cpst', 'Cameron Zwarich', 1)
def integration_test_contributors_chris_blumenberg(self):
# self._assert_fuzz_match('Chris', 'Chris Blumenberg', 0)
self._assert_fuzz_match('cblu', 'Chris Blumenberg', 0)
def integration_test_contributors_dan_bernstein(self):
self._assert_fuzz_match('Dan', ['Dan Winship', 'Dan Bernstein'], 0)
self._assert_fuzz_match('Dan B', 'Dan Bernstein', 0)
# self._assert_fuzz_match('mitz', 'Dan Bernstein', 0)
self._assert_fuzz_match('Mitz Pettel', 'Dan Bernstein', 1)
self._assert_fuzz_match('Mitzpettel', 'Dan Bernstein', 0)
self._assert_fuzz_match('Mitz Pettel RTL', 'Dan Bernstein', 5)
def integration_test_contributors_dan_bernstein_2(self):
self._assert_fuzz_match('Teh Mitzpettel', 'Dan Bernstein', 4)
# self._assert_fuzz_match('The Mitz', 'Dan Bernstein', 0)
self._assert_fuzz_match('Dr Dan Bernstein', 'Dan Bernstein', 3)
def integration_test_contributors_darin_adler(self):
self._assert_fuzz_match('Darin Adler\'', 'Darin Adler', 1)
self._assert_fuzz_match('Darin', 'Darin Adler', 0) # Thankfully "Fisher" is longer than "Adler"
self._assert_fuzz_match('darin', 'Darin Adler', 0)
def integration_test_contributors_david_harrison(self):
self._assert_fuzz_match('Dave Harrison', 'David Harrison', 2)
self._assert_fuzz_match('harrison', 'David Harrison', 0)
self._assert_fuzz_match('Dr. Harrison', 'David Harrison', 4)
def integration_test_contributors_david_harrison_2(self):
self._assert_fuzz_match('Dave Harrson', 'David Harrison', 3)
self._assert_fuzz_match('Dave Harrsion', 'David Harrison', 4) # Damerau-Levenshtein distance is 3
def integration_test_contributors_david_hyatt(self):
self._assert_fuzz_match('Dave Hyatt', 'David Hyatt', 2)
self._assert_fuzz_match('Daddy Hyatt', 'David Hyatt', 3)
# self._assert_fuzz_match('Dave', 'David Hyatt', 0) # 'Dave' could mean harrison.
self._assert_fuzz_match('hyatt', 'David Hyatt', 0)
# self._assert_fuzz_match('Haytt', 'David Hyatt', 0) # Works if we had implemented Damerau-Levenshtein distance!
def integration_test_contributors_david_kilzer(self):
self._assert_fuzz_match('Dave Kilzer', 'David Kilzer', 2)
self._assert_fuzz_match('David D. Kilzer', 'David Kilzer', 3)
self._assert_fuzz_match('ddkilzer', 'David Kilzer', 0)
def integration_test_contributors_don_melton(self):
self._assert_fuzz_match('Don', 'Don Melton', 0)
self._assert_fuzz_match('Gramps', 'Don Melton', 0)
def integration_test_contributors_eric_seidel(self):
# self._assert_fuzz_match('eric', 'Eric Seidel', 0)
self._assert_fuzz_match('Eric S', 'Eric Seidel', 0)
# self._assert_fuzz_match('MacDome', 'Eric Seidel', 0)
self._assert_fuzz_match('eseidel', 'Eric Seidel', 0)
def integration_test_contributors_geoffrey_garen(self):
# self._assert_fuzz_match('Geof', 'Geoffrey Garen', 4)
# self._assert_fuzz_match('Geoff', 'Geoffrey Garen', 3)
self._assert_fuzz_match('Geoff Garen', 'Geoffrey Garen', 3)
self._assert_fuzz_match('ggaren', 'Geoffrey Garen', 0)
# self._assert_fuzz_match('geoff', 'Geoffrey Garen', 0)
self._assert_fuzz_match('Geoffrey', 'Geoffrey Garen', 0)
self._assert_fuzz_match('GGaren', 'Geoffrey Garen', 0)
def integration_test_contributors_greg_bolsinga(self):
pass # self._assert_fuzz_match('Greg', 'Greg Bolsinga', 0)
def integration_test_contributors_holger_freyther(self):
self._assert_fuzz_match('Holger', 'Holger Freyther', 0)
self._assert_fuzz_match('Holger Hans Peter Freyther', 'Holger Freyther', 11)
def integration_test_contributors_jon_sullivan(self):
# self._assert_fuzz_match('john', 'John Sullivan', 0)
self._assert_fuzz_match('sullivan', 'John Sullivan', 0)
def integration_test_contributors_jon_honeycutt(self):
self._assert_fuzz_match('John Honeycutt', 'Jon Honeycutt', 1)
# self._assert_fuzz_match('Jon', 'Jon Honeycutt', 0)
    def integration_test_contributors_justin_garcia(self):
# self._assert_fuzz_match('justin', 'Justin Garcia', 0)
self._assert_fuzz_match('justing', 'Justin Garcia', 0)
def integration_test_contributors_joseph_pecoraro(self):
self._assert_fuzz_match('Joe Pecoraro', 'Joseph Pecoraro', 3)
def integration_test_contributors_ken_kocienda(self):
self._assert_fuzz_match('ken', 'Ken Kocienda', 0)
self._assert_fuzz_match('kocienda', 'Ken Kocienda', 0)
def integration_test_contributors_kenneth_russell(self):
self._assert_fuzz_match('Ken Russell', 'Kenneth Russell', 4)
def integration_test_contributors_kevin_decker(self):
self._assert_fuzz_match('kdecker', 'Kevin Decker', 0)
def integration_test_contributors_kevin_mccullough(self):
self._assert_fuzz_match('Kevin M', 'Kevin McCullough', 0)
self._assert_fuzz_match('Kevin McCulough', 'Kevin McCullough', 1)
self._assert_fuzz_match('mccullough', 'Kevin McCullough', 0)
def integration_test_contributors_lars_knoll(self):
self._assert_fuzz_match('lars', 'Lars Knoll', 0)
def integration_test_contributors_lars_weintraub(self):
self._assert_fuzz_match('levi', 'Levi Weintraub', 0)
def integration_test_contributors_maciej_stachowiak(self):
self._assert_fuzz_match('Maciej', 'Maciej Stachowiak', 0)
# self._assert_fuzz_match('mjs', 'Maciej Stachowiak', 0)
self._assert_fuzz_match('Maciej S', 'Maciej Stachowiak', 0)
def integration_test_contributors_mark_rowe(self):
# self._assert_fuzz_match('Mark', 'Mark Rowe', 0)
self._assert_fuzz_match('bdash', 'Mark Rowe', 0)
self._assert_fuzz_match('mrowe', 'Mark Rowe', 0)
# self._assert_fuzz_match('Brian Dash', 'Mark Rowe', 0)
def integration_test_contributors_nikolas_zimmermann(self):
# self._assert_fuzz_match('Niko', 'Nikolas Zimmermann', 1)
self._assert_fuzz_match('Niko Zimmermann', 'Nikolas Zimmermann', 3)
self._assert_fuzz_match('Nikolas', 'Nikolas Zimmermann', 0)
def integration_test_contributors_oliver_hunt(self):
# self._assert_fuzz_match('Oliver', 'Oliver Hunt', 0)
self._assert_fuzz_match('Ollie', 'Oliver Hunt', 1)
self._assert_fuzz_match('Olliej', 'Oliver Hunt', 0)
self._assert_fuzz_match('Olliej Hunt', 'Oliver Hunt', 3)
self._assert_fuzz_match('olliej', 'Oliver Hunt', 0)
self._assert_fuzz_match('ollie', 'Oliver Hunt', 1)
self._assert_fuzz_match('ollliej', 'Oliver Hunt', 1)
    def integration_test_contributors_richard_williamson(self):
self._assert_fuzz_match('Richard', 'Richard Williamson', 0)
self._assert_fuzz_match('rjw', 'Richard Williamson', 0)
    def integration_test_contributors_rob_buis(self):
self._assert_fuzz_match('Rob', 'Rob Buis', 0)
self._assert_fuzz_match('rwlbuis', 'Rob Buis', 0)
def integration_test_contributors_rniwa(self):
self._assert_fuzz_match('[email protected]', 'Ryosuke Niwa', 0)
def disabled_integration_test_contributors_simon_fraser(self):
pass # self._assert_fuzz_match('Simon', 'Simon Fraser', 0)
def integration_test_contributors_steve_falkenburg(self):
self._assert_fuzz_match('Sfalken', 'Steve Falkenburg', 0)
# self._assert_fuzz_match('Steve', 'Steve Falkenburg', 0)
def integration_test_contributors_sam_weinig(self):
self._assert_fuzz_match('Sam', 'Sam Weinig', 0)
# self._assert_fuzz_match('Weinig Sam', 'weinig', 0)
self._assert_fuzz_match('Weinig', 'Sam Weinig', 0)
self._assert_fuzz_match('Sam W', 'Sam Weinig', 0)
self._assert_fuzz_match('Sammy Weinig', 'Sam Weinig', 2)
def integration_test_contributors_tim_omernick(self):
# self._assert_fuzz_match('timo', 'Tim Omernick', 0)
self._assert_fuzz_match('TimO', 'Tim Omernick', 0)
# self._assert_fuzz_match('Timo O', 'Tim Omernick', 0)
# self._assert_fuzz_match('Tim O.', 'Tim Omernick', 0)
self._assert_fuzz_match('Tim O', 'Tim Omernick', 0)
def integration_test_contributors_timothy_hatcher(self):
# self._assert_fuzz_match('Tim', 'Timothy Hatcher', 0)
# self._assert_fuzz_match('Tim H', 'Timothy Hatcher', 0)
self._assert_fuzz_match('Tim Hatcher', 'Timothy Hatcher', 4)
self._assert_fuzz_match('Tim Hatcheri', 'Timothy Hatcher', 5)
self._assert_fuzz_match('timothy', 'Timothy Hatcher', 0)
self._assert_fuzz_match('thatcher', 'Timothy Hatcher', 1)
self._assert_fuzz_match('xenon', 'Timothy Hatcher', 0)
self._assert_fuzz_match('Hatcher', 'Timothy Hatcher', 0)
# self._assert_fuzz_match('TimH', 'Timothy Hatcher', 0)
def integration_test_contributors_tor_arne_vestbo(self):
self._assert_fuzz_match('Tor Arne', u"Tor Arne Vestb\u00f8", 1) # Matches IRC nickname
def integration_test_contributors_vicki_murley(self):
self._assert_fuzz_match('Vicki', u"Vicki Murley", 0)
def integration_test_contributors_zack_rusin(self):
self._assert_fuzz_match('Zack', 'Zack Rusin', 0)
| bsd-3-clause |
JulyKikuAkita/PythonPrac | cs15211/MaximumSizeSubarraySumEqualsk.py | 1 | 4810 | __source__ = 'https://leetcode.com/problems/maximum-size-subarray-sum-equals-k/'
# https://github.com/kamyu104/LeetCode/blob/master/Python/maximum-size-subarray-sum-equals-k.py
# Time: O(n)
# Space: O(n)
#
# Description: Leetcode # 325. Maximum Size Subarray Sum Equals k
#
# Given an array nums and a target value k, find the maximum length of a subarray that sums to k.
# If there isn't one, return 0 instead.
#
# Example 1:
# Given nums = [1, -1, 5, -2, 3], k = 3,
# return 4. (because the subarray [1, -1, 5, -2] sums to 3 and is the longest)
#
# Example 2:
# Given nums = [-2, -1, 2, 1], k = 1,
# return 2. (because the subarray [-1, 2] sums to 1 and is the longest)
#
# Follow Up:
# Can you do it in O(n) time?
#
# Companies
# Palantir Facebook
# Related Topics
# Hash Table
# Similar Questions
# Minimum Size Subarray Sum Range Sum Query - Immutable Contiguous Array
#
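# Key idea for the O(n) follow-up (note added for clarity): keep a running
# prefix sum `acc` and a hash map from each prefix-sum value to the earliest
# index where it occurs; whenever acc - k is already in the map, the elements
# after that index sum to k, so i - map[acc - k] is a candidate length.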
import unittest
# O(n)
# 40ms 72.57%
class Solution(object):
def maxSubArrayLen(self, nums, k):
result, acc = 0, 0
        dic = {0: -1}  # so that i - dic[acc - k] yields the full subarray length
        for i in xrange(len(nums)):
            acc += nums[i]
            if acc not in dic:  # keep only the earliest index for each prefix sum
                dic[acc] = i
            if acc - k in dic:
                result = max(result, i - dic[acc - k])
        return result
class Solution2(object):
def maxSubArrayLen(self, nums, k):
"""
:type nums: List[int]
:type k: int
:rtype: int
"""
sums = {}
cur_sum, max_len = 0, 0
for i in xrange(len(nums)):
cur_sum += nums[i]
if cur_sum == k:
max_len = i + 1
elif cur_sum - k in sums:
max_len = max(max_len, i - sums[cur_sum - k])
if cur_sum not in sums:
sums[cur_sum] = i # Only keep the smallest index.
return max_len
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
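    def test_examples(self):
        # Added sketch: checks the two examples from the description above
        # against both Python solutions.
        for sol in (Solution(), Solution2()):
            self.assertEqual(4, sol.maxSubArrayLen([1, -1, 5, -2, 3], 3))
            self.assertEqual(2, sol.maxSubArrayLen([-2, -1, 2, 1], 1))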
if __name__ == '__main__':
unittest.main()
Java = '''
# Thought:
#
# 13ms 95.45%
class Solution {
public int maxSubArrayLen(int[] nums, int k) {
if(nums == null || nums.length == 0) return 0;
int len = nums.length;
int[] sums = new int[len + 1];
for (int i = 0; i < len; i++) {
sums[i + 1] = sums[i] + nums[i];
}
Map<Integer, Integer> map = new HashMap<>();
for (int i = 1; i < len + 1; i++) {
map.put(sums[i], i);
}
int result = 0;
for (int i = 0; i < len + 1; i++) {
int curr = sums[i] + k;
if (map.containsKey(curr)) {
result = Math.max(result, map.get(curr) - i);
}
}
return result;
}
}
# 23ms 26.54%
class Solution {
public int maxSubArrayLen(int[] nums, int k) {
if(nums == null || nums.length == 0) return 0;
int len = nums.length;
int[] sums = new int[len + 1];
sums[0] = nums[0];
for (int i = 1; i < len; i++) {
sums[i] = sums[i - 1] + nums[i];
}
Map<Integer, Integer> map = new HashMap<>();
map.put(0, -1);
int result = 0;
for (int i = 0; i < len; i++) {
int curr = sums[i] - k;
if (map.containsKey(curr)) {
result = Math.max(result, i - map.get(curr));
}
if (!map.containsKey(sums[i])) map.put(sums[i], i);
}
return result;
}
}
# 29ms 13.32%
class Solution {
public int maxSubArrayLen(int[] nums, int k) {
if(nums.length == 0) return 0;
int len = nums.length;
int sums = 0;
int res = 0;
Map<Integer, Integer> map = new HashMap<>();
map.put(0, -1);
for(int i = 0; i < len; i++){
sums += nums[i];
if(!map.containsKey(sums)){
map.put(sums, i);
}
if(map.containsKey(sums - k)){
res = Math.max(res, i - map.get(sums - k));
}
}
return res;
}
}
# 11ms 99.90%
class Solution {
public int maxSubArrayLen(int[] nums, int k) {
int[] sums = new int[nums.length + 1];
for (int i = 0; i < nums.length; i++) {
sums[i + 1] = sums[i] + nums[i];
}
Map<Integer, Integer> map = new HashMap<>();
for (int i = 0; i < nums.length + 1; i++) {
map.put(sums[i], i);
}
int result = 0;
for (int i = 0; i < nums.length; i++) {
int target = sums[i] + k;
Integer val = map.get(target);
if (val != null && val > i) {
result = Math.max(result, val - i);
}
}
return result;
}
}
'''
| apache-2.0 |
dbestm/mbed | workspace_tools/tests.py | 1 | 47117 | """
mbed SDK
Copyright (c) 2011-2013 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from workspace_tools.paths import *
from workspace_tools.data.support import *
TEST_CMSIS_LIB = join(TEST_DIR, "cmsis", "lib")
TEST_MBED_LIB = join(TEST_DIR, "mbed", "env")
PERIPHERALS = join(TEST_DIR, "peripherals")
BENCHMARKS_DIR = join(TEST_DIR, "benchmarks")
SD = join(TEST_DIR, "sd")
TMP102 = join(PERIPHERALS, 'TMP102')
AT30TSE75X = join(PERIPHERALS, 'AT30TSE75X')
"""
Wiring:
* Ground:
* LPC1*: p1
* KL25Z: GND
* Vout
* LPC1*: p40
* KL25Z: P3V3
* TMP102 (I2C):
* LPC1*: (SDA=p28 , SCL=p27)
* KL25Z: (SDA=PTC9, SCL=PTC8)
* MAXWSNENV: (SDA=TP6, SCL=TP5)
* digital_loop (Digital(In|Out|InOut), InterruptIn):
* Arduino headers: (D0 <-> D7)
* LPC1549: (D2 <-> D7)
* LPC1*: (p5 <-> p25 )
* KL25Z: (PTA5<-> PTC6)
* NUCLEO_F103RB: (PC_6 <-> PB_8)
* MAXWSNENV: (TP3 <-> TP4)
* MAX32600MBED: (P1_0 <-> P4_7)
* VK_RZ_A1H: (P3_2 <-> P5_6)
* port_loop (Port(In|Out|InOut)):
* Arduino headers: (D0 <-> D7), (D1 <-> D6)
* LPC1*: (p5 <-> p25), (p6 <-> p26)
* KL25Z: (PTA5 <-> PTC6), (PTA4 <-> PTC5)
* NUCLEO_F103RB: (PC_6 <-> PB_8), (PC_5 <-> PB_9)
* MAXWSNENV: (TP1 <-> TP3), (TP2 <-> TP4)
* MAX32600MBED: (P1_0 <-> P4_7), (P1_1 <-> P4_6)
* VK_RZ_A1H: (P3_2 <-> P5_6), (P3_7 <-> P5_1)
* analog_loop (AnalogIn, AnalogOut):
* Arduino headers: (A0 <-> A5)
* LPC1549: (A0 <-> D12)
* LPC1*: (p17 <-> p18 )
* KL25Z: (PTE30 <-> PTC2)
* analog_pot (AnalogIn):
* Arduino headers: (A0, A1)
* VK_RZ_A1H: (AN0, AN1)
* SD (SPI):
* LPC1*: (mosi=p11 , miso=p12 , sclk=p13 , cs=p14 )
* KL25Z: (mosi=PTD2, miso=PTD3, sclk=PTD1, cs=PTD0)
* MMA7660 (I2C):
* LPC1*: (SDA=p28 , SCL=p27)
* i2c_loop:
* LPC1768: (p28 <-> p9), (p27 <-> p10)
* i2c_eeprom:
* LPC1*: (SDA=p28 , SCL=p27)
* KL25Z: (SDA=PTE0, SCL=PTE1)
* VK_RZ_A1H:(SDA=P1_1, SCL=P1_0)
* can_transceiver:
* LPC1768: (RX=p9, TX=p10)
* LPC1549: (RX=D9, TX=D8)
* LPC4088: (RX=p9, TX=p10)
* VK_RZ_A1H:(RX=P5_9, TX=P5_10)
* NUCLEO_F091RC: (RX=PA_11, TX=PA_12)
* NUCLEO_F072RB: (RX=PA_11, TX=PA_12)
* NUCLEO_F042K6: (RX=PA_11, TX=PA_12)
"""
TESTS = [
# Automated MBED tests
{
"id": "MBED_A1", "description": "Basic",
"source_dir": join(TEST_DIR, "mbed", "basic"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
},
{
"id": "MBED_A2", "description": "Semihost file system",
"source_dir": join(TEST_DIR, "mbed", "file"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"mcu": ["LPC1768", "LPC2368", "LPC11U24"]
},
{
"id": "MBED_A3", "description": "C++ STL",
"source_dir": join(TEST_DIR, "mbed", "stl"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": False,
},
{
"id": "MBED_A4", "description": "I2C TMP102",
"source_dir": join(TEST_DIR, "mbed", "i2c_TMP102"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, TMP102],
"automated": True,
"peripherals": ["TMP102"]
},
{
"id": "MBED_AT30TSE75X", "description": "I2C Temperature Sensor / EEPROM",
"source_dir": join(TEST_DIR, "mbed", "i2c_at30tse75x"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, AT30TSE75X],
"automated": False,
"peripherals": ["AT30TSE75X"]
},
{
"id": "MBED_A5", "description": "DigitalIn DigitalOut",
"source_dir": join(TEST_DIR, "mbed", "digitalin_digitalout"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"peripherals": ["digital_loop"]
},
{
"id": "MBED_A6", "description": "DigitalInOut",
"source_dir": join(TEST_DIR, "mbed", "digitalinout"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"peripherals": ["digital_loop"]
},
{
"id": "MBED_A7", "description": "InterruptIn",
"source_dir": join(TEST_DIR, "mbed", "interruptin"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"duration": 15,
"automated": True,
"peripherals": ["digital_loop"]
},
{
"id": "MBED_A8", "description": "Analog",
"source_dir": join(TEST_DIR, "mbed", "analog"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"peripherals": ["analog_loop"],
"mcu": ["LPC1768", "LPC2368", "LPC2460", "KL25Z", "K64F", "K22F", "LPC4088", "LPC1549",
"NUCLEO_F072RB", "NUCLEO_F091RC", "NUCLEO_F302R8", "NUCLEO_F303K8", "NUCLEO_F303RE",
"NUCLEO_F334R8", "NUCLEO_L053R8", "NUCLEO_L073RZ", "NUCLEO_L152RE",
"NUCLEO_F410RB", "NUCLEO_F411RE", "NUCLEO_F446RE", "DISCO_F407VG", "DISCO_F746NG", "NUCLEO_F746ZG",
"ARCH_MAX", "MAX32600MBED", "MOTE_L152RC", "B96B_F446VE"]
},
{
"id": "MBED_A9", "description": "Serial Echo at 115200",
"source_dir": join(TEST_DIR, "mbed", "echo"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
#"host_test": "echo"
},
{
"id": "MBED_A10", "description": "PortOut PortIn",
"source_dir": join(TEST_DIR, "mbed", "portout_portin"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"peripherals": ["port_loop"],
"supported": DEFAULT_SUPPORT,
"automated": True,
},
{
"id": "MBED_A11", "description": "PortInOut",
"source_dir": join(TEST_DIR, "mbed", "portinout"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"peripherals": ["port_loop"],
"supported": DEFAULT_SUPPORT,
"automated": True,
},
{
"id": "MBED_A12", "description": "SD File System",
"source_dir": join(TEST_DIR, "mbed", "sd"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
"automated": True,
"duration": 15,
"peripherals": ["SD"]
},
{
"id": "MBED_A13", "description": "I2C MMA7660 accelerometer",
"source_dir": join(TEST_DIR, "mbed", "i2c_MMA7660"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'MMA7660')],
"automated": True,
"peripherals": ["MMA7660"]
},
{
"id": "MBED_A14", "description": "I2C Master",
"source_dir": join(TEST_DIR, "mbed", "i2c_master"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
},
{
"id": "MBED_A15", "description": "I2C Slave",
"source_dir": join(TEST_DIR, "mbed", "i2c_slave"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
},
{
"id": "MBED_A16", "description": "SPI Master",
"source_dir": join(TEST_DIR, "mbed", "spi_master"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
},
{
"id": "MBED_A17", "description": "SPI Slave",
"source_dir": join(TEST_DIR, "mbed", "spi_slave"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
},
{
"id": "MBED_A18", "description": "Interrupt vector relocation",
"source_dir": join(TEST_DIR, "mbed", "vtor_reloc"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
"mcu": ["LPC1768"],
"automated": True,
},
{
"id": "MBED_A19", "description": "I2C EEPROM read/write test",
"source_dir": join(TEST_DIR, "mbed", "i2c_eeprom"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"peripherals": ["24LC256"],
"automated": True,
"duration": 15,
},
{
"id": "MBED_A20", "description": "I2C master/slave test",
"source_dir": join(TEST_DIR, "mbed", "i2c_master_slave"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB,],
"mcu": ["LPC1768", "RZ_A1H"],
"peripherals": ["i2c_loop"]
},
{
"id": "MBED_A21", "description": "Call function before main (mbed_main)",
"source_dir": join(TEST_DIR, "mbed", "call_before_main"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
},
{
"id": "MBED_A22", "description": "SPIFI for LPC4088 (test 1)",
"source_dir": join(TEST_DIR, "mbed", "spifi1"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"duration": 30,
"mcu": ["LPC4088","LPC4088_DM"]
},
{
"id": "MBED_A23", "description": "SPIFI for LPC4088 (test 2)",
"source_dir": join(TEST_DIR, "mbed", "spifi2"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"duration": 30,
"mcu": ["LPC4088","LPC4088_DM"]
},
{
"id": "MBED_A24", "description": "Serial echo with RTS/CTS flow control",
"source_dir": join(TEST_DIR, "mbed", "echo_flow_control"),
"dependencies": [MBED_LIBRARIES],
"automated": "True",
"host_test": "echo_flow_control",
"mcu": ["LPC1768"],
"peripherals": ["extra_serial"]
},
{
"id": "MBED_A25", "description": "I2C EEPROM line read/write test",
"source_dir": join(TEST_DIR, "mbed", "i2c_eeprom_line"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"peripherals": ["24LC256"],
"automated": True,
"duration": 10,
},
{
"id": "MBED_A26", "description": "AnalogIn potentiometer test",
"source_dir": join(TEST_DIR, "mbed", "analog_pot"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"peripherals": ["analog_pot"],
"automated": True,
"duration": 10,
},
{
"id": "MBED_A27", "description": "CAN loopback test",
"source_dir": join(TEST_DIR, "mbed", "can_loopback"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"duration": 20,
"peripherals": ["can_transceiver"],
"mcu": ["LPC1549", "LPC1768","B96B_F446VE", "VK_RZ_A1H",
"NUCLEO_F091RC", "NUCLEO_F072RB", "NUCLEO_F042K6"],
},
{
"id": "MBED_BLINKY", "description": "Blinky",
"source_dir": join(TEST_DIR, "mbed", "blinky"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": False,
},
{
"id": "MBED_BUS", "description": "Blinky BUS",
"source_dir": join(TEST_DIR, "mbed", "bus"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": False,
"duration": 15,
},
{
"id": "MBED_BUSOUT", "description": "BusOut",
"source_dir": join(TEST_DIR, "mbed", "bus_out"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"duration": 15,
},
# Size benchmarks
{
"id": "BENCHMARK_1", "description": "Size (c environment)",
"source_dir": join(BENCHMARKS_DIR, "cenv"),
"dependencies": [MBED_LIBRARIES]
},
{
"id": "BENCHMARK_2", "description": "Size (float math)",
"source_dir": join(BENCHMARKS_DIR, "float_math"),
"dependencies": [MBED_LIBRARIES]
},
{
"id": "BENCHMARK_3", "description": "Size (printf)",
"source_dir": join(BENCHMARKS_DIR, "printf"),
"dependencies": [MBED_LIBRARIES]
},
{
"id": "BENCHMARK_4", "description": "Size (mbed libs)",
"source_dir": join(BENCHMARKS_DIR, "mbed"),
"dependencies": [MBED_LIBRARIES]
},
{
"id": "BENCHMARK_5", "description": "Size (all)",
"source_dir": join(BENCHMARKS_DIR, "all"),
"dependencies": [MBED_LIBRARIES]
},
# performance related tests
{
"id": "PERF_1", "description": "SD Stdio R/W Speed",
"source_dir": join(TEST_DIR, "mbed", "sd_perf_stdio"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
"automated": True,
"duration": 15,
"peripherals": ["SD"]
},
{
"id": "PERF_2", "description": "SD FileHandle R/W Speed",
"source_dir": join(TEST_DIR, "mbed", "sd_perf_fhandle"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
"automated": True,
"duration": 15,
"peripherals": ["SD"]
},
{
"id": "PERF_3", "description": "SD FatFS R/W Speed",
"source_dir": join(TEST_DIR, "mbed", "sd_perf_fatfs"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
"automated": True,
"duration": 15,
"peripherals": ["SD"]
},
# Not automated MBED tests
{
"id": "MBED_1", "description": "I2C SRF08",
"source_dir": join(TEST_DIR, "mbed", "i2c_SRF08"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'SRF08')],
"peripherals": ["SRF08"]
},
{
"id": "MBED_2", "description": "stdio",
"source_dir": join(TEST_DIR, "mbed", "stdio"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"duration": 20,
"automated": True,
#"host_test": "stdio_auto"
},
{
"id": "MBED_3", "description": "PortOut",
"source_dir": join(TEST_DIR, "mbed", "portout"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_4", "description": "Sleep",
"source_dir": join(TEST_DIR, "mbed", "sleep"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"duration": 30,
"mcu": ["LPC1768", "LPC11U24", "LPC4088","LPC4088_DM","NRF51822", "LPC11U68"]
},
{
"id": "MBED_5", "description": "PWM",
"source_dir": join(TEST_DIR, "mbed", "pwm"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB]
},
{
"id": "MBED_6", "description": "SW Reset",
"source_dir": join(TEST_DIR, "mbed", "reset"),
"dependencies": [MBED_LIBRARIES],
"duration": 15
},
{
"id": "MBED_7", "description": "stdio benchmark",
"source_dir": join(TEST_DIR, "mbed", "stdio_benchmark"),
"dependencies": [MBED_LIBRARIES],
"duration": 40
},
{
"id": "MBED_8", "description": "SPI",
"source_dir": join(TEST_DIR, "mbed", "spi"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_9", "description": "Sleep Timeout",
"source_dir": join(TEST_DIR, "mbed", "sleep_timeout"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_10", "description": "Hello World",
"source_dir": join(TEST_DIR, "mbed", "hello"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
#"host_test": "hello_auto",
},
{
"id": "MBED_11", "description": "Ticker Int",
"source_dir": join(TEST_DIR, "mbed", "ticker"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
#"host_test": "wait_us_auto",
"duration": 20,
},
{
"id": "MBED_12", "description": "C++",
"source_dir": join(TEST_DIR, "mbed", "cpp"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True
},
{
"id": "MBED_13", "description": "Heap & Stack",
"source_dir": join(TEST_DIR, "mbed", "heap_and_stack"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
},
{
"id": "MBED_14", "description": "Serial Interrupt",
"source_dir": join(TEST_DIR, "mbed", "serial_interrupt"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
},
{
"id": "MBED_15", "description": "RPC",
"source_dir": join(TEST_DIR, "mbed", "rpc"),
"dependencies": [MBED_LIBRARIES, join(LIB_DIR, "rpc"), TEST_MBED_LIB],
"automated": False,
"mcu": ["LPC1768"]
},
{
"id": "MBED_16", "description": "RTC",
"source_dir": join(TEST_DIR, "mbed", "rtc"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"exclude_mcu": ["NRF51822", "NRF51822_BOOT", "NRF51822_OTA", "NRF51822_Y5_MBUG",
"NRF51_DK", "NRF51_DK_BOOT", "NRF51_DK_OTA",
"NRF51_MICROBIT", "NRF51_MICROBIT_B", "NRF51_MICROBIT_BOOT",
"NRF51_MICROBIT_B_BOOT", "NRF51_MICROBIT_B_OTA", "NRF51_MICROBIT_OTA",
"HRM1017", "HRM1017_BOOT", "HRM1701_OTA",
"TY51822R3", "TY51822R3_BOOT", "TY51822R3_OTA",
"NRF15_DONGLE", "NRF15_DONGLE_BOOT", "NRF15_DONGLE_OTA",
"ARCH_BLE", "ARCH_BLE_BOOT", "ARCH_BLE_OTA",
"ARCH_LINK", "ARCH_LINK_BOOT", "ARCH_LINK_OTA",
"RBLAB_BLENANO", "RBLAB_BLENANO_BOOT", "RBLAB_BLENANO_OTA",
"RBLAB_NRF51822", "RBLAB_NRF51822_BOOT", "RBLAB_NRF51822_OTA",
"SEEED_TINY_BLE", "SEEED_TINY_BLE_BOOT", "SEEED_TINY_BLE_OTA",
"WALLBOT_BLE", "WALLBOT_BLE_BOOT", "WALLBOT_BLE_OTA",
"DELTA_DFCM_NNN40", "DELTA_DFCM_NNN40_BOOT", "DELTA_DFCM_NNN40_OTA",
"LPC1114"],
#"host_test": "rtc_auto",
"duration": 15
},
{
"id": "MBED_17", "description": "Serial Interrupt 2",
"source_dir": join(TEST_DIR, "mbed", "serial_interrupt_2"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_18", "description": "Local FS Directory",
"source_dir": join(TEST_DIR, "mbed", "dir"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_19", "description": "SD FS Directory",
"source_dir": join(TEST_DIR, "mbed", "dir_sd"),
"dependencies": [MBED_LIBRARIES, FS_LIBRARY],
"peripherals": ["SD"]
},
{
"id": "MBED_20", "description": "InterruptIn 2",
"source_dir": join(TEST_DIR, "mbed", "interruptin_2"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_21", "description": "freopen Stream",
"source_dir": join(TEST_DIR, "mbed", "freopen"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_22", "description": "Semihost",
"source_dir": join(TEST_DIR, "mbed", "semihost"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"mcu": ["LPC1768", "LPC2368", "LPC11U24"]
},
{
"id": "MBED_23", "description": "Ticker Int us",
"source_dir": join(TEST_DIR, "mbed", "ticker_2"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"duration": 15,
"automated": True,
#"host_test": "wait_us_auto"
},
{
"id": "MBED_24", "description": "Timeout Int us",
"source_dir": join(TEST_DIR, "mbed", "timeout"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"duration": 15,
"automated": True,
#"host_test": "wait_us_auto"
},
{
"id": "MBED_25", "description": "Time us",
"source_dir": join(TEST_DIR, "mbed", "time_us"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"duration": 15,
"automated": True,
#"host_test": "wait_us_auto"
},
{
"id": "MBED_26", "description": "Integer constant division",
"source_dir": join(TEST_DIR, "mbed", "div"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
},
{
"id": "MBED_27", "description": "SPI ADXL345",
"source_dir": join(TEST_DIR, "mbed", "spi_ADXL345"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'ADXL345')],
"peripherals": ["ADXL345"]
},
{
"id": "MBED_28", "description": "Interrupt chaining (InterruptManager)",
"source_dir": join(TEST_DIR, "mbed", "interrupt_chaining"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
},
{
"id": "MBED_29", "description": "CAN network test",
"source_dir": join(TEST_DIR, "mbed", "can"),
"dependencies": [MBED_LIBRARIES],
"mcu": ["LPC1768", "LPC4088", "LPC1549", "RZ_A1H", "B96B_F446VE", "NUCLEO_F091RC",
"NUCLEO_F072RB", "NUCLEO_F042K6"]
},
{
"id": "MBED_30", "description": "CAN network test using interrupts",
"source_dir": join(TEST_DIR, "mbed", "can_interrupt"),
"dependencies": [MBED_LIBRARIES],
"mcu": ["LPC1768", "LPC4088", "LPC1549", "RZ_A1H", "B96B_F446VE", "NUCLEO_F091RC",
"NUCLEO_F072RB", "NUCLEO_F042K6"]
},
{
"id": "MBED_31", "description": "PWM LED test",
"source_dir": join(TEST_DIR, "mbed", "pwm_led"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_32", "description": "Pin toggling",
"source_dir": join(TEST_DIR, "mbed", "pin_toggling"),
"dependencies": [MBED_LIBRARIES],
},
{
"id": "MBED_33", "description": "C string operations",
"source_dir": join(TEST_DIR, "mbed", "cstring"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"duration": 10,
"automated": False,
},
{
"id": "MBED_34", "description": "Ticker Two callbacks",
"source_dir": join(TEST_DIR, "mbed", "ticker_3"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"duration": 15,
"automated": True,
#"host_test": "wait_us_auto"
},
{
"id": "MBED_35", "description": "SPI C12832 display",
"source_dir": join(TEST_DIR, "mbed", "spi_C12832"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'C12832')],
"peripherals": ["C12832"],
"automated": True,
"duration": 10,
},
{
"id": "MBED_36", "description": "WFI correct behavior",
"source_dir": join(TEST_DIR, "mbed", "wfi"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": False
},
{
"id": "MBED_37", "description": "Serial NC RX",
"source_dir": join(TEST_DIR, "mbed", "serial_nc_rx"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True
},
{
"id": "MBED_38", "description": "Serial NC TX",
"source_dir": join(TEST_DIR, "mbed", "serial_nc_tx"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True
},
# CMSIS RTOS tests
{
"id": "CMSIS_RTOS_1", "description": "Basic",
"source_dir": join(TEST_DIR, "rtos", "cmsis", "basic"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
},
{
"id": "CMSIS_RTOS_2", "description": "Mutex",
"source_dir": join(TEST_DIR, "rtos", "cmsis", "mutex"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
"duration": 20
},
{
"id": "CMSIS_RTOS_3", "description": "Semaphore",
"source_dir": join(TEST_DIR, "rtos", "cmsis", "semaphore"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
"duration": 20
},
{
"id": "CMSIS_RTOS_4", "description": "Signals",
"source_dir": join(TEST_DIR, "rtos", "cmsis", "signals"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
},
{
"id": "CMSIS_RTOS_5", "description": "Queue",
"source_dir": join(TEST_DIR, "rtos", "cmsis", "queue"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
"duration": 20
},
{
"id": "CMSIS_RTOS_6", "description": "Mail",
"source_dir": join(TEST_DIR, "rtos", "cmsis", "mail"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
"duration": 20
},
{
"id": "CMSIS_RTOS_7", "description": "Timer",
"source_dir": join(TEST_DIR, "rtos", "cmsis", "timer"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
},
{
"id": "CMSIS_RTOS_8", "description": "ISR",
"source_dir": join(TEST_DIR, "rtos", "cmsis", "isr"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES],
},
# mbed RTOS tests
{
"id": "RTOS_1", "description": "Basic thread",
"source_dir": join(TEST_DIR, "rtos", "mbed", "basic"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"duration": 15,
"automated": True,
#"host_test": "wait_us_auto",
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
"KL25Z", "KL05Z", "K64F", "KL46Z",
"RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
"NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
"DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE"],
},
{
"id": "RTOS_2", "description": "Mutex resource lock",
"source_dir": join(TEST_DIR, "rtos", "mbed", "mutex"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"duration": 20,
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
"KL25Z", "KL05Z", "K64F", "KL46Z",
"RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
"NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
"DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG",
"NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE"],
},
{
"id": "RTOS_3", "description": "Semaphore resource lock",
"source_dir": join(TEST_DIR, "rtos", "mbed", "semaphore"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"duration": 20,
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
"KL25Z", "KL05Z", "K64F", "KL46Z",
"RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
"NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
"DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG",
"NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE"],
},
{
"id": "RTOS_4", "description": "Signals messaging",
"source_dir": join(TEST_DIR, "rtos", "mbed", "signals"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
"KL25Z", "KL05Z", "K64F", "KL46Z",
"RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
"NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
"DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE", "NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG",
"NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE"],
},
{
"id": "RTOS_5", "description": "Queue messaging",
"source_dir": join(TEST_DIR, "rtos", "mbed", "queue"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
"KL25Z", "KL05Z", "K64F", "KL46Z",
"RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
"NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
"DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
"NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE"],
},
{
"id": "RTOS_6", "description": "Mail messaging",
"source_dir": join(TEST_DIR, "rtos", "mbed", "mail"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
"KL25Z", "KL05Z", "K64F", "KL46Z",
"RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
"NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
"DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
"NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE"],
},
{
"id": "RTOS_7", "description": "Timer",
"source_dir": join(TEST_DIR, "rtos", "mbed", "timer"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"duration": 15,
"automated": True,
#"host_test": "wait_us_auto",
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
"KL25Z", "KL05Z", "K64F", "KL46Z",
"RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
"NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
"DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
"NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE"],
},
{
"id": "RTOS_8", "description": "ISR (Queue)",
"source_dir": join(TEST_DIR, "rtos", "mbed", "isr"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB],
"automated": True,
"mcu": ["LPC1768", "LPC1549", "LPC11U24", "LPC812", "LPC2460", "LPC824", "SSCI824",
"KL25Z", "KL05Z", "K64F", "KL46Z",
"RZ_A1H", "VK_RZ_A1H", "DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "DISCO_F469NI", "NUCLEO_F410RB",
"NUCLEO_F401RE", "NUCLEO_F334R8", "DISCO_F334C8", "NUCLEO_F302R8", "NUCLEO_F030R8", "NUCLEO_F070RB",
"NUCLEO_L031K6", "NUCLEO_L053R8", "DISCO_L053C8", "NUCLEO_L073RZ", "NUCLEO_F072RB", "NUCLEO_F091RC", "DISCO_L476VG", "NUCLEO_L476RG",
"DISCO_F401VC", "NUCLEO_F303RE", "NUCLEO_F303K8", "MAXWSNENV", "MAX32600MBED", "NUCLEO_L152RE",
"NUCLEO_F446RE", "NUCLEO_F103RB", "DISCO_F746NG", "NUCLEO_F746ZG", "MOTE_L152RC", "B96B_F446VE"],
},
{
"id": "RTOS_9", "description": "SD File write-read",
"source_dir": join(TEST_DIR, "rtos", "mbed", "file"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
"automated": True,
"peripherals": ["SD"],
"mcu": ["LPC1768", "LPC11U24", "LPC812", "KL25Z",
"KL05Z", "K64F", "KL46Z", "RZ_A1H",
"DISCO_F407VG", "DISCO_F429ZI", "NUCLEO_F411RE", "NUCLEO_F401RE", "NUCLEO_F410RB", "DISCO_F469NI"],
},
# Networking Tests
{
"id": "NET_1", "description": "TCP client hello world",
"source_dir": join(TEST_DIR, "net", "helloworld", "tcpclient"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"duration": 15,
"automated": True,
"peripherals": ["ethernet"],
},
{
"id": "NET_2", "description": "NIST Internet Time Service",
"source_dir": join(TEST_DIR, "net", "helloworld", "udpclient"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"duration": 15,
"automated": True,
"peripherals": ["ethernet"],
},
{
"id": "NET_3", "description": "TCP echo server",
"source_dir": join(TEST_DIR, "net", "echo", "tcp_server"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"automated": True,
#"host_test" : "tcpecho_server_auto",
"peripherals": ["ethernet"],
},
{
"id": "NET_4", "description": "TCP echo client",
"source_dir": join(TEST_DIR, "net", "echo", "tcp_client"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"automated": True,
#"host_test": "tcpecho_client_auto",
"peripherals": ["ethernet"]
},
{
"id": "NET_5", "description": "UDP echo server",
"source_dir": join(TEST_DIR, "net", "echo", "udp_server"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"automated": True,
#"host_test" : "udpecho_server_auto",
"peripherals": ["ethernet"]
},
{
"id": "NET_6", "description": "UDP echo client",
"source_dir": join(TEST_DIR, "net", "echo", "udp_client"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"automated": True,
#"host_test" : "udpecho_client_auto",
"peripherals": ["ethernet"],
},
{
"id": "NET_7", "description": "HTTP client hello world",
"source_dir": join(TEST_DIR, "net", "protocols", "HTTPClient_HelloWorld"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"automated": True,
"duration": 15,
"peripherals": ["ethernet"],
},
{
"id": "NET_8", "description": "NTP client",
"source_dir": join(TEST_DIR, "net", "protocols", "NTPClient_HelloWorld"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"automated": True,
"peripherals": ["ethernet"],
},
{
"id": "NET_9", "description": "Multicast Send",
"source_dir": join(TEST_DIR, "net", "helloworld", "multicast_send"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
"peripherals": ["ethernet"],
},
{
"id": "NET_10", "description": "Multicast Receive",
"source_dir": join(TEST_DIR, "net", "helloworld", "multicast_receive"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
"peripherals": ["ethernet"],
},
{
"id": "NET_11", "description": "Broadcast Send",
"source_dir": join(TEST_DIR, "net", "helloworld", "broadcast_send"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
"peripherals": ["ethernet"],
},
{
"id": "NET_12", "description": "Broadcast Receive",
"source_dir": join(TEST_DIR, "net", "helloworld", "broadcast_receive"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
"peripherals": ["ethernet"],
},
{
"id": "NET_13", "description": "TCP client echo loop",
"source_dir": join(TEST_DIR, "net", "echo", "tcp_client_loop"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY, TEST_MBED_LIB],
"automated": True,
"duration": 15,
#"host_test": "tcpecho_client_auto",
"peripherals": ["ethernet"],
},
{
"id": "NET_14", "description": "UDP PHY/Data link layer",
"source_dir": join(TEST_DIR, "net", "echo", "udp_link_layer"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, ETH_LIBRARY],
"automated": False,
"duration": 20,
"host_test": "udp_link_layer_auto",
"peripherals": ["ethernet"],
},
# u-blox tests
{
"id": "UB_1", "description": "u-blox USB modem: HTTP client",
"source_dir": [join(TEST_DIR, "net", "cellular", "http", "ubloxusb"), join(TEST_DIR, "net", "cellular", "http", "common")],
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, USB_HOST_LIBRARIES, UBLOX_LIBRARY],
"supported": CORTEX_ARM_SUPPORT,
},
{
"id": "UB_2", "description": "u-blox USB modem: SMS test",
"source_dir": [join(TEST_DIR, "net", "cellular", "sms", "ubloxusb"), join(TEST_DIR, "net", "cellular", "sms", "common")],
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, USB_HOST_LIBRARIES, UBLOX_LIBRARY],
"supported": CORTEX_ARM_SUPPORT,
},
# USB Tests
{
"id": "USB_1", "description": "Mouse",
"source_dir": join(TEST_DIR, "usb", "device", "basic"),
"dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
},
{
"id": "USB_2", "description": "Keyboard",
"source_dir": join(TEST_DIR, "usb", "device", "keyboard"),
"dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
},
{
"id": "USB_3", "description": "Mouse_Keyboard",
"source_dir": join(TEST_DIR, "usb", "device", "keyboard"),
"dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
},
{
"id": "USB_4", "description": "Serial Port",
"source_dir": join(TEST_DIR, "usb", "device", "serial"),
"dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
"supported": CORTEX_ARM_SUPPORT,
},
{
"id": "USB_5", "description": "Generic HID",
"source_dir": join(TEST_DIR, "usb", "device", "raw_hid"),
"dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
},
{
"id": "USB_6", "description": "MIDI",
"source_dir": join(TEST_DIR, "usb", "device", "midi"),
"dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
},
{
"id": "USB_7", "description": "AUDIO",
"source_dir": join(TEST_DIR, "usb", "device", "audio"),
"dependencies": [MBED_LIBRARIES, USB_LIBRARIES],
},
# CMSIS DSP
{
"id": "CMSIS_DSP_1", "description": "FIR",
"source_dir": join(TEST_DIR, "dsp", "cmsis", "fir_f32"),
"dependencies": [MBED_LIBRARIES, DSP_LIBRARIES],
},
# mbed DSP
{
"id": "DSP_1", "description": "FIR",
"source_dir": join(TEST_DIR, "dsp", "mbed", "fir_f32"),
"dependencies": [MBED_LIBRARIES, DSP_LIBRARIES],
},
# KL25Z
{
"id": "KL25Z_1", "description": "LPTMR",
"source_dir": join(TEST_DIR, "KL25Z", "lptmr"),
"dependencies": [MBED_LIBRARIES],
"supported": CORTEX_ARM_SUPPORT,
"mcu": ["KL25Z"],
},
{
"id": "KL25Z_2", "description": "PIT",
"source_dir": join(TEST_DIR, "KL25Z", "pit"),
"dependencies": [MBED_LIBRARIES],
"supported": CORTEX_ARM_SUPPORT,
"mcu": ["KL25Z"],
},
{
"id": "KL25Z_3", "description": "TSI Touch Sensor",
"source_dir": join(TEST_DIR, "mbed", "tsi"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'TSI')],
"mcu": ["KL25Z"],
},
{
"id": "KL25Z_4", "description": "RTC",
"source_dir": join(TEST_DIR, "KL25Z", "rtc"),
"dependencies": [MBED_LIBRARIES],
"mcu": ["KL25Z"],
},
{
"id": "KL25Z_5", "description": "MMA8451Q accelerometer",
"source_dir": join(TEST_DIR, "mbed", "i2c_MMA8451Q"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, join(PERIPHERALS, 'MMA8451Q')],
"mcu": ["KL25Z", "KL05Z", "KL46Z", "K20D50M"],
"automated": True,
"duration": 15,
},
# Examples
{
"id": "EXAMPLE_1", "description": "/dev/null",
"source_dir": join(TEST_DIR, "mbed", "dev_null"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
#"host_test" : "dev_null_auto",
},
{
"id": "EXAMPLE_2", "description": "FS + RTOS",
"source_dir": join(TEST_DIR, "mbed", "fs"),
"dependencies": [MBED_LIBRARIES, RTOS_LIBRARIES, TEST_MBED_LIB, FS_LIBRARY],
},
    # The CppUTest library provides a unit testing framework.
    #
    # To write TESTs and TEST_GROUPs, add CPPUTEST_LIBRARY to 'dependencies'.
    #
    # This also pulls in:
    # 1. a test runner - a main function that calls CommandLineTestRunner::RunAllTests(ac, av)
    # 2. a serial console object that prints test results on the serial port
    #
# Unit testing with cpputest library
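    # A minimal sketch of such an entry (the "utest/example" directory is
    # hypothetical, shown only to illustrate the required fields):
    #
    # {
    #     "id": "UT_EXAMPLE", "description": "CppUTest skeleton",
    #     "source_dir": join(TEST_DIR, "utest", "example"),
    #     "dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
    #     "automated": False,
    # },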
{
"id": "UT_1", "description": "Basic",
"source_dir": join(TEST_DIR, "utest", "basic"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
"automated": False,
},
{
"id": "UT_2", "description": "Semihost file system",
"source_dir": join(TEST_DIR, "utest", "semihost_fs"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
"automated": False,
"mcu": ["LPC1768", "LPC2368", "LPC11U24"]
},
{
"id": "UT_3", "description": "General tests",
"source_dir": join(TEST_DIR, "utest", "general"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
"automated": False,
},
{
"id": "UT_BUSIO", "description": "BusIn BusOut",
"source_dir": join(TEST_DIR, "utest", "bus"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
"automated": False,
},
{
"id": "UT_I2C_EEPROM_ASYNCH", "description": "I2C Asynch eeprom",
"source_dir": join(TEST_DIR, "utest", "i2c_eeprom_asynch"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
"automated": False,
},
{
"id": "UT_SERIAL_ASYNCH", "description": "Asynch serial test (req 2 serial peripherals)",
"source_dir": join(TEST_DIR, "utest", "serial_asynch"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
"automated": False,
},
{
"id": "UT_SPI_ASYNCH", "description": "Asynch spi test",
"source_dir": join(TEST_DIR, "utest", "spi_asynch"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
"automated": False,
},
{
"id": "UT_LP_TICKER", "description": "Low power ticker test",
"source_dir": join(TEST_DIR, "utest", "lp_ticker"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB, CPPUTEST_LIBRARY],
"automated": False,
},
# Tests used for target information purposes
{
"id": "DTCT_1", "description": "Simple detect test",
"source_dir": join(TEST_DIR, "mbed", "detect"),
"dependencies": [MBED_LIBRARIES, TEST_MBED_LIB],
"automated": True,
#"host_test" : "detect_auto",
},
]
# Group tests with the same goals into categories
GROUPS = {
"core": ["MBED_A1", "MBED_A2", "MBED_A3", "MBED_A18"],
"digital_io": ["MBED_A5", "MBED_A6", "MBED_A7", "MBED_A10", "MBED_A11"],
"analog_io": ["MBED_A8"],
"i2c": ["MBED_A19", "MBED_A20"],
"spi": ["MBED_A12"],
}
GROUPS["rtos"] = [test["id"] for test in TESTS if test["id"].startswith("RTOS_")]
GROUPS["net"] = [test["id"] for test in TESTS if test["id"].startswith("NET_")]
GROUPS["automated"] = [test["id"] for test in TESTS if test.get("automated", False)]
# Look for 'TEST_GROUPS' in private_settings.py and update the GROUPS dictionary
# with the information in test_groups if found
try:
from workspace_tools.private_settings import TEST_GROUPS
except ImportError:
TEST_GROUPS = {}
GROUPS.update(TEST_GROUPS)
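# A private_settings.py override is a plain dict keyed by group name; a
# hypothetical example (the group name and test ids are illustrative only):
#
#     TEST_GROUPS = {
#         "smoke": ["MBED_A1", "RTOS_1", "NET_1"],
#     }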
class Test:
DEFAULTS = {
#'mcu': None,
'description': None,
'dependencies': None,
'duration': 10,
'host_test': 'host_test',
'automated': False,
'peripherals': None,
#'supported': None,
'source_dir': None,
'extra_files': None
}
def __init__(self, n):
self.n = n
self.__dict__.update(Test.DEFAULTS)
self.__dict__.update(TESTS[n])
def is_supported(self, target, toolchain):
        if hasattr(self, 'mcu') and target not in self.mcu:
return False
if hasattr(self, 'exclude_mcu') and target in self.exclude_mcu:
return False
if not hasattr(self, 'supported'):
return True
return (target in self.supported) and (toolchain in self.supported[target])
def get_description(self):
if self.description:
return self.description
else:
return self.id
def __cmp__(self, other):
return cmp(self.n, other.n)
def __str__(self):
return "[%3d] %s: %s" % (self.n, self.id, self.get_description())
    def __getitem__(self, key):
        # Expose the same whitelisted keys as the raw TESTS entries.
        allowed = ("id", "mcu", "exclude_mcu", "dependencies", "description",
                   "duration", "host_test", "automated", "peripherals",
                   "supported", "source_dir", "extra_files")
        if key in allowed:
            return getattr(self, key)
        return None
TEST_MAP = dict([(test['id'], Test(i)) for i, test in enumerate(TESTS)])
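# Minimal usage sketch (assumes this module is importable as
# workspace_tools.tests; the target/toolchain pair is illustrative):
#
#     from workspace_tools.tests import TEST_MAP, GROUPS
#     for tid in GROUPS["automated"]:
#         test = TEST_MAP[tid]
#         if test.is_supported("K64F", "GCC_ARM"):
#             print test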
| apache-2.0 |
assuming/infiltrator | infiltrator/spiders/auto_base.py | 1 | 1356 | __author__ = 'ohmyxm'
# coding: utf-8
import json
import codecs
def design_sites(filepath='infiltrator/sitesbase/design_sites.json'):
    with codecs.open(filepath, 'r', encoding='utf-8') as fd:
        result = json.loads(fd.read())
    return result['ui'], result['view']
UI_DESIGN_SITES, VIEW_DESIGN_SITES = design_sites()
class Sites(object):
def __init__(self, filepath='infiltrator/sitesbase/all_sites.json'):
self.path = filepath
self.datas = self._load_sites()
    def _load_sites(self):
        # print os.listdir(os.getcwd())
        with codecs.open(self.path, 'r', encoding='utf-8') as fd:
            return json.loads(fd.read())
def start_URLs(self):
urls = []
for i in self.datas.values():
urls += i['site_url']
return urls
def domain_list(self):
d_list = []
for domain in self.datas.values():
d_list.append(domain['allowed'])
return d_list
def get_sites(self):
return self.datas.keys()
def get_xpath_and_baseurl(self, sitename):
xpath = self.datas[sitename]['xpath']
if "baseurl" in self.datas[sitename]:
baseurl = self.datas[sitename]['baseurl']
else:
baseurl = ''
return xpath, baseurl
if __name__ == '__main__':
pass
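# Minimal usage sketch (assumes the JSON files referenced above exist at the
# default relative paths):
#
#     sites = Sites()
#     print sites.start_URLs()
#     ui, view = design_sites()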
| gpl-2.0 |
02agarwalt/FNGS_website | fngs/fngs/settings.py | 1 | 3882 | """
Django settings for fngs project.
Generated by 'django-admin startproject' using Django 1.10.3.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '82w=+1q7#usjksm6mfx_p)q0eyb*g-8q&r&=356jv#dt7h@%ae'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['cortex.jhu.edu', '0.0.0.0', '128.220.11.5', 'localhost', '*']  # '*' already matches every host, making the explicit entries redundant
# Application definition
INSTALLED_APPS = [
'home.apps.HomeConfig',
'analyze.apps.AnalyzeConfig',
'explore.apps.ExploreConfig',
'algorithms.apps.AlgorithmsConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
# CELERY STUFF
BROKER_URL = 'redis://localhost:6379'
CELERY_RESULT_BACKEND = 'redis://localhost:6379'
CELERY_ACCEPT_CONTENT = ['application/json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TIMEZONE = 'Africa/Nairobi'
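# With this broker/backend config a worker would typically be started as
# (hypothetical project module name taken from this repo's layout):
#
#     celery -A fngs worker --loglevel=info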
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'fngs.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'fngs.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.10/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.10/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.10/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.10/howto/static-files/
STATIC_URL = '/static/'
# the directory to place all uploaded graphs and outputs
SERVER_DIR = "/FNGS_server/"
MEDIA_ROOT = os.path.join(SERVER_DIR, 'input_data')
MEDIA_URL = os.path.join(SERVER_DIR, 'input_data/')
OUTPUT_DIR = os.path.join(SERVER_DIR, 'output_data/')
DATA_FOLDER = os.path.join(SERVER_DIR, "datasets")
AT_FOLDER = os.path.join(SERVER_DIR, "atlases")
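# With SERVER_DIR = "/FNGS_server/", the joins above resolve to e.g.
# MEDIA_ROOT == "/FNGS_server/input_data" and
# OUTPUT_DIR == "/FNGS_server/output_data/".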
| apache-2.0 |
acrsilva/animated-zZz-machine | bundle_final_app/libs/pyqtgraph-develop/examples/relativity/relativity.py | 20 | 28294 | import numpy as np
import collections
import sys, os
import pyqtgraph as pg
from pyqtgraph.Qt import QtGui, QtCore
from pyqtgraph.parametertree import Parameter, ParameterTree
from pyqtgraph.parametertree import types as pTypes
import pyqtgraph.configfile
from pyqtgraph.python2_3 import xrange
class RelativityGUI(QtGui.QWidget):
def __init__(self):
QtGui.QWidget.__init__(self)
self.animations = []
self.animTimer = QtCore.QTimer()
self.animTimer.timeout.connect(self.stepAnimation)
self.animTime = 0
self.animDt = .016
self.lastAnimTime = 0
self.setupGUI()
self.objectGroup = ObjectGroupParam()
self.params = Parameter.create(name='params', type='group', children=[
dict(name='Load Preset..', type='list', values=[]),
#dict(name='Unit System', type='list', values=['', 'MKS']),
dict(name='Duration', type='float', value=10.0, step=0.1, limits=[0.1, None]),
dict(name='Reference Frame', type='list', values=[]),
dict(name='Animate', type='bool', value=True),
dict(name='Animation Speed', type='float', value=1.0, dec=True, step=0.1, limits=[0.0001, None]),
dict(name='Recalculate Worldlines', type='action'),
dict(name='Save', type='action'),
dict(name='Load', type='action'),
self.objectGroup,
])
self.tree.setParameters(self.params, showTop=False)
self.params.param('Recalculate Worldlines').sigActivated.connect(self.recalculate)
self.params.param('Save').sigActivated.connect(self.save)
self.params.param('Load').sigActivated.connect(self.load)
self.params.param('Load Preset..').sigValueChanged.connect(self.loadPreset)
self.params.sigTreeStateChanged.connect(self.treeChanged)
## read list of preset configs
presetDir = os.path.join(os.path.abspath(os.path.dirname(sys.argv[0])), 'presets')
if os.path.exists(presetDir):
presets = [os.path.splitext(p)[0] for p in os.listdir(presetDir)]
self.params.param('Load Preset..').setLimits(['']+presets)
def setupGUI(self):
self.layout = QtGui.QVBoxLayout()
self.layout.setContentsMargins(0,0,0,0)
self.setLayout(self.layout)
self.splitter = QtGui.QSplitter()
self.splitter.setOrientation(QtCore.Qt.Horizontal)
self.layout.addWidget(self.splitter)
self.tree = ParameterTree(showHeader=False)
self.splitter.addWidget(self.tree)
self.splitter2 = QtGui.QSplitter()
self.splitter2.setOrientation(QtCore.Qt.Vertical)
self.splitter.addWidget(self.splitter2)
self.worldlinePlots = pg.GraphicsLayoutWidget()
self.splitter2.addWidget(self.worldlinePlots)
self.animationPlots = pg.GraphicsLayoutWidget()
self.splitter2.addWidget(self.animationPlots)
self.splitter2.setSizes([int(self.height()*0.8), int(self.height()*0.2)])
self.inertWorldlinePlot = self.worldlinePlots.addPlot()
self.refWorldlinePlot = self.worldlinePlots.addPlot()
self.inertAnimationPlot = self.animationPlots.addPlot()
self.inertAnimationPlot.setAspectLocked(1)
self.refAnimationPlot = self.animationPlots.addPlot()
self.refAnimationPlot.setAspectLocked(1)
self.inertAnimationPlot.setXLink(self.inertWorldlinePlot)
self.refAnimationPlot.setXLink(self.refWorldlinePlot)
def recalculate(self):
## build 2 sets of clocks
clocks1 = collections.OrderedDict()
clocks2 = collections.OrderedDict()
for cl in self.params.param('Objects'):
clocks1.update(cl.buildClocks())
clocks2.update(cl.buildClocks())
## Inertial simulation
dt = self.animDt * self.params['Animation Speed']
sim1 = Simulation(clocks1, ref=None, duration=self.params['Duration'], dt=dt)
sim1.run()
sim1.plot(self.inertWorldlinePlot)
self.inertWorldlinePlot.autoRange(padding=0.1)
## reference simulation
ref = self.params['Reference Frame']
dur = clocks1[ref].refData['pt'][-1] ## decide how long to run the reference simulation
sim2 = Simulation(clocks2, ref=clocks2[ref], duration=dur, dt=dt)
sim2.run()
sim2.plot(self.refWorldlinePlot)
self.refWorldlinePlot.autoRange(padding=0.1)
## create animations
self.refAnimationPlot.clear()
self.inertAnimationPlot.clear()
self.animTime = 0
self.animations = [Animation(sim1), Animation(sim2)]
self.inertAnimationPlot.addItem(self.animations[0])
self.refAnimationPlot.addItem(self.animations[1])
## create lines representing all that is visible to a particular reference
#self.inertSpaceline = Spaceline(sim1, ref)
#self.refSpaceline = Spaceline(sim2)
self.inertWorldlinePlot.addItem(self.animations[0].items[ref].spaceline())
self.refWorldlinePlot.addItem(self.animations[1].items[ref].spaceline())
def setAnimation(self, a):
if a:
self.lastAnimTime = pg.ptime.time()
self.animTimer.start(self.animDt*1000)
else:
self.animTimer.stop()
def stepAnimation(self):
now = pg.ptime.time()
dt = (now-self.lastAnimTime) * self.params['Animation Speed']
self.lastAnimTime = now
self.animTime += dt
if self.animTime > self.params['Duration']:
self.animTime = 0
for a in self.animations:
a.restart()
for a in self.animations:
a.stepTo(self.animTime)
def treeChanged(self, *args):
clocks = []
for c in self.params.param('Objects'):
clocks.extend(c.clockNames())
#for param, change, data in args[1]:
#if change == 'childAdded':
self.params.param('Reference Frame').setLimits(clocks)
self.setAnimation(self.params['Animate'])
def save(self):
fn = str(pg.QtGui.QFileDialog.getSaveFileName(self, "Save State..", "untitled.cfg", "Config Files (*.cfg)"))
if fn == '':
return
state = self.params.saveState()
pg.configfile.writeConfigFile(state, fn)
def load(self):
fn = str(pg.QtGui.QFileDialog.getOpenFileName(self, "Save State..", "", "Config Files (*.cfg)"))
if fn == '':
return
state = pg.configfile.readConfigFile(fn)
self.loadState(state)
def loadPreset(self, param, preset):
if preset == '':
return
path = os.path.abspath(os.path.dirname(__file__))
fn = os.path.join(path, 'presets', preset+".cfg")
state = pg.configfile.readConfigFile(fn)
self.loadState(state)
def loadState(self, state):
if 'Load Preset..' in state['children']:
del state['children']['Load Preset..']['limits']
del state['children']['Load Preset..']['value']
self.params.param('Objects').clearChildren()
self.params.restoreState(state, removeChildren=False)
self.recalculate()
class ObjectGroupParam(pTypes.GroupParameter):
def __init__(self):
pTypes.GroupParameter.__init__(self, name="Objects", addText="Add New..", addList=['Clock', 'Grid'])
def addNew(self, typ):
if typ == 'Clock':
self.addChild(ClockParam())
elif typ == 'Grid':
self.addChild(GridParam())
class ClockParam(pTypes.GroupParameter):
def __init__(self, **kwds):
defs = dict(name="Clock", autoIncrementName=True, renamable=True, removable=True, children=[
dict(name='Initial Position', type='float', value=0.0, step=0.1),
#dict(name='V0', type='float', value=0.0, step=0.1),
AccelerationGroup(),
dict(name='Rest Mass', type='float', value=1.0, step=0.1, limits=[1e-9, None]),
dict(name='Color', type='color', value=(100,100,150)),
dict(name='Size', type='float', value=0.5),
dict(name='Vertical Position', type='float', value=0.0, step=0.1),
])
#defs.update(kwds)
pTypes.GroupParameter.__init__(self, **defs)
self.restoreState(kwds, removeChildren=False)
def buildClocks(self):
x0 = self['Initial Position']
y0 = self['Vertical Position']
color = self['Color']
m = self['Rest Mass']
size = self['Size']
prog = self.param('Acceleration').generate()
c = Clock(x0=x0, m0=m, y0=y0, color=color, prog=prog, size=size)
return {self.name(): c}
def clockNames(self):
return [self.name()]
pTypes.registerParameterType('Clock', ClockParam)
class GridParam(pTypes.GroupParameter):
def __init__(self, **kwds):
defs = dict(name="Grid", autoIncrementName=True, renamable=True, removable=True, children=[
dict(name='Number of Clocks', type='int', value=5, limits=[1, None]),
dict(name='Spacing', type='float', value=1.0, step=0.1),
ClockParam(name='ClockTemplate'),
])
#defs.update(kwds)
pTypes.GroupParameter.__init__(self, **defs)
self.restoreState(kwds, removeChildren=False)
def buildClocks(self):
clocks = {}
template = self.param('ClockTemplate')
spacing = self['Spacing']
for i in range(self['Number of Clocks']):
c = list(template.buildClocks().values())[0]
c.x0 += i * spacing
clocks[self.name() + '%02d' % i] = c
return clocks
def clockNames(self):
return [self.name() + '%02d' % i for i in range(self['Number of Clocks'])]
pTypes.registerParameterType('Grid', GridParam)
class AccelerationGroup(pTypes.GroupParameter):
def __init__(self, **kwds):
defs = dict(name="Acceleration", addText="Add Command..")
pTypes.GroupParameter.__init__(self, **defs)
self.restoreState(kwds, removeChildren=False)
def addNew(self):
nextTime = 0.0
if self.hasChildren():
nextTime = self.children()[-1]['Proper Time'] + 1
self.addChild(Parameter.create(name='Command', autoIncrementName=True, type=None, renamable=True, removable=True, children=[
dict(name='Proper Time', type='float', value=nextTime),
dict(name='Acceleration', type='float', value=0.0, step=0.1),
]))
def generate(self):
prog = []
for cmd in self:
prog.append((cmd['Proper Time'], cmd['Acceleration']))
return prog
pTypes.registerParameterType('AccelerationGroup', AccelerationGroup)
class Clock(object):
nClocks = 0
def __init__(self, x0=0.0, y0=0.0, m0=1.0, v0=0.0, t0=0.0, color=None, prog=None, size=0.5):
Clock.nClocks += 1
self.pen = pg.mkPen(color)
self.brush = pg.mkBrush(color)
self.y0 = y0
self.x0 = x0
self.v0 = v0
self.m0 = m0
self.t0 = t0
self.prog = prog
self.size = size
def init(self, nPts):
## Keep records of object from inertial frame as well as reference frame
self.inertData = np.empty(nPts, dtype=[('x', float), ('t', float), ('v', float), ('pt', float), ('m', float), ('f', float)])
self.refData = np.empty(nPts, dtype=[('x', float), ('t', float), ('v', float), ('pt', float), ('m', float), ('f', float)])
## Inertial frame variables
self.x = self.x0
self.v = self.v0
self.m = self.m0
self.t = 0.0 ## reference clock always starts at 0
self.pt = self.t0 ## proper time starts at t0
## reference frame variables
self.refx = None
self.refv = None
self.refm = None
self.reft = None
self.recordFrame(0)
def recordFrame(self, i):
f = self.force()
self.inertData[i] = (self.x, self.t, self.v, self.pt, self.m, f)
self.refData[i] = (self.refx, self.reft, self.refv, self.pt, self.refm, f)
def force(self, t=None):
if len(self.prog) == 0:
return 0.0
if t is None:
t = self.pt
ret = 0.0
for t1,f in self.prog:
if t >= t1:
ret = f
return ret
def acceleration(self, t=None):
return self.force(t) / self.m0
def accelLimits(self):
## return the proper time values which bound the current acceleration command
if len(self.prog) == 0:
return -np.inf, np.inf
t = self.pt
ind = -1
for i, v in enumerate(self.prog):
t1,f = v
if t >= t1:
ind = i
if ind == -1:
return -np.inf, self.prog[0][0]
elif ind == len(self.prog)-1:
return self.prog[-1][0], np.inf
else:
return self.prog[ind][0], self.prog[ind+1][0]
def getCurve(self, ref=True):
if ref is False:
data = self.inertData
else:
data = self.refData[1:]
x = data['x']
y = data['t']
curve = pg.PlotCurveItem(x=x, y=y, pen=self.pen)
#x = self.data['x'] - ref.data['x']
#y = self.data['t']
step = 1.0
#mod = self.data['pt'] % step
#inds = np.argwhere(abs(mod[1:] - mod[:-1]) > step*0.9)
inds = [0]
pt = data['pt']
for i in range(1,len(pt)):
diff = pt[i] - pt[inds[-1]]
if abs(diff) >= step:
inds.append(i)
inds = np.array(inds)
#t = self.data['t'][inds]
#x = self.data['x'][inds]
pts = []
for i in inds:
x = data['x'][i]
y = data['t'][i]
if i+1 < len(data):
dpt = data['pt'][i+1]-data['pt'][i]
dt = data['t'][i+1]-data['t'][i]
else:
dpt = 1
if dpt > 0:
c = pg.mkBrush((0,0,0))
else:
c = pg.mkBrush((200,200,200))
pts.append({'pos': (x, y), 'brush': c})
points = pg.ScatterPlotItem(pts, pen=self.pen, size=7)
return curve, points
class Simulation:
def __init__(self, clocks, ref, duration, dt):
self.clocks = clocks
self.ref = ref
self.duration = duration
self.dt = dt
@staticmethod
def hypTStep(dt, v0, x0, tau0, g):
## Hyperbolic step.
## If an object has proper acceleration g and starts at position x0 with speed v0 and proper time tau0
## as seen from an inertial frame, then return the new v, x, tau after time dt has elapsed.
if g == 0:
return v0, x0 + v0*dt, tau0 + dt * (1. - v0**2)**0.5
v02 = v0**2
g2 = g**2
tinit = v0 / (g * (1 - v02)**0.5)
B = (1 + (g2 * (dt+tinit)**2))**0.5
v1 = g * (dt+tinit) / B
dtau = (np.arcsinh(g * (dt+tinit)) - np.arcsinh(g * tinit)) / g
tau1 = tau0 + dtau
x1 = x0 + (1.0 / g) * ( B - 1. / (1.-v02)**0.5 )
return v1, x1, tau1
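    # Hand-checked example of the g == 0 branch: hypTStep(1.0, 0.6, 0.0, 0.0, 0.0)
    # returns (0.6, 0.6, 0.8) -- position advances by v0*dt while proper time
    # advances by the time-dilated dt * sqrt(1 - v0**2).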
@staticmethod
def tStep(dt, v0, x0, tau0, g):
## Linear step.
## Probably not as accurate as hyperbolic step, but certainly much faster.
gamma = (1. - v0**2)**-0.5
dtau = dt / gamma
return v0 + dtau * g, x0 + v0*dt, tau0 + dtau
@staticmethod
def tauStep(dtau, v0, x0, t0, g):
## linear step in proper time of clock.
## If an object has proper acceleration g and starts at position x0 with speed v0 at time t0
## as seen from an inertial frame, then return the new v, x, t after proper time dtau has elapsed.
## Compute how much t will change given a proper-time step of dtau
gamma = (1. - v0**2)**-0.5
if g == 0:
dt = dtau * gamma
else:
v0g = v0 * gamma
dt = (np.sinh(dtau * g + np.arcsinh(v0g)) - v0g) / g
#return v0 + dtau * g, x0 + v0*dt, t0 + dt
v1, x1, t1 = Simulation.hypTStep(dt, v0, x0, t0, g)
return v1, x1, t0+dt
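    # Inverse of the example above: with g == 0, dt = dtau * gamma, so
    # tauStep(0.8, 0.6, 0.0, 0.0, 0.0) returns (0.6, 0.6, 1.0).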
@staticmethod
def hypIntersect(x0r, t0r, vr, x0, t0, v0, g):
## given a reference clock (seen from inertial frame) has rx, rt, and rv,
## and another clock starts at x0, t0, and v0, with acceleration g,
## compute the intersection time of the object clock's hyperbolic path with
## the reference plane.
## I'm sure we can simplify this...
if g == 0: ## no acceleration, path is linear (and hyperbola is undefined)
#(-t0r + t0 v0 vr - vr x0 + vr x0r)/(-1 + v0 vr)
t = (-t0r + t0 *v0 *vr - vr *x0 + vr *x0r)/(-1 + v0 *vr)
return t
gamma = (1.0-v0**2)**-0.5
sel = (1 if g>0 else 0) + (1 if vr<0 else 0)
sel = sel%2
if sel == 0:
#(1/(g^2 (-1 + vr^2)))(-g^2 t0r + g gamma vr + g^2 t0 vr^2 -
#g gamma v0 vr^2 - g^2 vr x0 +
#g^2 vr x0r + \[Sqrt](g^2 vr^2 (1 + gamma^2 (v0 - vr)^2 - vr^2 +
#2 g gamma (v0 - vr) (-t0 + t0r + vr (x0 - x0r)) +
#g^2 (t0 - t0r + vr (-x0 + x0r))^2)))
t = (1./(g**2 *(-1. + vr**2)))*(-g**2 *t0r + g *gamma *vr + g**2 *t0 *vr**2 - g *gamma *v0 *vr**2 - g**2 *vr *x0 + g**2 *vr *x0r + np.sqrt(g**2 *vr**2 *(1. + gamma**2 *(v0 - vr)**2 - vr**2 + 2 *g *gamma *(v0 - vr)* (-t0 + t0r + vr *(x0 - x0r)) + g**2 *(t0 - t0r + vr* (-x0 + x0r))**2)))
else:
#-(1/(g^2 (-1 + vr^2)))(g^2 t0r - g gamma vr - g^2 t0 vr^2 +
#g gamma v0 vr^2 + g^2 vr x0 -
#g^2 vr x0r + \[Sqrt](g^2 vr^2 (1 + gamma^2 (v0 - vr)^2 - vr^2 +
#2 g gamma (v0 - vr) (-t0 + t0r + vr (x0 - x0r)) +
#g^2 (t0 - t0r + vr (-x0 + x0r))^2)))
t = -(1./(g**2 *(-1. + vr**2)))*(g**2 *t0r - g *gamma* vr - g**2 *t0 *vr**2 + g *gamma *v0 *vr**2 + g**2* vr* x0 - g**2 *vr *x0r + np.sqrt(g**2* vr**2 *(1. + gamma**2 *(v0 - vr)**2 - vr**2 + 2 *g *gamma *(v0 - vr) *(-t0 + t0r + vr *(x0 - x0r)) + g**2 *(t0 - t0r + vr *(-x0 + x0r))**2)))
return t
def run(self):
nPts = int(self.duration/self.dt)+1
for cl in self.clocks.values():
cl.init(nPts)
if self.ref is None:
self.runInertial(nPts)
else:
self.runReference(nPts)
def runInertial(self, nPts):
clocks = self.clocks
dt = self.dt
tVals = np.linspace(0, dt*(nPts-1), nPts)
for cl in self.clocks.values():
for i in xrange(1,nPts):
nextT = tVals[i]
while True:
tau1, tau2 = cl.accelLimits()
x = cl.x
v = cl.v
tau = cl.pt
g = cl.acceleration()
v1, x1, tau1 = self.hypTStep(dt, v, x, tau, g)
if tau1 > tau2:
dtau = tau2-tau
cl.v, cl.x, cl.t = self.tauStep(dtau, v, x, cl.t, g)
cl.pt = tau2
else:
cl.v, cl.x, cl.pt = v1, x1, tau1
cl.t += dt
if cl.t >= nextT:
cl.refx = cl.x
cl.refv = cl.v
cl.reft = cl.t
cl.recordFrame(i)
break
def runReference(self, nPts):
clocks = self.clocks
ref = self.ref
dt = self.dt
dur = self.duration
## make sure reference clock is not present in the list of clocks--this will be handled separately.
clocks = clocks.copy()
for k,v in clocks.items():
if v is ref:
del clocks[k]
break
ref.refx = 0
ref.refv = 0
ref.refm = ref.m0
## These are the set of proper times (in the reference frame) that will be simulated
ptVals = np.linspace(ref.pt, ref.pt + dt*(nPts-1), nPts)
for i in xrange(1,nPts):
## step reference clock ahead one time step in its proper time
nextPt = ptVals[i] ## this is where (when) we want to end up
while True:
tau1, tau2 = ref.accelLimits()
dtau = min(nextPt-ref.pt, tau2-ref.pt) ## do not step past the next command boundary
g = ref.acceleration()
v, x, t = Simulation.tauStep(dtau, ref.v, ref.x, ref.t, g)
ref.pt += dtau
ref.v = v
ref.x = x
ref.t = t
ref.reft = ref.pt
if ref.pt >= nextPt:
break
#else:
#print "Stepped to", tau2, "instead of", nextPt
ref.recordFrame(i)
## determine plane visible to reference clock
## this plane goes through the point ref.x, ref.t and has slope = ref.v
## update all other clocks
for cl in clocks.values():
while True:
g = cl.acceleration()
tau1, tau2 = cl.accelLimits()
##Given current position / speed of clock, determine where it will intersect reference plane
#t1 = (ref.v * (cl.x - cl.v * cl.t) + (ref.t - ref.v * ref.x)) / (1. - cl.v)
t1 = Simulation.hypIntersect(ref.x, ref.t, ref.v, cl.x, cl.t, cl.v, g)
dt1 = t1 - cl.t
## advance clock by correct time step
v, x, tau = Simulation.hypTStep(dt1, cl.v, cl.x, cl.pt, g)
## check to see whether we have gone past an acceleration command boundary.
## if so, we must instead advance the clock to the boundary and start again
if tau < tau1:
dtau = tau1 - cl.pt
cl.v, cl.x, cl.t = Simulation.tauStep(dtau, cl.v, cl.x, cl.t, g)
cl.pt = tau1-0.000001
continue
if tau > tau2:
dtau = tau2 - cl.pt
cl.v, cl.x, cl.t = Simulation.tauStep(dtau, cl.v, cl.x, cl.t, g)
cl.pt = tau2
continue
## Otherwise, record the new values and exit the loop
cl.v = v
cl.x = x
cl.pt = tau
cl.t = t1
cl.m = None
break
## transform position into reference frame
x = cl.x - ref.x
t = cl.t - ref.t
gamma = (1.0 - ref.v**2) ** -0.5
vg = -ref.v * gamma
cl.refx = gamma * (x - ref.v * t)
cl.reft = ref.pt # + gamma * (t - ref.v * x) # this term belongs here, but it should always be equal to 0.
cl.refv = (cl.v - ref.v) / (1.0 - cl.v * ref.v)
cl.refm = None
cl.recordFrame(i)
t += dt
def plot(self, plot):
plot.clear()
for cl in self.clocks.values():
c, p = cl.getCurve()
plot.addItem(c)
plot.addItem(p)
class Animation(pg.ItemGroup):
def __init__(self, sim):
pg.ItemGroup.__init__(self)
self.sim = sim
self.clocks = sim.clocks
self.items = {}
for name, cl in self.clocks.items():
item = ClockItem(cl)
self.addItem(item)
self.items[name] = item
#self.timer = timer
#self.timer.timeout.connect(self.step)
#def run(self, run):
#if not run:
#self.timer.stop()
#else:
#self.timer.start(self.dt)
def restart(self):
for cl in self.items.values():
cl.reset()
def stepTo(self, t):
for i in self.items.values():
i.stepTo(t)
class ClockItem(pg.ItemGroup):
def __init__(self, clock):
pg.ItemGroup.__init__(self)
self.size = clock.size
self.item = QtGui.QGraphicsEllipseItem(QtCore.QRectF(0, 0, self.size, self.size))
self.item.translate(-self.size*0.5, -self.size*0.5)
self.item.setPen(pg.mkPen(100,100,100))
self.item.setBrush(clock.brush)
self.hand = QtGui.QGraphicsLineItem(0, 0, 0, self.size*0.5)
self.hand.setPen(pg.mkPen('w'))
self.hand.setZValue(10)
self.flare = QtGui.QGraphicsPolygonItem(QtGui.QPolygonF([
QtCore.QPointF(0, -self.size*0.25),
QtCore.QPointF(0, self.size*0.25),
QtCore.QPointF(self.size*1.5, 0),
QtCore.QPointF(0, -self.size*0.25),
]))
self.flare.setPen(pg.mkPen('y'))
self.flare.setBrush(pg.mkBrush(255,150,0))
self.flare.setZValue(-10)
self.addItem(self.hand)
self.addItem(self.item)
self.addItem(self.flare)
self.clock = clock
self.i = 1
self._spaceline = None
def spaceline(self):
if self._spaceline is None:
self._spaceline = pg.InfiniteLine()
self._spaceline.setPen(self.clock.pen)
return self._spaceline
def stepTo(self, t):
data = self.clock.refData
while self.i < len(data)-1 and data['t'][self.i] < t:
self.i += 1
while self.i > 1 and data['t'][self.i-1] >= t:
self.i -= 1
self.setPos(data['x'][self.i], self.clock.y0)
t = data['pt'][self.i]
self.hand.setRotation(-0.25 * t * 360.)
self.resetTransform()
v = data['v'][self.i]
gam = (1.0 - v**2)**0.5
self.scale(gam, 1.0)
f = data['f'][self.i]
self.flare.resetTransform()
if f < 0:
self.flare.translate(self.size*0.4, 0)
else:
self.flare.translate(-self.size*0.4, 0)
self.flare.scale(-f * (0.5+np.random.random()*0.1), 1.0)
if self._spaceline is not None:
self._spaceline.setPos(pg.Point(data['x'][self.i], data['t'][self.i]))
self._spaceline.setAngle(data['v'][self.i] * 45.)
def reset(self):
self.i = 1
#class Spaceline(pg.InfiniteLine):
#def __init__(self, sim, frame):
#self.sim = sim
#self.frame = frame
#pg.InfiniteLine.__init__(self)
#self.setPen(sim.clocks[frame].pen)
#def stepTo(self, t):
#self.setAngle(0)
#pass
if __name__ == '__main__':
pg.mkQApp()
#import pyqtgraph.console
#cw = pyqtgraph.console.ConsoleWidget()
#cw.show()
#cw.catchNextException()
win = RelativityGUI()
win.setWindowTitle("Relativity!")
win.show()
win.resize(1100,700)
if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'):
QtGui.QApplication.instance().exec_()
#win.params.param('Objects').restoreState(state, removeChildren=False)
| lgpl-3.0 |
TNT-Samuel/Coding-Projects | DNS Server/Source/Lib/site-packages/pygments/lexers/praat.py | 31 | 12556 | # -*- coding: utf-8 -*-
"""
pygments.lexers.praat
~~~~~~~~~~~~~~~~~~~~~
Lexer for Praat
:copyright: Copyright 2006-2017 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
from pygments.lexer import RegexLexer, words, bygroups, include
from pygments.token import Name, Text, Comment, Keyword, String, Punctuation, Number, \
Operator
__all__ = ['PraatLexer']
class PraatLexer(RegexLexer):
"""
For `Praat <http://www.praat.org>`_ scripts.
.. versionadded:: 2.1
"""
name = 'Praat'
aliases = ['praat']
filenames = ['*.praat', '*.proc', '*.psc']
keywords = (
'if', 'then', 'else', 'elsif', 'elif', 'endif', 'fi', 'for', 'from', 'to',
'endfor', 'endproc', 'while', 'endwhile', 'repeat', 'until', 'select', 'plus',
'minus', 'demo', 'assert', 'stopwatch', 'nocheck', 'nowarn', 'noprogress',
'editor', 'endeditor', 'clearinfo',
)
functions_string = (
'backslashTrigraphsToUnicode', 'chooseDirectory', 'chooseReadFile',
'chooseWriteFile', 'date', 'demoKey', 'do', 'environment', 'extractLine',
'extractWord', 'fixed', 'info', 'left', 'mid', 'percent', 'readFile', 'replace',
'replace_regex', 'right', 'selected', 'string', 'unicodeToBackslashTrigraphs',
)
functions_numeric = (
'abs', 'appendFile', 'appendFileLine', 'appendInfo', 'appendInfoLine', 'arccos',
'arccosh', 'arcsin', 'arcsinh', 'arctan', 'arctan2', 'arctanh', 'barkToHertz',
'beginPause', 'beginSendPraat', 'besselI', 'besselK', 'beta', 'beta2',
'binomialP', 'binomialQ', 'boolean', 'ceiling', 'chiSquareP', 'chiSquareQ',
'choice', 'comment', 'cos', 'cosh', 'createDirectory', 'deleteFile',
'demoClicked', 'demoClickedIn', 'demoCommandKeyPressed',
'demoExtraControlKeyPressed', 'demoInput', 'demoKeyPressed',
'demoOptionKeyPressed', 'demoShiftKeyPressed', 'demoShow', 'demoWaitForInput',
'demoWindowTitle', 'demoX', 'demoY', 'differenceLimensToPhon', 'do', 'editor',
'endPause', 'endSendPraat', 'endsWith', 'erb', 'erbToHertz', 'erf', 'erfc',
'exitScript', 'exp', 'extractNumber', 'fileReadable', 'fisherP', 'fisherQ',
'floor', 'gaussP', 'gaussQ', 'hertzToBark', 'hertzToErb', 'hertzToMel',
'hertzToSemitones', 'imax', 'imin', 'incompleteBeta', 'incompleteGammaP', 'index',
'index_regex', 'invBinomialP', 'invBinomialQ', 'invChiSquareQ', 'invFisherQ',
'invGaussQ', 'invSigmoid', 'invStudentQ', 'length', 'ln', 'lnBeta', 'lnGamma',
'log10', 'log2', 'max', 'melToHertz', 'min', 'minusObject', 'natural', 'number',
'numberOfColumns', 'numberOfRows', 'numberOfSelected', 'objectsAreIdentical',
'option', 'optionMenu', 'pauseScript', 'phonToDifferenceLimens', 'plusObject',
'positive', 'randomBinomial', 'randomGauss', 'randomInteger', 'randomPoisson',
'randomUniform', 'real', 'readFile', 'removeObject', 'rindex', 'rindex_regex',
'round', 'runScript', 'runSystem', 'runSystem_nocheck', 'selectObject',
'selected', 'semitonesToHertz', 'sentencetext', 'sigmoid', 'sin', 'sinc',
'sincpi', 'sinh', 'soundPressureToPhon', 'sqrt', 'startsWith', 'studentP',
'studentQ', 'tan', 'tanh', 'variableExists', 'word', 'writeFile', 'writeFileLine',
'writeInfo', 'writeInfoLine',
)
functions_array = (
'linear', 'randomGauss', 'randomInteger', 'randomUniform', 'zero',
)
objects = (
'Activation', 'AffineTransform', 'AmplitudeTier', 'Art', 'Artword',
'Autosegment', 'BarkFilter', 'BarkSpectrogram', 'CCA', 'Categories',
'Cepstrogram', 'Cepstrum', 'Cepstrumc', 'ChebyshevSeries', 'ClassificationTable',
'Cochleagram', 'Collection', 'ComplexSpectrogram', 'Configuration', 'Confusion',
'ContingencyTable', 'Corpus', 'Correlation', 'Covariance',
'CrossCorrelationTable', 'CrossCorrelationTables', 'DTW', 'DataModeler',
'Diagonalizer', 'Discriminant', 'Dissimilarity', 'Distance', 'Distributions',
'DurationTier', 'EEG', 'ERP', 'ERPTier', 'EditCostsTable', 'EditDistanceTable',
'Eigen', 'Excitation', 'Excitations', 'ExperimentMFC', 'FFNet', 'FeatureWeights',
'FileInMemory', 'FilesInMemory', 'Formant', 'FormantFilter', 'FormantGrid',
'FormantModeler', 'FormantPoint', 'FormantTier', 'GaussianMixture', 'HMM',
'HMM_Observation', 'HMM_ObservationSequence', 'HMM_State', 'HMM_StateSequence',
'Harmonicity', 'ISpline', 'Index', 'Intensity', 'IntensityTier', 'IntervalTier',
'KNN', 'KlattGrid', 'KlattTable', 'LFCC', 'LPC', 'Label', 'LegendreSeries',
'LinearRegression', 'LogisticRegression', 'LongSound', 'Ltas', 'MFCC', 'MSpline',
'ManPages', 'Manipulation', 'Matrix', 'MelFilter', 'MelSpectrogram',
'MixingMatrix', 'Movie', 'Network', 'OTGrammar', 'OTHistory', 'OTMulti', 'PCA',
'PairDistribution', 'ParamCurve', 'Pattern', 'Permutation', 'Photo', 'Pitch',
'PitchModeler', 'PitchTier', 'PointProcess', 'Polygon', 'Polynomial',
'PowerCepstrogram', 'PowerCepstrum', 'Procrustes', 'RealPoint', 'RealTier',
'ResultsMFC', 'Roots', 'SPINET', 'SSCP', 'SVD', 'Salience', 'ScalarProduct',
'Similarity', 'SimpleString', 'SortedSetOfString', 'Sound', 'Speaker',
'Spectrogram', 'Spectrum', 'SpectrumTier', 'SpeechSynthesizer', 'SpellingChecker',
'Strings', 'StringsIndex', 'Table', 'TableOfReal', 'TextGrid', 'TextInterval',
'TextPoint', 'TextTier', 'Tier', 'Transition', 'VocalTract', 'VocalTractTier',
'Weight', 'WordList',
)
variables_numeric = (
'macintosh', 'windows', 'unix', 'praatVersion', 'pi', 'e', 'undefined',
)
variables_string = (
'praatVersion', 'tab', 'shellDirectory', 'homeDirectory',
'preferencesDirectory', 'newline', 'temporaryDirectory',
'defaultDirectory',
)
tokens = {
'root': [
(r'(\s+)(#.*?$)', bygroups(Text, Comment.Single)),
(r'^#.*?$', Comment.Single),
(r';[^\n]*', Comment.Single),
(r'\s+', Text),
(r'\bprocedure\b', Keyword, 'procedure_definition'),
(r'\bcall\b', Keyword, 'procedure_call'),
(r'@', Name.Function, 'procedure_call'),
include('function_call'),
(words(keywords, suffix=r'\b'), Keyword),
(r'(\bform\b)(\s+)([^\n]+)',
bygroups(Keyword, Text, String), 'old_form'),
(r'(print(?:line|tab)?|echo|exit|asserterror|pause|send(?:praat|socket)|'
r'include|execute|system(?:_nocheck)?)(\s+)',
bygroups(Keyword, Text), 'string_unquoted'),
(r'(goto|label)(\s+)(\w+)', bygroups(Keyword, Text, Name.Label)),
include('variable_name'),
include('number'),
(r'"', String, 'string'),
(words((objects), suffix=r'(?=\s+\S+\n)'), Name.Class, 'string_unquoted'),
(r'\b[A-Z]', Keyword, 'command'),
(r'(\.{3}|[)(,])', Punctuation),
],
'command': [
(r'( ?[\w()-]+ ?)', Keyword),
(r"'(?=.*')", String.Interpol, 'string_interpolated'),
(r'\.{3}', Keyword, ('#pop', 'old_arguments')),
(r':', Keyword, ('#pop', 'comma_list')),
(r'\s', Text, '#pop'),
],
'procedure_call': [
(r'\s+', Text),
(r'([\w.]+)(:|\s*\()',
bygroups(Name.Function, Text), '#pop'),
(r'([\w.]+)', Name.Function, ('#pop', 'old_arguments')),
],
'procedure_definition': [
(r'\s', Text),
(r'([\w.]+)(\s*?[(:])',
bygroups(Name.Function, Text), '#pop'),
(r'([\w.]+)([^\n]*)',
bygroups(Name.Function, Text), '#pop'),
],
'function_call': [
(words(functions_string, suffix=r'\$(?=\s*[:(])'), Name.Function, 'function'),
(words(functions_array, suffix=r'#(?=\s*[:(])'), Name.Function, 'function'),
(words(functions_numeric, suffix=r'(?=\s*[:(])'), Name.Function, 'function'),
],
'function': [
(r'\s+', Text),
(r':', Punctuation, ('#pop', 'comma_list')),
(r'\s*\(', Punctuation, ('#pop', 'comma_list')),
],
'comma_list': [
(r'(\s*\n\s*)(\.{3})', bygroups(Text, Punctuation)),
(r'(\s*[])\n])', Text, '#pop'),
(r'\s+', Text),
(r'"', String, 'string'),
(r'\b(if|then|else|fi|endif)\b', Keyword),
include('function_call'),
include('variable_name'),
include('operator'),
include('number'),
(r'[()]', Text),
(r',', Punctuation),
],
'old_arguments': [
(r'\n', Text, '#pop'),
include('variable_name'),
include('operator'),
include('number'),
(r'"', String, 'string'),
(r'[^\n]', Text),
],
'number': [
(r'\n', Text, '#pop'),
(r'\b\d+(\.\d*)?([eE][-+]?\d+)?%?', Number),
],
'object_attributes': [
(r'\.?(n(col|row)|[xy]min|[xy]max|[nd][xy])\b', Name.Builtin, '#pop'),
(r'(\.?(?:col|row)\$)(\[)',
bygroups(Name.Builtin, Text), 'variable_name'),
(r'(\$?)(\[)',
bygroups(Name.Builtin, Text), ('#pop', 'comma_list')),
],
'variable_name': [
include('operator'),
include('number'),
(words(variables_string, suffix=r'\$'), Name.Variable.Global),
(words(variables_numeric, suffix=r'\b'), Name.Variable.Global),
(r'\bObject_\w+', Name.Builtin, 'object_attributes'),
(words(objects, prefix=r'\b', suffix=r'_\w+'),
Name.Builtin, 'object_attributes'),
(r"\b(Object_)(')",
bygroups(Name.Builtin, String.Interpol),
('object_attributes', 'string_interpolated')),
(words(objects, prefix=r'\b', suffix=r"(_)(')"),
bygroups(Name.Builtin, Name.Builtin, String.Interpol),
('object_attributes', 'string_interpolated')),
(r'\.?_?[a-z][\w.]*(\$|#)?', Text),
(r'[\[\]]', Punctuation, 'comma_list'),
(r"'(?=.*')", String.Interpol, 'string_interpolated'),
],
'operator': [
(r'([+\/*<>=!-]=?|[&*|][&*|]?|\^|<>)', Operator),
(r'(?<![\w.])(and|or|not|div|mod)(?![\w.])', Operator.Word),
],
'string_interpolated': [
(r'\.?[_a-z][\w.]*[$#]?(?:\[[a-zA-Z0-9,]+\])?(:[0-9]+)?',
String.Interpol),
(r"'", String.Interpol, '#pop'),
],
'string_unquoted': [
(r'(\n\s*)(\.{3})', bygroups(Text, Punctuation)),
(r'\n', Text, '#pop'),
(r'\s', Text),
(r"'(?=.*')", String.Interpol, 'string_interpolated'),
(r"'", String),
(r"[^'\n]+", String),
],
'string': [
(r'(\n\s*)(\.{3})', bygroups(Text, Punctuation)),
(r'"', String, '#pop'),
(r"'(?=.*')", String.Interpol, 'string_interpolated'),
(r"'", String),
(r'[^\'"\n]+', String),
],
'old_form': [
(r'\s+', Text),
(r'(optionmenu|choice)([ \t]+\S+:[ \t]+)',
bygroups(Keyword, Text), 'number'),
(r'(option|button)([ \t]+)',
bygroups(Keyword, Text), 'string_unquoted'),
(r'(sentence|text)([ \t]+\S+)',
bygroups(Keyword, Text), 'string_unquoted'),
(r'(word)([ \t]+\S+[ \t]*)(\S+)?([ \t]+.*)?',
bygroups(Keyword, Text, String, Text)),
(r'(boolean)(\s+\S+\s*)(0|1|"?(?:yes|no)"?)',
bygroups(Keyword, Text, Name.Variable)),
            # Ideally, processing of the number would happen in the 'number'
            # state, but that doesn't seem to work.
(r'(real|natural|positive|integer)([ \t]+\S+[ \t]*)([+-]?)(\d+(?:\.\d*)?'
r'(?:[eE][-+]?\d+)?%?)',
bygroups(Keyword, Text, Operator, Number)),
(r'(comment)(\s+)',
bygroups(Keyword, Text), 'string_unquoted'),
(r'\bendform\b', Keyword, '#pop'),
]
}
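# Minimal usage sketch with the standard pygments API:
#
#     from pygments import highlight
#     from pygments.formatters import TerminalFormatter
#     print(highlight(u'writeInfoLine: "hello"', PraatLexer(), TerminalFormatter()))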
| gpl-3.0 |
kawasaki2013/python-for-android-x86 | python3-alpha/python3-src/Lib/lib2to3/fixes/fix_tuple_params.py | 203 | 5565 | """Fixer for function definitions with tuple parameters.
def func(((a, b), c), d):
...
->
def func(x, d):
((a, b), c) = x
...
It will also support lambdas:
lambda (x, y): x + y -> lambda t: t[0] + t[1]
# The parens are a syntax error in Python 3
lambda (x): x + y -> lambda x: x + y
"""
# Author: Collin Winter
# Local imports
from .. import pytree
from ..pgen2 import token
from .. import fixer_base
from ..fixer_util import Assign, Name, Newline, Number, Subscript, syms
def is_docstring(stmt):
return isinstance(stmt, pytree.Node) and \
stmt.children[0].type == token.STRING
class FixTupleParams(fixer_base.BaseFix):
run_order = 4 #use a lower order since lambda is part of other
#patterns
BM_compatible = True
PATTERN = """
funcdef< 'def' any parameters< '(' args=any ')' >
['->' any] ':' suite=any+ >
|
lambda=
lambdef< 'lambda' args=vfpdef< '(' inner=any ')' >
':' body=any
>
"""
def transform(self, node, results):
if "lambda" in results:
return self.transform_lambda(node, results)
new_lines = []
suite = results["suite"]
args = results["args"]
        # This ensures "def foo(...): x = 5; y = 7" is handled correctly.
# TODO(cwinter): suite-cleanup
if suite[0].children[1].type == token.INDENT:
start = 2
indent = suite[0].children[1].value
end = Newline()
else:
start = 0
indent = "; "
end = pytree.Leaf(token.INDENT, "")
# We need access to self for new_name(), and making this a method
# doesn't feel right. Closing over self and new_lines makes the
# code below cleaner.
def handle_tuple(tuple_arg, add_prefix=False):
n = Name(self.new_name())
arg = tuple_arg.clone()
arg.prefix = ""
stmt = Assign(arg, n.clone())
if add_prefix:
n.prefix = " "
tuple_arg.replace(n)
new_lines.append(pytree.Node(syms.simple_stmt,
[stmt, end.clone()]))
if args.type == syms.tfpdef:
handle_tuple(args)
elif args.type == syms.typedargslist:
for i, arg in enumerate(args.children):
if arg.type == syms.tfpdef:
# Without add_prefix, the emitted code is correct,
# just ugly.
handle_tuple(arg, add_prefix=(i > 0))
if not new_lines:
return
# This isn't strictly necessary, but it plays nicely with other fixers.
# TODO(cwinter) get rid of this when children becomes a smart list
for line in new_lines:
line.parent = suite[0]
# TODO(cwinter) suite-cleanup
after = start
if start == 0:
new_lines[0].prefix = " "
elif is_docstring(suite[0].children[start]):
new_lines[0].prefix = indent
after = start + 1
for line in new_lines:
line.parent = suite[0]
suite[0].children[after:after] = new_lines
for i in range(after+1, after+len(new_lines)+1):
suite[0].children[i].prefix = indent
suite[0].changed()
def transform_lambda(self, node, results):
args = results["args"]
body = results["body"]
inner = simplify_args(results["inner"])
# Replace lambda ((((x)))): x with lambda x: x
if inner.type == token.NAME:
inner = inner.clone()
inner.prefix = " "
args.replace(inner)
return
params = find_params(args)
to_index = map_to_index(params)
tup_name = self.new_name(tuple_name(params))
new_param = Name(tup_name, prefix=" ")
args.replace(new_param.clone())
for n in body.post_order():
if n.type == token.NAME and n.value in to_index:
subscripts = [c.clone() for c in to_index[n.value]]
new = pytree.Node(syms.power,
[new_param.clone()] + subscripts)
new.prefix = n.prefix
n.replace(new)
### Helper functions for transform_lambda()
def simplify_args(node):
if node.type in (syms.vfplist, token.NAME):
return node
elif node.type == syms.vfpdef:
# These look like vfpdef< '(' x ')' > where x is NAME
# or another vfpdef instance (leading to recursion).
while node.type == syms.vfpdef:
node = node.children[1]
return node
raise RuntimeError("Received unexpected node %s" % node)
def find_params(node):
if node.type == syms.vfpdef:
return find_params(node.children[1])
elif node.type == token.NAME:
return node.value
return [find_params(c) for c in node.children if c.type != token.COMMA]
def map_to_index(param_list, prefix=[], d=None):
if d is None:
d = {}
for i, obj in enumerate(param_list):
trailer = [Subscript(Number(str(i)))]
if isinstance(obj, list):
map_to_index(obj, trailer, d=d)
else:
d[obj] = prefix + trailer
return d
def tuple_name(param_list):
l = []
for obj in param_list:
if isinstance(obj, list):
l.append(tuple_name(obj))
else:
l.append(obj)
return "_".join(l)
| apache-2.0 |
whitepyro/debian_server_setup | lib/tvdb_api/tvdb_exceptions.py | 31 | 1232 | #!/usr/bin/env python2
#encoding:utf-8
#author:dbr/Ben
#project:tvdb_api
#repository:http://github.com/dbr/tvdb_api
#license:unlicense (http://unlicense.org/)
"""Custom exceptions used or raised by tvdb_api
"""
__author__ = "dbr/Ben"
__version__ = "1.9"
__all__ = ["tvdb_error", "tvdb_userabort", "tvdb_shownotfound",
"tvdb_seasonnotfound", "tvdb_episodenotfound", "tvdb_attributenotfound"]
class tvdb_exception(Exception):
"""Any exception generated by tvdb_api
"""
pass
class tvdb_error(tvdb_exception):
"""An error with thetvdb.com (Cannot connect, for example)
"""
pass
class tvdb_userabort(tvdb_exception):
"""User aborted the interactive selection (via
the q command, ^c etc)
"""
pass
class tvdb_shownotfound(tvdb_exception):
"""Show cannot be found on thetvdb.com (non-existant show)
"""
pass
class tvdb_seasonnotfound(tvdb_exception):
"""Season cannot be found on thetvdb.com
"""
pass
class tvdb_episodenotfound(tvdb_exception):
"""Episode cannot be found on thetvdb.com
"""
pass
class tvdb_attributenotfound(tvdb_exception):
"""Raised if an episode does not have the requested
    attribute (such as an episode name)
"""
pass
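if __name__ == '__main__':
    # Minimal sketch: every specific error derives from tvdb_exception, so
    # callers can catch the base class to handle any tvdb_api failure.
    try:
        raise tvdb_shownotfound("show 'example' does not exist")
    except tvdb_exception as err:
        print("tvdb_api raised: %s" % err)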
| gpl-3.0 |
genome-vendor/cython | setup.py | 1 | 12652 | from distutils.core import setup, Extension
from distutils.sysconfig import get_python_lib
import os, os.path
import sys
try:
import platform
is_cpython = not hasattr(platform, 'python_implementation') or platform.python_implementation() == 'CPython'
except (ImportError, NameError):
is_cpython = True # CPython < 2.6
if sys.platform == "darwin":
# Don't create resource files on OS X tar.
os.environ['COPY_EXTENDED_ATTRIBUTES_DISABLE'] = 'true'
os.environ['COPYFILE_DISABLE'] = 'true'
setup_args = {}
def add_command_class(name, cls):
cmdclasses = setup_args.get('cmdclass', {})
cmdclasses[name] = cls
setup_args['cmdclass'] = cmdclasses
from distutils.command.sdist import sdist as sdist_orig
class sdist(sdist_orig):
def run(self):
self.force_manifest = 1
if (sys.platform != "win32" and
os.path.isdir('.git')):
assert os.system("git rev-parse --verify HEAD > .gitrev") == 0
sdist_orig.run(self)
add_command_class('sdist', sdist)
if sys.version_info[0] >= 3:
import lib2to3.refactor
from distutils.command.build_py \
import build_py_2to3 as build_py
# need to convert sources to Py3 on installation
fixers = [ fix for fix in lib2to3.refactor.get_fixers_from_package("lib2to3.fixes")
if fix.split('fix_')[-1] not in ('next',)
]
build_py.fixer_names = fixers
add_command_class("build_py", build_py)
pxd_include_dirs = [
directory for directory, dirs, files in os.walk('Cython/Includes')
if '__init__.pyx' in files or '__init__.pxd' in files
or directory == 'Cython/Includes' or directory == 'Cython/Includes/Deprecated']
pxd_include_patterns = [
p+'/*.pxd' for p in pxd_include_dirs ] + [
p+'/*.pyx' for p in pxd_include_dirs ]
setup_args['package_data'] = {
'Cython.Plex' : ['*.pxd'],
'Cython.Compiler' : ['*.pxd'],
'Cython.Runtime' : ['*.pyx', '*.pxd'],
'Cython.Utility' : ['*.pyx', '*.pxd', '*.c', '*.h', '*.cpp'],
'Cython' : [ p[7:] for p in pxd_include_patterns ],
}
# This dict is used for passing extra arguments that are setuptools
# specific to setup
setuptools_extra_args = {}
# tells whether to include cygdb (the script and the Cython.Debugger package)
include_debugger = sys.version_info[:2] > (2, 5)
if 'setuptools' in sys.modules:
setuptools_extra_args['zip_safe'] = False
setuptools_extra_args['entry_points'] = {
'console_scripts': [
'cython = Cython.Compiler.Main:setuptools_main',
]
}
scripts = []
else:
if os.name == "posix":
scripts = ["bin/cython"]
else:
scripts = ["cython.py"]
if include_debugger:
if 'setuptools' in sys.modules:
setuptools_extra_args['entry_points']['console_scripts'].append(
'cygdb = Cython.Debugger.Cygdb:main')
else:
if os.name == "posix":
scripts.append('bin/cygdb')
else:
scripts.append('cygdb.py')
def compile_cython_modules(profile=False, compile_more=False, cython_with_refnanny=False):
source_root = os.path.abspath(os.path.dirname(__file__))
compiled_modules = ["Cython.Plex.Scanners",
"Cython.Plex.Actions",
"Cython.Compiler.Lexicon",
"Cython.Compiler.Scanning",
"Cython.Compiler.Parsing",
"Cython.Compiler.Visitor",
"Cython.Compiler.FlowControl",
"Cython.Compiler.Code",
"Cython.Runtime.refnanny",
# "Cython.Compiler.FusedNode",
]
if compile_more:
compiled_modules.extend([
"Cython.Build.Dependencies",
"Cython.Compiler.ParseTreeTransforms",
"Cython.Compiler.Nodes",
"Cython.Compiler.ExprNodes",
"Cython.Compiler.ModuleNode",
"Cython.Compiler.Optimize",
])
defines = []
if cython_with_refnanny:
defines.append(('CYTHON_REFNANNY', '1'))
extensions = []
if sys.version_info[0] >= 3:
from Cython.Distutils import build_ext as build_ext_orig
for module in compiled_modules:
source_file = os.path.join(source_root, *module.split('.'))
if os.path.exists(source_file + ".py"):
pyx_source_file = source_file + ".py"
else:
pyx_source_file = source_file + ".pyx"
dep_files = []
if os.path.exists(source_file + '.pxd'):
dep_files.append(source_file + '.pxd')
if '.refnanny' in module:
defines_for_module = []
else:
defines_for_module = defines
extensions.append(
Extension(module, sources = [pyx_source_file],
define_macros = defines_for_module,
depends = dep_files)
)
class build_ext(build_ext_orig):
# we must keep the original modules alive to make sure
# their code keeps working when we remove them from
# sys.modules
dead_modules = []
def build_extensions(self):
# add path where 2to3 installed the transformed sources
# and make sure Python (re-)imports them from there
already_imported = [ module for module in sys.modules
if module == 'Cython' or module.startswith('Cython.') ]
keep_alive = self.dead_modules.append
for module in already_imported:
keep_alive(sys.modules[module])
del sys.modules[module]
sys.path.insert(0, os.path.join(source_root, self.build_lib))
if profile:
from Cython.Compiler.Options import directive_defaults
directive_defaults['profile'] = True
print("Enabled profiling for the Cython binary modules")
build_ext_orig.build_extensions(self)
setup_args['ext_modules'] = extensions
add_command_class("build_ext", build_ext)
else: # Python 2.x
from distutils.command.build_ext import build_ext as build_ext_orig
try:
class build_ext(build_ext_orig):
def build_extension(self, ext, *args, **kargs):
try:
build_ext_orig.build_extension(self, ext, *args, **kargs)
except StandardError:
print("Compilation of '%s' failed" % ext.sources[0])
from Cython.Compiler.Main import compile
from Cython import Utils
if profile:
from Cython.Compiler.Options import directive_defaults
directive_defaults['profile'] = True
print("Enabled profiling for the Cython binary modules")
source_root = os.path.dirname(__file__)
for module in compiled_modules:
source_file = os.path.join(source_root, *module.split('.'))
if os.path.exists(source_file + ".py"):
pyx_source_file = source_file + ".py"
else:
pyx_source_file = source_file + ".pyx"
c_source_file = source_file + ".c"
source_is_newer = False
if not os.path.exists(c_source_file):
source_is_newer = True
else:
c_last_modified = Utils.modification_time(c_source_file)
if Utils.file_newer_than(pyx_source_file, c_last_modified):
source_is_newer = True
else:
pxd_source_file = source_file + ".pxd"
if os.path.exists(pxd_source_file) and Utils.file_newer_than(pxd_source_file, c_last_modified):
source_is_newer = True
if source_is_newer:
print("Compiling module %s ..." % module)
result = compile(pyx_source_file)
c_source_file = result.c_file
if c_source_file:
# Py2 distutils can't handle unicode file paths
if isinstance(c_source_file, unicode):
filename_encoding = sys.getfilesystemencoding()
if filename_encoding is None:
filename_encoding = sys.getdefaultencoding()
c_source_file = c_source_file.encode(filename_encoding)
if '.refnanny' in module:
defines_for_module = []
else:
defines_for_module = defines
extensions.append(
Extension(module, sources = [c_source_file],
define_macros = defines_for_module)
)
else:
print("Compilation failed")
if extensions:
setup_args['ext_modules'] = extensions
add_command_class("build_ext", build_ext)
except Exception:
print('''
ERROR: %s
Extension module compilation failed, looks like Cython cannot run
properly on this system. To work around this, pass the option
"--no-cython-compile". This will install a pure Python version of
Cython without compiling its own sources.
''' % sys.exc_info()[1])
raise
cython_profile = '--cython-profile' in sys.argv
if cython_profile:
sys.argv.remove('--cython-profile')
try:
sys.argv.remove("--cython-compile-all")
cython_compile_more = True
except ValueError:
cython_compile_more = False
try:
sys.argv.remove("--cython-with-refnanny")
cython_with_refnanny = True
except ValueError:
cython_with_refnanny = False
try:
sys.argv.remove("--no-cython-compile")
compile_cython_itself = False
except ValueError:
compile_cython_itself = True
if compile_cython_itself and (is_cpython or cython_compile_more):
compile_cython_modules(cython_profile, cython_compile_more, cython_with_refnanny)
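# Example invocations exercising the flags parsed above (illustrative):
#   python setup.py build_ext --inplace --cython-profile
#   python setup.py install --no-cython-compile
#   python setup.py bdist --cython-compile-all --cython-with-refnanny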
setup_args.update(setuptools_extra_args)
from Cython import __version__ as version
packages = [
'Cython',
'Cython.Build',
'Cython.Compiler',
'Cython.Runtime',
'Cython.Distutils',
'Cython.Plex',
'Cython.Tests',
'Cython.Build.Tests',
'Cython.Compiler.Tests',
'Cython.Utility',
'Cython.Tempita',
'pyximport',
]
if include_debugger:
packages.append('Cython.Debugger')
packages.append('Cython.Debugger.Tests')
# it's enough to do this for Py2.5+:
setup_args['package_data']['Cython.Debugger.Tests'] = ['codefile', 'cfuncs.c']
setup(
name = 'Cython',
version = version,
url = 'http://www.cython.org',
author = 'Robert Bradshaw, Stefan Behnel, Dag Seljebotn, Greg Ewing, et al.',
author_email = '[email protected]',
description = "The Cython compiler for writing C extensions for the Python language.",
long_description = """\
The Cython language makes writing C extensions for the Python language as
easy as Python itself. Cython is a source code translator based on the
well-known Pyrex_, but supports more cutting edge functionality and
optimizations.
The Cython language is very close to the Python language (and most Python
code is also valid Cython code), but Cython additionally supports calling C
functions and declaring C types on variables and class attributes. This
allows the compiler to generate very efficient C code from Cython code.
This makes Cython the ideal language for writing glue code for external C
libraries, and for fast C modules that speed up the execution of Python
code.
.. _Pyrex: http://www.cosc.canterbury.ac.nz/greg.ewing/python/Pyrex/
""",
classifiers = [
"Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Programming Language :: C",
"Programming Language :: Cython",
"Topic :: Software Development :: Code Generators",
"Topic :: Software Development :: Compilers",
"Topic :: Software Development :: Libraries :: Python Modules"
],
scripts = scripts,
packages=packages,
py_modules = ["cython"],
**setup_args
)
| apache-2.0 |
idosekely/python-lessons | lesson_1/python_types.py | 1 | 1595 | __author__ = 'sekely'
# this is a comment. we use it for code documentation.
'''
this is comment block.
everything between those markers,
is considered
as a string, or block
'''
1 + 1  # these are integers: whole numbers. the result is '2'
3 / 1  # dividing two integers produces a float. in this case, '3.0'.
5 / 3  # what is the result in that case?
1.0 + 1.0  # these are floats: numbers with a floating (decimal) point. the result is '2.0'
5 // 3  # what about now? (hint: // is floor division)
'''
so adding, subtracting or multiplying two integers gives an integer.
the result of two floats is a float.
what about an integer and a float?
try it!
'''
'this is a string'
"this is also a string"
# is there any difference? (no!)
# calling to "print" will always reproduce a string output!
1 == 5 # this is boolean. it has only two results - True or False.
1 != 5 # now it's True!
# Summary
# math and operands
5 / 3 # 1.6666666666666667
5.0 / 3 # 1.6666666666666667
5.0 // 3 # 1.0
5 // 3 # 1
5 % 3 # 2
5 ** 3 # 125
# strings and some string manipulation
len('hello') # 5
print('hello ' + 'world') # 'hello world'
print('this is %s format' % 'string') # 'this is string format'
print('My Name'.upper()) # 'MY NAME'
print('My Name'.lower()) # 'my name'
# boolean expression and algebra
not False # True
not True # False
False and True # False
True and True # True
True or False # True
False or False # False
5 == 5 # True
3 != 5 # True
5 < 3 # False
5 <= 5 # True
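# bonus: the built-in type() tells you which type a value has
type(1)        # <class 'int'>
type(1.0)      # <class 'float'>
type('hello')  # <class 'str'>
type(1 == 5)   # <class 'bool'>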
| mit |
theochem/horton | horton/part/test/test_becke.py | 4 | 4428 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2017 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
from nose.tools import assert_raises
from horton import * # pylint: disable=wildcard-import,unused-wildcard-import
def test_becke_n2_hfs_sto3g():
fn_fchk = context.get_fn('test/n2_hfs_sto3g.fchk')
mol = IOData.from_file(fn_fchk)
rtf = ExpRTransform(1e-3, 1e1, 100)
rgrid = RadialGrid(rtf)
grid = BeckeMolGrid(mol.coordinates, mol.numbers, mol.pseudo_numbers, (rgrid, 110), random_rotate=False, mode='only')
dm_full = mol.get_dm_full()
moldens = mol.obasis.compute_grid_density_dm(dm_full, grid.points)
bp = BeckeWPart(mol.coordinates, mol.numbers, mol.pseudo_numbers, grid, moldens)
bp.do_populations()
assert abs(bp['populations'] - 7).max() < 1e-4
bp.do_charges()
assert abs(bp['charges']).max() < 1e-4
bp.clear()
with assert_raises(KeyError):
bp['charges']
bp.do_charges()
assert abs(bp['populations'] - 7).max() < 1e-4
assert abs(bp['charges']).max() < 1e-4
def test_becke_nonlocal_lih_hf_321g():
fn_fchk = context.get_fn('test/li_h_3-21G_hf_g09.fchk')
mol = IOData.from_file(fn_fchk)
rtf = ExpRTransform(1e-3, 1e1, 100)
rgrid = RadialGrid(rtf)
dm_full = mol.get_dm_full()
grid1 = BeckeMolGrid(mol.coordinates, mol.numbers, mol.pseudo_numbers, (rgrid, 110), random_rotate=False, mode='only')
moldens = mol.obasis.compute_grid_density_dm(dm_full, grid1.points)
bp1 = BeckeWPart(mol.coordinates, mol.numbers, mol.pseudo_numbers, grid1, moldens)
grid2 = BeckeMolGrid(mol.coordinates, mol.numbers, mol.pseudo_numbers, (rgrid, 110), random_rotate=False, mode='discard')
moldens = mol.obasis.compute_grid_density_dm(dm_full, grid2.points)
bp2 = BeckeWPart(mol.coordinates, mol.numbers, mol.pseudo_numbers, grid2, moldens, local=False)
bp1.do_charges()
bp2.do_charges()
assert abs(bp1['charges'] - bp2['charges']).max() < 5e-4
def check_becke_azirine(key, expected):
fn_fchk = context.get_fn('test/2h-azirine-%s.fchk' % key)
mol = IOData.from_file(fn_fchk)
grid = BeckeMolGrid(mol.coordinates, mol.numbers, mol.pseudo_numbers, random_rotate=False, mode='only')
dm_full = mol.get_dm_full()
moldens = mol.obasis.compute_grid_density_dm(dm_full, grid.points)
bp = BeckeWPart(mol.coordinates, mol.numbers, mol.pseudo_numbers, grid, moldens)
bp.do_charges()
c = bp['charges']
assert abs(c[0] - expected[0]) < 1e-3
assert abs(c[2] - expected[1]) < 1e-3
assert abs(c[5] - expected[2]) < 1e-3
def test_becke_azirine_ccd():
check_becke_azirine('cc', [-0.0656538087277, -0.0770555290299, 0.123503410725])
def test_becke_azirine_cis():
check_becke_azirine('ci', [-0.122893896731, -0.266685240737, 0.137147967309])
def test_becke_azirine_mp2():
check_becke_azirine('mp2', [-0.0656579068849, -0.0761190062373, 0.126890127581])
def test_becke_azirine_mp3():
check_becke_azirine('mp3', [-0.0665919182085, -0.0769654765789, 0.125587673579])
def test_becke_ch3_hf_sto3g():
fn_fchk = context.get_fn('test/ch3_hf_sto3g.fchk')
mol = IOData.from_file(fn_fchk)
grid = BeckeMolGrid(mol.coordinates, mol.numbers, mol.pseudo_numbers, random_rotate=False, mode='only')
dm_full = mol.get_dm_full()
dm_spin = mol.get_dm_spin()
moldens = mol.obasis.compute_grid_density_dm(dm_full, grid.points)
spindens = mol.obasis.compute_grid_density_dm(dm_spin, grid.points)
bp = BeckeWPart(mol.coordinates, mol.numbers, mol.pseudo_numbers, grid, moldens, spindens)
bp.do_all()
sc = bp['spin_charges']
assert abs(sc - [1.08458698, -0.02813376, -0.02813376, -0.02815979]).max() < 1e-3
| gpl-3.0 |
IUNO-TDM/CouponGenerator | python-printer/Adafruit_Thermal.py | 1 | 16561 | #*************************************************************************
# This is a Python library for the Adafruit Thermal Printer.
# Pick one up at --> http://www.adafruit.com/products/597
# These printers use TTL serial to communicate, 2 pins are required.
# IMPORTANT: On 3.3V systems (e.g. Raspberry Pi), use a 10K resistor on
# the RX pin (TX on the printer, green wire), or simply leave unconnected.
#
# Adafruit invests time and resources providing this open source code.
# Please support Adafruit and open-source hardware by purchasing products
# from Adafruit!
#
# Written by Limor Fried/Ladyada for Adafruit Industries.
# Python port by Phil Burgess for Adafruit Industries.
# MIT license, all text above must be included in any redistribution.
#*************************************************************************
# This is pretty much a 1:1 direct Python port of the Adafruit_Thermal
# library for Arduino. All methods use the same naming conventions as the
# Arduino library, with only slight changes in parameter behavior where
# needed. This should simplify porting existing Adafruit_Thermal-based
# printer projects to Raspberry Pi, BeagleBone, etc. See printertest.py
# for an example.
#
# One significant change is the addition of the printImage() function,
# which ties this to the Python Imaging Library and opens the door to a
# lot of cool graphical stuff!
#
# TO DO:
# - Might use standard ConfigParser library to put thermal calibration
# settings in a global configuration file (rather than in the library).
# - Make this use proper Python library installation procedure.
# - Trap errors properly. Some stuff just falls through right now.
# - Add docstrings throughout!
# Python 2.X code using the library usually needs to include the next line:
from __future__ import print_function
from serial import Serial
import time
class Adafruit_Thermal(Serial):
resumeTime = 0.0
byteTime = 0.0
dotPrintTime = 0.033
dotFeedTime = 0.0025
prevByte = '\n'
column = 0
maxColumn = 32
charHeight = 24
lineSpacing = 8
barcodeHeight = 50
printMode = 0
defaultHeatTime = 60
def __init__(self, *args, **kwargs):
# If no parameters given, use default port & baud rate.
# If only port is passed, use default baud rate.
# If both passed, use those values.
baudrate = 19200
if len(args) == 0:
args = [ "/dev/serial0", baudrate ]
elif len(args) == 1:
args = [ args[0], baudrate ]
else:
baudrate = args[1]
# Calculate time to issue one byte to the printer.
# 11 bits (not 8) to accommodate idle, start and stop bits.
# Idle time might be unnecessary, but erring on side of
# caution here.
self.byteTime = 11.0 / float(baudrate)
Serial.__init__(self, *args, **kwargs)
# Remainder of this method was previously in begin()
# The printer can't start receiving data immediately upon
# power up -- it needs a moment to cold boot and initialize.
# Allow at least 1/2 sec of uptime before printer can
# receive data.
self.timeoutSet(0.5)
self.wake()
self.reset()
# Description of print settings from page 23 of the manual:
# ESC 7 n1 n2 n3 Setting Control Parameter Command
# Decimal: 27 55 n1 n2 n3
# Set "max heating dots", "heating time", "heating interval"
# n1 = 0-255 Max heat dots, Unit (8dots), Default: 7 (64 dots)
# n2 = 3-255 Heating time, Unit (10us), Default: 80 (800us)
# n3 = 0-255 Heating interval, Unit (10us), Default: 2 (20us)
# The more max heating dots, the more peak current will cost
# when printing, the faster printing speed. The max heating
# dots is 8*(n1+1). The more heating time, the more density,
# but the slower printing speed. If heating time is too short,
# blank page may occur. The more heating interval, the more
# clear, but the slower printing speed.
heatTime = kwargs.get('heattime', self.defaultHeatTime)
self.writeBytes(
27, # Esc
55, # 7 (print settings)
20, # Heat dots (20 = balance darkness w/no jams)
heatTime, # Lib default = 45
250) # Heat interval (500 uS = slower but darker)
# Description of print density from page 23 of the manual:
# DC2 # n Set printing density
# Decimal: 18 35 n
# D4..D0 of n is used to set the printing density.
# Density is 50% + 5% * n(D4-D0) printing density.
# D7..D5 of n is used to set the printing break time.
# Break time is n(D7-D5)*250us.
# (Unsure of the default value for either -- not documented)
printDensity = 14 # 120% (can go higher, but text gets fuzzy)
printBreakTime = 4 # 500 uS
self.writeBytes(
18, # DC2
35, # Print density
(printBreakTime << 5) | printDensity)
self.dotPrintTime = 0.03
self.dotFeedTime = 0.0021
# Because there's no flow control between the printer and computer,
# special care must be taken to avoid overrunning the printer's
# buffer. Serial output is throttled based on serial speed as well
# as an estimate of the device's print and feed rates (relatively
# slow, being bound to moving parts and physical reality). After
# an operation is issued to the printer (e.g. bitmap print), a
# timeout is set before which any other printer operations will be
# suspended. This is generally more efficient than using a delay
# in that it allows the calling code to continue with other duties
# (e.g. receiving or decoding an image) while the printer
# physically completes the task.
# Sets estimated completion time for a just-issued task.
def timeoutSet(self, x):
self.resumeTime = time.time() + x
# Waits (if necessary) for the prior task to complete.
def timeoutWait(self):
while (time.time() - self.resumeTime) < 0: pass
# Printer performance may vary based on the power supply voltage,
# thickness of paper, phase of the moon and other seemingly random
# variables. This method sets the times (in microseconds) for the
# paper to advance one vertical 'dot' when printing and feeding.
# For example, in the default initialized state, normal-sized text
# is 24 dots tall and the line spacing is 32 dots, so the time for
# one line to be issued is approximately 24 * print time + 8 * feed
# time. The default print and feed times are based on a random
# test unit, but as stated above your reality may be influenced by
# many factors. This lets you tweak the timing to avoid excessive
# delays and/or overrunning the printer buffer.
def setTimes(self, p, f):
# Units are in microseconds for
# compatibility with Arduino library
self.dotPrintTime = p / 1000000.0
self.dotFeedTime = f / 1000000.0
# 'Raw' byte-writing method
def writeBytes(self, *args):
self.timeoutWait()
self.timeoutSet(len(args) * self.byteTime)
for arg in args:
super(Adafruit_Thermal, self).write(chr(arg))
# Override write() method to keep track of paper feed.
def write(self, *data):
for i in range(len(data)):
c = data[i]
if c != 0x13:
self.timeoutWait()
super(Adafruit_Thermal, self).write(c)
d = self.byteTime
if ((c == '\n') or
(self.column == self.maxColumn)):
# Newline or wrap
if self.prevByte == '\n':
# Feed line (blank)
d += ((self.charHeight +
self.lineSpacing) *
self.dotFeedTime)
else:
# Text line
d += ((self.charHeight *
self.dotPrintTime) +
(self.lineSpacing *
self.dotFeedTime))
self.column = 0
# Treat wrap as newline
# on next pass
c = '\n'
else:
self.column += 1
self.timeoutSet(d)
self.prevByte = c
# The bulk of this method was moved into __init__,
# but this is left here for compatibility with older
# code that might get ported directly from Arduino.
def begin(self, heatTime=defaultHeatTime):
self.writeBytes(
27, # Esc
55, # 7 (print settings)
20, # Heat dots (20 = balance darkness w/no jams)
heatTime, # Lib default = 45
250) # Heat interval (500 uS = slower but darker)
def reset(self):
self.prevByte = '\n' # Treat as if prior line is blank
self.column = 0
self.maxColumn = 32
self.charHeight = 24
self.lineSpacing = 8
self.barcodeHeight = 50
self.writeBytes(27, 64)
# Reset text formatting parameters.
def setDefault(self):
self.online()
self.justify('L')
self.inverseOff()
self.doubleHeightOff()
self.setLineHeight(32)
self.boldOff()
self.underlineOff()
self.setBarcodeHeight(50)
self.setSize('s')
def test(self):
self.writeBytes(18, 84)
self.timeoutSet(
self.dotPrintTime * 24 * 26 +
self.dotFeedTime * (8 * 26 + 32))
UPC_A = 0
UPC_E = 1
EAN13 = 2
EAN8 = 3
CODE39 = 4
I25 = 5
CODEBAR = 6
CODE93 = 7
CODE128 = 8
CODE11 = 9
MSI = 10
def printBarcode(self, text, type):
self.writeBytes(
29, 72, 2, # Print label below barcode
29, 119, 3, # Barcode width
29, 107, type) # Barcode type
# Print string
self.timeoutWait()
self.timeoutSet((self.barcodeHeight + 40) * self.dotPrintTime)
super(Adafruit_Thermal, self).write(text)
self.prevByte = '\n'
self.feed(2)
def setBarcodeHeight(self, val=50):
if val < 1:
val = 1
self.barcodeHeight = val
self.writeBytes(29, 104, val)
# === Character commands ===
INVERSE_MASK = (1 << 1)
UPDOWN_MASK = (1 << 2)
BOLD_MASK = (1 << 3)
DOUBLE_HEIGHT_MASK = (1 << 4)
DOUBLE_WIDTH_MASK = (1 << 5)
STRIKE_MASK = (1 << 6)
def setPrintMode(self, mask):
self.printMode |= mask
self.writePrintMode()
if self.printMode & self.DOUBLE_HEIGHT_MASK:
self.charHeight = 48
else:
self.charHeight = 24
if self.printMode & self.DOUBLE_WIDTH_MASK:
self.maxColumn = 16
else:
self.maxColumn = 32
def unsetPrintMode(self, mask):
self.printMode &= ~mask
self.writePrintMode()
if self.printMode & self.DOUBLE_HEIGHT_MASK:
self.charHeight = 48
else:
self.charHeight = 24
if self.printMode & self.DOUBLE_WIDTH_MASK:
self.maxColumn = 16
else:
self.maxColumn = 32
def writePrintMode(self):
self.writeBytes(27, 33, self.printMode)
def normal(self):
self.printMode = 0
self.writePrintMode()
def inverseOn(self):
self.setPrintMode(self.INVERSE_MASK)
def inverseOff(self):
self.unsetPrintMode(self.INVERSE_MASK)
def upsideDownOn(self):
self.setPrintMode(self.UPDOWN_MASK)
def upsideDownOff(self):
self.unsetPrintMode(self.UPDOWN_MASK)
def doubleHeightOn(self):
self.setPrintMode(self.DOUBLE_HEIGHT_MASK)
def doubleHeightOff(self):
self.unsetPrintMode(self.DOUBLE_HEIGHT_MASK)
def doubleWidthOn(self):
self.setPrintMode(self.DOUBLE_WIDTH_MASK)
def doubleWidthOff(self):
self.unsetPrintMode(self.DOUBLE_WIDTH_MASK)
def strikeOn(self):
self.setPrintMode(self.STRIKE_MASK)
def strikeOff(self):
self.unsetPrintMode(self.STRIKE_MASK)
def boldOn(self):
self.setPrintMode(self.BOLD_MASK)
def boldOff(self):
self.unsetPrintMode(self.BOLD_MASK)
def justify(self, value):
c = value.upper()
if c == 'C':
pos = 1
elif c == 'R':
pos = 2
else:
pos = 0
self.writeBytes(0x1B, 0x61, pos)
# Feeds by the specified number of lines
def feed(self, x=1):
# The datasheet claims sending bytes 27, 100, <x> will work,
# but it feeds much more than that. So it's done manually:
while x > 0:
self.write('\n')
x -= 1
# Feeds by the specified number of individual pixel rows
def feedRows(self, rows):
self.writeBytes(27, 74, rows)
		self.timeoutSet(rows * self.dotFeedTime)
def flush(self):
self.writeBytes(12)
def setSize(self, value):
c = value.upper()
if c == 'L': # Large: double width and height
size = 0x11
self.charHeight = 48
self.maxColumn = 16
elif c == 'M': # Medium: double height
size = 0x01
self.charHeight = 48
self.maxColumn = 32
else: # Small: standard width and height
size = 0x00
self.charHeight = 24
self.maxColumn = 32
self.writeBytes(29, 33, size, 10)
		self.prevByte = '\n'  # Setting the size adds a linefeed
# Underlines of different weights can be produced:
# 0 - no underline
# 1 - normal underline
# 2 - thick underline
def underlineOn(self, weight=1):
self.writeBytes(27, 45, weight)
def underlineOff(self):
self.underlineOn(0)
def printBitmap(self, w, h, bitmap, LaaT=False):
		rowBytes = (w + 7) // 8  # Round up to next byte boundary
if rowBytes >= 48:
rowBytesClipped = 48 # 384 pixels max width
else:
rowBytesClipped = rowBytes
# if LaaT (line-at-a-time) is True, print bitmaps
# scanline-at-a-time (rather than in chunks).
# This tends to make for much cleaner printing
# (no feed gaps) on large images...but has the
# opposite effect on small images that would fit
# in a single 'chunk', so use carefully!
if LaaT: maxChunkHeight = 1
else: maxChunkHeight = 255
i = 0
for rowStart in range(0, h, maxChunkHeight):
chunkHeight = h - rowStart
if chunkHeight > maxChunkHeight:
chunkHeight = maxChunkHeight
# Timeout wait happens here
self.writeBytes(18, 42, chunkHeight, rowBytesClipped)
for y in range(chunkHeight):
for x in range(rowBytesClipped):
super(Adafruit_Thermal, self).write(
chr(bitmap[i]))
i += 1
i += rowBytes - rowBytesClipped
self.timeoutSet(chunkHeight * self.dotPrintTime)
self.prevByte = '\n'
# Print Image. Requires Python Imaging Library. This is
# specific to the Python port and not present in the Arduino
# library. Image will be cropped to 384 pixels width if
# necessary, and converted to 1-bit w/diffusion dithering.
# For any other behavior (scale, B&W threshold, etc.), use
# the Imaging Library to perform such operations before
# passing the result to this function.
def printImage(self, image, LaaT=False):
from PIL import Image
if image.mode != '1':
image = image.convert('1')
width = image.size[0]
height = image.size[1]
if width > 384:
width = 384
		rowBytes = (width + 7) // 8
bitmap = bytearray(rowBytes * height)
pixels = image.load()
for y in range(height):
n = y * rowBytes
x = 0
for b in range(rowBytes):
sum = 0
bit = 128
while bit > 0:
if x >= width: break
if pixels[x, y] == 0:
sum |= bit
x += 1
bit >>= 1
bitmap[n + b] = sum
self.printBitmap(width, height, bitmap, LaaT)
# Take the printer offline. Print commands sent after this
# will be ignored until 'online' is called.
def offline(self):
self.writeBytes(27, 61, 0)
# Take the printer online. Subsequent print commands will be obeyed.
def online(self):
self.writeBytes(27, 61, 1)
# Put the printer into a low-energy state immediately.
def sleep(self):
self.sleepAfter(1)
# Put the printer into a low-energy state after
# the given number of seconds.
def sleepAfter(self, seconds):
self.writeBytes(27, 56, seconds)
def wake(self):
		self.timeoutSet(0)
self.writeBytes(255)
for i in range(10):
self.writeBytes(27)
self.timeoutSet(0.1)
# Empty method, included for compatibility
# with existing code ported from Arduino.
def listen(self):
pass
# Check the status of the paper using the printers self reporting
# ability. Doesn't match the datasheet...
# Returns True for paper, False for no paper.
def hasPaper(self):
self.writeBytes(27, 118, 0)
# Bit 2 of response seems to be paper status
stat = ord(self.read(1)) & 0b00000100
# If set, we have paper; if clear, no paper
return stat == 0
def setLineHeight(self, val=32):
if val < 24:
val = 24
self.lineSpacing = val - 24
# The printer doesn't take into account the current text
# height when setting line height, making this more akin
# to inter-line spacing. Default line spacing is 32
# (char height of 24, line spacing of 8).
self.writeBytes(27, 51, val)
# Copied from Arduino lib for parity; is marked 'not working' there
def tab(self):
self.writeBytes(9)
# Copied from Arduino lib for parity; is marked 'not working' there
def setCharSpacing(self, spacing):
		self.writeBytes(27, 32, spacing)  # ESC SP n: right-side character spacing
# Overloading print() in Python pre-3.0 is dirty pool,
# but these are here to provide more direct compatibility
# with existing code written for the Arduino library.
def print(self, *args, **kwargs):
for arg in args:
self.write(str(arg))
# For Arduino code compatibility again
def println(self, *args, **kwargs):
for arg in args:
self.write(str(arg))
self.write('\n')
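# Minimal usage sketch (illustrative; the serial device path, baud rate and
# an attached printer are assumptions, not guaranteed by this library):
#
#   printer = Adafruit_Thermal("/dev/serial0", 19200, timeout=5)
#   printer.boldOn()
#   printer.println("Hello, thermal world!")
#   printer.boldOff()
#   printer.feed(2)
#   printer.sleep()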
| gpl-3.0 |
minhphung171093/GreenERP_V8 | openerp/addons/project/report/__init__.py | 444 | 1069 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import project_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
sloshedpuppie/LetsGoRetro | addons/plugin.video.emby-master/resources/lib/userclient.py | 1 | 15585 | # -*- coding: utf-8 -*-
##################################################################################################
import hashlib
import threading
import xbmc
import xbmcgui
import xbmcaddon
import xbmcvfs
import artwork
import utils
import clientinfo
import downloadutils
##################################################################################################
class UserClient(threading.Thread):
# Borg - multiple instances, shared state
_shared_state = {}
    stop_thread = False
auth = True
retry = 0
currUser = None
currUserId = None
currServer = None
currToken = None
HasAccess = True
AdditionalUser = []
userSettings = None
def __init__(self):
self.__dict__ = self._shared_state
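        # Borg pattern in action (sketch): every instance shares one state
        # dict, so
        #   a = UserClient(); b = UserClient()
        #   a.currToken = "abc"   # ...means b.currToken is "abc" as well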
self.addon = xbmcaddon.Addon()
self.addonName = clientinfo.ClientInfo().getAddonName()
self.doUtils = downloadutils.DownloadUtils()
threading.Thread.__init__(self)
def logMsg(self, msg, lvl=1):
className = self.__class__.__name__
utils.logMsg("%s %s" % (self.addonName, className), msg, lvl)
def getAdditionalUsers(self):
additionalUsers = utils.settings('additionalUsers')
if additionalUsers:
self.AdditionalUser = additionalUsers.split(',')
def getUsername(self):
username = utils.settings('username')
if not username:
self.logMsg("No username saved.", 2)
return ""
return username
def getLogLevel(self):
try:
logLevel = int(utils.settings('logLevel'))
except ValueError:
logLevel = 0
return logLevel
def getUserId(self):
window = utils.window
settings = utils.settings
username = self.getUsername()
w_userId = window('emby_currUser')
s_userId = settings('userId%s' % username)
# Verify the window property
if w_userId:
if not s_userId:
# Save access token if it's missing from settings
settings('userId%s' % username, value=w_userId)
self.logMsg("Returning userId from WINDOW for username: %s UserId: %s"
% (username, w_userId), 2)
return w_userId
# Verify the settings
elif s_userId:
self.logMsg("Returning userId from SETTINGS for username: %s userId: %s"
% (username, s_userId), 2)
return s_userId
# No userId found
else:
self.logMsg("No userId saved for username: %s." % username, 1)
def getServer(self, prefix=True):
settings = utils.settings
alternate = settings('altip') == "true"
if alternate:
# Alternate host
HTTPS = settings('secondhttps') == "true"
host = settings('secondipaddress')
port = settings('secondport')
else:
# Original host
HTTPS = settings('https') == "true"
host = settings('ipaddress')
port = settings('port')
server = host + ":" + port
if not host:
self.logMsg("No server information saved.", 2)
return False
# If https is true
if prefix and HTTPS:
server = "https://%s" % server
return server
# If https is false
elif prefix and not HTTPS:
server = "http://%s" % server
return server
# If only the host:port is required
elif not prefix:
return server
def getToken(self):
window = utils.window
settings = utils.settings
username = self.getUsername()
userId = self.getUserId()
w_token = window('emby_accessToken%s' % userId)
s_token = settings('accessToken')
# Verify the window property
if w_token:
if not s_token:
# Save access token if it's missing from settings
settings('accessToken', value=w_token)
self.logMsg("Returning accessToken from WINDOW for username: %s accessToken: %s"
% (username, w_token), 2)
return w_token
# Verify the settings
elif s_token:
self.logMsg("Returning accessToken from SETTINGS for username: %s accessToken: %s"
% (username, s_token), 2)
window('emby_accessToken%s' % username, value=s_token)
return s_token
else:
self.logMsg("No token found.", 1)
return ""
def getSSLverify(self):
# Verify host certificate
settings = utils.settings
s_sslverify = settings('sslverify')
if settings('altip') == "true":
s_sslverify = settings('secondsslverify')
if s_sslverify == "true":
return True
else:
return False
def getSSL(self):
# Client side certificate
settings = utils.settings
s_cert = settings('sslcert')
if settings('altip') == "true":
s_cert = settings('secondsslcert')
if s_cert == "None":
return None
else:
return s_cert
def setUserPref(self):
doUtils = self.doUtils.downloadUrl
result = doUtils("{server}/emby/Users/{UserId}?format=json")
self.userSettings = result
# Set user image for skin display
if result.get('PrimaryImageTag'):
utils.window('EmbyUserImage', value=artwork.Artwork().getUserArtwork(result['Id'], 'Primary'))
# Set resume point max
result = doUtils("{server}/emby/System/Configuration?format=json")
utils.settings('markPlayed', value=str(result['MaxResumePct']))
def getPublicUsers(self):
# Get public Users
result = self.doUtils.downloadUrl("%s/emby/Users/Public?format=json" % self.getServer(), authenticate=False)
if result != "":
return result
else:
# Server connection failed
return False
def hasAccess(self):
# hasAccess is verified in service.py
window = utils.window
result = self.doUtils.downloadUrl("{server}/emby/Users?format=json")
        if result is False:
# Access is restricted, set in downloadutils.py via exception
self.logMsg("Access is restricted.", 1)
self.HasAccess = False
elif window('emby_online') != "true":
# Server connection failed
pass
elif window('emby_serverStatus') == "restricted":
self.logMsg("Access is granted.", 1)
self.HasAccess = True
window('emby_serverStatus', clear=True)
xbmcgui.Dialog().notification("Emby for Kodi", utils.language(33007))
def loadCurrUser(self, authenticated=False):
window = utils.window
doUtils = self.doUtils
username = self.getUsername()
userId = self.getUserId()
# Only to be used if token exists
self.currUserId = userId
self.currServer = self.getServer()
self.currToken = self.getToken()
self.ssl = self.getSSLverify()
self.sslcert = self.getSSL()
# Test the validity of current token
        if not authenticated:
url = "%s/emby/Users/%s?format=json" % (self.currServer, userId)
window('emby_currUser', value=userId)
window('emby_accessToken%s' % userId, value=self.currToken)
result = doUtils.downloadUrl(url)
if result == 401:
# Token is no longer valid
self.resetClient()
return False
# Set to windows property
window('emby_currUser', value=userId)
window('emby_accessToken%s' % userId, value=self.currToken)
window('emby_server%s' % userId, value=self.currServer)
window('emby_server_%s' % userId, value=self.getServer(prefix=False))
# Set DownloadUtils values
doUtils.setUsername(username)
doUtils.setUserId(self.currUserId)
doUtils.setServer(self.currServer)
doUtils.setToken(self.currToken)
doUtils.setSSL(self.ssl, self.sslcert)
# parental control - let's verify if access is restricted
self.hasAccess()
# Start DownloadUtils session
doUtils.startSession()
self.getAdditionalUsers()
# Set user preferences in settings
self.currUser = username
self.setUserPref()
def authenticate(self):
lang = utils.language
window = utils.window
settings = utils.settings
dialog = xbmcgui.Dialog()
# Get /profile/addon_data
addondir = xbmc.translatePath(self.addon.getAddonInfo('profile')).decode('utf-8')
hasSettings = xbmcvfs.exists("%ssettings.xml" % addondir)
username = self.getUsername()
server = self.getServer()
# If there's no settings.xml
if not hasSettings:
self.logMsg("No settings.xml found.", 1)
self.auth = False
return
# If no user information
elif not server or not username:
self.logMsg("Missing server information.", 1)
self.auth = False
return
# If there's a token, load the user
elif self.getToken():
result = self.loadCurrUser()
if result is False:
pass
else:
self.logMsg("Current user: %s" % self.currUser, 1)
self.logMsg("Current userId: %s" % self.currUserId, 1)
self.logMsg("Current accessToken: %s" % self.currToken, 2)
return
##### AUTHENTICATE USER #####
users = self.getPublicUsers()
password = ""
# Find user in list
for user in users:
name = user['Name']
if username.decode('utf-8') in name:
# If user has password
                if user['HasPassword']:
password = dialog.input(
heading="%s %s" % (lang(33008), username.decode('utf-8')),
option=xbmcgui.ALPHANUM_HIDE_INPUT)
# If password dialog is cancelled
if not password:
self.logMsg("No password entered.", 0)
window('emby_serverStatus', value="Stop")
self.auth = False
return
break
else:
# Manual login, user is hidden
password = dialog.input(
heading="%s %s" % (lang(33008), username),
option=xbmcgui.ALPHANUM_HIDE_INPUT)
sha1 = hashlib.sha1(password)
sha1 = sha1.hexdigest()
# Authenticate username and password
data = {'username': username, 'password': sha1}
self.logMsg(data, 2)
result = self.doUtils.downloadUrl("%s/emby/Users/AuthenticateByName?format=json" % server, postBody=data, action_type="POST", authenticate=False)
try:
self.logMsg("Auth response: %s" % result, 1)
accessToken = result['AccessToken']
except (KeyError, TypeError):
self.logMsg("Failed to retrieve the api key.", 1)
accessToken = None
if accessToken is not None:
self.currUser = username
dialog.notification("Emby for Kodi",
"%s %s!" % (lang(33000), self.currUser.decode('utf-8')))
settings('accessToken', value=accessToken)
settings('userId%s' % username, value=result['User']['Id'])
self.logMsg("User Authenticated: %s" % accessToken, 1)
self.loadCurrUser(authenticated=True)
window('emby_serverStatus', clear=True)
self.retry = 0
else:
self.logMsg("User authentication failed.", 1)
settings('accessToken', value="")
settings('userId%s' % username, value="")
dialog.ok(lang(33001), lang(33009))
# Give two attempts at entering password
if self.retry == 2:
self.logMsg("Too many retries. "
"You can retry by resetting attempts in the addon settings.", 1)
window('emby_serverStatus', value="Stop")
dialog.ok(lang(33001), lang(33010))
self.retry += 1
self.auth = False
def resetClient(self):
self.logMsg("Reset UserClient authentication.", 1)
if self.currToken is not None:
# In case of 401, removed saved token
utils.settings('accessToken', value="")
utils.window('emby_accessToken%s' % self.getUserId(), clear=True)
self.currToken = None
self.logMsg("User token has been removed.", 1)
self.auth = True
self.currUser = None
def run(self):
window = utils.window
monitor = xbmc.Monitor()
self.logMsg("----===## Starting UserClient ##===----", 0)
while not monitor.abortRequested():
status = window('emby_serverStatus')
if status:
# Verify the connection status to server
if status == "restricted":
# Parental control is restricting access
self.HasAccess = False
elif status == "401":
# Unauthorized access, revoke token
window('emby_serverStatus', value="Auth")
self.resetClient()
if self.auth and (self.currUser is None):
# Try to authenticate user
status = window('emby_serverStatus')
if not status or status == "Auth":
# Set auth flag because we no longer need
# to authenticate the user
self.auth = False
self.authenticate()
if not self.auth and (self.currUser is None):
# If authenticate failed.
server = self.getServer()
username = self.getUsername()
status = window('emby_serverStatus')
# The status Stop is for when user cancelled password dialog.
if server and username and status != "Stop":
# Only if there's information found to login
self.logMsg("Server found: %s" % server, 2)
self.logMsg("Username found: %s" % username, 2)
self.auth = True
            if self.stop_thread:
                # stopClient() was called; exit the loop
                break
if monitor.waitForAbort(1):
# Abort was requested while waiting. We should exit
break
self.doUtils.stopSession()
self.logMsg("##===---- UserClient Stopped ----===##", 0)
def stopClient(self):
# When emby for kodi terminates
        self.stop_thread = True
| gpl-2.0 |
PetePriority/home-assistant | tests/helpers/test_discovery.py | 6 | 7760 | """Test discovery helpers."""
from unittest.mock import patch
import pytest
from homeassistant import loader, setup
from homeassistant.core import callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers import discovery
from tests.common import (
get_test_home_assistant, MockModule, MockPlatform, mock_coro)
class TestHelpersDiscovery:
"""Tests for discovery helper methods."""
def setup_method(self, method):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
@patch('homeassistant.setup.async_setup_component',
return_value=mock_coro())
def test_listen(self, mock_setup_component):
"""Test discovery listen/discover combo."""
helpers = self.hass.helpers
calls_single = []
calls_multi = []
@callback
def callback_single(service, info):
"""Service discovered callback."""
calls_single.append((service, info))
@callback
def callback_multi(service, info):
"""Service discovered callback."""
calls_multi.append((service, info))
helpers.discovery.listen('test service', callback_single)
helpers.discovery.listen(['test service', 'another service'],
callback_multi)
helpers.discovery.discover('test service', 'discovery info',
'test_component')
self.hass.block_till_done()
assert mock_setup_component.called
assert mock_setup_component.call_args[0] == \
(self.hass, 'test_component', None)
assert len(calls_single) == 1
assert calls_single[0] == ('test service', 'discovery info')
helpers.discovery.discover('another service', 'discovery info',
'test_component')
self.hass.block_till_done()
assert len(calls_single) == 1
assert len(calls_multi) == 2
assert ['test service', 'another service'] == [info[0] for info
in calls_multi]
@patch('homeassistant.setup.async_setup_component',
return_value=mock_coro(True))
def test_platform(self, mock_setup_component):
"""Test discover platform method."""
calls = []
@callback
def platform_callback(platform, info):
"""Platform callback method."""
calls.append((platform, info))
discovery.listen_platform(self.hass, 'test_component',
platform_callback)
discovery.load_platform(self.hass, 'test_component', 'test_platform',
'discovery info', {'test_component': {}})
self.hass.block_till_done()
assert mock_setup_component.called
assert mock_setup_component.call_args[0] == \
(self.hass, 'test_component', {'test_component': {}})
self.hass.block_till_done()
discovery.load_platform(self.hass, 'test_component_2', 'test_platform',
'discovery info', {'test_component': {}})
self.hass.block_till_done()
assert len(calls) == 1
assert calls[0] == ('test_platform', 'discovery info')
self.hass.bus.fire(discovery.EVENT_PLATFORM_DISCOVERED, {
discovery.ATTR_SERVICE:
discovery.EVENT_LOAD_PLATFORM.format('test_component')
})
self.hass.block_till_done()
assert len(calls) == 1
def test_circular_import(self):
"""Test we don't break doing circular import.
This test will have test_component discover the switch.test_circular
component while setting up.
The supplied config will load test_component and will load
switch.test_circular.
That means that after startup, we will have test_component and switch
setup. The test_circular platform has been loaded twice.
"""
component_calls = []
platform_calls = []
def component_setup(hass, config):
"""Set up mock component."""
discovery.load_platform(hass, 'switch', 'test_circular', 'disc',
config)
component_calls.append(1)
return True
def setup_platform(hass, config, add_entities_callback,
discovery_info=None):
"""Set up mock platform."""
platform_calls.append('disc' if discovery_info else 'component')
loader.set_component(
self.hass, 'test_component',
MockModule('test_component', setup=component_setup))
loader.set_component(
self.hass, 'switch.test_circular',
MockPlatform(setup_platform,
dependencies=['test_component']))
setup.setup_component(self.hass, 'test_component', {
'test_component': None,
'switch': [{
'platform': 'test_circular',
}],
})
self.hass.block_till_done()
# test_component will only be setup once
assert len(component_calls) == 1
# The platform will be setup once via the config in `setup_component`
# and once via the discovery inside test_component.
assert len(platform_calls) == 2
assert 'test_component' in self.hass.config.components
assert 'switch' in self.hass.config.components
@patch('homeassistant.helpers.signal.async_register_signal_handling')
def test_1st_discovers_2nd_component(self, mock_signal):
"""Test that we don't break if one component discovers the other.
If the first component fires a discovery event to set up the
second component while the second component is about to be set up,
it should not set up the second component twice.
"""
component_calls = []
def component1_setup(hass, config):
"""Set up mock component."""
print('component1 setup')
discovery.discover(hass, 'test_component2',
component='test_component2')
return True
def component2_setup(hass, config):
"""Set up mock component."""
component_calls.append(1)
return True
loader.set_component(
self.hass, 'test_component1',
MockModule('test_component1', setup=component1_setup))
loader.set_component(
self.hass, 'test_component2',
MockModule('test_component2', setup=component2_setup))
@callback
def do_setup():
"""Set up 2 components."""
self.hass.async_add_job(setup.async_setup_component(
self.hass, 'test_component1', {}))
self.hass.async_add_job(setup.async_setup_component(
self.hass, 'test_component2', {}))
self.hass.add_job(do_setup)
self.hass.block_till_done()
# test_component will only be setup once
assert len(component_calls) == 1
async def test_load_platform_forbids_config():
"""Test you cannot setup config component with load_platform."""
with pytest.raises(HomeAssistantError):
await discovery.async_load_platform(None, 'config', 'zwave', {},
{'config': {}})
async def test_discover_forbids_config():
"""Test you cannot setup config component with load_platform."""
with pytest.raises(HomeAssistantError):
await discovery.async_discover(None, None, None, 'config')
| apache-2.0 |
wonwon0/StrategyIA | ai/Util/ai_command.py | 2 | 1866 | from RULEngine.Util.Pose import Pose
from RULEngine.Util.Position import Position
from enum import Enum
class AICommandType(Enum):
STOP = 0
MOVE = 1
KICK = 2
class AICommand(object):
"""
    Stores the states requested by the AI
    before they are turned into the commands sent to the robots
"""
def __init__(self, p_robot_id: int, p_command=AICommandType.STOP, **other_args):
"""
        Initialize.
        :param p_robot_id: (int) the robot identifier
        :param p_command: (AICommandType) the type of AICommand
        :param other_args: (Dict) the flags and arguments to pass along
"""
self.robot_id = p_robot_id
self.command = p_command
self.dribbler_on = other_args.get("dribbler_on", 0)
self.pathfinder_on = other_args.get("pathfinder_on", False)
self.kick_strength = other_args.get("kick_strength", 0)
self.charge_kick = other_args.get("charge_kick", False)
self.kick = other_args.get("kick", False)
self.pose_goal = other_args.get("pose_goal", Pose())
self.speed = Pose()
self.robot_speed = other_args.get("speed", 0)
        # set this flag to True if you only need speed regulation (pose_goal will then be in m/s)
self.speed_flag = other_args.get("speed_flag", False)
        # for the pathfinder only; no direct assignment
self.path = []
def __eq__(self, other):
return self.__dict__ == other.__dict__
# Getter and setter here?
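# Illustrative usage (hypothetical robot id and coordinates):
#   move = AICommand(3, AICommandType.MOVE,
#                    pose_goal=Pose(Position(500, 250), 1.57),
#                    pathfinder_on=True)
#   kick = AICommand(3, AICommandType.KICK, kick_strength=5)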
class RotateAroundCommand(object):
""" Please, move me somewhere else"""
    # TODO: move this class to a more suitable module, as the docstring asks
def __init__(self, radius=0.0, direction=0.0, orientation=0.0, center_position=Position()):
self.radius = radius
self.direction = direction
self.orientation = orientation
        self.center_position = center_position
| mit |
spatialdev/onadata | onadata/apps/restservice/views.py | 8 | 2973 | import json
from django.contrib.auth.decorators import login_required
from django.db.utils import IntegrityError
from django.http import HttpResponse
from django.shortcuts import render
from django.shortcuts import get_object_or_404
from django.template.base import Template
from django.template.context import Context
from django.template.loader import render_to_string
from django.utils.translation import ugettext as _
from onadata.apps.logger.models.xform import XForm
from onadata.apps.restservice.forms import RestServiceForm
from onadata.apps.restservice.models import RestService
@login_required
def add_service(request, username, id_string):
data = {}
form = RestServiceForm()
xform = get_object_or_404(
XForm, user__username__iexact=username, id_string__iexact=id_string)
if request.method == 'POST':
form = RestServiceForm(request.POST)
restservice = None
if form.is_valid():
service_name = form.cleaned_data['service_name']
service_url = form.cleaned_data['service_url']
try:
rs = RestService(service_url=service_url,
name=service_name, xform=xform)
rs.save()
except IntegrityError:
message = _(u"Service already defined.")
status = 'fail'
else:
status = 'success'
message = (_(u"Successfully added service %(name)s.")
% {'name': service_name})
service_tpl = render_to_string("service.html", {
"sv": rs, "username": xform.user.username,
"id_string": xform.id_string})
restservice = service_tpl
else:
status = 'fail'
message = _(u"Please fill in all required fields")
if form.errors:
for field in form:
message += Template(u"{{ field.errors }}")\
.render(Context({'field': field}))
if request.is_ajax():
response = {'status': status, 'message': message}
if restservice:
response["restservice"] = u"%s" % restservice
return HttpResponse(json.dumps(response))
data['status'] = status
data['message'] = message
data['list_services'] = RestService.objects.filter(xform=xform)
data['form'] = form
data['username'] = username
data['id_string'] = id_string
return render(request, "add-service.html", data)
def delete_service(request, username, id_string):
success = "FAILED"
if request.method == 'POST':
pk = request.POST.get('service-id')
if pk:
try:
rs = RestService.objects.get(pk=int(pk))
except RestService.DoesNotExist:
pass
else:
rs.delete()
success = "OK"
return HttpResponse(success)
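# Illustrative client interaction for the view above (the URL wiring is
# hypothetical, defined elsewhere in urls.py):
#   POST .../delete-service with form data {'service-id': '42'}
#   -> responds 'OK' if the RestService existed, 'FAILED' otherwise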
| bsd-2-clause |
oandrew/home-assistant | tests/components/test_logger.py | 16 | 1989 | """The tests for the Logger component."""
from collections import namedtuple
import logging
import unittest
from homeassistant.bootstrap import setup_component
from homeassistant.components import logger
from tests.common import get_test_home_assistant
RECORD = namedtuple('record', ('name', 'levelno'))
class TestUpdater(unittest.TestCase):
"""Test logger component."""
def setUp(self):
"""Setup things to be run when tests are started."""
self.hass = get_test_home_assistant()
self.log_config = {'logger':
{'default': 'warning', 'logs': {'test': 'info'}}}
def tearDown(self):
"""Stop everything that was started."""
del logging.root.handlers[-1]
self.hass.stop()
def test_logger_setup(self):
"""Use logger to create a logging filter."""
setup_component(self.hass, logger.DOMAIN, self.log_config)
self.assertTrue(len(logging.root.handlers) > 0)
handler = logging.root.handlers[-1]
self.assertEqual(len(handler.filters), 1)
log_filter = handler.filters[0].logfilter
self.assertEqual(log_filter['default'], logging.WARNING)
self.assertEqual(log_filter['logs']['test'], logging.INFO)
def test_logger_test_filters(self):
"""Test resulting filter operation."""
setup_component(self.hass, logger.DOMAIN, self.log_config)
log_filter = logging.root.handlers[-1].filters[0]
# Blocked default record
record = RECORD('asdf', logging.DEBUG)
self.assertFalse(log_filter.filter(record))
# Allowed default record
record = RECORD('asdf', logging.WARNING)
self.assertTrue(log_filter.filter(record))
# Blocked named record
record = RECORD('test', logging.DEBUG)
self.assertFalse(log_filter.filter(record))
# Allowed named record
record = RECORD('test', logging.INFO)
self.assertTrue(log_filter.filter(record))
| mit |
gluwer/przepisymm | lib/markdown/odict.py | 143 | 5157 | class OrderedDict(dict):
"""
A dictionary that keeps its keys in the order in which they're inserted.
Copied from Django's SortedDict with some modifications.
"""
def __new__(cls, *args, **kwargs):
instance = super(OrderedDict, cls).__new__(cls, *args, **kwargs)
instance.keyOrder = []
return instance
def __init__(self, data=None):
if data is None:
data = {}
super(OrderedDict, self).__init__(data)
if isinstance(data, dict):
self.keyOrder = data.keys()
else:
self.keyOrder = []
for key, value in data:
if key not in self.keyOrder:
self.keyOrder.append(key)
def __deepcopy__(self, memo):
from copy import deepcopy
return self.__class__([(key, deepcopy(value, memo))
for key, value in self.iteritems()])
def __setitem__(self, key, value):
super(OrderedDict, self).__setitem__(key, value)
if key not in self.keyOrder:
self.keyOrder.append(key)
def __delitem__(self, key):
super(OrderedDict, self).__delitem__(key)
self.keyOrder.remove(key)
def __iter__(self):
for k in self.keyOrder:
yield k
def pop(self, k, *args):
result = super(OrderedDict, self).pop(k, *args)
try:
self.keyOrder.remove(k)
except ValueError:
# Key wasn't in the dictionary in the first place. No problem.
pass
return result
def popitem(self):
result = super(OrderedDict, self).popitem()
self.keyOrder.remove(result[0])
return result
def items(self):
return zip(self.keyOrder, self.values())
def iteritems(self):
for key in self.keyOrder:
yield key, super(OrderedDict, self).__getitem__(key)
def keys(self):
return self.keyOrder[:]
def iterkeys(self):
return iter(self.keyOrder)
def values(self):
return [super(OrderedDict, self).__getitem__(k) for k in self.keyOrder]
def itervalues(self):
for key in self.keyOrder:
yield super(OrderedDict, self).__getitem__(key)
def update(self, dict_):
for k, v in dict_.items():
self.__setitem__(k, v)
def setdefault(self, key, default):
if key not in self.keyOrder:
self.keyOrder.append(key)
return super(OrderedDict, self).setdefault(key, default)
def value_for_index(self, index):
"""Return the value of the item at the given zero-based index."""
return self[self.keyOrder[index]]
def insert(self, index, key, value):
"""Insert the key, value pair before the item with the given index."""
if key in self.keyOrder:
n = self.keyOrder.index(key)
del self.keyOrder[n]
if n < index:
index -= 1
self.keyOrder.insert(index, key)
super(OrderedDict, self).__setitem__(key, value)
def copy(self):
"""Return a copy of this object."""
# This way of initializing the copy means it works for subclasses, too.
obj = self.__class__(self)
obj.keyOrder = self.keyOrder[:]
return obj
def __repr__(self):
"""
Replace the normal dict.__repr__ with a version that returns the keys
in their sorted order.
"""
return '{%s}' % ', '.join(['%r: %r' % (k, v) for k, v in self.items()])
def clear(self):
super(OrderedDict, self).clear()
self.keyOrder = []
def index(self, key):
""" Return the index of a given key. """
return self.keyOrder.index(key)
def index_for_location(self, location):
""" Return index or None for a given location. """
if location == '_begin':
i = 0
elif location == '_end':
i = None
elif location.startswith('<') or location.startswith('>'):
i = self.index(location[1:])
if location.startswith('>'):
if i >= len(self):
# last item
i = None
else:
i += 1
else:
raise ValueError('Not a valid location: "%s". Location key '
'must start with a ">" or "<".' % location)
return i
def add(self, key, value, location):
""" Insert by key location. """
i = self.index_for_location(location)
if i is not None:
self.insert(i, key, value)
else:
self.__setitem__(key, value)
def link(self, key, location):
""" Change location of an existing item. """
n = self.keyOrder.index(key)
del self.keyOrder[n]
i = self.index_for_location(location)
try:
if i is not None:
self.keyOrder.insert(i, key)
else:
self.keyOrder.append(key)
        except Exception:
            # restore to prevent data loss and reraise
            self.keyOrder.insert(n, key)
            raise
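# A minimal usage sketch (illustrative, not part of the original module),
# showing ordered insertion plus the '<'/'>' location grammar used by add()
# and link():
#
#     od = OrderedDict()
#     od['a'] = 1
#     od['c'] = 3
#     od.add('b', 2, '>a')     # insert 'b' just after 'a'
#     od.link('c', '<a')       # move 'c' to just before 'a'
#     assert od.keys() == ['c', 'a', 'b']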
| bsd-3-clause |
amir-qayyum-khan/edx-platform | lms/djangoapps/courseware/model_data.py | 7 | 36374 | """
Classes to provide the LMS runtime data storage to XBlocks.
:class:`DjangoKeyValueStore`: An XBlock :class:`~KeyValueStore` which
stores a subset of xblocks scopes as Django ORM objects. It wraps
:class:`~FieldDataCache` to provide an XBlock-friendly interface.
:class:`FieldDataCache`: A object which provides a read-through prefetch cache
of data to support XBlock fields within a limited set of scopes.
The remaining classes in this module provide read-through prefetch cache implementations
for specific scopes. Each class knows which pieces of information are essential
for its scope, and thus how to cache, prefetch, and create new field data
entries.
UserStateCache: A cache for Scope.user_state
UserStateSummaryCache: A cache for Scope.user_state_summary
PreferencesCache: A cache for Scope.preferences
UserInfoCache: A cache for Scope.user_info
DjangoOrmFieldCache: A base-class for single-row-per-field caches.
"""
import json
from abc import abstractmethod, ABCMeta
from collections import defaultdict, namedtuple
from .models import (
StudentModule,
XModuleUserStateSummaryField,
XModuleStudentPrefsField,
XModuleStudentInfoField
)
import logging
from opaque_keys.edx.keys import CourseKey, UsageKey
from opaque_keys.edx.block_types import BlockTypeKeyV1
from opaque_keys.edx.asides import AsideUsageKeyV1, AsideUsageKeyV2
from contracts import contract, new_contract
from django.db import DatabaseError
from xblock.runtime import KeyValueStore
from xblock.exceptions import KeyValueMultiSaveError, InvalidScopeError
from xblock.fields import Scope, UserScope
from xmodule.modulestore.django import modulestore
from xblock.core import XBlockAside
from courseware.user_state_client import DjangoXBlockUserStateClient
log = logging.getLogger(__name__)
class InvalidWriteError(Exception):
"""
Raised to indicate that writing to a particular key
in the KeyValueStore is disabled
"""
def _all_usage_keys(descriptors, aside_types):
"""
Return a set of all usage_ids for the `descriptors` and for
as all asides in `aside_types` for those descriptors.
"""
usage_ids = set()
for descriptor in descriptors:
usage_ids.add(descriptor.scope_ids.usage_id)
for aside_type in aside_types:
usage_ids.add(AsideUsageKeyV1(descriptor.scope_ids.usage_id, aside_type))
usage_ids.add(AsideUsageKeyV2(descriptor.scope_ids.usage_id, aside_type))
return usage_ids
def _all_block_types(descriptors, aside_types):
"""
Return a set of all block_types for the supplied `descriptors` and for
the asides types in `aside_types` associated with those descriptors.
"""
block_types = set()
for descriptor in descriptors:
block_types.add(BlockTypeKeyV1(descriptor.entry_point, descriptor.scope_ids.block_type))
for aside_type in aside_types:
block_types.add(BlockTypeKeyV1(XBlockAside.entry_point, aside_type))
return block_types
class DjangoKeyValueStore(KeyValueStore):
"""
This KeyValueStore will read and write data in the following scopes to django models
Scope.user_state_summary
Scope.user_state
Scope.preferences
Scope.user_info
Access to any other scopes will raise an InvalidScopeError
Data for Scope.user_state is stored as StudentModule objects via the django orm.
Data for the other scopes is stored in individual objects that are named for the
scope involved and have the field name as a key
If the key isn't found in the expected table during a read or a delete, then a KeyError will be raised
"""
_allowed_scopes = (
Scope.user_state_summary,
Scope.user_state,
Scope.preferences,
Scope.user_info,
)
def __init__(self, field_data_cache):
self._field_data_cache = field_data_cache
def get(self, key):
self._raise_unless_scope_is_allowed(key)
return self._field_data_cache.get(key)
def set(self, key, value):
"""
Set a single value in the KeyValueStore
"""
self.set_many({key: value})
def set_many(self, kv_dict):
"""
Provide a bulk save mechanism.
`kv_dict`: A dictionary of dirty fields that maps
xblock.KvsFieldData._key : value
"""
for key in kv_dict:
# Check key for validity
self._raise_unless_scope_is_allowed(key)
self._field_data_cache.set_many(kv_dict)
def delete(self, key):
self._raise_unless_scope_is_allowed(key)
self._field_data_cache.delete(key)
def has(self, key):
self._raise_unless_scope_is_allowed(key)
return self._field_data_cache.has(key)
def _raise_unless_scope_is_allowed(self, key):
"""Raise an InvalidScopeError if key.scope is not in self._allowed_scopes."""
if key.scope not in self._allowed_scopes:
raise InvalidScopeError(key, self._allowed_scopes)
new_contract("DjangoKeyValueStore", DjangoKeyValueStore)
new_contract("DjangoKeyValueStore_Key", DjangoKeyValueStore.Key)
class DjangoOrmFieldCache(object):
"""
Baseclass for Scope-specific field cache objects that are based on
single-row-per-field Django ORM objects.
"""
__metaclass__ = ABCMeta
def __init__(self):
self._cache = {}
def cache_fields(self, fields, xblocks, aside_types):
"""
Load all fields specified by ``fields`` for the supplied ``xblocks``
and ``aside_types`` into this cache.
Arguments:
fields (list of str): Field names to cache.
xblocks (list of :class:`XBlock`): XBlocks to cache fields for.
aside_types (list of str): Aside types to cache fields for.
"""
for field_object in self._read_objects(fields, xblocks, aside_types):
self._cache[self._cache_key_for_field_object(field_object)] = field_object
@contract(kvs_key=DjangoKeyValueStore.Key)
def get(self, kvs_key):
"""
Return the django model object specified by `kvs_key` from
the cache.
Arguments:
            kvs_key (`DjangoKeyValueStore.Key`): The field value to get
Returns: A django orm object from the cache
"""
cache_key = self._cache_key_for_kvs_key(kvs_key)
if cache_key not in self._cache:
raise KeyError(kvs_key.field_name)
field_object = self._cache[cache_key]
return json.loads(field_object.value)
@contract(kvs_key=DjangoKeyValueStore.Key)
def set(self, kvs_key, value):
"""
Set the specified `kvs_key` to the field value `value`.
Arguments:
            kvs_key (`DjangoKeyValueStore.Key`): The field to set
value: The field value to store
"""
self.set_many({kvs_key: value})
@contract(kv_dict="dict(DjangoKeyValueStore_Key: *)")
def set_many(self, kv_dict):
"""
Set the specified fields to the supplied values.
Arguments:
kv_dict (dict): A dictionary mapping :class:`~DjangoKeyValueStore.Key`
objects to values to set.
"""
saved_fields = []
for kvs_key, value in sorted(kv_dict.items()):
cache_key = self._cache_key_for_kvs_key(kvs_key)
field_object = self._cache.get(cache_key)
try:
serialized_value = json.dumps(value)
# It is safe to force an insert or an update, because
# a) we should have retrieved the object as part of the
# prefetch step, so if it isn't in our cache, it doesn't exist yet.
# b) no other code should be modifying these models out of band of
# this cache.
if field_object is None:
field_object = self._create_object(kvs_key, serialized_value)
field_object.save(force_insert=True)
self._cache[cache_key] = field_object
else:
field_object.value = serialized_value
field_object.save(force_update=True)
except DatabaseError:
log.exception("Saving field %r failed", kvs_key.field_name)
raise KeyValueMultiSaveError(saved_fields)
            else:
                # Only count the field as saved when the write succeeded.
                saved_fields.append(kvs_key.field_name)
@contract(kvs_key=DjangoKeyValueStore.Key)
def delete(self, kvs_key):
"""
Delete the value specified by `kvs_key`.
Arguments:
kvs_key (`DjangoKeyValueStore.Key`): The field value to delete
Raises: KeyError if key isn't found in the cache
"""
cache_key = self._cache_key_for_kvs_key(kvs_key)
field_object = self._cache.get(cache_key)
if field_object is None:
raise KeyError(kvs_key.field_name)
field_object.delete()
del self._cache[cache_key]
@contract(kvs_key=DjangoKeyValueStore.Key, returns=bool)
def has(self, kvs_key):
"""
Return whether the specified `kvs_key` is set.
Arguments:
            kvs_key (`DjangoKeyValueStore.Key`): The field value to check
Returns: bool
"""
return self._cache_key_for_kvs_key(kvs_key) in self._cache
@contract(kvs_key=DjangoKeyValueStore.Key, returns="datetime|None")
def last_modified(self, kvs_key):
"""
Return when the supplied field was changed.
Arguments:
            kvs_key (`DjangoKeyValueStore.Key`): The key representing the cached field
Returns: datetime if there was a modified date, or None otherwise
"""
field_object = self._cache.get(self._cache_key_for_kvs_key(kvs_key))
if field_object is None:
return None
else:
return field_object.modified
def __len__(self):
return len(self._cache)
@abstractmethod
def _create_object(self, kvs_key, value):
"""
Create a new object to add to the cache (which should record
the specified field ``value`` for the field identified by
``kvs_key``).
Arguments:
kvs_key (:class:`DjangoKeyValueStore.Key`): Which field to create an entry for
value: What value to record in the field
"""
raise NotImplementedError()
@abstractmethod
def _read_objects(self, fields, xblocks, aside_types):
"""
Return an iterator for all objects stored in the underlying datastore
for the ``fields`` on the ``xblocks`` and the ``aside_types`` associated
with them.
Arguments:
fields (list of str): Field names to return values for
xblocks (list of :class:`~XBlock`): XBlocks to load fields for
            aside_types (list of str): Asides to load fields for (which annotate the supplied
xblocks).
"""
raise NotImplementedError()
@abstractmethod
def _cache_key_for_field_object(self, field_object):
"""
Return the key used in this DjangoOrmFieldCache to store the specified field_object.
Arguments:
field_object: A Django model instance that stores the data for fields in this cache
"""
raise NotImplementedError()
@abstractmethod
def _cache_key_for_kvs_key(self, key):
"""
Return the key used in this DjangoOrmFieldCache for the specified KeyValueStore key.
Arguments:
key (:class:`~DjangoKeyValueStore.Key`): The key representing the cached field
"""
raise NotImplementedError()
class UserStateCache(object):
"""
Cache for Scope.user_state xblock field data.
"""
def __init__(self, user, course_id):
self._cache = defaultdict(dict)
self.course_id = course_id
self.user = user
self._client = DjangoXBlockUserStateClient(self.user)
def cache_fields(self, fields, xblocks, aside_types): # pylint: disable=unused-argument
"""
Load all fields specified by ``fields`` for the supplied ``xblocks``
and ``aside_types`` into this cache.
Arguments:
fields (list of str): Field names to cache.
xblocks (list of :class:`XBlock`): XBlocks to cache fields for.
aside_types (list of str): Aside types to cache fields for.
"""
block_field_state = self._client.get_many(
self.user.username,
_all_usage_keys(xblocks, aside_types),
)
for user_state in block_field_state:
self._cache[user_state.block_key] = user_state.state
@contract(kvs_key=DjangoKeyValueStore.Key)
def set(self, kvs_key, value):
"""
Set the specified `kvs_key` to the field value `value`.
Arguments:
            kvs_key (`DjangoKeyValueStore.Key`): The field to set
value: The field value to store
"""
self.set_many({kvs_key: value})
@contract(kvs_key=DjangoKeyValueStore.Key, returns="datetime|None")
def last_modified(self, kvs_key):
"""
Return when the supplied field was changed.
Arguments:
kvs_key (`DjangoKeyValueStore.Key`): The key representing the cached field
Returns: datetime if there was a modified date, or None otherwise
"""
try:
return self._client.get(
self.user.username,
kvs_key.block_scope_id,
fields=[kvs_key.field_name],
).updated
except self._client.DoesNotExist:
return None
@contract(kv_dict="dict(DjangoKeyValueStore_Key: *)")
def set_many(self, kv_dict):
"""
Set the specified fields to the supplied values.
Arguments:
kv_dict (dict): A dictionary mapping :class:`~DjangoKeyValueStore.Key`
objects to values to set.
"""
pending_updates = defaultdict(dict)
for kvs_key, value in kv_dict.items():
cache_key = self._cache_key_for_kvs_key(kvs_key)
pending_updates[cache_key][kvs_key.field_name] = value
try:
self._client.set_many(
self.user.username,
pending_updates
)
except DatabaseError:
log.exception("Saving user state failed for %s", self.user.username)
raise KeyValueMultiSaveError([])
finally:
self._cache.update(pending_updates)
@contract(kvs_key=DjangoKeyValueStore.Key)
def get(self, kvs_key):
"""
Return the django model object specified by `kvs_key` from
the cache.
Arguments:
            kvs_key (`DjangoKeyValueStore.Key`): The field value to get
Returns: A django orm object from the cache
"""
cache_key = self._cache_key_for_kvs_key(kvs_key)
if cache_key not in self._cache:
raise KeyError(kvs_key.field_name)
return self._cache[cache_key][kvs_key.field_name]
@contract(kvs_key=DjangoKeyValueStore.Key)
def delete(self, kvs_key):
"""
Delete the value specified by `kvs_key`.
Arguments:
kvs_key (`DjangoKeyValueStore.Key`): The field value to delete
Raises: KeyError if key isn't found in the cache
"""
cache_key = self._cache_key_for_kvs_key(kvs_key)
if cache_key not in self._cache:
raise KeyError(kvs_key.field_name)
field_state = self._cache[cache_key]
if kvs_key.field_name not in field_state:
raise KeyError(kvs_key.field_name)
self._client.delete(self.user.username, cache_key, fields=[kvs_key.field_name])
del field_state[kvs_key.field_name]
@contract(kvs_key=DjangoKeyValueStore.Key, returns=bool)
def has(self, kvs_key):
"""
Return whether the specified `kvs_key` is set.
Arguments:
            kvs_key (`DjangoKeyValueStore.Key`): The field value to check
Returns: bool
"""
cache_key = self._cache_key_for_kvs_key(kvs_key)
return (
cache_key in self._cache and
kvs_key.field_name in self._cache[cache_key]
)
def __len__(self):
return len(self._cache)
def _cache_key_for_kvs_key(self, key):
"""
Return the key used in this DjangoOrmFieldCache for the specified KeyValueStore key.
Arguments:
key (:class:`~DjangoKeyValueStore.Key`): The key representing the cached field
"""
return key.block_scope_id
class UserStateSummaryCache(DjangoOrmFieldCache):
"""
Cache for Scope.user_state_summary xblock field data.
"""
def __init__(self, course_id):
super(UserStateSummaryCache, self).__init__()
self.course_id = course_id
def _create_object(self, kvs_key, value):
"""
Create a new object to add to the cache (which should record
the specified field ``value`` for the field identified by
``kvs_key``).
Arguments:
kvs_key (:class:`DjangoKeyValueStore.Key`): Which field to create an entry for
value: The value to assign to the new field object
"""
return XModuleUserStateSummaryField(
field_name=kvs_key.field_name,
usage_id=kvs_key.block_scope_id,
value=value,
)
def _read_objects(self, fields, xblocks, aside_types):
"""
Return an iterator for all objects stored in the underlying datastore
for the ``fields`` on the ``xblocks`` and the ``aside_types`` associated
with them.
Arguments:
fields (list of :class:`~Field`): Fields to return values for
xblocks (list of :class:`~XBlock`): XBlocks to load fields for
            aside_types (list of str): Asides to load fields for (which annotate the supplied
xblocks).
"""
return XModuleUserStateSummaryField.objects.chunked_filter(
'usage_id__in',
_all_usage_keys(xblocks, aside_types),
field_name__in=set(field.name for field in fields),
)
def _cache_key_for_field_object(self, field_object):
"""
Return the key used in this DjangoOrmFieldCache to store the specified field_object.
Arguments:
field_object: A Django model instance that stores the data for fields in this cache
"""
return (field_object.usage_id.map_into_course(self.course_id), field_object.field_name)
def _cache_key_for_kvs_key(self, key):
"""
Return the key used in this DjangoOrmFieldCache for the specified KeyValueStore key.
Arguments:
key (:class:`~DjangoKeyValueStore.Key`): The key representing the cached field
"""
return (key.block_scope_id, key.field_name)
class PreferencesCache(DjangoOrmFieldCache):
"""
Cache for Scope.preferences xblock field data.
"""
def __init__(self, user):
super(PreferencesCache, self).__init__()
self.user = user
def _create_object(self, kvs_key, value):
"""
Create a new object to add to the cache (which should record
the specified field ``value`` for the field identified by
``kvs_key``).
Arguments:
kvs_key (:class:`DjangoKeyValueStore.Key`): Which field to create an entry for
value: The value to assign to the new field object
"""
return XModuleStudentPrefsField(
field_name=kvs_key.field_name,
module_type=BlockTypeKeyV1(kvs_key.block_family, kvs_key.block_scope_id),
student_id=kvs_key.user_id,
value=value,
)
def _read_objects(self, fields, xblocks, aside_types):
"""
Return an iterator for all objects stored in the underlying datastore
for the ``fields`` on the ``xblocks`` and the ``aside_types`` associated
with them.
Arguments:
fields (list of str): Field names to return values for
xblocks (list of :class:`~XBlock`): XBlocks to load fields for
            aside_types (list of str): Asides to load fields for (which annotate the supplied
xblocks).
"""
return XModuleStudentPrefsField.objects.chunked_filter(
'module_type__in',
_all_block_types(xblocks, aside_types),
student=self.user.pk,
field_name__in=set(field.name for field in fields),
)
def _cache_key_for_field_object(self, field_object):
"""
Return the key used in this DjangoOrmFieldCache to store the specified field_object.
Arguments:
field_object: A Django model instance that stores the data for fields in this cache
"""
return (field_object.module_type, field_object.field_name)
def _cache_key_for_kvs_key(self, key):
"""
Return the key used in this DjangoOrmFieldCache for the specified KeyValueStore key.
Arguments:
key (:class:`~DjangoKeyValueStore.Key`): The key representing the cached field
"""
return (BlockTypeKeyV1(key.block_family, key.block_scope_id), key.field_name)
class UserInfoCache(DjangoOrmFieldCache):
"""
Cache for Scope.user_info xblock field data
"""
def __init__(self, user):
super(UserInfoCache, self).__init__()
self.user = user
def _create_object(self, kvs_key, value):
"""
Create a new object to add to the cache (which should record
the specified field ``value`` for the field identified by
``kvs_key``).
Arguments:
kvs_key (:class:`DjangoKeyValueStore.Key`): Which field to create an entry for
value: The value to assign to the new field object
"""
return XModuleStudentInfoField(
field_name=kvs_key.field_name,
student_id=kvs_key.user_id,
value=value,
)
def _read_objects(self, fields, xblocks, aside_types):
"""
Return an iterator for all objects stored in the underlying datastore
for the ``fields`` on the ``xblocks`` and the ``aside_types`` associated
with them.
Arguments:
fields (list of str): Field names to return values for
xblocks (list of :class:`~XBlock`): XBlocks to load fields for
            aside_types (list of str): Asides to load fields for (which annotate the supplied
xblocks).
"""
return XModuleStudentInfoField.objects.filter(
student=self.user.pk,
field_name__in=set(field.name for field in fields),
)
def _cache_key_for_field_object(self, field_object):
"""
Return the key used in this DjangoOrmFieldCache to store the specified field_object.
Arguments:
field_object: A Django model instance that stores the data for fields in this cache
"""
return field_object.field_name
def _cache_key_for_kvs_key(self, key):
"""
Return the key used in this DjangoOrmFieldCache for the specified KeyValueStore key.
Arguments:
key (:class:`~DjangoKeyValueStore.Key`): The key representing the cached field
"""
return key.field_name
class FieldDataCache(object):
"""
A cache of django model objects needed to supply the data
for a module and its descendants
"""
def __init__(self, descriptors, course_id, user, select_for_update=False, asides=None):
"""
Find any courseware.models objects that are needed by any descriptor
in descriptors. Attempts to minimize the number of queries to the database.
Note: Only modules that have store_state = True or have shared
state will have a StudentModule.
Arguments
descriptors: A list of XModuleDescriptors.
course_id: The id of the current course
user: The user for which to cache data
select_for_update: Ignored
asides: The list of aside types to load, or None to prefetch no asides.
"""
if asides is None:
self.asides = []
else:
self.asides = asides
assert isinstance(course_id, CourseKey)
self.course_id = course_id
self.user = user
self.cache = {
Scope.user_state: UserStateCache(
self.user,
self.course_id,
),
Scope.user_info: UserInfoCache(
self.user,
),
Scope.preferences: PreferencesCache(
self.user,
),
Scope.user_state_summary: UserStateSummaryCache(
self.course_id,
),
}
self.scorable_locations = set()
self.add_descriptors_to_cache(descriptors)
def add_descriptors_to_cache(self, descriptors):
"""
Add all `descriptors` to this FieldDataCache.
"""
if self.user.is_authenticated():
self.scorable_locations.update(desc.location for desc in descriptors if desc.has_score)
for scope, fields in self._fields_to_cache(descriptors).items():
if scope not in self.cache:
continue
self.cache[scope].cache_fields(fields, descriptors, self.asides)
def add_descriptor_descendents(self, descriptor, depth=None, descriptor_filter=lambda descriptor: True):
"""
Add all descendants of `descriptor` to this FieldDataCache.
Arguments:
descriptor: An XModuleDescriptor
depth is the number of levels of descendant modules to load StudentModules for, in addition to
the supplied descriptor. If depth is None, load all descendant StudentModules
        descriptor_filter is a function that accepts a descriptor and returns whether the field data
should be cached
"""
def get_child_descriptors(descriptor, depth, descriptor_filter):
"""
Return a list of all child descriptors down to the specified depth
that match the descriptor filter. Includes `descriptor`
descriptor: The parent to search inside
depth: The number of levels to descend, or None for infinite depth
descriptor_filter(descriptor): A function that returns True
if descriptor should be included in the results
"""
if descriptor_filter(descriptor):
descriptors = [descriptor]
else:
descriptors = []
if depth is None or depth > 0:
new_depth = depth - 1 if depth is not None else depth
for child in descriptor.get_children() + descriptor.get_required_module_descriptors():
descriptors.extend(get_child_descriptors(child, new_depth, descriptor_filter))
return descriptors
with modulestore().bulk_operations(descriptor.location.course_key):
descriptors = get_child_descriptors(descriptor, depth, descriptor_filter)
self.add_descriptors_to_cache(descriptors)
@classmethod
def cache_for_descriptor_descendents(cls, course_id, user, descriptor, depth=None,
descriptor_filter=lambda descriptor: True,
select_for_update=False, asides=None):
"""
course_id: the course in the context of which we want StudentModules.
user: the django user for whom to load modules.
descriptor: An XModuleDescriptor
depth is the number of levels of descendant modules to load StudentModules for, in addition to
the supplied descriptor. If depth is None, load all descendant StudentModules
        descriptor_filter is a function that accepts a descriptor and returns whether the field data
should be cached
select_for_update: Ignored
"""
cache = FieldDataCache([], course_id, user, select_for_update, asides=asides)
cache.add_descriptor_descendents(descriptor, depth, descriptor_filter)
return cache
def _fields_to_cache(self, descriptors):
"""
Returns a map of scopes to fields in that scope that should be cached
"""
scope_map = defaultdict(set)
for descriptor in descriptors:
for field in descriptor.fields.values():
scope_map[field.scope].add(field)
return scope_map
@contract(key=DjangoKeyValueStore.Key)
def get(self, key):
"""
Load the field value specified by `key`.
Arguments:
key (`DjangoKeyValueStore.Key`): The field value to load
Returns: The found value
Raises: KeyError if key isn't found in the cache
"""
if key.scope.user == UserScope.ONE and not self.user.is_anonymous():
# If we're getting user data, we expect that the key matches the
# user we were constructed for.
assert key.user_id == self.user.id
if key.scope not in self.cache:
raise KeyError(key.field_name)
return self.cache[key.scope].get(key)
@contract(kv_dict="dict(DjangoKeyValueStore_Key: *)")
def set_many(self, kv_dict):
"""
Set all of the fields specified by the keys of `kv_dict` to the values
in that dict.
Arguments:
kv_dict (dict): dict mapping from `DjangoKeyValueStore.Key`s to field values
Raises: DatabaseError if any fields fail to save
"""
saved_fields = []
by_scope = defaultdict(dict)
for key, value in kv_dict.iteritems():
if key.scope.user == UserScope.ONE and not self.user.is_anonymous():
# If we're getting user data, we expect that the key matches the
# user we were constructed for.
assert key.user_id == self.user.id
if key.scope not in self.cache:
continue
by_scope[key.scope][key] = value
for scope, set_many_data in by_scope.iteritems():
try:
self.cache[scope].set_many(set_many_data)
# If save is successful on these fields, add it to
# the list of successful saves
saved_fields.extend(key.field_name for key in set_many_data)
except KeyValueMultiSaveError as exc:
log.exception('Error saving fields %r', [key.field_name for key in set_many_data])
raise KeyValueMultiSaveError(saved_fields + exc.saved_field_names)
@contract(key=DjangoKeyValueStore.Key)
def delete(self, key):
"""
Delete the value specified by `key`.
Arguments:
key (`DjangoKeyValueStore.Key`): The field value to delete
Raises: KeyError if key isn't found in the cache
"""
if key.scope.user == UserScope.ONE and not self.user.is_anonymous():
# If we're getting user data, we expect that the key matches the
# user we were constructed for.
assert key.user_id == self.user.id
if key.scope not in self.cache:
raise KeyError(key.field_name)
self.cache[key.scope].delete(key)
@contract(key=DjangoKeyValueStore.Key, returns=bool)
def has(self, key):
"""
Return whether the specified `key` is set.
Arguments:
            key (`DjangoKeyValueStore.Key`): The field value to check
Returns: bool
"""
if key.scope.user == UserScope.ONE and not self.user.is_anonymous():
# If we're getting user data, we expect that the key matches the
# user we were constructed for.
assert key.user_id == self.user.id
if key.scope not in self.cache:
return False
return self.cache[key.scope].has(key)
@contract(key=DjangoKeyValueStore.Key, returns="datetime|None")
def last_modified(self, key):
"""
Return when the supplied field was changed.
Arguments:
            key (`DjangoKeyValueStore.Key`): The key representing the cached field
Returns: datetime if there was a modified date, or None otherwise
"""
if key.scope.user == UserScope.ONE and not self.user.is_anonymous():
# If we're getting user data, we expect that the key matches the
# user we were constructed for.
assert key.user_id == self.user.id
if key.scope not in self.cache:
return None
return self.cache[key.scope].last_modified(key)
def __len__(self):
return sum(len(cache) for cache in self.cache.values())
class ScoresClient(object):
"""
Basic client interface for retrieving Score information.
Eventually, this should read and write scores, but at the moment it only
handles the read side of things.
"""
Score = namedtuple('Score', 'correct total')
def __init__(self, course_key, user_id):
self.course_key = course_key
self.user_id = user_id
self._locations_to_scores = {}
self._has_fetched = False
def __contains__(self, location):
"""Return True if we have a score for this location."""
return location in self._locations_to_scores
def fetch_scores(self, locations):
"""Grab score information."""
scores_qset = StudentModule.objects.filter(
student_id=self.user_id,
course_id=self.course_key,
module_state_key__in=set(locations),
)
# Locations in StudentModule don't necessarily have course key info
# attached to them (since old mongo identifiers don't include runs).
# So we have to add that info back in before we put it into our lookup.
self._locations_to_scores.update({
UsageKey.from_string(location).map_into_course(self.course_key): self.Score(correct, total)
for location, correct, total
in scores_qset.values_list('module_state_key', 'grade', 'max_grade')
})
self._has_fetched = True
def get(self, location):
"""
Get the score for a given location, if it exists.
        If we don't have a score for that location, return `None`. Note that,
        by convention, you should pass in a location with full course run
        information.
"""
if not self._has_fetched:
raise ValueError(
"Tried to fetch location {} from ScoresClient before fetch_scores() has run."
.format(location)
)
return self._locations_to_scores.get(location.replace(version=None, branch=None))
@classmethod
def create_for_locations(cls, course_id, user_id, scorable_locations):
"""Create a ScoresClient with pre-fetched data for the given locations."""
client = cls(course_id, user_id)
client.fetch_scores(scorable_locations)
return client
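# Usage sketch (illustrative keys; mirrors create_for_locations above):
#
#     scores_client = ScoresClient.create_for_locations(
#         course_key, user.id, field_data_cache.scorable_locations)
#     score = scores_client.get(usage_key)   # Score(correct, total) or None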
# @contract(user_id=int, usage_key=UsageKey, score="number|None", max_score="number|None")
def set_score(user_id, usage_key, score, max_score):
"""
Set the score and max_score for the specified user and xblock usage.
"""
student_module, created = StudentModule.objects.get_or_create(
student_id=user_id,
module_state_key=usage_key,
course_id=usage_key.course_key,
defaults={
'grade': score,
'max_grade': max_score,
}
)
if not created:
student_module.grade = score
student_module.max_grade = max_score
student_module.save()
return student_module.modified
def get_score(user_id, usage_key):
"""
Get the score and max_score for the specified user and xblock usage.
    Returns the StudentModule (which carries grade and max_grade), or None if not found.
"""
try:
student_module = StudentModule.objects.get(
student_id=user_id,
module_state_key=usage_key,
course_id=usage_key.course_key,
)
except StudentModule.DoesNotExist:
return None
else:
return student_module
| agpl-3.0 |
tudorbarascu/QGIS | tests/src/python/test_qgssinglesymbolrenderer.py | 43 | 3309 | # -*- coding: utf-8 -*-
"""
***************************************************************************
test_qgssinglesymbolrenderer.py
---------------------
Date : December 2015
Copyright : (C) 2015 by Matthias Kuhn
Email : matthias at opengis dot ch
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
From build dir, run: ctest -R PyQgsSingleSymbolRenderer -V
"""
__author__ = 'Matthias Kuhn'
__date__ = 'December 2015'
__copyright__ = '(C) 2015, Matthias Kuhn'
import qgis # NOQA
import os
from qgis.PyQt.QtCore import QSize
from qgis.core import (QgsVectorLayer,
QgsProject,
QgsRectangle,
QgsMultiRenderChecker,
QgsSingleSymbolRenderer,
QgsFillSymbol,
QgsFeatureRequest,
QgsRenderContext
)
from qgis.testing import unittest
from qgis.testing.mocked import get_iface
from utilities import unitTestDataPath
TEST_DATA_DIR = unitTestDataPath()
class TestQgsSingleSymbolRenderer(unittest.TestCase):
def setUp(self):
self.iface = get_iface()
myShpFile = os.path.join(TEST_DATA_DIR, 'polys_overlapping.shp')
layer = QgsVectorLayer(myShpFile, 'Polys', 'ogr')
QgsProject.instance().addMapLayer(layer)
# Create rulebased style
sym1 = QgsFillSymbol.createSimple({'color': '#fdbf6f', 'outline_color': 'black'})
self.renderer = QgsSingleSymbolRenderer(sym1)
layer.setRenderer(self.renderer)
rendered_layers = [layer]
self.mapsettings = self.iface.mapCanvas().mapSettings()
self.mapsettings.setOutputSize(QSize(400, 400))
self.mapsettings.setOutputDpi(96)
self.mapsettings.setExtent(QgsRectangle(-163, 22, -70, 52))
self.mapsettings.setLayers(rendered_layers)
def testOrderBy(self):
self.renderer.setOrderBy(QgsFeatureRequest.OrderBy([QgsFeatureRequest.OrderByClause('Value', False)]))
self.renderer.setOrderByEnabled(True)
# Setup rendering check
renderchecker = QgsMultiRenderChecker()
renderchecker.setMapSettings(self.mapsettings)
renderchecker.setControlName('expected_singlesymbol_orderby')
self.assertTrue(renderchecker.runTest('singlesymbol_orderby'))
# disable order by and retest
self.renderer.setOrderByEnabled(False)
self.assertTrue(renderchecker.runTest('single'))
def testUsedAttributes(self):
ctx = QgsRenderContext.fromMapSettings(self.mapsettings)
self.assertCountEqual(self.renderer.usedAttributes(ctx), {})
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
dmitry-r/incubator-airflow | airflow/www/app.py | 12 | 5467 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import logging
import socket
import six
from flask import Flask
from flask_admin import Admin, base
from flask_cache import Cache
from flask_wtf.csrf import CSRFProtect
csrf = CSRFProtect()
import airflow
from airflow import models
from airflow.settings import Session
from airflow.www.blueprints import routes
from airflow import jobs
from airflow import settings
from airflow import configuration
def create_app(config=None, testing=False):
app = Flask(__name__)
app.secret_key = configuration.get('webserver', 'SECRET_KEY')
app.config['LOGIN_DISABLED'] = not configuration.getboolean('webserver', 'AUTHENTICATE')
csrf.init_app(app)
app.config['TESTING'] = testing
airflow.load_login()
airflow.login.login_manager.init_app(app)
from airflow import api
api.load_auth()
api.api_auth.init_app(app)
cache = Cache(
app=app, config={'CACHE_TYPE': 'filesystem', 'CACHE_DIR': '/tmp'})
app.register_blueprint(routes)
log_format = airflow.settings.LOG_FORMAT_WITH_PID
airflow.settings.configure_logging(log_format=log_format)
with app.app_context():
from airflow.www import views
admin = Admin(
app, name='Airflow',
static_url_path='/admin',
index_view=views.HomeView(endpoint='', url='/admin', name="DAGs"),
template_mode='bootstrap3',
)
av = admin.add_view
vs = views
av(vs.Airflow(name='DAGs', category='DAGs'))
av(vs.QueryView(name='Ad Hoc Query', category="Data Profiling"))
av(vs.ChartModelView(
models.Chart, Session, name="Charts", category="Data Profiling"))
av(vs.KnownEventView(
models.KnownEvent,
Session, name="Known Events", category="Data Profiling"))
av(vs.SlaMissModelView(
models.SlaMiss,
Session, name="SLA Misses", category="Browse"))
av(vs.TaskInstanceModelView(models.TaskInstance,
Session, name="Task Instances", category="Browse"))
av(vs.LogModelView(
models.Log, Session, name="Logs", category="Browse"))
av(vs.JobModelView(
jobs.BaseJob, Session, name="Jobs", category="Browse"))
av(vs.PoolModelView(
models.Pool, Session, name="Pools", category="Admin"))
av(vs.ConfigurationView(
name='Configuration', category="Admin"))
av(vs.UserModelView(
models.User, Session, name="Users", category="Admin"))
av(vs.ConnectionModelView(
models.Connection, Session, name="Connections", category="Admin"))
av(vs.VariableView(
models.Variable, Session, name="Variables", category="Admin"))
av(vs.XComView(
models.XCom, Session, name="XComs", category="Admin"))
admin.add_link(base.MenuLink(
category='Docs', name='Documentation',
url='http://pythonhosted.org/airflow/'))
admin.add_link(
base.MenuLink(category='Docs',
                          name='Github', url='https://github.com/apache/incubator-airflow'))
av(vs.VersionView(name='Version', category="About"))
av(vs.DagRunModelView(
models.DagRun, Session, name="DAG Runs", category="Browse"))
av(vs.DagModelView(models.DagModel, Session, name=None))
# Hack to not add this view to the menu
admin._menu = admin._menu[:-1]
def integrate_plugins():
"""Integrate plugins to the context"""
from airflow.plugins_manager import (
admin_views, flask_blueprints, menu_links)
for v in admin_views:
logging.debug('Adding view ' + v.name)
admin.add_view(v)
for bp in flask_blueprints:
logging.debug('Adding blueprint ' + bp.name)
app.register_blueprint(bp)
for ml in sorted(menu_links, key=lambda x: x.name):
logging.debug('Adding menu link ' + ml.name)
admin.add_link(ml)
integrate_plugins()
import airflow.www.api.experimental.endpoints as e
    # required for testing purposes; otherwise the module retains
    # a link to the default_auth
if app.config['TESTING']:
if six.PY2:
reload(e)
else:
import importlib
importlib.reload(e)
app.register_blueprint(e.api_experimental, url_prefix='/api/experimental')
@app.context_processor
def jinja_globals():
return {
'hostname': socket.getfqdn(),
}
@app.teardown_appcontext
def shutdown_session(exception=None):
settings.Session.remove()
return app
app = None
def cached_app(config=None):
global app
if not app:
app = create_app(config)
return app
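# Usage sketch (assumes a configured Airflow environment; in production the
# webserver CLI hands this WSGI app to gunicorn rather than running it here):
#
#     app = cached_app()
#     app.run(host='0.0.0.0', port=8080)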
| apache-2.0 |
pytorch/fairseq | fairseq/modules/transformer_sentence_encoder_layer.py | 1 | 4326 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Callable, Optional
import torch
import torch.nn as nn
from fairseq import utils
from fairseq.modules import LayerNorm, MultiheadAttention
from fairseq.modules.fairseq_dropout import FairseqDropout
from fairseq.modules.quant_noise import quant_noise
class TransformerSentenceEncoderLayer(nn.Module):
"""
Implements a Transformer Encoder Layer used in BERT/XLM style pre-trained
models.
"""
def __init__(
self,
embedding_dim: int = 768,
ffn_embedding_dim: int = 3072,
num_attention_heads: int = 8,
dropout: float = 0.1,
attention_dropout: float = 0.1,
activation_dropout: float = 0.1,
activation_fn: str = "relu",
export: bool = False,
q_noise: float = 0.0,
qn_block_size: int = 8,
init_fn: Callable = None,
) -> None:
super().__init__()
if init_fn is not None:
init_fn()
# Initialize parameters
self.embedding_dim = embedding_dim
self.num_attention_heads = num_attention_heads
self.attention_dropout = attention_dropout
self.q_noise = q_noise
self.qn_block_size = qn_block_size
self.dropout_module = FairseqDropout(
dropout, module_name=self.__class__.__name__
)
self.activation_dropout_module = FairseqDropout(
activation_dropout, module_name=self.__class__.__name__
)
# Initialize blocks
self.activation_fn = utils.get_activation_fn(activation_fn)
self.self_attn = self.build_self_attention(
self.embedding_dim,
num_attention_heads,
dropout=attention_dropout,
self_attention=True,
q_noise=q_noise,
qn_block_size=qn_block_size,
)
# layer norm associated with the self attention layer
self.self_attn_layer_norm = LayerNorm(self.embedding_dim, export=export)
self.fc1 = self.build_fc1(
self.embedding_dim,
ffn_embedding_dim,
q_noise=q_noise,
qn_block_size=qn_block_size,
)
self.fc2 = self.build_fc2(
ffn_embedding_dim,
self.embedding_dim,
q_noise=q_noise,
qn_block_size=qn_block_size,
)
# layer norm associated with the position wise feed-forward NN
self.final_layer_norm = LayerNorm(self.embedding_dim, export=export)
def build_fc1(self, input_dim, output_dim, q_noise, qn_block_size):
return quant_noise(nn.Linear(input_dim, output_dim), q_noise, qn_block_size)
def build_fc2(self, input_dim, output_dim, q_noise, qn_block_size):
return quant_noise(nn.Linear(input_dim, output_dim), q_noise, qn_block_size)
def build_self_attention(
self,
embed_dim,
num_attention_heads,
dropout,
self_attention,
q_noise,
qn_block_size,
):
return MultiheadAttention(
embed_dim,
num_attention_heads,
dropout=dropout,
self_attention=True,
q_noise=q_noise,
qn_block_size=qn_block_size,
)
def forward(
self,
x: torch.Tensor,
self_attn_mask: Optional[torch.Tensor] = None,
self_attn_padding_mask: Optional[torch.Tensor] = None,
):
"""
LayerNorm is applied either before or after the self-attention/ffn
modules similar to the original Transformer implementation.
"""
residual = x
x, attn = self.self_attn(
query=x,
key=x,
value=x,
key_padding_mask=self_attn_padding_mask,
need_weights=False,
attn_mask=self_attn_mask,
)
x = self.dropout_module(x)
x = residual + x
x = self.self_attn_layer_norm(x)
residual = x
x = self.activation_fn(self.fc1(x))
x = self.activation_dropout_module(x)
x = self.fc2(x)
x = self.dropout_module(x)
x = residual + x
x = self.final_layer_norm(x)
return x, attn
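# A minimal forward-pass sketch (shapes are assumptions; fairseq encoders feed
# ``x`` in (seq_len, batch, embed_dim) layout):
#
#     layer = TransformerSentenceEncoderLayer(embedding_dim=768,
#                                             ffn_embedding_dim=3072,
#                                             num_attention_heads=8)
#     x = torch.randn(128, 4, 768)
#     out, attn = layer(x)   # attn is None since need_weights=False
#     assert out.shape == x.shape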
| mit |
aseigneurin/ansible-modules-core | cloud/openstack/keystone_user.py | 39 | 12541 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Based on Jimmy Tang's implementation
DOCUMENTATION = '''
---
module: keystone_user
version_added: "1.2"
short_description: Manage OpenStack Identity (keystone) users, tenants and roles
description:
- Manage users,tenants, roles from OpenStack.
options:
login_user:
description:
- login username to authenticate to keystone
required: false
default: admin
login_password:
description:
- Password of login user
required: false
default: 'yes'
login_tenant_name:
description:
- The tenant login_user belongs to
required: false
default: None
version_added: "1.3"
token:
description:
      - The token to be used in case the password is not specified
required: false
default: None
endpoint:
description:
- The keystone url for authentication
required: false
default: 'http://127.0.0.1:35357/v2.0/'
user:
description:
      - The name of the user that has to be added/removed from OpenStack
required: false
default: None
password:
description:
- The password to be assigned to the user
required: false
default: None
tenant:
description:
      - The tenant name that has to be added/removed
required: false
default: None
tenant_description:
description:
- A description for the tenant
required: false
default: None
email:
description:
- An email address for the user
required: false
default: None
role:
description:
- The name of the role to be assigned or created
required: false
default: None
state:
description:
- Indicate desired state of the resource
choices: ['present', 'absent']
default: present
requirements: [ python-keystoneclient ]
author: Lorin Hochstein
'''
EXAMPLES = '''
# Create a tenant
- keystone_user: tenant=demo tenant_description="Default Tenant"
# Create a user
- keystone_user: user=john tenant=demo password=secrete
# Apply the admin role to the john user in the demo tenant
- keystone_user: role=admin user=john tenant=demo
'''
try:
from keystoneclient.v2_0 import client
except ImportError:
keystoneclient_found = False
else:
keystoneclient_found = True
def authenticate(endpoint, token, login_user, login_password, login_tenant_name):
"""Return a keystone client object"""
if token:
return client.Client(endpoint=endpoint, token=token)
else:
return client.Client(auth_url=endpoint, username=login_user,
password=login_password, tenant_name=login_tenant_name)
def tenant_exists(keystone, tenant):
""" Return True if tenant already exists"""
return tenant in [x.name for x in keystone.tenants.list()]
def user_exists(keystone, user):
"""" Return True if user already exists"""
return user in [x.name for x in keystone.users.list()]
def get_tenant(keystone, name):
""" Retrieve a tenant by name"""
tenants = [x for x in keystone.tenants.list() if x.name == name]
count = len(tenants)
if count == 0:
raise KeyError("No keystone tenants with name %s" % name)
elif count > 1:
raise ValueError("%d tenants with name %s" % (count, name))
else:
return tenants[0]
def get_user(keystone, name):
""" Retrieve a user by name"""
users = [x for x in keystone.users.list() if x.name == name]
count = len(users)
if count == 0:
raise KeyError("No keystone users with name %s" % name)
elif count > 1:
raise ValueError("%d users with name %s" % (count, name))
else:
return users[0]
def get_role(keystone, name):
""" Retrieve a role by name"""
roles = [x for x in keystone.roles.list() if x.name == name]
count = len(roles)
if count == 0:
raise KeyError("No keystone roles with name %s" % name)
elif count > 1:
raise ValueError("%d roles with name %s" % (count, name))
else:
return roles[0]
def get_tenant_id(keystone, name):
return get_tenant(keystone, name).id
def get_user_id(keystone, name):
return get_user(keystone, name).id
def ensure_tenant_exists(keystone, tenant_name, tenant_description,
check_mode):
""" Ensure that a tenant exists.
Return (True, id) if a new tenant was created, (False, None) if it
already existed.
"""
# Check if tenant already exists
try:
tenant = get_tenant(keystone, tenant_name)
except KeyError:
# Tenant doesn't exist yet
pass
else:
if tenant.description == tenant_description:
return (False, tenant.id)
else:
# We need to update the tenant description
if check_mode:
return (True, tenant.id)
else:
tenant.update(description=tenant_description)
return (True, tenant.id)
# We now know we will have to create a new tenant
if check_mode:
return (True, None)
ks_tenant = keystone.tenants.create(tenant_name=tenant_name,
description=tenant_description,
enabled=True)
return (True, ks_tenant.id)
def ensure_tenant_absent(keystone, tenant, check_mode):
""" Ensure that a tenant does not exist
Return True if the tenant was removed, False if it didn't exist
in the first place
"""
    if not tenant_exists(keystone, tenant):
        return False
    # We now know we will have to delete the tenant
    if check_mode:
        return True
    # Delete the tenant (keystoneclient v2.0 tenant manager call), so the
    # function honors its docstring instead of falling through with None.
    keystone.tenants.delete(get_tenant(keystone, tenant))
    return True
def ensure_user_exists(keystone, user_name, password, email, tenant_name,
check_mode):
""" Check if user exists
    Return (True, id) if a new user was created, (False, id) if the user
    already exists
"""
    # Check if user already exists
try:
user = get_user(keystone, user_name)
except KeyError:
        # User doesn't exist yet
pass
else:
# User does exist, we're done
return (False, user.id)
# We now know we will have to create a new user
if check_mode:
return (True, None)
tenant = get_tenant(keystone, tenant_name)
user = keystone.users.create(name=user_name, password=password,
email=email, tenant_id=tenant.id)
return (True, user.id)
def ensure_role_exists(keystone, user_name, tenant_name, role_name,
check_mode):
""" Check if role exists
Return (True, id) if a new role was created or if the role was newly
assigned to the user for the tenant. (False, id) if the role already
    exists and was already assigned to the user for the tenant.
"""
# Check if the user has the role in the tenant
user = get_user(keystone, user_name)
tenant = get_tenant(keystone, tenant_name)
roles = [x for x in keystone.roles.roles_for_user(user, tenant)
if x.name == role_name]
count = len(roles)
if count == 1:
# If the role is in there, we are done
role = roles[0]
return (False, role.id)
elif count > 1:
# Too many roles with the same name, throw an error
raise ValueError("%d roles with name %s" % (count, role_name))
# At this point, we know we will need to make changes
if check_mode:
return (True, None)
# Get the role if it exists
try:
role = get_role(keystone, role_name)
except KeyError:
# Role doesn't exist yet
role = keystone.roles.create(role_name)
# Associate the role with the user in the admin
keystone.roles.add_user_role(user, role, tenant)
return (True, role.id)
def ensure_user_absent(keystone, user, check_mode):
raise NotImplementedError("Not yet implemented")
def ensure_role_absent(keystone, user, tenant, role, check_mode):
raise NotImplementedError("Not yet implemented")
def main():
argument_spec = openstack_argument_spec()
argument_spec.update(dict(
tenant_description=dict(required=False),
email=dict(required=False),
user=dict(required=False),
tenant=dict(required=False),
password=dict(required=False),
role=dict(required=False),
state=dict(default='present', choices=['present', 'absent']),
endpoint=dict(required=False,
default="http://127.0.0.1:35357/v2.0"),
token=dict(required=False),
login_user=dict(required=False),
login_password=dict(required=False),
login_tenant_name=dict(required=False)
))
# keystone operations themselves take an endpoint, not a keystone auth_url
del(argument_spec['auth_url'])
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
mutually_exclusive=[['token', 'login_user'],
['token', 'login_password'],
['token', 'login_tenant_name']]
)
if not keystoneclient_found:
module.fail_json(msg="the python-keystoneclient module is required")
user = module.params['user']
password = module.params['password']
tenant = module.params['tenant']
tenant_description = module.params['tenant_description']
email = module.params['email']
role = module.params['role']
state = module.params['state']
endpoint = module.params['endpoint']
token = module.params['token']
login_user = module.params['login_user']
login_password = module.params['login_password']
login_tenant_name = module.params['login_tenant_name']
keystone = authenticate(endpoint, token, login_user, login_password, login_tenant_name)
check_mode = module.check_mode
try:
d = dispatch(keystone, user, password, tenant, tenant_description,
email, role, state, endpoint, token, login_user,
login_password, check_mode)
except Exception, e:
if check_mode:
# If we have a failure in check mode
module.exit_json(changed=True,
msg="exception: %s" % e)
else:
module.fail_json(msg="exception: %s" % e)
else:
module.exit_json(**d)
def dispatch(keystone, user=None, password=None, tenant=None,
tenant_description=None, email=None, role=None,
state="present", endpoint=None, token=None, login_user=None,
login_password=None, check_mode=False):
""" Dispatch to the appropriate method.
Returns a dict that will be passed to exit_json
tenant user role state
------ ---- ---- --------
X present ensure_tenant_exists
X absent ensure_tenant_absent
X X present ensure_user_exists
X X absent ensure_user_absent
X X X present ensure_role_exists
X X X absent ensure_role_absent
"""
changed = False
id = None
if tenant and not user and not role and state == "present":
changed, id = ensure_tenant_exists(keystone, tenant,
tenant_description, check_mode)
elif tenant and not user and not role and state == "absent":
changed = ensure_tenant_absent(keystone, tenant, check_mode)
elif tenant and user and not role and state == "present":
changed, id = ensure_user_exists(keystone, user, password,
email, tenant, check_mode)
elif tenant and user and not role and state == "absent":
changed = ensure_user_absent(keystone, user, check_mode)
elif tenant and user and role and state == "present":
changed, id = ensure_role_exists(keystone, user, tenant, role,
check_mode)
elif tenant and user and role and state == "absent":
changed = ensure_role_absent(keystone, user, tenant, role, check_mode)
else:
# Should never reach here
raise ValueError("Code should never reach here")
return dict(changed=changed, id=id)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
| gpl-3.0 |
TheSimoms/Felleshoelet | spotifyconnector/venv/lib/python3.6/site-packages/pydub/playback.py | 1 | 1790 | """
Support for playing AudioSegments. Pyaudio will be used if it's installed,
otherwise will fallback to ffplay. Pyaudio is a *much* nicer solution, but
is tricky to install. See my notes on installing pyaudio in a virtualenv (on
OSX 10.10): https://gist.github.com/jiaaro/9767512210a1d80a8a0d
"""
import subprocess
from tempfile import NamedTemporaryFile
from .utils import get_player_name, make_chunks
PLAYER = get_player_name()
def _play_with_ffplay(seg):
with NamedTemporaryFile("w+b", suffix=".wav") as f:
seg.export(f.name, "wav")
subprocess.call([PLAYER, "-nodisp", "-autoexit", "-hide_banner", f.name])
def _play_with_pyaudio(seg):
import pyaudio
p = pyaudio.PyAudio()
stream = p.open(format=p.get_format_from_width(seg.sample_width),
channels=seg.channels,
rate=seg.frame_rate,
output=True)
# break audio into half-second chunks (to allows keyboard interrupts)
for chunk in make_chunks(seg, 500):
stream.write(chunk._data)
stream.stop_stream()
stream.close()
p.terminate()
def _play_with_simpleaudio(seg):
import simpleaudio
return simpleaudio.play_buffer(
seg.raw_data,
num_channels=seg.channels,
bytes_per_sample=seg.sample_width,
sample_rate=seg.frame_rate
)
def play(audio_segment):
try:
playback = _play_with_simpleaudio(audio_segment)
try:
playback.wait_done()
except KeyboardInterrupt:
playback.stop()
except ImportError:
pass
else:
return
try:
_play_with_pyaudio(audio_segment)
return
except ImportError:
pass
_play_with_ffplay(audio_segment)
| gpl-2.0 |
glwu/python-for-android | python3-alpha/extra_modules/gdata/webmastertools/__init__.py | 128 | 17900 | #!/usr/bin/python
#
# Copyright (C) 2008 Yu-Jie Lin
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains extensions to Atom objects used with Google Webmaster Tools."""
__author__ = 'livibetter (Yu-Jie Lin)'
try:
from xml.etree import cElementTree as ElementTree
except ImportError:
try:
import cElementTree as ElementTree
except ImportError:
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
import atom
import gdata
# XML namespaces which are often used in Google Webmaster Tools entities.
GWEBMASTERTOOLS_NAMESPACE = 'http://schemas.google.com/webmasters/tools/2007'
GWEBMASTERTOOLS_TEMPLATE = '{http://schemas.google.com/webmasters/tools/2007}%s'
class Indexed(atom.AtomBase):
_tag = 'indexed'
_namespace = GWEBMASTERTOOLS_NAMESPACE
def IndexedFromString(xml_string):
return atom.CreateClassFromXMLString(Indexed, xml_string)
class Crawled(atom.Date):
_tag = 'crawled'
_namespace = GWEBMASTERTOOLS_NAMESPACE
def CrawledFromString(xml_string):
return atom.CreateClassFromXMLString(Crawled, xml_string)
class GeoLocation(atom.AtomBase):
_tag = 'geolocation'
_namespace = GWEBMASTERTOOLS_NAMESPACE
def GeoLocationFromString(xml_string):
return atom.CreateClassFromXMLString(GeoLocation, xml_string)
class PreferredDomain(atom.AtomBase):
_tag = 'preferred-domain'
_namespace = GWEBMASTERTOOLS_NAMESPACE
def PreferredDomainFromString(xml_string):
return atom.CreateClassFromXMLString(PreferredDomain, xml_string)
class CrawlRate(atom.AtomBase):
_tag = 'crawl-rate'
_namespace = GWEBMASTERTOOLS_NAMESPACE
def CrawlRateFromString(xml_string):
return atom.CreateClassFromXMLString(CrawlRate, xml_string)
class EnhancedImageSearch(atom.AtomBase):
_tag = 'enhanced-image-search'
_namespace = GWEBMASTERTOOLS_NAMESPACE
def EnhancedImageSearchFromString(xml_string):
return atom.CreateClassFromXMLString(EnhancedImageSearch, xml_string)
class Verified(atom.AtomBase):
_tag = 'verified'
_namespace = GWEBMASTERTOOLS_NAMESPACE
def VerifiedFromString(xml_string):
return atom.CreateClassFromXMLString(Verified, xml_string)
class VerificationMethodMeta(atom.AtomBase):
_tag = 'meta'
_namespace = atom.ATOM_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
_attributes['name'] = 'name'
_attributes['content'] = 'content'
def __init__(self, text=None, name=None, content=None,
extension_elements=None, extension_attributes=None):
self.text = text
self.name = name
self.content = content
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def VerificationMethodMetaFromString(xml_string):
return atom.CreateClassFromXMLString(VerificationMethodMeta, xml_string)
class VerificationMethod(atom.AtomBase):
_tag = 'verification-method'
_namespace = GWEBMASTERTOOLS_NAMESPACE
_children = atom.Text._children.copy()
_attributes = atom.Text._attributes.copy()
_children['{%s}meta' % atom.ATOM_NAMESPACE] = (
'meta', VerificationMethodMeta)
_attributes['in-use'] = 'in_use'
_attributes['type'] = 'type'
def __init__(self, text=None, in_use=None, meta=None, type=None,
extension_elements=None, extension_attributes=None):
self.text = text
self.in_use = in_use
self.meta = meta
self.type = type
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def VerificationMethodFromString(xml_string):
return atom.CreateClassFromXMLString(VerificationMethod, xml_string)
class MarkupLanguage(atom.AtomBase):
_tag = 'markup-language'
_namespace = GWEBMASTERTOOLS_NAMESPACE
def MarkupLanguageFromString(xml_string):
return atom.CreateClassFromXMLString(MarkupLanguage, xml_string)
class SitemapMobile(atom.AtomBase):
_tag = 'sitemap-mobile'
_namespace = GWEBMASTERTOOLS_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
_children['{%s}markup-language' % GWEBMASTERTOOLS_NAMESPACE] = (
'markup_language', [MarkupLanguage])
def __init__(self, markup_language=None,
extension_elements=None, extension_attributes=None, text=None):
self.markup_language = markup_language or []
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def SitemapMobileFromString(xml_string):
return atom.CreateClassFromXMLString(SitemapMobile, xml_string)
class SitemapMobileMarkupLanguage(atom.AtomBase):
_tag = 'sitemap-mobile-markup-language'
_namespace = GWEBMASTERTOOLS_NAMESPACE
def SitemapMobileMarkupLanguageFromString(xml_string):
return atom.CreateClassFromXMLString(SitemapMobileMarkupLanguage, xml_string)
class PublicationLabel(atom.AtomBase):
_tag = 'publication-label'
_namespace = GWEBMASTERTOOLS_NAMESPACE
def PublicationLabelFromString(xml_string):
return atom.CreateClassFromXMLString(PublicationLabel, xml_string)
class SitemapNews(atom.AtomBase):
_tag = 'sitemap-news'
_namespace = GWEBMASTERTOOLS_NAMESPACE
_children = atom.AtomBase._children.copy()
_attributes = atom.AtomBase._attributes.copy()
_children['{%s}publication-label' % GWEBMASTERTOOLS_NAMESPACE] = (
'publication_label', [PublicationLabel])
def __init__(self, publication_label=None,
extension_elements=None, extension_attributes=None, text=None):
self.publication_label = publication_label or []
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def SitemapNewsFromString(xml_string):
return atom.CreateClassFromXMLString(SitemapNews, xml_string)
class SitemapNewsPublicationLabel(atom.AtomBase):
_tag = 'sitemap-news-publication-label'
_namespace = GWEBMASTERTOOLS_NAMESPACE
def SitemapNewsPublicationLabelFromString(xml_string):
return atom.CreateClassFromXMLString(SitemapNewsPublicationLabel, xml_string)
class SitemapLastDownloaded(atom.Date):
_tag = 'sitemap-last-downloaded'
_namespace = GWEBMASTERTOOLS_NAMESPACE
def SitemapLastDownloadedFromString(xml_string):
return atom.CreateClassFromXMLString(SitemapLastDownloaded, xml_string)
class SitemapType(atom.AtomBase):
_tag = 'sitemap-type'
_namespace = GWEBMASTERTOOLS_NAMESPACE
def SitemapTypeFromString(xml_string):
return atom.CreateClassFromXMLString(SitemapType, xml_string)
class SitemapStatus(atom.AtomBase):
_tag = 'sitemap-status'
_namespace = GWEBMASTERTOOLS_NAMESPACE
def SitemapStatusFromString(xml_string):
return atom.CreateClassFromXMLString(SitemapStatus, xml_string)
class SitemapUrlCount(atom.AtomBase):
_tag = 'sitemap-url-count'
_namespace = GWEBMASTERTOOLS_NAMESPACE
def SitemapUrlCountFromString(xml_string):
return atom.CreateClassFromXMLString(SitemapUrlCount, xml_string)
class LinkFinder(atom.LinkFinder):
"""An "interface" providing methods to find link elements
SitesEntry elements often contain multiple links which differ in the rel
attribute or content type. Often, developers are interested in a specific
type of link so this class provides methods to find specific classes of links.
This class is used as a mixin in SitesEntry.
"""
def GetSelfLink(self):
"""Find the first link with rel set to 'self'
Returns:
An atom.Link or none if none of the links had rel equal to 'self'
"""
for a_link in self.link:
if a_link.rel == 'self':
return a_link
return None
def GetEditLink(self):
for a_link in self.link:
if a_link.rel == 'edit':
return a_link
return None
def GetPostLink(self):
"""Get a link containing the POST target URL.
The POST target URL is used to insert new entries.
Returns:
A link object with a rel matching the POST type.
"""
for a_link in self.link:
if a_link.rel == 'http://schemas.google.com/g/2005#post':
return a_link
return None
def GetFeedLink(self):
for a_link in self.link:
if a_link.rel == 'http://schemas.google.com/g/2005#feed':
return a_link
return None
class SitesEntry(atom.Entry, LinkFinder):
"""A Google Webmaster Tools meta Entry flavor of an Atom Entry """
_tag = atom.Entry._tag
_namespace = atom.Entry._namespace
_children = atom.Entry._children.copy()
_attributes = atom.Entry._attributes.copy()
_children['{%s}entryLink' % gdata.GDATA_NAMESPACE] = (
'entry_link', [gdata.EntryLink])
_children['{%s}indexed' % GWEBMASTERTOOLS_NAMESPACE] = ('indexed', Indexed)
_children['{%s}crawled' % GWEBMASTERTOOLS_NAMESPACE] = (
'crawled', Crawled)
_children['{%s}geolocation' % GWEBMASTERTOOLS_NAMESPACE] = (
'geolocation', GeoLocation)
_children['{%s}preferred-domain' % GWEBMASTERTOOLS_NAMESPACE] = (
'preferred_domain', PreferredDomain)
_children['{%s}crawl-rate' % GWEBMASTERTOOLS_NAMESPACE] = (
'crawl_rate', CrawlRate)
_children['{%s}enhanced-image-search' % GWEBMASTERTOOLS_NAMESPACE] = (
'enhanced_image_search', EnhancedImageSearch)
_children['{%s}verified' % GWEBMASTERTOOLS_NAMESPACE] = (
'verified', Verified)
_children['{%s}verification-method' % GWEBMASTERTOOLS_NAMESPACE] = (
'verification_method', [VerificationMethod])
def __GetId(self):
return self.__id
# This method was created to strip the unwanted whitespace from the id's
# text node.
def __SetId(self, id):
self.__id = id
if id is not None and id.text is not None:
self.__id.text = id.text.strip()
id = property(__GetId, __SetId)
def __init__(self, category=None, content=None,
atom_id=None, link=None, title=None, updated=None,
entry_link=None, indexed=None, crawled=None,
geolocation=None, preferred_domain=None, crawl_rate=None,
enhanced_image_search=None,
verified=None, verification_method=None,
extension_elements=None, extension_attributes=None, text=None):
atom.Entry.__init__(self, category=category,
content=content, atom_id=atom_id, link=link,
title=title, updated=updated, text=text)
self.entry_link = entry_link or []
self.indexed = indexed
self.crawled = crawled
self.geolocation = geolocation
self.preferred_domain = preferred_domain
self.crawl_rate = crawl_rate
self.enhanced_image_search = enhanced_image_search
self.verified = verified
self.verification_method = verification_method or []
def SitesEntryFromString(xml_string):
return atom.CreateClassFromXMLString(SitesEntry, xml_string)
class SitesFeed(atom.Feed, LinkFinder):
"""A Google Webmaster Tools meta Sites feed flavor of an Atom Feed"""
_tag = atom.Feed._tag
_namespace = atom.Feed._namespace
_children = atom.Feed._children.copy()
_attributes = atom.Feed._attributes.copy()
_children['{%s}startIndex' % gdata.OPENSEARCH_NAMESPACE] = (
'start_index', gdata.StartIndex)
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [SitesEntry])
del _children['{%s}generator' % atom.ATOM_NAMESPACE]
del _children['{%s}author' % atom.ATOM_NAMESPACE]
del _children['{%s}contributor' % atom.ATOM_NAMESPACE]
del _children['{%s}logo' % atom.ATOM_NAMESPACE]
del _children['{%s}icon' % atom.ATOM_NAMESPACE]
del _children['{%s}rights' % atom.ATOM_NAMESPACE]
del _children['{%s}subtitle' % atom.ATOM_NAMESPACE]
def __GetId(self):
return self.__id
def __SetId(self, id):
self.__id = id
if id is not None and id.text is not None:
self.__id.text = id.text.strip()
id = property(__GetId, __SetId)
def __init__(self, start_index=None, atom_id=None, title=None, entry=None,
category=None, link=None, updated=None,
extension_elements=None, extension_attributes=None, text=None):
"""Constructor for Source
Args:
category: list (optional) A list of Category instances
id: Id (optional) The entry's Id element
link: list (optional) A list of Link instances
title: Title (optional) the entry's title element
updated: Updated (optional) the entry's updated element
entry: list (optional) A list of the Entry instances contained in the
feed.
text: String (optional) The text contents of the element. This is the
contents of the Entry's XML text node.
(Example: <foo>This is the text</foo>)
extension_elements: list (optional) A list of ExtensionElement instances
which are children of this element.
extension_attributes: dict (optional) A dictionary of strings which are
the values for additional XML attributes of this element.
"""
self.start_index = start_index
self.category = category or []
self.id = atom_id
self.link = link or []
self.title = title
self.updated = updated
self.entry = entry or []
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def SitesFeedFromString(xml_string):
return atom.CreateClassFromXMLString(SitesFeed, xml_string)
class SitemapsEntry(atom.Entry, LinkFinder):
"""A Google Webmaster Tools meta Sitemaps Entry flavor of an Atom Entry """
_tag = atom.Entry._tag
_namespace = atom.Entry._namespace
_children = atom.Entry._children.copy()
_attributes = atom.Entry._attributes.copy()
_children['{%s}sitemap-type' % GWEBMASTERTOOLS_NAMESPACE] = (
'sitemap_type', SitemapType)
_children['{%s}sitemap-status' % GWEBMASTERTOOLS_NAMESPACE] = (
'sitemap_status', SitemapStatus)
_children['{%s}sitemap-last-downloaded' % GWEBMASTERTOOLS_NAMESPACE] = (
'sitemap_last_downloaded', SitemapLastDownloaded)
_children['{%s}sitemap-url-count' % GWEBMASTERTOOLS_NAMESPACE] = (
'sitemap_url_count', SitemapUrlCount)
_children['{%s}sitemap-mobile-markup-language' % GWEBMASTERTOOLS_NAMESPACE] \
= ('sitemap_mobile_markup_language', SitemapMobileMarkupLanguage)
_children['{%s}sitemap-news-publication-label' % GWEBMASTERTOOLS_NAMESPACE] \
= ('sitemap_news_publication_label', SitemapNewsPublicationLabel)
def __GetId(self):
return self.__id
# This method was created to strip the unwanted whitespace from the id's
# text node.
def __SetId(self, id):
self.__id = id
if id is not None and id.text is not None:
self.__id.text = id.text.strip()
id = property(__GetId, __SetId)
def __init__(self, category=None, content=None,
atom_id=None, link=None, title=None, updated=None,
sitemap_type=None, sitemap_status=None, sitemap_last_downloaded=None,
sitemap_url_count=None, sitemap_mobile_markup_language=None,
sitemap_news_publication_label=None,
extension_elements=None, extension_attributes=None, text=None):
atom.Entry.__init__(self, category=category,
content=content, atom_id=atom_id, link=link,
title=title, updated=updated, text=text)
self.sitemap_type = sitemap_type
self.sitemap_status = sitemap_status
self.sitemap_last_downloaded = sitemap_last_downloaded
self.sitemap_url_count = sitemap_url_count
self.sitemap_mobile_markup_language = sitemap_mobile_markup_language
self.sitemap_news_publication_label = sitemap_news_publication_label
def SitemapsEntryFromString(xml_string):
return atom.CreateClassFromXMLString(SitemapsEntry, xml_string)
class SitemapsFeed(atom.Feed, LinkFinder):
"""A Google Webmaster Tools meta Sitemaps feed flavor of an Atom Feed"""
_tag = atom.Feed._tag
_namespace = atom.Feed._namespace
_children = atom.Feed._children.copy()
_attributes = atom.Feed._attributes.copy()
_children['{%s}entry' % atom.ATOM_NAMESPACE] = ('entry', [SitemapsEntry])
_children['{%s}sitemap-mobile' % GWEBMASTERTOOLS_NAMESPACE] = (
'sitemap_mobile', SitemapMobile)
_children['{%s}sitemap-news' % GWEBMASTERTOOLS_NAMESPACE] = (
'sitemap_news', SitemapNews)
del _children['{%s}generator' % atom.ATOM_NAMESPACE]
del _children['{%s}author' % atom.ATOM_NAMESPACE]
del _children['{%s}contributor' % atom.ATOM_NAMESPACE]
del _children['{%s}logo' % atom.ATOM_NAMESPACE]
del _children['{%s}icon' % atom.ATOM_NAMESPACE]
del _children['{%s}rights' % atom.ATOM_NAMESPACE]
del _children['{%s}subtitle' % atom.ATOM_NAMESPACE]
def __GetId(self):
return self.__id
def __SetId(self, id):
self.__id = id
if id is not None and id.text is not None:
self.__id.text = id.text.strip()
id = property(__GetId, __SetId)
def __init__(self, category=None, content=None,
atom_id=None, link=None, title=None, updated=None,
entry=None, sitemap_mobile=None, sitemap_news=None,
extension_elements=None, extension_attributes=None, text=None):
self.category = category or []
self.id = atom_id
self.link = link or []
self.title = title
self.updated = updated
self.entry = entry or []
self.text = text
self.sitemap_mobile = sitemap_mobile
self.sitemap_news = sitemap_news
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
def SitemapsFeedFromString(xml_string):
return atom.CreateClassFromXMLString(SitemapsFeed, xml_string)
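# Usage sketch -- the XML below is a hand-written illustration, not real
# Webmaster Tools output; element names follow the mappings declared above:
#
#   entry = SitesEntryFromString(
#       '<entry xmlns="http://www.w3.org/2005/Atom" '
#       'xmlns:wt="http://schemas.google.com/webmasters/tools/2007">'
#       '<wt:indexed>true</wt:indexed></entry>')
#   entry.indexed.text  # -> 'true'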
| apache-2.0 |
rlindner81/pyload | module/plugins/accounts/MyfastfileCom.py | 1 | 1806 | # -*- coding: utf-8 -*-
import time
from module.plugins.internal.misc import json
from module.plugins.internal.MultiAccount import MultiAccount
class MyfastfileCom(MultiAccount):
__name__ = "MyfastfileCom"
__type__ = "account"
__version__ = "0.10"
__status__ = "testing"
__config__ = [("mh_mode", "all;listed;unlisted", "Filter hosters to use", "all"),
("mh_list", "str", "Hoster list (comma separated)", ""),
("mh_interval", "int", "Reload interval in minutes", 60)]
__description__ = """Myfastfile.com account plugin"""
__license__ = "GPLv3"
__authors__ = [("stickell", "[email protected]")]
def grab_hosters(self, user, password, data):
json_data = self.load(
"http://myfastfile.com/api.php",
get={
'hosts': ""})
self.log_debug("JSON data", json_data)
json_data = json.loads(json_data)
return json_data['hosts']
def grab_info(self, user, password, data):
if 'days_left' in self.json_data:
validuntil = time.time() + \
self.json_data['days_left'] * 24 * 60 * 60
return {'premium': True, 'validuntil': validuntil, 'trafficleft': -1}
else:
self.log_error(_("Unable to get account information"))
def signin(self, user, password, data):
#: Password to use is the API-Password written in http://myfastfile.com/myaccount
html = self.load("https://myfastfile.com/api.php",
get={'user': user,
'pass': password})
self.log_debug("JSON data: " + html)
self.json_data = json.loads(html)
if self.json_data['status'] != 'ok':
self.fail_login(_("Invalid username or password"))
| gpl-3.0 |
martinhoaragao/hour-of-code | node_modules/node-gyp/gyp/pylib/gyp/easy_xml_test.py | 2698 | 3270 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
""" Unit tests for the easy_xml.py file. """
import gyp.easy_xml as easy_xml
import unittest
import StringIO
class TestSequenceFunctions(unittest.TestCase):
def setUp(self):
self.stderr = StringIO.StringIO()
def test_EasyXml_simple(self):
self.assertEqual(
easy_xml.XmlToString(['test']),
'<?xml version="1.0" encoding="utf-8"?><test/>')
self.assertEqual(
easy_xml.XmlToString(['test'], encoding='Windows-1252'),
'<?xml version="1.0" encoding="Windows-1252"?><test/>')
def test_EasyXml_simple_with_attributes(self):
self.assertEqual(
easy_xml.XmlToString(['test2', {'a': 'value1', 'b': 'value2'}]),
'<?xml version="1.0" encoding="utf-8"?><test2 a="value1" b="value2"/>')
def test_EasyXml_escaping(self):
original = '<test>\'"\r&\nfoo'
    converted = '&lt;test&gt;\'"&#xD;&amp;&#xA;foo'
    converted_apos = converted.replace("'", '&apos;')
self.assertEqual(
easy_xml.XmlToString(['test3', {'a': original}, original]),
'<?xml version="1.0" encoding="utf-8"?><test3 a="%s">%s</test3>' %
(converted, converted_apos))
def test_EasyXml_pretty(self):
self.assertEqual(
easy_xml.XmlToString(
['test3',
['GrandParent',
['Parent1',
['Child']
],
['Parent2']
]
],
pretty=True),
'<?xml version="1.0" encoding="utf-8"?>\n'
'<test3>\n'
' <GrandParent>\n'
' <Parent1>\n'
' <Child/>\n'
' </Parent1>\n'
' <Parent2/>\n'
' </GrandParent>\n'
'</test3>\n')
def test_EasyXml_complex(self):
# We want to create:
target = (
'<?xml version="1.0" encoding="utf-8"?>'
'<Project>'
'<PropertyGroup Label="Globals">'
'<ProjectGuid>{D2250C20-3A94-4FB9-AF73-11BC5B73884B}</ProjectGuid>'
'<Keyword>Win32Proj</Keyword>'
'<RootNamespace>automated_ui_tests</RootNamespace>'
'</PropertyGroup>'
'<Import Project="$(VCTargetsPath)\\Microsoft.Cpp.props"/>'
'<PropertyGroup '
'Condition="\'$(Configuration)|$(Platform)\'=='
'\'Debug|Win32\'" Label="Configuration">'
'<ConfigurationType>Application</ConfigurationType>'
'<CharacterSet>Unicode</CharacterSet>'
'</PropertyGroup>'
'</Project>')
xml = easy_xml.XmlToString(
['Project',
['PropertyGroup', {'Label': 'Globals'},
['ProjectGuid', '{D2250C20-3A94-4FB9-AF73-11BC5B73884B}'],
['Keyword', 'Win32Proj'],
['RootNamespace', 'automated_ui_tests']
],
['Import', {'Project': '$(VCTargetsPath)\\Microsoft.Cpp.props'}],
['PropertyGroup',
{'Condition': "'$(Configuration)|$(Platform)'=='Debug|Win32'",
'Label': 'Configuration'},
['ConfigurationType', 'Application'],
['CharacterSet', 'Unicode']
]
])
self.assertEqual(xml, target)
if __name__ == '__main__':
unittest.main()
| mit |
waheedahmed/edx-platform | common/lib/xmodule/xmodule/capa_module.py | 3 | 10916 | """Implements basics of Capa, including class CapaModule."""
import json
import logging
import sys
import re
from lxml import etree
from pkg_resources import resource_string
import dogstats_wrapper as dog_stats_api
from .capa_base import CapaMixin, CapaFields, ComplexEncoder
from capa import responsetypes
from .progress import Progress
from xmodule.util.misc import escape_html_characters
from xmodule.x_module import XModule, module_attr, DEPRECATION_VSCOMPAT_EVENT
from xmodule.raw_module import RawDescriptor
from xmodule.exceptions import NotFoundError, ProcessingError
log = logging.getLogger("edx.courseware")
class CapaModule(CapaMixin, XModule):
"""
An XModule implementing LonCapa format problems, implemented by way of
capa.capa_problem.LoncapaProblem
CapaModule.__init__ takes the same arguments as xmodule.x_module:XModule.__init__
"""
icon_class = 'problem'
js = {
'coffee': [
resource_string(__name__, 'js/src/capa/display.coffee'),
resource_string(__name__, 'js/src/javascript_loader.coffee'),
],
'js': [
resource_string(__name__, 'js/src/collapsible.js'),
resource_string(__name__, 'js/src/capa/imageinput.js'),
resource_string(__name__, 'js/src/capa/schematic.js'),
]
}
js_module_name = "Problem"
css = {'scss': [resource_string(__name__, 'css/capa/display.scss')]}
def __init__(self, *args, **kwargs):
"""
Accepts the same arguments as xmodule.x_module:XModule.__init__
"""
super(CapaModule, self).__init__(*args, **kwargs)
def author_view(self, context):
"""
Renders the Studio preview view.
"""
return self.student_view(context)
def handle_ajax(self, dispatch, data):
"""
This is called by courseware.module_render, to handle an AJAX call.
`data` is request.POST.
Returns a json dictionary:
{ 'progress_changed' : True/False,
'progress' : 'none'/'in_progress'/'done',
<other request-specific values here > }
"""
handlers = {
'hint_button': self.hint_button,
'problem_get': self.get_problem,
'problem_check': self.check_problem,
'problem_reset': self.reset_problem,
'problem_save': self.save_problem,
'problem_show': self.get_answer,
'score_update': self.update_score,
'input_ajax': self.handle_input_ajax,
'ungraded_response': self.handle_ungraded_response
}
_ = self.runtime.service(self, "i18n").ugettext
generic_error_message = _(
"We're sorry, there was an error with processing your request. "
"Please try reloading your page and trying again."
)
not_found_error_message = _(
"The state of this problem has changed since you loaded this page. "
"Please refresh your page."
)
if dispatch not in handlers:
return 'Error: {} is not a known capa action'.format(dispatch)
before = self.get_progress()
try:
result = handlers[dispatch](data)
except NotFoundError as err:
log.exception(
"Unable to find data when dispatching %s to %s for user %s",
dispatch,
self.scope_ids.usage_id,
self.scope_ids.user_id
)
_, _, traceback_obj = sys.exc_info() # pylint: disable=redefined-outer-name
raise ProcessingError(not_found_error_message), None, traceback_obj
except Exception as err:
log.exception(
"Unknown error when dispatching %s to %s for user %s",
dispatch,
self.scope_ids.usage_id,
self.scope_ids.user_id
)
_, _, traceback_obj = sys.exc_info() # pylint: disable=redefined-outer-name
raise ProcessingError(generic_error_message), None, traceback_obj
after = self.get_progress()
result.update({
'progress_changed': after != before,
'progress_status': Progress.to_js_status_str(after),
'progress_detail': Progress.to_js_detail_str(after),
})
return json.dumps(result, cls=ComplexEncoder)
@property
def display_name_with_default(self):
"""
Constructs the display name for a CAPA problem.
Default to the display_name if it isn't None or not an empty string,
else fall back to problem category.
"""
if self.display_name is None or not self.display_name.strip():
return self.location.block_type
return self.display_name
class CapaDescriptor(CapaFields, RawDescriptor):
"""
Module implementing problems in the LON-CAPA format,
as implemented by capa.capa_problem
"""
INDEX_CONTENT_TYPE = 'CAPA'
module_class = CapaModule
resources_dir = None
has_score = True
show_in_read_only_mode = True
template_dir_name = 'problem'
mako_template = "widgets/problem-edit.html"
js = {'coffee': [resource_string(__name__, 'js/src/problem/edit.coffee')]}
js_module_name = "MarkdownEditingDescriptor"
has_author_view = True
css = {
'scss': [
resource_string(__name__, 'css/editor/edit.scss'),
resource_string(__name__, 'css/problem/edit.scss')
]
}
# The capa format specifies that what we call max_attempts in the code
# is the attribute `attempts`. This will do that conversion
metadata_translations = dict(RawDescriptor.metadata_translations)
metadata_translations['attempts'] = 'max_attempts'
@classmethod
def filter_templates(cls, template, course):
"""
Filter template that contains 'latex' from templates.
Show them only if use_latex_compiler is set to True in
course settings.
"""
return 'latex' not in template['template_id'] or course.use_latex_compiler
def get_context(self):
_context = RawDescriptor.get_context(self)
_context.update({
'markdown': self.markdown,
'enable_markdown': self.markdown is not None,
'enable_latex_compiler': self.use_latex_compiler,
})
return _context
# VS[compat]
# TODO (cpennington): Delete this method once all fall 2012 course are being
# edited in the cms
@classmethod
def backcompat_paths(cls, path):
dog_stats_api.increment(
DEPRECATION_VSCOMPAT_EVENT,
tags=["location:capa_descriptor_backcompat_paths"]
)
return [
'problems/' + path[8:],
path[8:],
]
@property
def non_editable_metadata_fields(self):
non_editable_fields = super(CapaDescriptor, self).non_editable_metadata_fields
non_editable_fields.extend([
CapaDescriptor.due,
CapaDescriptor.graceperiod,
CapaDescriptor.force_save_button,
CapaDescriptor.markdown,
CapaDescriptor.text_customization,
CapaDescriptor.use_latex_compiler,
])
return non_editable_fields
@property
def problem_types(self):
""" Low-level problem type introspection for content libraries filtering by problem type """
tree = etree.XML(self.data)
registered_tags = responsetypes.registry.registered_tags()
return set([node.tag for node in tree.iter() if node.tag in registered_tags])
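    # For example, a problem whose XML contains <multiplechoiceresponse>
    # and <numericalresponse> yields a set of those two tag names (tags
    # shown here are illustrative of registered capa response types).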
def index_dictionary(self):
"""
Return dictionary prepared with module content and type for indexing.
"""
xblock_body = super(CapaDescriptor, self).index_dictionary()
# Removing solutions and hints, as well as script and style
capa_content = re.sub(
re.compile(
r"""
<solution>.*?</solution> |
<script>.*?</script> |
<style>.*?</style> |
<[a-z]*hint.*?>.*?</[a-z]*hint>
""",
re.DOTALL |
re.VERBOSE),
"",
self.data
)
capa_content = escape_html_characters(capa_content)
capa_body = {
"capa_content": capa_content,
"display_name": self.display_name,
}
if "content" in xblock_body:
xblock_body["content"].update(capa_body)
else:
xblock_body["content"] = capa_body
xblock_body["content_type"] = self.INDEX_CONTENT_TYPE
xblock_body["problem_types"] = list(self.problem_types)
return xblock_body
def has_support(self, view, functionality):
"""
Override the XBlock.has_support method to return appropriate
value for the multi-device functionality.
Returns whether the given view has support for the given functionality.
"""
if functionality == "multi_device":
return all(
responsetypes.registry.get_class_for_tag(tag).multi_device_support
for tag in self.problem_types
)
return False
# Proxy to CapaModule for access to any of its attributes
answer_available = module_attr('answer_available')
check_button_name = module_attr('check_button_name')
check_button_checking_name = module_attr('check_button_checking_name')
check_problem = module_attr('check_problem')
choose_new_seed = module_attr('choose_new_seed')
closed = module_attr('closed')
get_answer = module_attr('get_answer')
get_problem = module_attr('get_problem')
get_problem_html = module_attr('get_problem_html')
get_state_for_lcp = module_attr('get_state_for_lcp')
handle_input_ajax = module_attr('handle_input_ajax')
hint_button = module_attr('hint_button')
handle_problem_html_error = module_attr('handle_problem_html_error')
handle_ungraded_response = module_attr('handle_ungraded_response')
is_attempted = module_attr('is_attempted')
is_correct = module_attr('is_correct')
is_past_due = module_attr('is_past_due')
is_submitted = module_attr('is_submitted')
lcp = module_attr('lcp')
make_dict_of_responses = module_attr('make_dict_of_responses')
new_lcp = module_attr('new_lcp')
publish_grade = module_attr('publish_grade')
rescore_problem = module_attr('rescore_problem')
reset_problem = module_attr('reset_problem')
save_problem = module_attr('save_problem')
set_state_from_lcp = module_attr('set_state_from_lcp')
should_show_check_button = module_attr('should_show_check_button')
should_show_reset_button = module_attr('should_show_reset_button')
should_show_save_button = module_attr('should_show_save_button')
update_score = module_attr('update_score')
| agpl-3.0 |
chrisjdavie/Cookit | 5-a-day-thing/data_out/save_all_as_xls.py | 1 | 1065 | '''
Extracts the 5-a-day data from the different excel spreadsheets, and
saves it all in one big spreadsheet.
Created on 14 Nov 2014
@author: chris
'''
def main():
base_dir = '/home/chris/Projects/Cookit/family-food-datasets/'
import xlwt
workbook = xlwt.Workbook()
from pickle_this import get_fruit_n_veg_data
files = [ 'ConsAGEHRPHH-12dec13.xls', 'ConsGORHH-12dec13.xls', 'ConsINCHH-12dec13.xls', 'ConsAGEHRPEDHH-12dec13.xls', 'ConsCOMPHH-12dec13.xls', 'ConsINCEQUIVHH-12dec13.xls' ]
book_names = ['age (old)', 'region', 'income quintile', 'age (young)', 'household composition', 'income decile' ]
    # Generate one sheet per demographic breakdown
for suff, book_name in zip(files,book_names):
fname = base_dir + suff
dat, units, _ = get_fruit_n_veg_data(fname)
from save_as_xls import gen_sheet
gen_sheet(workbook, dat, units, book_name)
workbook.save('/tmp/tst.xls')
if __name__ == '__main__':
main() | mit |
Buggaarde/youtube-dl | youtube_dl/extractor/gfycat.py | 147 | 3555 | # coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from ..utils import (
int_or_none,
float_or_none,
qualities,
ExtractorError,
)
class GfycatIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?gfycat\.com/(?:ifr/)?(?P<id>[^/?#]+)'
_TESTS = [{
'url': 'http://gfycat.com/DeadlyDecisiveGermanpinscher',
'info_dict': {
'id': 'DeadlyDecisiveGermanpinscher',
'ext': 'mp4',
'title': 'Ghost in the Shell',
'timestamp': 1410656006,
'upload_date': '20140914',
'uploader': 'anonymous',
'duration': 10.4,
'view_count': int,
'like_count': int,
'dislike_count': int,
'categories': list,
'age_limit': 0,
}
}, {
'url': 'http://gfycat.com/ifr/JauntyTimelyAmazontreeboa',
'info_dict': {
'id': 'JauntyTimelyAmazontreeboa',
'ext': 'mp4',
'title': 'JauntyTimelyAmazontreeboa',
'timestamp': 1411720126,
'upload_date': '20140926',
'uploader': 'anonymous',
'duration': 3.52,
'view_count': int,
'like_count': int,
'dislike_count': int,
'categories': list,
'age_limit': 0,
}
}]
def _real_extract(self, url):
video_id = self._match_id(url)
gfy = self._download_json(
'http://gfycat.com/cajax/get/%s' % video_id,
video_id, 'Downloading video info')
if 'error' in gfy:
raise ExtractorError('Gfycat said: ' + gfy['error'], expected=True)
gfy = gfy['gfyItem']
title = gfy.get('title') or gfy['gfyName']
description = gfy.get('description')
timestamp = int_or_none(gfy.get('createDate'))
uploader = gfy.get('userName')
view_count = int_or_none(gfy.get('views'))
like_count = int_or_none(gfy.get('likes'))
dislike_count = int_or_none(gfy.get('dislikes'))
age_limit = 18 if gfy.get('nsfw') == '1' else 0
width = int_or_none(gfy.get('width'))
height = int_or_none(gfy.get('height'))
fps = int_or_none(gfy.get('frameRate'))
num_frames = int_or_none(gfy.get('numFrames'))
duration = float_or_none(num_frames, fps) if num_frames and fps else None
categories = gfy.get('tags') or gfy.get('extraLemmas') or []
FORMATS = ('gif', 'webm', 'mp4')
quality = qualities(FORMATS)
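        # qualities() returns a ranking callable: each format_id maps to its
        # index in FORMATS, so mp4 outranks webm, which outranks gif.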
formats = []
for format_id in FORMATS:
video_url = gfy.get('%sUrl' % format_id)
if not video_url:
continue
filesize = gfy.get('%sSize' % format_id)
formats.append({
'url': video_url,
'format_id': format_id,
'width': width,
'height': height,
'fps': fps,
'filesize': filesize,
'quality': quality(format_id),
})
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'description': description,
'timestamp': timestamp,
'uploader': uploader,
'duration': duration,
'view_count': view_count,
'like_count': like_count,
'dislike_count': dislike_count,
'categories': categories,
'age_limit': age_limit,
'formats': formats,
}
| unlicense |
alexschlueter/cern-root | interpreter/llvm/src/utils/release/findRegressions-nightly.py | 123 | 3193 | #!/usr/bin/env python
import re, string, sys, os, time
DEBUG = 0
testDirName = 'llvm-test'
test = ['compile', 'llc', 'jit', 'cbe']
exectime = ['llc-time', 'jit-time', 'cbe-time',]
comptime = ['llc', 'jit-comptime', 'compile']
(tp, exp) = ('compileTime_', 'executeTime_')
def parse(file):
f=open(file, 'r')
d = f.read()
#Cleanup weird stuff
d = re.sub(r',\d+:\d','', d)
r = re.findall(r'TEST-(PASS|FAIL|RESULT.*?):\s+(.*?)\s+(.*?)\r*\n', d)
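  # Matched nightly log lines look like (illustrative, not captured output):
  #   TEST-PASS: llc /llvm-test/SingleSource/Benchmarks/foo
  #   TEST-RESULT-llc-time: foo 1.234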
test = {}
fname = ''
for t in r:
if DEBUG:
print t
if t[0] == 'PASS' or t[0] == 'FAIL' :
tmp = t[2].split(testDirName)
if DEBUG:
print tmp
if len(tmp) == 2:
fname = tmp[1].strip('\r\n')
else:
fname = tmp[0].strip('\r\n')
if not test.has_key(fname) :
test[fname] = {}
for k in test:
test[fname][k] = 'NA'
test[fname][t[1]] = t[0]
if DEBUG:
print test[fname][t[1]]
else :
try:
n = t[0].split('RESULT-')[1]
if DEBUG:
          print n
if n == 'llc' or n == 'jit-comptime' or n == 'compile':
test[fname][tp + n] = float(t[2].split(' ')[2])
if DEBUG:
print test[fname][tp + n]
elif n.endswith('-time') :
test[fname][exp + n] = float(t[2].strip('\r\n'))
if DEBUG:
print test[fname][exp + n]
else :
print "ERROR!"
sys.exit(1)
except:
continue
return test
# Diff results and look for regressions.
def diffResults(d_old, d_new):
for t in sorted(d_old.keys()) :
if DEBUG:
print t
if d_new.has_key(t) :
# Check if the test passed or failed.
for x in test:
if d_old[t].has_key(x):
if d_new[t].has_key(x):
if d_old[t][x] == 'PASS':
if d_new[t][x] != 'PASS':
print t + " *** REGRESSION (" + x + ")\n"
else:
if d_new[t][x] == 'PASS':
print t + " * NEW PASS (" + x + ")\n"
else :
print t + "*** REGRESSION (" + x + ")\n"
# For execution time, if there is no result, its a fail.
for x in exectime:
if d_old[t].has_key(tp + x):
if not d_new[t].has_key(tp + x):
print t + " *** REGRESSION (" + tp + x + ")\n"
else :
if d_new[t].has_key(tp + x):
print t + " * NEW PASS (" + tp + x + ")\n"
for x in comptime:
if d_old[t].has_key(exp + x):
if not d_new[t].has_key(exp + x):
print t + " *** REGRESSION (" + exp + x + ")\n"
else :
if d_new[t].has_key(exp + x):
print t + " * NEW PASS (" + exp + x + ")\n"
else :
print t + ": Removed from test-suite.\n"
#Main
if len(sys.argv) < 3 :
print 'Usage:', sys.argv[0], \
'<old log> <new log>'
sys.exit(-1)
d_old = parse(sys.argv[1])
d_new = parse(sys.argv[2])
diffResults(d_old, d_new)
| lgpl-2.1 |
gangadhar-kadam/adb-erp | selling/utils.py | 5 | 1499 | # ERPNext - web based ERP (http://erpnext.com)
# Copyright (C) 2012 Web Notes Technologies Pvt Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import webnotes
def get_customer_list(doctype, txt, searchfield, start, page_len, filters):
if webnotes.conn.get_default("cust_master_name") == "Customer Name":
fields = ["name", "customer_group", "territory"]
else:
fields = ["name", "customer_name", "customer_group", "territory"]
return webnotes.conn.sql("""select %s from `tabCustomer` where docstatus < 2
and (%s like %s or customer_name like %s) order by
case when name like %s then 0 else 1 end,
case when customer_name like %s then 0 else 1 end,
name, customer_name limit %s, %s""" %
(", ".join(fields), searchfield, "%s", "%s", "%s", "%s", "%s", "%s"),
("%%%s%%" % txt, "%%%s%%" % txt, "%%%s%%" % txt, "%%%s%%" % txt, start, page_len)) | agpl-3.0 |
StefanRijnhart/OpenUpgrade | addons/l10n_ve/__init__.py | 975 | 1058 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
signed/intellij-community | python/testData/types/NumpyArrayType/numpy/core/multiarray.py | 79 | 7955 | def array(p_object, dtype=None, copy=True, order=None, subok=False, ndmin=0): # real signature unknown; restored from __doc__
"""
array(object, dtype=None, copy=True, order=None, subok=False, ndmin=0)
Create an array.
Parameters
----------
object : array_like
An array, any object exposing the array interface, an
object whose __array__ method returns an array, or any
(nested) sequence.
dtype : data-type, optional
The desired data-type for the array. If not given, then
the type will be determined as the minimum type required
to hold the objects in the sequence. This argument can only
be used to 'upcast' the array. For downcasting, use the
.astype(t) method.
copy : bool, optional
If true (default), then the object is copied. Otherwise, a copy
will only be made if __array__ returns a copy, if obj is a
nested sequence, or if a copy is needed to satisfy any of the other
requirements (`dtype`, `order`, etc.).
order : {'C', 'F', 'A'}, optional
Specify the order of the array. If order is 'C' (default), then the
array will be in C-contiguous order (last-index varies the
fastest). If order is 'F', then the returned array
will be in Fortran-contiguous order (first-index varies the
fastest). If order is 'A', then the returned array may
be in any order (either C-, Fortran-contiguous, or even
discontiguous).
subok : bool, optional
If True, then sub-classes will be passed-through, otherwise
the returned array will be forced to be a base-class array (default).
ndmin : int, optional
Specifies the minimum number of dimensions that the resulting
array should have. Ones will be pre-pended to the shape as
needed to meet this requirement.
Returns
-------
out : ndarray
An array object satisfying the specified requirements.
See Also
--------
empty, empty_like, zeros, zeros_like, ones, ones_like, fill
Examples
--------
>>> np.array([1, 2, 3])
array([1, 2, 3])
Upcasting:
>>> np.array([1, 2, 3.0])
array([ 1., 2., 3.])
More than one dimension:
>>> np.array([[1, 2], [3, 4]])
array([[1, 2],
[3, 4]])
Minimum dimensions 2:
>>> np.array([1, 2, 3], ndmin=2)
array([[1, 2, 3]])
Type provided:
>>> np.array([1, 2, 3], dtype=complex)
array([ 1.+0.j, 2.+0.j, 3.+0.j])
Data-type consisting of more than one element:
>>> x = np.array([(1,2),(3,4)],dtype=[('a','<i4'),('b','<i4')])
>>> x['a']
array([1, 3])
Creating an array from sub-classes:
>>> np.array(np.mat('1 2; 3 4'))
array([[1, 2],
[3, 4]])
>>> np.array(np.mat('1 2; 3 4'), subok=True)
matrix([[1, 2],
[3, 4]])
"""
pass
class ndarray(object):
"""
ndarray(shape, dtype=float, buffer=None, offset=0,
strides=None, order=None)
An array object represents a multidimensional, homogeneous array
of fixed-size items. An associated data-type object describes the
format of each element in the array (its byte-order, how many bytes it
occupies in memory, whether it is an integer, a floating point number,
or something else, etc.)
Arrays should be constructed using `array`, `zeros` or `empty` (refer
to the See Also section below). The parameters given here refer to
a low-level method (`ndarray(...)`) for instantiating an array.
    For more information, refer to the `numpy` module and examine the
    methods and attributes of an array.
Parameters
----------
(for the __new__ method; see Notes below)
shape : tuple of ints
Shape of created array.
dtype : data-type, optional
Any object that can be interpreted as a numpy data type.
buffer : object exposing buffer interface, optional
Used to fill the array with data.
offset : int, optional
Offset of array data in buffer.
strides : tuple of ints, optional
Strides of data in memory.
order : {'C', 'F'}, optional
Row-major or column-major order.
Attributes
----------
T : ndarray
Transpose of the array.
data : buffer
The array's elements, in memory.
dtype : dtype object
Describes the format of the elements in the array.
flags : dict
Dictionary containing information related to memory use, e.g.,
'C_CONTIGUOUS', 'OWNDATA', 'WRITEABLE', etc.
flat : numpy.flatiter object
Flattened version of the array as an iterator. The iterator
allows assignments, e.g., ``x.flat = 3`` (See `ndarray.flat` for
assignment examples; TODO).
imag : ndarray
Imaginary part of the array.
real : ndarray
Real part of the array.
size : int
Number of elements in the array.
itemsize : int
The memory use of each array element in bytes.
nbytes : int
The total number of bytes required to store the array data,
i.e., ``itemsize * size``.
ndim : int
The array's number of dimensions.
shape : tuple of ints
Shape of the array.
strides : tuple of ints
The step-size required to move from one element to the next in
memory. For example, a contiguous ``(3, 4)`` array of type
``int16`` in C-order has strides ``(8, 2)``. This implies that
to move from element to element in memory requires jumps of 2 bytes.
To move from row-to-row, one needs to jump 8 bytes at a time
(``2 * 4``).
ctypes : ctypes object
Class containing properties of the array needed for interaction
with ctypes.
base : ndarray
If the array is a view into another array, that array is its `base`
(unless that array is also a view). The `base` array is where the
array data is actually stored.
See Also
--------
array : Construct an array.
zeros : Create an array, each element of which is zero.
empty : Create an array, but leave its allocated memory unchanged (i.e.,
it contains "garbage").
dtype : Create a data-type.
Notes
-----
There are two modes of creating an array using ``__new__``:
1. If `buffer` is None, then only `shape`, `dtype`, and `order`
are used.
2. If `buffer` is an object exposing the buffer interface, then
all keywords are interpreted.
No ``__init__`` method is needed because the array is fully initialized
after the ``__new__`` method.
Examples
--------
These examples illustrate the low-level `ndarray` constructor. Refer
to the `See Also` section above for easier ways of constructing an
ndarray.
First mode, `buffer` is None:
>>> np.ndarray(shape=(2,2), dtype=float, order='F')
array([[ -1.13698227e+002, 4.25087011e-303],
[ 2.88528414e-306, 3.27025015e-309]]) #random
Second mode:
>>> np.ndarray((2,), buffer=np.array([1,2,3]),
... offset=np.int_().itemsize,
... dtype=int) # offset = 1*itemsize, i.e. skip first element
array([2, 3])
"""
pass | apache-2.0 |
betoesquivel/CIE | flask/lib/python2.7/site-packages/sqlalchemy/connectors/zxJDBC.py | 33 | 1868 | # connectors/zxJDBC.py
# Copyright (C) 2005-2014 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import sys
from . import Connector
class ZxJDBCConnector(Connector):
driver = 'zxjdbc'
supports_sane_rowcount = False
supports_sane_multi_rowcount = False
supports_unicode_binds = True
supports_unicode_statements = sys.version > '2.5.0+'
description_encoding = None
default_paramstyle = 'qmark'
jdbc_db_name = None
jdbc_driver_name = None
@classmethod
def dbapi(cls):
from com.ziclix.python.sql import zxJDBC
return zxJDBC
def _driver_kwargs(self):
"""Return kw arg dict to be sent to connect()."""
return {}
def _create_jdbc_url(self, url):
"""Create a JDBC url from a :class:`~sqlalchemy.engine.url.URL`"""
return 'jdbc:%s://%s%s/%s' % (self.jdbc_db_name, url.host,
url.port is not None
and ':%s' % url.port or '',
url.database)
def create_connect_args(self, url):
opts = self._driver_kwargs()
opts.update(url.query)
return [
[self._create_jdbc_url(url),
url.username, url.password,
self.jdbc_driver_name],
opts]
def is_disconnect(self, e, connection, cursor):
if not isinstance(e, self.dbapi.ProgrammingError):
return False
e = str(e)
return 'connection is closed' in e or 'cursor is closed' in e
def _get_server_version_info(self, connection):
# use connection.connection.dbversion, and parse appropriately
# to get a tuple
raise NotImplementedError()
| mit |
chirilo/phantomjs | src/qt/qtwebkit/Tools/Scripts/webkitpy/common/system/zipfileset_mock.py | 167 | 2166 | # Copyright (C) 2011 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
def make_factory(ziphashes):
"""ZipFileSet factory routine that looks up zipfiles in a dict;
each zipfile should also be a dict of member names -> contents."""
class MockZipFileSet(object):
def __init__(self, url):
self._url = url
self._ziphash = ziphashes[url]
def namelist(self):
return self._ziphash.keys()
def read(self, member):
return self._ziphash[member]
def close(self):
pass
def maker(url):
# We return None because there's no tempfile to delete.
return (None, MockZipFileSet(url))
return maker
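# Usage sketch (URL and member names are illustrative):
#
#   factory = make_factory({'http://example.com/a.zip': {'f.txt': 'data'}})
#   tempfile_path, zip_set = factory('http://example.com/a.zip')  # path is None
#   zip_set.read('f.txt')  # -> 'data'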
| bsd-3-clause |
jiadaizhao/LeetCode | 0401-0500/0407-Trapping Rain Water II/0407-Trapping Rain Water II.py | 1 | 1241 | import heapq
class Solution:
def trapRainWater(self, heightMap: List[List[int]]) -> int:
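        # Priority-flood approach: seed a min-heap with every boundary cell,
        # then repeatedly pop the lowest wall on the current boundary; any
        # unvisited neighbour traps water up to that wall's height before
        # joining the boundary itself. Runs in O(m*n*log(m*n)).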
m = len(heightMap)
if m == 0:
return 0
n = len(heightMap[0])
if n == 0:
return 0
visited = [[False]*n for _ in range(m)]
pq = []
for i in range(m):
heapq.heappush(pq, (heightMap[i][0], i, 0))
visited[i][0] = True
if n > 1:
heapq.heappush(pq, (heightMap[i][n - 1], i, n - 1))
visited[i][n - 1] = True
for j in range(1, n - 1):
heapq.heappush(pq, (heightMap[0][j], 0, j))
visited[0][j] = True
if m > 1:
heapq.heappush(pq, (heightMap[m - 1][j], m - 1, j))
visited[m - 1][j] = True
vol = 0
while pq:
h, row, col = heapq.heappop(pq)
for nr, nc in (row-1, col), (row+1, col), (row, col-1), (row, col+1):
if 0 <= nr < m and 0 <= nc < n and (not visited[nr][nc]):
heapq.heappush(pq, (max(h, heightMap[nr][nc]), nr, nc))
visited[nr][nc] = True
vol += max(h - heightMap[nr][nc], 0)
return vol
| mit |
lukas-hetzenecker/home-assistant | homeassistant/components/notify/syslog.py | 11 | 2564 | """
Syslog notification service.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/notify.syslog/
"""
import logging
import voluptuous as vol
from homeassistant.components.notify import (
ATTR_TITLE, ATTR_TITLE_DEFAULT, PLATFORM_SCHEMA, BaseNotificationService)
CONF_FACILITY = 'facility'
CONF_OPTION = 'option'
CONF_PRIORITY = 'priority'
SYSLOG_FACILITY = {
'kernel': 'LOG_KERN',
'user': 'LOG_USER',
'mail': 'LOG_MAIL',
'daemon': 'LOG_DAEMON',
    'auth': 'LOG_AUTH',
'LPR': 'LOG_LPR',
'news': 'LOG_NEWS',
'uucp': 'LOG_UUCP',
'cron': 'LOG_CRON',
'syslog': 'LOG_SYSLOG',
'local0': 'LOG_LOCAL0',
'local1': 'LOG_LOCAL1',
'local2': 'LOG_LOCAL2',
'local3': 'LOG_LOCAL3',
'local4': 'LOG_LOCAL4',
'local5': 'LOG_LOCAL5',
'local6': 'LOG_LOCAL6',
'local7': 'LOG_LOCAL7',
}
SYSLOG_OPTION = {
'pid': 'LOG_PID',
'cons': 'LOG_CONS',
'ndelay': 'LOG_NDELAY',
'nowait': 'LOG_NOWAIT',
'perror': 'LOG_PERROR',
}
SYSLOG_PRIORITY = {
5: 'LOG_EMERG',
4: 'LOG_ALERT',
3: 'LOG_CRIT',
2: 'LOG_ERR',
1: 'LOG_WARNING',
0: 'LOG_NOTICE',
-1: 'LOG_INFO',
-2: 'LOG_DEBUG',
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_FACILITY, default='syslog'):
vol.In(SYSLOG_FACILITY.keys()),
vol.Optional(CONF_OPTION, default='pid'): vol.In(SYSLOG_OPTION.keys()),
vol.Optional(CONF_PRIORITY, default=-1): vol.In(SYSLOG_PRIORITY.keys()),
})
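# Example configuration.yaml entry (illustrative values):
#
# notify:
#   platform: syslog
#   facility: local0
#   option: pid
#   priority: 1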
_LOGGER = logging.getLogger(__name__)
def get_service(hass, config):
"""Get the syslog notification service."""
import syslog
facility = getattr(syslog, SYSLOG_FACILITY[config.get(CONF_FACILITY)])
option = getattr(syslog, SYSLOG_OPTION[config.get(CONF_OPTION)])
priority = getattr(syslog, SYSLOG_PRIORITY[config.get(CONF_PRIORITY)])
return SyslogNotificationService(facility, option, priority)
class SyslogNotificationService(BaseNotificationService):
"""Implement the syslog notification service."""
def __init__(self, facility, option, priority):
"""Initialize the service."""
self._facility = facility
self._option = option
self._priority = priority
def send_message(self, message="", **kwargs):
"""Send a message to a user."""
import syslog
title = kwargs.get(ATTR_TITLE, ATTR_TITLE_DEFAULT)
syslog.openlog(title, self._option, self._facility)
syslog.syslog(self._priority, message)
syslog.closelog()
| mit |
aricchen/openHR | openerp/addons/l10n_gr/__init__.py | 438 | 1102 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2008 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#import sandwich_wizard
#import order_create
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
mattnenterprise/servo | tests/wpt/web-platform-tests/tools/third_party/pytest/testing/test_runner.py | 13 | 25655 | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import _pytest._code
import os
import py
import pytest
import sys
from _pytest import runner, main, outcomes
class TestSetupState(object):
def test_setup(self, testdir):
ss = runner.SetupState()
item = testdir.getitem("def test_func(): pass")
values = [1]
ss.prepare(item)
ss.addfinalizer(values.pop, colitem=item)
assert values
ss._pop_and_teardown()
assert not values
def test_teardown_exact_stack_empty(self, testdir):
item = testdir.getitem("def test_func(): pass")
ss = runner.SetupState()
ss.teardown_exact(item, None)
ss.teardown_exact(item, None)
ss.teardown_exact(item, None)
def test_setup_fails_and_failure_is_cached(self, testdir):
item = testdir.getitem("""
def setup_module(mod):
raise ValueError(42)
def test_func(): pass
""") # noqa
ss = runner.SetupState()
pytest.raises(ValueError, lambda: ss.prepare(item))
pytest.raises(ValueError, lambda: ss.prepare(item))
def test_teardown_multiple_one_fails(self, testdir):
r = []
def fin1():
r.append('fin1')
def fin2():
raise Exception('oops')
def fin3():
r.append('fin3')
item = testdir.getitem("def test_func(): pass")
ss = runner.SetupState()
ss.addfinalizer(fin1, item)
ss.addfinalizer(fin2, item)
ss.addfinalizer(fin3, item)
with pytest.raises(Exception) as err:
ss._callfinalizers(item)
assert err.value.args == ('oops',)
assert r == ['fin3', 'fin1']
def test_teardown_multiple_fail(self, testdir):
# Ensure the first exception is the one which is re-raised.
# Ideally both would be reported however.
def fin1():
raise Exception('oops1')
def fin2():
raise Exception('oops2')
item = testdir.getitem("def test_func(): pass")
ss = runner.SetupState()
ss.addfinalizer(fin1, item)
ss.addfinalizer(fin2, item)
with pytest.raises(Exception) as err:
ss._callfinalizers(item)
assert err.value.args == ('oops2',)
class BaseFunctionalTests(object):
def test_passfunction(self, testdir):
reports = testdir.runitem("""
def test_func():
pass
""")
rep = reports[1]
assert rep.passed
assert not rep.failed
assert rep.outcome == "passed"
assert not rep.longrepr
def test_failfunction(self, testdir):
reports = testdir.runitem("""
def test_func():
assert 0
""")
rep = reports[1]
assert not rep.passed
assert not rep.skipped
assert rep.failed
assert rep.when == "call"
assert rep.outcome == "failed"
# assert isinstance(rep.longrepr, ReprExceptionInfo)
def test_skipfunction(self, testdir):
reports = testdir.runitem("""
import pytest
def test_func():
pytest.skip("hello")
""")
rep = reports[1]
assert not rep.failed
assert not rep.passed
assert rep.skipped
assert rep.outcome == "skipped"
        # assert rep.skipped.when == "call"
        # assert rep.skipped.reason == "hello"
# assert rep.skipped.location.lineno == 3
# assert rep.skipped.location.path
# assert not rep.skipped.failurerepr
def test_skip_in_setup_function(self, testdir):
reports = testdir.runitem("""
import pytest
def setup_function(func):
pytest.skip("hello")
def test_func():
pass
""")
print(reports)
rep = reports[0]
assert not rep.failed
assert not rep.passed
assert rep.skipped
# assert rep.skipped.reason == "hello"
# assert rep.skipped.location.lineno == 3
# assert rep.skipped.location.lineno == 3
assert len(reports) == 2
assert reports[1].passed # teardown
def test_failure_in_setup_function(self, testdir):
reports = testdir.runitem("""
import pytest
def setup_function(func):
raise ValueError(42)
def test_func():
pass
""")
rep = reports[0]
assert not rep.skipped
assert not rep.passed
assert rep.failed
assert rep.when == "setup"
assert len(reports) == 2
def test_failure_in_teardown_function(self, testdir):
reports = testdir.runitem("""
import pytest
def teardown_function(func):
raise ValueError(42)
def test_func():
pass
""")
print(reports)
assert len(reports) == 3
rep = reports[2]
assert not rep.skipped
assert not rep.passed
assert rep.failed
assert rep.when == "teardown"
# assert rep.longrepr.reprcrash.lineno == 3
# assert rep.longrepr.reprtraceback.reprentries
def test_custom_failure_repr(self, testdir):
testdir.makepyfile(conftest="""
import pytest
class Function(pytest.Function):
def repr_failure(self, excinfo):
return "hello"
""")
reports = testdir.runitem("""
import pytest
def test_func():
assert 0
""")
rep = reports[1]
assert not rep.skipped
assert not rep.passed
assert rep.failed
# assert rep.outcome.when == "call"
# assert rep.failed.where.lineno == 3
# assert rep.failed.where.path.basename == "test_func.py"
# assert rep.failed.failurerepr == "hello"
def test_teardown_final_returncode(self, testdir):
rec = testdir.inline_runsource("""
def test_func():
pass
def teardown_function(func):
raise ValueError(42)
""")
assert rec.ret == 1
def test_exact_teardown_issue90(self, testdir):
rec = testdir.inline_runsource("""
import pytest
class TestClass(object):
def test_method(self):
pass
def teardown_class(cls):
raise Exception()
def test_func():
import sys
            # on python2 exc_info is kept till a function exits,
            # so we would end up calling test functions while
            # sys.exc_info would return the IndexError
            # from guessing the last item
excinfo = sys.exc_info()
import traceback
assert excinfo[0] is None, \
traceback.format_exception(*excinfo)
def teardown_function(func):
raise ValueError(42)
""")
reps = rec.getreports("pytest_runtest_logreport")
print(reps)
for i in range(2):
assert reps[i].nodeid.endswith("test_method")
assert reps[i].passed
assert reps[2].when == "teardown"
assert reps[2].failed
assert len(reps) == 6
for i in range(3, 5):
assert reps[i].nodeid.endswith("test_func")
assert reps[i].passed
assert reps[5].when == "teardown"
assert reps[5].nodeid.endswith("test_func")
assert reps[5].failed
def test_exact_teardown_issue1206(self, testdir):
"""issue shadowing error with wrong number of arguments on teardown_method."""
rec = testdir.inline_runsource("""
import pytest
class TestClass(object):
def teardown_method(self, x, y, z):
pass
def test_method(self):
assert True
""")
reps = rec.getreports("pytest_runtest_logreport")
print(reps)
assert len(reps) == 3
#
assert reps[0].nodeid.endswith("test_method")
assert reps[0].passed
assert reps[0].when == 'setup'
#
assert reps[1].nodeid.endswith("test_method")
assert reps[1].passed
assert reps[1].when == 'call'
#
assert reps[2].nodeid.endswith("test_method")
assert reps[2].failed
assert reps[2].when == "teardown"
assert reps[2].longrepr.reprcrash.message in (
# python3 error
"TypeError: teardown_method() missing 2 required positional arguments: 'y' and 'z'",
# python2 error
'TypeError: teardown_method() takes exactly 4 arguments (2 given)'
)
def test_failure_in_setup_function_ignores_custom_repr(self, testdir):
testdir.makepyfile(conftest="""
import pytest
class Function(pytest.Function):
def repr_failure(self, excinfo):
assert 0
""")
reports = testdir.runitem("""
def setup_function(func):
raise ValueError(42)
def test_func():
pass
""")
assert len(reports) == 2
rep = reports[0]
print(rep)
assert not rep.skipped
assert not rep.passed
assert rep.failed
# assert rep.outcome.when == "setup"
# assert rep.outcome.where.lineno == 3
# assert rep.outcome.where.path.basename == "test_func.py"
        # assert isinstance(rep.failed.failurerepr, PythonFailureRepr)
def test_systemexit_does_not_bail_out(self, testdir):
try:
reports = testdir.runitem("""
def test_func():
raise SystemExit(42)
""")
except SystemExit:
pytest.fail("runner did not catch SystemExit")
rep = reports[1]
assert rep.failed
assert rep.when == "call"
def test_exit_propagates(self, testdir):
try:
testdir.runitem("""
import pytest
def test_func():
raise pytest.exit.Exception()
""")
except pytest.exit.Exception:
pass
else:
pytest.fail("did not raise")
class TestExecutionNonForked(BaseFunctionalTests):
def getrunner(self):
def f(item):
return runner.runtestprotocol(item, log=False)
return f
def test_keyboardinterrupt_propagates(self, testdir):
try:
testdir.runitem("""
def test_func():
raise KeyboardInterrupt("fake")
""")
except KeyboardInterrupt:
pass
else:
pytest.fail("did not raise")
class TestExecutionForked(BaseFunctionalTests):
pytestmark = pytest.mark.skipif("not hasattr(os, 'fork')")
def getrunner(self):
# XXX re-arrange this test to live in pytest-xdist
boxed = pytest.importorskip("xdist.boxed")
return boxed.forked_run_report
def test_suicide(self, testdir):
reports = testdir.runitem("""
def test_func():
import os
os.kill(os.getpid(), 15)
""")
rep = reports[0]
assert rep.failed
assert rep.when == "???"
class TestSessionReports(object):
def test_collect_result(self, testdir):
col = testdir.getmodulecol("""
def test_func1():
pass
class TestClass(object):
pass
""")
rep = runner.collect_one_node(col)
assert not rep.failed
assert not rep.skipped
assert rep.passed
locinfo = rep.location
assert locinfo[0] == col.fspath.basename
assert not locinfo[1]
assert locinfo[2] == col.fspath.basename
res = rep.result
assert len(res) == 2
assert res[0].name == "test_func1"
assert res[1].name == "TestClass"
reporttypes = [
runner.BaseReport,
runner.TestReport,
runner.TeardownErrorReport,
runner.CollectReport,
]
@pytest.mark.parametrize('reporttype', reporttypes, ids=[x.__name__ for x in reporttypes])
def test_report_extra_parameters(reporttype):
if hasattr(py.std.inspect, 'signature'):
args = list(py.std.inspect.signature(reporttype.__init__).parameters.keys())[1:]
else:
args = py.std.inspect.getargspec(reporttype.__init__)[0][1:]
basekw = dict.fromkeys(args, [])
report = reporttype(newthing=1, **basekw)
assert report.newthing == 1
def test_callinfo():
ci = runner.CallInfo(lambda: 0, '123')
assert ci.when == "123"
assert ci.result == 0
assert "result" in repr(ci)
ci = runner.CallInfo(lambda: 0 / 0, '123')
assert ci.when == "123"
assert not hasattr(ci, 'result')
assert ci.excinfo
assert "exc" in repr(ci)
# design question: do we want general hooks in python files?
# if so, functional tests like the following make sense
@pytest.mark.xfail
def test_runtest_in_module_ordering(testdir):
p1 = testdir.makepyfile("""
import pytest
def pytest_runtest_setup(item): # runs after class-level!
item.function.mylist.append("module")
class TestClass(object):
def pytest_runtest_setup(self, item):
assert not hasattr(item.function, 'mylist')
item.function.mylist = ['class']
@pytest.fixture
def mylist(self, request):
return request.function.mylist
def pytest_runtest_call(self, item, __multicall__):
try:
__multicall__.execute()
except ValueError:
pass
def test_hello1(self, mylist):
assert mylist == ['class', 'module'], mylist
raise ValueError()
def test_hello2(self, mylist):
assert mylist == ['class', 'module'], mylist
def pytest_runtest_teardown(item):
del item.function.mylist
""")
result = testdir.runpytest(p1)
result.stdout.fnmatch_lines([
"*2 passed*"
])
def test_outcomeexception_exceptionattributes():
outcome = outcomes.OutcomeException('test')
assert outcome.args[0] == outcome.msg
def test_outcomeexception_passes_except_Exception():
with pytest.raises(outcomes.OutcomeException):
try:
raise outcomes.OutcomeException('test')
except Exception:
pass
def test_pytest_exit():
try:
pytest.exit("hello")
except pytest.exit.Exception:
excinfo = _pytest._code.ExceptionInfo()
assert excinfo.errisinstance(KeyboardInterrupt)
def test_pytest_fail():
try:
pytest.fail("hello")
except pytest.fail.Exception:
excinfo = _pytest._code.ExceptionInfo()
s = excinfo.exconly(tryshort=True)
assert s.startswith("Failed")
def test_pytest_exit_msg(testdir):
testdir.makeconftest("""
import pytest
def pytest_configure(config):
pytest.exit('oh noes')
""")
result = testdir.runpytest()
result.stderr.fnmatch_lines([
"Exit: oh noes",
])
def test_pytest_fail_notrace(testdir):
testdir.makepyfile("""
import pytest
def test_hello():
pytest.fail("hello", pytrace=False)
def teardown_function(function):
pytest.fail("world", pytrace=False)
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines([
"world",
"hello",
])
assert 'def teardown_function' not in result.stdout.str()
@pytest.mark.parametrize('str_prefix', ['u', ''])
def test_pytest_fail_notrace_non_ascii(testdir, str_prefix):
"""Fix pytest.fail with pytrace=False with non-ascii characters (#1178).
This tests with native and unicode strings containing non-ascii chars.
"""
testdir.makepyfile(u"""
# coding: utf-8
import pytest
def test_hello():
pytest.fail(%s'oh oh: ☺', pytrace=False)
""" % str_prefix)
result = testdir.runpytest()
if sys.version_info[0] >= 3:
result.stdout.fnmatch_lines(['*test_hello*', "oh oh: ☺"])
else:
result.stdout.fnmatch_lines(['*test_hello*', "oh oh: *"])
assert 'def test_hello' not in result.stdout.str()
def test_pytest_no_tests_collected_exit_status(testdir):
result = testdir.runpytest()
result.stdout.fnmatch_lines('*collected 0 items*')
assert result.ret == main.EXIT_NOTESTSCOLLECTED
testdir.makepyfile(test_foo="""
def test_foo():
assert 1
""")
result = testdir.runpytest()
result.stdout.fnmatch_lines('*collected 1 item*')
result.stdout.fnmatch_lines('*1 passed*')
assert result.ret == main.EXIT_OK
result = testdir.runpytest('-k nonmatch')
result.stdout.fnmatch_lines('*collected 1 item*')
result.stdout.fnmatch_lines('*1 deselected*')
assert result.ret == main.EXIT_NOTESTSCOLLECTED
def test_exception_printing_skip():
try:
pytest.skip("hello")
except pytest.skip.Exception:
excinfo = _pytest._code.ExceptionInfo()
s = excinfo.exconly(tryshort=True)
assert s.startswith("Skipped")
def test_importorskip(monkeypatch):
importorskip = pytest.importorskip
def f():
importorskip("asdlkj")
try:
sys = importorskip("sys") # noqa
assert sys == py.std.sys
# path = pytest.importorskip("os.path")
# assert path == py.std.os.path
excinfo = pytest.raises(pytest.skip.Exception, f)
path = py.path.local(excinfo.getrepr().reprcrash.path)
        # check that importorskip reports the actual call site,
        # which in this test is the test_runner.py file
assert path.purebasename == "test_runner"
pytest.raises(SyntaxError, "pytest.importorskip('x y z')")
pytest.raises(SyntaxError, "pytest.importorskip('x=y')")
mod = py.std.types.ModuleType("hello123")
mod.__version__ = "1.3"
monkeypatch.setitem(sys.modules, "hello123", mod)
pytest.raises(pytest.skip.Exception, """
pytest.importorskip("hello123", minversion="1.3.1")
""")
mod2 = pytest.importorskip("hello123", minversion="1.3")
assert mod2 == mod
except pytest.skip.Exception:
print(_pytest._code.ExceptionInfo())
pytest.fail("spurious skip")
def test_importorskip_imports_last_module_part():
ospath = pytest.importorskip("os.path")
assert os.path == ospath
def test_importorskip_dev_module(monkeypatch):
try:
mod = py.std.types.ModuleType("mockmodule")
mod.__version__ = '0.13.0.dev-43290'
monkeypatch.setitem(sys.modules, 'mockmodule', mod)
mod2 = pytest.importorskip('mockmodule', minversion='0.12.0')
assert mod2 == mod
pytest.raises(pytest.skip.Exception, """
pytest.importorskip('mockmodule1', minversion='0.14.0')""")
except pytest.skip.Exception:
print(_pytest._code.ExceptionInfo())
pytest.fail("spurious skip")
def test_importorskip_module_level(testdir):
"""importorskip must be able to skip entire modules when used at module level"""
testdir.makepyfile('''
import pytest
foobarbaz = pytest.importorskip("foobarbaz")
def test_foo():
pass
''')
result = testdir.runpytest()
result.stdout.fnmatch_lines(['*collected 0 items / 1 skipped*'])
def test_pytest_cmdline_main(testdir):
p = testdir.makepyfile("""
import pytest
def test_hello():
assert 1
if __name__ == '__main__':
pytest.cmdline.main([__file__])
""")
import subprocess
popen = subprocess.Popen([sys.executable, str(p)], stdout=subprocess.PIPE)
popen.communicate()
ret = popen.wait()
assert ret == 0
def test_unicode_in_longrepr(testdir):
testdir.makeconftest("""
# -*- coding: utf-8 -*-
import pytest
@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport():
outcome = yield
rep = outcome.get_result()
if rep.when == "call":
rep.longrepr = u'ä'
""")
testdir.makepyfile("""
def test_out():
assert 0
""")
result = testdir.runpytest()
assert result.ret == 1
assert "UnicodeEncodeError" not in result.stderr.str()
def test_failure_in_setup(testdir):
testdir.makepyfile("""
def setup_module():
0/0
def test_func():
pass
""")
result = testdir.runpytest("--tb=line")
assert "def setup_module" not in result.stdout.str()
def test_makereport_getsource(testdir):
testdir.makepyfile("""
def test_foo():
if False: pass
else: assert False
""")
result = testdir.runpytest()
assert 'INTERNALERROR' not in result.stdout.str()
result.stdout.fnmatch_lines(['*else: assert False*'])
def test_makereport_getsource_dynamic_code(testdir, monkeypatch):
"""Test that exception in dynamically generated code doesn't break getting the source line."""
import inspect
original_findsource = inspect.findsource
def findsource(obj, *args, **kwargs):
# Can be triggered by dynamically created functions
if obj.__name__ == 'foo':
raise IndexError()
return original_findsource(obj, *args, **kwargs)
monkeypatch.setattr(inspect, 'findsource', findsource)
testdir.makepyfile("""
import pytest
@pytest.fixture
def foo(missing):
pass
def test_fix(foo):
assert False
""")
result = testdir.runpytest('-vv')
assert 'INTERNALERROR' not in result.stdout.str()
result.stdout.fnmatch_lines(["*test_fix*", "*fixture*'missing'*not found*"])
def test_store_except_info_on_error():
""" Test that upon test failure, the exception info is stored on
sys.last_traceback and friends.
"""
# Simulate item that raises a specific exception
class ItemThatRaises(object):
nodeid = 'item_that_raises'
def runtest(self):
raise IndexError('TEST')
try:
runner.pytest_runtest_call(ItemThatRaises())
except IndexError:
pass
# Check that exception info is stored on sys
assert sys.last_type is IndexError
assert sys.last_value.args[0] == 'TEST'
assert sys.last_traceback
def test_current_test_env_var(testdir, monkeypatch):
pytest_current_test_vars = []
monkeypatch.setattr(sys, 'pytest_current_test_vars', pytest_current_test_vars, raising=False)
testdir.makepyfile('''
import pytest
import sys
import os
@pytest.fixture
def fix():
sys.pytest_current_test_vars.append(('setup', os.environ['PYTEST_CURRENT_TEST']))
yield
sys.pytest_current_test_vars.append(('teardown', os.environ['PYTEST_CURRENT_TEST']))
def test(fix):
sys.pytest_current_test_vars.append(('call', os.environ['PYTEST_CURRENT_TEST']))
''')
result = testdir.runpytest_inprocess()
assert result.ret == 0
test_id = 'test_current_test_env_var.py::test'
assert pytest_current_test_vars == [
('setup', test_id + ' (setup)'), ('call', test_id + ' (call)'), ('teardown', test_id + ' (teardown)')]
assert 'PYTEST_CURRENT_TEST' not in os.environ
class TestReportContents(object):
"""
Test user-level API of ``TestReport`` objects.
"""
def getrunner(self):
return lambda item: runner.runtestprotocol(item, log=False)
def test_longreprtext_pass(self, testdir):
reports = testdir.runitem("""
def test_func():
pass
""")
rep = reports[1]
assert rep.longreprtext == ''
def test_longreprtext_failure(self, testdir):
reports = testdir.runitem("""
def test_func():
x = 1
assert x == 4
""")
rep = reports[1]
assert 'assert 1 == 4' in rep.longreprtext
def test_captured_text(self, testdir):
reports = testdir.runitem("""
import pytest
import sys
@pytest.fixture
def fix():
sys.stdout.write('setup: stdout\\n')
sys.stderr.write('setup: stderr\\n')
yield
sys.stdout.write('teardown: stdout\\n')
sys.stderr.write('teardown: stderr\\n')
assert 0
def test_func(fix):
sys.stdout.write('call: stdout\\n')
sys.stderr.write('call: stderr\\n')
assert 0
""")
setup, call, teardown = reports
assert setup.capstdout == 'setup: stdout\n'
assert call.capstdout == 'setup: stdout\ncall: stdout\n'
assert teardown.capstdout == 'setup: stdout\ncall: stdout\nteardown: stdout\n'
assert setup.capstderr == 'setup: stderr\n'
assert call.capstderr == 'setup: stderr\ncall: stderr\n'
assert teardown.capstderr == 'setup: stderr\ncall: stderr\nteardown: stderr\n'
def test_no_captured_text(self, testdir):
reports = testdir.runitem("""
def test_func():
pass
""")
rep = reports[1]
assert rep.capstdout == ''
assert rep.capstderr == ''
| mpl-2.0 |
Kozea/Radicale | radicale/tests/custom/auth.py | 1 | 1080 | # This file is part of Radicale Server - Calendar Server
# Copyright © 2008 Nicolas Kandel
# Copyright © 2008 Pascal Halter
# Copyright © 2008-2017 Guillaume Ayoub
# Copyright © 2017-2018 Unrud <[email protected]>
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Radicale. If not, see <http://www.gnu.org/licenses/>.
"""
Custom authentication.
Just check username for testing
"""
from radicale import auth
class Auth(auth.BaseAuth):
def login(self, login, password):
if login == "tmp":
return login
return ""
| gpl-3.0 |
smithab/azure-quickstart-templates | hortonworks-on-centos/scripts/vm-bootstrap.py | 89 | 53170 | #
# vm-bootstrap.py
#
# This script is used to prepare VMs launched via HDP Cluster Install Blade on Azure.
#
# Parameters passed by the controller when it invokes the bootstrap script (listed in parameter order).
# Required parameters:
# action: "bootstrap" to set up VM and initiate cluster deployment. "check" for checking on cluster deployment status.
# cluster_id: user-specified name of the cluster
# admin_password: password for the Ambari "admin" user
# Required parameters for "bootstrap" action:
# scenario_id: "evaluation" or "standard"
# num_masters: number of masters in the cluster
# num_workers: number of workers in the cluster
# master_prefix: hostname prefix for master hosts (master hosts are named <cluster_id>-<master_prefix>-<id>
# worker_prefix: hostname prefix for worker hosts (worker hosts are named <cluster_id>-<worker_prefix>-<id>
# domain_name: the domain name part of the hosts, starting with a period (e.g., .cloudapp.net)
# id_padding: number of digits for the host <id> (e.g., 2 uses <id> like 01, 02, .., 10, 11)
#   masters_iplist: list of masters' local IPv4 addresses, sorted from master_01 to master_XX and delimited by commas
#   workers_iplist: list of workers' local IPv4 addresses, sorted from worker_01 to worker_XX and delimited by commas
# Required parameters for "check" action:
# --check_timeout_seconds:
# the number of seconds after which the script is required to exit
# --report_timeout_fail:
# if "true", exit code 1 is returned in case deployment has failed, or deployment has not finished after
# check_timeout_seconds
# if "false", exit code 0 is returned if deployment has finished successfully, or deployment has not finished after
# check_timeout_seconds
# Optional:
# protocol: if "https" (default), https:8443 is used for Ambari. Otherwise, Ambari uses http:8080
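# Example invocations (hypothetical values; the cluster name, prefixes, and IP
# addresses below are placeholders, not defaults):
#
#   python vm-bootstrap.py --action bootstrap --cluster_id mycluster \
#     --admin_password secret --scenario_id evaluation \
#     --num_masters 2 --num_workers 3 \
#     --master_prefix mycluster-master- --worker_prefix mycluster-worker- \
#     --domain_name .cloudapp.net --id_padding 2 \
#     --masters_iplist 10.0.0.4,10.0.0.5 \
#     --workers_iplist 10.0.0.6,10.0.0.7,10.0.0.8
#
#   python vm-bootstrap.py --action check --cluster_id mycluster \
#     --admin_password secret --check_timeout_seconds 250 \
#     --report_timeout_fail false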
from optparse import OptionParser
import base64
import json
import logging
import os
import pprint
import re
import socket
import sys
import time
import urllib2
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
handler = logging.FileHandler('/tmp/vm-bootstrap.log')
handler.setLevel(logging.INFO)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.info('Starting VM Bootstrap...')
parser = OptionParser()
parser.add_option("--cluster_id", type="string", dest="cluster_id")
parser.add_option("--scenario_id", type="string", dest="scenario_id", default="evaluation")
parser.add_option("--num_masters", type="int", dest="num_masters")
parser.add_option("--num_workers", type="int", dest="num_workers")
parser.add_option("--master_prefix", type="string", dest="master_prefix")
parser.add_option("--worker_prefix", type="string", dest="worker_prefix")
parser.add_option("--domain_name", type="string", dest="domain_name")
parser.add_option("--id_padding", type="int", dest="id_padding", default=2)
parser.add_option("--admin_password", type="string", dest="admin_password", default="admin")
parser.add_option("--masters_iplist", type="string", dest="masters_iplist")
parser.add_option("--workers_iplist", type="string", dest="workers_iplist")
parser.add_option("--protocol", type="string", dest="protocol", default="https")
parser.add_option("--action", type="string", dest="action", default="bootstrap")
parser.add_option("--check_timeout_seconds", type="int", dest="check_timeout_seconds", default="250")
parser.add_option("--report_timeout_fail", type="string", dest="report_timeout_fail", default="false")
(options, args) = parser.parse_args()
cluster_id = options.cluster_id
scenario_id = options.scenario_id.lower()
num_masters = options.num_masters
num_workers = options.num_workers
master_prefix = options.master_prefix
worker_prefix = options.worker_prefix
domain_name = options.domain_name
id_padding = options.id_padding
admin_password = options.admin_password
masters_iplist = options.masters_iplist
workers_iplist = options.workers_iplist
protocol = options.protocol
action = options.action
check_timeout_seconds = options.check_timeout_seconds
report_timeout_fail = options.report_timeout_fail.lower() == "true"
logger.info('action=' + action)
admin_username = 'admin'
current_admin_password = 'admin'
request_timeout = 30
port = '8443' if (protocol == 'https') else '8080'
http_handler = urllib2.HTTPHandler(debuglevel=1)
opener = urllib2.build_opener(http_handler)
urllib2.install_opener(opener)
class TimeoutException(Exception):
pass
def get_ambari_auth_string():
return 'Basic ' + base64.encodestring('%s:%s' % (admin_username, current_admin_password)).replace('\n', '')
def run_system_command(command):
os.system(command)
def get_hostname(id):
if id <= num_masters:
return master_prefix + str(id).zfill(id_padding)
else:
return worker_prefix + str(id - num_masters).zfill(id_padding)
def get_fqdn(id):
return get_hostname(id) + domain_name
def get_host_ip(hostname):
if (hostname.startswith(master_prefix)):
return masters_iplist[int(hostname.split('-')[-1]) -1]
else:
return workers_iplist[int(hostname.split('-')[-1]) -1]
def get_host_ip_map(hostnames):
host_ip_map = {}
for hostname in hostnames:
num_tries = 0
ip = None
while ip is None and num_tries < 5:
try:
ip = get_host_ip(hostname)
# ip = socket.gethostbyname(hostname)
except:
time.sleep(1)
num_tries = num_tries + 1
continue
if ip is None:
logger.info('Failed to look up ip address for ' + hostname)
raise
else:
logger.info(hostname + ' resolved to ' + ip)
host_ip_map[hostname] = ip
return host_ip_map
def update_etc_hosts(host_ip_map):
logger.info('Adding entries to /etc/hosts file...')
with open("/etc/hosts", "a") as file:
for host in sorted(host_ip_map):
file.write('%s\t%s\t%s\n' % (host_ip_map[host], host + domain_name, host))
logger.info('Finished updating /etc/hosts')
def update_ambari_agent_ini(ambari_server_hostname):
logger.info('Updating ambari-agent.ini file...')
command = 'sed -i s/hostname=localhost/hostname=%s/ /etc/ambari-agent/conf/ambari-agent.ini' % ambari_server_hostname
logger.info('Executing command: ' + command)
run_system_command(command)
logger.info('Finished updating ambari-agent.ini file')
def patch_ambari_agent():
logger.info('Patching ambari-agent to prevent rpmdb corruption...')
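    # NOTE: currently a no-op; no patch is applied between these log lines.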
    logger.info('Finished patching ambari-agent')
def enable_https():
command = """
printf 'api.ssl=true\nclient.api.ssl.cert_name=https.crt\nclient.api.ssl.key_name=https.key\nclient.api.ssl.port=8443' >> /etc/ambari-server/conf/ambari.properties
mkdir /root/ambari-cert
cd /root/ambari-cert
# create server.crt and server.key (self-signed)
openssl genrsa -out server.key 2048
openssl req -new -key server.key -out server.csr -batch
openssl x509 -req -days 365 -in server.csr -signkey server.key -out server.crt
echo PulUuMWPp0o4Lq6flGA0NGDKNRZQGffW2mWmJI3klSyspS7mUl > pass.txt
cp pass.txt passin.txt
# encrypts server.key with des3 as server.key.secured with the specified password
openssl rsa -in server.key -des3 -out server.key.secured -passout file:pass.txt
# creates /tmp/https.keystore.p12
openssl pkcs12 -export -in 'server.crt' -inkey 'server.key.secured' -certfile 'server.crt' -out '/var/lib/ambari-server/keys/https.keystore.p12' -password file:pass.txt -passin file:passin.txt
mv pass.txt /var/lib/ambari-server/keys/https.pass.txt
cd ..
rm -rf /root/ambari-cert
"""
run_system_command(command)
def set_admin_password(new_password, timeout):
logger.info('Setting admin password...')
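    # Hedged sketch: this assumes Ambari's user API accepts a PUT on
    # /api/v1/users/admin with Users/password and Users/old_password, as
    # commonly documented; verify against your Ambari version before relying
    # on it.
    url = '%s://localhost:%s/api/v1/users/admin' % (protocol, port)
    data = json.dumps({'Users/password': new_password,
                       'Users/old_password': current_admin_password})
    request = urllib2.Request(url, data)
    request.add_header('Authorization', get_ambari_auth_string())
    request.add_header('X-Requested-By', 'ambari')
    request.add_header('Content-Type', 'application/json')
    request.get_method = lambda: 'PUT'
    start_time = time.time()
    while time.time() - start_time < timeout:
        try:
            urllib2.urlopen(request, timeout=request_timeout)
            logger.info('Admin password set')
            return
        except Exception:
            logger.exception('Could not set admin password yet; retrying...')
            time.sleep(5)
    raise Exception('Timed out while setting the admin password')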
def poll_until_all_agents_registered(num_hosts, timeout):
url = '%s://localhost:%s/api/v1/hosts' % (protocol, port)
    logger.info('Polling until all agents are registered...')
all_hosts_registered = False
start_time = time.time()
while time.time() - start_time < timeout:
request = urllib2.Request(url)
request.add_header("Authorization", get_ambari_auth_string())
try:
result = urllib2.urlopen(request, timeout=request_timeout).read()
pprint.pprint(result)
if (result is not None):
jsonResult = json.loads(result)
if len(jsonResult['items']) >= num_hosts:
all_hosts_registered = True
break
        except:
logger.exception('Could not poll agent status from the server.')
time.sleep(5)
if not all_hosts_registered:
raise Exception('Timed out while waiting for all agents to register')
def is_ambari_server_host():
hostname = socket.getfqdn()
hostname = hostname.split('.')[0]
logger.info(hostname)
logger.info('Checking ambari host')
logger.info(ambari_server_hostname)
return hostname == ambari_server_hostname
def create_blueprint(scenario_id):
blueprint_name = 'myblueprint'
logger.info('Creating blueprint for scenario %s' % scenario_id)
url = '%s://localhost:%s/api/v1/blueprints/%s' % (protocol, port, blueprint_name)
evaluation_host_groups = [
{
"name" : "master_1",
"components" : [
{
"name" : "AMBARI_SERVER"
},
{
"name" : "DRPC_SERVER"
},
{
"name" : "HIVE_SERVER"
},
{
"name" : "MYSQL_SERVER"
},
{
"name" : "NIMBUS"
},
{
"name" : "SECONDARY_NAMENODE"
},
{
"name" : "SPARK_JOBHISTORYSERVER"
},
{
"name" : "STORM_UI_SERVER"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "master_2",
"components" : [
{
"name" : "APP_TIMELINE_SERVER"
},
{
"name" : "FALCON_SERVER"
},
{
"name" : "HBASE_MASTER"
},
{
"name" : "HISTORYSERVER"
},
{
"name" : "HIVE_METASTORE"
},
{
"name" : "KAFKA_BROKER"
},
{
"name" : "METRICS_COLLECTOR"
},
{
"name" : "NAMENODE"
},
{
"name" : "OOZIE_SERVER"
},
{
"name" : "RESOURCEMANAGER"
},
{
"name" : "WEBHCAT_SERVER"
},
{
"name" : "ZOOKEEPER_SERVER"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "workers",
"components" : [
{
"name" : "DATANODE"
},
{
"name" : "HBASE_REGIONSERVER"
},
{
"name" : "NODEMANAGER"
},
{
"name" : "SUPERVISOR"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "3"
}
]
small_host_groups = [
{
"name" : "master_1",
"components" : [
{
"name" : "AMBARI_SERVER"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "master_2",
"components" : [
{
"name" : "METRICS_COLLECTOR"
},
{
"name" : "NAMENODE"
},
{
"name" : "NIMBUS"
},
{
"name" : "ZOOKEEPER_SERVER"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "master_3",
"components" : [
{
"name" : "DRPC_SERVER"
},
{
"name" : "FALCON_SERVER"
},
{
"name" : "HBASE_MASTER"
},
{
"name" : "HISTORYSERVER"
},
{
"name" : "HIVE_METASTORE"
},
{
"name" : "KAFKA_BROKER"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_SERVER"
},
{
"name" : "RESOURCEMANAGER"
},
{
"name" : "SECONDARY_NAMENODE"
},
{
"name" : "WEBHCAT_SERVER"
},
{
"name" : "ZOOKEEPER_SERVER"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "master_4",
"components" : [
{
"name" : "APP_TIMELINE_SERVER"
},
{
"name" : "HIVE_SERVER"
},
{
"name" : "MYSQL_SERVER"
},
{
"name" : "SPARK_JOBHISTORYSERVER"
},
{
"name" : "STORM_UI_SERVER"
},
{
"name" : "ZOOKEEPER_SERVER"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "workers",
"components" : [
{
"name" : "DATANODE"
},
{
"name" : "HBASE_REGIONSERVER"
},
{
"name" : "NODEMANAGER"
},
{
"name" : "SUPERVISOR"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "9"
}
]
medium_host_groups = [
{
"name" : "master_1",
"components" : [
{
"name" : "AMBARI_SERVER"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "master_2",
"components" : [
{
"name" : "DRPC_SERVER"
},
{
"name" : "METRICS_COLLECTOR"
},
{
"name" : "NAMENODE"
},
{
"name" : "ZOOKEEPER_SERVER"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "master_3",
"components" : [
{
"name" : "HIVE_SERVER"
},
{
"name" : "SUPERVISOR"
},
{
"name" : "ZOOKEEPER_SERVER"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "master_4",
"components" : [
{
"name" : "APP_TIMELINE_SERVER"
},
{
"name" : "HIVE_SERVER"
},
{
"name" : "MYSQL_SERVER"
},
{
"name" : "SPARK_JOBHISTORYSERVER"
},
{
"name" : "STORM_UI_SERVER"
},
{
"name" : "ZOOKEEPER_SERVER"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "workers",
"components" : [
{
"name" : "DATANODE"
},
{
"name" : "HBASE_REGIONSERVER"
},
{
"name" : "NODEMANAGER"
},
{
"name" : "SUPERVISOR"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "99"
}
]
large_host_groups = [
{
"name" : "master_1",
"components" : [
{
"name" : "AMBARI_SERVER"
},
{
"name" : "KAFKA_BROKER"
},
{
"name" : "METRICS_COLLECTOR"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "master_2",
"components" : [
{
"name" : "METRICS_COLLECTOR"
},
{
"name" : "NAMENODE"
},
{
"name" : "NIMBUS"
},
{
"name" : "ZOOKEEPER_SERVER"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "master_3",
"components" : [
{
"name" : "DRPC_SERVER"
},
{
"name" : "FALCON_SERVER"
},
{
"name" : "HBASE_MASTER"
},
{
"name" : "HISTORYSERVER"
},
{
"name" : "HIVE_METASTORE"
},
{
"name" : "KAFKA_BROKER"
},
{
"name" : "OOZIE_SERVER"
},
{
"name" : "RESOURCEMANAGER"
},
{
"name" : "SECONDARY_NAMENODE"
},
{
"name" : "WEBHCAT_SERVER"
},
{
"name" : "ZOOKEEPER_SERVER"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "master_4",
"components" : [
{
"name" : "HIVE_METASTORE"
},
{
"name" : "MYSQL_SERVER"
},
{
"name" : "SECONDARY_NAMENODE"
},
{
"name" : "SPARK_JOBHISTORYSERVER"
},
{
"name" : "ZOOKEEPER_SERVER"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "master_5",
"components" : [
{
"name" : "NODEMANAGER"
},
{
"name" : "OOZIE_SERVER"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "master_6",
"components" : [
{
"name" : "RESOURCEMANAGER"
},
{
"name" : "WEBHCAT_SERVER"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "master_7",
"components" : [
{
"name" : "HBASE_MASTER"
},
{
"name" : "HISTORYSERVER"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "master_8",
"components" : [
{
"name" : "APP_TIMELINE_SERVER"
},
{
"name" : "FALCON_SERVER"
},
{
"name" : "NIMBUS"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "1"
},
{
"name" : "workers",
"components" : [
{
"name" : "DATANODE"
},
{
"name" : "HBASE_REGIONSERVER"
},
{
"name" : "NODEMANAGER"
},
{
"name" : "FALCON_CLIENT"
},
{
"name" : "HBASE_CLIENT"
},
{
"name" : "HCAT"
},
{
"name" : "HDFS_CLIENT"
},
{
"name" : "HIVE_CLIENT"
},
{
"name" : "MAPREDUCE2_CLIENT"
},
{
"name" : "METRICS_MONITOR"
},
{
"name" : "OOZIE_CLIENT"
},
{
"name" : "PIG"
},
{
"name" : "SLIDER"
},
{
"name" : "SPARK_CLIENT"
},
{
"name" : "SQOOP"
},
{
"name" : "TEZ_CLIENT"
},
{
"name" : "YARN_CLIENT"
},
{
"name" : "ZOOKEEPER_CLIENT"
}
],
"cardinality" : "200"
}
]
    if scenario_id == 'evaluation':
        host_groups = evaluation_host_groups
    elif scenario_id == 'small':
        host_groups = small_host_groups
    elif scenario_id == 'medium':
        host_groups = medium_host_groups
    elif scenario_id == 'large':
        host_groups = large_host_groups
    else:
        # Fall back to the small layout for unrecognized scenario ids.
        host_groups = small_host_groups
evaluation_configurations = [
{
"ams-hbase-env" : {
"properties" : {
"hbase_master_heapsize" : "512m",
"hbase_regionserver_heapsize" : "512m",
"hbase_regionserver_xmn_max" : "256m",
"hbase_regionserver_xmn_ratio" : "0.2"
}
}
},
{
"capacity-scheduler" : {
"yarn.scheduler.capacity.root.default.maximum-am-resource-percent" : "0.5",
"yarn.scheduler.capacity.maximum-am-resource-percent" : "0.5"
}
},
{
"cluster-env": {
"cluster_name": "sandbox",
"smokeuser": "ambari-qa",
"user_group": "hadoop",
"security_enabled": "false"
}
},
{
"core-site" : {
"hadoop.proxyuser.hue.hosts" : "*",
"hadoop.proxyuser.hue.groups" : "*",
"hadoop.proxyuser.root.hosts" : "*",
"hadoop.proxyuser.root.groups" : "*",
"hadoop.proxyuser.oozie.hosts" : "*",
"hadoop.proxyuser.oozie.groups" : "*",
"hadoop.proxyuser.hcat.hosts" : "*",
"hadoop.proxyuser.hcat.groups" : "*"
}
},
{
"hadoop-env": {
"dtnode_heapsize" : "250",
"hadoop_heapsize" : "250",
"namenode_heapsize" : "250",
"namenode_opt_newsize": "50",
"namenode_opt_maxnewsize": "100"
}
},
{
"hbase-site" : {
"hbase.security.authorization": "true",
"hbase.rpc.engine": "org.apache.hadoop.hbase.ipc.SecureRpcEngine",
"hbase_master_heapsize": "250",
"hbase_regionserver_heapsize": "250",
"hbase.rpc.protection": "PRIVACY"
}
},
{
"hdfs-site" : {
"dfs.block.size" : "34217472",
"dfs.replication" : "1",
"dfs.namenode.accesstime.precision" : "3600000",
"dfs.nfs3.dump.dir" : "/tmp/.hdfs-nfs",
"dfs.nfs.exports.allowed.hosts" : "* rw",
"dfs.datanode.max.xcievers" : "1024",
"dfs.block.access.token.enable" : "false",
"dfs.datanode.data.dir": "/disks/0/hadoop/hdfs/data,/disks/1/hadoop/hdfs/data,/disks/2/hadoop/hdfs/data,/disks/3/hadoop/hdfs/data,/disks/4/hadoop/hdfs/data,/disks/5/hadoop/hdfs/data,/disks/6/hadoop/hdfs/data,/disks/7/hadoop/hdfs/data,/disks/8/hadoop/hdfs/data,/disks/9/hadoop/hdfs/data,/disks/10/hadoop/hdfs/data,/disks/11/hadoop/hdfs/data,/disks/12/hadoop/hdfs/data,/disks/13/hadoop/hdfs/data,/disks/14/hadoop/hdfs/data,/disks/15/hadoop/hdfs/data",
"dfs.namenode.checkpoint.dir": "/disks/0/hadoop/hdfs/namesecondary",
"dfs.namenode.name.dir": "/disks/0/hadoop/hdfs/namenode,/disks/1/hadoop/hdfs/namenode,/disks/2/hadoop/hdfs/namenode,/disks/3/hadoop/hdfs/namenode,/disks/4/hadoop/hdfs/namenode,/disks/5/hadoop/hdfs/namenode,/disks/6/hadoop/hdfs/namenode,/disks/7/hadoop/hdfs/namenode",
"dfs.datanode.failed.volumes.tolerated": "6"
}
},
{
"global": {
"oozie_data_dir": "/disks/0/hadoop/oozie/data",
"zk_data_dir": "/disks/0/hadoop/zookeeper",
"falcon.embeddedmq.data": "/disks/0/hadoop/falcon/embeddedmq/data",
"falcon_local_dir": "/disks/0/hadoop/falcon",
"namenode_heapsize" : "16384m"
}
},
{
"hive-site" : {
"javax.jdo.option.ConnectionPassword" : "hive",
"hive.tez.container.size" : "250",
"hive.tez.java.opts" : "-server -Xmx200m -Djava.net.preferIPv4Stack=true",
"hive.heapsize" : "250",
"hive.users.in.admin.role" : "hue,hive",
"hive_metastore_user_passwd" : "hive",
"hive.server2.enable.impersonation": "true",
"hive.compactor.check.interval": "300s",
"hive.compactor.initiator.on": "true",
"hive.compactor.worker.timeout": "86400s",
"hive.enforce.bucketing": "true",
"hive.support.concurrency": "true",
"hive.exec.dynamic.partition.mode": "nonstrict",
"hive.server2.enable.doAs": "true",
"hive.txn.manager": "org.apache.hadoop.hive.ql.lockmgr.DbTxnManager",
"hive.txn.max.open.batch": "1000",
"hive.txn.timeout": "300",
"hive.security.authorization.enabled": "false",
"hive.users.in.admin.role": "hue,hive",
"hive.metastore.uris" : "thrift://%HOSTGROUP::master_2%:9083"
}
},
{
"mapred-env": {
"jobhistory_heapsize" : "250"
}
},
{
"mapred-site" : {
"mapreduce.map.memory.mb" : "250",
"mapreduce.reduce.memory.mb" : "250",
"mapreduce.task.io.sort.mb" : "64",
"yarn.app.mapreduce.am.resource.mb" : "250",
"yarn.app.mapreduce.am.command-opts" : "-Xmx200m",
"mapred.job.reduce.memory.mb" : "250",
"mapred.child.java.opts" : "-Xmx200m",
"mapred.job.map.memory.mb" : "250",
"io.sort.mb" : "64",
"mapreduce.map.java.opts" : "-Xmx200m",
"mapreduce.reduce.java.opts" : "-Xmx200m"
}
},
{
"oozie-site" : {
"oozie.service.ProxyUserService.proxyuser.hue.hosts" : "*",
"oozie.service.ProxyUserService.proxyuser.hue.groups" : "*",
"oozie.service.ProxyUserService.proxyuser.falcon.hosts": "*",
"oozie.service.ProxyUserService.proxyuser.falcon.groups": "*",
"oozie.service.JPAService.jdbc.password" : "oozie"
}
},
{
"storm-site" : {
"logviewer.port" : 8005,
"nimbus.childopts" : "-Xmx220m -javaagent:/usr/hdp/current/storm-client/contrib/storm-jmxetric/lib/jmxetric-1.0.4.jar=host=sandbox.hortonworks.com,port=8649,wireformat31x=true,mode=multicast,config=/usr/hdp/current/storm-client/contrib/storm-jmxetric/conf/jmxetric-conf.xml,process=Nimbus_JVM",
"ui.childopts" : "-Xmx220m",
"drpc.childopts" : "-Xmx220m"
}
},
{
"tez-site" : {
"tez.am.java.opts" : "-server -Xmx200m -Djava.net.preferIPv4Stack=true -XX:+UseNUMA -XX:+UseParallelGC",
"tez.am.resource.memory.mb" : "250",
"tez.dag.am.resource.memory.mb" : "250",
"yarn.app.mapreduce.am.command-opts" : "-Xmx200m"
}
},
{
"webhcat-site" : {
"webhcat.proxyuser.hue.hosts" : "*",
"webhcat.proxyuser.hue.groups" : "*",
"webhcat.proxyuser.hcat.hosts" : "*",
"webhcat.proxyuser.hcat.groups" : "*",
"templeton.hive.properties" : "hive.metastore.local=false,hive.metastore.uris=thrift://sandbox.hortonworks.com:9083,hive.metastore.sasl.enabled=false,hive.metastore.execute.setugi=true,hive.metastore.warehouse.dir=/apps/hive/warehouse"
}
},
{
"yarn-env": {
"apptimelineserver_heapsize" : "250",
"resourcemanager_heapsize" : "250",
"nodemanager_heapsize" : "250",
"yarn_heapsize" : "250"
}
},
{
"yarn-site" : {
"yarn.nodemanager.resource.memory-mb": "2250",
"yarn.nodemanager.vmem-pmem-ratio" : "10",
"yarn.scheduler.minimum-allocation-mb" : "250",
"yarn.scheduler.maximum-allocation-mb" : "2250",
"yarn.nodemanager.pmem-check-enabled" : "false",
"yarn.acl.enable" : "false",
"yarn.resourcemanager.webapp.proxyuser.hcat.groups" : "*",
"yarn.resourcemanager.webapp.proxyuser.hcat.hosts" : "*",
"yarn.resourcemanager.webapp.proxyuser.oozie.groups" : "*",
"yarn.resourcemanager.webapp.proxyuser.oozie.hosts" : "*"
}
}
]
standard_configurations = [
{
"ams-hbase-env" : {
"properties" : {
"hbase_master_heapsize" : "512m",
"hbase_regionserver_heapsize" : "512m",
"hbase_regionserver_xmn_max" : "256m",
"hbase_regionserver_xmn_ratio" : "0.2"
}
}
},
{
"capacity-scheduler" : {
"yarn.scheduler.capacity.root.default.maximum-am-resource-percent" : "0.5",
"yarn.scheduler.capacity.maximum-am-resource-percent" : "0.5"
}
},
{
"cluster-env": {
"cluster_name": "hdp",
"smokeuser": "ambari-qa",
"user_group": "hadoop",
"security_enabled": "false"
}
},
{
"core-site" : {
"hadoop.proxyuser.hue.hosts" : "*",
"hadoop.proxyuser.hue.groups" : "*",
"hadoop.proxyuser.root.hosts" : "*",
"hadoop.proxyuser.root.groups" : "*",
"hadoop.proxyuser.oozie.hosts" : "*",
"hadoop.proxyuser.oozie.groups" : "*",
"hadoop.proxyuser.hcat.hosts" : "*",
"hadoop.proxyuser.hcat.groups" : "*"
}
},
{
"hadoop-env": {
"dtnode_heapsize" : "250",
"hadoop_heapsize" : "250",
"namenode_heapsize" : "250",
"namenode_opt_newsize": "50",
"namenode_opt_maxnewsize": "100"
}
},
{
"yarn-site": {
"yarn.nodemanager.local-dirs": "/disks/0/hadoop/yarn/local,/disks/1/hadoop/yarn/local,/disks/2/hadoop/yarn/local,/disks/3/hadoop/yarn/local,/disks/4/hadoop/yarn/local,/disks/5/hadoop/yarn/local,/disks/6/hadoop/yarn/local,/disks/7/hadoop/yarn/local",
"yarn.nodemanager.log-dirs": "/disks/0/hadoop/yarn/log,/disks/1/hadoop/yarn/log,/disks/2/hadoop/yarn/log,/disks/3/hadoop/yarn/log,/disks/4/hadoop/yarn/log,/disks/5/hadoop/yarn/log,/disks/6/hadoop/yarn/log,/disks/7/hadoop/yarn/log,/disks/8/hadoop/yarn/log,/disks/9/hadoop/yarn/log,/disks/10/hadoop/yarn/log,/disks/11/hadoop/yarn/log,/disks/12/hadoop/yarn/log,/disks/13/hadoop/yarn/log,/disks/14/hadoop/yarn/log,/disks/15/hadoop/yarn/log",
"yarn.timeline-service.leveldb-timeline-store.path": "/disks/0/hadoop/yarn/timeline",
"yarn.nodemanager.resource.memory-mb" : "32768",
"yarn.scheduler.maximum-allocation-mb" : "32768",
"yarn.scheduler.minimum-allocation-mb" : "2048"
}
},
{
"tez-site": {
"tez.am.resource.memory.mb" : "2048",
"tez.am.java.opts" : "-server -Xmx1638m -Djava.net.preferIPv4Stack=true -XX:+UseNUMA -XX:+UseParallelGC"
}
},
{
"mapred-site": {
"mapreduce.map.java.opts" : "-Xmx1638m",
"mapreduce.map.memory.mb" : "2048",
"mapreduce.reduce.java.opts" : "-Xmx1638m",
"mapreduce.reduce.memory.mb" : "2048",
"mapreduce.task.io.sort.mb" : "819",
"yarn.app.mapreduce.am.command-opts" : "-Xmx1638m",
"yarn.app.mapreduce.am.resource.mb" : "2048"
}
},
{
"hdfs-site": {
"dfs.datanode.data.dir": "/disks/0/hadoop/hdfs/data,/disks/1/hadoop/hdfs/data,/disks/2/hadoop/hdfs/data,/disks/3/hadoop/hdfs/data,/disks/4/hadoop/hdfs/data,/disks/5/hadoop/hdfs/data,/disks/6/hadoop/hdfs/data,/disks/7/hadoop/hdfs/data,/disks/8/hadoop/hdfs/data,/disks/9/hadoop/hdfs/data,/disks/10/hadoop/hdfs/data,/disks/11/hadoop/hdfs/data,/disks/12/hadoop/hdfs/data,/disks/13/hadoop/hdfs/data,/disks/14/hadoop/hdfs/data,/disks/15/hadoop/hdfs/data",
"dfs.namenode.checkpoint.dir": "/disks/0/hadoop/hdfs/namesecondary",
"dfs.namenode.name.dir": "/disks/0/hadoop/hdfs/namenode,/disks/1/hadoop/hdfs/namenode,/disks/2/hadoop/hdfs/namenode,/disks/3/hadoop/hdfs/namenode,/disks/4/hadoop/hdfs/namenode,/disks/5/hadoop/hdfs/namenode,/disks/6/hadoop/hdfs/namenode,/disks/7/hadoop/hdfs/namenode",
"dfs.datanode.failed.volumes.tolerated": "6",
"dfs.nfs3.dump.dir" : "/tmp/.hdfs-nfs"
}
},
{
"global": {
"oozie_data_dir": "/disks/0/hadoop/oozie/data",
"zk_data_dir": "/disks/0/hadoop/zookeeper",
"falcon.embeddedmq.data": "/disks/0/hadoop/falcon/embeddedmq/data",
"falcon_local_dir": "/disks/0/hadoop/falcon",
"namenode_heapsize" : "16384m"
}
},
{
"hbase-site" : {
"hbase.security.authorization": "true",
"hbase.rpc.engine": "org.apache.hadoop.hbase.ipc.SecureRpcEngine",
"hbase_master_heapsize": "250",
"hbase_regionserver_heapsize": "250",
"hbase.rpc.protection": "PRIVACY",
"hbase.tmp.dir": "/disks/0/hadoop/hbase"
}
},
{
"storm-site": {
"storm.local.dir": "/disks/0/hadoop/storm"
}
},
{
"falcon-startup.properties": {
"*.config.store.uri": "file:///disks/0/hadoop/falcon/store"
}
},
{
"hive-site": {
"hive.auto.convert.join.noconditionaltask.size" : "716177408",
"hive.tez.container.size" : "2048",
"hive.tez.java.opts" : "-server -Xmx1638m -Djava.net.preferIPv4Stack=true -XX:NewRatio=8 -XX:+UseNUMA -XX:+UseParallelGC",
"hive.metastore.uris" : "thrift://%HOSTGROUP::master_3%:9083"
}
}
]
configurations = evaluation_configurations if scenario_id == 'evaluation' else standard_configurations
data = {
"configurations" : configurations,
"host_groups": host_groups,
"Blueprints" : {
"blueprint_name" : blueprint_name,
"stack_name" : "HDP",
"stack_version" : "2.2"
}
}
data = json.dumps(data)
request = urllib2.Request(url, data)
request.add_header('Authorization', get_ambari_auth_string())
request.add_header('X-Requested-By', 'ambari')
request.add_header('Content-Type', 'text/plain')
try:
response = urllib2.urlopen(request, timeout=request_timeout)
pprint.pprint(response.read())
except urllib2.HTTPError as e:
logger.error('Cluster deployment failed: ' + e.read())
raise e
return 'myblueprint'
def initiate_cluster_deploy(blueprint_name, cluster_id, num_masters, num_workers):
logger.info('Deploying cluster...')
url = '%s://localhost:%s/api/v1/clusters/%s' % (protocol, port, cluster_id)
if num_masters + num_workers < 4:
raise Exception('Cluster size must be 4 or greater')
data = {
"blueprint": blueprint_name,
"default_password": "admin",
"host_groups": [
]
}
for i in range(1, num_masters + 1):
data['host_groups'].append({
"name": "master_%d" % i,
"hosts": [{
"fqdn": get_fqdn(i)
}]
})
worker_hosts = []
for i in range(num_masters + 1, num_masters + num_workers + 1):
worker_hosts.append({
"fqdn": get_fqdn(i)
})
data['host_groups'].append({
"name": "workers",
"hosts": worker_hosts
})
data = json.dumps(data)
pprint.pprint('data=' + data)
request = urllib2.Request(url, data)
request.add_header('Authorization', get_ambari_auth_string())
request.add_header('X-Requested-By', 'ambari')
request.add_header('Content-Type', 'text/plain')
try:
response = urllib2.urlopen(request, timeout=120)
pprint.pprint(response.read())
except urllib2.HTTPError as e:
logger.error('Cluster deployment failed: ' + e.read())
raise e
def poll_until_cluster_deployed(cluster_id, timeout):
url = '%s://localhost:%s/api/v1/clusters/%s/requests/1?fields=Requests/progress_percent,Requests/request_status' % (protocol, port, cluster_id)
deploy_success = False
deploy_finished = False
start_time = time.time()
    logger.info('Polling until cluster deployment finishes...')
while time.time() - start_time < timeout:
request = urllib2.Request(url)
request.add_header("Authorization", get_ambari_auth_string())
try:
result = urllib2.urlopen(request, timeout=request_timeout).read()
pprint.pprint(result)
if (result is not None):
jsonResult = json.loads(result)
if jsonResult['Requests']['request_status'] == 'COMPLETED':
deploy_success = True
if int(jsonResult['Requests']['progress_percent']) == 100 or jsonResult['Requests']['request_status'] == 'FAILED':
deploy_finished = True
break
except:
logger.info('Could not poll deploy status from the server.')
time.sleep(5)
if not deploy_finished:
raise TimeoutException('Timed out while waiting for cluster deployment to finish')
elif not deploy_success:
raise Exception('Cluster deploy failed')
if action == 'bootstrap':
masters_iplist = masters_iplist.split(',')
workers_iplist = workers_iplist.split(',')
ambari_server_hostname = get_hostname(1)
all_hostnames = map((lambda i: get_hostname(i)), range(1, num_masters + num_workers + 1))
logger.info(all_hostnames)
host_ip_map = get_host_ip_map(all_hostnames)
update_etc_hosts(host_ip_map)
update_ambari_agent_ini(ambari_server_hostname)
patch_ambari_agent()
run_system_command('chkconfig ambari-agent on')
logger.info('Starting ambari-agent...')
run_system_command('ambari-agent start')
logger.info('ambari-agent started')
if is_ambari_server_host():
run_system_command('chkconfig ambari-server on')
logger.info('Running ambari-server setup...')
run_system_command('ambari-server setup -s -j /usr/jdk64/jdk1.7.0_45')
logger.info('ambari-server setup finished')
if protocol == 'https':
logger.info('Enabling HTTPS...')
enable_https()
logger.info('HTTPS enabled')
logger.info('Starting ambari-server...')
run_system_command('ambari-server start')
logger.info('ambari-server started')
try:
set_admin_password(admin_password, 60 * 2)
# set current_admin_password so that HTTP requests to Ambari start using the new user-specified password
current_admin_password = admin_password
poll_until_all_agents_registered(num_masters + num_workers, 60 * 4)
blueprint_name = create_blueprint(scenario_id)
initiate_cluster_deploy(blueprint_name, cluster_id, num_masters, num_workers)
except:
logger.error('Failed VM Bootstrap')
sys.exit(1)
else:
try:
current_admin_password = admin_password
poll_until_cluster_deployed(cluster_id, check_timeout_seconds)
except TimeoutException as e:
logger.info(e)
if report_timeout_fail:
logger.error('Failed cluster deployment')
sys.exit(1)
else:
logger.info('Cluster deployment has not completed')
sys.exit(0)
except:
logger.error('Failed cluster deployment')
sys.exit(1)
logger.info('Finished VM Bootstrap successfully')
sys.exit(0)
| mit |
CSC301H-Fall2013/JuakStore | site-packages/django/contrib/localflavor/us/forms.py | 101 | 4555 | """
USA-specific Form helpers
"""
from __future__ import absolute_import, unicode_literals
import re
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field, RegexField, Select, CharField
from django.utils.encoding import smart_text
from django.utils.translation import ugettext_lazy as _
phone_digits_re = re.compile(r'^(?:1-?)?(\d{3})[-\.]?(\d{3})[-\.]?(\d{4})$')
ssn_re = re.compile(r"^(?P<area>\d{3})[-\ ]?(?P<group>\d{2})[-\ ]?(?P<serial>\d{4})$")
class USZipCodeField(RegexField):
default_error_messages = {
'invalid': _('Enter a zip code in the format XXXXX or XXXXX-XXXX.'),
}
def __init__(self, max_length=None, min_length=None, *args, **kwargs):
super(USZipCodeField, self).__init__(r'^\d{5}(?:-\d{4})?$',
max_length, min_length, *args, **kwargs)
class USPhoneNumberField(CharField):
default_error_messages = {
'invalid': _('Phone numbers must be in XXX-XXX-XXXX format.'),
}
def clean(self, value):
super(USPhoneNumberField, self).clean(value)
if value in EMPTY_VALUES:
return ''
value = re.sub('(\(|\)|\s+)', '', smart_text(value))
m = phone_digits_re.search(value)
if m:
return '%s-%s-%s' % (m.group(1), m.group(2), m.group(3))
raise ValidationError(self.error_messages['invalid'])
class USSocialSecurityNumberField(Field):
"""
A United States Social Security number.
Checks the following rules to determine whether the number is valid:
* Conforms to the XXX-XX-XXXX format.
* No group consists entirely of zeroes.
* The leading group is not "666" (block "666" will never be allocated).
* The number is not in the promotional block 987-65-4320 through
987-65-4329, which are permanently invalid.
* The number is not one known to be invalid due to otherwise widespread
promotional use or distribution (e.g., the Woolworth's number or the
1962 promotional number).
"""
default_error_messages = {
'invalid': _('Enter a valid U.S. Social Security number in XXX-XX-XXXX format.'),
}
def clean(self, value):
super(USSocialSecurityNumberField, self).clean(value)
if value in EMPTY_VALUES:
return ''
match = re.match(ssn_re, value)
if not match:
raise ValidationError(self.error_messages['invalid'])
area, group, serial = match.groupdict()['area'], match.groupdict()['group'], match.groupdict()['serial']
# First pass: no blocks of all zeroes.
if area == '000' or \
group == '00' or \
serial == '0000':
raise ValidationError(self.error_messages['invalid'])
# Second pass: promotional and otherwise permanently invalid numbers.
if area == '666' or \
(area == '987' and group == '65' and 4320 <= int(serial) <= 4329) or \
value == '078-05-1120' or \
value == '219-09-9999':
raise ValidationError(self.error_messages['invalid'])
return '%s-%s-%s' % (area, group, serial)
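# A hedged usage sketch (illustrative, not part of the original module):
# valid SSNs come back normalized, while blocked numbers such as the
# Woolworth's number '078-05-1120' raise ValidationError.
def _demo_ssn_clean():
    return USSocialSecurityNumberField().clean('123 45 6789')  # -> '123-45-6789'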
class USStateField(Field):
"""
A form field that validates its input is a U.S. state name or abbreviation.
    It normalizes the input to the standard two-letter postal service
abbreviation for the given state.
"""
default_error_messages = {
'invalid': _('Enter a U.S. state or territory.'),
}
def clean(self, value):
from .us_states import STATES_NORMALIZED
super(USStateField, self).clean(value)
if value in EMPTY_VALUES:
return ''
try:
value = value.strip().lower()
except AttributeError:
pass
else:
try:
return STATES_NORMALIZED[value.strip().lower()]
except KeyError:
pass
raise ValidationError(self.error_messages['invalid'])
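# A hedged usage sketch (illustrative, not part of the original module):
# STATES_NORMALIZED maps many spellings to postal codes, so 'california'
# and 'CA' both clean to 'CA'.
def _demo_state_clean():
    return USStateField().clean('california')  # -> 'CA'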
class USStateSelect(Select):
"""
A Select widget that uses a list of U.S. states/territories as its choices.
"""
def __init__(self, attrs=None):
from .us_states import STATE_CHOICES
super(USStateSelect, self).__init__(attrs, choices=STATE_CHOICES)
class USPSSelect(Select):
"""
A Select widget that uses a list of US Postal Service codes as its
choices.
"""
def __init__(self, attrs=None):
from .us_states import USPS_CHOICES
super(USPSSelect, self).__init__(attrs, choices=USPS_CHOICES)
| mit |
IceCubeDev/SpaceOrNot | psycopg2/setup.py | 1 | 22843 | # setup.py - distutils packaging
#
# Copyright (C) 2003-2010 Federico Di Gregorio <[email protected]>
#
# psycopg2 is free software: you can redistribute it and/or modify it
# under the terms of the GNU Lesser General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# psycopg2 is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
# License for more details.
"""Python-PostgreSQL Database Adapter
psycopg2 is a PostgreSQL database adapter for the Python programming
language. psycopg2 was written with the aim of being very small and fast,
and stable as a rock.
psycopg2 is different from the other database adapters because it was
designed for heavily multi-threaded applications that create and destroy
lots of cursors and make a conspicuous number of concurrent INSERTs or
UPDATEs. psycopg2 also provides full asynchronous operations and support
for coroutine libraries.
"""
# note: if you are changing the list of supported Python version please fix
# the docs in install.rst and the /features/ page on the website.
classifiers = """\
Development Status :: 5 - Production/Stable
Intended Audience :: Developers
License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)
License :: OSI Approved :: Zope Public License
Programming Language :: Python
Programming Language :: Python :: 2.5
Programming Language :: Python :: 2.6
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3
Programming Language :: Python :: 3.1
Programming Language :: Python :: 3.2
Programming Language :: Python :: 3.3
Programming Language :: Python :: 3.4
Programming Language :: C
Programming Language :: SQL
Topic :: Database
Topic :: Database :: Front-Ends
Topic :: Software Development
Topic :: Software Development :: Libraries :: Python Modules
Operating System :: Microsoft :: Windows
Operating System :: Unix
"""
# Note: The setup.py must be compatible with both Python 2 and 3
import os
import sys
import re
import subprocess
from distutils.core import setup, Extension
from distutils.command.build_ext import build_ext
from distutils.sysconfig import get_python_inc
from distutils.ccompiler import get_default_compiler
from distutils.util import get_platform
try:
from distutils.command.build_py import build_py_2to3
except ImportError:
from distutils.command.build_py import build_py
else:
class build_py(build_py_2to3):
# workaround subclass for ticket #153
pass
# Configure distutils to run our custom 2to3 fixers as well
from lib2to3.refactor import get_fixers_from_package
build_py.fixer_names = get_fixers_from_package('lib2to3.fixes') \
+ [ 'fix_b' ]
sys.path.insert(0, 'scripts')
try:
import configparser
except ImportError:
import ConfigParser as configparser
# Take a look at http://www.python.org/dev/peps/pep-0386/
# for a consistent versioning pattern.
PSYCOPG_VERSION = '2.6'
version_flags = ['dt', 'dec']
PLATFORM_IS_WINDOWS = sys.platform.lower().startswith('win')
class PostgresConfig:
def __init__(self, build_ext):
self.build_ext = build_ext
self.pg_config_exe = self.build_ext.pg_config
if not self.pg_config_exe:
self.pg_config_exe = self.autodetect_pg_config_path()
if self.pg_config_exe is None:
sys.stderr.write("""\
Error: pg_config executable not found.
Please add the directory containing pg_config to the PATH
or specify the full executable path with the option:
python setup.py build_ext --pg-config /path/to/pg_config build ...
or with the pg_config option in 'setup.cfg'.
""")
sys.exit(1)
def query(self, attr_name):
"""Spawn the pg_config executable, querying for the given config
name, and return the printed value, sanitized. """
try:
pg_config_process = subprocess.Popen(
[self.pg_config_exe, "--" + attr_name],
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
except OSError:
raise Warning("Unable to find 'pg_config' file in '%s'" %
self.pg_config_exe)
pg_config_process.stdin.close()
result = pg_config_process.stdout.readline().strip()
if not result:
raise Warning(pg_config_process.stderr.readline())
if not isinstance(result, str):
result = result.decode('ascii')
return result
def find_on_path(self, exename, path_directories=None):
if not path_directories:
path_directories = os.environ['PATH'].split(os.pathsep)
for dir_name in path_directories:
fullpath = os.path.join(dir_name, exename)
if os.path.isfile(fullpath):
return fullpath
return None
def autodetect_pg_config_path(self):
"""Find and return the path to the pg_config executable."""
if PLATFORM_IS_WINDOWS:
return self.autodetect_pg_config_path_windows()
else:
return self.find_on_path('pg_config')
def autodetect_pg_config_path_windows(self):
"""Attempt several different ways of finding the pg_config
executable on Windows, and return its full path, if found."""
# This code only runs if they have not specified a pg_config option
# in the config file or via the commandline.
# First, check for pg_config.exe on the PATH, and use that if found.
pg_config_exe = self.find_on_path('pg_config.exe')
if pg_config_exe:
return pg_config_exe
# Now, try looking in the Windows Registry to find a PostgreSQL
# installation, and infer the path from that.
pg_config_exe = self._get_pg_config_from_registry()
if pg_config_exe:
return pg_config_exe
return None
def _get_pg_config_from_registry(self):
try:
import winreg
except ImportError:
import _winreg as winreg
reg = winreg.ConnectRegistry(None, winreg.HKEY_LOCAL_MACHINE)
try:
pg_inst_list_key = winreg.OpenKey(reg,
'SOFTWARE\\PostgreSQL\\Installations')
except EnvironmentError:
# No PostgreSQL installation, as best as we can tell.
return None
try:
# Determine the name of the first subkey, if any:
try:
first_sub_key_name = winreg.EnumKey(pg_inst_list_key, 0)
except EnvironmentError:
return None
pg_first_inst_key = winreg.OpenKey(reg,
'SOFTWARE\\PostgreSQL\\Installations\\'
+ first_sub_key_name)
try:
pg_inst_base_dir = winreg.QueryValueEx(
pg_first_inst_key, 'Base Directory')[0]
finally:
winreg.CloseKey(pg_first_inst_key)
finally:
winreg.CloseKey(pg_inst_list_key)
pg_config_path = os.path.join(
pg_inst_base_dir, 'bin', 'pg_config.exe')
if not os.path.exists(pg_config_path):
return None
# Support unicode paths, if this version of Python provides the
# necessary infrastructure:
if sys.version_info[0] < 3 \
and hasattr(sys, 'getfilesystemencoding'):
pg_config_path = pg_config_path.encode(
sys.getfilesystemencoding())
return pg_config_path
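# A hedged sketch (illustrative, not part of the original build script):
# what PostgresConfig.query() boils down to for one attribute, assuming a
# 'pg_config' executable is reachable on the PATH.
def _demo_pg_config_query(attr_name='libdir'):
    proc = subprocess.Popen(['pg_config', '--' + attr_name],
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    result = proc.stdout.readline().strip()
    if not isinstance(result, str):
        result = result.decode('ascii')
    return result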
class psycopg_build_ext(build_ext):
"""Conditionally complement the setup.cfg options file.
This class configures the include_dirs, library_dirs, libraries
options as required by the system. Most of the configuration happens
in finalize_options() method.
If you want to set up the build step for a peculiar platform, add a
method finalize_PLAT(), where PLAT matches your sys.platform.
"""
user_options = build_ext.user_options[:]
user_options.extend([
('use-pydatetime', None,
"Use Python datatime objects for date and time representation."),
('pg-config=', None,
"The name of the pg_config binary and/or full path to find it"),
('have-ssl', None,
"Compile with OpenSSL built PostgreSQL libraries (Windows only)."),
('static-libpq', None,
"Statically link the PostgreSQL client library"),
])
boolean_options = build_ext.boolean_options[:]
boolean_options.extend(('use-pydatetime', 'have-ssl', 'static-libpq'))
def __init__(self, *args, **kwargs):
build_ext.__init__(self, *args, **kwargs)
def initialize_options(self):
build_ext.initialize_options(self)
self.use_pg_dll = 1
self.pgdir = None
self.mx_include_dir = None
self.use_pydatetime = 1
self.have_ssl = have_ssl
self.static_libpq = static_libpq
self.pg_config = None
def compiler_is_msvc(self):
return self.get_compiler_name().lower().startswith('msvc')
def compiler_is_mingw(self):
return self.get_compiler_name().lower().startswith('mingw')
def get_compiler_name(self):
"""Return the name of the C compiler used to compile extensions.
If a compiler was not explicitly set (on the command line, for
example), fall back on the default compiler.
"""
if self.compiler:
# distutils doesn't keep the type of self.compiler uniform; we
# compensate:
if isinstance(self.compiler, str):
name = self.compiler
else:
name = self.compiler.compiler_type
else:
name = get_default_compiler()
return name
def get_export_symbols(self, extension):
# Fix MSVC seeing two of the same export symbols.
if self.compiler_is_msvc():
return []
else:
return build_ext.get_export_symbols(self, extension)
def build_extension(self, extension):
build_ext.build_extension(self, extension)
sysVer = sys.version_info[:2]
# For Python versions that use MSVC compiler 2008, re-insert the
# manifest into the resulting .pyd file.
if self.compiler_is_msvc() and sysVer not in ((2, 4), (2, 5)):
platform = get_platform()
# Default to the x86 manifest
manifest = '_psycopg.vc9.x86.manifest'
if platform == 'win-amd64':
manifest = '_psycopg.vc9.amd64.manifest'
try:
ext_path = self.get_ext_fullpath(extension.name)
except AttributeError:
ext_path = os.path.join(self.build_lib,
'psycopg2', '_psycopg.pyd')
self.compiler.spawn(
['mt.exe', '-nologo', '-manifest',
os.path.join('psycopg', manifest),
'-outputresource:%s;2' % ext_path])
def finalize_win32(self):
"""Finalize build system configuration on win32 platform."""
sysVer = sys.version_info[:2]
# Add compiler-specific arguments:
extra_compiler_args = []
if self.compiler_is_mingw():
# Default MinGW compilation of Python extensions on Windows uses
# only -O:
extra_compiler_args.append('-O3')
# GCC-compiled Python on non-Windows platforms is built with strict
# aliasing disabled, but that must be done explicitly on Windows to
# avoid large numbers of warnings for perfectly idiomatic Python C
# API code.
extra_compiler_args.append('-fno-strict-aliasing')
# Force correct C runtime library linkage:
if sysVer <= (2, 3):
# Yes: 'msvcr60', rather than 'msvcrt', is the correct value
# on the line below:
self.libraries.append('msvcr60')
elif sysVer in ((2, 4), (2, 5)):
self.libraries.append('msvcr71')
# Beyond Python 2.5, we take our chances on the default C runtime
# library, because we don't know what compiler those future
# versions of Python will use.
for extension in ext: # ext is a global list of Extension objects
extension.extra_compile_args.extend(extra_compiler_args)
# End of add-compiler-specific arguments section.
self.libraries.append("ws2_32")
self.libraries.append("advapi32")
if self.compiler_is_msvc():
# MSVC requires an explicit "libpq"
self.libraries.remove("pq")
self.libraries.append("secur32")
self.libraries.append("libpq")
self.libraries.append("shfolder")
for path in self.library_dirs:
if os.path.isfile(os.path.join(path, "ms", "libpq.lib")):
self.library_dirs.append(os.path.join(path, "ms"))
break
if self.have_ssl:
self.libraries.append("libeay32")
self.libraries.append("ssleay32")
self.libraries.append("crypt32")
self.libraries.append("user32")
self.libraries.append("gdi32")
def finalize_darwin(self):
"""Finalize build system configuration on darwin platform."""
self.libraries.append('ssl')
self.libraries.append('crypto')
def finalize_linux(self):
"""Finalize build system configuration on GNU/Linux platform."""
# tell piro that GCC is fine and dandy, but not so MS compilers
for extension in self.extensions:
extension.extra_compile_args.append(
'-Wdeclaration-after-statement')
finalize_linux2 = finalize_linux
finalize_linux3 = finalize_linux
def finalize_options(self):
"""Complete the build system configuration."""
build_ext.finalize_options(self)
pg_config_helper = PostgresConfig(self)
self.include_dirs.append(".")
if self.static_libpq:
if not getattr(self, 'link_objects', None):
self.link_objects = []
self.link_objects.append(
os.path.join(pg_config_helper.query("libdir"), "libpq.a"))
else:
self.libraries.append("pq")
try:
self.library_dirs.append(pg_config_helper.query("libdir"))
self.include_dirs.append(pg_config_helper.query("includedir"))
self.include_dirs.append(pg_config_helper.query("includedir-server"))
try:
# Here we take a conservative approach: we suppose that
# *at least* PostgreSQL 7.4 is available (this is the only
# 7.x series supported by psycopg 2)
pgversion = pg_config_helper.query("version").split()[1]
except:
pgversion = "7.4.0"
verre = re.compile(
r"(\d+)\.(\d+)(?:(?:\.(\d+))|(devel|(alpha|beta|rc)\d+))")
m = verre.match(pgversion)
if m:
pgmajor, pgminor, pgpatch = m.group(1, 2, 3)
if pgpatch is None or not pgpatch.isdigit():
pgpatch = 0
pgmajor = int(pgmajor)
pgminor = int(pgminor)
pgpatch = int(pgpatch)
else:
sys.stderr.write(
"Error: could not determine PostgreSQL version from '%s'"
% pgversion)
sys.exit(1)
define_macros.append(("PG_VERSION_HEX", "0x%02X%02X%02X" %
(pgmajor, pgminor, pgpatch)))
# enable lo64 if libpq >= 9.3 and Python 64 bits
if (pgmajor, pgminor) >= (9, 3) and is_py_64():
define_macros.append(("HAVE_LO64", "1"))
# Inject the flag in the version string already packed up
# because we didn't know the version before.
# With distutils everything is complicated.
for i, t in enumerate(define_macros):
if t[0] == 'PSYCOPG_VERSION':
n = t[1].find(')')
if n > 0:
define_macros[i] = (
t[0], t[1][:n] + ' lo64' + t[1][n:])
except Warning:
w = sys.exc_info()[1] # work around py 2/3 different syntax
sys.stderr.write("Error: %s\n" % w)
sys.exit(1)
if hasattr(self, "finalize_" + sys.platform):
getattr(self, "finalize_" + sys.platform)()
def is_py_64():
# sys.maxint not available since Py 3.1;
# sys.maxsize not available before Py 2.6;
# this is portable at least between Py 2.4 and 3.4.
import struct
return struct.calcsize("P") > 4
# let's start with macro definitions (the ones not already in setup.cfg)
define_macros = []
include_dirs = []
# gather information to build the extension module
ext = []
data_files = []
# sources
sources = [
'psycopgmodule.c',
'green.c', 'pqpath.c', 'utils.c', 'bytes_format.c',
'connection_int.c', 'connection_type.c',
'cursor_int.c', 'cursor_type.c',
'diagnostics_type.c', 'error_type.c',
'lobject_int.c', 'lobject_type.c',
'notify_type.c', 'xid_type.c',
'adapter_asis.c', 'adapter_binary.c', 'adapter_datetime.c',
'adapter_list.c', 'adapter_pboolean.c', 'adapter_pdecimal.c',
'adapter_pint.c', 'adapter_pfloat.c', 'adapter_qstring.c',
'microprotocols.c', 'microprotocols_proto.c',
'typecast.c',
]
depends = [
# headers
'config.h', 'pgtypes.h', 'psycopg.h', 'python.h', 'connection.h',
'cursor.h', 'diagnostics.h', 'error.h', 'green.h', 'lobject.h',
'notify.h', 'pqpath.h', 'xid.h',
'adapter_asis.h', 'adapter_binary.h', 'adapter_datetime.h',
'adapter_list.h', 'adapter_pboolean.h', 'adapter_pdecimal.h',
'adapter_pint.h', 'adapter_pfloat.h', 'adapter_qstring.h',
'microprotocols.h', 'microprotocols_proto.h',
'typecast.h', 'typecast_binary.h',
# included sources
'typecast_array.c', 'typecast_basic.c', 'typecast_binary.c',
'typecast_builtins.c', 'typecast_datetime.c',
]
parser = configparser.ConfigParser()
parser.read('setup.cfg')
# Choose a datetime module
have_pydatetime = True
have_mxdatetime = False
use_pydatetime = int(parser.get('build_ext', 'use_pydatetime'))
# check for mx package
if parser.has_option('build_ext', 'mx_include_dir'):
mxincludedir = parser.get('build_ext', 'mx_include_dir')
else:
mxincludedir = os.path.join(get_python_inc(plat_specific=1), "mx")
if os.path.exists(mxincludedir):
# Build the support for mx: we will check at runtime if it can be imported
include_dirs.append(mxincludedir)
define_macros.append(('HAVE_MXDATETIME', '1'))
sources.append('adapter_mxdatetime.c')
depends.extend(['adapter_mxdatetime.h', 'typecast_mxdatetime.c'])
have_mxdatetime = True
version_flags.append('mx')
# now decide which package will be the default for date/time typecasts
if have_pydatetime and (use_pydatetime or not have_mxdatetime):
define_macros.append(('PSYCOPG_DEFAULT_PYDATETIME', '1'))
elif have_mxdatetime:
define_macros.append(('PSYCOPG_DEFAULT_MXDATETIME', '1'))
else:
error_message = """\
psycopg requires a datetime module:
mx.DateTime module not found
python datetime module not found
Note that psycopg needs the module headers and not just the module
itself. If you installed Python or mx.DateTime from a binary package
you probably need to install its companion -dev or -devel package."""
for line in error_message.split("\n"):
sys.stderr.write("error: " + line)
sys.exit(1)
# generate a nice version string to avoid confusion when users report bugs
version_flags.append('pq3') # no more a choice
version_flags.append('ext') # no more a choice
if version_flags:
PSYCOPG_VERSION_EX = PSYCOPG_VERSION + " (%s)" % ' '.join(version_flags)
else:
PSYCOPG_VERSION_EX = PSYCOPG_VERSION
if not PLATFORM_IS_WINDOWS:
define_macros.append(('PSYCOPG_VERSION', '"' + PSYCOPG_VERSION_EX + '"'))
else:
define_macros.append(('PSYCOPG_VERSION', '\\"' + PSYCOPG_VERSION_EX + '\\"'))
if parser.has_option('build_ext', 'have_ssl'):
have_ssl = int(parser.get('build_ext', 'have_ssl'))
else:
have_ssl = 0
if parser.has_option('build_ext', 'static_libpq'):
static_libpq = int(parser.get('build_ext', 'static_libpq'))
else:
static_libpq = 0
# And now... explicitly add the defines from the .cfg files.
# Looks like setuptools or some other cog doesn't add them to the command line
# when called e.g. with "pip -e git+url". This can result in duplicate
# declarations on the command line, which I hope is not a problem.
for define in parser.get('build_ext', 'define').split(','):
if define:
define_macros.append((define, '1'))
# build the extension
sources = [ os.path.join('psycopg', x) for x in sources]
depends = [ os.path.join('psycopg', x) for x in depends]
ext.append(Extension("psycopg2._psycopg", sources,
define_macros=define_macros,
include_dirs=include_dirs,
depends=depends,
undef_macros=[]))
# Compute the direct download url.
# Note that the current package installation programs are stupidly intelligent
# and will try to install a beta if they find a link in the homepage instead of
# using these pretty metadata. But that's their problem, not ours.
download_url = (
"http://initd.org/psycopg/tarballs/PSYCOPG-%s/psycopg2-%s.tar.gz"
% ('-'.join(PSYCOPG_VERSION.split('.')[:2]), PSYCOPG_VERSION))
try:
f = open("README.rst")
readme = f.read()
f.close()
except:
print("failed to read readme: ignoring...")
readme = __doc__
setup(name="psycopg2",
version=PSYCOPG_VERSION,
maintainer="Federico Di Gregorio",
maintainer_email="[email protected]",
author="Federico Di Gregorio",
author_email="[email protected]",
url="http://initd.org/psycopg/",
download_url=download_url,
license="LGPL with exceptions or ZPL",
platforms=["any"],
description=readme.split("\n")[0],
long_description="\n".join(readme.split("\n")[2:]).lstrip(),
classifiers=[x for x in classifiers.split("\n") if x],
data_files=data_files,
package_dir={'psycopg2': 'lib', 'psycopg2.tests': 'tests'},
packages=['psycopg2', 'psycopg2.tests'],
cmdclass={
'build_ext': psycopg_build_ext,
'build_py': build_py, },
ext_modules=ext)
| apache-2.0 |
drajen/resthome | app/smartthings.py | 1 | 5032 | #
# smartthings.py
#
# David Janes
# IOTDB.org
# 2014-01-31
#
# Demonstrate how to use the SmartThings API from Python.
#
# See also:
# Example App explanation:
# http://build.smartthings.com/blog/tutorial-creating-a-custom-rest-smartapp-endpoint/
#
# Example PHP code:
# https://www.dropbox.com/s/7m7gmlr9q3u7rmk/exampleOauth.php
#
# Example "Groovy"/SMART code (this is the app we tap into)
# https://www.dropbox.com/s/lohzziy2wjlrppb/endpointExample.groovy
#
import sys
import requests
import pprint
import json
## import httplib
## httplib.HTTPConnection.debuglevel = 1
from optparse import OptionParser
try:
import iotdb_log
except:
class iotdb_log(object):
@staticmethod
def log(**ad):
pprint.pprint(ad)
class SmartThings(object):
def __init__(self, verbose=True):
self.verbose = verbose
self.std = {}
self.endpointd = {}
self.deviceds = {}
def load_settings(self, filename="smartthings.json"):
"""Load the JSON Settings file.
See the documentation, but briefly you can
get it from here:
https://iotdb.org/playground/oauthorize
"""
with open(filename) as fin:
self.std = json.load(fin)
def request_endpoints(self):
"""Get the endpoints exposed by the SmartThings App
The first command you need to call
"""
endpoints_url = self.std["api"]
endpoints_paramd = {
"access_token": self.std["access_token"]
}
endpoints_response = requests.get(url=endpoints_url, params=endpoints_paramd)
self.endpointd = endpoints_response.json()[0]
if self.verbose: iotdb_log.log(
"endpoints",
endpoints_url=endpoints_url,
endpoints_paramd=endpoints_paramd,
resultds=self.endpointd,
)
def request_devices(self, device_type):
"""List the devices"""
devices_url = "https://graph.api.smartthings.com%s/%s" % ( self.endpointd["url"], device_type, )
devices_paramd = {
}
devices_headerd = {
"Authorization": "Bearer %s" % self.std["access_token"],
}
devices_response = requests.get(url=devices_url, params=devices_paramd, headers=devices_headerd)
self.deviceds = devices_response.json()
for switchd in self.deviceds:
switchd['url'] = "%s/%s" % ( devices_url, switchd['id'], )
if self.verbose: iotdb_log.log(
"devices",
url=devices_url,
paramd=devices_paramd,
deviceds=self.deviceds,
)
return self.deviceds
def device_request(self, deviced, requestd):
"""Send a request the named device"""
command_url = deviced['url']
command_paramd = {
"access_token": self.std["access_token"]
}
command_headerd = {}
command_response = requests.put(
url=command_url,
params=command_paramd,
headers=command_headerd,
data=json.dumps(requestd)
)
def device_types(self):
return dtypes
dtypes = [
"switch", "motion", "presence", "acceleration", "contact",
"temperature", "battery", "acceleration", "threeAxis", "humidity"
]
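# A hedged usage sketch (illustrative, not part of the original script):
# the typical call sequence for SmartThings, mirroring __main__ below.
# Assumes a valid smartthings.json with 'api' and 'access_token' entries.
def _demo_toggle_first_switch():
    st = SmartThings(verbose=False)
    st.load_settings()
    st.request_endpoints()
    devices = st.request_devices('switch')
    if devices:
        st.device_request(devices[0], {'switch': 1})  # turn the switch on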
if __name__ == '__main__':
parser = OptionParser()
parser.add_option(
"", "--debug",
default = False,
action = "store_true",
dest = "debug",
help = "",
)
parser.add_option(
"", "--verbose",
default = False,
action = "store_true",
dest = "verbose",
help = "",
)
parser.add_option(
"", "--type",
dest = "device_type",
help = "The device type (required), one of %s" % ", ".join(dtypes)
)
parser.add_option(
"", "--id",
dest = "device_id",
help = "The ID or Name of the device to manipulate"
)
parser.add_option(
"", "--request",
dest = "request",
help = "Something to do, e.g. 'switch=1', 'switch=0'"
)
(options, args) = parser.parse_args()
if not options.device_type:
print >> sys.stderr, "%s: --type <%s>" % ( sys.argv[0], "|".join(dtypes))
parser.print_help(sys.stderr)
sys.exit(1)
st = SmartThings(verbose=options.verbose)
st.load_settings()
st.request_endpoints()
ds = st.request_devices(options.device_type)
if options.device_id:
ds = filter(lambda d: options.device_id in [ d.get("id"), d.get("label"), ], ds)
if options.request:
key, value = options.request.split('=', 2)
try:
value = int(value)
except ValueError:
pass
requestd = {
key: value
}
for d in ds:
iotdb_log.log(device=d, request=requestd)
st.device_request(d, requestd)
else:
print json.dumps(ds, indent=2, sort_keys=True)
| bsd-3-clause |
overtherain/scriptfile | software/googleAppEngine/google/appengine/ext/ndb/tasklets.py | 4 | 36759 | """A tasklet decorator.
Tasklets are a way to write concurrently running functions without
threads; tasklets are executed by an event loop and can suspend
themselves blocking for I/O or some other operation using a yield
statement. The notion of a blocking operation is abstracted into the
Future class, but a tasklet may also yield an RPC in order to wait for
that RPC to complete.
The @tasklet decorator wraps generator function so that when it is
called, a Future is returned while the generator is executed by the
event loop. Within the tasklet, any yield of a Future waits for and
returns the Future's result. For example:
@tasklet
def foo():
a = yield <some Future>
b = yield <another Future>
raise Return(a + b)
def main():
f = foo()
x = f.get_result()
print x
Note that blocking until the Future's result is available using
get_result() is somewhat inefficient (though not vastly -- it is not
busy-waiting). In most cases such code should be rewritten as a tasklet
instead:
@tasklet
def main_tasklet():
f = foo()
x = yield f
print x
Calling a tasklet automatically schedules it with the event loop:
def main():
f = main_tasklet()
eventloop.run() # Run until no tasklets left to do
f.done() # Returns True
As a special feature, if the wrapped function is not a generator
function, its return value is returned via the Future. This makes the
following two equivalent:
@tasklet
def foo():
return 42
@tasklet
def foo():
if False: yield # The presence of 'yield' makes foo a generator
raise Return(42) # Or, after PEP 380, return 42
This feature (inspired by Monocle) is handy in case you are
implementing an interface that expects tasklets but you have no need to
suspend -- there's no need to insert a dummy yield in order to make
the tasklet into a generator.
"""
import collections
import logging
import os
import sys
import types
from .google_imports import apiproxy_stub_map
from .google_imports import apiproxy_rpc
from .google_imports import datastore
from .google_imports import datastore_errors
from .google_imports import datastore_rpc
from .google_imports import namespace_manager
from . import eventloop
from . import utils
__all__ = ['Return', 'tasklet', 'synctasklet', 'toplevel', 'sleep',
'add_flow_exception', 'get_return_value',
'get_context', 'set_context',
'make_default_context', 'make_context',
'Future', 'MultiFuture', 'QueueFuture', 'SerialQueueFuture',
'ReducingFuture',
]
_logging_debug = utils.logging_debug
def _is_generator(obj):
"""Helper to test for a generator object.
NOTE: This tests for the (iterable) object returned by calling a
generator function, not for a generator function.
"""
return isinstance(obj, types.GeneratorType)
class _State(utils.threading_local):
"""Hold thread-local state."""
current_context = None
def __init__(self):
super(_State, self).__init__()
self.all_pending = set()
def add_pending(self, fut):
_logging_debug('all_pending: add %s', fut)
self.all_pending.add(fut)
def remove_pending(self, fut, status='success'):
if fut in self.all_pending:
_logging_debug('all_pending: %s: remove %s', status, fut)
self.all_pending.remove(fut)
else:
_logging_debug('all_pending: %s: not found %s', status, fut)
def clear_all_pending(self):
if self.all_pending:
logging.info('all_pending: clear %s', self.all_pending)
self.all_pending.clear()
else:
_logging_debug('all_pending: clear no-op')
def dump_all_pending(self, verbose=False):
pending = []
for fut in self.all_pending:
if verbose:
line = fut.dump() + ('\n' + '-'*40)
else:
line = fut.dump_stack()
pending.append(line)
return '\n'.join(pending)
_state = _State()
# Tuple of exceptions that should not be logged (except in debug mode).
_flow_exceptions = ()
def add_flow_exception(exc):
"""Add an exception that should not be logged.
The argument must be a subclass of Exception.
"""
global _flow_exceptions
if not isinstance(exc, type) or not issubclass(exc, Exception):
raise TypeError('Expected an Exception subclass, got %r' % (exc,))
as_set = set(_flow_exceptions)
as_set.add(exc)
_flow_exceptions = tuple(as_set)
def _init_flow_exceptions():
"""Internal helper to initialize _flow_exceptions.
This automatically adds webob.exc.HTTPException, if it can be imported.
"""
global _flow_exceptions
_flow_exceptions = ()
add_flow_exception(datastore_errors.Rollback)
try:
from webob import exc
except ImportError:
pass
else:
add_flow_exception(exc.HTTPException)
_init_flow_exceptions()
class Future(object):
"""A Future has 0 or more callbacks.
The callbacks will be called when the result is ready.
NOTE: This is somewhat inspired but not conformant to the Future interface
defined by PEP 3148. It is also inspired (and tries to be somewhat
compatible with) the App Engine specific UserRPC and MultiRpc classes.
"""
# TODO: Trim the API; there are too many ways to do the same thing.
# TODO: Compare to Monocle's much simpler Callback class.
# Constants for state property.
IDLE = apiproxy_rpc.RPC.IDLE # Not yet running (unused)
RUNNING = apiproxy_rpc.RPC.RUNNING # Not yet completed.
FINISHING = apiproxy_rpc.RPC.FINISHING # Completed.
# XXX Add docstrings to all methods. Separate PEP 3148 API from RPC API.
_geninfo = None # Extra info about suspended generator.
def __init__(self, info=None):
# TODO: Make done a method, to match PEP 3148?
__ndb_debug__ = 'SKIP' # Hide this frame from self._where
self._info = info # Info from the caller about this Future's purpose.
self._where = utils.get_stack()
self._context = None
self._reset()
def _reset(self):
self._done = False
self._result = None
self._exception = None
self._traceback = None
self._callbacks = []
self._immediate_callbacks = []
_state.add_pending(self)
self._next = None # Links suspended Futures together in a stack.
# TODO: Add a __del__ that complains if neither get_exception() nor
# check_success() was ever called? What if it's not even done?
def __repr__(self):
if self._done:
if self._exception is not None:
state = 'exception %s: %s' % (self._exception.__class__.__name__,
self._exception)
else:
state = 'result %r' % (self._result,)
else:
state = 'pending'
line = '?'
for line in self._where:
if 'tasklets.py' not in line:
break
if self._info:
line += ' for %s' % self._info
if self._geninfo:
line += ' %s' % self._geninfo
return '<%s %x created by %s; %s>' % (
self.__class__.__name__, id(self), line, state)
def dump(self):
return '%s\nCreated by %s' % (self.dump_stack(),
'\n called by '.join(self._where))
def dump_stack(self):
lines = []
fut = self
while fut is not None:
lines.append(str(fut))
fut = fut._next
return '\n waiting for '.join(lines)
def add_callback(self, callback, *args, **kwds):
if self._done:
eventloop.queue_call(None, callback, *args, **kwds)
else:
self._callbacks.append((callback, args, kwds))
def add_immediate_callback(self, callback, *args, **kwds):
if self._done:
callback(*args, **kwds)
else:
self._immediate_callbacks.append((callback, args, kwds))
def set_result(self, result):
if self._done:
raise RuntimeError('Result cannot be set twice.')
self._result = result
self._done = True
_state.remove_pending(self)
for callback, args, kwds in self._immediate_callbacks:
callback(*args, **kwds)
for callback, args, kwds in self._callbacks:
eventloop.queue_call(None, callback, *args, **kwds)
def set_exception(self, exc, tb=None):
if not isinstance(exc, BaseException):
raise TypeError('exc must be an Exception; received %r' % exc)
if self._done:
raise RuntimeError('Exception cannot be set twice.')
self._exception = exc
self._traceback = tb
self._done = True
_state.remove_pending(self, status='fail')
for callback, args, kwds in self._immediate_callbacks:
callback(*args, **kwds)
for callback, args, kwds in self._callbacks:
eventloop.queue_call(None, callback, *args, **kwds)
def done(self):
return self._done
@property
def state(self):
# This is just for compatibility with UserRPC and MultiRpc.
# A Future is considered running as soon as it is created.
if self._done:
return self.FINISHING
else:
return self.RUNNING
def wait(self):
if self._done:
return
ev = eventloop.get_event_loop()
while not self._done:
if not ev.run1():
logging.info('Deadlock in %s', self)
logging.info('All pending Futures:\n%s', _state.dump_all_pending())
_logging_debug('All pending Futures (verbose):\n%s',
_state.dump_all_pending(verbose=True))
self.set_exception(RuntimeError('Deadlock waiting for %s' % self))
def get_exception(self):
self.wait()
return self._exception
def get_traceback(self):
self.wait()
return self._traceback
def check_success(self):
self.wait()
if self._exception is not None:
raise self._exception.__class__, self._exception, self._traceback
def get_result(self):
self.check_success()
return self._result
# TODO: Have a tasklet that does this
@classmethod
def wait_any(cls, futures):
# TODO: Flatten MultiRpcs.
waiting_on = set(futures)
ev = eventloop.get_event_loop()
while waiting_on:
for f in waiting_on:
if f.state == cls.FINISHING:
return f
ev.run1()
return None
# TODO: Have a tasklet that does this
@classmethod
def wait_all(cls, futures):
# TODO: Flatten MultiRpcs.
waiting_on = set(futures)
ev = eventloop.get_event_loop()
while waiting_on:
waiting_on = set(f for f in waiting_on if f.state == cls.RUNNING)
ev.run1()
def _help_tasklet_along(self, ns, ds_conn, gen, val=None, exc=None, tb=None):
# XXX Docstring
info = utils.gen_info(gen)
__ndb_debug__ = info
try:
save_context = get_context()
save_namespace = namespace_manager.get_namespace()
save_ds_connection = datastore._GetConnection()
try:
set_context(self._context)
if ns != save_namespace:
namespace_manager.set_namespace(ns)
if ds_conn is not save_ds_connection:
datastore._SetConnection(ds_conn)
if exc is not None:
_logging_debug('Throwing %s(%s) into %s',
exc.__class__.__name__, exc, info)
value = gen.throw(exc.__class__, exc, tb)
else:
_logging_debug('Sending %r to %s', val, info)
value = gen.send(val)
self._context = get_context()
finally:
ns = namespace_manager.get_namespace()
ds_conn = datastore._GetConnection()
set_context(save_context)
if save_namespace != ns:
namespace_manager.set_namespace(save_namespace)
if save_ds_connection is not ds_conn:
datastore._SetConnection(save_ds_connection)
except StopIteration, err:
result = get_return_value(err)
_logging_debug('%s returned %r', info, result)
self.set_result(result)
return
except GeneratorExit:
# In Python 2.5, this derives from Exception, but we don't want
# to handle it like other Exception instances. So we catch and
# re-raise it immediately. See issue 127. http://goo.gl/2p5Pn
# TODO: Remove when Python 2.5 is no longer supported.
raise
except Exception, err:
_, _, tb = sys.exc_info()
if isinstance(err, _flow_exceptions):
# Flow exceptions aren't logged except in "heavy debug" mode,
# and then only at DEBUG level, without a traceback.
_logging_debug('%s raised %s(%s)',
info, err.__class__.__name__, err)
elif utils.DEBUG and logging.getLogger().level < logging.DEBUG:
# In "heavy debug" mode, log a warning with traceback.
# (This is the same condition as used in utils.logging_debug().)
logging.warning('%s raised %s(%s)',
info, err.__class__.__name__, err, exc_info=True)
else:
# Otherwise, log a warning without a traceback.
logging.warning('%s raised %s(%s)', info, err.__class__.__name__, err)
self.set_exception(err, tb)
return
else:
_logging_debug('%s yielded %r', info, value)
if isinstance(value, (apiproxy_stub_map.UserRPC,
datastore_rpc.MultiRpc)):
# TODO: Tail recursion if the RPC is already complete.
eventloop.queue_rpc(value, self._on_rpc_completion,
value, ns, ds_conn, gen)
return
if isinstance(value, Future):
# TODO: Tail recursion if the Future is already done.
if self._next:
raise RuntimeError('Future has already completed yet next is %r' %
self._next)
self._next = value
self._geninfo = utils.gen_info(gen)
_logging_debug('%s is now blocked waiting for %s', self, value)
value.add_callback(self._on_future_completion, value, ns, ds_conn, gen)
return
if isinstance(value, (tuple, list)):
# Arrange for yield to return a list of results (not Futures).
info = 'multi-yield from %s' % utils.gen_info(gen)
mfut = MultiFuture(info)
try:
for subfuture in value:
mfut.add_dependent(subfuture)
mfut.complete()
except GeneratorExit:
raise
except Exception, err:
_, _, tb = sys.exc_info()
mfut.set_exception(err, tb)
mfut.add_callback(self._on_future_completion, mfut, ns, ds_conn, gen)
return
if _is_generator(value):
# TODO: emulate PEP 380 here?
raise NotImplementedError('Cannot defer to another generator.')
raise RuntimeError('A tasklet should not yield a plain value: '
'%.200s yielded %.200r' % (info, value))
def _on_rpc_completion(self, rpc, ns, ds_conn, gen):
try:
result = rpc.get_result()
except GeneratorExit:
raise
except Exception, err:
_, _, tb = sys.exc_info()
self._help_tasklet_along(ns, ds_conn, gen, exc=err, tb=tb)
else:
self._help_tasklet_along(ns, ds_conn, gen, result)
def _on_future_completion(self, future, ns, ds_conn, gen):
if self._next is future:
self._next = None
self._geninfo = None
_logging_debug('%s is no longer blocked waiting for %s', self, future)
exc = future.get_exception()
if exc is not None:
self._help_tasklet_along(ns, ds_conn, gen,
exc=exc, tb=future.get_traceback())
else:
val = future.get_result() # This won't raise an exception.
self._help_tasklet_along(ns, ds_conn, gen, val)
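# A hedged usage sketch (illustrative, not part of the original module):
# driving a bare Future by hand, without the tasklet machinery.
def _demo_future_by_hand():
  fut = Future('demo')
  fut.set_result(42)
  return fut.get_result()  # -> 42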
def sleep(dt):
"""Public function to sleep some time.
Example:
yield tasklets.sleep(0.5) # Sleep for half a sec.
"""
fut = Future('sleep(%.3f)' % dt)
eventloop.queue_call(dt, fut.set_result, None)
return fut
class MultiFuture(Future):
"""A Future that depends on multiple other Futures.
This is used internally by 'v1, v2, ... = yield f1, f2, ...'; the
semantics (e.g. error handling) are constrained by that use case.
The protocol from the caller's POV is:
mf = MultiFuture()
mf.add_dependent(<some other Future>) -OR- mf.putq(<some value>)
mf.add_dependent(<some other Future>) -OR- mf.putq(<some value>)
.
. (More mf.add_dependent() and/or mf.putq() calls)
.
mf.complete() # No more dependents will be added.
.
. (Time passes)
.
results = mf.get_result()
Now, results is a list of results from all dependent Futures in
the order in which they were added.
It is legal to add the same dependent multiple times.
Callbacks can be added at any point.
From a dependent Future POV, there's nothing to be done: a callback
is automatically added to each dependent Future which will signal
its completion to the MultiFuture.
Error handling: if any dependent future raises an error, it is
propagated to mf. To force an early error, you can call
mf.set_exception() instead of mf.complete(). After this you can't
call mf.add_dependent() or mf.putq() any more.
"""
def __init__(self, info=None):
__ndb_debug__ = 'SKIP' # Hide this frame from self._where
self._full = False
self._dependents = set()
self._results = []
super(MultiFuture, self).__init__(info=info)
def __repr__(self):
# TODO: This may be invoked before __init__() returns,
# from Future.__init__(). Beware.
line = super(MultiFuture, self).__repr__()
lines = [line]
for fut in self._results:
lines.append(fut.dump_stack().replace('\n', '\n '))
return '\n waiting for '.join(lines)
# TODO: Maybe rename this method, since completion of a Future/RPC
# already means something else. But to what?
def complete(self):
if self._full:
raise RuntimeError('MultiFuture cannot complete twice.')
self._full = True
if not self._dependents:
self._finish()
# TODO: Maybe don't overload set_exception() with this?
def set_exception(self, exc, tb=None):
self._full = True
super(MultiFuture, self).set_exception(exc, tb)
def _finish(self):
if not self._full:
raise RuntimeError('MultiFuture cannot finish until completed.')
if self._dependents:
raise RuntimeError('MultiFuture cannot finish whilst waiting for '
'dependents %r' % self._dependents)
if self._done:
raise RuntimeError('MultiFuture done before finishing.')
try:
result = [r.get_result() for r in self._results]
except GeneratorExit:
raise
except Exception, err:
_, _, tb = sys.exc_info()
self.set_exception(err, tb)
else:
self.set_result(result)
def putq(self, value):
if isinstance(value, Future):
fut = value
else:
fut = Future()
fut.set_result(value)
self.add_dependent(fut)
def add_dependent(self, fut):
if isinstance(fut, list):
mfut = MultiFuture()
map(mfut.add_dependent, fut)
mfut.complete()
fut = mfut
elif not isinstance(fut, Future):
raise TypeError('Expected Future, received %s: %r' % (type(fut), fut))
if self._full:
raise RuntimeError('MultiFuture cannot add a dependent once complete.')
self._results.append(fut)
if fut not in self._dependents:
self._dependents.add(fut)
fut.add_callback(self._signal_dependent_done, fut)
def _signal_dependent_done(self, fut):
self._dependents.remove(fut)
if self._full and not self._dependents and not self._done:
self._finish()
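# A hedged sketch (illustrative, not part of the original module) mirroring
# the putq()/complete() protocol in the MultiFuture docstring; get_result()
# spins the event loop until all dependents have reported in.
def _demo_multifuture():
  mf = MultiFuture('demo')
  mf.putq(1)
  mf.putq(2)
  mf.complete()
  return mf.get_result()  # -> [1, 2]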
class QueueFuture(Future):
"""A Queue following the same protocol as MultiFuture.
However, instead of returning results as a list, it lets you
retrieve results as soon as they are ready, one at a time, using
getq(). The Future itself finishes with a result of None when the
last result is ready (regardless of whether it was retrieved).
The getq() method returns a Future which blocks until the next
result is ready, and then returns that result. Each getq() call
retrieves one unique result. Extra getq() calls after the last
result is already returned return EOFError as their Future's
  exception. (I.e., q.getq() returns a Future as always, but yielding
that Future raises EOFError.)
NOTE: Values can also be pushed directly via .putq(value). However
there is no flow control -- if the producer is faster than the
consumer, the queue will grow unbounded.
"""
# TODO: Refactor to share code with MultiFuture.
def __init__(self, info=None):
self._full = False
self._dependents = set()
self._completed = collections.deque()
self._waiting = collections.deque()
# Invariant: at least one of _completed and _waiting is empty.
# Also: _full and not _dependents <==> _done.
super(QueueFuture, self).__init__(info=info)
# TODO: __repr__
def complete(self):
if self._full:
raise RuntimeError('MultiFuture cannot complete twice.')
self._full = True
if not self._dependents:
self.set_result(None)
self._mark_finished()
def set_exception(self, exc, tb=None):
self._full = True
super(QueueFuture, self).set_exception(exc, tb)
if not self._dependents:
self._mark_finished()
def putq(self, value):
if isinstance(value, Future):
fut = value
else:
fut = Future()
fut.set_result(value)
self.add_dependent(fut)
def add_dependent(self, fut):
if not isinstance(fut, Future):
raise TypeError('fut must be a Future instance; received %r' % fut)
if self._full:
      raise RuntimeError('QueueFuture cannot add a dependent once complete.')
if fut not in self._dependents:
self._dependents.add(fut)
fut.add_callback(self._signal_dependent_done, fut)
def _signal_dependent_done(self, fut):
if not fut.done():
      raise RuntimeError('Future not done before signalling dependent done.')
self._dependents.remove(fut)
exc = fut.get_exception()
tb = fut.get_traceback()
val = None
if exc is None:
val = fut.get_result()
if self._waiting:
waiter = self._waiting.popleft()
self._pass_result(waiter, exc, tb, val)
else:
self._completed.append((exc, tb, val))
if self._full and not self._dependents and not self._done:
self.set_result(None)
self._mark_finished()
def _mark_finished(self):
if not self.done():
raise RuntimeError('Future not done before marking as finished.')
while self._waiting:
waiter = self._waiting.popleft()
self._pass_eof(waiter)
def getq(self):
fut = Future()
if self._completed:
exc, tb, val = self._completed.popleft()
self._pass_result(fut, exc, tb, val)
elif self._full and not self._dependents:
self._pass_eof(fut)
else:
self._waiting.append(fut)
return fut
def _pass_eof(self, fut):
if not self._done:
raise RuntimeError('QueueFuture cannot pass EOF until done.')
exc = self.get_exception()
if exc is not None:
tb = self.get_traceback()
else:
exc = EOFError('Queue is empty')
tb = None
self._pass_result(fut, exc, tb, None)
def _pass_result(self, fut, exc, tb, val):
if exc is not None:
fut.set_exception(exc, tb)
else:
fut.set_result(val)
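# A hedged sketch (illustrative, not part of the original module): results
# are retrieved one getq() at a time; once drained, further getq() futures
# carry EOFError, as described in the QueueFuture docstring.
def _demo_queuefuture():
  q = QueueFuture('demo')
  q.putq('a')
  q.complete()
  first = q.getq().get_result()  # -> 'a'
  try:
    q.getq().get_result()
  except EOFError:
    pass  # queue exhausted, as documented
  return first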
class SerialQueueFuture(Future):
"""Like QueueFuture but maintains the order of insertion.
This class is used by Query operations.
Invariants:
- At least one of _queue and _waiting is empty.
- The Futures in _waiting are always pending.
(The Futures in _queue may be pending or completed.)
In the discussion below, add_dependent() is treated the same way as
putq().
If putq() is ahead of getq(), the situation is like this:
putq()
v
_queue: [f1, f2, ...]; _waiting: []
^
getq()
Here, putq() appends a Future to the right of _queue, and getq()
removes one from the left.
If getq() is ahead of putq(), it's like this:
putq()
v
_queue: []; _waiting: [f1, f2, ...]
^
getq()
Here, putq() removes a Future from the left of _waiting, and getq()
appends one to the right.
When both are empty, putq() appends a Future to the right of _queue,
while getq() appends one to the right of _waiting.
The _full flag means that no more calls to putq() will be made; it
is set by calling either complete() or set_exception().
Calling complete() signals that no more putq() calls will be made.
If getq() is behind, subsequent getq() calls will eat up _queue
until it is empty, and after that will return a Future that passes
EOFError (note that getq() itself never raises EOFError). If getq()
is ahead when complete() is called, the Futures in _waiting are all
passed an EOFError exception (thereby eating up _waiting).
If, instead of complete(), set_exception() is called, the exception
and traceback set there will be used instead of EOFError.
"""
def __init__(self, info=None):
self._full = False
self._queue = collections.deque()
self._waiting = collections.deque()
super(SerialQueueFuture, self).__init__(info=info)
# TODO: __repr__
def complete(self):
if self._full:
raise RuntimeError('SerialQueueFuture cannot complete twice.')
self._full = True
while self._waiting:
waiter = self._waiting.popleft()
waiter.set_exception(EOFError('Queue is empty'))
if not self._queue:
self.set_result(None)
def set_exception(self, exc, tb=None):
self._full = True
super(SerialQueueFuture, self).set_exception(exc, tb)
while self._waiting:
waiter = self._waiting.popleft()
waiter.set_exception(exc, tb)
def putq(self, value):
if isinstance(value, Future):
fut = value
else:
if self._waiting:
waiter = self._waiting.popleft()
waiter.set_result(value)
return
fut = Future()
fut.set_result(value)
self.add_dependent(fut)
def add_dependent(self, fut):
if not isinstance(fut, Future):
raise TypeError('fut must be a Future instance; received %r' % fut)
if self._full:
raise RuntimeError('SerialQueueFuture cannot add dependent '
'once complete.')
if self._waiting:
waiter = self._waiting.popleft()
fut.add_callback(_transfer_result, fut, waiter)
else:
self._queue.append(fut)
def getq(self):
if self._queue:
fut = self._queue.popleft()
# TODO: Isn't it better to call self.set_result(None) in complete()?
if not self._queue and self._full and not self._done:
self.set_result(None)
else:
fut = Future()
if self._full:
if not self._done:
raise RuntimeError('self._queue should be non-empty.')
err = self.get_exception()
if err is not None:
tb = self.get_traceback()
else:
err = EOFError('Queue is empty')
tb = None
fut.set_exception(err, tb)
else:
self._waiting.append(fut)
return fut
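# A hedged sketch (illustrative, not part of the original module): unlike
# QueueFuture, insertion order is preserved, so values come back exactly
# as queued.
def _demo_serialqueuefuture():
  q = SerialQueueFuture('demo')
  q.putq('first')
  q.putq('second')
  q.complete()
  return q.getq().get_result(), q.getq().get_result()  # -> ('first', 'second')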
def _transfer_result(fut1, fut2):
"""Helper to transfer result or errors from one Future to another."""
exc = fut1.get_exception()
if exc is not None:
tb = fut1.get_traceback()
fut2.set_exception(exc, tb)
else:
val = fut1.get_result()
fut2.set_result(val)
class ReducingFuture(Future):
"""A Queue following the same protocol as MultiFuture.
However the result, instead of being a list of results of dependent
Futures, is computed by calling a 'reducer' tasklet. The reducer tasklet
takes a list of values and returns a single value. It may be called
multiple times on sublists of values and should behave like
e.g. sum().
NOTE: The reducer input values may be reordered compared to the
order in which they were added to the queue.
"""
# TODO: Refactor to reuse some code with MultiFuture.
def __init__(self, reducer, info=None, batch_size=20):
self._reducer = reducer
self._batch_size = batch_size
self._full = False
self._dependents = set()
self._completed = collections.deque()
self._queue = collections.deque()
super(ReducingFuture, self).__init__(info=info)
# TODO: __repr__
def complete(self):
if self._full:
raise RuntimeError('ReducingFuture cannot complete twice.')
self._full = True
if not self._dependents:
self._mark_finished()
def set_exception(self, exc, tb=None):
self._full = True
self._queue.clear()
super(ReducingFuture, self).set_exception(exc, tb)
def putq(self, value):
if isinstance(value, Future):
fut = value
else:
fut = Future()
fut.set_result(value)
self.add_dependent(fut)
def add_dependent(self, fut):
if self._full:
raise RuntimeError('ReducingFuture cannot add dependent once complete.')
self._internal_add_dependent(fut)
def _internal_add_dependent(self, fut):
if not isinstance(fut, Future):
raise TypeError('fut must be a Future; received %r' % fut)
if fut not in self._dependents:
self._dependents.add(fut)
fut.add_callback(self._signal_dependent_done, fut)
def _signal_dependent_done(self, fut):
if not fut.done():
      raise RuntimeError('Future not done before signalling dependent done.')
self._dependents.remove(fut)
if self._done:
return # Already done.
try:
val = fut.get_result()
except GeneratorExit:
raise
except Exception, err:
_, _, tb = sys.exc_info()
self.set_exception(err, tb)
return
self._queue.append(val)
if len(self._queue) >= self._batch_size:
todo = list(self._queue)
self._queue.clear()
try:
nval = self._reducer(todo)
except GeneratorExit:
raise
except Exception, err:
_, _, tb = sys.exc_info()
self.set_exception(err, tb)
return
if isinstance(nval, Future):
self._internal_add_dependent(nval)
else:
self._queue.append(nval)
if self._full and not self._dependents:
self._mark_finished()
def _mark_finished(self):
if not self._queue:
self.set_result(None)
elif len(self._queue) == 1:
self.set_result(self._queue.pop())
else:
todo = list(self._queue)
self._queue.clear()
try:
nval = self._reducer(todo)
except GeneratorExit:
raise
except Exception, err:
_, _, tb = sys.exc_info()
self.set_exception(err, tb)
return
if isinstance(nval, Future):
self._internal_add_dependent(nval)
else:
self.set_result(nval)
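# A hedged sketch (illustrative, not part of the original module): a
# sum-like reducer as the docstring suggests; values may be reduced in
# batches and reordered, so the reducer must not depend on input order.
def _demo_reducingfuture():
  rf = ReducingFuture(sum, info='demo', batch_size=2)
  for value in (1, 2, 3):
    rf.putq(value)
  rf.complete()
  return rf.get_result()  # -> 6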
# Alias for StopIteration used to mark return values.
# To use this, raise Return(<your return value>). The semantics
# are exactly the same as raise StopIteration(<your return value>)
# but using Return clarifies that you are intending this to be the
# return value of a tasklet.
# TODO: According to Monocle authors Steve and Greg Hazel, Twisted
# used an exception to signal a return value from a generator early
# on, and they found out it was error-prone. Should I worry?
Return = StopIteration
def get_return_value(err):
# XXX Docstring
if not err.args:
result = None
elif len(err.args) == 1:
result = err.args[0]
else:
result = err.args
return result
def tasklet(func):
# XXX Docstring
@utils.wrapping(func)
def tasklet_wrapper(*args, **kwds):
# XXX Docstring
# TODO: make most of this a public function so you can take a bare
# generator and turn it into a tasklet dynamically. (Monocle has
# this I believe.)
# __ndb_debug__ = utils.func_info(func)
fut = Future('tasklet %s' % utils.func_info(func))
fut._context = get_context()
try:
result = func(*args, **kwds)
except StopIteration, err:
# Just in case the function is not a generator but still uses
# the "raise Return(...)" idiom, we'll extract the return value.
result = get_return_value(err)
if _is_generator(result):
ns = namespace_manager.get_namespace()
ds_conn = datastore._GetConnection()
eventloop.queue_call(None, fut._help_tasklet_along, ns, ds_conn, result)
else:
fut.set_result(result)
return fut
return tasklet_wrapper
def synctasklet(func):
"""Decorator to run a function as a tasklet when called.
Use this to wrap a request handler function that will be called by
some web application framework (e.g. a Django view function or a
webapp.RequestHandler.get method).
"""
@utils.wrapping(func)
def synctasklet_wrapper(*args, **kwds):
__ndb_debug__ = utils.func_info(func)
taskletfunc = tasklet(func)
return taskletfunc(*args, **kwds).get_result()
return synctasklet_wrapper
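# A hedged sketch (illustrative, not part of the original module): a tiny
# synchronous entry point built from a tasklet; the yield suspends on a
# Future and Return() carries the result out.
@synctasklet
def _demo_sync_tasklet():
  yield sleep(0)
  raise Return(42)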
def toplevel(func):
"""A sync tasklet that sets a fresh default Context.
Use this for toplevel view functions such as
webapp.RequestHandler.get() or Django view functions.
"""
@utils.wrapping(func)
def add_context_wrapper(*args, **kwds):
__ndb_debug__ = utils.func_info(func)
_state.clear_all_pending()
# Create and install a new context.
ctx = make_default_context()
try:
set_context(ctx)
return synctasklet(func)(*args, **kwds)
finally:
set_context(None)
ctx.flush().check_success()
eventloop.run() # Ensure writes are flushed, etc.
return add_context_wrapper
_CONTEXT_KEY = '__CONTEXT__'
def get_context():
# XXX Docstring
ctx = None
if os.getenv(_CONTEXT_KEY):
ctx = _state.current_context
if ctx is None:
ctx = make_default_context()
set_context(ctx)
return ctx
def make_default_context():
# XXX Docstring
return make_context()
@utils.positional(0)
def make_context(conn=None, config=None):
# XXX Docstring
from . import context # Late import to deal with circular imports.
return context.Context(conn=conn, config=config)
def set_context(new_context):
# XXX Docstring
os.environ[_CONTEXT_KEY] = '1'
_state.current_context = new_context
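# A hedged sketch (illustrative, not part of the original module):
# installing a fresh default context explicitly, mirroring what toplevel()
# does around the wrapped call.
def _demo_fresh_context():
  ctx = make_default_context()
  set_context(ctx)
  return get_context() is ctx  # -> True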
# TODO: Rework the following into documentation.
# A tasklet/coroutine/generator can yield the following things:
# - Another tasklet/coroutine/generator; this is entirely equivalent to
# "for x in g: yield x"; this is handled entirely by the @tasklet wrapper.
# (Actually, not. @tasklet returns a function that when called returns
# a Future. You can use the pep380 module's @gwrap decorator to support
# yielding bare generators though.)
# - An RPC (or MultiRpc); the tasklet will be resumed when this completes.
# This does not use the RPC's callback mechanism.
# - A Future; the tasklet will be resumed when the Future is done.
# This uses the Future's callback mechanism.
# A Future can be used in several ways:
# - Yield it from a tasklet; see above.
# - Check (poll) its status via f.done.
# - Call its wait() method, perhaps indirectly via check_success()
# or get_result(). This invokes the event loop.
# - Call the Future.wait_any() or Future.wait_all() method.
# This is waits for any or all Futures and RPCs in the argument list.
# XXX HIRO XXX
# - A tasklet is a (generator) function decorated with @tasklet.
# - Calling a tasklet schedules the function for execution and returns a Future.
# - A function implementing a tasklet may:
# = yield a Future; this waits for the Future which returns f.get_result();
# = yield an RPC; this waits for the RPC and then returns rpc.get_result();
# = raise Return(result); this sets the outer Future's result;
# = raise StopIteration or return; this sets the outer Future's result;
# = raise another exception: this sets the outer Future's exception.
# - If a function implementing a tasklet is not a generator it will be
# immediately executed to completion and the tasklet wrapper will
# return a Future that is already done. (XXX Alternative behavior:
# it schedules the call to be run by the event loop.)
# - Code not running in a tasklet can call f.get_result() or f.wait() on
# a future. This is implemented by a simple loop like the following:
# while not self._done:
# eventloop.run1()
# - Here eventloop.run1() runs one "atomic" part of the event loop:
# = either it calls one immediately ready callback;
# = or it waits for the first RPC to complete;
# = or it sleeps until the first callback should be ready;
# = or it raises an exception indicating all queues are empty.
# - It is possible but suboptimal to call rpc.get_result() or
# rpc.wait() directly on an RPC object since this will not allow
# other callbacks to run as they become ready. Wrapping an RPC in a
# Future will take care of this issue.
# - The important insight is that when a generator function
# implementing a tasklet yields, raises or returns, there is always a
# wrapper that catches this event and either turns it into a
# callback sent to the event loop, or sets the result or exception
# for the tasklet's Future.
| mit |
dataxu/ansible-modules-core | cloud/google/gce_lb.py | 20 | 12539 | #!/usr/bin/python
# Copyright 2013 Google Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: gce_lb
version_added: "1.5"
short_description: create/destroy GCE load-balancer resources
description:
- This module can create and destroy Google Compute Engine C(loadbalancer)
and C(httphealthcheck) resources. The primary LB resource is the
C(load_balancer) resource and the health check parameters are all
prefixed with I(httphealthcheck).
The full documentation for Google Compute Engine load balancing is at
U(https://developers.google.com/compute/docs/load-balancing/). However,
the ansible module simplifies the configuration by following the
libcloud model.
Full install/configuration instructions for the gce* modules can
be found in the comments of ansible/test/gce_tests.py.
options:
httphealthcheck_name:
description:
- the name identifier for the HTTP health check
required: false
default: null
httphealthcheck_port:
description:
- the TCP port to use for HTTP health checking
required: false
default: 80
httphealthcheck_path:
description:
- the url path to use for HTTP health checking
required: false
default: "/"
httphealthcheck_interval:
description:
- the duration in seconds between each health check request
required: false
default: 5
httphealthcheck_timeout:
description:
- the timeout in seconds before a request is considered a failed check
required: false
default: 5
httphealthcheck_unhealthy_count:
description:
- number of consecutive failed checks before marking a node unhealthy
required: false
default: 2
httphealthcheck_healthy_count:
description:
- number of consecutive successful checks before marking a node healthy
required: false
default: 2
httphealthcheck_host:
description:
- host header to pass through on HTTP check requests
required: false
default: null
name:
description:
- name of the load-balancer resource
required: false
default: null
protocol:
description:
- the protocol used for the load-balancer packet forwarding, tcp or udp
required: false
default: "tcp"
choices: ['tcp', 'udp']
region:
description:
- the GCE region where the load-balancer is defined
required: false
external_ip:
description:
- the external static IPv4 (or auto-assigned) address for the LB
required: false
default: null
port_range:
description:
      - the port (range) to forward, e.g. 80 or 8000-8888; defaults to all ports
required: false
default: null
members:
description:
      - a list of zone/nodename pairs, e.g. ['us-central1-a/www-a', ...]
required: false
aliases: ['nodes']
state:
description:
- desired state of the LB
default: "present"
choices: ["active", "present", "absent", "deleted"]
aliases: []
service_account_email:
version_added: "1.6"
description:
- service account email
required: false
default: null
aliases: []
pem_file:
version_added: "1.6"
description:
      - path to the pem file associated with the service account email.
        This option is deprecated. Use 'credentials_file'.
required: false
default: null
aliases: []
credentials_file:
version_added: "2.1.0"
description:
- path to the JSON file associated with the service account email
default: null
required: false
project_id:
version_added: "1.6"
description:
- your GCE project ID
required: false
default: null
aliases: []
requirements:
- "python >= 2.6"
- "apache-libcloud >= 0.13.3, >= 0.17.0 if using JSON credentials"
author: "Eric Johnson (@erjohnso) <[email protected]>"
'''
EXAMPLES = '''
# Simple example of creating a new LB, adding members, and a health check
- local_action:
module: gce_lb
name: testlb
region: us-central1
members: ["us-central1-a/www-a", "us-central1-b/www-b"]
httphealthcheck_name: hc
httphealthcheck_port: 80
httphealthcheck_path: "/up"
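
# Removing the same LB and health check (illustrative; 'absent' and
# 'deleted' are equivalent states for this module)
- local_action:
    module: gce_lb
    name: testlb
    httphealthcheck_name: hc
    state: absent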
'''
try:
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
from libcloud.loadbalancer.types import Provider as Provider_lb
from libcloud.loadbalancer.providers import get_driver as get_driver_lb
from libcloud.common.google import GoogleBaseError, QuotaExceededError, \
ResourceExistsError, ResourceNotFoundError
_ = Provider.GCE
HAS_LIBCLOUD = True
except ImportError:
HAS_LIBCLOUD = False
def main():
module = AnsibleModule(
argument_spec = dict(
httphealthcheck_name = dict(),
httphealthcheck_port = dict(default=80),
httphealthcheck_path = dict(default='/'),
httphealthcheck_interval = dict(default=5),
httphealthcheck_timeout = dict(default=5),
httphealthcheck_unhealthy_count = dict(default=2),
httphealthcheck_healthy_count = dict(default=2),
httphealthcheck_host = dict(),
name = dict(),
protocol = dict(default='tcp'),
region = dict(),
external_ip = dict(),
port_range = dict(),
members = dict(type='list'),
state = dict(default='present'),
service_account_email = dict(),
pem_file = dict(),
credentials_file = dict(),
project_id = dict(),
)
)
if not HAS_LIBCLOUD:
module.fail_json(msg='libcloud with GCE support (0.13.3+) required for this module.')
gce = gce_connect(module)
httphealthcheck_name = module.params.get('httphealthcheck_name')
httphealthcheck_port = module.params.get('httphealthcheck_port')
httphealthcheck_path = module.params.get('httphealthcheck_path')
httphealthcheck_interval = module.params.get('httphealthcheck_interval')
httphealthcheck_timeout = module.params.get('httphealthcheck_timeout')
httphealthcheck_unhealthy_count = \
module.params.get('httphealthcheck_unhealthy_count')
httphealthcheck_healthy_count = \
module.params.get('httphealthcheck_healthy_count')
httphealthcheck_host = module.params.get('httphealthcheck_host')
name = module.params.get('name')
protocol = module.params.get('protocol')
region = module.params.get('region')
external_ip = module.params.get('external_ip')
port_range = module.params.get('port_range')
members = module.params.get('members')
state = module.params.get('state')
try:
gcelb = get_driver_lb(Provider_lb.GCE)(gce_driver=gce)
gcelb.connection.user_agent_append("%s/%s" % (
USER_AGENT_PRODUCT, USER_AGENT_VERSION))
    except Exception as e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
changed = False
json_output = {'name': name, 'state': state}
if not name and not httphealthcheck_name:
module.fail_json(msg='Nothing to do, please specify a "name" ' + \
'or "httphealthcheck_name" parameter', changed=False)
if state in ['active', 'present']:
# first, create the httphealthcheck if requested
hc = None
if httphealthcheck_name:
json_output['httphealthcheck_name'] = httphealthcheck_name
try:
hc = gcelb.ex_create_healthcheck(httphealthcheck_name,
host=httphealthcheck_host, path=httphealthcheck_path,
port=httphealthcheck_port,
interval=httphealthcheck_interval,
timeout=httphealthcheck_timeout,
unhealthy_threshold=httphealthcheck_unhealthy_count,
healthy_threshold=httphealthcheck_healthy_count)
changed = True
except ResourceExistsError:
hc = gce.ex_get_healthcheck(httphealthcheck_name)
            except Exception as e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
if hc is not None:
json_output['httphealthcheck_host'] = hc.extra['host']
json_output['httphealthcheck_path'] = hc.path
json_output['httphealthcheck_port'] = hc.port
json_output['httphealthcheck_interval'] = hc.interval
json_output['httphealthcheck_timeout'] = hc.timeout
json_output['httphealthcheck_unhealthy_count'] = \
hc.unhealthy_threshold
json_output['httphealthcheck_healthy_count'] = \
hc.healthy_threshold
# create the forwarding rule (and target pool under the hood)
lb = None
if name:
if not region:
module.fail_json(msg='Missing required region name',
changed=False)
nodes = []
output_nodes = []
json_output['name'] = name
# members is a python list of 'zone/inst' strings
if members:
for node in members:
try:
zone, node_name = node.split('/')
nodes.append(gce.ex_get_node(node_name, zone))
output_nodes.append(node)
                    except Exception:
# skip nodes that are badly formatted or don't exist
pass
try:
if hc is not None:
lb = gcelb.create_balancer(name, port_range, protocol,
None, nodes, ex_region=region, ex_healthchecks=[hc],
ex_address=external_ip)
else:
lb = gcelb.create_balancer(name, port_range, protocol,
None, nodes, ex_region=region, ex_address=external_ip)
changed = True
except ResourceExistsError:
lb = gcelb.get_balancer(name)
            except Exception as e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
if lb is not None:
json_output['members'] = output_nodes
json_output['protocol'] = protocol
json_output['region'] = region
json_output['external_ip'] = lb.ip
json_output['port_range'] = lb.port
hc_names = []
if 'healthchecks' in lb.extra:
for hc in lb.extra['healthchecks']:
hc_names.append(hc.name)
json_output['httphealthchecks'] = hc_names
if state in ['absent', 'deleted']:
# first, delete the load balancer (forwarding rule and target pool)
# if specified.
if name:
json_output['name'] = name
try:
lb = gcelb.get_balancer(name)
gcelb.destroy_balancer(lb)
changed = True
except ResourceNotFoundError:
pass
            except Exception as e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
# destroy the health check if specified
if httphealthcheck_name:
json_output['httphealthcheck_name'] = httphealthcheck_name
try:
hc = gce.ex_get_healthcheck(httphealthcheck_name)
gce.ex_destroy_healthcheck(hc)
changed = True
except ResourceNotFoundError:
pass
            except Exception as e:
module.fail_json(msg=unexpected_error_msg(e), changed=False)
json_output['changed'] = changed
module.exit_json(**json_output)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.gce import *
if __name__ == '__main__':
main()
| gpl-3.0 |
USGSDenverPychron/pychron | pychron/processing/analyses/view/main_view.py | 1 | 16975 | # ===============================================================================
# Copyright 2013 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
from traits.api import HasTraits, Str, List, Event, Instance, Any, Property, cached_property
from traitsui.api import View, UItem, VGroup, HGroup
from uncertainties import std_dev, nominal_value, ufloat
from pychron.core.helpers.formatting import floatfmt, format_percent_error
from pychron.core.ui.tabular_editor import myTabularEditor
from pychron.processing.analyses.view.adapters import ComputedValueTabularAdapter, \
DetectorRatioTabularAdapter, ExtractionTabularAdapter, MeasurementTabularAdapter
from pychron.processing.analyses.view.values import ExtractionValue, ComputedValue, MeasurementValue, DetectorRatio
# class MainViewHandler(Handler):
# def show_isotope_evolution(self, uiinfo, obj):
# isos = obj.selected
# obj.show_iso_evo_needed = isos
class MainView(HasTraits):
name = 'Main'
analysis_id = Str
analysis_type = Str
isotopes = List
refresh_needed = Event
computed_values = List
corrected_values = List
extraction_values = List
measurement_values = List
_corrected_enabled = True
measurement_adapter = Instance(MeasurementTabularAdapter, ())
extraction_adapter = Instance(ExtractionTabularAdapter, ())
computed_adapter = Property(depends_on='analysis_type')
selected = Any
show_iso_evo_needed = Event
def __init__(self, analysis=None, *args, **kw):
super(MainView, self).__init__(*args, **kw)
if analysis:
self._load(analysis)
def load(self, an, refresh=False):
self._load(an)
if refresh:
self.refresh_needed = True
def _load(self, an):
# self.isotopes = an.isotopes
        print(an.isotope_keys)
self.isotopes = [an.isotopes[k] for k in an.isotope_keys]
self.load_computed(an)
self.load_extraction(an)
self.load_measurement(an, an)
def _get_irradiation(self, an):
return an.irradiation_label
# def _get_j(self, an):
# return ufloat(an.j, an.j_err)
def load_measurement(self, an, ar):
# j = self._get_j(an)
j = ar.j
jf = 'NaN'
if j is not None:
jj = floatfmt(nominal_value(j), n=7, s=5)
pe = format_percent_error(nominal_value(j), std_dev(j), include_percent_sign=True)
jf = u'{} \u00b1{:0.2e}({})'.format(jj, std_dev(j), pe)
a39 = ar.ar39decayfactor
a37 = ar.ar37decayfactor
ms = [
# MeasurementValue(name='DR Version',
# value=an.data_reduction_tag),
MeasurementValue(name='Branch',
value=an.branch),
MeasurementValue(name='DAQ Version',
value=an.collection_version),
MeasurementValue(name='ExperimentID',
value=an.repository_identifier),
# MeasurementValue(name='AnalysisID',
# value=self.analysis_ida),
MeasurementValue(name='Spectrometer',
value=an.mass_spectrometer),
MeasurementValue(name='Run Date',
value=an.rundate.strftime('%Y-%m-%d %H:%M:%S')),
MeasurementValue(name='Irradiation',
value=self._get_irradiation(an)),
MeasurementValue(name='J',
value=jf),
MeasurementValue(name='Lambda K',
value=nominal_value(ar.arar_constants.lambda_k)),
MeasurementValue(name='Project',
value=an.project),
MeasurementValue(name='Sample',
value=an.sample),
MeasurementValue(name='Material',
value=an.material),
MeasurementValue(name='Comment',
value=an.comment),
MeasurementValue(name='Ar39Decay',
value=floatfmt(a39)),
MeasurementValue(name='Ar37Decay',
value=floatfmt(a37)),
MeasurementValue(name='Sens.',
value=floatfmt(an.sensitivity))]
self.measurement_values = ms
def load_extraction(self, an):
ev = [
ExtractionValue(name='Extract Script',
value=an.extraction_script_name),
ExtractionValue(name='Meas. Script',
value=an.measurement_script_name),
ExtractionValue(name='Device',
value=an.extract_device),
ExtractionValue(name='Position',
value=an.position, ),
ExtractionValue(name='XYZ',
value=an.xyz_position),
ExtractionValue(name='Extract Value',
value=an.extract_value,
units=an.extract_units, ),
ExtractionValue(name='Duration',
value=an.extract_duration,
units='s'),
ExtractionValue(name='Cleanup',
value=an.cleanup_duration,
units='s'),
ExtractionValue(name='T_o',
value=an.collection_time_zero_offset,
units='s')]
if 'UV' in an.extract_device:
extra = [ExtractionValue(name='Mask Pos.',
value=an.mask_position,
units='steps'),
ExtractionValue(name='Mask Name',
value=an.mask_name),
ExtractionValue(name='Reprate',
value=an.reprate,
units='1/s')]
else:
extra = [ExtractionValue(name='Beam Diam.',
value=an.beam_diameter,
units='mm'),
ExtractionValue(name='Pattern',
value=an.pattern),
ExtractionValue(name='Ramp Dur.',
value=an.ramp_duration,
units='s'),
ExtractionValue(name='Ramp Rate',
value=an.ramp_rate,
units='1/s')]
ev.extend(extra)
self.extraction_values = ev
def load_computed(self, an, new_list=True):
if self.analysis_type == 'unknown':
self._load_unknown_computed(an, new_list)
if self._corrected_enabled:
self._load_corrected_values(an, new_list)
elif self.analysis_type in ('air', 'blank_air', 'blank_unknown', 'blank_cocktail'):
self._load_air_computed(an, new_list)
elif self.analysis_type == 'cocktail':
self._load_cocktail_computed(an, new_list)
# def _get_isotope(self, name):
# return next((iso for iso in self.isotopes if iso.name == name), None)
def _make_ratios(self, ratios):
cv = []
for name, nd, ref in ratios:
dr = DetectorRatio(name=name,
value='',
error='',
noncorrected_value=0,
noncorrected_error=0,
ic_factor='',
ref_ratio=ref,
detectors=nd)
cv.append(dr)
return cv
def _get_non_corrected_ratio(self, niso, diso):
"""
niso: Isotope
diso: Isotope
return ufloat
calculate non_corrected ratio as
r = (Intensity_A-baseline_A-blank_A)/(Intensity_B-baseline_B-blank_B)
"""
if niso and diso:
try:
return niso.get_non_detector_corrected_value() / diso.get_non_detector_corrected_value()
except ZeroDivisionError:
pass
return ufloat(0, 1e-20)
def _get_corrected_ratio(self, niso, diso):
"""
niso: Isotope
diso: Isotope
return ufloat, ufloat
calculate corrected ratio as
r = IC_A*(Intensity_A-baseline_A-blank_A)/(IC_B*(Intensity_B-baseline_B-blank_B))
rr = IC_B/IC_A
"""
if niso and diso:
try:
return (niso.get_ic_corrected_value() / diso.get_ic_corrected_value(),
diso.ic_factor / niso.ic_factor)
except (ZeroDivisionError, TypeError):
pass
return ufloat(0, 1e-20), 1
def _update_ratios(self):
def get_iso(kk):
return next((v for v in self.isotopes if v.name == kk), None)
for ci in self.computed_values:
if not isinstance(ci, DetectorRatio):
continue
nd = ci.detectors
n, d = nd.split('/')
niso, diso = get_iso(n), get_iso(d)
if niso and diso:
noncorrected = self._get_non_corrected_ratio(niso, diso)
corrected, ic = self._get_corrected_ratio(niso, diso)
ci.trait_set(value=floatfmt(nominal_value(corrected)),
error=floatfmt(std_dev(corrected)),
noncorrected_value=nominal_value(noncorrected),
noncorrected_error=std_dev(noncorrected),
ic_factor=nominal_value(ic))
def _load_air_computed(self, an, new_list):
if new_list:
c = an.arar_constants
ratios = [('40Ar/36Ar', 'Ar40/Ar36', nominal_value(c.atm4036)),
('40Ar/38Ar', 'Ar40/Ar38', nominal_value(c.atm4038))]
cv = self._make_ratios(ratios)
self.computed_values = cv
self._update_ratios()
def _load_cocktail_computed(self, an, new_list):
if new_list:
c = an.arar_constants
ratios = [('40Ar/36Ar', 'Ar40/Ar36', nominal_value(c.atm4036)),
('40Ar/38Ar', 'Ar40/Ar38', nominal_value(c.atm4038)),
('40Ar/39Ar', 'Ar40/Ar39', 1)]
cv = self._make_ratios(ratios)
an.calculate_age()
cv.append(ComputedValue(name='Age',
tag='uage',
value=nominal_value(an.uage),
error=std_dev(an.uage)))
self.computed_values = cv
self._update_ratios()
else:
self._update_ratios()
def _load_corrected_values(self, an, new_list):
attrs = (('40/39', 'Ar40/Ar39_decay_corrected'),
('40/37', 'Ar40/Ar37_decay_corrected'),
('40/36', 'Ar40/Ar36'),
('40/38', 'Ar40/Ar38'),
('(40/36)non_ic', 'uAr40_Ar36'),
('(40/38)non_ic', 'uAr40_Ar38'),
('38/39', 'Ar38/Ar39_decay_corrected'),
('37/39', 'Ar37_decay_corrected/Ar39_decay_corrected'),
('36/39', 'Ar36/Ar39_decay_corrected'))
if new_list:
def comp_factory(n, a, value=None, value_tag=None, error_tag=None):
if value is None:
value = getattr(an, a)
display_value = True
if value_tag:
value = getattr(an, value_tag)
display_value = False
if error_tag:
e = getattr(an, error_tag)
else:
e = std_dev(value)
return ComputedValue(name=n,
tag=a,
value=nominal_value(value or 0),
display_value=display_value,
error=e or 0)
cv = [comp_factory(*args)
for args in attrs]
self.corrected_values = cv
else:
for ci in self.corrected_values:
attr = ci.tag
v = getattr(an, attr)
ci.value = nominal_value(v)
ci.error = std_dev(v)
def _load_unknown_computed(self, an, new_list):
attrs = (('Age', 'uage'),
# ('Age', 'age', None, None, 'age_err'),
('w/o J', 'wo_j', '', 'uage', 'age_err_wo_j'),
('K/Ca', 'kca'),
('K/Cl', 'kcl'),
('40Ar*', 'rad40_percent'),
('F', 'uF'),
('w/o Irrad', 'wo_irrad', '', 'uF', 'F_err_wo_irrad'))
if new_list:
def comp_factory(n, a, value=None, value_tag=None, error_tag=None):
if value is None:
value = getattr(an, a)
display_value = True
if value_tag:
value = getattr(an, value_tag)
display_value = False
if error_tag:
e = getattr(an, error_tag)
else:
e = std_dev(value)
return ComputedValue(name=n,
tag=a,
value=nominal_value(value) or 0,
value_tag=value_tag or '',
display_value=display_value,
error=e or 0)
cv = [comp_factory(*args)
for args in attrs]
self.computed_values = cv
else:
for ci in self.computed_values:
attr = ci.tag
if attr == 'wo_j':
ci.error = an.age_err_wo_j or 0
ci.value = nominal_value(getattr(an, ci.value_tag))
elif attr == 'wo_irrad':
ci.error = an.F_err_wo_irrad or 0
ci.value = nominal_value(getattr(an, ci.value_tag))
else:
v = getattr(an, attr)
if v is not None:
ci.value = nominal_value(v)
ci.error = std_dev(v)
@cached_property
def _get_computed_adapter(self):
adapter = ComputedValueTabularAdapter
if self.analysis_type in ('air', 'cocktail',
'blank_unknown', 'blank_air',
'blank_cocktail'):
adapter = DetectorRatioTabularAdapter
return adapter()
def _get_editors(self):
ceditor = myTabularEditor(adapter=self.computed_adapter,
editable=False,
drag_enabled=False,
refresh='refresh_needed')
eeditor = myTabularEditor(adapter=self.extraction_adapter,
drag_enabled=False,
editable=False,
refresh='refresh_needed')
meditor = myTabularEditor(adapter=self.measurement_adapter,
drag_enabled=False,
editable=False,
refresh='refresh_needed')
return ceditor, eeditor, meditor
def traits_view(self):
ceditor, eeditor, meditor = self._get_editors()
g1 = HGroup(UItem('measurement_values',
editor=meditor,
height=200,
width=0.4),
UItem('extraction_values',
editor=eeditor,
height=200,
width=0.6))
g2 = HGroup(UItem('computed_values',
editor=ceditor, ),
UItem('corrected_values',
editor=ceditor))
v = View(VGroup(g1, g2))
return v
# ============= EOF =============================================
| apache-2.0 |
twz915/django | django/contrib/auth/views.py | 2 | 21880 | import warnings
from urllib.parse import urlparse, urlunparse
from django.conf import settings
# Avoid shadowing the login() and logout() views below.
from django.contrib.auth import (
REDIRECT_FIELD_NAME, get_user_model, login as auth_login,
logout as auth_logout, update_session_auth_hash,
)
from django.contrib.auth.decorators import login_required
from django.contrib.auth.forms import (
AuthenticationForm, PasswordChangeForm, PasswordResetForm, SetPasswordForm,
)
from django.contrib.auth.tokens import default_token_generator
from django.contrib.sites.shortcuts import get_current_site
from django.http import HttpResponseRedirect, QueryDict
from django.shortcuts import resolve_url
from django.template.response import TemplateResponse
from django.urls import reverse, reverse_lazy
from django.utils.decorators import method_decorator
from django.utils.deprecation import RemovedInDjango21Warning
from django.utils.encoding import force_text
from django.utils.http import is_safe_url, urlsafe_base64_decode
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.cache import never_cache
from django.views.decorators.csrf import csrf_protect
from django.views.decorators.debug import sensitive_post_parameters
from django.views.generic.base import TemplateView
from django.views.generic.edit import FormView
UserModel = get_user_model()
class SuccessURLAllowedHostsMixin:
success_url_allowed_hosts = set()
def get_success_url_allowed_hosts(self):
allowed_hosts = {self.request.get_host()}
allowed_hosts.update(self.success_url_allowed_hosts)
return allowed_hosts
class LoginView(SuccessURLAllowedHostsMixin, FormView):
"""
Displays the login form and handles the login action.
"""
form_class = AuthenticationForm
authentication_form = None
redirect_field_name = REDIRECT_FIELD_NAME
template_name = 'registration/login.html'
redirect_authenticated_user = False
extra_context = None
@method_decorator(sensitive_post_parameters())
@method_decorator(csrf_protect)
@method_decorator(never_cache)
def dispatch(self, request, *args, **kwargs):
if self.redirect_authenticated_user and self.request.user.is_authenticated:
redirect_to = self.get_success_url()
if redirect_to == self.request.path:
raise ValueError(
"Redirection loop for authenticated user detected. Check that "
"your LOGIN_REDIRECT_URL doesn't point to a login page."
)
return HttpResponseRedirect(redirect_to)
return super(LoginView, self).dispatch(request, *args, **kwargs)
def get_success_url(self):
"""Ensure the user-originating redirection URL is safe."""
redirect_to = self.request.POST.get(
self.redirect_field_name,
self.request.GET.get(self.redirect_field_name, '')
)
url_is_safe = is_safe_url(
url=redirect_to,
allowed_hosts=self.get_success_url_allowed_hosts(),
require_https=self.request.is_secure(),
)
if not url_is_safe:
return resolve_url(settings.LOGIN_REDIRECT_URL)
return redirect_to
def get_form_class(self):
return self.authentication_form or self.form_class
def form_valid(self, form):
"""Security check complete. Log the user in."""
auth_login(self.request, form.get_user())
return HttpResponseRedirect(self.get_success_url())
def get_context_data(self, **kwargs):
context = super(LoginView, self).get_context_data(**kwargs)
current_site = get_current_site(self.request)
context.update({
self.redirect_field_name: self.get_success_url(),
'site': current_site,
'site_name': current_site.name,
})
if self.extra_context is not None:
context.update(self.extra_context)
return context
def login(request, *args, **kwargs):
warnings.warn(
'The login() view is superseded by the class-based LoginView().',
RemovedInDjango21Warning, stacklevel=2
)
return LoginView.as_view(**kwargs)(request, *args, **kwargs)
class LogoutView(SuccessURLAllowedHostsMixin, TemplateView):
"""
Logs out the user and displays 'You are logged out' message.
"""
next_page = None
redirect_field_name = REDIRECT_FIELD_NAME
template_name = 'registration/logged_out.html'
extra_context = None
@method_decorator(never_cache)
def dispatch(self, request, *args, **kwargs):
auth_logout(request)
next_page = self.get_next_page()
if next_page:
# Redirect to this page until the session has been cleared.
return HttpResponseRedirect(next_page)
return super(LogoutView, self).dispatch(request, *args, **kwargs)
def get_next_page(self):
if self.next_page is not None:
next_page = resolve_url(self.next_page)
elif settings.LOGOUT_REDIRECT_URL:
next_page = resolve_url(settings.LOGOUT_REDIRECT_URL)
else:
next_page = self.next_page
if (self.redirect_field_name in self.request.POST or
self.redirect_field_name in self.request.GET):
next_page = self.request.POST.get(
self.redirect_field_name,
self.request.GET.get(self.redirect_field_name)
)
url_is_safe = is_safe_url(
url=next_page,
allowed_hosts=self.get_success_url_allowed_hosts(),
require_https=self.request.is_secure(),
)
# Security check -- Ensure the user-originating redirection URL is
# safe.
if not url_is_safe:
next_page = self.request.path
return next_page
def get_context_data(self, **kwargs):
context = super(LogoutView, self).get_context_data(**kwargs)
current_site = get_current_site(self.request)
context.update({
'site': current_site,
'site_name': current_site.name,
'title': _('Logged out'),
})
if self.extra_context is not None:
context.update(self.extra_context)
return context
def logout(request, *args, **kwargs):
warnings.warn(
'The logout() view is superseded by the class-based LogoutView().',
RemovedInDjango21Warning, stacklevel=2
)
return LogoutView.as_view(**kwargs)(request, *args, **kwargs)
_sentinel = object()
def logout_then_login(request, login_url=None, extra_context=_sentinel):
"""
Logs out the user if they are logged in. Then redirects to the log-in page.
"""
if extra_context is not _sentinel:
warnings.warn(
"The unused `extra_context` parameter to `logout_then_login` "
"is deprecated.", RemovedInDjango21Warning
)
if not login_url:
login_url = settings.LOGIN_URL
login_url = resolve_url(login_url)
return LogoutView.as_view(next_page=login_url)(request)
def redirect_to_login(next, login_url=None,
redirect_field_name=REDIRECT_FIELD_NAME):
"""
    Redirects the user to the login page, passing the given 'next' page.
"""
resolved_url = resolve_url(login_url or settings.LOGIN_URL)
login_url_parts = list(urlparse(resolved_url))
if redirect_field_name:
querystring = QueryDict(login_url_parts[4], mutable=True)
querystring[redirect_field_name] = next
login_url_parts[4] = querystring.urlencode(safe='/')
return HttpResponseRedirect(urlunparse(login_url_parts))
# 4 views for password reset:
# - password_reset sends the mail
# - password_reset_done shows a success message for the above
# - password_reset_confirm checks the link the user clicked and
# prompts for a new password
# - password_reset_complete shows a success message for the above
@csrf_protect
def password_reset(request,
template_name='registration/password_reset_form.html',
email_template_name='registration/password_reset_email.html',
subject_template_name='registration/password_reset_subject.txt',
password_reset_form=PasswordResetForm,
token_generator=default_token_generator,
post_reset_redirect=None,
from_email=None,
extra_context=None,
html_email_template_name=None,
extra_email_context=None):
warnings.warn("The password_reset() view is superseded by the "
"class-based PasswordResetView().",
RemovedInDjango21Warning, stacklevel=2)
if post_reset_redirect is None:
post_reset_redirect = reverse('password_reset_done')
else:
post_reset_redirect = resolve_url(post_reset_redirect)
if request.method == "POST":
form = password_reset_form(request.POST)
if form.is_valid():
opts = {
'use_https': request.is_secure(),
'token_generator': token_generator,
'from_email': from_email,
'email_template_name': email_template_name,
'subject_template_name': subject_template_name,
'request': request,
'html_email_template_name': html_email_template_name,
'extra_email_context': extra_email_context,
}
form.save(**opts)
return HttpResponseRedirect(post_reset_redirect)
else:
form = password_reset_form()
context = {
'form': form,
'title': _('Password reset'),
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
def password_reset_done(request,
template_name='registration/password_reset_done.html',
extra_context=None):
warnings.warn("The password_reset_done() view is superseded by the "
"class-based PasswordResetDoneView().",
RemovedInDjango21Warning, stacklevel=2)
context = {
'title': _('Password reset sent'),
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
# Doesn't need csrf_protect since no-one can guess the URL
@sensitive_post_parameters()
@never_cache
def password_reset_confirm(request, uidb64=None, token=None,
template_name='registration/password_reset_confirm.html',
token_generator=default_token_generator,
set_password_form=SetPasswordForm,
post_reset_redirect=None,
extra_context=None):
"""
View that checks the hash in a password reset link and presents a
form for entering a new password.
"""
warnings.warn("The password_reset_confirm() view is superseded by the "
"class-based PasswordResetConfirmView().",
RemovedInDjango21Warning, stacklevel=2)
assert uidb64 is not None and token is not None # checked by URLconf
if post_reset_redirect is None:
post_reset_redirect = reverse('password_reset_complete')
else:
post_reset_redirect = resolve_url(post_reset_redirect)
try:
# urlsafe_base64_decode() decodes to bytestring on Python 3
uid = force_text(urlsafe_base64_decode(uidb64))
user = UserModel._default_manager.get(pk=uid)
except (TypeError, ValueError, OverflowError, UserModel.DoesNotExist):
user = None
if user is not None and token_generator.check_token(user, token):
validlink = True
title = _('Enter new password')
if request.method == 'POST':
form = set_password_form(user, request.POST)
if form.is_valid():
form.save()
return HttpResponseRedirect(post_reset_redirect)
else:
form = set_password_form(user)
else:
validlink = False
form = None
title = _('Password reset unsuccessful')
context = {
'form': form,
'title': title,
'validlink': validlink,
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
def password_reset_complete(request,
template_name='registration/password_reset_complete.html',
extra_context=None):
warnings.warn("The password_reset_complete() view is superseded by the "
"class-based PasswordResetCompleteView().",
RemovedInDjango21Warning, stacklevel=2)
context = {
'login_url': resolve_url(settings.LOGIN_URL),
'title': _('Password reset complete'),
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
# Class-based password reset views
# - PasswordResetView sends the mail
# - PasswordResetDoneView shows a success message for the above
# - PasswordResetConfirmView checks the link the user clicked and
# prompts for a new password
# - PasswordResetCompleteView shows a success message for the above
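
# A minimal URLconf wiring for these views (illustrative sketch; the confirm
# pattern must capture 'uidb64' and 'token', and the URL names must match the
# reverse()/reverse_lazy() defaults used in this module):
#
#   from django.conf.urls import url
#   from django.contrib.auth import views as auth_views
#
#   urlpatterns = [
#       url(r'^password_reset/$',
#           auth_views.PasswordResetView.as_view(), name='password_reset'),
#       url(r'^password_reset/done/$',
#           auth_views.PasswordResetDoneView.as_view(), name='password_reset_done'),
#       url(r'^reset/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
#           auth_views.PasswordResetConfirmView.as_view(), name='password_reset_confirm'),
#       url(r'^reset/done/$',
#           auth_views.PasswordResetCompleteView.as_view(), name='password_reset_complete'),
#   ]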
class PasswordContextMixin:
extra_context = None
def get_context_data(self, **kwargs):
context = super(PasswordContextMixin, self).get_context_data(**kwargs)
context['title'] = self.title
if self.extra_context is not None:
context.update(self.extra_context)
return context
class PasswordResetView(PasswordContextMixin, FormView):
email_template_name = 'registration/password_reset_email.html'
extra_email_context = None
form_class = PasswordResetForm
from_email = None
html_email_template_name = None
subject_template_name = 'registration/password_reset_subject.txt'
success_url = reverse_lazy('password_reset_done')
template_name = 'registration/password_reset_form.html'
title = _('Password reset')
token_generator = default_token_generator
@method_decorator(csrf_protect)
def dispatch(self, *args, **kwargs):
return super(PasswordResetView, self).dispatch(*args, **kwargs)
def form_valid(self, form):
opts = {
'use_https': self.request.is_secure(),
'token_generator': self.token_generator,
'from_email': self.from_email,
'email_template_name': self.email_template_name,
'subject_template_name': self.subject_template_name,
'request': self.request,
'html_email_template_name': self.html_email_template_name,
'extra_email_context': self.extra_email_context,
}
form.save(**opts)
return super(PasswordResetView, self).form_valid(form)
INTERNAL_RESET_URL_TOKEN = 'set-password'
INTERNAL_RESET_SESSION_TOKEN = '_password_reset_token'
class PasswordResetDoneView(PasswordContextMixin, TemplateView):
template_name = 'registration/password_reset_done.html'
title = _('Password reset sent')
class PasswordResetConfirmView(PasswordContextMixin, FormView):
form_class = SetPasswordForm
post_reset_login = False
success_url = reverse_lazy('password_reset_complete')
template_name = 'registration/password_reset_confirm.html'
title = _('Enter new password')
token_generator = default_token_generator
@method_decorator(sensitive_post_parameters())
@method_decorator(never_cache)
def dispatch(self, *args, **kwargs):
assert 'uidb64' in kwargs and 'token' in kwargs
self.validlink = False
self.user = self.get_user(kwargs['uidb64'])
if self.user is not None:
token = kwargs['token']
if token == INTERNAL_RESET_URL_TOKEN:
session_token = self.request.session.get(INTERNAL_RESET_SESSION_TOKEN)
if self.token_generator.check_token(self.user, session_token):
# If the token is valid, display the password reset form.
self.validlink = True
return super(PasswordResetConfirmView, self).dispatch(*args, **kwargs)
else:
if self.token_generator.check_token(self.user, token):
# Store the token in the session and redirect to the
# password reset form at a URL without the token. That
# avoids the possibility of leaking the token in the
# HTTP Referer header.
self.request.session[INTERNAL_RESET_SESSION_TOKEN] = token
redirect_url = self.request.path.replace(token, INTERNAL_RESET_URL_TOKEN)
return HttpResponseRedirect(redirect_url)
# Display the "Password reset unsuccessful" page.
return self.render_to_response(self.get_context_data())
def get_user(self, uidb64):
try:
# urlsafe_base64_decode() decodes to bytestring on Python 3
uid = force_text(urlsafe_base64_decode(uidb64))
user = UserModel._default_manager.get(pk=uid)
except (TypeError, ValueError, OverflowError, UserModel.DoesNotExist):
user = None
return user
def get_form_kwargs(self):
kwargs = super(PasswordResetConfirmView, self).get_form_kwargs()
kwargs['user'] = self.user
return kwargs
def form_valid(self, form):
user = form.save()
if self.post_reset_login:
auth_login(self.request, user)
del self.request.session[INTERNAL_RESET_SESSION_TOKEN]
return super(PasswordResetConfirmView, self).form_valid(form)
def get_context_data(self, **kwargs):
context = super(PasswordResetConfirmView, self).get_context_data(**kwargs)
if self.validlink:
context['validlink'] = True
else:
context.update({
'form': None,
'title': _('Password reset unsuccessful'),
'validlink': False,
})
return context
class PasswordResetCompleteView(PasswordContextMixin, TemplateView):
template_name = 'registration/password_reset_complete.html'
title = _('Password reset complete')
def get_context_data(self, **kwargs):
context = super(PasswordResetCompleteView, self).get_context_data(**kwargs)
context['login_url'] = resolve_url(settings.LOGIN_URL)
return context
@sensitive_post_parameters()
@csrf_protect
@login_required
def password_change(request,
template_name='registration/password_change_form.html',
post_change_redirect=None,
password_change_form=PasswordChangeForm,
extra_context=None):
warnings.warn("The password_change() view is superseded by the "
"class-based PasswordChangeView().",
RemovedInDjango21Warning, stacklevel=2)
if post_change_redirect is None:
post_change_redirect = reverse('password_change_done')
else:
post_change_redirect = resolve_url(post_change_redirect)
if request.method == "POST":
form = password_change_form(user=request.user, data=request.POST)
if form.is_valid():
form.save()
# Updating the password logs out all other sessions for the user
# except the current one.
update_session_auth_hash(request, form.user)
return HttpResponseRedirect(post_change_redirect)
else:
form = password_change_form(user=request.user)
context = {
'form': form,
'title': _('Password change'),
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
@login_required
def password_change_done(request,
template_name='registration/password_change_done.html',
extra_context=None):
warnings.warn("The password_change_done() view is superseded by the "
"class-based PasswordChangeDoneView().",
RemovedInDjango21Warning, stacklevel=2)
context = {
'title': _('Password change successful'),
}
if extra_context is not None:
context.update(extra_context)
return TemplateResponse(request, template_name, context)
class PasswordChangeView(PasswordContextMixin, FormView):
form_class = PasswordChangeForm
success_url = reverse_lazy('password_change_done')
template_name = 'registration/password_change_form.html'
title = _('Password change')
@method_decorator(sensitive_post_parameters())
@method_decorator(csrf_protect)
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(PasswordChangeView, self).dispatch(*args, **kwargs)
def get_form_kwargs(self):
kwargs = super(PasswordChangeView, self).get_form_kwargs()
kwargs['user'] = self.request.user
return kwargs
def form_valid(self, form):
form.save()
# Updating the password logs out all other sessions for the user
# except the current one.
update_session_auth_hash(self.request, form.user)
return super(PasswordChangeView, self).form_valid(form)
class PasswordChangeDoneView(PasswordContextMixin, TemplateView):
template_name = 'registration/password_change_done.html'
title = _('Password change successful')
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(PasswordChangeDoneView, self).dispatch(*args, **kwargs)
| bsd-3-clause |
guorendong/iridium-browser-ubuntu | tools/gyp/test/intermediate_dir/gyptest-intermediate-dir.py | 100 | 1400 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that targets have independent INTERMEDIATE_DIRs.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('test.gyp', chdir='src')
test.build('test.gyp', 'target1', chdir='src')
# Check stuff exists.
intermediate_file1 = test.read('src/outfile.txt')
test.must_contain(intermediate_file1, 'target1')
shared_intermediate_file1 = test.read('src/shared_outfile.txt')
test.must_contain(shared_intermediate_file1, 'shared_target1')
test.run_gyp('test2.gyp', chdir='src')
# Force the shared intermediate to be rebuilt.
test.sleep()
test.touch('src/shared_infile.txt')
test.build('test2.gyp', 'target2', chdir='src')
# Check INTERMEDIATE_DIR file didn't get overwritten but SHARED_INTERMEDIATE_DIR
# file did.
intermediate_file2 = test.read('src/outfile.txt')
test.must_contain(intermediate_file1, 'target1')
test.must_contain(intermediate_file2, 'target2')
shared_intermediate_file2 = test.read('src/shared_outfile.txt')
if shared_intermediate_file1 != shared_intermediate_file2:
test.fail_test(shared_intermediate_file1 + ' != ' + shared_intermediate_file2)
test.must_contain(shared_intermediate_file1, 'shared_target2')
test.must_contain(shared_intermediate_file2, 'shared_target2')
test.pass_test()
| bsd-3-clause |
nimzco/Environment | Sublime/Packages/mdpopups/st3/mdpopups/x11colors.py | 7 | 18277 | """
X11 colors.
A simple name to hex and hex to name map of X11 colors.
"""
name2hex_map = {
"black": "#000000",
"aliceblue": "#f0f8ff",
"blueviolet": "#8a2be2",
"cadetblue": "#5f9ea0",
"cadetblue1": "#98f5ff",
"cadetblue2": "#8ee5ee",
"cadetblue3": "#7ac5cd",
"cadetblue4": "#53868b",
"cornflowerblue": "#6495ed",
"darkblue": "#00008b",
"darkcyan": "#008b8b",
"darkslateblue": "#483d8b",
"darkturquoise": "#00ced1",
"deepskyblue": "#00bfff",
"deepskyblue1": "#00bfff",
"deepskyblue2": "#00b2ee",
"deepskyblue3": "#009acd",
"deepskyblue4": "#00688b",
"dodgerblue": "#1e90ff",
"dodgerblue1": "#1e90ff",
"dodgerblue2": "#1c86ee",
"dodgerblue3": "#1874cd",
"dodgerblue4": "#104e8b",
"lightblue": "#add8e6",
"lightblue1": "#bfefff",
"lightblue2": "#b2dfee",
"lightblue3": "#9ac0cd",
"lightblue4": "#68838b",
"lightcyan": "#e0ffff",
"lightcyan1": "#e0ffff",
"lightcyan2": "#d1eeee",
"lightcyan3": "#b4cdcd",
"lightcyan4": "#7a8b8b",
"lightskyblue": "#87cefa",
"lightskyblue1": "#b0e2ff",
"lightskyblue2": "#a4d3ee",
"lightskyblue3": "#8db6cd",
"lightskyblue4": "#607b8b",
"lightslateblue": "#8470ff",
"lightsteelblue": "#b0c4de",
"lightsteelblue1": "#cae1ff",
"lightsteelblue2": "#bcd2ee",
"lightsteelblue3": "#a2b5cd",
"lightsteelblue4": "#6e7b8b",
"mediumaquamarine": "#66cdaa",
"mediumblue": "#0000cd",
"mediumslateblue": "#7b68ee",
"mediumturquoise": "#48d1cc",
"midnightblue": "#191970",
"navyblue": "#000080",
"paleturquoise": "#afeeee",
"paleturquoise1": "#bbffff",
"paleturquoise2": "#aeeeee",
"paleturquoise3": "#96cdcd",
"paleturquoise4": "#668b8b",
"powderblue": "#b0e0e6",
"royalblue": "#4169e1",
"royalblue1": "#4876ff",
"royalblue2": "#436eee",
"royalblue3": "#3a5fcd",
"royalblue4": "#27408b",
"skyblue": "#87ceeb",
"skyblue1": "#87ceff",
"skyblue2": "#7ec0ee",
"skyblue3": "#6ca6cd",
"skyblue4": "#4a708b",
"slateblue": "#6a5acd",
"slateblue1": "#836fff",
"slateblue2": "#7a67ee",
"slateblue3": "#6959cd",
"slateblue4": "#473c8b",
"steelblue": "#4682b4",
"steelblue1": "#63b8ff",
"steelblue2": "#5cacee",
"steelblue3": "#4f94cd",
"steelblue4": "#36648b",
"aquamarine": "#7fffd4",
"aquamarine1": "#7fffd4",
"aquamarine2": "#76eec6",
"aquamarine3": "#66cdaa",
"aquamarine4": "#458b74",
"azure": "#f0ffff",
"azure1": "#f0ffff",
"azure2": "#e0eeee",
"azure3": "#c1cdcd",
"azure4": "#838b8b",
"blue": "#0000ff",
"blue1": "#0000ff",
"blue2": "#0000ee",
"blue3": "#0000cd",
"blue4": "#00008b",
"cyan": "#00ffff",
"cyan1": "#00ffff",
"cyan2": "#00eeee",
"cyan3": "#00cdcd",
"cyan4": "#008b8b",
"navy": "#000080",
"turquoise": "#40e0d0",
"turquoise1": "#00f5ff",
"turquoise2": "#00e5ee",
"turquoise3": "#00c5cd",
"turquoise4": "#00868b",
"rosybrown": "#bc8f8f",
"rosybrown1": "#ffc1c1",
"rosybrown2": "#eeb4b4",
"rosybrown3": "#cd9b9b",
"rosybrown4": "#8b6969",
"saddlebrown": "#8b4513",
"sandybrown": "#f4a460",
"beige": "#f5f5dc",
"brown": "#a52a2a",
"brown1": "#ff4040",
"brown2": "#ee3b3b",
"brown3": "#cd3333",
"brown4": "#8b2323",
"burlywood": "#deb887",
"burlywood1": "#ffd39b",
"burlywood2": "#eec591",
"burlywood3": "#cdaa7d",
"burlywood4": "#8b7355",
"chocolate": "#d2691e",
"chocolate1": "#ff7f24",
"chocolate2": "#ee7621",
"chocolate3": "#cd661d",
"chocolate4": "#8b4513",
"peru": "#cd853f",
"tan": "#d2b48c",
"tan1": "#ffa54f",
"tan2": "#ee9a49",
"tan3": "#cd853f",
"tan4": "#8b5a2b",
"darkslategray": "#2f4f4f",
"darkslategray1": "#97ffff",
"darkslategray2": "#8deeee",
"darkslategray3": "#79cdcd",
"darkslategray4": "#528b8b",
"darkslategrey": "#2f4f4f",
"dimgray": "#696969",
"dimgrey": "#696969",
"lightgray": "#d3d3d3",
"lightgrey": "#d3d3d3",
"lightslategray": "#778899",
"lightslategrey": "#778899",
"slategray": "#708090",
"slategray1": "#c6e2ff",
"slategray2": "#b9d3ee",
"slategray3": "#9fb6cd",
"slategray4": "#6c7b8b",
"slategrey": "#708090",
"gray": "#bebebe",
"gray0": "#000000",
"gray1": "#030303",
"gray2": "#050505",
"gray3": "#080808",
"gray4": "#0a0a0a",
"gray5": "#0d0d0d",
"gray6": "#0f0f0f",
"gray7": "#121212",
"gray8": "#141414",
"gray9": "#171717",
"gray10": "#1a1a1a",
"gray11": "#1c1c1c",
"gray12": "#1f1f1f",
"gray13": "#212121",
"gray14": "#242424",
"gray15": "#262626",
"gray16": "#292929",
"gray17": "#2b2b2b",
"gray18": "#2e2e2e",
"gray19": "#303030",
"gray20": "#333333",
"gray21": "#363636",
"gray22": "#383838",
"gray23": "#3b3b3b",
"gray24": "#3d3d3d",
"gray25": "#404040",
"gray26": "#424242",
"gray27": "#454545",
"gray28": "#474747",
"gray29": "#4a4a4a",
"gray30": "#4d4d4d",
"gray31": "#4f4f4f",
"gray32": "#525252",
"gray33": "#545454",
"gray34": "#575757",
"gray35": "#595959",
"gray36": "#5c5c5c",
"gray37": "#5e5e5e",
"gray38": "#616161",
"gray39": "#636363",
"gray40": "#666666",
"gray41": "#696969",
"gray42": "#6b6b6b",
"gray43": "#6e6e6e",
"gray44": "#707070",
"gray45": "#737373",
"gray46": "#757575",
"gray47": "#787878",
"gray48": "#7a7a7a",
"gray49": "#7d7d7d",
"gray50": "#7f7f7f",
"gray51": "#828282",
"gray52": "#858585",
"gray53": "#878787",
"gray54": "#8a8a8a",
"gray55": "#8c8c8c",
"gray56": "#8f8f8f",
"gray57": "#919191",
"gray58": "#949494",
"gray59": "#969696",
"gray60": "#999999",
"gray61": "#9c9c9c",
"gray62": "#9e9e9e",
"gray63": "#a1a1a1",
"gray64": "#a3a3a3",
"gray65": "#a6a6a6",
"gray66": "#a8a8a8",
"gray67": "#ababab",
"gray68": "#adadad",
"gray69": "#b0b0b0",
"gray70": "#b3b3b3",
"gray71": "#b5b5b5",
"gray72": "#b8b8b8",
"gray73": "#bababa",
"gray74": "#bdbdbd",
"gray75": "#bfbfbf",
"gray76": "#c2c2c2",
"gray77": "#c4c4c4",
"gray78": "#c7c7c7",
"gray79": "#c9c9c9",
"gray80": "#cccccc",
"gray81": "#cfcfcf",
"gray82": "#d1d1d1",
"gray83": "#d4d4d4",
"gray84": "#d6d6d6",
"gray85": "#d9d9d9",
"gray86": "#dbdbdb",
"gray87": "#dedede",
"gray88": "#e0e0e0",
"gray89": "#e3e3e3",
"gray90": "#e5e5e5",
"gray91": "#e8e8e8",
"gray92": "#ebebeb",
"gray93": "#ededed",
"gray94": "#f0f0f0",
"gray95": "#f2f2f2",
"gray96": "#f5f5f5",
"gray97": "#f7f7f7",
"gray98": "#fafafa",
"gray99": "#fcfcfc",
"gray100": "#ffffff",
"grey": "#bebebe",
"grey0": "#000000",
"grey1": "#030303",
"grey2": "#050505",
"grey3": "#080808",
"grey4": "#0a0a0a",
"grey5": "#0d0d0d",
"grey6": "#0f0f0f",
"grey7": "#121212",
"grey8": "#141414",
"grey9": "#171717",
"grey10": "#1a1a1a",
"grey11": "#1c1c1c",
"grey12": "#1f1f1f",
"grey13": "#212121",
"grey14": "#242424",
"grey15": "#262626",
"grey16": "#292929",
"grey17": "#2b2b2b",
"grey18": "#2e2e2e",
"grey19": "#303030",
"grey20": "#333333",
"grey21": "#363636",
"grey22": "#383838",
"grey23": "#3b3b3b",
"grey24": "#3d3d3d",
"grey25": "#404040",
"grey26": "#424242",
"grey27": "#454545",
"grey28": "#474747",
"grey29": "#4a4a4a",
"grey30": "#4d4d4d",
"grey31": "#4f4f4f",
"grey32": "#525252",
"grey33": "#545454",
"grey34": "#575757",
"grey35": "#595959",
"grey36": "#5c5c5c",
"grey37": "#5e5e5e",
"grey38": "#616161",
"grey39": "#636363",
"grey40": "#666666",
"grey41": "#696969",
"grey42": "#6b6b6b",
"grey43": "#6e6e6e",
"grey44": "#707070",
"grey45": "#737373",
"grey46": "#757575",
"grey47": "#787878",
"grey48": "#7a7a7a",
"grey49": "#7d7d7d",
"grey50": "#7f7f7f",
"grey51": "#828282",
"grey52": "#858585",
"grey53": "#878787",
"grey54": "#8a8a8a",
"grey55": "#8c8c8c",
"grey56": "#8f8f8f",
"grey57": "#919191",
"grey58": "#949494",
"grey59": "#969696",
"grey60": "#999999",
"grey61": "#9c9c9c",
"grey62": "#9e9e9e",
"grey63": "#a1a1a1",
"grey64": "#a3a3a3",
"grey65": "#a6a6a6",
"grey66": "#a8a8a8",
"grey67": "#ababab",
"grey68": "#adadad",
"grey69": "#b0b0b0",
"grey70": "#b3b3b3",
"grey71": "#b5b5b5",
"grey72": "#b8b8b8",
"grey73": "#bababa",
"grey74": "#bdbdbd",
"grey75": "#bfbfbf",
"grey76": "#c2c2c2",
"grey77": "#c4c4c4",
"grey78": "#c7c7c7",
"grey79": "#c9c9c9",
"grey80": "#cccccc",
"grey81": "#cfcfcf",
"grey82": "#d1d1d1",
"grey83": "#d4d4d4",
"grey84": "#d6d6d6",
"grey85": "#d9d9d9",
"grey86": "#dbdbdb",
"grey87": "#dedede",
"grey88": "#e0e0e0",
"grey89": "#e3e3e3",
"grey90": "#e5e5e5",
"grey91": "#e8e8e8",
"grey92": "#ebebeb",
"grey93": "#ededed",
"grey94": "#f0f0f0",
"grey95": "#f2f2f2",
"grey96": "#f5f5f5",
"grey97": "#f7f7f7",
"grey98": "#fafafa",
"grey99": "#fcfcfc",
"grey100": "#ffffff",
"darkgreen": "#006400",
"darkkhaki": "#bdb76b",
"darkolivegreen": "#556b2f",
"darkolivegreen1": "#caff70",
"darkolivegreen2": "#bcee68",
"darkolivegreen3": "#a2cd5a",
"darkolivegreen4": "#6e8b3d",
"darkseagreen": "#8fbc8f",
"darkseagreen1": "#c1ffc1",
"darkseagreen2": "#b4eeb4",
"darkseagreen3": "#9bcd9b",
"darkseagreen4": "#698b69",
"forestgreen": "#228b22",
"greenyellow": "#adff2f",
"lawngreen": "#7cfc00",
"lightgreen": "#90ee90",
"lightseagreen": "#20b2aa",
"limegreen": "#32cd32",
"mediumseagreen": "#3cb371",
"mediumspringgreen": "#00fa9a",
"mintcream": "#f5fffa",
"olivedrab": "#6b8e23",
"olivedrab1": "#c0ff3e",
"olivedrab2": "#b3ee3a",
"olivedrab3": "#9acd32",
"olivedrab4": "#698b22",
"palegreen": "#98fb98",
"palegreen1": "#9aff9a",
"palegreen2": "#90ee90",
"palegreen3": "#7ccd7c",
"palegreen4": "#548b54",
"seagreen": "#2e8b57",
"seagreen1": "#54ff9f",
"seagreen2": "#4eee94",
"seagreen3": "#43cd80",
"seagreen4": "#2e8b57",
"springgreen": "#00ff7f",
"springgreen1": "#00ff7f",
"springgreen2": "#00ee76",
"springgreen3": "#00cd66",
"springgreen4": "#008b45",
"yellowgreen": "#9acd32",
"chartreuse": "#7fff00",
"chartreuse1": "#7fff00",
"chartreuse2": "#76ee00",
"chartreuse3": "#66cd00",
"chartreuse4": "#458b00",
"green": "#00ff00",
"green1": "#00ff00",
"green2": "#00ee00",
"green3": "#00cd00",
"green4": "#008b00",
"khaki": "#f0e68c",
"khaki1": "#fff68f",
"khaki2": "#eee685",
"khaki3": "#cdc673",
"khaki4": "#8b864e",
"darkorange": "#ff8c00",
"darkorange1": "#ff7f00",
"darkorange2": "#ee7600",
"darkorange3": "#cd6600",
"darkorange4": "#8b4500",
"darksalmon": "#e9967a",
"lightcoral": "#f08080",
"lightsalmon": "#ffa07a",
"lightsalmon1": "#ffa07a",
"lightsalmon2": "#ee9572",
"lightsalmon3": "#cd8162",
"lightsalmon4": "#8b5742",
"peachpuff": "#ffdab9",
"peachpuff1": "#ffdab9",
"peachpuff2": "#eecbad",
"peachpuff3": "#cdaf95",
"peachpuff4": "#8b7765",
"bisque": "#ffe4c4",
"bisque1": "#ffe4c4",
"bisque2": "#eed5b7",
"bisque3": "#cdb79e",
"bisque4": "#8b7d6b",
"coral": "#ff7f50",
"coral1": "#ff7256",
"coral2": "#ee6a50",
"coral3": "#cd5b45",
"coral4": "#8b3e2f",
"honeydew": "#f0fff0",
"honeydew1": "#f0fff0",
"honeydew2": "#e0eee0",
"honeydew3": "#c1cdc1",
"honeydew4": "#838b83",
"orange": "#ffa500",
"orange1": "#ffa500",
"orange2": "#ee9a00",
"orange3": "#cd8500",
"orange4": "#8b5a00",
"salmon": "#fa8072",
"salmon1": "#ff8c69",
"salmon2": "#ee8262",
"salmon3": "#cd7054",
"salmon4": "#8b4c39",
"sienna": "#a0522d",
"sienna1": "#ff8247",
"sienna2": "#ee7942",
"sienna3": "#cd6839",
"sienna4": "#8b4726",
"darkred": "#8b0000",
"deeppink": "#ff1493",
"deeppink1": "#ff1493",
"deeppink2": "#ee1289",
"deeppink3": "#cd1076",
"deeppink4": "#8b0a50",
"hotpink": "#ff69b4",
"hotpink1": "#ff6eb4",
"hotpink2": "#ee6aa7",
"hotpink3": "#cd6090",
"hotpink4": "#8b3a62",
"indianred": "#cd5c5c",
"indianred1": "#ff6a6a",
"indianred2": "#ee6363",
"indianred3": "#cd5555",
"indianred4": "#8b3a3a",
"lightpink": "#ffb6c1",
"lightpink1": "#ffaeb9",
"lightpink2": "#eea2ad",
"lightpink3": "#cd8c95",
"lightpink4": "#8b5f65",
"mediumvioletred": "#c71585",
"mistyrose": "#ffe4e1",
"mistyrose1": "#ffe4e1",
"mistyrose2": "#eed5d2",
"mistyrose3": "#cdb7b5",
"mistyrose4": "#8b7d7b",
"orangered": "#ff4500",
"orangered1": "#ff4500",
"orangered2": "#ee4000",
"orangered3": "#cd3700",
"orangered4": "#8b2500",
"palevioletred": "#db7093",
"palevioletred1": "#ff82ab",
"palevioletred2": "#ee799f",
"palevioletred3": "#cd6889",
"palevioletred4": "#8b475d",
"violetred": "#d02090",
"violetred1": "#ff3e96",
"violetred2": "#ee3a8c",
"violetred3": "#cd3278",
"violetred4": "#8b2252",
"firebrick": "#b22222",
"firebrick1": "#ff3030",
"firebrick2": "#ee2c2c",
"firebrick3": "#cd2626",
"firebrick4": "#8b1a1a",
"pink": "#ffc0cb",
"pink1": "#ffb5c5",
"pink2": "#eea9b8",
"pink3": "#cd919e",
"pink4": "#8b636c",
"red": "#ff0000",
"red1": "#ff0000",
"red2": "#ee0000",
"red3": "#cd0000",
"red4": "#8b0000",
"tomato": "#ff6347",
"tomato1": "#ff6347",
"tomato2": "#ee5c42",
"tomato3": "#cd4f39",
"tomato4": "#8b3626",
"darkmagenta": "#8b008b",
"darkorchid": "#9932cc",
"darkorchid1": "#bf3eff",
"darkorchid2": "#b23aee",
"darkorchid3": "#9a32cd",
"darkorchid4": "#68228b",
"darkviolet": "#9400d3",
"lavenderblush": "#fff0f5",
"lavenderblush1": "#fff0f5",
"lavenderblush2": "#eee0e5",
"lavenderblush3": "#cdc1c5",
"lavenderblush4": "#8b8386",
"mediumorchid": "#ba55d3",
"mediumorchid1": "#e066ff",
"mediumorchid2": "#d15fee",
"mediumorchid3": "#b452cd",
"mediumorchid4": "#7a378b",
"mediumpurple": "#9370db",
"mediumpurple1": "#ab82ff",
"mediumpurple2": "#9f79ee",
"mediumpurple3": "#8968cd",
"mediumpurple4": "#5d478b",
"lavender": "#e6e6fa",
"magenta": "#ff00ff",
"magenta1": "#ff00ff",
"magenta2": "#ee00ee",
"magenta3": "#cd00cd",
"magenta4": "#8b008b",
"maroon": "#b03060",
"maroon1": "#ff34b3",
"maroon2": "#ee30a7",
"maroon3": "#cd2990",
"maroon4": "#8b1c62",
"orchid": "#da70d6",
"orchid1": "#ff83fa",
"orchid2": "#ee7ae9",
"orchid3": "#cd69c9",
"orchid4": "#8b4789",
"plum": "#dda0dd",
"plum1": "#ffbbff",
"plum2": "#eeaeee",
"plum3": "#cd96cd",
"plum4": "#8b668b",
"purple": "#a020f0",
"purple1": "#9b30ff",
"purple2": "#912cee",
"purple3": "#7d26cd",
"purple4": "#551a8b",
"thistle": "#d8bfd8",
"thistle1": "#ffe1ff",
"thistle2": "#eed2ee",
"thistle3": "#cdb5cd",
"thistle4": "#8b7b8b",
"violet": "#ee82ee",
"antiquewhite": "#faebd7",
"antiquewhite1": "#ffefdb",
"antiquewhite2": "#eedfcc",
"antiquewhite3": "#cdc0b0",
"antiquewhite4": "#8b8378",
"floralwhite": "#fffaf0",
"ghostwhite": "#f8f8ff",
"navajowhite": "#ffdead",
"navajowhite1": "#ffdead",
"navajowhite2": "#eecfa1",
"navajowhite3": "#cdb38b",
"navajowhite4": "#8b795e",
"oldlace": "#fdf5e6",
"whitesmoke": "#f5f5f5",
"gainsboro": "#dcdcdc",
"ivory": "#fffff0",
"ivory1": "#fffff0",
"ivory2": "#eeeee0",
"ivory3": "#cdcdc1",
"ivory4": "#8b8b83",
"linen": "#faf0e6",
"seashell": "#fff5ee",
"seashell1": "#fff5ee",
"seashell2": "#eee5de",
"seashell3": "#cdc5bf",
"seashell4": "#8b8682",
"snow": "#fffafa",
"snow1": "#fffafa",
"snow2": "#eee9e9",
"snow3": "#cdc9c9",
"snow4": "#8b8989",
"wheat": "#f5deb3",
"wheat1": "#ffe7ba",
"wheat2": "#eed8ae",
"wheat3": "#cdba96",
"wheat4": "#8b7e66",
"white": "#ffffff",
"blanchedalmond": "#ffebcd",
"darkgoldenrod": "#b8860b",
"darkgoldenrod1": "#ffb90f",
"darkgoldenrod2": "#eead0e",
"darkgoldenrod3": "#cd950c",
"darkgoldenrod4": "#8b6508",
"lemonchiffon": "#fffacd",
"lemonchiffon1": "#fffacd",
"lemonchiffon2": "#eee9bf",
"lemonchiffon3": "#cdc9a5",
"lemonchiffon4": "#8b8970",
"lightgoldenrod": "#eedd82",
"lightgoldenrod1": "#ffec8b",
"lightgoldenrod2": "#eedc82",
"lightgoldenrod3": "#cdbe70",
"lightgoldenrod4": "#8b814c",
"lightgoldenrodyellow": "#fafad2",
"lightyellow": "#ffffe0",
"lightyellow1": "#ffffe0",
"lightyellow2": "#eeeed1",
"lightyellow3": "#cdcdb4",
"lightyellow4": "#8b8b7a",
"palegoldenrod": "#eee8aa",
"papayawhip": "#ffefd5",
"cornsilk": "#fff8dc",
"cornsilk1": "#fff8dc",
"cornsilk2": "#eee8cd",
"cornsilk3": "#cdc8b1",
"cornsilk4": "#8b8878",
"gold": "#ffd700",
"gold1": "#ffd700",
"gold2": "#eec900",
"gold3": "#cdad00",
"gold4": "#8b7500",
"goldenrod": "#daa520",
"goldenrod1": "#ffc125",
"goldenrod2": "#eeb422",
"goldenrod3": "#cd9b1d",
"goldenrod4": "#8b6914",
"moccasin": "#ffe4b5",
"yellow": "#ffff00",
"yellow1": "#ffff00",
"yellow2": "#eeee00",
"yellow3": "#cdcd00",
"yellow4": "#8b8b00"
}
hex2name_map = {v: k for k, v in name2hex_map.items()}
def hex2name(value):
"""Convert X11 hex to webcolor name."""
return hex2name_map.get(value.lower(), None)
def name2hex(name):
"""Convert X11 webcolor name to hex."""
return name2hex_map.get(name.lower(), None)
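
if __name__ == '__main__':
    # Quick illustrative round-trip check (not part of the original module).
    # Lookups are case-insensitive; note that duplicate hex values (e.g.
    # "black" vs "gray0") make hex2name() ambiguous for shared colors.
    assert name2hex('Gainsboro') == '#dcdcdc'
    assert hex2name('#DCDCDC') == 'gainsboro'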
| mit |
Integral-Technology-Solutions/ConfigNOW-4.3 | core/validators/machines.py | 2 | 5255 | # ============================================================================
#
# Copyright (c) 2007-2010 Integral Technology Solutions Pty Ltd,
# All Rights Reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE
# LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR
# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
#
# FOR FURTHER INFORMATION PLEASE SEE THE INTEGRAL TECHNOLOGY SOLUTIONS
# END USER LICENSE AGREEMENT (EULA).
#
# ============================================================================
import validation_helper as helper

# NOTE: 'log' is assumed to be a logger made available globally by the
# ConfigNOW runtime; it is not imported here.


def run(config):
    """Entry point: return True only if the machine configuration is valid."""
    machinesValid = helper.validateList(config, 'wls.domain.machines')
    if not machinesValid:
        return False
    if validateMachines(config):
        return False
    return True

def validateMachines(domainProperties):
    """Validate every machine listed in wls.domain.machines.

    Returns 1 if any machine property fails validation, 0 otherwise.
    """
    error = 0
    machines = domainProperties.getProperty('wls.domain.machines')
    if machines is not None and len(machines) > 0:
        machineList = machines.split(',')
        for machine in machineList:
            helper.printHeader('[VALIDATING] machine ' + str(machine) + ' properties')

            # The machine name is mandatory.
            machineName = domainProperties.getProperty('wls.domain.machine.' + str(machine) + '.name')
            if machineName is None or len(machineName) == 0:
                error = 1
                log.error('Please verify wls.domain.machine.' + str(machine) + '.name property if it exists in configuration.')
            else:
                log.debug('Machine [' + str(machine) + '] name property [' + str(machineName) + '] is valid.')

            # The node manager type is optional but, when present, must be one
            # of the supported listen types.
            nodeType = domainProperties.getProperty('wls.domain.machine.' + str(machine) + '.nodemanager.type')
            if nodeType is not None and len(nodeType) > 0:
                if nodeType not in ('SSH', 'RSH', 'Plain', 'SSL', 'ssh', 'rsh', 'ssl', 'plain'):
                    error = 1
                    log.error('The wls.domain.machine.' + str(machine) + '.nodemanager.type property supports only [SSH,RSH,Plain,SSL,ssh,rsh,ssl,plain].')
                else:
                    log.debug('Machine [' + str(machine) + '] node type property [' + str(nodeType) + '] is valid.')

            # The node manager listen address is mandatory.
            nodeAddr = domainProperties.getProperty('wls.domain.machine.' + str(machine) + '.nodemanager.address')
            if nodeAddr is None or len(nodeAddr) == 0:
                error = 1
                log.error('Please verify wls.domain.machine.' + str(machine) + '.nodemanager.address property if it exists in configuration.')
            else:
                log.debug('Machine [' + str(machine) + '] node address property [' + str(nodeAddr) + '] is valid.')

            # The node manager port, when present, must be an integer in the
            # range [0-65535]; anything else fails validation.
            nodePort = domainProperties.getProperty('wls.domain.machine.' + str(machine) + '.nodemanager.port')
            if nodePort is not None and len(nodePort) > 0:
                try:
                    int(nodePort)
                except ValueError:
                    error = 1
                    log.error('Please verify wls.domain.machine.' + str(machine) + '.nodemanager.port property.')
                else:
                    if int(nodePort) < 0 or int(nodePort) > 65535:
                        error = 1
                        log.error('Please verify wls.domain.machine.' + str(machine) + '.nodemanager.port property, port number is not in valid range [0-65535].')
                    else:
                        log.debug('Machine [' + str(machine) + '] node manager port [' + str(nodePort) + '] is valid.')

            # SSH/RSH node managers additionally require a node manager home
            # directory and a shell command.
            if nodeType in ('SSH', 'ssh', 'RSH', 'rsh'):
                nodehome = domainProperties.getProperty('wls.domain.machine.' + str(machine) + '.nodemanager.nodeManagerHome')
                if nodehome is None or len(nodehome) == 0:
                    error = 1
                    log.error('Please verify wls.domain.machine.' + str(machine) + '.nodemanager.nodeManagerHome property if it exists in configuration.')
                else:
                    log.debug('Machine [' + str(machine) + '] nodemanager home property [' + str(nodehome) + '] is valid.')

                nodeShell = domainProperties.getProperty('wls.domain.machine.' + str(machine) + '.nodemanager.shellCommand')
                if nodeShell is None or len(nodeShell) == 0:
                    error = 1
                    log.error('Please verify wls.domain.machine.' + str(machine) + '.nodemanager.shellCommand property if it exists in configuration.')
                else:
                    log.debug('Machine [' + str(machine) + '] nodemanager shell command property [' + str(nodeShell) + '] is valid.')
    return error
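
# A minimal sketch of the properties this validator reads (the keys come from
# the checks above; all values are illustrative assumptions, not a shipped
# configuration):
#
#   wls.domain.machines=machine1
#   wls.domain.machine.machine1.name=Machine1
#   wls.domain.machine.machine1.nodemanager.type=SSH
#   wls.domain.machine.machine1.nodemanager.address=192.168.0.10
#   wls.domain.machine.machine1.nodemanager.port=5556
#   wls.domain.machine.machine1.nodemanager.nodeManagerHome=/opt/wls/nodemanager
#   wls.domain.machine.machine1.nodemanager.shellCommand=ssh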
| mit |