repo_name (string, 5–100 chars) | path (string, 4–375 chars) | copies (class, 991 values) | size (string, 4–7 chars) | content (string, 666 chars–1M) | license (class, 15 values)
---|---|---|---|---|---|
dirn/ansible | lib/ansible/parsing/vault/__init__.py | 18 | 19977 | # (c) 2014, James Tanner <[email protected]>
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# ansible-pull is a script that runs ansible in local mode
# after checking out a playbooks directory from source repo. There is an
# example playbook to bootstrap this script in the examples/ dir which
# installs ansible and sets it up to run on cron.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import sys
import os
import shlex
import shutil
import tempfile
from io import BytesIO
from subprocess import call
from ansible.errors import AnsibleError
from hashlib import sha256
from binascii import hexlify
from binascii import unhexlify
from six import binary_type, PY3, text_type
# Note: Only used for loading obsolete VaultAES files. All files are written
# using the newer VaultAES256 which does not require md5
from hashlib import md5
try:
from six import byte2int
except ImportError:
# byte2int added in six-1.4.0
if PY3:
import operator
byte2int = operator.itemgetter(0)
else:
def byte2int(bs):
return ord(bs[0])
from ansible.utils.unicode import to_unicode, to_bytes
try:
from Crypto.Hash import SHA256, HMAC
HAS_HASH = True
except ImportError:
HAS_HASH = False
# Counter import fails for 2.0.1, requires >= 2.6.1 from pip
try:
from Crypto.Util import Counter
HAS_COUNTER = True
except ImportError:
HAS_COUNTER = False
# KDF import fails for 2.0.1, requires >= 2.6.1 from pip
try:
from Crypto.Protocol.KDF import PBKDF2
HAS_PBKDF2 = True
except ImportError:
HAS_PBKDF2 = False
# AES IMPORTS
try:
from Crypto.Cipher import AES as AES
HAS_AES = True
except ImportError:
HAS_AES = False
CRYPTO_UPGRADE = "ansible-vault requires a newer version of pycrypto than the one installed on your platform. You may fix this with OS-specific commands such as: yum install python-devel; rpm -e --nodeps python-crypto; pip install pycrypto"
HEADER=u'$ANSIBLE_VAULT'
CIPHER_WHITELIST=['AES', 'AES256']
def check_prereqs():
if not HAS_AES or not HAS_COUNTER or not HAS_PBKDF2 or not HAS_HASH:
raise AnsibleError(CRYPTO_UPGRADE)
class VaultLib(object):
def __init__(self, password):
self.password = password
self.cipher_name = None
self.version = '1.1'
def is_encrypted(self, data):
data = to_unicode(data)
if data.startswith(HEADER):
return True
else:
return False
def encrypt(self, data):
data = to_unicode(data)
if self.is_encrypted(data):
raise AnsibleError("data is already encrypted")
if not self.cipher_name:
self.cipher_name = "AES256"
# raise AnsibleError("the cipher must be set before encrypting data")
if 'Vault' + self.cipher_name in globals() and self.cipher_name in CIPHER_WHITELIST:
cipher = globals()['Vault' + self.cipher_name]
this_cipher = cipher()
else:
raise AnsibleError("{0} cipher could not be found".format(self.cipher_name))
"""
# combine sha + data
this_sha = sha256(data).hexdigest()
tmp_data = this_sha + "\n" + data
"""
# encrypt sha + data
enc_data = this_cipher.encrypt(data, self.password)
# add header
tmp_data = self._add_header(enc_data)
return tmp_data
def decrypt(self, data):
data = to_bytes(data)
if self.password is None:
raise AnsibleError("A vault password must be specified to decrypt data")
if not self.is_encrypted(data):
raise AnsibleError("data is not encrypted")
# clean out header
data = self._split_header(data)
# create the cipher object
ciphername = to_unicode(self.cipher_name)
if 'Vault' + ciphername in globals() and ciphername in CIPHER_WHITELIST:
cipher = globals()['Vault' + ciphername]
this_cipher = cipher()
else:
raise AnsibleError("{0} cipher could not be found".format(ciphername))
# try to decrypt data
data = this_cipher.decrypt(data, self.password)
if data is None:
raise AnsibleError("Decryption failed")
return data
def _add_header(self, data):
# combine header and encrypted data in 80 char columns
#tmpdata = hexlify(data)
tmpdata = [to_bytes(data[i:i+80]) for i in range(0, len(data), 80)]
if not self.cipher_name:
raise AnsibleError("the cipher must be set before adding a header")
dirty_data = to_bytes(HEADER + ";" + self.version + ";" + self.cipher_name + "\n")
for l in tmpdata:
dirty_data += l + b'\n'
return dirty_data
def _split_header(self, data):
# used by decrypt
tmpdata = data.split(b'\n')
tmpheader = tmpdata[0].strip().split(b';')
self.version = to_unicode(tmpheader[1].strip())
self.cipher_name = to_unicode(tmpheader[2].strip())
clean_data = b'\n'.join(tmpdata[1:])
"""
# strip out newline, join, unhex
clean_data = [ x.strip() for x in clean_data ]
clean_data = unhexlify(''.join(clean_data))
"""
return clean_data
def __enter__(self):
return self
def __exit__(self, *err):
pass
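# --- Illustrative sketch, not from the original module ---
# A minimal round trip through VaultLib, assuming pycrypto is available;
# the function name `_demo_vault_roundtrip` is hypothetical.
def _demo_vault_roundtrip():
    vault = VaultLib(password='secret')
    blob = vault.encrypt(u'some plaintext')
    # the first line is the header written by _add_header()
    assert blob.splitlines()[0] == b'$ANSIBLE_VAULT;1.1;AES256'
    assert vault.decrypt(blob) == u'some plaintext'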
class VaultEditor(object):
# uses helper methods write_data(self, data, filename)
# to write a file so that code isn't duplicated for simple
# file I/O, ditto read_data(self, filename) and _editor_shell_command(self, filename)
# ... "Don't Repeat Yourself", etc.
def __init__(self, cipher_name, password, filename):
# instantiates a member variable for VaultLib
self.cipher_name = cipher_name
self.password = password
self.filename = filename
def _edit_file_helper(self, existing_data=None, cipher=None):
# make sure the umask is set to a sane value
old_umask = os.umask(0o077)
# Create a tempfile
_, tmp_path = tempfile.mkstemp()
if existing_data:
self.write_data(existing_data, tmp_path)
# drop the user into an editor on the tmp file
call(self._editor_shell_command(tmp_path))
tmpdata = self.read_data(tmp_path)
# create new vault
this_vault = VaultLib(self.password)
if cipher:
this_vault.cipher_name = cipher
# encrypt new data and write out to tmp
enc_data = this_vault.encrypt(tmpdata)
self.write_data(enc_data, tmp_path)
# shuffle tmp file into place
self.shuffle_files(tmp_path, self.filename)
# and restore umask
os.umask(old_umask)
def create_file(self):
""" create a new encrypted file """
check_prereqs()
if os.path.isfile(self.filename):
raise AnsibleError("%s exists, please use 'edit' instead" % self.filename)
# Let the user specify contents and save file
self._edit_file_helper(cipher=self.cipher_name)
def decrypt_file(self):
check_prereqs()
if not os.path.isfile(self.filename):
raise AnsibleError("%s does not exist" % self.filename)
tmpdata = self.read_data(self.filename)
this_vault = VaultLib(self.password)
if this_vault.is_encrypted(tmpdata):
dec_data = this_vault.decrypt(tmpdata)
if dec_data is None:
raise AnsibleError("Decryption failed")
else:
self.write_data(dec_data, self.filename)
else:
raise AnsibleError("%s is not encrypted" % self.filename)
def edit_file(self):
check_prereqs()
# decrypt to tmpfile
tmpdata = self.read_data(self.filename)
this_vault = VaultLib(self.password)
dec_data = this_vault.decrypt(tmpdata)
# let the user edit the data and save
self._edit_file_helper(existing_data=dec_data)
# we want the cipher to default to AES256 (get rid of files
# encrypted with the AES cipher)
#self._edit_file_helper(existing_data=dec_data, cipher=this_vault.cipher_name)
def view_file(self):
check_prereqs()
# decrypt to tmpfile
tmpdata = self.read_data(self.filename)
this_vault = VaultLib(self.password)
dec_data = this_vault.decrypt(tmpdata)
_, tmp_path = tempfile.mkstemp()
self.write_data(dec_data, tmp_path)
# drop the user into pager on the tmp file
call(self._pager_shell_command(tmp_path))
os.remove(tmp_path)
def encrypt_file(self):
check_prereqs()
if not os.path.isfile(self.filename):
raise AnsibleError("%s does not exist" % self.filename)
tmpdata = self.read_data(self.filename)
this_vault = VaultLib(self.password)
this_vault.cipher_name = self.cipher_name
if not this_vault.is_encrypted(tmpdata):
enc_data = this_vault.encrypt(tmpdata)
self.write_data(enc_data, self.filename)
else:
raise AnsibleError("%s is already encrypted" % self.filename)
def rekey_file(self, new_password):
check_prereqs()
# decrypt
tmpdata = self.read_data(self.filename)
this_vault = VaultLib(self.password)
dec_data = this_vault.decrypt(tmpdata)
# create new vault
new_vault = VaultLib(new_password)
# we want to force cipher to the default
#new_vault.cipher_name = this_vault.cipher_name
# re-encrypt data and re-write file
enc_data = new_vault.encrypt(dec_data)
self.write_data(enc_data, self.filename)
def read_data(self, filename):
f = open(filename, "rb")
tmpdata = f.read()
f.close()
return tmpdata
def write_data(self, data, filename):
if os.path.isfile(filename):
os.remove(filename)
f = open(filename, "wb")
f.write(to_bytes(data))
f.close()
def shuffle_files(self, src, dest):
# overwrite dest with src
if os.path.isfile(dest):
os.remove(dest)
shutil.move(src, dest)
def _editor_shell_command(self, filename):
EDITOR = os.environ.get('EDITOR','vim')
editor = shlex.split(EDITOR)
editor.append(filename)
return editor
def _pager_shell_command(self, filename):
PAGER = os.environ.get('PAGER','less')
pager = shlex.split(PAGER)
pager.append(filename)
return pager
class VaultFile(object):
def __init__(self, password, filename):
self.password = password
self.filename = filename
if not os.path.isfile(self.filename):
raise AnsibleError("%s does not exist" % self.filename)
try:
self.filehandle = open(filename, "rb")
except Exception as e:
raise AnsibleError("Could not open %s: %s" % (self.filename, str(e)))
_, self.tmpfile = tempfile.mkstemp()
def __del__(self):
self.filehandle.close()
os.unlink(self.tmpfile)
def is_encrypted(self):
peek = self.filehandle.readline()
if peek.startswith(to_bytes(HEADER)):
return True
else:
return False
def get_decrypted(self):
check_prereqs()
if self.is_encrypted():
tmpdata = self.filehandle.read()
this_vault = VaultLib(self.password)
dec_data = this_vault.decrypt(tmpdata)
if dec_data is None:
raise AnsibleError("Decryption failed")
else:
with open(self.tmpfile, "wb") as f:
    f.write(to_bytes(dec_data))
return self.tmpfile
else:
return self.filename
########################################
# CIPHERS #
########################################
class VaultAES(object):
# this version has been obsoleted by the VaultAES256 class
# which uses encrypt-then-mac (fixing order) and also improving the KDF used
# code remains for upgrade purposes only
# http://stackoverflow.com/a/16761459
def __init__(self):
if not HAS_AES:
raise AnsibleError(CRYPTO_UPGRADE)
def aes_derive_key_and_iv(self, password, salt, key_length, iv_length):
""" Create a key and an initialization vector """
d = d_i = b''
while len(d) < key_length + iv_length:
text = "{0}{1}{2}".format(d_i, password, salt)
d_i = md5(to_bytes(text)).digest()
d += d_i
key = d[:key_length]
iv = d[key_length:key_length+iv_length]
return key, iv
def encrypt(self, data, password, key_length=32):
""" Read plaintext data from in_file and write encrypted to out_file """
# combine sha + data
this_sha = sha256(to_bytes(data)).hexdigest()
tmp_data = this_sha + "\n" + data
in_file = BytesIO(to_bytes(tmp_data))
in_file.seek(0)
out_file = BytesIO()
bs = AES.block_size
# Get a block of random data. EL does not have Crypto.Random.new()
# so os.urandom is used for cross platform purposes
salt = os.urandom(bs - len('Salted__'))
key, iv = self.aes_derive_key_and_iv(password, salt, key_length, bs)
cipher = AES.new(key, AES.MODE_CBC, iv)
full = to_bytes(b'Salted__' + salt)
out_file.write(full)
finished = False
while not finished:
chunk = in_file.read(1024 * bs)
if len(chunk) == 0 or len(chunk) % bs != 0:
padding_length = (bs - len(chunk) % bs) or bs
chunk += to_bytes(padding_length * chr(padding_length))
finished = True
out_file.write(cipher.encrypt(chunk))
out_file.seek(0)
enc_data = out_file.read()
tmp_data = hexlify(enc_data)
return tmp_data
def decrypt(self, data, password, key_length=32):
""" Read encrypted data from in_file and write decrypted to out_file """
# http://stackoverflow.com/a/14989032
data = b''.join(data.split(b'\n'))
data = unhexlify(data)
in_file = BytesIO(data)
in_file.seek(0)
out_file = BytesIO()
bs = AES.block_size
tmpsalt = in_file.read(bs)
salt = tmpsalt[len('Salted__'):]
key, iv = self.aes_derive_key_and_iv(password, salt, key_length, bs)
cipher = AES.new(key, AES.MODE_CBC, iv)
next_chunk = b''
finished = False
while not finished:
chunk, next_chunk = next_chunk, cipher.decrypt(in_file.read(1024 * bs))
if len(next_chunk) == 0:
if PY3:
padding_length = chunk[-1]
else:
padding_length = ord(chunk[-1])
chunk = chunk[:-padding_length]
finished = True
out_file.write(chunk)
out_file.flush()
# reset the stream pointer to the beginning
out_file.seek(0)
out_data = out_file.read()
out_file.close()
new_data = to_unicode(out_data)
# split out sha and verify decryption
split_data = new_data.split("\n")
this_sha = split_data[0]
this_data = '\n'.join(split_data[1:])
test_sha = sha256(to_bytes(this_data)).hexdigest()
if this_sha != test_sha:
raise AnsibleError("Decryption failed")
return this_data
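# --- Illustrative note, not from the original module ---
# The legacy VaultAES payload read back by decrypt() above is
# hexlify(b'Salted__' + 8-byte salt + CBC ciphertext); key and IV come from
# the MD5 chain in aes_derive_key_and_iv(), which mirrors OpenSSL's
# EVP_BytesToKey().  The derivation contract, roughly:
#
#     key, iv = VaultAES().aes_derive_key_and_iv(password, salt,
#                                                key_length=32, iv_length=16)
#     # len(key) == 32 and len(iv) == 16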
class VaultAES256(object):
"""
Vault implementation using AES-CTR with an HMAC-SHA256 authentication code.
Keys are derived using PBKDF2
"""
# http://www.daemonology.net/blog/2009-06-11-cryptographic-right-answers.html
def __init__(self):
check_prereqs()
def gen_key_initctr(self, password, salt):
# 16 for AES 128, 32 for AES256
keylength = 32
# match the size used for counter.new to avoid extra work
ivlength = 16
hash_function = SHA256
# make two keys and one iv
pbkdf2_prf = lambda p, s: HMAC.new(p, s, hash_function).digest()
derivedkey = PBKDF2(password, salt, dkLen=(2 * keylength) + ivlength,
count=10000, prf=pbkdf2_prf)
key1 = derivedkey[:keylength]
key2 = derivedkey[keylength:(keylength * 2)]
iv = derivedkey[(keylength * 2):(keylength * 2) + ivlength]
return key1, key2, hexlify(iv)
def encrypt(self, data, password):
salt = os.urandom(32)
key1, key2, iv = self.gen_key_initctr(password, salt)
# PKCS#7 PAD DATA http://tools.ietf.org/html/rfc5652#section-6.3
bs = AES.block_size
padding_length = (bs - len(data) % bs) or bs
data += padding_length * chr(padding_length)
# COUNTER.new PARAMETERS
# 1) nbits (integer) - Length of the counter, in bits.
# 2) initial_value (integer) - initial value of the counter. "iv" from gen_key_initctr
ctr = Counter.new(128, initial_value=int(iv, 16))
# AES.new PARAMETERS
# 1) AES key, must be either 16, 24, or 32 bytes long -- "key" from gen_key_initctr
# 2) MODE_CTR, is the recommended mode
# 3) counter=<CounterObject>
cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)
# ENCRYPT PADDED DATA
cryptedData = cipher.encrypt(data)
# COMBINE SALT, DIGEST AND DATA
hmac = HMAC.new(key2, cryptedData, SHA256)
message = b''.join([hexlify(salt), b"\n", to_bytes(hmac.hexdigest()), b"\n", hexlify(cryptedData)])
message = hexlify(message)
return message
def decrypt(self, data, password):
# SPLIT SALT, DIGEST, AND DATA
data = b''.join(data.split(b"\n"))
data = unhexlify(data)
salt, cryptedHmac, cryptedData = data.split(b"\n", 2)
salt = unhexlify(salt)
cryptedData = unhexlify(cryptedData)
key1, key2, iv = self.gen_key_initctr(password, salt)
# EXIT EARLY IF DIGEST DOESN'T MATCH
hmacDecrypt = HMAC.new(key2, cryptedData, SHA256)
if not self.is_equal(cryptedHmac, to_bytes(hmacDecrypt.hexdigest())):
return None
# SET THE COUNTER AND THE CIPHER
ctr = Counter.new(128, initial_value=int(iv, 16))
cipher = AES.new(key1, AES.MODE_CTR, counter=ctr)
# DECRYPT PADDED DATA
decryptedData = cipher.decrypt(cryptedData)
# UNPAD DATA
try:
padding_length = ord(decryptedData[-1])
except TypeError:
padding_length = decryptedData[-1]
decryptedData = decryptedData[:-padding_length]
return to_unicode(decryptedData)
def is_equal(self, a, b):
"""
Comparing 2 byte arrays in constant time
to avoid timing attacks.
It would be nice if there was a library for this but
hey.
"""
# http://codahale.com/a-lesson-in-timing-attacks/
if len(a) != len(b):
return False
result = 0
for x, y in zip(a, b):
if PY3:
result |= x ^ y
else:
result |= ord(x) ^ ord(y)
return result == 0
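# --- Illustrative note, not from the original module ---
# Python 2.7.7+/3.3+ ship an equivalent constant-time comparison, so
# is_equal() could be swapped for the standard library:
#
#     import hmac
#     hmac.compare_digest(a, b)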
| gpl-3.0 |
sestrella/ansible | lib/ansible/module_utils/network/junos/config/vlans/vlans.py | 19 | 7322 | # Copyright (C) 2019 Red Hat, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
The junos_vlans class
It is in this file where the current configuration (as dict)
is compared to the provided configuration (as dict) and the command set
necessary to bring the current configuration to it's desired end-state is
created
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
from ansible.module_utils.network.common.cfg.base import ConfigBase
from ansible.module_utils.network.common.utils import to_list
from ansible.module_utils.network.junos.facts.facts import Facts
from ansible.module_utils.network.junos.junos import (locked_config,
load_config,
commit_configuration,
discard_changes,
tostring)
from ansible.module_utils.network.common.netconf import (build_root_xml_node,
build_child_xml_node)
class Vlans(ConfigBase):
"""
The junos_vlans class
"""
gather_subset = [
'!all',
'!min',
]
gather_network_resources = [
'vlans',
]
def __init__(self, module):
super(Vlans, self).__init__(module)
def get_vlans_facts(self):
""" Get the 'facts' (the current configuration)
:rtype: A dictionary
:returns: The current configuration as a dictionary
"""
facts, _warnings = Facts(self._module).get_facts(
self.gather_subset, self.gather_network_resources)
vlans_facts = facts['ansible_network_resources'].get('vlans')
if not vlans_facts:
return []
return vlans_facts
def execute_module(self):
""" Execute the module
:rtype: A dictionary
:returns: The result from module execution
"""
result = {'changed': False}
warnings = list()
existing_vlans_facts = self.get_vlans_facts()
config_xmls = self.set_config(existing_vlans_facts)
with locked_config(self._module):
for config_xml in to_list(config_xmls):
diff = load_config(self._module, config_xml, warnings)
commit = not self._module.check_mode
if diff:
if commit:
commit_configuration(self._module)
else:
discard_changes(self._module)
result['changed'] = True
if self._module._diff:
result['diff'] = {'prepared': diff}
result['commands'] = config_xmls
changed_vlans_facts = self.get_vlans_facts()
result['before'] = existing_vlans_facts
if result['changed']:
result['after'] = changed_vlans_facts
result['warnings'] = warnings
return result
def set_config(self, existing_vlans_facts):
""" Collect the configuration from the args passed to the module,
collect the current configuration (as a dict from facts)
:rtype: A list
:returns: the commands necessary to migrate the current configuration
to the desired configuration
"""
want = self._module.params['config']
have = existing_vlans_facts
resp = self.set_state(want, have)
return to_list(resp)
def set_state(self, want, have):
""" Select the appropriate function based on the state provided
:param want: the desired configuration as a dictionary
:param have: the current configuration as a dictionary
:rtype: A list
:returns: the commands necessary to migrate the current configuration
to the desired configuration
"""
root = build_root_xml_node('vlans')
state = self._module.params['state']
if state == 'overridden':
config_xmls = self._state_overridden(want, have)
elif state == 'deleted':
config_xmls = self._state_deleted(want, have)
elif state == 'merged':
config_xmls = self._state_merged(want, have)
elif state == 'replaced':
config_xmls = self._state_replaced(want, have)
for xml in config_xmls:
root.append(xml)
return tostring(root)
def _state_replaced(self, want, have):
""" The command generator when state is replaced
:rtype: A list
:returns: the xml necessary to migrate the current configuration
to the desired configuration
"""
intf_xml = []
intf_xml.extend(self._state_deleted(want, have))
intf_xml.extend(self._state_merged(want, have))
return intf_xml
def _state_overridden(self, want, have):
""" The command generator when state is overridden
:rtype: A list
:returns: the xml necessary to migrate the current configuration
to the desired configuration
"""
intf_xml = []
intf_xml.extend(self._state_deleted(have, have))
intf_xml.extend(self._state_merged(want, have))
return intf_xml
def _state_merged(self, want, have):
""" The command generator when state is merged
:rtype: A list
:returns: the xml necessary to merge the provided into
the current configuration
"""
intf_xml = []
for config in want:
vlan_name = str(config['name'])
vlan_id = str(config['vlan_id'])
vlan_description = config.get('description')
vlan_root = build_root_xml_node('vlan')
build_child_xml_node(vlan_root, 'name', vlan_name)
build_child_xml_node(vlan_root, 'vlan-id', vlan_id)
if vlan_description:
build_child_xml_node(vlan_root, 'description',
vlan_description)
intf_xml.append(vlan_root)
return intf_xml
def _state_deleted(self, want, have):
""" The command generator when state is deleted
:rtype: A list
:returns: the xml necessary to remove the current configuration
of the provided objects
"""
intf_xml = []
if not want:
want = have
for config in want:
vlan_name = config['name']
vlan_root = build_root_xml_node('vlan')
vlan_root.attrib.update({'delete': 'delete'})
build_child_xml_node(vlan_root, 'name', vlan_name)
intf_xml.append(vlan_root)
return intf_xml
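# --- Illustrative sketch, not from the original module ---
# For want = [{'name': 'vlan10', 'vlan_id': 10}], _state_merged() emits a
# <vlan> node that set_state() wraps in a <vlans> root, roughly:
#
#     <vlans>
#       <vlan>
#         <name>vlan10</name>
#         <vlan-id>10</vlan-id>
#       </vlan>
#     </vlans>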
| gpl-3.0 |
dxmahata/InformativeTweetCollection | oauthlib/oauth2/rfc6749/clients/service_application.py | 36 | 7081 | # -*- coding: utf-8 -*-
"""
oauthlib.oauth2.rfc6749
~~~~~~~~~~~~~~~~~~~~~~~
This module is an implementation of various logic needed
for consuming and providing OAuth 2.0 RFC6749.
"""
from __future__ import absolute_import, unicode_literals
import time
from oauthlib.common import to_unicode
from .base import Client
from ..parameters import prepare_token_request
from ..parameters import parse_token_response
class ServiceApplicationClient(Client):
"""A public client utilizing the JWT bearer grant.
JWT bearer tokens can be used to request an access token when a client
wishes to utilize an existing trust relationship, expressed through the
semantics of (and digital signature or keyed message digest calculated
over) the JWT, without a direct user approval step at the authorization
server.
This grant type does not involve an authorization step. It may be
used by both public and confidential clients.
"""
grant_type = 'urn:ietf:params:oauth:grant-type:jwt-bearer'
def __init__(self, client_id, private_key=None, subject=None, issuer=None,
audience=None, **kwargs):
"""Initalize a JWT client with defaults for implicit use later.
:param client_id: Client identifier given by the OAuth provider upon
registration.
:param private_key: Private key used for signing and encrypting.
Must be given as a string.
:param subject: The principal that is the subject of the JWT, i.e.
which user is the token requested on behalf of.
For example, ``[email protected]``.
:param issuer: The JWT MUST contain an "iss" (issuer) claim that
contains a unique identifier for the entity that issued
the JWT. For example, ``[email protected]``.
:param audience: A value identifying the authorization server as an
intended audience, e.g.
``https://provider.com/oauth2/token``.
:param kwargs: Additional arguments to pass to base client, such as
state and token. See Client.__init__.__doc__ for
details.
"""
super(ServiceApplicationClient, self).__init__(client_id, **kwargs)
self.private_key = private_key
self.subject = subject
self.issuer = issuer
self.audience = audience
def prepare_request_body(self,
private_key=None,
subject=None,
issuer=None,
audience=None,
expires_at=None,
issued_at=None,
extra_claims=None,
body='',
scope=None,
**kwargs):
"""Create and add a JWT assertion to the request body.
:param private_key: Private key used for signing and encrypting.
Must be given as a string.
:param subject: (sub) The principal that is the subject of the JWT,
i.e. which user is the token requested on behalf of.
For example, ``[email protected]``.
:param issuer: (iss) The JWT MUST contain an "iss" (issuer) claim that
contains a unique identifier for the entity that issued
the JWT. For example, ``[email protected]``.
:param audience: (aud) A value identifying the authorization server as an
intended audience, e.g.
``https://provider.com/oauth2/token``.
:param expires_at: A unix expiration timestamp for the JWT. Defaults
to an hour from now, i.e. ``time.time() + 3600``.
:param issued_at: A unix timestamp of when the JWT was created.
Defaults to now, i.e. ``time.time()``.
:param not_before: A unix timestamp after which the JWT may be used.
Not included unless provided.
:param jwt_id: A unique JWT token identifier. Not included unless
provided.
:param extra_claims: A dict of additional claims to include in the JWT.
:param scope: The scope of the access request.
:param body: Request body (string) with extra parameters.
:param kwargs: Extra credentials to include in the token request.
The "scope" parameter may be used, as defined in the Assertion
Framework for OAuth 2.0 Client Authentication and Authorization Grants
[I-D.ietf-oauth-assertions] specification, to indicate the requested
scope.
Authentication of the client is optional, as described in
`Section 3.2.1`_ of OAuth 2.0 [RFC6749] and consequently, the
"client_id" is only needed when a form of client authentication that
relies on the parameter is used.
The following non-normative example demonstrates an Access Token
Request with a JWT as an authorization grant (with extra line breaks
for display purposes only):
.. code-block:: http
POST /token.oauth2 HTTP/1.1
Host: as.example.com
Content-Type: application/x-www-form-urlencoded
grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer
&assertion=eyJhbGciOiJFUzI1NiJ9.
eyJpc3Mi[...omitted for brevity...].
J9l-ZhwP[...omitted for brevity...]
.. _`Section 3.2.1`: http://tools.ietf.org/html/rfc6749#section-3.2.1
"""
import jwt
import Crypto.PublicKey.RSA as RSA
key = private_key or self.private_key
if not key:
raise ValueError('An encryption key must be supplied to make JWT'
' token requests.')
key = RSA.importKey(key)
claim = {
'iss': issuer or self.issuer,
'aud': audience or self.audience,
'sub': subject or self.subject,
'exp': int(expires_at or time.time() + 3600),
'iat': int(issued_at or time.time()),
}
for attr in ('iss', 'aud', 'sub'):
if claim[attr] is None:
raise ValueError(
'Claim must include %s but none was given.' % attr)
if 'not_before' in kwargs:
claim['nbf'] = kwargs.pop('not_before')
if 'jwt_id' in kwargs:
claim['jti'] = kwargs.pop('jwt_id')
claim.update(extra_claims or {})
assertion = jwt.encode(claim, key, 'RS256')
assertion = to_unicode(assertion)
return prepare_token_request(self.grant_type,
body=body,
assertion=assertion,
scope=scope,
**kwargs)
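# --- Illustrative sketch, not from the original module ---
# Hedged usage, assuming PyJWT and pycrypto are installed and PRIVATE_PEM
# (hypothetical name) holds an RSA private key in PEM form:
#
#     client = ServiceApplicationClient(
#         'client_id', private_key=PRIVATE_PEM,
#         subject='[email protected]', issuer='[email protected]',
#         audience='https://provider.com/oauth2/token')
#     body = client.prepare_request_body()
#     # -> urlencoded string carrying grant_type=...jwt-bearer and the
#     #    signed assertion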
| apache-2.0 |
Beeblio/django | django/contrib/gis/db/models/proxy.py | 38 | 2643 | """
The GeometryProxy object, allows for lazy-geometries. The proxy uses
Python descriptors for instantiating and setting Geometry objects
corresponding to geographic model fields.
Thanks to Robert Coup for providing this functionality (see #4322).
"""
from django.contrib.gis import memoryview
from django.utils import six
class GeometryProxy(object):
def __init__(self, klass, field):
"""
Proxy initializes on the given Geometry class (not an instance) and
the GeometryField.
"""
self._field = field
self._klass = klass
def __get__(self, obj, type=None):
"""
This accessor retrieves the geometry, initializing it using the geometry
class specified during initialization and the HEXEWKB value of the field.
Currently, only GEOS or OGR geometries are supported.
"""
if obj is None:
# Accessed on a class, not an instance
return self
# Getting the value of the field.
geom_value = obj.__dict__[self._field.attname]
if isinstance(geom_value, self._klass):
geom = geom_value
elif (geom_value is None) or (geom_value == ''):
geom = None
else:
# Otherwise, a Geometry object is built using the field's contents,
# and the model's corresponding attribute is set.
geom = self._klass(geom_value)
setattr(obj, self._field.attname, geom)
return geom
def __set__(self, obj, value):
"""
This accessor sets the proxied geometry with the geometry class
specified during initialization. Values of None, HEXEWKB, or WKT may
be used to set the geometry as well.
"""
# The OGC Geometry type of the field.
gtype = self._field.geom_type
# The geometry type must match that of the field -- unless the
# general GeometryField is used.
if isinstance(value, self._klass) and (str(value.geom_type).upper() == gtype or gtype == 'GEOMETRY'):
# Assigning the SRID to the geometry.
if value.srid is None:
value.srid = self._field.srid
elif value is None or isinstance(value, six.string_types + (memoryview,)):
# Set with None, WKT, HEX, or WKB
pass
else:
raise TypeError('Cannot set %s GeometryProxy (%s) with value of type: %s' % (
obj.__class__.__name__, gtype, type(value)))
# Setting the objects dictionary with the value, and returning.
obj.__dict__[self._field.attname] = value
return value
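# --- Illustrative sketch, not from the original module ---
# GeometryProxy is a plain data descriptor: a class attribute whose
# __get__/__set__ intercept instance attribute access.  A minimal
# self-contained analogue (hypothetical names):
#
#     class UpperProxy(object):
#         def __init__(self, attname):
#             self.attname = attname
#         def __get__(self, obj, type=None):
#             if obj is None:
#                 return self
#             return obj.__dict__[self.attname].upper()
#         def __set__(self, obj, value):
#             obj.__dict__[self.attname] = value
#
#     class Place(object):
#         name = UpperProxy('name')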
| bsd-3-clause |
fillest/7drl2013 | enemies.py | 1 | 2371 | import util
import libtcodpy as tcod
import towers
import items
registered_enemies = []
def enemy_classes ():
return [c for c in registered_enemies if c != Enemy]
class EnemyMeta (type):
def __init__ (class_, name, bases, attrs):
super(EnemyMeta, class_).__init__(name, bases, attrs)
registered_enemies.append(class_)
class Enemy (util.Entity):
__metaclass__ = EnemyMeta
max_hp = 1
speed = 1
damage = 1
def __init__ (self, *args):
super(Enemy, self).__init__(*args)
self.timer = self.state.timers.start(500 / self.speed, self._move)
self.hp = self.max_hp
def _move (self):
# self.x = clamp(self.x + random.randint(-1, 1), 0, self.state.map.w - 1)
# self.y = clamp(self.y + random.randint(-1, 1), 0, self.state.map.h - 1)
step_x, step_y = self.state.heart.x, self.state.heart.y
baits = [e for e in self.state.entities if isinstance(e, towers.Bait)]
if baits:
curr_bait = baits[0]
for bait in baits:
if util.dist(self.x, self.y, curr_bait.x, curr_bait.y) > util.dist(self.x, self.y, bait.x, bait.y):
curr_bait = bait
step_x, step_y = curr_bait.x, curr_bait.y
tcod.line_init(self.x, self.y, step_x, step_y)
x, y = tcod.line_step()
if x is None:
pass
else:
did_hit = False
for e in self.state.entities:
if e.x == x and e.y == y and isinstance(e, towers.Building):
self.hit(e)
did_hit = True
if not did_hit:
self.x = x
self.y = y
def hit (self, e):
if e in self.state.entities:
# print 'Enemy {0} hit the {1}. Damage: {2}'.format(self.__class__.__name__, e.__class__.__name__, self.damage)
e.hurt(self.damage)
def hurt (self, hp):
self.hp -= hp
if self.hp < 1:
self.die()
def die (self):
# TODO: prevent a double drop (add an is_alive attr?)
self.state.entities.append(items.EnergyItem(self.state, self.x, self.y))
if self in self.state.entities:
self.state.entities.remove(self)
if self.timer in self.state.timers:
self.state.timers.remove(self.timer)
class Rat (Enemy):
sym = 'r'
color = tcod.lighter_sepia
score = 1
class Wolf (Enemy):
sym = 'w'
color = tcod.lighter_grey
max_hp = 2
speed = 2
score = 2
class Savage (Enemy):
sym = '@'
color = tcod.darker_pink
max_hp = 4
speed = 1
score = 2
class Elefant (Enemy):
sym = 'e'
color = tcod.lighter_grey
max_hp = 1 * 16
speed = 0.5
score = 16
| mit |
zorroz/microblog | flask/lib/python2.7/site-packages/sqlalchemy/schema.py | 32 | 1200 | # schema.py
# Copyright (C) 2005-2017 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Compatibility namespace for sqlalchemy.sql.schema and related.
"""
from .sql.base import (
SchemaVisitor
)
from .sql.schema import (
BLANK_SCHEMA,
CheckConstraint,
Column,
ColumnDefault,
Constraint,
DefaultClause,
DefaultGenerator,
FetchedValue,
ForeignKey,
ForeignKeyConstraint,
Index,
MetaData,
PassiveDefault,
PrimaryKeyConstraint,
SchemaItem,
Sequence,
Table,
ThreadLocalMetaData,
UniqueConstraint,
_get_table_key,
ColumnCollectionConstraint,
ColumnCollectionMixin
)
from .sql.naming import conv
from .sql.ddl import (
DDL,
CreateTable,
DropTable,
CreateSequence,
DropSequence,
CreateIndex,
DropIndex,
CreateSchema,
DropSchema,
_DropView,
CreateColumn,
AddConstraint,
DropConstraint,
DDLBase,
DDLElement,
_CreateDropBase,
_DDLCompiles,
sort_tables,
sort_tables_and_constraints
)
| bsd-3-clause |
Fizzadar/pyinfra | examples/puppet/step1.py | 1 | 3222 | from pyinfra import host, inventory
from pyinfra.operations import files, init, server, yum
SUDO = True
# update the /etc/hosts file
def update_hosts_file(name, ip):
files.line(
name='Add hosts to /etc/hosts',
path='/etc/hosts',
line=r' {}.example.com '.format(name),
replace='{} {}.example.com {}'.format(ip, name, name),
)
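# --- Illustrative note, not from the original example ---
# files.line() works like a targeted sed: any /etc/hosts line matching the
# `line` pattern (e.g. " master.example.com ") is rewritten to `replace`,
# yielding a line such as (IP is hypothetical):
#
#     10.0.0.5 master.example.com master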
# ensure all hosts are added to each /etc/hosts file
masters = inventory.get_group('master_servers')
for item in masters:
update_hosts_file('master', item.fact.ipv4_addresses['eth0'])
agents = inventory.get_group('agent_servers')
for item in agents:
update_hosts_file('agent', item.fact.ipv4_addresses['eth0'])
if host in masters:
server.hostname(
name='Set the hostname for the Puppet Master',
hostname='master.example.com',
)
if host in agents:
server.hostname(
name='Set the hostname for an agent',
hostname='agent.example.com',
)
if host.fact.linux_name in ['CentOS', 'RedHat']:
yum.packages(
name='Install chrony for Network Time Protocol (NTP)',
packages=['chrony'],
)
major = host.fact.linux_distribution['major']
yum.rpm(
name='Install Puppet Repo',
src='https://yum.puppet.com/puppet6-release-el-{}.noarch.rpm'
.format(major),
)
files.line(
name='Ensure SELINUX is disabled',
path='/etc/sysconfig/selinux',
line=r'SELINUX=.*',
replace='SELINUX=disabled',
)
# TODO: should reboot after SELINUX is disabled (how to check/easy way to reboot)
# TODO: how to determine when reboot is complete
# TODO: run sestatus
if host in masters:
install = yum.packages(
name='Install puppet server',
packages=['puppetserver'],
)
config = files.template(
name='Manage the puppet master configuration',
src='templates/master_puppet.conf.j2',
dest='/etc/puppetlabs/puppet/puppet.conf',
)
# TODO: tune always shows as changed
# I think it should only show as changed if there really was a difference.
# Might have to add a suffix to the sed -i option, then move file only if
# there is a diff. Maybe?
tune = files.line(
name='Tune the puppet server jvm to only use 1gb',
path='/etc/sysconfig/puppetserver',
line=r'^JAVA_ARGS=.*$',
replace='JAVA_ARGS=\\"-Xms1g -Xmx1g -Djruby.logger.class=com.puppetlabs.'
'jruby_utils.jruby.Slf4jLogger\\"',
)
if install.changed or config.changed or tune.changed:
init.systemd(
name='Restart and enable puppetserver',
service='puppetserver',
running=True,
restarted=True,
enabled=True,
)
if host in agents:
yum.packages(
name='Install puppet agent',
packages=['puppet-agent'],
)
files.template(
name='Manage the puppet agent configuration',
src='templates/agent_puppet.conf.j2',
dest='/etc/puppetlabs/puppet/puppet.conf',
)
init.systemd(
name='Restart and enable puppet agent',
service='puppet',
running=True,
restarted=True,
enabled=True,
)
| mit |
blaze33/django | django/contrib/staticfiles/finders.py | 102 | 9658 | import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.files.storage import default_storage, Storage, FileSystemStorage
from django.utils.datastructures import SortedDict
from django.utils.functional import empty, memoize, LazyObject
from django.utils.importlib import import_module
from django.utils._os import safe_join
from django.utils import six
from django.contrib.staticfiles import utils
from django.contrib.staticfiles.storage import AppStaticStorage
_finders = SortedDict()
class BaseFinder(object):
"""
A base file finder to be used for custom staticfiles finder classes.
"""
def find(self, path, all=False):
"""
Given a relative file path this ought to find an
absolute file path.
If the ``all`` parameter is ``False`` (default) only
the first found file path will be returned; if set
to ``True`` a list of all found files paths is returned.
"""
raise NotImplementedError()
def list(self, ignore_patterns):
"""
Given an optional list of paths to ignore, this should return
a two item iterable consisting of the relative path and storage
instance.
"""
raise NotImplementedError()
class FileSystemFinder(BaseFinder):
"""
A static files finder that uses the ``STATICFILES_DIRS`` setting
to locate files.
"""
def __init__(self, apps=None, *args, **kwargs):
# List of locations with static files
self.locations = []
# Maps dir paths to an appropriate storage instance
self.storages = SortedDict()
if not isinstance(settings.STATICFILES_DIRS, (list, tuple)):
raise ImproperlyConfigured(
"Your STATICFILES_DIRS setting is not a tuple or list; "
"perhaps you forgot a trailing comma?")
for root in settings.STATICFILES_DIRS:
if isinstance(root, (list, tuple)):
prefix, root = root
else:
prefix = ''
if os.path.abspath(settings.STATIC_ROOT) == os.path.abspath(root):
raise ImproperlyConfigured(
"The STATICFILES_DIRS setting should "
"not contain the STATIC_ROOT setting")
if (prefix, root) not in self.locations:
self.locations.append((prefix, root))
for prefix, root in self.locations:
filesystem_storage = FileSystemStorage(location=root)
filesystem_storage.prefix = prefix
self.storages[root] = filesystem_storage
super(FileSystemFinder, self).__init__(*args, **kwargs)
def find(self, path, all=False):
"""
Looks for files in the extra locations
as defined in ``STATICFILES_DIRS``.
"""
matches = []
for prefix, root in self.locations:
matched_path = self.find_location(root, path, prefix)
if matched_path:
if not all:
return matched_path
matches.append(matched_path)
return matches
def find_location(self, root, path, prefix=None):
"""
Finds a requested static file in a location, returning the found
absolute path (or ``None`` if no match).
"""
if prefix:
prefix = '%s%s' % (prefix, os.sep)
if not path.startswith(prefix):
return None
path = path[len(prefix):]
path = safe_join(root, path)
if os.path.exists(path):
return path
def list(self, ignore_patterns):
"""
List all files in all locations.
"""
for prefix, root in self.locations:
storage = self.storages[root]
for path in utils.get_files(storage, ignore_patterns):
yield path, storage
class AppDirectoriesFinder(BaseFinder):
"""
A static files finder that looks in the directory of each app as
specified in the source_dir attribute of the given storage class.
"""
storage_class = AppStaticStorage
def __init__(self, apps=None, *args, **kwargs):
# The list of apps that are handled
self.apps = []
# Mapping of app module paths to storage instances
self.storages = SortedDict()
if apps is None:
apps = settings.INSTALLED_APPS
for app in apps:
app_storage = self.storage_class(app)
if os.path.isdir(app_storage.location):
self.storages[app] = app_storage
if app not in self.apps:
self.apps.append(app)
super(AppDirectoriesFinder, self).__init__(*args, **kwargs)
def list(self, ignore_patterns):
"""
List all files in all app storages.
"""
for storage in six.itervalues(self.storages):
if storage.exists(''): # check if storage location exists
for path in utils.get_files(storage, ignore_patterns):
yield path, storage
def find(self, path, all=False):
"""
Looks for files in the app directories.
"""
matches = []
for app in self.apps:
match = self.find_in_app(app, path)
if match:
if not all:
return match
matches.append(match)
return matches
def find_in_app(self, app, path):
"""
Find a requested static file in an app's static locations.
"""
storage = self.storages.get(app, None)
if storage:
if storage.prefix:
prefix = '%s%s' % (storage.prefix, os.sep)
if not path.startswith(prefix):
return None
path = path[len(prefix):]
# only try to find a file if the source dir actually exists
if storage.exists(path):
matched_path = storage.path(path)
if matched_path:
return matched_path
class BaseStorageFinder(BaseFinder):
"""
A base static files finder to be used to extended
with an own storage class.
"""
storage = None
def __init__(self, storage=None, *args, **kwargs):
if storage is not None:
self.storage = storage
if self.storage is None:
raise ImproperlyConfigured("The staticfiles storage finder %r "
"doesn't have a storage class "
"assigned." % self.__class__)
# Make sure we have an storage instance here.
if not isinstance(self.storage, (Storage, LazyObject)):
self.storage = self.storage()
super(BaseStorageFinder, self).__init__(*args, **kwargs)
def find(self, path, all=False):
"""
Looks for files in the default file storage, if it's local.
"""
try:
self.storage.path('')
except NotImplementedError:
pass
else:
if self.storage.exists(path):
match = self.storage.path(path)
if all:
match = [match]
return match
return []
def list(self, ignore_patterns):
"""
List all files of the storage.
"""
for path in utils.get_files(self.storage, ignore_patterns):
yield path, self.storage
class DefaultStorageFinder(BaseStorageFinder):
"""
A static files finder that uses the default storage backend.
"""
storage = default_storage
def __init__(self, *args, **kwargs):
super(DefaultStorageFinder, self).__init__(*args, **kwargs)
base_location = getattr(self.storage, 'base_location', empty)
if not base_location:
raise ImproperlyConfigured("The storage backend of the "
"staticfiles finder %r doesn't have "
"a valid location." % self.__class__)
def find(path, all=False):
"""
Find a static file with the given path using all enabled finders.
If ``all`` is ``False`` (default), return the first matching
absolute path (or ``None`` if no match). Otherwise return a list.
"""
matches = []
for finder in get_finders():
result = finder.find(path, all=all)
if not all and result:
return result
if not isinstance(result, (list, tuple)):
result = [result]
matches.extend(result)
if matches:
return matches
# No match.
return all and [] or None
def get_finders():
for finder_path in settings.STATICFILES_FINDERS:
yield get_finder(finder_path)
def _get_finder(import_path):
"""
Imports the staticfiles finder class described by import_path, where
import_path is the full Python path to the class.
"""
module, attr = import_path.rsplit('.', 1)
try:
mod = import_module(module)
except ImportError as e:
raise ImproperlyConfigured('Error importing module %s: "%s"' %
(module, e))
try:
Finder = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured('Module "%s" does not define a "%s" '
'class.' % (module, attr))
if not issubclass(Finder, BaseFinder):
raise ImproperlyConfigured('Finder "%s" is not a subclass of "%s"' %
(Finder, BaseFinder))
return Finder()
get_finder = memoize(_get_finder, _finders, 1)
| bsd-3-clause |
BeryJu/ajenti-btsync | main.py | 1 | 2976 | from BTSync import *
from ajenti.api import *
from ajenti.api.http import *
from ajenti.ui import on
from ajenti.ui.binder import Binder
from ajenti.plugins.main.api import SectionPlugin
from ajenti.plugins.configurator.api import ClassConfigEditor
@plugin
class BTSyncConfigEditor (ClassConfigEditor):
title = 'BitTorrent Sync'
icon = 'cloud'
def init(self):
self.append(self.ui.inflate('btsync:config-editor'))
@plugin
class BTSyncSectionPlugin (SectionPlugin):
default_classconfig = { 'host': '', 'port': 0, 'username': '', 'password': ''}
classconfig_editor = BTSyncConfigEditor
def init(self):
self.title = 'BitTorrent Sync'
self.icon = 'cloud'
self.category = _('Web')
self.append(self.ui.inflate('btsync:main'))
def post_item_bind(object, collection, item, ui):
ui.find('remove').on('click', self.on_remove_btn, item)
self.find('folders').post_item_bind = post_item_bind
self.bt_client = HttpClient(**self.classconfig)
alive = self.bt_client.is_alive()
if alive == True and self.classconfig['host'] != '' and self.classconfig['port'] != 0:
self.folders = self.bt_client.create_folder_list()
self.preferences = self.bt_client.get_preferences()
self.update_stats()
self.binder = Binder(self, self.find('tabs'))
self.binder.populate()
else:
self.context.launch('configure-plugin', plugin=self)
def update_stats(self):
os = self.bt_client.get_os()
version = self.bt_client.get_version()
speed = self.bt_client.get_speed()
self.find('upload').text = speed['upload']
self.find('download').text = speed['download']
self.find('version').text = version
self.find('os').text = os
@on('save', 'click')
def on_save_btn(self):
self.binder.update()
self.config.save()
@on('config', 'click')
def on_config_btn(self):
self.context.launch('configure-plugin', plugin=self)
@on('new-secret', 'click')
def on_new_secret_btn(self):
secret = self.bt_client.generate_secret()
self.find('new-folder-secret').value = secret['read_write'] if 'read_write' in secret else ''
@on('new-folder', 'click')
def on_new_folder_btn(self):
if self.find('new-folder-secret').value == "":
self.on_new_secret_btn()
path = self.find('new-folder-name').value
if path == "":
return
result = self.bt_client.add_folder(dir=path, secret=self.find('new-folder-secret').value)
if 'message' in result:
self.context.notify('error', result['message'])
else:
self.context.notify('info', _('Success!'))
self.update_binder()
def on_remove_btn(self, item):
print repr(item)
@on('update', 'click')
def update_binder(self):
self.binder.update()
| mit |
mwest1066/PrairieLearn | exampleCourse/serverFilesCourse/python_autograder/pltest.py | 6 | 3111 | import unittest
from unittest import TestLoader, TestSuite
import json
import traceback
def points(points):
def decorator(f):
f.__dict__['points'] = points
return f
return decorator
def name(name):
def decorator(f):
f.__dict__['name'] = name
return f
return decorator
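# --- Illustrative sketch, not from the original module ---
# The decorators above stash grading metadata on the test function itself;
# PrairieTestResult.startTest() reads it back via __func__.__dict__.
# Hypothetical usage in the test.py this runner imports:
#
#     class Test(unittest.TestCase):
#         @points(3)
#         @name('adds two numbers')
#         def test_add(self):
#             self.assertEqual(1 + 1, 2)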
class PrairieTestRunner:
def run(self, test):
result = PrairieTestResult(self)
test(result)
return result
class PrairieTestResult(unittest.TestResult):
def __init__(self, runner):
unittest.TestResult.__init__(self)
self.results = []
self.buffer = True
def startTest(self, test):
unittest.TestResult.startTest(self, test)
options = getattr(test, test._testMethodName).__func__.__dict__
points = options.get('points', 1)
name = options.get('name', test.shortDescription())
if name is None:
name = test._testMethodName
self.results.append({'name': name, 'max_points': points})
def addSuccess(self, test):
unittest.TestResult.addSuccess(self, test)
self.results[-1]['points'] = self.results[-1]['max_points']
def addError(self, test, err):
unittest.TestResult.addError(self, test, err)
self.results[-1]['points'] = 0
self.results[-1]['output'] = self.errors[-1][1]
def addFailure(self, test, err):
unittest.TestResult.addFailure(self, test, err)
self.results[-1]['points'] = 0
self.results[-1]['output'] = self.failures[-1][1]
def stopTest(self, test):
# Never write output back to the console
self._mirrorOutput = False
unittest.TestResult.stopTest(self, test)
def getResults(self):
return self.results
if __name__ == '__main__':
try:
from test import Test
# Run the tests with our custom setup
loader = TestLoader()
# Maintain the ordering that tests appear in the source code
loader.sortTestMethodsUsing = lambda x, y: -1
tests = loader.loadTestsFromTestCase(Test)
suite = TestSuite([tests])
runner = PrairieTestRunner()
results = runner.run(suite).getResults()
# Compile total number of points
max_points = sum([test['max_points'] for test in results])
earned_points = sum([test['points'] for test in results])
# Assemble final grading results
grading_result = {}
grading_result['tests'] = results
grading_result['score'] = float(earned_points) / float(max_points)
grading_result['succeeded'] = True
print(json.dumps(grading_result, allow_nan=False))
with open('results.json', mode='w') as out:
json.dump(grading_result, out)
except:
# Last-ditch effort to capture meaningful error information
grading_result = {}
grading_result['score'] = 0.0
grading_result['succeeded'] = False
grading_result['output'] = traceback.format_exc()
with open('results.json', mode='w') as out:
json.dump(grading_result, out)
| agpl-3.0 |
seanchen/taiga-back | tests/integration/test_stars.py | 13 | 3867 | # Copyright (C) 2014 Andrey Antukh <[email protected]>
# Copyright (C) 2014 Jesús Espino <[email protected]>
# Copyright (C) 2014 David Barragán <[email protected]>
# Copyright (C) 2014 Anler Hernández <[email protected]>
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import pytest
from django.core.urlresolvers import reverse
from .. import factories as f
pytestmark = pytest.mark.django_db
def test_project_owner_star_project(client):
user = f.UserFactory.create()
project = f.ProjectFactory.create(owner=user)
f.MembershipFactory.create(project=project, is_owner=True, user=user)
url = reverse("projects-star", args=(project.id,))
client.login(user)
response = client.post(url)
assert response.status_code == 200
def test_project_owner_unstar_project(client):
user = f.UserFactory.create()
project = f.ProjectFactory.create(owner=user)
f.MembershipFactory.create(project=project, is_owner=True, user=user)
url = reverse("projects-unstar", args=(project.id,))
client.login(user)
response = client.post(url)
assert response.status_code == 200
def test_project_member_star_project(client):
user = f.UserFactory.create()
project = f.ProjectFactory.create()
role = f.RoleFactory.create(project=project, permissions=["view_project"])
f.MembershipFactory.create(project=project, user=user, role=role)
url = reverse("projects-star", args=(project.id,))
client.login(user)
response = client.post(url)
assert response.status_code == 200
def test_project_member_unstar_project(client):
user = f.UserFactory.create()
project = f.ProjectFactory.create()
role = f.RoleFactory.create(project=project, permissions=["view_project"])
f.MembershipFactory.create(project=project, user=user, role=role)
url = reverse("projects-unstar", args=(project.id,))
client.login(user)
response = client.post(url)
assert response.status_code == 200
def test_list_project_fans(client):
user = f.UserFactory.create()
project = f.ProjectFactory.create(owner=user)
f.MembershipFactory.create(project=project, user=user, is_owner=True)
fan = f.VoteFactory.create(content_object=project)
url = reverse("projects-fans", args=(project.id,))
client.login(user)
response = client.get(url)
assert response.status_code == 200
assert response.data[0]['id'] == fan.user.id
def test_list_user_starred_projects(client):
user = f.UserFactory.create()
project = f.ProjectFactory()
url = reverse("users-starred", args=(user.id,))
f.VoteFactory.create(user=user, content_object=project)
client.login(user)
response = client.get(url)
assert response.status_code == 200
assert response.data[0]['id'] == project.id
def test_get_project_stars(client):
user = f.UserFactory.create()
project = f.ProjectFactory.create(owner=user)
f.MembershipFactory.create(project=project, user=user, is_owner=True)
url = reverse("projects-detail", args=(project.id,))
f.VotesFactory.create(content_object=project, count=5)
f.VotesFactory.create(count=3)
client.login(user)
response = client.get(url)
assert response.status_code == 200
assert response.data['stars'] == 5
| agpl-3.0 |
dongjoon-hyun/tensorflow | tensorflow/python/ops/image_ops.py | 29 | 1413 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-short-docstring-punctuation
"""Image processing and decoding ops.
See the [Images](https://tensorflow.org/api_guides/python/image) guide.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_image_ops import *
from tensorflow.python.ops.image_ops_impl import *
# pylint: enable=wildcard-import
# TODO(drpng): remove these once internal use has discontinued.
# pylint: disable=unused-import
from tensorflow.python.ops.image_ops_impl import _Check3DImage
from tensorflow.python.ops.image_ops_impl import _ImageDimensions
# pylint: enable=unused-import
| apache-2.0 |
Himon-SYNCRAFT/taskplus | tests/core/actions/get_role_details/test_get_role_details_action.py | 1 | 2317 | from unittest import mock
from taskplus.core.actions import GetRoleDetailsAction, GetRoleDetailsRequest
from taskplus.core.domain import UserRole
from taskplus.core.shared.response import ResponseFailure
def test_get_role_details_action():
role = mock.Mock()
role = UserRole(name='admin', id=1)
roles_repo = mock.Mock()
roles_repo.one.return_value = role
request = GetRoleDetailsRequest(role.id)
action = GetRoleDetailsAction(roles_repo)
response = action.execute(request)
assert bool(response) is True
roles_repo.one.assert_called_once_with(role.id)
assert response.value == role
def test_get_role_details_action_with_hooks():
role = mock.Mock()
role = UserRole(name='admin', id=1)
roles_repo = mock.Mock()
roles_repo.one.return_value = role
request = GetRoleDetailsRequest(role.id)
action = GetRoleDetailsAction(roles_repo)
before = mock.MagicMock()
after = mock.MagicMock()
action.add_before_execution_hook(before)
action.add_after_execution_hook(after)
response = action.execute(request)
assert before.called
assert after.called
assert bool(response) is True
roles_repo.one.assert_called_once_with(role.id)
assert response.value == role
def test_get_role_details_action_handles_bad_request():
role = mock.Mock()
role = UserRole(name='admin', id=1)
roles_repo = mock.Mock()
roles_repo.one.return_value = role
request = GetRoleDetailsRequest(role_id=None)
action = GetRoleDetailsAction(roles_repo)
response = action.execute(request)
assert bool(response) is False
assert not roles_repo.one.called
assert response.value == {
'type': ResponseFailure.PARAMETER_ERROR,
'message': 'role_id: is required'
}
def test_get_role_details_action_handles_generic_error():
error_message = 'Error!!!'
roles_repo = mock.Mock()
roles_repo.one.side_effect = Exception(error_message)
request = GetRoleDetailsRequest(role_id=1)
action = GetRoleDetailsAction(roles_repo)
response = action.execute(request)
assert bool(response) is False
roles_repo.one.assert_called_once_with(1)
assert response.value == {
'type': ResponseFailure.SYSTEM_ERROR,
'message': 'Exception: {}'.format(error_message)
}
| bsd-3-clause |
openstack/senlin | senlin/events/database.py | 2 | 2322 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import timeutils
from senlin.common import consts
from senlin.events import base
from senlin.objects import event as eo
class DBEvent(base.EventBackend):
"""DB driver for event dumping"""
@classmethod
def dump(cls, level, action, **kwargs):
"""Create an event record into database.
:param level: An integer as defined by python logging module.
:param action: The action that triggered this dump.
:param dict kwargs: Additional parameters such as ``phase``,
``timestamp`` or ``extra``.
"""
ctx = action.context
entity = action.entity
status = kwargs.get('phase') or entity.status
reason = kwargs.get('reason') or entity.status_reason
otype = cls._check_entity(entity)
cluster_id = entity.id if otype == 'CLUSTER' else entity.cluster_id
# use provided timestamp if any
timestamp = kwargs.get('timestamp') or timeutils.utcnow(True)
# use provided extra data if any
extra = kwargs.get("extra") or {}
# Make a guess over the action name
action_name = action.action
if action_name in (consts.NODE_OPERATION, consts.CLUSTER_OPERATION):
action_name = action.inputs.get('operation', action_name)
values = {
'level': level,
'timestamp': timestamp,
'oid': entity.id,
'otype': otype,
'oname': entity.name,
'cluster_id': cluster_id,
'user': ctx.user_id,
'project': ctx.project_id,
'action': action_name,
'status': status,
'status_reason': reason,
'meta_data': extra,
}
eo.Event.create(ctx, values)
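# A hedged usage sketch -- `action` stands for whatever Action object the
# caller holds, and the kwargs below are illustrative, not required:
#
#   DBEvent.dump(logging.INFO, action, phase='start', reason='cluster resize')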
| apache-2.0 |
HyperBaton/ansible | lib/ansible/modules/network/netvisor/pn_admin_session_timeout.py | 52 | 3114 | #!/usr/bin/python
# Copyright: (c) 2018, Pluribus Networks
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: pn_admin_session_timeout
author: "Pluribus Networks (@rajaspachipulusu17)"
version_added: "2.8"
short_description: CLI command to modify admin-session-timeout
description:
- This module can be used to modify admin session timeout.
options:
pn_cliswitch:
description:
- Target switch to run the CLI on.
required: False
type: str
state:
description:
- State the action to perform.
C(update) to modify the admin-session-timeout.
required: True
type: str
choices: ['update']
pn_timeout:
description:
      - Maximum time to wait for user activity before
        terminating the login session. Minimum should be 60s.
required: False
type: str
"""
EXAMPLES = """
- name: admin session timeout functionality
pn_admin_session_timeout:
pn_cliswitch: "sw01"
state: "update"
pn_timeout: "61s"
- name: admin session timeout functionality
pn_admin_session_timeout:
pn_cliswitch: "sw01"
state: "update"
pn_timeout: "1d"
- name: admin session timeout functionality
pn_admin_session_timeout:
pn_cliswitch: "sw01"
state: "update"
pn_timeout: "10d20m3h15s"
"""
RETURN = """
command:
description: the CLI command run on the target node.
returned: always
type: str
stdout:
description: set of responses from the admin-session-timeout command.
returned: always
type: list
stderr:
description: set of error responses from the admin-session-timeout command.
returned: on error
type: list
changed:
description: indicates whether the CLI caused changes on the target.
returned: always
type: bool
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.netvisor.pn_nvos import pn_cli, run_cli
def main():
""" This section is for arguments parsing """
state_map = dict(
update='admin-session-timeout-modify'
)
module = AnsibleModule(
argument_spec=dict(
pn_cliswitch=dict(required=False, type='str'),
state=dict(required=True, type='str',
choices=state_map.keys()),
pn_timeout=dict(required=False, type='str'),
),
required_together=[['state', 'pn_timeout']],
)
# Accessing the arguments
cliswitch = module.params['pn_cliswitch']
state = module.params['state']
timeout = module.params['pn_timeout']
command = state_map[state]
# Building the CLI command string
cli = pn_cli(module, cliswitch)
if command == 'admin-session-timeout-modify':
cli += ' %s ' % command
if timeout:
cli += ' timeout ' + timeout
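    # Illustrative resulting CLI fragment at this point (timeout value taken
    # from the EXAMPLES above; the switch prefix built by pn_cli is elided):
    #   '... admin-session-timeout-modify  timeout 61s'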
run_cli(module, cli, state_map)
if __name__ == '__main__':
main()
| gpl-3.0 |
vaygr/ansible | lib/ansible/plugins/action/win_template.py | 269 | 1198 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from ansible.plugins.action import ActionBase
from ansible.plugins.action.template import ActionModule as TemplateActionModule
# Even though TemplateActionModule inherits from ActionBase, we still need to
# directly inherit from ActionBase to appease the plugin loader.
class ActionModule(TemplateActionModule, ActionBase):
DEFAULT_NEWLINE_SEQUENCE = '\r\n'
| gpl-3.0 |
thu-ml/zhusuan | zhusuan/framework/meta_bn.py | 1 | 4803 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import copy
import tensorflow as tf
from functools import wraps
from zhusuan.framework.utils import Context
__all__ = [
'MetaBayesianNet',
'meta_bayesian_net',
]
class Local(Context):
def __getattr__(self, item):
return self.__dict__.get(item, None)
def __setattr__(self, key, value):
self.__dict__[key] = value
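# A minimal sketch of what Local provides (assuming Context supplies the
# `with` support): inside `with Local() as ctx:`, `ctx.foo = 1` stores into
# __dict__, while reading a missing attribute like `ctx.bar` returns None
# instead of raising AttributeError.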
class MetaBayesianNet(object):
"""
A lazy-constructed :class:`~zhusuan.framework.bn.BayesianNet`. Conceptually
it's better to view :class:`MetaBayesianNet` rather than
:class:`~zhusuan.framework.bn.BayesianNet` as the model because it
can accept different observations through the :meth:`observe` method.
The suggested usage is through the :func:`meta_bayesian_net` decorator.
.. seealso::
For more information, please refer to :doc:`/tutorials/concepts`.
:param f: A function that constructs and returns a
:class:`~zhusuan.framework.bn.BayesianNet`.
:param args: A list. Ordered arguments that will be passed into `f`.
:param kwargs: A dictionary. Named arguments that will be passed into `f`.
:param scope: A string. The scope name passed to tensorflow
`variable_scope()
<https://www.tensorflow.org/api_docs/python/tf/variable_scope>`_.
:param reuse_variables: A bool. Whether to reuse tensorflow
`Variables <https://www.tensorflow.org/api_docs/python/tf/Variable>`_
in repeated calls of :meth:`observe`.
"""
def __init__(self, f, args=None, kwargs=None, scope=None,
reuse_variables=False):
if (scope is not None) and reuse_variables:
self._f = tf.make_template(scope, f)
elif reuse_variables:
raise ValueError("Cannot reuse tensorflow Variables when `scope` "
"is not provided.")
else:
self._f = f
self._args = copy.copy(args)
self._kwargs = copy.copy(kwargs)
self._scope = scope
self._reuse_variables = reuse_variables
self._log_joint = None
@property
def log_joint(self):
"""
The log joint function of this model. Can be overwritten as::
meta_bn = build_model(...)
def log_joint(bn):
return ...
meta_bn.log_joint = log_joint
"""
return self._log_joint
@log_joint.setter
def log_joint(self, value):
self._log_joint = value
def _run_with_observations(self, func, observations):
with Local() as local_cxt:
local_cxt.observations = observations
local_cxt.meta_bn = self
return func(*self._args, **self._kwargs)
def observe(self, **kwargs):
"""
Construct a :class:`~zhusuan.framework.bn.BayesianNet` given
observations.
:param kwargs: A dictionary that maps from node names to their observed
values.
:return: A :class:`~zhusuan.framework.bn.BayesianNet` instance.
"""
if (self._scope is not None) and (not self._reuse_variables):
with tf.variable_scope(self._scope):
return self._run_with_observations(self._f, kwargs)
else:
return self._run_with_observations(self._f, kwargs)
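# A minimal usage sketch, mirroring the decorator example below; the model
# internals and `x_obs` are assumptions, not part of this module:
#
#   meta_bn = build_model(...)        # returns a MetaBayesianNet; nothing is
#                                     # constructed until observe() is called
#   bn = meta_bn.observe(x=x_obs)     # BayesianNet with node 'x' observed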
def meta_bayesian_net(scope=None, reuse_variables=False):
"""
Transform a function that builds a
:class:`~zhusuan.framework.bn.BayesianNet` into returning
:class:`~zhusuan.framework.meta_bn.MetaBayesianNet`.
The suggested usage is as a decorator::
@meta_bayesian_net(scope=..., reuse_variables=True)
def build_model(...):
bn = zs.BayesianNet()
...
return bn
The decorated function will return a :class:`MetaBayesianNet` instance
instead of a :class:`BayesianNet` instance.
.. seealso::
For more details and examples, please refer to
:doc:`/tutorials/concepts`.
:param scope: A string. The scope name passed to tensorflow
`variable_scope()
<https://www.tensorflow.org/api_docs/python/tf/variable_scope>`_.
:param reuse_variables: A bool. Whether to reuse tensorflow
`Variables <https://www.tensorflow.org/api_docs/python/tf/Variable>`_
in repeated calls of :meth:`MetaBayesianNet.observe`.
:return: The transformed function.
"""
def wrapper(f):
@wraps(f)
def _wrapped(*args, **kwargs):
meta_bn = MetaBayesianNet(
f, args=args, kwargs=kwargs, scope=scope,
reuse_variables=reuse_variables)
return meta_bn
return _wrapped
return wrapper
| mit |
valkjsaaa/sl4a | python/src/Lib/test/sample_doctest.py | 228 | 1037 | """This is a sample module that doesn't really test anything all that
interesting.
It simply has a few tests, some of which succeed and some of which fail.
It's important that the numbers remain constant as another test is
testing the running of these tests.
>>> 2+2
4
"""
def foo():
"""
>>> 2+2
5
>>> 2+2
4
"""
def bar():
"""
>>> 2+2
4
"""
def test_silly_setup():
"""
>>> import test.test_doctest
>>> test.test_doctest.sillySetup
True
"""
def w_blank():
"""
>>> if 1:
... print 'a'
... print
... print 'b'
a
<BLANKLINE>
b
"""
x = 1
def x_is_one():
"""
>>> x
1
"""
def y_is_one():
"""
>>> y
1
"""
__test__ = {'good': """
>>> 42
42
""",
'bad': """
>>> 42
666
""",
}
def test_suite():
import doctest
return doctest.DocTestSuite()
| apache-2.0 |
tcc-unb-fga/debile | debile/slave/runners/piuparts.py | 4 | 2859 | # Copyright (c) 2012-2013 Paul Tagliamonte <[email protected]>
# Copyright (c) 2013 Leo Cavaille <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
from debile.slave.wrappers.piuparts import parse_piuparts
from schroot.chroot import SchrootCommandError
from schroot import schroot
try:
import configparser
except ImportError:
import ConfigParser as configparser
import os
import re
LINE_INFO = re.compile(
r"(?P<minutes>\d+)m(?P<sec>(\d(\.?))+)s (?P<severity>\w+): (?P<info>.*)")
def piuparts(chroot, target, analysis):
cfg = configparser.ConfigParser()
if cfg.read("/etc/schroot/chroot.d/%s" % (chroot)) == []:
raise Exception("Shit. No such tarball")
    # Use items() so this works with both py2 ConfigParser (no mapping
    # protocol) and py3 configparser.
    block = dict(cfg.items(chroot))
if "file" not in block:
raise Exception("Chroot type isn't of tarball")
location = block['file']
copy_location = os.path.join("/tmp", os.path.basename(location))
with schroot(chroot) as chroot:
chroot.copy(location, copy_location)
chroot.copy(target, "/tmp")
print("[ ] Installing...")
chroot.run(['apt-get', 'install', '-y', 'piuparts'], user='root')
print("[ ] Piuparts installed.")
failed = False
try:
print("[ ] Running Piuparts..")
out, err, _ = chroot.run([
'piuparts',
'-b', copy_location,
'/tmp/%s' % target,
'--warn-on-debsums-errors',
'--pedantic-purge-test',
], user='root')
except SchrootCommandError as e:
out, err = e.out, e.err
failed = True
for x in parse_piuparts(out.splitlines(), target):
analysis.results.append(x)
return (analysis, out + err, failed, None, None)
def version():
# TODO
return ('piuparts', 'n/a')
| mit |
tmpkus/photivo | scons-local-2.2.0/SCons/Tool/packaging/targz.py | 14 | 1834 | """SCons.Tool.Packaging.targz
The targz SRC packager.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/packaging/targz.py issue-2856:2676:d23b7a2f45e8 2012/08/05 15:38:28 garyo"
from SCons.Tool.packaging import stripinstallbuilder, putintopackageroot
def package(env, target, source, PACKAGEROOT, **kw):
bld = env['BUILDERS']['Tar']
bld.set_suffix('.tar.gz')
target, source = stripinstallbuilder(target, source, env)
target, source = putintopackageroot(target, source, env, PACKAGEROOT)
return bld(env, target, source, TARFLAGS='-zc')
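# A hedged note on the flags: TARFLAGS='-zc' asks the Tar builder to
# gzip-compress ('z') while creating ('c') the archive, yielding the
# .tar.gz suffix set above.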
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| gpl-3.0 |
cyberintruder/django-cms | cms/migrations/0001_initial.py | 38 | 10402 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from cms.models import ACCESS_CHOICES, Page
from cms.utils.conf import get_cms_setting
from django.conf import settings
from django.db import models, migrations
import django.utils.timezone
from django.utils.translation import ugettext_lazy as _
template_choices = [(x, _(y)) for x, y in get_cms_setting('TEMPLATES')]
class Migration(migrations.Migration):
dependencies = [
('auth', '__first__'),
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('sites', '__first__'),
]
operations = [
migrations.CreateModel(
name='CMSPlugin',
fields=[
('id', models.AutoField(primary_key=True, verbose_name=_('ID'), auto_created=True, serialize=False)),
('position', models.PositiveSmallIntegerField(null=True, editable=False, blank=True, verbose_name=_('position'))),
('language', models.CharField(db_index=True, max_length=15, verbose_name=_("language"), editable=False)),
('plugin_type', models.CharField(db_index=True, max_length=50, verbose_name=_('plugin_name'), editable=False)),
('creation_date', models.DateTimeField(default=django.utils.timezone.now, verbose_name=_('creation date'), editable=False)),
('changed_date', models.DateTimeField(auto_now=True)),
('level', models.PositiveIntegerField(db_index=True, editable=False)),
('lft', models.PositiveIntegerField(db_index=True, editable=False)),
('rght', models.PositiveIntegerField(db_index=True, editable=False)),
('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
],
options={
},
bases=(models.Model,),
),
migrations.CreateModel(
name='AliasPluginModel',
fields=[
('cmsplugin_ptr', models.OneToOneField(primary_key=True, to='cms.CMSPlugin', auto_created=True, parent_link=True, serialize=False)),
('plugin', models.ForeignKey(null=True, to='cms.CMSPlugin', related_name='alias_reference', editable=False)),
],
options={
},
bases=('cms.cmsplugin',),
),
migrations.AddField(
model_name='cmsplugin',
name='parent',
field=models.ForeignKey(null=True, to='cms.CMSPlugin', blank=True, editable=False),
preserve_default=True,
),
migrations.CreateModel(
name='GlobalPagePermission',
fields=[
('id', models.AutoField(primary_key=True, verbose_name=_('ID'), auto_created=True, serialize=False)),
('can_change', models.BooleanField(default=True, verbose_name=_('can edit'))),
('can_add', models.BooleanField(default=True, verbose_name=_('can add'))),
('can_delete', models.BooleanField(default=True, verbose_name=_('can delete'))),
('can_change_advanced_settings', models.BooleanField(default=False, verbose_name=_('can change advanced settings'))),
('can_publish', models.BooleanField(default=True, verbose_name=_('can publish'))),
('can_change_permissions', models.BooleanField(default=False, help_text='on page level', verbose_name=_('can change permissions'))),
('can_move_page', models.BooleanField(default=True, verbose_name=_('can move'))),
('can_view', models.BooleanField(default=False, help_text='frontend view restriction', verbose_name=_('view restricted'))),
('can_recover_page', models.BooleanField(default=True, help_text='can recover any deleted page', verbose_name=_('can recover pages'))),
('group', models.ForeignKey(null=True, to='auth.Group', verbose_name=_('group'), blank=True)),
                ('sites', models.ManyToManyField(null=True, help_text='If none selected, the user is granted permissions to all sites.', blank=True, to='sites.Site', verbose_name=_('sites'))),
('user', models.ForeignKey(null=True, to=settings.AUTH_USER_MODEL, verbose_name=_('user'), blank=True)),
],
options={
'verbose_name': 'Page global permission',
'verbose_name_plural': 'Pages global permissions',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Page',
fields=[
('id', models.AutoField(primary_key=True, verbose_name=_('ID'), auto_created=True, serialize=False)),
('created_by', models.CharField(max_length=70, verbose_name=_('created by'), editable=False)),
('changed_by', models.CharField(max_length=70, verbose_name=_('changed by'), editable=False)),
('creation_date', models.DateTimeField(auto_now_add=True)),
('changed_date', models.DateTimeField(auto_now=True)),
('publication_date', models.DateTimeField(db_index=True, null=True, help_text='When the page should go live. Status must be "Published" for page to go live.', blank=True, verbose_name=_('publication date'))),
('publication_end_date', models.DateTimeField(db_index=True, null=True, help_text='When to expire the page. Leave empty to never expire.', blank=True, verbose_name=_('publication end date'))),
('in_navigation', models.BooleanField(db_index=True, default=True, verbose_name=_('in navigation'))),
('soft_root', models.BooleanField(db_index=True, default=False, help_text='All ancestors will not be displayed in the navigation', verbose_name=_('soft root'))),
('reverse_id', models.CharField(db_index=True, max_length=40, verbose_name=_('id'), null=True, help_text='A unique identifier that is used with the page_url templatetag for linking to this page', blank=True)),
('navigation_extenders', models.CharField(db_index=True, max_length=80, blank=True, verbose_name=_('attached menu'), null=True)),
('template', models.CharField(max_length=100, default='INHERIT', help_text='The template used to render the content.', verbose_name=_('template'), choices=template_choices)),
('login_required', models.BooleanField(default=False, verbose_name=_('login required'))),
('limit_visibility_in_menu', models.SmallIntegerField(db_index=True, default=None, verbose_name=_('menu visibility'), null=True, choices=Page.LIMIT_VISIBILITY_IN_MENU_CHOICES, help_text='limit when this page is visible in the menu', blank=True)),
('is_home', models.BooleanField(db_index=True, default=False, editable=False)),
('application_urls', models.CharField(db_index=True, max_length=200, blank=True, verbose_name=_('application'), null=True)),
('application_namespace', models.CharField(max_length=200, null=True, blank=True, verbose_name=_('application instance name'))),
('level', models.PositiveIntegerField(db_index=True, editable=False)),
('lft', models.PositiveIntegerField(db_index=True, editable=False)),
('rght', models.PositiveIntegerField(db_index=True, editable=False)),
('tree_id', models.PositiveIntegerField(db_index=True, editable=False)),
('publisher_is_draft', models.BooleanField(db_index=True, default=True, editable=False)),
('languages', models.CharField(max_length=255, null=True, blank=True, editable=False)),
('revision_id', models.PositiveIntegerField(default=0, editable=False)),
('xframe_options', models.IntegerField(default=0, choices=Page.X_FRAME_OPTIONS_CHOICES)),
('parent', models.ForeignKey(null=True, to='cms.Page', related_name='children', blank=True)),
('publisher_public', models.OneToOneField(null=True, to='cms.Page', related_name='publisher_draft', editable=False)),
('site', models.ForeignKey(to='sites.Site', verbose_name=_('site'), related_name='djangocms_pages', help_text='The site the page is accessible at.')),
],
options={
'ordering': ('tree_id', 'lft'),
'permissions': (('view_page', 'Can view page'), ('publish_page', 'Can publish page'), ('edit_static_placeholder', 'Can edit static placeholders')),
'verbose_name_plural': 'pages',
'verbose_name': 'page',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='PagePermission',
fields=[
('id', models.AutoField(primary_key=True, verbose_name=_('ID'), auto_created=True, serialize=False)),
('can_change', models.BooleanField(default=True, verbose_name=_('can edit'))),
('can_add', models.BooleanField(default=True, verbose_name=_('can add'))),
('can_delete', models.BooleanField(default=True, verbose_name=_('can delete'))),
('can_change_advanced_settings', models.BooleanField(default=False, verbose_name=_('can change advanced settings'))),
('can_publish', models.BooleanField(default=True, verbose_name=_('can publish'))),
('can_change_permissions', models.BooleanField(default=False, help_text='on page level', verbose_name=_('can change permissions'))),
('can_move_page', models.BooleanField(default=True, verbose_name=_('can move'))),
('can_view', models.BooleanField(default=False, help_text='frontend view restriction', verbose_name=_('view restricted'))),
('grant_on', models.IntegerField(default=5, verbose_name=_('Grant on'), choices=ACCESS_CHOICES)),
('group', models.ForeignKey(null=True, to='auth.Group', verbose_name=_('group'), blank=True)),
('page', models.ForeignKey(null=True, to='cms.Page', verbose_name=_('page'), blank=True)),
('user', models.ForeignKey(null=True, to=settings.AUTH_USER_MODEL, verbose_name=_('user'), blank=True)),
],
options={
'verbose_name': 'Page permission',
'verbose_name_plural': 'Page permissions',
},
bases=(models.Model,),
),
]
| bsd-3-clause |
kayak/fireant | fireant/tests/dataset/test_pandas_workaround.py | 2 | 2043 | from unittest import TestCase
import numpy as np
import pandas as pd
from fireant.queries.pandas_workaround import df_subtract
class TestSubtract(TestCase):
def test_subtract_partially_aligned_multi_index_dataframes_with_nans(self):
df0 = pd.DataFrame(
data=[
[1, 2],
[3, 4],
[5, 6],
[7, 8],
[9, 10],
[11, 12],
[13, 14],
[15, 16],
[17, 18],
],
columns=["happy", "sad"],
index=pd.MultiIndex.from_product([["a", "b", None], [0, 1, np.nan]], names=["l0", "l1"]),
)
df1 = pd.DataFrame(
data=[
[1, 2],
[3, 4],
[5, 6],
[7, 8],
[9, 10],
[11, 12],
[13, 14],
[15, 16],
[17, 18],
],
columns=["happy", "sad"],
index=pd.MultiIndex.from_product([["b", "c", None], [1, 2, np.nan]], names=["l0", "l1"]),
)
result = df_subtract(df0, df1, fill_value=0)
expected = pd.DataFrame.from_records(
[
["a", 0, 1 - 0, 2 - 0],
["a", 1, 3 - 0, 4 - 0],
["a", np.nan, 5 - 0, 6 - 0],
["b", 0, 7 - 0, 8 - 0],
["b", 1, 9 - 1, 10 - 2],
["b", np.nan, 11 - 5, 12 - 6],
[np.nan, 0, 13 - 0, 14 - 0],
[np.nan, 1, 15 - 13, 16 - 14],
[np.nan, np.nan, 17 - 17, 18 - 18],
["b", 2, 0 - 3, 0 - 4],
["c", 1, 0 - 7, 0 - 8],
["c", 2, 0 - 9, 0 - 10],
["c", np.nan, 0 - 11, 0 - 12],
[np.nan, 2, 0 - 15, 0 - 16],
],
columns=["l0", "l1", "happy", "sad"],
).set_index(["l0", "l1"])
pd.testing.assert_frame_equal(expected, result)
self.assertTrue(result.index.is_unique)
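        # The expectation above encodes df_subtract's semantics: index labels
        # present in both frames are subtracted element-wise, while labels
        # present in only one frame are filled with `fill_value` (0 here) on
        # the missing side before subtracting.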
| apache-2.0 |
leansoft/edx-platform | common/lib/xmodule/xmodule/lti_module.py | 22 | 37689 | """
Learning Tools Interoperability (LTI) module.
Resources
---------
Theoretical background and detailed specifications of LTI can be found on:
http://www.imsglobal.org/LTI/v1p1p1/ltiIMGv1p1p1.html
This module is based on version 1.1.1 of the LTI specification by the
IMS Global authority. For authentication, it uses OAuth1.
When responding back to the LTI tool provider, we must issue a correct
response. Types of responses and their message payload is available at:
Table A1.2 Interpretation of the 'CodeMajor/severity' matrix.
http://www.imsglobal.org/gws/gwsv1p0/imsgws_wsdlBindv1p0.html
A resource to test the LTI protocol (PHP realization):
http://www.imsglobal.org/developers/LTI/test/v1p1/lms.php
We have also begun to add support for LTI 1.2/2.0. We will keep this
docstring in synch with what support is available. The first LTI 2.0
feature to be supported is the REST API results service, see specification
at
http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html
What is supported:
------------------
1.) Display of simple LTI in iframe or a new window.
2.) Multiple LTI components on a single page.
3.) The use of multiple LTI providers per course.
4.) Use of advanced LTI component that provides back a grade.
A) LTI 1.1.1 XML endpoint
a.) The LTI provider sends back a grade to a specified URL.
        b.) Currently only the "update" action is supported. "Read" and "delete"
            actions initially weren't required.
B) LTI 2.0 Result Service JSON REST endpoint
(http://www.imsglobal.org/lti/ltiv2p0/uml/purl.imsglobal.org/vocab/lis/v2/outcomes/Result/service.html)
a.) Discovery of all such LTI http endpoints for a course. External tools GET from this discovery
endpoint and receive URLs for interacting with individual grading units.
(see lms/djangoapps/courseware/views.py:get_course_lti_endpoints)
b.) GET, PUT and DELETE in LTI Result JSON binding
(http://www.imsglobal.org/lti/ltiv2p0/mediatype/application/vnd/ims/lis/v2/result+json/index.html)
            for a provider to synchronize grades into edx-platform. Reading, Setting, and Deleting
Numeric grades between 0 and 1 and text + basic HTML feedback comments are supported, via
GET / PUT / DELETE HTTP methods respectively
"""
import datetime
from django.utils.timezone import UTC
import logging
import oauthlib.oauth1
from oauthlib.oauth1.rfc5849 import signature
import hashlib
import base64
import urllib
import textwrap
import bleach
from lxml import etree
from webob import Response
import mock
from xml.sax.saxutils import escape
from xmodule.editing_module import MetadataOnlyEditingDescriptor
from xmodule.raw_module import EmptyDataRawDescriptor
from xmodule.x_module import XModule, module_attr
from xmodule.lti_2_util import LTI20ModuleMixin, LTIError
from pkg_resources import resource_string
from xblock.core import String, Scope, List, XBlock
from xblock.fields import Boolean, Float
log = logging.getLogger(__name__)
# Make '_' a no-op so we can scrape strings
_ = lambda text: text
DOCS_ANCHOR_TAG_OPEN = (
"<a target='_blank' "
"href='http://edx.readthedocs.org/projects/ca/en/latest/exercises_tools/lti_component.html'>"
)
class LTIFields(object):
"""
Fields to define and obtain LTI tool from provider are set here,
except credentials, which should be set in course settings::
`lti_id` is id to connect tool with credentials in course settings. It should not contain :: (double semicolon)
`launch_url` is launch URL of tool.
`custom_parameters` are additional parameters to navigate to proper book and book page.
For example, for Vitalsource provider, `launch_url` should be
*https://bc-staging.vitalsource.com/books/book*,
and to get to proper book and book page, you should set custom parameters as::
vbid=put_book_id_here
book_location=page/put_page_number_here
Default non-empty URL for `launch_url` is needed due to oauthlib demand (URL scheme should be presented)::
https://github.com/idan/oauthlib/blob/master/oauthlib/oauth1/rfc5849/signature.py#L136
"""
display_name = String(
display_name=_("Display Name"),
help=_(
"Enter the name that students see for this component. "
"Analytics reports may also use the display name to identify this component."
),
scope=Scope.settings,
default="LTI",
)
lti_id = String(
display_name=_("LTI ID"),
help=_(
"Enter the LTI ID for the external LTI provider. "
"This value must be the same LTI ID that you entered in the "
"LTI Passports setting on the Advanced Settings page."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
default='',
scope=Scope.settings
)
launch_url = String(
display_name=_("LTI URL"),
help=_(
"Enter the URL of the external tool that this component launches. "
"This setting is only used when Hide External Tool is set to False."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
default='http://www.example.com',
scope=Scope.settings)
custom_parameters = List(
display_name=_("Custom Parameters"),
help=_(
"Add the key/value pair for any custom parameters, such as the page your e-book should open to or "
"the background color for this component."
"<br />See {docs_anchor_open}the edX LTI documentation{anchor_close} for more details on this setting."
).format(
docs_anchor_open=DOCS_ANCHOR_TAG_OPEN,
anchor_close="</a>"
),
scope=Scope.settings)
open_in_a_new_page = Boolean(
display_name=_("Open in New Page"),
help=_(
"Select True if you want students to click a link that opens the LTI tool in a new window. "
"Select False if you want the LTI content to open in an IFrame in the current page. "
"This setting is only used when Hide External Tool is set to False. "
),
default=True,
scope=Scope.settings
)
has_score = Boolean(
display_name=_("Scored"),
help=_(
"Select True if this component will receive a numerical score from the external LTI system."
),
default=False,
scope=Scope.settings
)
weight = Float(
display_name=_("Weight"),
help=_(
"Enter the number of points possible for this component. "
"The default value is 1.0. "
"This setting is only used when Scored is set to True."
),
default=1.0,
scope=Scope.settings,
values={"min": 0},
)
module_score = Float(
help=_("The score kept in the xblock KVS -- duplicate of the published score in django DB"),
default=None,
scope=Scope.user_state
)
score_comment = String(
help=_("Comment as returned from grader, LTI2.0 spec"),
default="",
scope=Scope.user_state
)
hide_launch = Boolean(
display_name=_("Hide External Tool"),
help=_(
"Select True if you want to use this component as a placeholder for syncing with an external grading "
"system rather than launch an external tool. "
"This setting hides the Launch button and any IFrames for this component."
),
default=False,
scope=Scope.settings
)
# Users will be presented with a message indicating that their e-mail/username would be sent to a third
# party application. When "Open in New Page" is not selected, the tool automatically appears without any user action.
ask_to_send_username = Boolean(
display_name=_("Request user's username"),
# Translators: This is used to request the user's username for a third party service.
# Usernames can only be requested if "Open in New Page" is set to True.
help=_(
"Select True to request the user's username. You must also set Open in New Page to True to get the user's information."
),
default=False,
scope=Scope.settings
)
ask_to_send_email = Boolean(
display_name=_("Request user's email"),
# Translators: This is used to request the user's email for a third party service.
# Emails can only be requested if "Open in New Page" is set to True.
help=_(
"Select True to request the user's email address. You must also set Open in New Page to True to get the user's information."
),
default=False,
scope=Scope.settings
)
description = String(
display_name=_("LTI Application Information"),
help=_(
"Enter a description of the third party application. If requesting username and/or email, use this text box to inform users "
"why their username and/or email will be forwarded to a third party application."
),
default="",
scope=Scope.settings
)
button_text = String(
display_name=_("Button Text"),
help=_(
"Enter the text on the button used to launch the third party application."
),
default="",
scope=Scope.settings
)
accept_grades_past_due = Boolean(
display_name=_("Accept grades past deadline"),
help=_("Select True to allow third party systems to post grades past the deadline."),
default=True,
scope=Scope.settings
)
class LTIModule(LTIFields, LTI20ModuleMixin, XModule):
"""
Module provides LTI integration to course.
    In addition to the usual XModule structure, it handles OAuth signing.
How it works::
1. Get credentials from course settings.
    2. There is a minimal set of parameters that need to be signed (shown here for Vitalsource)::
user_id
oauth_callback
lis_outcome_service_url
lis_result_sourcedid
launch_presentation_return_url
lti_message_type
lti_version
roles
*+ all custom parameters*
These parameters should be encoded and signed by *OAuth1* together with
`launch_url` and *POST* request type.
    3. Signing proceeds with the client key/secret pair obtained from course settings.
       That pair should be obtained from the LTI provider and set in the course settings by the course author.
       After that, the signature and other OAuth data are generated.
       The OAuth data generated after signing is the usual set::
oauth_callback
oauth_nonce
oauth_consumer_key
oauth_signature_method
oauth_timestamp
oauth_version
    4. All of that data is passed to a form and sent to the LTI provider server
       by the browser via JavaScript autosubmit.
Form example::
<form
action="${launch_url}"
name="ltiLaunchForm-${element_id}"
class="ltiLaunchForm"
method="post"
target="ltiLaunchFrame-${element_id}"
encType="application/x-www-form-urlencoded"
>
<input name="launch_presentation_return_url" value="" />
<input name="lis_outcome_service_url" value="" />
<input name="lis_result_sourcedid" value="" />
<input name="lti_message_type" value="basic-lti-launch-request" />
<input name="lti_version" value="LTI-1p0" />
<input name="oauth_callback" value="about:blank" />
<input name="oauth_consumer_key" value="${oauth_consumer_key}" />
<input name="oauth_nonce" value="${oauth_nonce}" />
<input name="oauth_signature_method" value="HMAC-SHA1" />
<input name="oauth_timestamp" value="${oauth_timestamp}" />
<input name="oauth_version" value="1.0" />
<input name="user_id" value="${user_id}" />
<input name="role" value="student" />
<input name="oauth_signature" value="${oauth_signature}" />
<input name="custom_1" value="${custom_param_1_value}" />
<input name="custom_2" value="${custom_param_2_value}" />
<input name="custom_..." value="${custom_param_..._value}" />
<input type="submit" value="Press to Launch" />
</form>
    5. The LTI provider has the same secret key; it signs the data string via *OAuth1*
       and compares signatures.
       If the signatures match, the LTI provider redirects the iframe source to the
       LTI tool web page, and the LTI tool is rendered in the iframe inside the course.
       Otherwise, an error message from the LTI provider is shown.
"""
js = {
'js': [
resource_string(__name__, 'js/src/lti/lti.js')
]
}
css = {'scss': [resource_string(__name__, 'css/lti/lti.scss')]}
js_module_name = "LTI"
def get_input_fields(self):
# LTI provides a list of default parameters that might be passed as
# part of the POST data. These parameters should not be prefixed.
# Likewise, The creator of an LTI link can add custom key/value parameters
# to a launch which are to be included with the launch of the LTI link.
# In this case, we will automatically add `custom_` prefix before this parameters.
# See http://www.imsglobal.org/LTI/v1p1p1/ltiIMGv1p1p1.html#_Toc316828520
PARAMETERS = [
"lti_message_type",
"lti_version",
"resource_link_title",
"resource_link_description",
"user_image",
"lis_person_name_given",
"lis_person_name_family",
"lis_person_name_full",
"lis_person_contact_email_primary",
"lis_person_sourcedid",
"role_scope_mentor",
"context_type",
"context_title",
"context_label",
"launch_presentation_locale",
"launch_presentation_document_target",
"launch_presentation_css_url",
"launch_presentation_width",
"launch_presentation_height",
"launch_presentation_return_url",
"tool_consumer_info_product_family_code",
"tool_consumer_info_version",
"tool_consumer_instance_guid",
"tool_consumer_instance_name",
"tool_consumer_instance_description",
"tool_consumer_instance_url",
"tool_consumer_instance_contact_email",
]
client_key, client_secret = self.get_client_key_secret()
# parsing custom parameters to dict
custom_parameters = {}
for custom_parameter in self.custom_parameters:
try:
param_name, param_value = [p.strip() for p in custom_parameter.split('=', 1)]
except ValueError:
_ = self.runtime.service(self, "i18n").ugettext
msg = _('Could not parse custom parameter: {custom_parameter}. Should be "x=y" string.').format(
custom_parameter="{0!r}".format(custom_parameter)
)
raise LTIError(msg)
# LTI specs: 'custom_' should be prepended before each custom parameter, as pointed in link above.
if param_name not in PARAMETERS:
param_name = 'custom_' + param_name
custom_parameters[unicode(param_name)] = unicode(param_value)
return self.oauth_params(
custom_parameters,
client_key,
client_secret,
)
def get_context(self):
"""
Returns a context.
"""
# use bleach defaults. see https://github.com/jsocol/bleach/blob/master/bleach/__init__.py
# ALLOWED_TAGS are
# ['a', 'abbr', 'acronym', 'b', 'blockquote', 'code', 'em', 'i', 'li', 'ol', 'strong', 'ul']
#
# ALLOWED_ATTRIBUTES are
# 'a': ['href', 'title'],
# 'abbr': ['title'],
# 'acronym': ['title'],
#
# This lets all plaintext through.
sanitized_comment = bleach.clean(self.score_comment)
return {
'input_fields': self.get_input_fields(),
# These parameters do not participate in OAuth signing.
'launch_url': self.launch_url.strip(),
'element_id': self.location.html_id(),
'element_class': self.category,
'open_in_a_new_page': self.open_in_a_new_page,
'display_name': self.display_name,
'form_url': self.runtime.handler_url(self, 'preview_handler').rstrip('/?'),
'hide_launch': self.hide_launch,
'has_score': self.has_score,
'weight': self.weight,
'module_score': self.module_score,
'comment': sanitized_comment,
'description': self.description,
'ask_to_send_username': self.ask_to_send_username,
'ask_to_send_email': self.ask_to_send_email,
'button_text': self.button_text,
'accept_grades_past_due': self.accept_grades_past_due,
}
def get_html(self):
"""
Renders parameters to template.
"""
return self.system.render_template('lti.html', self.get_context())
@XBlock.handler
def preview_handler(self, _, __):
"""
        This is called to get a context with fresh OAuth params for the iframe.
"""
template = self.system.render_template('lti_form.html', self.get_context())
return Response(template, content_type='text/html')
def get_user_id(self):
user_id = self.runtime.anonymous_student_id
assert user_id is not None
return unicode(urllib.quote(user_id))
def get_outcome_service_url(self, service_name="grade_handler"):
"""
Return URL for storing grades.
To test LTI on sandbox we must use http scheme.
While testing locally and on Jenkins, mock_lti_server use http.referer
to obtain scheme, so it is ok to have http(s) anyway.
The scheme logic is handled in lms/lib/xblock/runtime.py
"""
return self.runtime.handler_url(self, service_name, thirdparty=True).rstrip('/?')
def get_resource_link_id(self):
"""
This is an opaque unique identifier that the TC guarantees will be unique
within the TC for every placement of the link.
If the tool / activity is placed multiple times in the same context,
each of those placements will be distinct.
This value will also change if the item is exported from one system or
context and imported into another system or context.
This parameter is required.
Example: u'edx.org-i4x-2-3-lti-31de800015cf4afb973356dbe81496df'
        The hostname, edx.org,
        makes resource_link_id change on import to another system.
        The last part of the location, location.name - 31de800015cf4afb973356dbe81496df,
        is a random hash, updated by course_id;
        this makes resource_link_id unique inside a single course.
        The first part of the location is tag-org-course-category, i4x-2-3-lti.
        Location.name itself does not change on import to another course,
        but org and course_id do.
        So together with org and course_id, in the form
        i4x-2-3-lti-31de800015cf4afb973356dbe81496df, this part of resource_link_id
        makes resource_link_id unique among courses inside the same system.
"""
return unicode(urllib.quote("{}-{}".format(self.system.hostname, self.location.html_id())))
def get_lis_result_sourcedid(self):
"""
This field contains an identifier that indicates the LIS Result Identifier (if any)
associated with this launch. This field identifies a unique row and column within the
TC gradebook. This field is unique for every combination of context_id / resource_link_id / user_id.
This value may change for a particular resource_link_id / user_id from one launch to the next.
The TP should only retain the most recent value for this field for a particular resource_link_id / user_id.
This field is generally optional, but is required for grading.
"""
return "{context}:{resource_link}:{user_id}".format(
context=urllib.quote(self.context_id),
resource_link=self.get_resource_link_id(),
user_id=self.get_user_id()
)
def get_course(self):
"""
Return course by course id.
"""
return self.descriptor.runtime.modulestore.get_course(self.course_id)
@property
def context_id(self):
"""
Return context_id.
context_id is an opaque identifier that uniquely identifies the context (e.g., a course)
that contains the link being launched.
"""
return self.course_id.to_deprecated_string()
@property
def role(self):
"""
Get system user role and convert it to LTI role.
"""
roles = {
'student': u'Student',
'staff': u'Administrator',
'instructor': u'Instructor',
}
return roles.get(self.system.get_user_role(), u'Student')
def oauth_params(self, custom_parameters, client_key, client_secret):
"""
Signs request and returns signature and OAuth parameters.
        `custom_parameters` is a dict of the parsed `custom_parameter` field.
`client_key` and `client_secret` are LTI tool credentials.
Also *anonymous student id* is passed to template and therefore to LTI provider.
"""
client = oauthlib.oauth1.Client(
client_key=unicode(client_key),
client_secret=unicode(client_secret)
)
# Must have parameters for correct signing from LTI:
body = {
u'user_id': self.get_user_id(),
u'oauth_callback': u'about:blank',
u'launch_presentation_return_url': '',
u'lti_message_type': u'basic-lti-launch-request',
u'lti_version': 'LTI-1p0',
u'roles': self.role,
# Parameters required for grading:
u'resource_link_id': self.get_resource_link_id(),
u'lis_result_sourcedid': self.get_lis_result_sourcedid(),
u'context_id': self.context_id,
}
if self.has_score:
body.update({
u'lis_outcome_service_url': self.get_outcome_service_url()
})
self.user_email = ""
self.user_username = ""
# Username and email can't be sent in studio mode, because the user object is not defined.
        # To test this functionality, use the LMS.
if callable(self.runtime.get_real_user):
real_user_object = self.runtime.get_real_user(self.runtime.anonymous_student_id)
try:
self.user_email = real_user_object.email
except AttributeError:
self.user_email = ""
try:
self.user_username = real_user_object.username
except AttributeError:
self.user_username = ""
if self.open_in_a_new_page:
if self.ask_to_send_username and self.user_username:
body["lis_person_sourcedid"] = self.user_username
if self.ask_to_send_email and self.user_email:
body["lis_person_contact_email_primary"] = self.user_email
# Appending custom parameter for signing.
body.update(custom_parameters)
headers = {
# This is needed for body encoding:
'Content-Type': 'application/x-www-form-urlencoded',
}
try:
__, headers, __ = client.sign(
unicode(self.launch_url.strip()),
http_method=u'POST',
body=body,
headers=headers)
except ValueError: # Scheme not in url.
# https://github.com/idan/oauthlib/blob/master/oauthlib/oauth1/rfc5849/signature.py#L136
# Stubbing headers for now:
log.info(
u"LTI module %s in course %s does not have oauth parameters correctly configured.",
self.location,
self.location.course_key,
)
headers = {
u'Content-Type': u'application/x-www-form-urlencoded',
u'Authorization': u'OAuth oauth_nonce="80966668944732164491378916897", \
oauth_timestamp="1378916897", oauth_version="1.0", oauth_signature_method="HMAC-SHA1", \
oauth_consumer_key="", oauth_signature="frVp4JuvT1mVXlxktiAUjQ7%2F1cw%3D"'}
params = headers['Authorization']
# Parse headers to pass to template as part of context:
params = dict([param.strip().replace('"', '').split('=') for param in params.split(',')])
params[u'oauth_nonce'] = params[u'OAuth oauth_nonce']
del params[u'OAuth oauth_nonce']
# oauthlib encodes signature with
# 'Content-Type': 'application/x-www-form-urlencoded'
# so '='' becomes '%3D'.
# We send form via browser, so browser will encode it again,
# So we need to decode signature back:
params[u'oauth_signature'] = urllib.unquote(params[u'oauth_signature']).decode('utf8')
# Add LTI parameters to OAuth parameters for sending in form.
params.update(body)
return params
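    # Illustrative shape of the dict oauth_params() returns (values are
    # placeholders): the signed OAuth fields merged with the LTI body, e.g.
    #   {'oauth_nonce': '...', 'oauth_timestamp': '...',
    #    'oauth_signature': '...', 'user_id': '...', 'roles': u'Student',
    #    'lti_version': 'LTI-1p0', 'custom_vbid': '...', ...}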
def max_score(self):
return self.weight if self.has_score else None
@XBlock.handler
def grade_handler(self, request, suffix): # pylint: disable=unused-argument
"""
This is called by courseware.module_render, to handle an AJAX call.
Used only for grading. Returns XML response.
Example of request body from LTI provider::
<?xml version = "1.0" encoding = "UTF-8"?>
<imsx_POXEnvelopeRequest xmlns = "some_link (may be not required)">
<imsx_POXHeader>
<imsx_POXRequestHeaderInfo>
<imsx_version>V1.0</imsx_version>
<imsx_messageIdentifier>528243ba5241b</imsx_messageIdentifier>
</imsx_POXRequestHeaderInfo>
</imsx_POXHeader>
<imsx_POXBody>
<replaceResultRequest>
<resultRecord>
<sourcedGUID>
<sourcedId>feb-123-456-2929::28883</sourcedId>
</sourcedGUID>
<result>
<resultScore>
<language>en-us</language>
<textString>0.4</textString>
</resultScore>
</result>
</resultRecord>
</replaceResultRequest>
</imsx_POXBody>
</imsx_POXEnvelopeRequest>
Example of correct/incorrect answer XML body:: see response_xml_template.
"""
response_xml_template = textwrap.dedent("""\
<?xml version="1.0" encoding="UTF-8"?>
<imsx_POXEnvelopeResponse xmlns = "http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0">
<imsx_POXHeader>
<imsx_POXResponseHeaderInfo>
<imsx_version>V1.0</imsx_version>
<imsx_messageIdentifier>{imsx_messageIdentifier}</imsx_messageIdentifier>
<imsx_statusInfo>
<imsx_codeMajor>{imsx_codeMajor}</imsx_codeMajor>
<imsx_severity>status</imsx_severity>
<imsx_description>{imsx_description}</imsx_description>
<imsx_messageRefIdentifier>
</imsx_messageRefIdentifier>
</imsx_statusInfo>
</imsx_POXResponseHeaderInfo>
</imsx_POXHeader>
<imsx_POXBody>{response}</imsx_POXBody>
</imsx_POXEnvelopeResponse>
""")
# Returns when `action` is unsupported.
# Supported actions:
# - replaceResultRequest.
unsupported_values = {
'imsx_codeMajor': 'unsupported',
'imsx_description': 'Target does not support the requested operation.',
'imsx_messageIdentifier': 'unknown',
'response': ''
}
# Returns if:
# - past due grades are not accepted and grade is past due
# - score is out of range
# - can't parse response from TP;
# - can't verify OAuth signing or OAuth signing is incorrect.
failure_values = {
'imsx_codeMajor': 'failure',
'imsx_description': 'The request has failed.',
'imsx_messageIdentifier': 'unknown',
'response': ''
}
if not self.accept_grades_past_due and self.is_past_due():
failure_values['imsx_description'] = "Grade is past due"
return Response(response_xml_template.format(**failure_values), content_type="application/xml")
try:
imsx_messageIdentifier, sourcedId, score, action = self.parse_grade_xml_body(request.body)
except Exception as e:
error_message = "Request body XML parsing error: " + escape(e.message)
log.debug("[LTI]: " + error_message)
failure_values['imsx_description'] = error_message
return Response(response_xml_template.format(**failure_values), content_type="application/xml")
# Verify OAuth signing.
try:
self.verify_oauth_body_sign(request)
except (ValueError, LTIError) as e:
failure_values['imsx_messageIdentifier'] = escape(imsx_messageIdentifier)
error_message = "OAuth verification error: " + escape(e.message)
failure_values['imsx_description'] = error_message
log.debug("[LTI]: " + error_message)
return Response(response_xml_template.format(**failure_values), content_type="application/xml")
real_user = self.system.get_real_user(urllib.unquote(sourcedId.split(':')[-1]))
if not real_user: # that means we can't save to database, as we do not have real user id.
failure_values['imsx_messageIdentifier'] = escape(imsx_messageIdentifier)
failure_values['imsx_description'] = "User not found."
return Response(response_xml_template.format(**failure_values), content_type="application/xml")
if action == 'replaceResultRequest':
self.set_user_module_score(real_user, score, self.max_score())
values = {
'imsx_codeMajor': 'success',
'imsx_description': 'Score for {sourced_id} is now {score}'.format(sourced_id=sourcedId, score=score),
'imsx_messageIdentifier': escape(imsx_messageIdentifier),
'response': '<replaceResultResponse/>'
}
log.debug("[LTI]: Grade is saved.")
return Response(response_xml_template.format(**values), content_type="application/xml")
unsupported_values['imsx_messageIdentifier'] = escape(imsx_messageIdentifier)
log.debug("[LTI]: Incorrect action.")
return Response(response_xml_template.format(**unsupported_values), content_type='application/xml')
@classmethod
def parse_grade_xml_body(cls, body):
"""
Parses XML from request.body and returns parsed data
XML body should contain nsmap with namespace, that is specified in LTI specs.
Returns tuple: imsx_messageIdentifier, sourcedId, score, action
Raises Exception if can't parse.
"""
lti_spec_namespace = "http://www.imsglobal.org/services/ltiv1p1/xsd/imsoms_v1p0"
namespaces = {'def': lti_spec_namespace}
data = body.strip().encode('utf-8')
parser = etree.XMLParser(ns_clean=True, recover=True, encoding='utf-8')
root = etree.fromstring(data, parser=parser)
imsx_messageIdentifier = root.xpath("//def:imsx_messageIdentifier", namespaces=namespaces)[0].text or ''
sourcedId = root.xpath("//def:sourcedId", namespaces=namespaces)[0].text
score = root.xpath("//def:textString", namespaces=namespaces)[0].text
action = root.xpath("//def:imsx_POXBody", namespaces=namespaces)[0].getchildren()[0].tag.replace('{' + lti_spec_namespace + '}', '')
        # Raise an exception if score is not a float or is outside the 0.0-1.0 range required by the spec.
score = float(score)
if not 0 <= score <= 1:
raise LTIError('score value outside the permitted range of 0-1.')
return imsx_messageIdentifier, sourcedId, score, action
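    # For the request XML shown in grade_handler's docstring, this method
    # returns:
    #   ('528243ba5241b', 'feb-123-456-2929::28883', 0.4, 'replaceResultRequest')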
def verify_oauth_body_sign(self, request, content_type='application/x-www-form-urlencoded'):
"""
Verify grade request from LTI provider using OAuth body signing.
Uses http://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html::
This specification extends the OAuth signature to include integrity checks on HTTP request bodies
with content types other than application/x-www-form-urlencoded.
Arguments:
request: DjangoWebobRequest.
Raises:
LTIError if request is incorrect.
"""
client_key, client_secret = self.get_client_key_secret()
headers = {
'Authorization': unicode(request.headers.get('Authorization')),
'Content-Type': content_type,
}
sha1 = hashlib.sha1()
sha1.update(request.body)
oauth_body_hash = base64.b64encode(sha1.digest())
oauth_params = signature.collect_parameters(headers=headers, exclude_oauth_signature=False)
oauth_headers = dict(oauth_params)
oauth_signature = oauth_headers.pop('oauth_signature')
mock_request_lti_1 = mock.Mock(
uri=unicode(urllib.unquote(self.get_outcome_service_url())),
http_method=unicode(request.method),
params=oauth_headers.items(),
signature=oauth_signature
)
mock_request_lti_2 = mock.Mock(
uri=unicode(urllib.unquote(request.url)),
http_method=unicode(request.method),
params=oauth_headers.items(),
signature=oauth_signature
)
if oauth_body_hash != oauth_headers.get('oauth_body_hash'):
log.error(
"OAuth body hash verification failed, provided: {}, "
"calculated: {}, for url: {}, body is: {}".format(
oauth_headers.get('oauth_body_hash'),
oauth_body_hash,
self.get_outcome_service_url(),
request.body
)
)
raise LTIError("OAuth body hash verification is failed.")
if (not signature.verify_hmac_sha1(mock_request_lti_1, client_secret) and not
signature.verify_hmac_sha1(mock_request_lti_2, client_secret)):
log.error("OAuth signature verification failed, for "
"headers:{} url:{} method:{}".format(
oauth_headers,
self.get_outcome_service_url(),
unicode(request.method)
))
raise LTIError("OAuth signature verification has failed.")
def get_client_key_secret(self):
"""
Obtains client_key and client_secret credentials from current course.
"""
course = self.get_course()
for lti_passport in course.lti_passports:
try:
lti_id, key, secret = [i.strip() for i in lti_passport.split(':')]
except ValueError:
_ = self.runtime.service(self, "i18n").ugettext
msg = _('Could not parse LTI passport: {lti_passport}. Should be "id:key:secret" string.').format(
lti_passport='{0!r}'.format(lti_passport)
)
raise LTIError(msg)
if lti_id == self.lti_id.strip():
return key, secret
return '', ''
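    # Example: with course.lti_passports == ["my_tool:key123:secret456"] and
    # self.lti_id == "my_tool", this returns ('key123', 'secret456'); the
    # ids and keys here are made-up placeholders.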
def is_past_due(self):
"""
Is it now past this problem's due date, including grace period?
"""
due_date = self.due # pylint: disable=no-member
if self.graceperiod is not None and due_date: # pylint: disable=no-member
close_date = due_date + self.graceperiod # pylint: disable=no-member
else:
close_date = due_date
return close_date is not None and datetime.datetime.now(UTC()) > close_date
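        # Illustrative: with due = 2015-01-01 00:00 UTC and a 2-day
        # graceperiod, close_date is 2015-01-03 00:00 UTC; grade_handler
        # rejects grades after that moment when accept_grades_past_due
        # is False.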
class LTIDescriptor(LTIFields, MetadataOnlyEditingDescriptor, EmptyDataRawDescriptor):
"""
Descriptor for LTI Xmodule.
"""
module_class = LTIModule
grade_handler = module_attr('grade_handler')
preview_handler = module_attr('preview_handler')
lti_2_0_result_rest_handler = module_attr('lti_2_0_result_rest_handler')
clear_user_module_score = module_attr('clear_user_module_score')
get_outcome_service_url = module_attr('get_outcome_service_url')
| agpl-3.0 |
phoemur/blog | app/forms.py | 1 | 6590 | # -*- coding: utf-8 -*-
from flask_wtf import Form, RecaptchaField
from wtforms import StringField, BooleanField, PasswordField, TextAreaField
from flask_pagedown.fields import PageDownField
from wtforms.validators import DataRequired, Email, EqualTo, Length
from .models import User
class LoginForm(Form):
email = StringField('email', validators=[DataRequired(message='Obrigatorio preencher o email'),
Email(message='Email Invalido')])
senha = PasswordField('senha', [DataRequired(
message='Obrigatorio preencher a senha')])
remember_me = BooleanField('remember_me', default=False)
recaptcha = RecaptchaField()
class RegisterForm(Form):
nome = StringField('Nome Completo', validators=[
DataRequired('Obrigatorio preencher o nome')])
email = StringField('Email', validators=[DataRequired(message='Obrigatorio preencher o email'),
Email(message='Email Invalido')])
confirm_email = StringField('Confirmar Email', validators=[DataRequired(message='Obrigatorio confirmar o email'),
EqualTo('email',
message='Emails devem ser iguais')])
senha = PasswordField('Senha', [DataRequired(
message='Obrigatorio preencher a senha')])
confirm = PasswordField('Repetir Senha', [DataRequired(message='Obrigatorio preencher a senha'),
EqualTo('senha',
message='Senhas devem ser iguais')])
about_me = TextAreaField('about_me', validators=[Length(min=0,
max=140,
message='Maximo de 140 caracteres')])
recaptcha = RecaptchaField()
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
def validate(self):
if not Form.validate(self):
return False
user = User.query.filter_by(email=self.email.data).first()
if user is not None:
self.email.errors.append(
'O e-mail pretendido ja esta em uso por outro usuario.')
return False
return True
class EditForm(Form):
nome = StringField('Nome Completo', validators=[
DataRequired('Obrigatorio preencher o nome')])
email = StringField('Email', validators=[DataRequired(message='Obrigatorio preencher o email'),
Email(message='Email Invalido')])
senha = PasswordField('Senha', [DataRequired(
message='Obrigatorio preencher a senha')])
confirm = PasswordField('Repetir Senha', [DataRequired(message='Obrigatorio preencher a senha'),
EqualTo('senha',
message='Senhas devem ser iguais')])
about_me = TextAreaField('about_me', validators=[Length(min=0,
max=140,
message='Maximo de 140 caracteres')])
def __init__(self, email_original, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
self.email_original = email_original
def validate(self):
if not Form.validate(self):
return False
if self.email.data == self.email_original:
return True
user = User.query.filter_by(email=self.email.data).first()
if user is not None:
self.email.errors.append(
'O e-mail pretendido ja esta em uso por outro usuario.')
return False
return True
class EraseForm(Form):
apagar = BooleanField('apagar', default=False)
class RecoverForm(Form):
email = StringField('Email', validators=[DataRequired(message='Obrigatorio preencher o email'),
Email(message='Email Invalido')])
recaptcha = RecaptchaField()
def __init__(self, *args, **kwargs):
Form.__init__(self, *args, **kwargs)
def validate(self):
if not Form.validate(self):
return False
user = User.query.filter_by(email=self.email.data).first()
if user is None:
self.email.errors.append(
'Esta conta nao existe. Favor cadastrar-se.')
return False
return True
class ContactForm(Form):
nome = StringField('Nome Completo', validators=[
DataRequired('Obrigatorio preencher o nome')])
email = StringField('Email', validators=[DataRequired(message='Obrigatorio preencher o email'),
Email(message='Email Invalido')])
assunto = StringField('Assunto', validators=[DataRequired('Obrigatorio preencher assunto'),
Length(min=0, max=140, message='Maximo de 140 caracteres')])
mensagem = TextAreaField('Mensagem', validators=[DataRequired('Obrigatorio preencher a mensagem'),
Length(min=0, max=2048, message='Mensagem muito Longa')])
recaptcha = RecaptchaField()
class EscreverArtigo(Form):
titulo = StringField('Título', validators=[
DataRequired('Obrigatorio preencher o Titulo')])
categoria = StringField('Categoria', validators=[DataRequired('Obrigatorio preencher ao menos uma categoria'),
Length(min=0, max=140, message='Maximo de 140 caracteres')])
desc = StringField('Descricao', validators=[DataRequired('Obrigatorio preencher a descricao'),
Length(min=0, max=300, message='Maximo de 300 caracteres')])
texto = PageDownField('Mensagem', validators=[DataRequired('Obrigatorio preencher o conteudo'),
Length(min=0, max=100000, message='Artigo muito Longo')])
recaptcha = RecaptchaField()
class ComentarArtigo(Form):
texto = TextAreaField('Mensagem', validators=[DataRequired('Obrigatorio preencher o conteudo'),
Length(min=0, max=700, message='Comentario muito longo')])
recaptcha = RecaptchaField()
class SearchForm(Form):
search = StringField('search', validators=[
DataRequired('Preencha a busca')])
| mit |
dinhxuanvu/ssm | ssmlib/problem.py | 1 | 10297 | # (C)2012 Red Hat, Inc., Lukas Czerner <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# problem.py - dealing with problems and errors in ssm
import sys
__all__ = ["ProblemSet", "SsmError", "GeneralError", "ProgrammingError",
"BadEnvVariable", "NotEnoughSpace", "ResizeMatch", "FsNotSpecified",
"DeviceUsed", "ExistingFilesystem", "NoDevices", "ToolMissing",
"CanNotRun", "CommandFailed", "UserInterrupted", "NotSupported",
"NotImplemented"]
# Define prompt codes
PROMPT_NONE = 0
PROMPT_UNMOUNT = 1
PROMPT_SET_DEFAULT = 2
PROMPT_IGNORE = 3
PROMPT_REMOVE = 4
PROMPT_ADJUST = 5
PROMPT_USE = 6
PROMPT_CONTINUE = 7
PROMPT_MSG = [
None,
'Unmount',
'Set default',
'Ignore',
'Remove',
'Adjust',
'Use anyway',
'Continue',
]
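# Each PROMPT_* code indexes into PROMPT_MSG, e.g. PROMPT_MSG[PROMPT_UNMOUNT]
# yields 'Unmount'; PROMPT_NONE maps to None, which means no question is asked.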
# Define problem flags
FL_NONE = 0
FL_MSG_ONLY = 2
FL_VERBOSE_ONLY = (4 | FL_MSG_ONLY)
FL_DEBUG_ONLY = (8 | FL_MSG_ONLY)
FL_DEFAULT_NO = 16
FL_SILENT = 32
FL_EXIT_ON_NO = 64
FL_EXIT_ON_YES = 128
FL_NO_MESSAGE = 256
FL_FORCE_YES = 512
FL_FORCE_NO = 1024
FL_FATAL = (2048 | FL_NO_MESSAGE)
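# The FL_* values are bit masks meant to be OR-ed together; some are
# composites, e.g. FL_VERBOSE_ONLY and FL_DEBUG_ONLY both include
# FL_MSG_ONLY, and FL_FATAL includes FL_NO_MESSAGE. A typical combination
# used below is FL_DEFAULT_NO | FL_EXIT_ON_NO | FL_FORCE_YES: the default
# answer is 'No', answering 'No' aborts, and --force answers 'Yes'.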
class SsmError(Exception):
"""Base exception class for the ssm."""
def __init__(self, msg, errcode=None):
super(SsmError, self).__init__()
self.msg = msg
self.errcode = errcode
def __str__(self):
return "SSM Error ({0}): {1}".format(self.errcode, self.msg)
class GeneralError(SsmError):
def __init__(self, msg, errcode=2001):
super(GeneralError, self).__init__(msg, errcode)
class ProgrammingError(SsmError):
def __init__(self, msg, errcode=2002):
super(ProgrammingError, self).__init__(msg, errcode)
class FsMounted(SsmError):
def __init__(self, msg, errcode=2003):
super(FsMounted, self).__init__(msg, errcode)
class BadEnvVariable(SsmError):
def __init__(self, msg, errcode=2004):
super(BadEnvVariable, self).__init__(msg, errcode)
class NotEnoughSpace(SsmError):
def __init__(self, msg, errcode=2005):
super(NotEnoughSpace, self).__init__(msg, errcode)
class ResizeMatch(SsmError):
def __init__(self, msg, errcode=2006):
super(ResizeMatch, self).__init__(msg, errcode)
class FsNotSpecified(SsmError):
def __init__(self, msg, errcode=2007):
super(FsNotSpecified, self).__init__(msg, errcode)
class DeviceUsed(SsmError):
def __init__(self, msg, errcode=2008):
super(DeviceUsed, self).__init__(msg, errcode)
class NoDevices(SsmError):
def __init__(self, msg, errcode=2009):
super(NoDevices, self).__init__(msg, errcode)
class ToolMissing(SsmError):
def __init__(self, msg, errcode=2010):
super(ToolMissing, self).__init__(msg, errcode)
class CanNotRun(SsmError):
def __init__(self, msg, errcode=2011):
super(CanNotRun, self).__init__(msg, errcode)
class CommandFailed(SsmError):
def __init__(self, msg, errcode=2012):
super(CommandFailed, self).__init__(msg, errcode)
class UserInterrupted(SsmError):
def __init__(self, msg, errcode=2013):
super(UserInterrupted, self).__init__(msg, errcode)
class NotSupported(SsmError):
def __init__(self, msg, errcode=2014):
super(NotSupported, self).__init__(msg, errcode)
class ExistingFilesystem(SsmError):
def __init__(self, msg, errcode=2015):
super(ExistingFilesystem, self).__init__(msg, errcode)
class NotImplemented(SsmError):
def __init__(self, msg, errcode=2016):
super(NotImplemented, self).__init__(msg, errcode)
class ProblemSet(object):
def __init__(self, options):
self.set_options(options)
self.init_problem_set()
def set_options(self, options):
self.options = options
def init_problem_set(self):
self.PROGRAMMING_ERROR = \
['Programming error detected! {0}',
PROMPT_NONE, FL_FATAL, ProgrammingError]
self.GENERAL_ERROR = \
['{0}!', PROMPT_NONE, FL_FATAL, GeneralError]
self.GENERAL_INFO = \
['SSM Info: {0}', PROMPT_NONE, FL_NONE, None]
self.GENERAL_WARNING = \
['SSM Warning: {0}!', PROMPT_NONE, FL_NONE, None]
self.FS_MOUNTED = \
['Device \'{0}\' is mounted on \'{1}\'',
PROMPT_UNMOUNT,
FL_DEFAULT_NO | FL_EXIT_ON_NO | FL_FORCE_YES, FsMounted]
self.BAD_ENV_VARIABLE = \
['Environment variable \'{0}\' contains unsupported value \'{1}\'!',
PROMPT_SET_DEFAULT, FL_EXIT_ON_NO, BadEnvVariable]
self.RESIZE_NOT_ENOUGH_SPACE = \
['There is not enough space in the pool \'{0}\' to grow volume' +
' \'{1}\' to size {2} KB!',
PROMPT_NONE, FL_FATAL, NotEnoughSpace]
self.CREATE_NOT_ENOUGH_SPACE = \
['Not enough space ({0} KB) in the pool \'{1}\' to create ' +
'volume!', PROMPT_ADJUST,
FL_DEFAULT_NO | FL_EXIT_ON_NO | FL_FORCE_YES, NotEnoughSpace]
self.RESIZE_ALREADY_MATCH = \
['\'{0}\' is already {1} KB long, there is nothing ' +
'to resize!',
PROMPT_NONE, FL_FATAL, ResizeMatch]
self.CREATE_MOUNT_NOFS = \
['Mount point \'{0}\' specified, but no file system provided!',
PROMPT_IGNORE, FL_EXIT_ON_NO | FL_FORCE_YES, FsNotSpecified]
self.DEVICE_USED = \
['Device \'{0}\' is already used in the \'{1}\'!',
PROMPT_REMOVE, FL_DEFAULT_NO | FL_FORCE_YES, DeviceUsed]
self.EXISTING_FILESYSTEM = \
['Filesystem \'{0}\' detected on the device \'{1}\'!',
PROMPT_USE, FL_DEFAULT_NO | FL_FORCE_YES, ExistingFilesystem]
self.NO_DEVICES = \
['No devices available to use for the \'{0}\' pool!',
PROMPT_NONE, FL_FATAL, NoDevices]
self.TOOL_MISSING = \
['\'{0}\' is not installed on the system!',
PROMPT_NONE, FL_FATAL, ToolMissing]
self.CAN_NOT_RUN = \
['Can not run command \'{0}\'',
PROMPT_NONE, FL_FATAL, CanNotRun]
self.COMMAND_FAILED = \
['Error while running command \'{0}\'',
PROMPT_NONE, FL_FATAL, CommandFailed]
self.NOT_SUPPORTED = \
['{0} is not supported!',
PROMPT_NONE, FL_FATAL, NotSupported]
self.NOT_IMPLEMENTED = \
['\'{0}\' function is not implemented by {1}!',
PROMPT_NONE, FL_FATAL, NotImplemented]
def _can_print_message(self, flags):
if (flags & FL_DEBUG_ONLY):
return self.options.debug
elif (flags & FL_VERBOSE_ONLY):
return self.options.verbose
elif (flags & FL_NO_MESSAGE):
return False
else:
return True
def _read_char(self):
import tty
import termios
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(sys.stdin.fileno())
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
def _ask_question(self, flags):
if flags & FL_DEFAULT_NO:
sys.stdout.write("(N/y/q) ? ")
else:
sys.stdout.write("(Y/n/q) ? ")
sys.stdout.flush()
ch = ''
if self.options.force and flags & FL_FORCE_NO:
ch = 'N'
elif self.options.force and flags & FL_FORCE_YES:
ch = 'Y'
elif self.options.interactive:
while ch not in ['Y', 'N', 'Q', chr(13)]:
ch = self._read_char().upper()
elif flags & FL_DEFAULT_NO:
ch = 'N'
else:
ch = 'Y'
if ch == chr(13):
if flags & FL_DEFAULT_NO:
ch = 'N'
else:
ch = 'Y'
print(ch)
if ch == 'Y':
return True
elif ch == 'N':
return False
elif ch == 'Q':
err = "Terminated by user!"
raise UserInterrupted(err)
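    # check() consumes a problem definition, a 4-item list of
    # [format string, prompt code, flags, exception class]. Illustrative,
    # commented-out usage (assumes a ProblemSet instance named 'problems'):
    #   problems.check(problems.DEVICE_USED, ['/dev/sda1', 'my_pool'])
    # would print "Device '/dev/sda1' is already used in the 'my_pool'!"
    # and prompt "Remove (N/y/q) ?" with 'No' as the default answer.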
def check(self, problem, args):
if type(args) is not list:
args = [args]
message = problem[0].format(*args)
prompt_msg = PROMPT_MSG[problem[1]]
flags = problem[2]
exc = problem[3]
if (flags & FL_DEFAULT_NO):
res = False
else:
res = True
if self._can_print_message(flags) and \
(flags & FL_MSG_ONLY or prompt_msg is None):
sys.stderr.write(message + "\n")
if not flags & FL_MSG_ONLY and prompt_msg is not None:
sys.stdout.write(message + " ")
sys.stdout.write('{0}'.format(prompt_msg) + " ")
res = self._ask_question(flags)
if (flags & FL_FATAL):
if exc:
raise exc(message)
else:
raise Exception(message)
if ((flags & FL_EXIT_ON_NO) and (not res)) or \
((flags & FL_EXIT_ON_YES) and res):
msg = "Terminated by user!"
if exc:
raise exc(msg)
else:
raise Exception(msg)
return res
def error(self, args):
self.check(self.GENERAL_ERROR, args)
def info(self, args):
self.check(self.GENERAL_INFO, args)
def warn(self, args):
self.check(self.GENERAL_WARNING, args)
def not_supported(self, args):
self.check(self.NOT_SUPPORTED, args)
| gpl-2.0 |
KenV99/service.xbmc.callbacks2 | default.py | 1 | 32187 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2014 KenV99
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
## This script is based on script.randomitems & script.watchlist & script.xbmc.callbacks
# Thanks to their original authors and pilulli
debug = False
idledebug = False
remote = False
if debug:
import sys
if remote:
sys.path.append('C:\\Users\\Ken User\\AppData\\Roaming\\XBMC\\addons\\script.ambibox\\resources\\lib\\'
'pycharm-debug.py3k\\')
import pydevd
pydevd.settrace('192.168.1.103', port=51234, stdoutToServer=True, stderrToServer=True)
else:
sys.path.append('C:\\Program Files (x86)\\JetBrains\\PyCharm 5.0.2\\debug-eggs\\pycharm-debug.egg')
import pydevd
pydevd.settrace('localhost', port=51234, stdoutToServer=True, stderrToServer=True, suspend=False)
import os
from json import loads as jloads
import xbmc
import xbmcgui
import xbmcaddon
import xbmcvfs
import subprocess
import sys
import abc
import requests as requests
import urllib2
import httplib
from urlparse import urlparse
import socket
import traceback
import stat
from monitorlog import LogChecker
from monitorwindows import MonitorWindows
__addon__ = xbmcaddon.Addon('script.xbmc.callbacks2')
__cwd__ = xbmc.translatePath(__addon__.getAddonInfo('path')).decode('utf-8')
__scriptname__ = __addon__.getAddonInfo('name')
__version__ = str(__addon__.getAddonInfo('version'))
__settings__ = xbmcaddon.Addon("script.xbmc.callbacks2")
__language__ = __settings__.getLocalizedString
__settingsdir__ = xbmc.translatePath(os.path.join(__cwd__, 'resources')).decode('utf-8')
__resource__ = xbmc.translatePath(os.path.join(__cwd__, 'resources', 'lib')).decode('utf-8')
__author__ = 'KenV99'
__options__ = dict()
sys.path.append(__resource__)
import monitorext
from gotham2helix import get_installedversion
ver = get_installedversion()
if int(ver['major']) > 13:
from gotham2helix import helix_abortloop as abortloop
else:
from gotham2helix import gotham_abortloop as abortloop
sysplat = sys.platform
needs_log_monitor = False
needs_window_monitor = False
def notification(text, *silence):
"""
Display an XBMC notification box, optionally turn off sound associated with it
@type text: str
@type silence: bool
"""
text = text.encode('utf-8')
if __options__['notifications'] or __options__['tester']:
icon = __settings__.getAddonInfo("icon")
smallicon = icon.encode("utf-8")
dialog = xbmcgui.Dialog()
if __options__['tester']:
dialog.ok(__scriptname__, text)
else:
if silence:
dialog.notification(__scriptname__, text, smallicon, 1000, False)
else:
dialog.notification(__scriptname__, text, smallicon, 1000, True)
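# Note: the following function shadows the module-level 'debug' flag defined
# near the top of this file; that flag is only read during startup, before
# this definition executes.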
def debug(txt):
if isinstance(txt, str):
txt = txt.decode("utf-8")
message = u"$$$ [%s] - %s" % (__scriptname__, txt)
xbmc.log(msg=message.encode("utf-8"), level=xbmc.LOGDEBUG)
def info(txt):
if isinstance(txt, str):
txt = txt.decode("utf-8")
message = u"$$$ [%s] - %s" % (__scriptname__, txt)
xbmc.log(msg=message.encode("utf-8"), level=xbmc.LOGNOTICE)
def read_settings(ddict):
"""
    Reads settings from settings.xml and loads global __options__ and Dispatcher.ddict
@param ddict: dictionary object from Dispatcher
@type ddict: dict
"""
global __options__, needs_log_monitor, needs_window_monitor
_settings = xbmcaddon.Addon("script.xbmc.callbacks2")
needs_log_monitor = False
needs_window_monitor = False
# Read in binary options
setlist = ['user_monitor_playback', 'notifications', 'arg_eventid', 'arg_mediatype', 'arg_filename',
'arg_title', 'arg_aspectratio', 'arg_resolution', 'arg_profilepath', 'arg_stereomode']
for i in setlist:
__options__[i] = (_settings.getSetting(i) == 'true')
__options__['interval'] = int(float(_settings.getSetting('interval')))
__options__['needs_listener'] = __options__['monitorStereoMode'] = __options__['monitorProfiles'] = False
__options__['monitorPlayback'] = False
# Read from settings command related settings and create workers in dictionary structure
setlist = ['onPlaybackStarted', 'onPlaybackStopped', 'onPlaybackPaused', 'onPlaybackResumed',
'onScreensaverActivated', 'onScreensaverDeactivated', 'onShutdown', 'onStereoModeChange',
'onProfileChange', 'onIdle', 'onStartup',
'onPlayBackSeekChapter', 'onQueueNextItem', 'onCleanStarted', 'onCleanFinished', 'onScanStarted',
'onScanFinished', 'onDPMSActivated', 'onDPMSDeactivated', 'onLogSimple', 'onLogRegex',
'onWindowOpen', 'onWindowClosed']
for i in setlist:
setid = (i + '_type').decode('utf-8')
mtype = _settings.getSetting(setid)
if mtype != 'none' and mtype != '':
setid = (i + '_str').decode('utf-8')
if mtype == 'script':
mstr = _settings.getSetting(setid + '.scr')
elif mtype == 'python':
mstr = _settings.getSetting(setid + '.pyt')
elif mtype == 'builtin':
mstr = _settings.getSetting(setid + '.btn')
else:
mstr = _settings.getSetting(setid + '.htp')
if mstr == '':
continue
if mtype == 'script' or mtype == 'python':
setid = (i + '_arg').decode('utf-8')
argstr = _settings.getSetting(setid)
else:
argstr = ''
worker = Factory.build_worker(mtype, mstr, argstr)
if worker is not None:
if mtype == 'script':
setid = (i + '_shell').decode('utf-8')
if _settings.getSetting(setid) == 'true':
worker.needs_shell = True
else:
worker.needs_shell = False
worker.event_id = i
ddict[i] = worker
if i in ['onStereoModeChange', 'onProfileChange']:
__options__['needs_listener'] = True
if i == 'onStereoModeChange':
__options__['monitorStereoMode'] = True
else:
__options__['monitorProfiles'] = True
elif i in ['onPlaybackStarted', 'onPlaybackStopped']:
if __options__['user_monitor_playback']:
__options__['needs_listener'] = True
__options__['monitorPlayback'] = True
elif i in ['onLogSimple', 'onLogRegex']:
needs_log_monitor = True
if i == 'onLogSimple':
__options__['onLogSimple_match'] = _settings.getSetting('onLogSimple_match')
__options__['onLogSimple_nomatch'] = _settings.getSetting('onLogSimple_nomatch')
__options__['onLogSimpleSend'] = _settings.getSetting('onLogSimpleSend') == 'true'
elif i == 'onLogRegex':
__options__['onLogRegex_match'] = _settings.getSetting('onLogRegex_match')
__options__['onLogRegex_nomatch'] = _settings.getSetting('onLogRegex_nomatch')
__options__['onLogRegexSend'] = _settings.getSetting('onLogRegexSend') == 'true'
elif i == 'onWindowOpen':
needs_window_monitor = True
__options__['onWindowOpen_id'] = int(_settings.getSetting('onWindowOpen_id'))
__options__['onWindowOpenSend'] = _settings.getSetting('onWindowOpenSend') == 'true'
                elif i == 'onWindowClosed':  # matches the 'onWindowClosed' entry in setlist above
needs_window_monitor = True
__options__['onWindowClose_id'] = int(_settings.getSetting('onWindowClose_id'))
__options__['onWindowCloseSend'] = _settings.getSetting('onWindowCloseSend') == 'true'
if i == 'onIdle':
__options__['idle_time'] = int(_settings.getSetting('idle_time'))
else:
info('Due to errors, unable to register command: %s' % mstr)
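# Settings ids follow a naming convention derived from the event name:
# '<event>_type' selects the worker type, '<event>_str.<ext>' holds the
# command (.scr/.pyt/.btn/.htp), and '<event>_arg' holds user arguments.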
class Factory(object):
"""
Factory object for building workers with abstract worker superclass and specific subclasses of worker
"""
@staticmethod
def build_worker(worker_type, cmd_string, argstr):
"""
Builds workers
@param worker_type: script, python, builtin, json, http
@type worker_type: str
@param cmd_string: the main command, language specific
@type cmd_string: str
@param argstr: user arguments as entered in settings
@type argstr: list
@return:
"""
worker = None
if worker_type == 'script':
worker = WorkerScript(cmd_string, argstr)
elif worker_type == 'python':
worker = WorkerPy(cmd_string, argstr)
elif worker_type == 'builtin':
worker = WorkerBuiltin(cmd_string, argstr)
elif worker_type == 'json':
worker = WorkerJson(cmd_string, argstr)
elif worker_type == 'http':
worker = WorkerHTTP(cmd_string, argstr)
if worker.passed:
return worker
else:
del worker
return None
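    # Illustrative, commented-out usage (the URL is a hypothetical example):
    #   worker = Factory.build_worker('http', 'http://htpc:8080/ping', '')
    #   if worker is not None:
    #       err, msg = worker.run(['event=onStartup'])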
class Player(xbmc.Player):
"""
Subclasses xbmc.Player
"""
global __options__
dispatcher = None
def __init__(self):
super(Player, self).__init__()
@staticmethod
def playing_type():
"""
@return: [music|movie|episode|stream|liveTV|recordedTV|PVRradio|unknown]
"""
substrings = ['-trailer', 'http://']
isMovie = False
if xbmc.Player.isPlayingAudio(xbmc.Player()):
return "music"
else:
if xbmc.getCondVisibility('VideoPlayer.Content(movies)'):
isMovie = True
try:
filename = xbmc.Player.getPlayingFile(xbmc.Player())
except:
filename = ''
if filename != '':
if filename[0:3] == 'pvr':
if xbmc.getCondVisibility('Pvr.IsPlayingTv'):
return 'liveTV'
elif xbmc.getCondVisibility('Pvr.IsPlayingRecording'):
return 'recordedTV'
elif xbmc.getCondVisibility('Pvr.IsPlayingRadio'):
return 'PVRradio'
else:
for string in substrings:
if string in filename:
isMovie = False
break
if isMovie:
return "movie"
elif xbmc.getCondVisibility('VideoPlayer.Content(episodes)'):
# Check for tv show title and season to make sure it's really an episode
if xbmc.getInfoLabel('VideoPlayer.Season') != "" and xbmc.getInfoLabel('VideoPlayer.TVShowTitle') != "":
return "episode"
elif xbmc.getCondVisibility('Player.IsInternetStream'):
return 'stream'
else:
return 'unknown'
def getTitle(self):
if self.isPlayingAudio():
while xbmc.getInfoLabel('MusicPlayer.Title') is None:
xbmc.sleep(250)
return xbmc.getInfoLabel('MusicPlayer.Title')
elif self.isPlayingVideo():
while xbmc.getInfoLabel('VideoPlayer.Title') is None:
xbmc.sleep(250)
if xbmc.getCondVisibility('VideoPlayer.Content(episodes)'):
if xbmc.getInfoLabel('VideoPlayer.Season') != "" and xbmc.getInfoLabel('VideoPlayer.TVShowTitle') != "":
return (xbmc.getInfoLabel('VideoPlayer.TVShowTitle') + '-Season ' +
xbmc.getInfoLabel('VideoPlayer.Season') + '-' + xbmc.getInfoLabel('VideoPlayer.Title'))
else:
return xbmc.getInfoLabel('VideoPlayer.Title')
else:
return 'Kodi cannot detect title'
def getPlayingFileEx(self):
try:
fn = self.getPlayingFile()
except:
fn = 'unknown'
if fn is None:
fn = 'Kodi returned playing file is none'
return xbmc.translatePath(fn)
def getAspectRatio(self):
try:
ar = xbmc.getInfoLabel("VideoPlayer.VideoAspect")
except:
ar = 'unknown'
if ar is None:
ar = 'unknown'
return ar
    def getResolution(self):
try:
vr = xbmc.getInfoLabel("VideoPlayer.VideoResolution")
except:
vr = 'unknown'
if vr is None:
vr = 'unknown'
return vr
def onPlayBackStarted(self):
if not __options__['monitorPlayback']:
            # Wait up to ~10s (40 * 250 ms) for playback to actually start;
            # give up and return if the player never reports activity
            for i in xrange(1, 41):
                if self.isPlayingAudio() or self.isPlayingVideo():
                    break
                if i == 40:
                    return
                xbmc.sleep(250)
self.onPlayBackStartedEx()
def getRuntimeArgs(self):
runtimeargs = []
if __options__['arg_mediatype']:
t = self.playing_type()
if t is None:
t = 'unknown'
runtimeargs.append('type=' + t)
if __options__['arg_filename']:
runtimeargs.append('file=' + self.getPlayingFileEx())
if __options__['arg_title']:
runtimeargs.append('title=' + self.getTitle())
if self.isPlayingVideo():
if __options__['arg_aspectratio']:
runtimeargs.append('aspectratio=' + self.getAspectRatio())
if __options__['arg_resolution']:
                runtimeargs.append('resolution=' + self.getResolution())
return runtimeargs
def onPlayBackStartedEx(self):
runtimeargs = self.getRuntimeArgs()
self.dispatcher.dispatch('onPlaybackStarted', runtimeargs)
def onPlayBackStopped(self):
if not __options__['monitorPlayback']:
self.onPlayBackStoppedEx()
def onPlayBackEnded(self):
self.onPlayBackStopped()
def onPlayBackStoppedEx(self):
self.dispatcher.dispatch('onPlaybackStopped', [])
def onPlayBackPaused(self):
self.dispatcher.dispatch('onPlaybackPaused', [])
def onPlayBackResumed(self):
runtimeargs = self.getRuntimeArgs()
self.dispatcher.dispatch('onPlaybackResumed', runtimeargs)
def onPlayBackSeekChapter(self, chapnum):
self.dispatcher.dispatch('onPlayBackSeekChapter', [])
    def onPlayBackQueueNextItem(self):
        # read_settings registers this event under the key 'onQueueNextItem'
        self.dispatcher.dispatch('onQueueNextItem', [])
class Monitor(monitorext.MonitorEx): # monitorext.MonitorEx
"""
Subclasses MonitorEx which is a subclass of xbmc.Monitor
"""
player = None
dispatcher = None
def __init__(self, monitorStereoMode, monitorProfiles, monitorPlayback):
"""
@type monitorStereoMode: bool
@type monitorProfiles: bool
@type monitorPlayback: bool
"""
monitorext.MonitorEx.__init__(self, monitorStereoMode, monitorProfiles, monitorPlayback)
def onScanStarted(self, database):
self.dispatcher.dispatch('onScanStarted', [])
def onScanFinished(self, database):
self.dispatcher.dispatch('onScanFinished', [])
def onDPMSActivated(self):
self.dispatcher.dispatch('onDPMSActivated', [])
def onDPMSDeactivated(self):
self.dispatcher.dispatch('onDPMSDeactivated', [])
def onCleanStarted(self, database):
self.dispatcher.dispatch('onCleanStarted', [])
def onCleanFinished(self, database):
self.dispatcher.dispatch('onCleanFinished', [])
def onScreensaverActivated(self):
self.dispatcher.dispatch('onScreensaverActivated', [])
def onScreensaverDeactivated(self):
self.dispatcher.dispatch('onScreensaverDeactivated', [])
def onSettingsChanged(self):
Main.load()
def onStereoModeChange(self):
runtimeargs = []
if __options__['arg_stereomode']:
runtimeargs = ['stereomode=' + self.getCurrentStereoMode()]
self.dispatcher.dispatch('onStereoModeChange', runtimeargs)
def onProfileChange(self):
runtimeargs = []
if __options__['arg_profilepath']:
runtimeargs = ['profilepath=' + self.getCurrentProfile()]
self.dispatcher.dispatch('onProfileChange', runtimeargs)
def onPlaybackStarted(self):
self.player.onPlayBackStartedEx()
def onPlaybackStopped(self):
self.player.onPlayBackStoppedEx()
class Dispatcher():
"""
Class for dispatching workers to jobs
"""
ddict = dict()
def __init__(self):
self.ddict = dict()
def dispatch(self, event_id, runtimeargs):
if event_id in self.ddict:
worker = self.ddict[event_id]
if __options__['arg_eventid']:
runtimeargs = ['event=' + event_id] + runtimeargs
info('Executing command: [%s] for event: %s' % (worker.cmd_str, event_id))
result = worker.run(runtimeargs)
if result[0]:
info('Command for %s resulted in ERROR: %s' % (event_id, result[1]))
notification(__language__(32051) % (event_id, result[1]))
else:
info('Command for %s executed successfully' % event_id)
                if not __options__['tester']:
                    notification(__language__(32052) % event_id)
return result
else:
return [True, 'No registered command for \'%s\'' % event_id]
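    # Illustrative, commented-out usage:
    #   d = Dispatcher()
    #   d.ddict['onIdle'] = some_worker  # hypothetical worker instance
    #   err, msg = d.dispatch('onIdle', [])  # relays the worker's [err, msg]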
class AbstractWorker():
"""
Abstract class for command specific workers to follow
"""
__metaclass__ = abc.ABCMeta
event_id = ''
def __init__(self, cmd_str, userargs):
self.cmd_str = cmd_str.strip()
self.userargs = userargs
self.passed = self.check()
self.needs_shell = False
@abc.abstractmethod
def check(self):
pass
@abc.abstractmethod
def run(self, runtimeargs):
err = None # True if error occured
msg = '' # string containing error message or return message
return[err, msg]
class WorkerScript(AbstractWorker):
def check(self):
tmp = self.cmd_str
self.cmd_str = []
tmp = xbmc.translatePath(tmp).decode('utf-8')
if xbmcvfs.exists(tmp):
try:
mode = os.stat(tmp).st_mode
mode |= stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
os.chmod(tmp, mode)
except:
info('Failed to set execute bit on script: %s' % tmp)
self.cmd_str.append(tmp)
self.separate_userargs()
self.type = 'script'
return True
else:
info('Error - File not found: %s' % tmp)
return False
def separate_userargs(self):
if len(self.userargs) > 0:
ret = []
new = str(self.userargs).split(' ')
tst = ''
for i in new:
tst = tst + i + ' '
                if os.path.isfile(tst):
                    ret.append(tst.rstrip())
elif len(ret) > 1:
ret.append(i)
if len(ret) == 0:
for i in new:
ret.append(i)
self.userargs = ret
else:
self.userargs = []
def run(self, runtimeargs):
err = False
debg = False
msg = ''
margs = self.cmd_str + runtimeargs + self.userargs
if sysplat.startswith('darwin') or debg:
try:
p = subprocess.Popen(margs, stdout=subprocess.PIPE, shell=self.needs_shell, stderr=subprocess.STDOUT)
result = p.communicate()[0]
if result is not None:
msg = 'Process returned: %s' % str(result)
except subprocess.CalledProcessError, e:
err = True
msg = e.output
except:
e = sys.exc_info()[0]
err = True
if hasattr(e, 'message'):
msg = str(e.message)
msg = msg + '\n' + traceback.format_exc()
return [err, msg]
else:
try:
result = subprocess.check_output(margs, shell=self.needs_shell, stderr=subprocess.STDOUT)
if result is not None:
msg = result
except subprocess.CalledProcessError, e:
err = True
msg = e.output
except:
e = sys.exc_info()[0]
err = True
if hasattr(e, 'message'):
msg = str(e.message)
msg = msg + '\n' + traceback.format_exc()
return [err, msg]
class WorkerPy(AbstractWorker):
def check(self):
tmp = xbmc.translatePath(self.cmd_str).decode('utf-8')
if xbmcvfs.exists(tmp):
fn, ext = os.path.splitext(tmp)
if ext == '.py':
self.cmd_str = tmp
self.type = 'python'
return True
else:
info('Error - not a python script: %s' % tmp)
return False
else:
info('Error - File not found: %s' % tmp)
return False
def run(self, runtimeargs):
err = False
msg = ''
        arglist = list(runtimeargs)
        if self.userargs:
            arglist.append(self.userargs)
        args = ', '.join(arglist)
        try:
            if args:
                result = xbmc.executebuiltin('XBMC.RunScript(%s, %s)' % (self.cmd_str, args))
            else:
                result = xbmc.executebuiltin('XBMC.RunScript(%s)' % self.cmd_str)
if result is not None:
msg = result
except:
e = sys.exc_info()[0]
err = True
if hasattr(e, 'message'):
msg = str(e.message)
msg = msg + '\n' + traceback.format_exc()
return [err, msg]
class WorkerBuiltin(AbstractWorker):
def check(self):
self.type = 'built-in'
return True
def run(self, runtimeargs):
err = False
msg = ''
try:
result = xbmc.executebuiltin(self.cmd_str)
if result != '':
err = True
msg = result
except:
e = sys.exc_info()[0]
err = True
if hasattr(e, 'message'):
msg = str(e.message)
msg = msg + '\n' + traceback.format_exc()
return [err, msg]
class WorkerHTTP(AbstractWorker):
def check(self):
o = urlparse(self.cmd_str)
if o.scheme != '' and o.netloc != '' and o.path != '':
self.type = 'http'
return True
else:
info('Invalid url: %s' % self.cmd_str)
return False
def run(self, runtimeargs):
err = False
msg = ''
        u = None  # ensure 'u' is bound for the BadStatusLine handler below
        try:
u = requests.get(self.cmd_str, timeout=20)
info('requests return code: %s' % str(u.status_code))
# u = urllib2.urlopen(self.cmd_str, timeout=20)
# info('urlib2 return code: %s' % u.getcode())
try:
# result = u.read()
result = u.text
except Exception as e:
err = True
result = ''
msg = 'Error on url read'
if hasattr(e, 'message'):
msg = msg + '\n' + (str(e.message))
del u
msg = str(result)
except requests.ConnectionError:
err = True
msg = 'Requests Connection Error'
except requests.HTTPError:
err = True
msg = 'Requests HTTPError'
except requests.URLRequired:
err = True
msg = 'Requests URLRequired Error'
except requests.Timeout:
err = True
msg = 'Requests Timeout Error'
except requests.RequestException:
err = True
msg = 'Generic Requests Error'
except urllib2.HTTPError, e:
err = True
msg = 'HTTPError = ' + str(e.code)
except urllib2.URLError, e:
err = True
msg = 'URLError\n' + e.reason
except httplib.BadStatusLine, e:
err = False
info('Http Bad Status Line caught and passed')
# pass - returned a status code that is not understood in the library
if u is not None:
try:
                    result = u.text  # requests responses expose .text, not .read()
info('Successful read after catching BadStatusLine')
except Exception as e:
err = True
result = ''
msg = 'Error on url read'
if hasattr(e, 'message'):
msg = msg + '\n' + (str(e.message))
del u
msg = str(result)
except httplib.HTTPException, e:
err = True
msg = 'HTTPException'
if hasattr(e, 'message'):
msg = msg + '\n' + e.message
except socket.timeout, e:
err = True
msg = 'The request timed out, host unreachable'
except Exception:
err = True
e = sys.exc_info()[0]
if hasattr(e, 'message'):
msg = str(e.message)
msg = msg + '\n' + traceback.format_exc()
return [err, msg]
class WorkerJson(AbstractWorker):
def check(self):
self.type = 'json'
return True
def run(self, runtimeargs):
err = False
msg = ''
try:
result = xbmc.executeJSONRPC(self.cmd_str)
msg = jloads(result)
except:
e = sys.exc_info()[0]
err = True
if hasattr(e, 'message'):
msg = str(e.message)
msg = msg + '\n' + traceback.format_exc()
return [err, msg]
class Main():
dispatcher = None
mm = None
player = None
lc = None
wm = None
@staticmethod
def log_dispatch_simple(args):
Main.dispatcher.dispatch('onLogSimple', runtimeargs=args)
@staticmethod
def log_dispatch_regex(args):
Main.dispatcher.dispatch('onLogRegex', runtimeargs=args)
@staticmethod
def window_open_dispatch(args):
Main.dispatcher.dispatch('onWindowOpen', runtimeargs=args)
@staticmethod
def window_close_dispatch(args):
        # keyed as 'onWindowClosed' in read_settings' setlist
        Main.dispatcher.dispatch('onWindowClosed', runtimeargs=args)
@staticmethod
def load():
if Main.dispatcher is not None:
del Main.dispatcher
if Main.mm is not None:
del Main.mm
if Main.player is not None:
del Main.player
if Main.lc is not None:
Main.lc.abort()
del Main.lc
Main.dispatcher = Dispatcher()
read_settings(Main.dispatcher.ddict)
Main.mm = Monitor(__options__['monitorStereoMode'], __options__['monitorProfiles'],
__options__['monitorPlayback'])
Main.mm.dispatcher = Main.dispatcher
Main.player = Player()
Main.player.dispatcher = Main.dispatcher
Main.mm.player = Main.player
if __options__['needs_listener']:
Main.mm.Listen(interval=__options__['interval'])
if needs_log_monitor:
Main.lc = LogChecker()
try:
t = __options__['onLogSimple_match']
except:
pass
else:
Main.lc.add_simple_check(t, __options__['onLogSimple_nomatch'], Main.log_dispatch_simple, '')
try:
t = __options__['onLogRegex_match']
except:
pass
else:
Main.lc.add_re_check(t, __options__['onLogRegex_nomatch'], Main.log_dispatch_regex, '')
try:
Main.lc.start()
except:
info('LogChecker thread start failed')
else:
info('LogChecker thread started')
if needs_window_monitor:
Main.wm = MonitorWindows(500)
try:
id = __options__['onWindowOpen_id']
except:
pass
else:
Main.wm.monitoropen = {id:Main.window_open_dispatch}
try:
id = __options__['onWindowClose_id']
except:
pass
else:
Main.wm.monitorclose = {id:Main.window_close_dispatch}
try:
Main.wm.start()
except Exception as e:
info('WindowMonitor thread start failed')
else:
info('WindowMonitor thread started')
@staticmethod
def run():
# global __options__
try:
__options__['tester'] = False
info('Starting %s version %s' % (__scriptname__, __version__))
Main.load()
if 'onStartup' in Main.dispatcher.ddict:
Main.dispatcher.dispatch('onStartup', [])
sleep_int = __options__['interval']
executed_idle = False
            doidle = 'onIdle' in Main.dispatcher.ddict
if doidle:
idletimeThreshold = 60 * __options__['idle_time']
else:
idletimeThreshold = 10e10
startidle = 0
playeridle = False
while not abortloop(sleep_int, Main.mm):
XBMCit = xbmc.getGlobalIdleTime()
if Main.mm.player.isPlaying(): #Condition 1: Player is playing
playeridle = False
startidle = XBMCit
else:
if playeridle is False: #Condition 2: Player not playing, start of idle time not set
playeridle = True
startidle = XBMCit
else: #Condition 3: Player not playing, start of idle already set
pass
myit = XBMCit - startidle
if idledebug:
info('Kodi idle for %u sec(s): Event in T minus %i s' % (myit, -(myit-idletimeThreshold)))
if doidle:
if myit > idletimeThreshold:
if not executed_idle:
Main.dispatcher.dispatch('onIdle', [])
executed_idle = True
else:
executed_idle = False
# xbmc.sleep(sleep_int)
if 'onShutdown' in Main.dispatcher.ddict:
Main.dispatcher.dispatch('onShutdown', [])
if Main.mm is not None:
Main.mm.StopListening()
del Main.mm
if Main.lc is not None:
Main.lc.abort()
del Main.player
del Main.dispatcher
info('Stopped %s' % __scriptname__)
except Exception, e:
e = sys.exc_info()[0]
msg = ''
if hasattr(e, 'message'):
msg = msg + str(e.message)
msg = msg + '\n' + traceback.format_exc()
info(__language__(32053) % msg)
notification(__language__(32053) % msg)
sys.exit()
def __init__(self):
pass
if __name__ == '__main__':
Main().run()
| gpl-3.0 |
tarak/django-password-policies | password_policies/tests/test_utils.py | 1 | 1278 | from password_policies.models import PasswordChangeRequired, PasswordHistory
from password_policies.utils import PasswordCheck
from password_policies.tests.lib import BaseTest
from password_policies.tests.lib import create_user
from password_policies.tests.lib import create_password_history
class PasswordPoliciesUtilsTest(BaseTest):
def setUp(self):
self.user = create_user()
self.check = PasswordCheck(self.user)
create_password_history(self.user)
return super(PasswordPoliciesUtilsTest, self).setUp()
def test_password_check_is_required(self):
# by default no change is required
self.assertFalse(self.check.is_required())
# until a change is required (usually by middleware)
PasswordChangeRequired.objects.create(user=self.user)
self.assertTrue(self.check.is_required())
def test_password_check_is_expired(self):
# `create_password_history` creates a history starting at
# t - PASSWORD_DURATION_SECONDS, so the password is expired
self.assertTrue(self.check.is_expired())
# now we create a password now, so it isn't expired
PasswordHistory.objects.create(user=self.user, password='testpass')
self.assertFalse(self.check.is_expired())
| bsd-3-clause |
tiagoantao/tools-iuc | tools/raxml/raxml.py | 2 | 17222 | #!/usr/bin/env python
"""
Runs RAxML on a sequence file.
For use with RAxML version 8.2.4
"""
import fnmatch
import glob
import optparse
import os
import subprocess
import sys
def stop_err(msg):
sys.stderr.write("%s\n" % msg)
sys.exit()
def getint(name):
basename = name.partition('RUN.')
if basename[2] != '':
num = basename[2]
return int(num)
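# getint lets sort() order RAxML per-run output files numerically, e.g.
# 'RAxML_log.galaxy.RUN.2' before 'RAxML_log.galaxy.RUN.10', which a plain
# lexicographic sort would get wrong.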
def __main__():
usage = "usage: %prog -T <threads> -s <input> -n <output> -m <model> [optional arguments]"
# Parse the primary wrapper's command line options
parser = optparse.OptionParser(usage=usage)
# raxml binary name, hardcoded in the xml file
parser.add_option("--binary", action="store", type="string", dest="binary", help="Command to run")
# (-a)
parser.add_option("--weightfile", action="store", type="string", dest="weightfile", help="Column weight file")
# (-A)
parser.add_option("--secondary_structure_model", action="store", type="string", dest="secondary_structure_model", help="Secondary structure model")
# (-b)
parser.add_option("--bootseed", action="store", type="int", dest="bootseed", help="Bootstrap random number seed")
# (-c)
parser.add_option("--numofcats", action="store", type="int", dest="numofcats", help="Number of distinct rate categories")
# (-d)
parser.add_option("--search_complete_random_tree", action="store_true", dest="search_complete_random_tree", help="Search with a complete random starting tree")
# (-D)
parser.add_option("--ml_search_convergence", action="store_true", dest="ml_search_convergence", help="ML search onvergence criterion")
# (-e)
parser.add_option("--model_opt_precision", action="store", type="float", dest="model_opt_precision", help="Model Optimization Precision (-e)")
# (-E)
parser.add_option("--excludefile", action="store", type="string", dest="excludefile", help="Exclude File Name")
# (-f)
parser.add_option("--search_algorithm", action="store", type="string", dest="search_algorithm", help="Search Algorithm")
# (-F)
parser.add_option("--save_memory_cat_model", action="store_true", dest="save_memory_cat_model", help="Save memory under CAT and GTRGAMMA models")
# (-g)
parser.add_option("--groupingfile", action="store", type="string", dest="groupingfile", help="Grouping File Name")
# (-G)
parser.add_option("--enable_evol_heuristics", action="store_true", dest="enable_evol_heuristics", help="Enable evol algo heuristics")
# (-i)
parser.add_option("--initial_rearrangement_setting", action="store", type="int", dest="initial_rearrangement_setting", help="Initial Rearrangement Setting")
# (-I)
parser.add_option("--posterior_bootstopping_analysis", action="store", type="string", dest="posterior_bootstopping_analysis", help="Posterior bootstopping analysis")
# (-J)
parser.add_option("--majority_rule_consensus", action="store", type="string", dest="majority_rule_consensus", help="Majority rule consensus")
# (-k)
parser.add_option("--print_branch_lengths", action="store_true", dest="print_branch_lengths", help="Print branch lengths")
# (-K)
parser.add_option("--multistate_sub_model", action="store", type="string", dest="multistate_sub_model", help="Multistate substitution model")
# (-m)
parser.add_option("--model_type", action="store", type="string", dest="model_type", help="Model Type")
parser.add_option("--base_model", action="store", type="string", dest="base_model", help="Base Model")
parser.add_option("--aa_empirical_freq", action="store_true", dest="aa_empirical_freq", help="Use AA Empirical base frequences")
parser.add_option("--aa_search_matrix", action="store", type="string", dest="aa_search_matrix", help="AA Search Matrix")
# (-n)
parser.add_option("--name", action="store", type="string", dest="name", help="Run Name")
# (-N/#)
parser.add_option("--number_of_runs", action="store", type="int", dest="number_of_runs", help="Number of alternative runs")
parser.add_option("--number_of_runs_bootstop", action="store", type="string", dest="number_of_runs_bootstop", help="Number of alternative runs based on the bootstop criteria")
# (-M)
parser.add_option("--estimate_individual_branch_lengths", action="store_true", dest="estimate_individual_branch_lengths", help="Estimate individual branch lengths")
# (-o)
parser.add_option("--outgroup_name", action="store", type="string", dest="outgroup_name", help="Outgroup Name")
# (-O)
parser.add_option("--disable_undetermined_seq_check", action="store_true", dest="disable_undetermined_seq_check", help="Disable undetermined sequence check")
# (-p)
parser.add_option("--random_seed", action="store", type="int", dest="random_seed", help="Random Number Seed")
# (-P)
parser.add_option("--external_protein_model", action="store", type="string", dest="external_protein_model", help="External Protein Model")
# (-q)
parser.add_option("--multiple_model", action="store", type="string", dest="multiple_model", help="Multiple Model File")
# (-r)
parser.add_option("--constraint_file", action="store", type="string", dest="constraint_file", help="Constraint File")
# (-R)
parser.add_option("--bin_model_parameter_file", action="store", type="string", dest="bin_model_parameter_file", help="Constraint File")
# (-s)
parser.add_option("--source", action="store", type="string", dest="source", help="Input file")
# (-S)
parser.add_option("--secondary_structure_file", action="store", type="string", dest="secondary_structure_file", help="Secondary structure file")
# (-t)
parser.add_option("--starting_tree", action="store", type="string", dest="starting_tree", help="Starting Tree")
# (-T)
parser.add_option("--threads", action="store", type="int", dest="threads", help="Number of threads to use")
# (-u)
parser.add_option("--use_median_approximation", action="store_true", dest="use_median_approximation", help="Use median approximation")
# (-U)
parser.add_option("--save_memory_gappy_alignments", action="store_true", dest="save_memory_gappy_alignments", help="Save memory in large gapped alignments")
# (-V)
parser.add_option("--disable_rate_heterogeneity", action="store_true", dest="disable_rate_heterogeneity", help="Disable rate heterogeneity")
# (-W)
parser.add_option("--sliding_window_size", action="store", type="string", dest="sliding_window_size", help="Sliding window size")
# (-x)
parser.add_option("--rapid_bootstrap_random_seed", action="store", type="int", dest="rapid_bootstrap_random_seed", help="Rapid Boostrap Random Seed")
# (-y)
parser.add_option("--parsimony_starting_tree_only", action="store_true", dest="parsimony_starting_tree_only", help="Generate a parsimony starting tree only")
# (-z)
parser.add_option("--file_multiple_trees", action="store", type="string", dest="file_multiple_trees", help="Multiple Trees File")
(options, args) = parser.parse_args()
cmd = []
# Required parameters
binary = options.binary
cmd.append(binary)
# Threads
if options.threads > 1:
threads = "-T %d" % options.threads
cmd.append(threads)
# Source
source = "-s %s" % options.source
cmd.append(source)
# Hardcode to "galaxy" first to simplify the output part of the wrapper
# name = "-n %s" % options.name
name = "-n galaxy"
cmd.append(name)
# Model
model_type = options.model_type
base_model = options.base_model
aa_search_matrix = options.aa_search_matrix
aa_empirical_freq = options.aa_empirical_freq
if model_type == 'aminoacid':
model = "-m %s%s" % (base_model, aa_search_matrix)
if aa_empirical_freq:
model = "-m %s%s%s" % (base_model, aa_search_matrix, 'F')
# (-P)
if options.external_protein_model:
external_protein_model = "-P %s" % options.external_protein_model
cmd.append(external_protein_model)
else:
model = "-m %s" % base_model
cmd.append(model)
if model == "GTRCAT":
# (-c)
if options.numofcats:
numofcats = "-c %d" % options.numofcats
cmd.append(numofcats)
# Optional parameters
if options.number_of_runs_bootstop:
number_of_runs_bootstop = "-N %s" % options.number_of_runs_bootstop
cmd.append(number_of_runs_bootstop)
else:
number_of_runs_bootstop = ''
if options.number_of_runs:
number_of_runs_opt = "-N %d" % options.number_of_runs
cmd.append(number_of_runs_opt)
else:
number_of_runs_opt = 0
# (-a)
if options.weightfile:
weightfile = "-a %s" % options.weightfile
cmd.append(weightfile)
# (-A)
if options.secondary_structure_model:
secondary_structure_model = "-A %s" % options.secondary_structure_model
        cmd.append(secondary_structure_model)
# (-b)
if options.bootseed:
bootseed = "-b %d" % options.bootseed
cmd.append(bootseed)
else:
bootseed = 0
# -C - doesn't work in pthreads version, skipped
if options.search_complete_random_tree:
cmd.append("-d")
if options.ml_search_convergence:
cmd.append("-D" )
if options.model_opt_precision:
model_opt_precision = "-e %f" % options.model_opt_precision
cmd.append(model_opt_precision)
if options.excludefile:
excludefile = "-E %s" % options.excludefile
cmd.append(excludefile)
if options.search_algorithm:
search_algorithm = "-f %s" % options.search_algorithm
cmd.append(search_algorithm)
if options.save_memory_cat_model:
cmd.append("-F")
if options.groupingfile:
groupingfile = "-g %s" % options.groupingfile
cmd.append(groupingfile)
    if options.enable_evol_heuristics:
        # Note: the option is parsed as a flag (store_true), so '%f' renders
        # the boolean as 1.000000; RAxML's -G normally expects a float threshold
        enable_evol_heuristics = "-G %f" % options.enable_evol_heuristics
        cmd.append(enable_evol_heuristics)
if options.initial_rearrangement_setting:
initial_rearrangement_setting = "-i %s" % options.initial_rearrangement_setting
cmd.append(initial_rearrangement_setting)
if options.posterior_bootstopping_analysis:
posterior_bootstopping_analysis = "-I %s" % options.posterior_bootstopping_analysis
cmd.append(posterior_bootstopping_analysis)
if options.majority_rule_consensus:
majority_rule_consensus = "-J %s" % options.majority_rule_consensus
cmd.append(majority_rule_consensus)
if options.print_branch_lengths:
cmd.append("-k")
if options.multistate_sub_model:
multistate_sub_model = "-K %s" % options.multistate_sub_model
cmd.append(multistate_sub_model)
if options.estimate_individual_branch_lengths:
cmd.append("-M")
if options.outgroup_name:
outgroup_name = "-o %s" % options.outgroup_name
cmd.append(outgroup_name)
if options.disable_undetermined_seq_check:
cmd.append("-O")
if options.random_seed:
random_seed = "-p %d" % options.random_seed
cmd.append(random_seed)
multiple_model = None
if options.multiple_model:
multiple_model = "-q %s" % options.multiple_model
cmd.append(multiple_model)
if options.constraint_file:
constraint_file = "-r %s" % options.constraint_file
cmd.append(constraint_file)
if options.bin_model_parameter_file:
bin_model_parameter_file_name = "RAxML_binaryModelParameters.galaxy"
        os.symlink(options.bin_model_parameter_file, bin_model_parameter_file_name)
bin_model_parameter_file = "-R %s" % options.bin_model_parameter_file
# Needs testing. Is the hardcoded name or the real path needed?
cmd.append(bin_model_parameter_file)
if options.secondary_structure_file:
secondary_structure_file = "-S %s" % options.secondary_structure_file
cmd.append(secondary_structure_file)
if options.starting_tree:
starting_tree = "-t %s" % options.starting_tree
cmd.append(starting_tree)
if options.use_median_approximation:
cmd.append("-u")
if options.save_memory_gappy_alignments:
cmd.append("-U")
if options.disable_rate_heterogeneity:
cmd.append("-V")
if options.sliding_window_size:
sliding_window_size = "-W %d" % options.sliding_window_size
cmd.append(sliding_window_size)
if options.rapid_bootstrap_random_seed:
rapid_bootstrap_random_seed = "-x %d" % options.rapid_bootstrap_random_seed
cmd.append(rapid_bootstrap_random_seed)
else:
rapid_bootstrap_random_seed = 0
if options.parsimony_starting_tree_only:
cmd.append("-y")
if options.file_multiple_trees:
file_multiple_trees = "-z %s" % options.file_multiple_trees
cmd.append(file_multiple_trees)
print "cmd list: ", cmd, "\n"
full_cmd = " ".join(cmd)
print "Command string: %s" % full_cmd
try:
proc = subprocess.Popen(args=full_cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except Exception as err:
sys.stderr.write("Error invoking command: \n%s\n\n%s\n" % (cmd, err))
sys.exit(1)
stdout, stderr = proc.communicate()
return_code = proc.returncode
if return_code:
sys.stdout.write(stdout)
sys.stderr.write(stderr)
sys.stderr.write("Return error code %i from command:\n" % return_code)
sys.stderr.write("%s\n" % cmd)
else:
sys.stdout.write(stdout)
sys.stdout.write(stderr)
# Multiple runs - concatenate
if number_of_runs_opt > 0:
if (bootseed == 0) and (rapid_bootstrap_random_seed == 0 ):
runfiles = glob.glob('RAxML*RUN*')
runfiles.sort(key=getint)
# Logs
outfile = open('RAxML_log.galaxy', 'w')
for filename in runfiles:
if fnmatch.fnmatch(filename, 'RAxML_log.galaxy.RUN.*'):
infile = open(filename, 'r')
filename_line = "%s\n" % filename
outfile.write(filename_line)
for line in infile:
outfile.write(line)
infile.close()
outfile.close()
# Parsimony Trees
outfile = open('RAxML_parsimonyTree.galaxy', 'w')
for filename in runfiles:
if fnmatch.fnmatch(filename, 'RAxML_parsimonyTree.galaxy.RUN.*'):
infile = open(filename, 'r')
filename_line = "%s\n" % filename
outfile.write(filename_line)
for line in infile:
outfile.write(line)
infile.close()
outfile.close()
# Results
outfile = open('RAxML_result.galaxy', 'w')
for filename in runfiles:
if fnmatch.fnmatch(filename, 'RAxML_result.galaxy.RUN.*'):
infile = open(filename, 'r')
filename_line = "%s\n" % filename
outfile.write(filename_line)
for line in infile:
outfile.write(line)
infile.close()
outfile.close()
# Multiple Model Partition Files
if multiple_model:
files = glob.glob('RAxML_bestTree.galaxy.PARTITION.*')
if len(files) > 0:
files.sort(key=getint)
outfile = open('RAxML_bestTreePartitions.galaxy', 'w')
# Best Tree Partitions
for filename in files:
if fnmatch.fnmatch(filename, 'RAxML_bestTree.galaxy.PARTITION.*'):
infile = open(filename, 'r')
filename_line = "%s\n" % filename
outfile.write(filename_line)
for line in infile:
outfile.write(line)
infile.close()
outfile.close()
else:
outfile = open('RAxML_bestTreePartitions.galaxy', 'w')
outfile.write("No partition files were produced.\n")
outfile.close()
# Result Partitions
files = glob.glob('RAxML_result.galaxy.PARTITION.*')
if len(files) > 0:
files.sort(key=getint)
outfile = open('RAxML_resultPartitions.galaxy', 'w')
for filename in files:
if fnmatch.fnmatch(filename, 'RAxML_result.galaxy.PARTITION.*'):
infile = open(filename, 'r')
filename_line = "%s\n" % filename
outfile.write(filename_line)
for line in infile:
outfile.write(line)
infile.close()
outfile.close()
else:
outfile = open('RAxML_resultPartitions.galaxy', 'w')
outfile.write("No partition files were produced.\n")
outfile.close()
# DEBUG options
infof = open('RAxML_info.galaxy', 'a')
infof.write('\nOM: CLI options DEBUG START:\n')
infof.write(options.__repr__())
infof.write('\nOM: CLI options DEBUG END\n')
if __name__ == "__main__":
__main__()
| mit |
40223221/2015cd_midterm | static/Brython3.1.1-20150328-091302/Lib/xml/dom/__init__.py | 873 | 4019 | """W3C Document Object Model implementation for Python.
The Python mapping of the Document Object Model is documented in the
Python Library Reference in the section on the xml.dom package.
This package contains the following modules:
minidom -- A simple implementation of the Level 1 DOM with namespace
support added (based on the Level 2 specification) and other
minor Level 2 functionality.
pulldom -- DOM builder supporting on-demand tree-building for selected
subtrees of the document.
"""
class Node:
"""Class giving the NodeType constants."""
__slots__ = ()
# DOM implementations may use this as a base class for their own
# Node implementations. If they don't, the constants defined here
# should still be used as the canonical definitions as they match
# the values given in the W3C recommendation. Client code can
# safely refer to these values in all tests of Node.nodeType
# values.
ELEMENT_NODE = 1
ATTRIBUTE_NODE = 2
TEXT_NODE = 3
CDATA_SECTION_NODE = 4
ENTITY_REFERENCE_NODE = 5
ENTITY_NODE = 6
PROCESSING_INSTRUCTION_NODE = 7
COMMENT_NODE = 8
DOCUMENT_NODE = 9
DOCUMENT_TYPE_NODE = 10
DOCUMENT_FRAGMENT_NODE = 11
NOTATION_NODE = 12
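    # Typical use (illustrative): if node.nodeType == Node.ELEMENT_NODE: ...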
#ExceptionCode
INDEX_SIZE_ERR = 1
DOMSTRING_SIZE_ERR = 2
HIERARCHY_REQUEST_ERR = 3
WRONG_DOCUMENT_ERR = 4
INVALID_CHARACTER_ERR = 5
NO_DATA_ALLOWED_ERR = 6
NO_MODIFICATION_ALLOWED_ERR = 7
NOT_FOUND_ERR = 8
NOT_SUPPORTED_ERR = 9
INUSE_ATTRIBUTE_ERR = 10
INVALID_STATE_ERR = 11
SYNTAX_ERR = 12
INVALID_MODIFICATION_ERR = 13
NAMESPACE_ERR = 14
INVALID_ACCESS_ERR = 15
VALIDATION_ERR = 16
class DOMException(Exception):
"""Abstract base class for DOM exceptions.
Exceptions with specific codes are specializations of this class."""
def __init__(self, *args, **kw):
if self.__class__ is DOMException:
raise RuntimeError(
"DOMException should not be instantiated directly")
Exception.__init__(self, *args, **kw)
def _get_code(self):
return self.code
class IndexSizeErr(DOMException):
code = INDEX_SIZE_ERR
class DomstringSizeErr(DOMException):
code = DOMSTRING_SIZE_ERR
class HierarchyRequestErr(DOMException):
code = HIERARCHY_REQUEST_ERR
class WrongDocumentErr(DOMException):
code = WRONG_DOCUMENT_ERR
class InvalidCharacterErr(DOMException):
code = INVALID_CHARACTER_ERR
class NoDataAllowedErr(DOMException):
code = NO_DATA_ALLOWED_ERR
class NoModificationAllowedErr(DOMException):
code = NO_MODIFICATION_ALLOWED_ERR
class NotFoundErr(DOMException):
code = NOT_FOUND_ERR
class NotSupportedErr(DOMException):
code = NOT_SUPPORTED_ERR
class InuseAttributeErr(DOMException):
code = INUSE_ATTRIBUTE_ERR
class InvalidStateErr(DOMException):
code = INVALID_STATE_ERR
class SyntaxErr(DOMException):
code = SYNTAX_ERR
class InvalidModificationErr(DOMException):
code = INVALID_MODIFICATION_ERR
class NamespaceErr(DOMException):
code = NAMESPACE_ERR
class InvalidAccessErr(DOMException):
code = INVALID_ACCESS_ERR
class ValidationErr(DOMException):
code = VALIDATION_ERR
class UserDataHandler:
"""Class giving the operation constants for UserDataHandler.handle()."""
# Based on DOM Level 3 (WD 9 April 2002)
NODE_CLONED = 1
NODE_IMPORTED = 2
NODE_DELETED = 3
NODE_RENAMED = 4
XML_NAMESPACE = "http://www.w3.org/XML/1998/namespace"
XMLNS_NAMESPACE = "http://www.w3.org/2000/xmlns/"
XHTML_NAMESPACE = "http://www.w3.org/1999/xhtml"
EMPTY_NAMESPACE = None
EMPTY_PREFIX = None
from .domreg import getDOMImplementation, registerDOMImplementation
| gpl-3.0 |
trozamon/hadmin | hadmin/jmx.py | 1 | 3639 | """
Apache Hadoop-specific JMX parsing
----------------------------------
Parse JMX JSON objects to get some stats
"""
import json
import re
try:
from http.client import HTTPConnection
except ImportError:
from httplib import HTTPConnection
class JMX(dict):
"""
Base class that does the majority of the JMX/JSON work.
Subclass this in order to provide nice, easy-to-use wrappers.
"""
def __init__(self, json_str=''):
self.load(json_str)
def load(self, json_str):
"""
Load JMX data from a JSON string
"""
try:
beans = json.loads(json_str)
for bean in beans['beans']:
try:
self[bean['name']] = bean
except KeyError:
pass
except ValueError:
pass
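    # The expected payload is the Hadoop /jmx JSON shape, illustrated
    # (commented out) below; each bean is stored under its 'name' key:
    #   {"beans": [{"name": "java.lang:type=Memory",
    #               "HeapMemoryUsage": {"used": 1234}}]}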
def load_from_host(self, addr):
"""
Load JMX data from a host
"""
conn = HTTPConnection(addr)
return self.load_from_connection(conn)
def load_from_connection(self, conn):
"""
Load JMX data from a connection. Connections must have a
:py:func:`request` function and a
:py:func:`getresponse` function.
"""
conn.request('GET', '/jmx')
res = conn.getresponse()
if res.status == 200:
self.load(res.read())
def __getitem__(self, k):
if k in self.keys():
return self.get(k)
else:
for key in self.keys():
if re.match(k, key) is not None:
return self.get(key)
raise KeyError(k)
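    # Lookup accepts either an exact bean name or a regular expression that
    # matches one. Illustrative, commented-out usage:
    #   jmx = NameNodeJMX(json_str)  # json_str fetched from a NameNode's /jmx
    #   jmx['^java.lang:type=Memory$']  # same bean as the literal name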
class DataNodeJMX(JMX):
def __init__(self, json_str=''):
self.load(json_str)
def getFailedVolumes(self):
return self['.*FSDatasetState-null$']['NumFailedVolumes']
class NameNodeJMX(JMX):
"""
NameNode/HDFS statistics from JMX
Since the NameNode is responsible for managing HDFS metadata, various
statistics about the NameNode and HDFS can be obtained from an instance of
this class.
"""
def __init__(self, json_str=''):
self.load(json_str)
def getHeapMemoryUsed(self):
"""
Get the amount of memory used of the JVM Heap, in bytes
"""
return self['^java.lang:type=Memory$']['HeapMemoryUsage']['used']
def getNumThreads(self):
"""
Get the number of currently spawned threads the NameNode is running
"""
return self['^java.lang:type=Threading$']['ThreadCount']
def getTotalCapacity(self):
"""
Get the total capacity of HDFS, in GiB
"""
tmp = self['^Hadoop:service=NameNode,name=FSNamesystem$']
return tmp['CapacityTotalGB']
def getUsedCapacity(self):
"""
Get the used capacity of HDFS, in GiB
"""
tmp = self['^Hadoop:service=NameNode,name=FSNamesystem$']
return tmp['CapacityUsedGB']
def getUnderReplicatedBlocks(self):
"""
Get the number of under-replicated blocks in HDFS
"""
tmp = self['^Hadoop:service=NameNode,name=FSNamesystem$']
return tmp['UnderReplicatedBlocks']
def getCorruptBlocks(self):
"""
Get the number of corrupt blocks in HDFS
"""
tmp = self['^Hadoop:service=NameNode,name=FSNamesystem$']
return tmp['CorruptBlocks']
def getBlocksPendingReplication(self):
"""
Get the number of blocks whose replication is currently pending
"""
tmp = self['^Hadoop:service=NameNode,name=FSNamesystem$']
return tmp['PendingReplicationBlocks']
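# Minimal usage sketch (illustrative; assumes a NameNode JMX endpoint is
# reachable at the given address, which is a placeholder):
if __name__ == '__main__':
    nn = NameNodeJMX()
    nn.load_from_host('namenode.example.com:50070')
    print('HDFS capacity used (GiB): %s of %s' %
          (nn.getUsedCapacity(), nn.getTotalCapacity()))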
| mit |
SpaceKatt/CSPLN | apps/scaffolding/win/web2py/gluon/contrib/pymysql/tests/test_issues.py | 43 | 10550 | import pymysql
from pymysql.tests import base
import unittest
import sys
try:
import imp
reload = imp.reload
except AttributeError:
pass
import datetime
# backwards compatibility:
if not hasattr(unittest, "skip"):
unittest.skip = lambda message: lambda f: f
class TestOldIssues(base.PyMySQLTestCase):
def test_issue_3(self):
""" undefined methods datetime_or_None, date_or_None """
conn = self.connections[0]
c = conn.cursor()
c.execute("create table issue3 (d date, t time, dt datetime, ts timestamp)")
try:
c.execute("insert into issue3 (d, t, dt, ts) values (%s,%s,%s,%s)", (None, None, None, None))
c.execute("select d from issue3")
self.assertEqual(None, c.fetchone()[0])
c.execute("select t from issue3")
self.assertEqual(None, c.fetchone()[0])
c.execute("select dt from issue3")
self.assertEqual(None, c.fetchone()[0])
c.execute("select ts from issue3")
self.assertTrue(isinstance(c.fetchone()[0], datetime.datetime))
finally:
c.execute("drop table issue3")
def test_issue_4(self):
""" can't retrieve TIMESTAMP fields """
conn = self.connections[0]
c = conn.cursor()
c.execute("create table issue4 (ts timestamp)")
try:
c.execute("insert into issue4 (ts) values (now())")
c.execute("select ts from issue4")
self.assertTrue(isinstance(c.fetchone()[0], datetime.datetime))
finally:
c.execute("drop table issue4")
def test_issue_5(self):
""" query on information_schema.tables fails """
con = self.connections[0]
cur = con.cursor()
cur.execute("select * from information_schema.tables")
def test_issue_6(self):
""" exception: TypeError: ord() expected a character, but string of length 0 found """
conn = pymysql.connect(host="localhost",user="root",passwd="",db="mysql")
c = conn.cursor()
c.execute("select * from user")
conn.close()
def test_issue_8(self):
""" Primary Key and Index error when selecting data """
conn = self.connections[0]
c = conn.cursor()
c.execute("""CREATE TABLE `test` (`station` int(10) NOT NULL DEFAULT '0', `dh`
datetime NOT NULL DEFAULT '0000-00-00 00:00:00', `echeance` int(1) NOT NULL
DEFAULT '0', `me` double DEFAULT NULL, `mo` double DEFAULT NULL, PRIMARY
KEY (`station`,`dh`,`echeance`)) ENGINE=MyISAM DEFAULT CHARSET=latin1;""")
try:
self.assertEqual(0, c.execute("SELECT * FROM test"))
c.execute("ALTER TABLE `test` ADD INDEX `idx_station` (`station`)")
self.assertEqual(0, c.execute("SELECT * FROM test"))
finally:
c.execute("drop table test")
def test_issue_9(self):
""" sets DeprecationWarning in Python 2.6 """
try:
reload(pymysql)
except DeprecationWarning:
self.fail()
def test_issue_10(self):
""" Allocate a variable to return when the exception handler is permissive """
conn = self.connections[0]
conn.errorhandler = lambda cursor, errorclass, errorvalue: None
cur = conn.cursor()
cur.execute( "create table t( n int )" )
cur.execute( "create table t( n int )" )
def test_issue_13(self):
""" can't handle large result fields """
conn = self.connections[0]
cur = conn.cursor()
try:
cur.execute("create table issue13 (t text)")
# ticket says 18k
size = 18*1024
cur.execute("insert into issue13 (t) values (%s)", ("x" * size,))
cur.execute("select t from issue13")
# use assertTrue so that obscenely huge error messages don't print
r = cur.fetchone()[0]
self.assertTrue("x" * size == r)
finally:
cur.execute("drop table issue13")
def test_issue_14(self):
""" typo in converters.py """
self.assertEqual('1', pymysql.converters.escape_item(1, "utf8"))
self.assertEqual('1', pymysql.converters.escape_item(1L, "utf8"))
self.assertEqual('1', pymysql.converters.escape_object(1))
self.assertEqual('1', pymysql.converters.escape_object(1L))
def test_issue_15(self):
""" query should be expanded before perform character encoding """
conn = self.connections[0]
c = conn.cursor()
c.execute("create table issue15 (t varchar(32))")
try:
c.execute("insert into issue15 (t) values (%s)", (u'\xe4\xf6\xfc',))
c.execute("select t from issue15")
self.assertEqual(u'\xe4\xf6\xfc', c.fetchone()[0])
finally:
c.execute("drop table issue15")
def test_issue_16(self):
""" Patch for string and tuple escaping """
conn = self.connections[0]
c = conn.cursor()
c.execute("create table issue16 (name varchar(32) primary key, email varchar(32))")
try:
c.execute("insert into issue16 (name, email) values ('pete', 'floydophone')")
c.execute("select email from issue16 where name=%s", ("pete",))
self.assertEqual("floydophone", c.fetchone()[0])
finally:
c.execute("drop table issue16")
@unittest.skip("test_issue_17() requires a custom, legacy MySQL configuration and will not be run.")
def test_issue_17(self):
""" could not connect mysql use passwod """
conn = self.connections[0]
host = self.databases[0]["host"]
db = self.databases[0]["db"]
c = conn.cursor()
# grant access to a table to a user with a password
try:
c.execute("create table issue17 (x varchar(32) primary key)")
c.execute("insert into issue17 (x) values ('hello, world!')")
c.execute("grant all privileges on %s.issue17 to 'issue17user'@'%%' identified by '1234'" % db)
conn.commit()
conn2 = pymysql.connect(host=host, user="issue17user", passwd="1234", db=db)
c2 = conn2.cursor()
c2.execute("select x from issue17")
self.assertEqual("hello, world!", c2.fetchone()[0])
finally:
c.execute("drop table issue17")
def _uni(s, e):
# hack for py3
if sys.version_info[0] > 2:
return unicode(bytes(s, sys.getdefaultencoding()), e)
else:
return unicode(s, e)
class TestNewIssues(base.PyMySQLTestCase):
def test_issue_34(self):
try:
pymysql.connect(host="localhost", port=1237, user="root")
self.fail()
except pymysql.OperationalError, e:
self.assertEqual(2003, e.args[0])
except:
self.fail()
def test_issue_33(self):
conn = pymysql.connect(host="localhost", user="root", db=self.databases[0]["db"], charset="utf8")
c = conn.cursor()
try:
c.execute(_uni("create table hei\xc3\x9fe (name varchar(32))", "utf8"))
c.execute(_uni("insert into hei\xc3\x9fe (name) values ('Pi\xc3\xb1ata')", "utf8"))
c.execute(_uni("select name from hei\xc3\x9fe", "utf8"))
self.assertEqual(_uni("Pi\xc3\xb1ata","utf8"), c.fetchone()[0])
finally:
c.execute(_uni("drop table hei\xc3\x9fe", "utf8"))
@unittest.skip("This test requires manual intervention")
def test_issue_35(self):
conn = self.connections[0]
c = conn.cursor()
print "sudo killall -9 mysqld within the next 10 seconds"
try:
c.execute("select sleep(10)")
self.fail()
except pymysql.OperationalError, e:
self.assertEqual(2013, e.args[0])
def test_issue_36(self):
conn = self.connections[0]
c = conn.cursor()
# kill connections[0]
c.execute("show processlist")
kill_id = None
for id,user,host,db,command,time,state,info in c.fetchall():
if info == "show processlist":
kill_id = id
break
# now nuke the connection
conn.kill(kill_id)
# make sure this connection has broken
try:
c.execute("show tables")
self.fail()
except:
pass
# check the process list from the other connection
try:
c = self.connections[1].cursor()
c.execute("show processlist")
ids = [row[0] for row in c.fetchall()]
self.assertFalse(kill_id in ids)
finally:
del self.connections[0]
def test_issue_37(self):
conn = self.connections[0]
c = conn.cursor()
self.assertEqual(1, c.execute("SELECT @foo"))
self.assertEqual((None,), c.fetchone())
self.assertEqual(0, c.execute("SET @foo = 'bar'"))
c.execute("set @foo = 'bar'")
def test_issue_38(self):
conn = self.connections[0]
c = conn.cursor()
datum = "a" * 1024 * 1023 # reduced size for most default mysql installs
try:
c.execute("create table issue38 (id integer, data mediumblob)")
c.execute("insert into issue38 values (1, %s)", (datum,))
finally:
c.execute("drop table issue38")
def disabled_test_issue_54(self):
conn = self.connections[0]
c = conn.cursor()
big_sql = "select * from issue54 where "
big_sql += " and ".join("%d=%d" % (i,i) for i in xrange(0, 100000))
try:
c.execute("create table issue54 (id integer primary key)")
c.execute("insert into issue54 (id) values (7)")
c.execute(big_sql)
self.assertEqual(7, c.fetchone()[0])
finally:
c.execute("drop table issue54")
class TestGitHubIssues(base.PyMySQLTestCase):
def test_issue_66(self):
conn = self.connections[0]
c = conn.cursor()
self.assertEqual(0, conn.insert_id())
try:
c.execute("create table issue66 (id integer primary key auto_increment, x integer)")
c.execute("insert into issue66 (x) values (1)")
c.execute("insert into issue66 (x) values (1)")
self.assertEqual(2, conn.insert_id())
finally:
c.execute("drop table issue66")
__all__ = ["TestOldIssues", "TestNewIssues", "TestGitHubIssues"]
if __name__ == "__main__":
import unittest
unittest.main()
| gpl-3.0 |
RiceMunk/omnifit | omnifit/utils/utils.py | 1 | 17927 | import numpy as np
import matplotlib.pyplot as plt
from astropy import units as u
import scipy.integrate
from sys import float_info
import warnings
class Baseliner:
"""
A class for interactive baselining of spectroscopic data.
The class works by being fed a spectrum and a matplotlib axis on which
it should be plotted. The spectrum is then plotted to the given axis,
and a number of interactive options are made available to the user.
Left-clicking with the mouse for the first time starts defining a window
from the x-axis location of the click. A second click finishes the
window between the locations of the first and second click.
A third click will finish selecting windows, and perform the baselining.
Alternatively, right-clicking will cancel the last left-click action,
allowing misplaced windows to be adjusted.
Two keys are also accepted:
Pressing "q" will cause the baselining process to be canceled,
effectively skipping the baselining of this spectrum.
Pressing "a" will allow an additional window to be defined, assuming
one has been defined so far (by left-clicking twice to define its
boundaries).
Attributes
----------
windows : `list`
A list of all the set windows.
"""
def __init__(self,ax,spec):
"""
Baseliner(ax,spec)
Initialise the `Baseliner` class by giving it the target axis and
spectrum.
Parameters
----------
ax : `matplotlib.axis`
The matplotlib axis on which the interaction will happen.
spec : `omnifit.spectrum.BaseSpectrum`
The spectrum which will be plotted as the visual reference on
the given axis.
"""
self.__ax = ax
self.__spec = spec
self.__x = spec.x.value
self.__y = spec.y.value
self.__limlo=None
self.__limhi=None
self.__minx=np.min(self.__x)
self.__maxx=np.max(self.__x)
self.__miny=np.min(self.__y)
self.__maxy=np.max(self.__y)
self.__ax.set_xlim(self.__minx,self.__maxx)
self.__ax.set_ylim(self.__miny,self.__maxy)
self.__specplot,=self.__ax.plot(self.__x,self.__y,'k-',drawstyle='steps-mid')
self.__buttonListener = self.__ax.figure.canvas.mpl_connect('button_press_event', self.__mouse_press)
self.__keyListener = self.__ax.figure.canvas.mpl_connect('key_press_event', self.__key_press)
self.windows=[]
def __key_press(self, event):
if event.key=='q':
self.__skip()
if event.key=='a' and self.__limlo != None and self.__limhi != None:
self.__addwindow(self.__limlo,self.__limhi)
self.__ax.plot([self.__limlo,self.__limlo],[self.__miny,self.__maxy],'g-')
self.__ax.plot([self.__limhi,self.__limhi],[self.__miny,self.__maxy],'g-')
self.__remlim()
self.__remlim()
print 'Window added. Ready to receive another one.'
else:
return
def __mouse_press(self, event):
if event.button==1:
self.__setlim(event.xdata)
elif event.button==2:
return
elif event.button==3:
self.__remlim()
def __skip(self):
plt.close()
def __setlim(self,i_x):
if self.__limlo==None:
self.__limlo=i_x
self.__limloplot,=self.__ax.plot([i_x,i_x],[self.__miny,self.__maxy],'b-')
self.__ax.figure.canvas.draw()
elif self.__limhi==None:
self.__limhi=i_x
self.__limhiplot,=self.__ax.plot([i_x,i_x],[self.__miny,self.__maxy],'b-')
self.__ax.figure.canvas.draw()
print 'Ready for finalising. Press once more to do so, or press a to add another window.'
else:
self.__finalise()
def __remlim(self):
if self.__limhi!=None:
self.__limhi=None
self.__limhiplot.set_ydata([self.__miny,self.__miny])
self.__ax.figure.canvas.draw()
elif self.__limlo!=None:
self.__limlo=None
self.__limloplot.set_ydata([self.__miny,self.__miny])
self.__ax.figure.canvas.draw()
else:
print 'No limits to cancel.'
def __addwindow(self,limlo,limhi):
if limhi < limlo:
limlo,limhi = limhi,limlo
self.windows.append([limlo,limhi])
def __finalise(self):
self.__addwindow(self.__limlo,self.__limhi)
self.__ax.figure.canvas.mpl_disconnect(self.__buttonListener)
self.__ax.figure.canvas.mpl_disconnect(self.__keyListener)
plt.close(self.__ax.figure)
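#Minimal usage sketch for Baseliner (illustrative, not part of the public
#API; assumes `spec` is an omnifit.spectrum.BaseSpectrum instance):
def _baseliner_example(spec):
    fig = plt.figure()
    ax = fig.add_subplot(111)
    baseliner = Baseliner(ax, spec)
    plt.show() #interact: two left-clicks set a window, a third click finalises
    return baseliner.windows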
#---------------------
#New units definitions
#---------------------
#the units themselves
unit_t = u.def_unit('transmittance units',doc='Transmittance of radiation')
unit_transmittance = unit_t
unit_abs = u.def_unit('absorbance units',doc='Absorbance of radiation')
unit_absorbance = unit_abs
unit_od = u.def_unit('optical depth units',doc='Optical depth of radiation')
unit_opticaldepth = unit_od
#the equivalencies between the units
equivalencies_absorption = [
(unit_t,unit_abs,lambda x:-np.log10(x),lambda x:10**-x),
(unit_od,unit_abs,lambda x:x/np.log(10),lambda x:x*np.log(10)),
(unit_od,unit_t,lambda x:10**(-x/np.log(10)),lambda x:-np.log10(x)*np.log(10))
]
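#Example conversion between the units defined above (illustrative; a
#transmittance of 0.5 corresponds to an absorbance of -log10(0.5) ~ 0.301):
# (0.5*unit_t).to(unit_abs, equivalencies=equivalencies_absorption)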
#------------------------------------------------------
#Functions related to light scattering and transmission
#------------------------------------------------------
def cde_correct(freq,m):
"""
cde_correct(freq,m)
Generate a CDE-corrected spectrum from a complex refractive index
spectrum.
Parameters
----------
freq : `numpy.ndarray`
The frequency data of the input spectrum, in reciprocal
wavenumbers (cm^-1).
m : `numpy.ndarray`
The complex refractive index spectrum.
Returns
-------
A list containing the following numpy arrays, in given order:
* The spectrum of the absorption cross section of the simulated grain.
* The spectrum of the absorption cross section of the simulated grain,
normalized by the volume distribution of the grain. This parameter
is the equivalent of optical depth in most cases.
* The spectrum of the scattering cross section of the simulated grain,
normalized by the volume distribution of the grain.
* The spectrum of the total cross section of the simulated grain.
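Examples
--------
An illustrative call with a synthetic refractive index spectrum (the
values below are arbitrary and only demonstrate the interface):
>>> import numpy as np
>>> freq = np.linspace(500., 4000., 100)
>>> m = np.full(100, 1.3+0.01j)
>>> cabs, cabs_vol, cscat_vol, ctot = cde_correct(freq, m)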
"""
wl=1.e4/freq
m2=m**2.0
im_part=((m2/(m2-1.0))*np.log(m2)).imag
cabs_vol=(4.0*np.pi/wl)*im_part
cabs=freq*(2.0*m.imag/(m.imag-1))*np.log10(m.imag)
cscat_vol=(freq**3.0/(6.0*np.pi))*cabs
ctot=cabs+cscat_vol
return cabs,cabs_vol,cscat_vol,ctot
def complex_transmission_reflection(in_m0,in_m1,in_m2):
"""
complex_transmission_reflection(in_m0,in_m1,in_m2)
Calculate the complex transmission and reflection coefficients between
media 0, 1, and 2 given their complex refractive indices.
In the Kramers-Kronig implementation (in which this is most likely used
in the context of Omnifit) media 0, 1, and 2 correspond
respectively to the vacuum, ice, and substrate.
Parameters
----------
in_m0 : `complex` or `numpy.ndarray`
The complex refractive index of medium 0.
in_m1 : `complex` or `numpy.ndarray`
The complex refractive index of medium 1.
in_m2 : `complex` or `numpy.ndarray`
The complex refractive index of medium 2.
Returns
-------
A tuple containing the following elements:
* The complex transmission coefficient between media 0 and 1
* The complex transmission coefficient between media 0 and 2
* The complex transmission coefficient between media 1 and 2
* The complex reflection coefficient between media 0 and 1
* The complex reflection coefficient between media 0 and 2
* The complex reflection coefficient between media 1 and 2
"""
complex_transmission = lambda m1,m2: (2.*m1.real)/(m1+m2)
complex_reflection = lambda m1,m2: (m1-m2)/(m1+m2)
return (
complex_transmission(in_m0,in_m1),
complex_transmission(in_m0,in_m2),
complex_transmission(in_m1,in_m2),
complex_reflection(in_m0,in_m1),
complex_reflection(in_m0,in_m2),
complex_reflection(in_m1,in_m2)
)
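#Quick numeric sketch (illustrative values): for vacuum (m=1.0), an ice with
#m=1.3 and a CsI-like substrate with m of about 1.74, the vacuum-to-ice
#transmission coefficient is 2*1.0/(1.0+1.3), roughly 0.87:
# t01,t02,t12,r01,r02,r12 = complex_transmission_reflection(1.0+0.0j, 1.3+0.0j, 1.74+0.0j)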
def kramers_kronig(freq,transmittance,m_substrate,d_ice,m0,freq_m0,m_guess=1.0+0.0j,tol=0.001,maxiter=100,ignore_fraction=0.1,force_kkint_unity=False,precalc=False):
"""
kramers_kronig(freq,transmittance,m_substrate,d_ice,m0,freq_m0,
m_guess=1.0+0.0j,tol=0.001,maxiter=100,ignore_fraction=0.1,
force_kkint_unity=False,precalc=False)
Kramers-Kronig relation.
This is an implementation of the Kramers-Kronig relation calculation
presented in Hudgins et al 1993 (1993ApJS...86..713H), with an improved
integration method adapted from Trotta et al 1996
(The Cosmic Dust Connection, 1996 169-184)
Parameters
----------
freq : `astropy.units.Quantity` or `numpy.ndarray`
The frequency data of the input spectrum. If no units are given, this
is assumed to be in reciprocal wavenumbers (cm^-1).
transmittance : `astropy.units.Quantity` or `numpy.ndarray`
The transmittance data of the input spectrum. This can be given in
units other than transmittance, as long as they can be converted to
transmittance by making use of the `utils.equivalencies_absorption`
equivalency information. If no units are given, transmittance is
assumed.
m_substrate : `complex`
The complex refractive index of the substrate on which the ice being
studied was grown.
d_ice : `astropy.units.Quantity` or `float`
The thickness of the ice which is being studied. If no units are given,
centimeters are assumed.
m0 : `complex`
The complex refractive index of the ice at the reference frequency
defined by `freq_m0` (see below).
freq_m0 : `astropy.units.Quantity` or `float`
The frequency at which the reference complex refractive index `m0`
(see above) is defined. Best results are usually achieved if this
frequency is high compared to the frequency range being probed by
the spectrum.
If this is not defined as `astropy.units.Quantity` in spectroscopic
units, it is assumed to be in reciprocal wavenumbers (cm^-1).
m_guess : `complex` or `numpy.ndarray`
The starting guess of the complex refractive index of the ice. This
can either be a single number (in which case it is assumed to be this
number throughout the entire spectrum) or an array with one value for
each frequency point in the spectrum.
tol : `float`
The square-sum of the residual between the original transmittance and
the transmittance modeled with the iterated complex refractive index
of the ice must be below this value for the iteration to converge. In
other words, the smaller this number is, the better the final result
will be at the expense of extra iterations.
maxiter : `int`
The maximum number of iterations allowed. If this number is reached,
the iteration is considered to not have converged, and an exception is
raised.
ignore_fraction : `float` between 0 and 0.5
The edges of the spectrum are blanked out (and replaced with the
non-blanked value closest to the edge) during iteration to avoid edge
effects arising from the usage of a non-infinite integration range.
This parameter controls how large of a fraction of the edges is blanked
out.
force_kkint_unity : `bool`
The results of the Kramers-Kronig integration are responsible for
determining the real part of the complex refractive index i.e. the
one which represents refraction. Normally this number should not drop
below unity, and unexpected behaviour can arise if it does.
Usually this means that there is something wrong with the input
parameters, but sometimes forcing the result to always be greater or
equal to unity can help. It should be noted, however, that the
accuracy of the results of an integration forced in this way are
suspect at best.
precalc : `bool`
The Kramers-Kronig iteration can be a very computationally intensive
operation. In some situations the iteration runs faster if the large
denominator which is part of the Kramers-Kronig integration is
pre-calculated, instead of being recomputed inside a for loop.
However, this denominator can be a very large array, as it contains a
number of elements equal to the size
of the spectrum squared. Pre-calculating this can outright fail on
lower-end computers as Python runs out of available memory.
High-end systems may benefit from such pre-calculation, though.
Returns
-------
A `numpy.ndarray` which contains the complex refractive index of the
ice, in order of increasing frequency.
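Examples
--------
A minimal call sketch (the spectrum and parameter values here are
synthetic placeholders rather than physically meaningful data)::

  import numpy as np
  from astropy import units as u
  freq = np.linspace(500., 4000., 200)*u.kayser
  transmittance = np.full(200, 0.95)*unit_t
  m_ice = kramers_kronig(freq, transmittance, m_substrate=1.74+0.0j,
                         d_ice=1.0e-4*u.cm, m0=1.3+0.0j, freq_m0=5000.)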
"""
#set up constants
m_vacuum = 1.0+0.0j
#make sure the input array units are correct; convert if necessary
if type(freq) != u.quantity.Quantity:
warnings.warn('No units detected in input freq. Assuming kayser.',RuntimeWarning)
freq *= u.kayser
else:
with u.set_enabled_equivalencies(u.equivalencies.spectral()):
freq=freq.to(u.kayser)
if type(transmittance) != u.quantity.Quantity:
warnings.warn('No units detected in input transmittance. Assuming transmittance units.',RuntimeWarning)
transmittance *= unit_t
else:
with u.set_enabled_equivalencies(equivalencies_absorption):
transmittance = transmittance.to(unit_t)
if type(d_ice) != u.quantity.Quantity:
warnings.warn('No units detected in input d_ice. Assuming centimeters.',RuntimeWarning)
d_ice *= u.cm
else:
d_ice = d_ice.to(u.cm)
#sort the arrays and get rid of units; won't need them after this
initial_sorter = np.argsort(freq)
freq = freq[initial_sorter].value
transmittance = transmittance[initial_sorter].value
d_ice = d_ice.value
#initialise complex refractive index and alpha arrays
m = np.full_like(freq,np.nan+np.nan*1j,dtype=complex)
alpha = np.full_like(freq,np.nan+np.nan*1j,dtype=complex)
#initial guess at m at first index
if type(m_guess)==complex:
m_ice = np.full_like(freq,m_guess,dtype=complex)
else:
m_ice = m_guess
#find top and bottom fraction indices. These will be replaced with dummy values after each integration to get rid of edge effects
if ignore_fraction > 0.5 or ignore_fraction < 0:
raise RuntimeError('ignore_fraction must be between 0.0 and 0.5')
bot_fraction = round(ignore_fraction*len(freq))
top_fraction = len(freq)-bot_fraction
#pre-calculate the large denominator component of the KK integration, if desired
if precalc:
try:
sfreq=(freq).reshape(len(freq),1)
kkint_deno1 = freq**2-sfreq**2
kkint_deno1[kkint_deno1!=0] = 1./kkint_deno1[kkint_deno1!=0]
precalc = True
#or at least try to do so; if run out of memory, switch to the slower no-precalc mode
except MemoryError:
precalc = False
#some other parts can always be precalced
kkint_mul = 1./(2*np.pi*np.pi)
kkint_deno2 = freq**2-freq_m0**2
kkint_deno2[kkint_deno2!=0] = 1./kkint_deno2[kkint_deno2!=0]
#calculate alpha at freq0
alpha0 = m0.imag/(4*np.pi*freq)
#iteration begin!
niter = 0
squaresum_diff = tol+1
while squaresum_diff > tol and niter < maxiter:
#calculate transmission and reflection coefficients
#in these 0 means vacuum, 1 means ice, 2 means substrate
t01,t02,t12,r01,r02,r12 = complex_transmission_reflection(m_vacuum,m_ice,m_substrate)
#the reflection component
# reflection_component = np.abs((t01*t12/t02)/(1.+r01*r12*np.exp(4.j*np.pi*d_ice*m_ice*freq)))**2.)
#this is an evil equation. do NOT touch it
#it calculates the lambert absorption coefficient using the current best guess at m_ice
alpha = (1./d_ice)*(-np.log(transmittance)+np.log(np.abs((t01*t12/t02)/(1.+r01*r12*np.exp(4.j*np.pi*d_ice*m_ice*freq)))**2.))
#using the new alpha, calculate a new n (and thus m) for the ice
#this is done in a parallel for loop, to avoid killing the computer when dealing with large amounts of data
kkint_nomi = alpha-alpha0
kkint = np.full_like(alpha,m0.real)
numcols = kkint_nomi.shape[0]
for current_col in range(numcols):
if precalc:
kkint[current_col]+=kkint_mul*scipy.integrate.simps((alpha-alpha[current_col])*kkint_deno1[current_col,:]-kkint_nomi*kkint_deno2)
else:
kkint_deno1 = freq[current_col]**2-freq**2
kkint_deno1[kkint_deno1!=0] = 1./kkint_deno1[kkint_deno1!=0]
kkint[current_col]+=kkint_mul*scipy.integrate.simps((alpha-alpha[current_col])*kkint_deno1-kkint_nomi/(freq**2-freq_m0**2))
if np.any(kkint<1):
if np.any(kkint<0):
warnings.warn('KK integration is producing negative refractive indices! This will most likely produce nonsensical results.',RuntimeWarning)
else:
warnings.warn('KK integration is producing refractive indices below unity! This may result in unexpected behaviour.',RuntimeWarning)
if force_kkint_unity:
kkint[kkint<1]=1.
m_ice = kkint+1j*alpha/(4*np.pi*freq)
if np.any(np.isnan(m_ice.real)) or np.any(np.isnan(m_ice.imag)):
raise RuntimeError('Produced complex refractive index contains NaNs. Check your input parameters.')
#replace top and bottom fractions of m_ice with the value closest to that edge
#this is done to combat edge effects arising from integrating over a non-infinite range
m_ice[:bot_fraction] = m_ice[bot_fraction]
m_ice[top_fraction:] = m_ice[top_fraction]
#calculate transmission and reflection coefficients (again)
#in these 0 means vacuum, 1 means ice, 2 means substrate
t01,t02,t12,r01,r02,r12 = complex_transmission_reflection(m_vacuum,m_ice,m_substrate)
#model a transmittance using given m_ice and alpha
#yes, this is another evil equation
transmittance_model = np.exp(-alpha*d_ice)*np.abs((t01*t12/t02)/(1.+r01*r12*np.exp(4.j*np.pi*d_ice*m_ice*freq)))**2.
diff = transmittance - transmittance_model
diff[:bot_fraction] = 0. #ignore top...
diff[top_fraction:] = 0. #...and bottom fraction differences
squaresum_diff = np.sum(diff**2) #square sum of difference
niter += 1
#at this point we are done
if niter>=maxiter:
raise RuntimeError('Maximum number of iterations reached before convergence criterion was met.')
return m_ice | bsd-3-clause |
jonghyeopkim/p2pool | p2pool/data.py | 38 | 38487 | from __future__ import division
import hashlib
import os
import random
import sys
import time
from twisted.python import log
import p2pool
from p2pool.bitcoin import data as bitcoin_data, script, sha256
from p2pool.util import math, forest, pack
# hashlink
hash_link_type = pack.ComposedType([
('state', pack.FixedStrType(32)),
('extra_data', pack.FixedStrType(0)), # bit of a hack, but since the donation script is at the end, const_ending is long enough to always make this empty
('length', pack.VarIntType()),
])
def prefix_to_hash_link(prefix, const_ending=''):
assert prefix.endswith(const_ending), (prefix, const_ending)
x = sha256.sha256(prefix)
return dict(state=x.state, extra_data=x.buf[:max(0, len(x.buf)-len(const_ending))], length=x.length//8)
def check_hash_link(hash_link, data, const_ending=''):
extra_length = hash_link['length'] % (512//8)
assert len(hash_link['extra_data']) == max(0, extra_length - len(const_ending))
extra = (hash_link['extra_data'] + const_ending)[len(hash_link['extra_data']) + len(const_ending) - extra_length:]
assert len(extra) == extra_length
return pack.IntType(256).unpack(hashlib.sha256(sha256.sha256(data, (hash_link['state'], extra, 8*hash_link['length'])).digest()).digest())
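# Illustrative sanity check of the hash_link scheme (arbitrary strings):
# resuming the stored midstate over the remaining data reproduces the
# double-SHA256 of the full message, i.e.
#   check_hash_link(prefix_to_hash_link('spam and '), 'eggs') == \
#     bitcoin_data.hash256('spam and ' + 'eggs')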
# shares
share_type = pack.ComposedType([
('type', pack.VarIntType()),
('contents', pack.VarStrType()),
])
def load_share(share, net, peer_addr):
assert peer_addr is None or isinstance(peer_addr, tuple)
if share['type'] < Share.VERSION:
from p2pool import p2p
raise p2p.PeerMisbehavingError('sent an obsolete share')
elif share['type'] == Share.VERSION:
return Share(net, peer_addr, Share.share_type.unpack(share['contents']))
else:
raise ValueError('unknown share type: %r' % (share['type'],))
DONATION_SCRIPT = '4104ffd03de44a6e11b9917f3a29f9443283d9871c9d743ef30d5eddcd37094b64d1b3d8090496b53256786bf5c82932ec23c3b74d9f05a6f95a8b5529352656664bac'.decode('hex')
class Share(object):
VERSION = 13
VOTING_VERSION = 13
SUCCESSOR = None
small_block_header_type = pack.ComposedType([
('version', pack.VarIntType()),
('previous_block', pack.PossiblyNoneType(0, pack.IntType(256))),
('timestamp', pack.IntType(32)),
('bits', bitcoin_data.FloatingIntegerType()),
('nonce', pack.IntType(32)),
])
share_info_type = pack.ComposedType([
('share_data', pack.ComposedType([
('previous_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
('coinbase', pack.VarStrType()),
('nonce', pack.IntType(32)),
('pubkey_hash', pack.IntType(160)),
('subsidy', pack.IntType(64)),
('donation', pack.IntType(16)),
('stale_info', pack.EnumType(pack.IntType(8), dict((k, {0: None, 253: 'orphan', 254: 'doa'}.get(k, 'unk%i' % (k,))) for k in xrange(256)))),
('desired_version', pack.VarIntType()),
])),
('new_transaction_hashes', pack.ListType(pack.IntType(256))),
('transaction_hash_refs', pack.ListType(pack.VarIntType(), 2)), # pairs of share_count, tx_count
('far_share_hash', pack.PossiblyNoneType(0, pack.IntType(256))),
('max_bits', bitcoin_data.FloatingIntegerType()),
('bits', bitcoin_data.FloatingIntegerType()),
('timestamp', pack.IntType(32)),
('absheight', pack.IntType(32)),
('abswork', pack.IntType(128)),
])
share_type = pack.ComposedType([
('min_header', small_block_header_type),
('share_info', share_info_type),
('ref_merkle_link', pack.ComposedType([
('branch', pack.ListType(pack.IntType(256))),
('index', pack.IntType(0)),
])),
('last_txout_nonce', pack.IntType(64)),
('hash_link', hash_link_type),
('merkle_link', pack.ComposedType([
('branch', pack.ListType(pack.IntType(256))),
('index', pack.IntType(0)), # it will always be 0
])),
])
ref_type = pack.ComposedType([
('identifier', pack.FixedStrType(64//8)),
('share_info', share_info_type),
])
gentx_before_refhash = pack.VarStrType().pack(DONATION_SCRIPT) + pack.IntType(64).pack(0) + pack.VarStrType().pack('\x6a\x28' + pack.IntType(256).pack(0) + pack.IntType(64).pack(0))[:3]
@classmethod
def generate_transaction(cls, tracker, share_data, block_target, desired_timestamp, desired_target, ref_merkle_link, desired_other_transaction_hashes_and_fees, net, known_txs=None, last_txout_nonce=0, base_subsidy=None):
previous_share = tracker.items[share_data['previous_share_hash']] if share_data['previous_share_hash'] is not None else None
height, last = tracker.get_height_and_last(share_data['previous_share_hash'])
assert height >= net.REAL_CHAIN_LENGTH or last is None
if height < net.TARGET_LOOKBEHIND:
pre_target3 = net.MAX_TARGET
else:
attempts_per_second = get_pool_attempts_per_second(tracker, share_data['previous_share_hash'], net.TARGET_LOOKBEHIND, min_work=True, integer=True)
pre_target = 2**256//(net.SHARE_PERIOD*attempts_per_second) - 1 if attempts_per_second else 2**256-1
pre_target2 = math.clip(pre_target, (previous_share.max_target*9//10, previous_share.max_target*11//10))
pre_target3 = math.clip(pre_target2, (net.MIN_TARGET, net.MAX_TARGET))
max_bits = bitcoin_data.FloatingInteger.from_target_upper_bound(pre_target3)
bits = bitcoin_data.FloatingInteger.from_target_upper_bound(math.clip(desired_target, (pre_target3//30, pre_target3)))
new_transaction_hashes = []
new_transaction_size = 0
transaction_hash_refs = []
other_transaction_hashes = []
past_shares = list(tracker.get_chain(share_data['previous_share_hash'], min(height, 100)))
tx_hash_to_this = {}
for i, share in enumerate(past_shares):
for j, tx_hash in enumerate(share.new_transaction_hashes):
if tx_hash not in tx_hash_to_this:
tx_hash_to_this[tx_hash] = [1+i, j] # share_count, tx_count
for tx_hash, fee in desired_other_transaction_hashes_and_fees:
if tx_hash in tx_hash_to_this:
this = tx_hash_to_this[tx_hash]
else:
if known_txs is not None:
this_size = bitcoin_data.tx_type.packed_size(known_txs[tx_hash])
if new_transaction_size + this_size > 50000: # only allow 50 kB of new txns/share
break
new_transaction_size += this_size
new_transaction_hashes.append(tx_hash)
this = [0, len(new_transaction_hashes)-1]
transaction_hash_refs.extend(this)
other_transaction_hashes.append(tx_hash)
included_transactions = set(other_transaction_hashes)
removed_fees = [fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash not in included_transactions]
definite_fees = sum(0 if fee is None else fee for tx_hash, fee in desired_other_transaction_hashes_and_fees if tx_hash in included_transactions)
if None not in removed_fees:
share_data = dict(share_data, subsidy=share_data['subsidy'] - sum(removed_fees))
else:
assert base_subsidy is not None
share_data = dict(share_data, subsidy=base_subsidy + definite_fees)
weights, total_weight, donation_weight = tracker.get_cumulative_weights(previous_share.share_data['previous_share_hash'] if previous_share is not None else None,
max(0, min(height, net.REAL_CHAIN_LENGTH) - 1),
65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target),
)
assert total_weight == sum(weights.itervalues()) + donation_weight, (total_weight, sum(weights.itervalues()) + donation_weight)
amounts = dict((script, share_data['subsidy']*(199*weight)//(200*total_weight)) for script, weight in weights.iteritems()) # 99.5% goes according to weights prior to this share
this_script = bitcoin_data.pubkey_hash_to_script2(share_data['pubkey_hash'])
amounts[this_script] = amounts.get(this_script, 0) + share_data['subsidy']//200 # 0.5% goes to block finder
amounts[DONATION_SCRIPT] = amounts.get(DONATION_SCRIPT, 0) + share_data['subsidy'] - sum(amounts.itervalues()) # all that's left over is the donation weight and some extra satoshis due to rounding
if sum(amounts.itervalues()) != share_data['subsidy'] or any(x < 0 for x in amounts.itervalues()):
raise ValueError()
dests = sorted(amounts.iterkeys(), key=lambda script: (script == DONATION_SCRIPT, amounts[script], script))[-4000:] # block length limit, unlikely to ever be hit
share_info = dict(
share_data=share_data,
far_share_hash=None if last is None and height < 99 else tracker.get_nth_parent_hash(share_data['previous_share_hash'], 99),
max_bits=max_bits,
bits=bits,
timestamp=math.clip(desired_timestamp, (
(previous_share.timestamp + net.SHARE_PERIOD) - (net.SHARE_PERIOD - 1), # = previous_share.timestamp + 1
(previous_share.timestamp + net.SHARE_PERIOD) + (net.SHARE_PERIOD - 1),
)) if previous_share is not None else desired_timestamp,
new_transaction_hashes=new_transaction_hashes,
transaction_hash_refs=transaction_hash_refs,
absheight=((previous_share.absheight if previous_share is not None else 0) + 1) % 2**32,
abswork=((previous_share.abswork if previous_share is not None else 0) + bitcoin_data.target_to_average_attempts(bits.target)) % 2**128,
)
gentx = dict(
version=1,
tx_ins=[dict(
previous_output=None,
sequence=None,
script=share_data['coinbase'],
)],
tx_outs=[dict(value=amounts[script], script=script) for script in dests if amounts[script] or script == DONATION_SCRIPT] + [dict(
value=0,
script='\x6a\x28' + cls.get_ref_hash(net, share_info, ref_merkle_link) + pack.IntType(64).pack(last_txout_nonce),
)],
lock_time=0,
)
def get_share(header, last_txout_nonce=last_txout_nonce):
min_header = dict(header); del min_header['merkle_root']
share = cls(net, None, dict(
min_header=min_header,
share_info=share_info,
ref_merkle_link=dict(branch=[], index=0),
last_txout_nonce=last_txout_nonce,
hash_link=prefix_to_hash_link(bitcoin_data.tx_type.pack(gentx)[:-32-8-4], cls.gentx_before_refhash),
merkle_link=bitcoin_data.calculate_merkle_link([None] + other_transaction_hashes, 0),
))
assert share.header == header # checks merkle_root
return share
return share_info, gentx, other_transaction_hashes, get_share
@classmethod
def get_ref_hash(cls, net, share_info, ref_merkle_link):
return pack.IntType(256).pack(bitcoin_data.check_merkle_link(bitcoin_data.hash256(cls.ref_type.pack(dict(
identifier=net.IDENTIFIER,
share_info=share_info,
))), ref_merkle_link))
__slots__ = 'net peer_addr contents min_header share_info hash_link merkle_link hash share_data max_target target timestamp previous_hash new_script desired_version gentx_hash header pow_hash header_hash new_transaction_hashes time_seen absheight abswork'.split(' ')
def __init__(self, net, peer_addr, contents):
self.net = net
self.peer_addr = peer_addr
self.contents = contents
self.min_header = contents['min_header']
self.share_info = contents['share_info']
self.hash_link = contents['hash_link']
self.merkle_link = contents['merkle_link']
if not (2 <= len(self.share_info['share_data']['coinbase']) <= 100):
raise ValueError('''bad coinbase size! %i bytes''' % (len(self.share_info['share_data']['coinbase']),))
if len(self.merkle_link['branch']) > 16:
raise ValueError('merkle branch too long!')
assert not self.hash_link['extra_data'], repr(self.hash_link['extra_data'])
self.share_data = self.share_info['share_data']
self.max_target = self.share_info['max_bits'].target
self.target = self.share_info['bits'].target
self.timestamp = self.share_info['timestamp']
self.previous_hash = self.share_data['previous_share_hash']
self.new_script = bitcoin_data.pubkey_hash_to_script2(self.share_data['pubkey_hash'])
self.desired_version = self.share_data['desired_version']
self.absheight = self.share_info['absheight']
self.abswork = self.share_info['abswork']
n = set()
for share_count, tx_count in self.iter_transaction_hash_refs():
assert share_count < 110
if share_count == 0:
n.add(tx_count)
assert n == set(range(len(self.share_info['new_transaction_hashes'])))
self.gentx_hash = check_hash_link(
self.hash_link,
self.get_ref_hash(net, self.share_info, contents['ref_merkle_link']) + pack.IntType(64).pack(self.contents['last_txout_nonce']) + pack.IntType(32).pack(0),
self.gentx_before_refhash,
)
merkle_root = bitcoin_data.check_merkle_link(self.gentx_hash, self.merkle_link)
self.header = dict(self.min_header, merkle_root=merkle_root)
self.pow_hash = net.PARENT.POW_FUNC(bitcoin_data.block_header_type.pack(self.header))
self.hash = self.header_hash = bitcoin_data.hash256(bitcoin_data.block_header_type.pack(self.header))
if self.target > net.MAX_TARGET:
from p2pool import p2p
raise p2p.PeerMisbehavingError('share target invalid')
if self.pow_hash > self.target:
from p2pool import p2p
raise p2p.PeerMisbehavingError('share PoW invalid')
self.new_transaction_hashes = self.share_info['new_transaction_hashes']
# XXX eww
self.time_seen = time.time()
def __repr__(self):
return 'Share' + repr((self.net, self.peer_addr, self.contents))
def as_share(self):
return dict(type=self.VERSION, contents=self.share_type.pack(self.contents))
def iter_transaction_hash_refs(self):
return zip(self.share_info['transaction_hash_refs'][::2], self.share_info['transaction_hash_refs'][1::2])
def check(self, tracker):
from p2pool import p2p
if self.share_data['previous_share_hash'] is not None:
previous_share = tracker.items[self.share_data['previous_share_hash']]
if type(self) is type(previous_share):
pass
elif type(self) is type(previous_share).SUCCESSOR:
if tracker.get_height(previous_share.hash) < self.net.CHAIN_LENGTH:
from p2pool import p2p
raise p2p.PeerMisbehavingError('switch without enough history')
# switch only valid if 85% of hashes in [self.net.CHAIN_LENGTH*9//10, self.net.CHAIN_LENGTH] for new version
counts = get_desired_version_counts(tracker,
tracker.get_nth_parent_hash(previous_share.hash, self.net.CHAIN_LENGTH*9//10), self.net.CHAIN_LENGTH//10)
if counts.get(self.VERSION, 0) < sum(counts.itervalues())*85//100:
raise p2p.PeerMisbehavingError('switch without enough hash power upgraded')
else:
raise p2p.PeerMisbehavingError('''%s can't follow %s''' % (type(self).__name__, type(previous_share).__name__))
other_tx_hashes = [tracker.items[tracker.get_nth_parent_hash(self.hash, share_count)].share_info['new_transaction_hashes'][tx_count] for share_count, tx_count in self.iter_transaction_hash_refs()]
share_info, gentx, other_tx_hashes2, get_share = self.generate_transaction(tracker, self.share_info['share_data'], self.header['bits'].target, self.share_info['timestamp'], self.share_info['bits'].target, self.contents['ref_merkle_link'], [(h, None) for h in other_tx_hashes], self.net, last_txout_nonce=self.contents['last_txout_nonce'])
assert other_tx_hashes2 == other_tx_hashes
if share_info != self.share_info:
raise ValueError('share_info invalid')
if bitcoin_data.hash256(bitcoin_data.tx_type.pack(gentx)) != self.gentx_hash:
raise ValueError('''gentx doesn't match hash_link''')
if bitcoin_data.calculate_merkle_link([None] + other_tx_hashes, 0) != self.merkle_link:
raise ValueError('merkle_link and other_tx_hashes do not match')
return gentx # only used by as_block
def get_other_tx_hashes(self, tracker):
parents_needed = max(share_count for share_count, tx_count in self.iter_transaction_hash_refs()) if self.share_info['transaction_hash_refs'] else 0
parents = tracker.get_height(self.hash) - 1
if parents < parents_needed:
return None
last_shares = list(tracker.get_chain(self.hash, parents_needed + 1))
return [last_shares[share_count].share_info['new_transaction_hashes'][tx_count] for share_count, tx_count in self.iter_transaction_hash_refs()]
def _get_other_txs(self, tracker, known_txs):
other_tx_hashes = self.get_other_tx_hashes(tracker)
if other_tx_hashes is None:
return None # not all parents present
if not all(tx_hash in known_txs for tx_hash in other_tx_hashes):
return None # not all txs present
return [known_txs[tx_hash] for tx_hash in other_tx_hashes]
def should_punish_reason(self, previous_block, bits, tracker, known_txs):
if (self.header['previous_block'], self.header['bits']) != (previous_block, bits) and self.header_hash != previous_block and self.peer_addr is not None:
return True, 'Block-stale detected! height(%x) < height(%x) or %08x != %08x' % (self.header['previous_block'], previous_block, self.header['bits'].bits, bits.bits)
if self.pow_hash <= self.header['bits'].target:
return -1, 'block solution'
other_txs = self._get_other_txs(tracker, known_txs)
if other_txs is None:
pass
else:
all_txs_size = sum(bitcoin_data.tx_type.packed_size(tx) for tx in other_txs)
if all_txs_size > 1000000:
return True, 'txs over block size limit'
new_txs_size = sum(bitcoin_data.tx_type.packed_size(known_txs[tx_hash]) for tx_hash in self.share_info['new_transaction_hashes'])
if new_txs_size > 50000:
return True, 'new txs over limit'
return False, None
def as_block(self, tracker, known_txs):
other_txs = self._get_other_txs(tracker, known_txs)
if other_txs is None:
return None # not all txs present
return dict(header=self.header, txs=[self.check(tracker)] + other_txs)
class WeightsSkipList(forest.TrackerSkipList):
# share_count, weights, total_weight
def get_delta(self, element):
from p2pool.bitcoin import data as bitcoin_data
share = self.tracker.items[element]
att = bitcoin_data.target_to_average_attempts(share.target)
return 1, {share.new_script: att*(65535-share.share_data['donation'])}, att*65535, att*share.share_data['donation']
def combine_deltas(self, (share_count1, weights1, total_weight1, total_donation_weight1), (share_count2, weights2, total_weight2, total_donation_weight2)):
return share_count1 + share_count2, math.add_dicts(weights1, weights2), total_weight1 + total_weight2, total_donation_weight1 + total_donation_weight2
def initial_solution(self, start, (max_shares, desired_weight)):
assert desired_weight % 65535 == 0, divmod(desired_weight, 65535)
return 0, None, 0, 0
def apply_delta(self, (share_count1, weights_list, total_weight1, total_donation_weight1), (share_count2, weights2, total_weight2, total_donation_weight2), (max_shares, desired_weight)):
if total_weight1 + total_weight2 > desired_weight and share_count2 == 1:
assert (desired_weight - total_weight1) % 65535 == 0
script, = weights2.iterkeys()
new_weights = {script: (desired_weight - total_weight1)//65535*weights2[script]//(total_weight2//65535)}
return share_count1 + share_count2, (weights_list, new_weights), desired_weight, total_donation_weight1 + (desired_weight - total_weight1)//65535*total_donation_weight2//(total_weight2//65535)
return share_count1 + share_count2, (weights_list, weights2), total_weight1 + total_weight2, total_donation_weight1 + total_donation_weight2
def judge(self, (share_count, weights_list, total_weight, total_donation_weight), (max_shares, desired_weight)):
if share_count > max_shares or total_weight > desired_weight:
return 1
elif share_count == max_shares or total_weight == desired_weight:
return 0
else:
return -1
def finalize(self, (share_count, weights_list, total_weight, total_donation_weight), (max_shares, desired_weight)):
assert share_count <= max_shares and total_weight <= desired_weight
assert share_count == max_shares or total_weight == desired_weight
return math.add_dicts(*math.flatten_linked_list(weights_list)), total_weight, total_donation_weight
class OkayTracker(forest.Tracker):
def __init__(self, net):
forest.Tracker.__init__(self, delta_type=forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs,
work=lambda share: bitcoin_data.target_to_average_attempts(share.target),
min_work=lambda share: bitcoin_data.target_to_average_attempts(share.max_target),
)))
self.net = net
self.verified = forest.SubsetTracker(delta_type=forest.get_attributedelta_type(dict(forest.AttributeDelta.attrs,
work=lambda share: bitcoin_data.target_to_average_attempts(share.target),
)), subset_of=self)
self.get_cumulative_weights = WeightsSkipList(self)
def attempt_verify(self, share):
if share.hash in self.verified.items:
return True
height, last = self.get_height_and_last(share.hash)
if height < self.net.CHAIN_LENGTH + 1 and last is not None:
raise AssertionError()
try:
share.check(self)
except:
log.err(None, 'Share check failed:')
return False
else:
self.verified.add(share)
return True
def think(self, block_rel_height_func, previous_block, bits, known_txs):
desired = set()
# O(len(self.heads))
# make 'unverified heads' set?
# for each overall head, attempt verification
# if it fails, attempt on parent, and repeat
# if no successful verification because of lack of parents, request parent
bads = set()
for head in set(self.heads) - set(self.verified.heads):
head_height, last = self.get_height_and_last(head)
for share in self.get_chain(head, head_height if last is None else min(5, max(0, head_height - self.net.CHAIN_LENGTH))):
if self.attempt_verify(share):
break
if share.hash in self.heads:
bads.add(share.hash)
else:
if last is not None:
desired.add((
self.items[random.choice(list(self.reverse[last]))].peer_addr,
last,
max(x.timestamp for x in self.get_chain(head, min(head_height, 5))),
min(x.target for x in self.get_chain(head, min(head_height, 5))),
))
for bad in bads:
assert bad not in self.verified.items
assert bad in self.heads
if p2pool.DEBUG:
print "BAD", bad
self.remove(bad)
# try to get at least CHAIN_LENGTH height for each verified head, requesting parents if needed
for head in list(self.verified.heads):
head_height, last_hash = self.verified.get_height_and_last(head)
last_height, last_last_hash = self.get_height_and_last(last_hash)
# XXX review boundary conditions
want = max(self.net.CHAIN_LENGTH - head_height, 0)
can = max(last_height - 1 - self.net.CHAIN_LENGTH, 0) if last_last_hash is not None else last_height
get = min(want, can)
#print 'Z', head_height, last_hash is None, last_height, last_last_hash is None, want, can, get
for share in self.get_chain(last_hash, get):
if not self.attempt_verify(share):
break
if head_height < self.net.CHAIN_LENGTH and last_last_hash is not None:
desired.add((
self.items[random.choice(list(self.verified.reverse[last_hash]))].peer_addr,
last_last_hash,
max(x.timestamp for x in self.get_chain(head, min(head_height, 5))),
min(x.target for x in self.get_chain(head, min(head_height, 5))),
))
# decide best tree
decorated_tails = sorted((self.score(max(self.verified.tails[tail_hash], key=self.verified.get_work), block_rel_height_func), tail_hash) for tail_hash in self.verified.tails)
if p2pool.DEBUG:
print len(decorated_tails), 'tails:'
for score, tail_hash in decorated_tails:
print format_hash(tail_hash), score
best_tail_score, best_tail = decorated_tails[-1] if decorated_tails else (None, None)
# decide best verified head
decorated_heads = sorted(((
self.verified.get_work(self.verified.get_nth_parent_hash(h, min(5, self.verified.get_height(h)))),
#self.items[h].peer_addr is None,
-self.items[h].should_punish_reason(previous_block, bits, self, known_txs)[0],
-self.items[h].time_seen,
), h) for h in self.verified.tails.get(best_tail, []))
if p2pool.DEBUG:
print len(decorated_heads), 'heads. Top 10:'
for score, head_hash in decorated_heads[-10:]:
print ' ', format_hash(head_hash), format_hash(self.items[head_hash].previous_hash), score
best_head_score, best = decorated_heads[-1] if decorated_heads else (None, None)
if best is not None:
best_share = self.items[best]
punish, punish_reason = best_share.should_punish_reason(previous_block, bits, self, known_txs)
if punish > 0:
print 'Punishing share for %r! Jumping from %s to %s!' % (punish_reason, format_hash(best), format_hash(best_share.previous_hash))
best = best_share.previous_hash
timestamp_cutoff = min(int(time.time()), best_share.timestamp) - 3600
target_cutoff = int(2**256//(self.net.SHARE_PERIOD*best_tail_score[1] + 1) * 2 + .5) if best_tail_score[1] is not None else 2**256-1
else:
timestamp_cutoff = int(time.time()) - 24*60*60
target_cutoff = 2**256-1
if p2pool.DEBUG:
print 'Desire %i shares. Cutoff: %s old diff>%.2f' % (len(desired), math.format_dt(time.time() - timestamp_cutoff), bitcoin_data.target_to_difficulty(target_cutoff))
for peer_addr, hash, ts, targ in desired:
print ' ', None if peer_addr is None else '%s:%i' % peer_addr, format_hash(hash), math.format_dt(time.time() - ts), bitcoin_data.target_to_difficulty(targ), ts >= timestamp_cutoff, targ <= target_cutoff
return best, [(peer_addr, hash) for peer_addr, hash, ts, targ in desired if ts >= timestamp_cutoff], decorated_heads
def score(self, share_hash, block_rel_height_func):
# returns approximate lower bound on chain's hashrate in the last self.net.CHAIN_LENGTH*15//16*self.net.SHARE_PERIOD time
head_height = self.verified.get_height(share_hash)
if head_height < self.net.CHAIN_LENGTH:
return head_height, None
end_point = self.verified.get_nth_parent_hash(share_hash, self.net.CHAIN_LENGTH*15//16)
block_height = max(block_rel_height_func(share.header['previous_block']) for share in
self.verified.get_chain(end_point, self.net.CHAIN_LENGTH//16))
return self.net.CHAIN_LENGTH, self.verified.get_delta(share_hash, end_point).work/((0 - block_height + 1)*self.net.PARENT.BLOCK_PERIOD)
def get_pool_attempts_per_second(tracker, previous_share_hash, dist, min_work=False, integer=False):
assert dist >= 2
near = tracker.items[previous_share_hash]
far = tracker.items[tracker.get_nth_parent_hash(previous_share_hash, dist - 1)]
attempts = tracker.get_delta(near.hash, far.hash).work if not min_work else tracker.get_delta(near.hash, far.hash).min_work
time = near.timestamp - far.timestamp
if time <= 0:
time = 1
if integer:
return attempts//time
return attempts/time
def get_average_stale_prop(tracker, share_hash, lookbehind):
stales = sum(1 for share in tracker.get_chain(share_hash, lookbehind) if share.share_data['stale_info'] is not None)
return stales/(lookbehind + stales)
def get_stale_counts(tracker, share_hash, lookbehind, rates=False):
res = {}
for share in tracker.get_chain(share_hash, lookbehind - 1):
res['good'] = res.get('good', 0) + bitcoin_data.target_to_average_attempts(share.target)
s = share.share_data['stale_info']
if s is not None:
res[s] = res.get(s, 0) + bitcoin_data.target_to_average_attempts(share.target)
if rates:
dt = tracker.items[share_hash].timestamp - tracker.items[tracker.get_nth_parent_hash(share_hash, lookbehind - 1)].timestamp
res = dict((k, v/dt) for k, v in res.iteritems())
return res
def get_user_stale_props(tracker, share_hash, lookbehind):
res = {}
for share in tracker.get_chain(share_hash, lookbehind - 1):
stale, total = res.get(share.share_data['pubkey_hash'], (0, 0))
total += 1
if share.share_data['stale_info'] is not None:
stale += 1
total += 1
res[share.share_data['pubkey_hash']] = stale, total
return dict((pubkey_hash, stale/total) for pubkey_hash, (stale, total) in res.iteritems())
def get_expected_payouts(tracker, best_share_hash, block_target, subsidy, net):
weights, total_weight, donation_weight = tracker.get_cumulative_weights(best_share_hash, min(tracker.get_height(best_share_hash), net.REAL_CHAIN_LENGTH), 65535*net.SPREAD*bitcoin_data.target_to_average_attempts(block_target))
res = dict((script, subsidy*weight//total_weight) for script, weight in weights.iteritems())
res[DONATION_SCRIPT] = res.get(DONATION_SCRIPT, 0) + subsidy - sum(res.itervalues())
return res
def get_desired_version_counts(tracker, best_share_hash, dist):
res = {}
for share in tracker.get_chain(best_share_hash, dist):
res[share.desired_version] = res.get(share.desired_version, 0) + bitcoin_data.target_to_average_attempts(share.target)
return res
def get_warnings(tracker, best_share, net, bitcoind_getinfo, bitcoind_work_value):
res = []
desired_version_counts = get_desired_version_counts(tracker, best_share,
min(net.CHAIN_LENGTH, 60*60//net.SHARE_PERIOD, tracker.get_height(best_share)))
majority_desired_version = max(desired_version_counts, key=lambda k: desired_version_counts[k])
if majority_desired_version > (Share.SUCCESSOR if Share.SUCCESSOR is not None else Share).VOTING_VERSION and desired_version_counts[majority_desired_version] > sum(desired_version_counts.itervalues())/2:
res.append('A MAJORITY OF SHARES CONTAIN A VOTE FOR AN UNSUPPORTED SHARE IMPLEMENTATION! (v%i with %i%% support)\n'
'An upgrade is likely necessary. Check http://p2pool.forre.st/ for more information.' % (
majority_desired_version, 100*desired_version_counts[majority_desired_version]/sum(desired_version_counts.itervalues())))
if bitcoind_getinfo['errors'] != '':
if 'This is a pre-release test build' not in bitcoind_getinfo['errors']:
res.append('(from bitcoind) %s' % (bitcoind_getinfo['errors'],))
version_warning = getattr(net, 'VERSION_WARNING', lambda v: None)(bitcoind_getinfo['version'])
if version_warning is not None:
res.append(version_warning)
if time.time() > bitcoind_work_value['last_update'] + 60:
res.append('''LOST CONTACT WITH BITCOIND for %s! Check that it isn't frozen or dead!''' % (math.format_dt(time.time() - bitcoind_work_value['last_update']),))
return res
def format_hash(x):
if x is None:
return 'xxxxxxxx'
return '%08x' % (x % 2**32)
class ShareStore(object):
def __init__(self, prefix, net, share_cb, verified_hash_cb):
self.dirname = os.path.dirname(os.path.abspath(prefix))
self.filename = os.path.basename(os.path.abspath(prefix))
self.net = net
known = {}
filenames, next = self.get_filenames_and_next()
for filename in filenames:
share_hashes, verified_hashes = known.setdefault(filename, (set(), set()))
with open(filename, 'rb') as f:
for line in f:
try:
type_id_str, data_hex = line.strip().split(' ')
type_id = int(type_id_str)
if type_id == 0:
pass
elif type_id == 1:
pass
elif type_id == 2:
verified_hash = int(data_hex, 16)
verified_hash_cb(verified_hash)
verified_hashes.add(verified_hash)
elif type_id == 5:
raw_share = share_type.unpack(data_hex.decode('hex'))
if raw_share['type'] < Share.VERSION:
continue
share = load_share(raw_share, self.net, None)
share_cb(share)
share_hashes.add(share.hash)
else:
raise NotImplementedError("share type %i" % (type_id,))
except Exception:
log.err(None, "HARMLESS error while reading saved shares, continuing where left off:")
self.known = known # filename -> (set of share hashes, set of verified hashes)
self.known_desired = dict((k, (set(a), set(b))) for k, (a, b) in known.iteritems())
def _add_line(self, line):
filenames, next = self.get_filenames_and_next()
if filenames and os.path.getsize(filenames[-1]) < 10e6:
filename = filenames[-1]
else:
filename = next
with open(filename, 'ab') as f:
f.write(line + '\n')
return filename
def add_share(self, share):
for filename, (share_hashes, verified_hashes) in self.known.iteritems():
if share.hash in share_hashes:
break
else:
filename = self._add_line("%i %s" % (5, share_type.pack(share.as_share()).encode('hex')))
share_hashes, verified_hashes = self.known.setdefault(filename, (set(), set()))
share_hashes.add(share.hash)
share_hashes, verified_hashes = self.known_desired.setdefault(filename, (set(), set()))
share_hashes.add(share.hash)
def add_verified_hash(self, share_hash):
for filename, (share_hashes, verified_hashes) in self.known.iteritems():
if share_hash in verified_hashes:
break
else:
filename = self._add_line("%i %x" % (2, share_hash))
share_hashes, verified_hashes = self.known.setdefault(filename, (set(), set()))
verified_hashes.add(share_hash)
share_hashes, verified_hashes = self.known_desired.setdefault(filename, (set(), set()))
verified_hashes.add(share_hash)
def get_filenames_and_next(self):
suffixes = sorted(int(x[len(self.filename):]) for x in os.listdir(self.dirname) if x.startswith(self.filename) and x[len(self.filename):].isdigit())
return [os.path.join(self.dirname, self.filename + str(suffix)) for suffix in suffixes], os.path.join(self.dirname, self.filename + (str(suffixes[-1] + 1) if suffixes else str(0)))
def forget_share(self, share_hash):
for filename, (share_hashes, verified_hashes) in self.known_desired.iteritems():
if share_hash in share_hashes:
share_hashes.remove(share_hash)
self.check_remove()
def forget_verified_share(self, share_hash):
for filename, (share_hashes, verified_hashes) in self.known_desired.iteritems():
if share_hash in verified_hashes:
verified_hashes.remove(share_hash)
self.check_remove()
def check_remove(self):
to_remove = set()
for filename, (share_hashes, verified_hashes) in self.known_desired.iteritems():
#print filename, len(share_hashes) + len(verified_hashes)
if not share_hashes and not verified_hashes:
to_remove.add(filename)
for filename in to_remove:
self.known.pop(filename)
self.known_desired.pop(filename)
os.remove(filename)
print "REMOVED", filename
| gpl-3.0 |
kalahbrown/HueBigSQL | apps/sqoop/src/sqoop/api/link.py | 32 | 5440 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
import socket
from django.utils.encoding import smart_str
from django.utils.translation import ugettext as _
from sqoop import client, conf
from sqoop.client.exception import SqoopException
from decorators import get_link_or_exception
from desktop.lib.django_util import JsonResponse
from desktop.lib.exceptions import StructuredException
from desktop.lib.rest.http_client import RestException
from exception import handle_rest_exception
from utils import list_to_dict
from django.views.decorators.cache import never_cache
__all__ = ['get_links', 'create_link', 'update_link', 'link', 'links', 'link_clone', 'link_delete']
LOG = logging.getLogger(__name__)
@never_cache
def get_links(request):
response = {
'status': 0,
'errors': None,
'links': []
}
try:
c = client.SqoopClient(conf.SERVER_URL.get(), request.user.username, request.LANGUAGE_CODE)
response['links'] = list_to_dict(c.get_links())
except RestException, e:
response.update(handle_rest_exception(e, _('Could not get links.')))
return JsonResponse(response)
@never_cache
def create_link(request):
response = {
'status': 0,
'errors': None,
'link': None
}
if 'link' not in request.POST:
raise StructuredException(code="INVALID_REQUEST_ERROR", message=_('Error saving link'), data={'errors': 'Link is missing.'}, error_code=400)
d = json.loads(smart_str(request.POST['link']))
link = client.Link.from_dict(d)
try:
c = client.SqoopClient(conf.SERVER_URL.get(), request.user.username, request.LANGUAGE_CODE)
response['link'] = c.create_link(link).to_dict()
except RestException, e:
response.update(handle_rest_exception(e, _('Could not create link.')))
except SqoopException, e:
response['status'] = 100
response['errors'] = e.to_dict()
return JsonResponse(response)
@never_cache
def update_link(request, link):
response = {
'status': 0,
'errors': None,
'link': None
}
if 'link' not in request.POST:
raise StructuredException(code="INVALID_REQUEST_ERROR", message=_('Error saving link'), data={'errors': 'Link is missing.'}, error_code=400)
link.update_from_dict(json.loads(smart_str(request.POST['link'])))
try:
c = client.SqoopClient(conf.SERVER_URL.get(), request.user.username, request.LANGUAGE_CODE)
response['link'] = c.update_link(link).to_dict()
except RestException, e:
response.update(handle_rest_exception(e, _('Could not update link.')))
except SqoopException, e:
response['status'] = 100
response['errors'] = e.to_dict()
return JsonResponse(response)
@never_cache
def links(request):
if request.method == 'GET':
return get_links(request)
elif request.method == 'POST':
return create_link(request)
else:
raise StructuredException(code="INVALID_METHOD", message=_('GET or POST request required.'), error_code=405)
@never_cache
@get_link_or_exception()
def link(request, link):
response = {
'status': 0,
'errors': None,
'link': None
}
if request.method == 'GET':
response['link'] = link.to_dict()
return JsonResponse(response)
elif request.method == 'POST':
return update_link(request, link)
else:
raise StructuredException(code="INVALID_METHOD", message=_('GET or POST request required.'), error_code=405)
@never_cache
@get_link_or_exception()
def link_clone(request, link):
if request.method != 'POST':
raise StructuredException(code="INVALID_METHOD", message=_('POST request required.'), error_code=405)
response = {
'status': 0,
'errors': None,
'link': None
}
link.id = -1
link.name = '%s-copy' % link.name
try:
c = client.SqoopClient(conf.SERVER_URL.get(), request.user.username, request.LANGUAGE_CODE)
response['link'] = c.create_link(link).to_dict()
except RestException, e:
response.update(handle_rest_exception(e, _('Could not clone link.')))
except SqoopException, e:
response['status'] = 100
response['errors'] = e.to_dict()
return JsonResponse(response)
@never_cache
@get_link_or_exception()
def link_delete(request, link):
if request.method != 'POST':
raise StructuredException(code="INVALID_METHOD", message=_('POST request required.'), error_code=405)
response = {
'status': 0,
'errors': None
}
try:
c = client.SqoopClient(conf.SERVER_URL.get(), request.user.username, request.LANGUAGE_CODE)
c.delete_link(link)
except RestException, e:
response.update(handle_rest_exception(e, _('Could not delete link.')))
except SqoopException, e:
response['status'] = 100
response['errors'] = e.to_dict()
return JsonResponse(response)
| apache-2.0 |
DanielSBrown/osf.io | scripts/migrate_github_oauth_settings.py | 55 | 7419 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Script to migrate addongithubusersettings and create and attach addongithuboauthsettings.
Log:
Executed on production by SL on 2014-10-05 at 23:11 EST. 269 AddonGithubUserSettings records
were successfully migrated. 3 records with invalidated credentials were skipped.
Script was modified by @chennan47 to handle records with invalidated credentials by unsetting
the oauth_access_token, oauth_token_type, and github_user fields. Run on production by @sloria
on 2014-10-07 at 12:34 EST. 3 records with invalidated credentials were migrated.
"""
import sys
import mock
from nose.tools import *
import github3
from framework.mongo import database
from website.app import init_app
from tests.base import OsfTestCase
from website.addons.github.api import GitHub
from website.addons.github.model import AddonGitHubOauthSettings, AddonGitHubUserSettings
def do_migration(records, dry=True):
count, inval_cred_handled = 0, 0
for raw_user_settings in records:
# False if missing, None if field exists
access_token = raw_user_settings.get('oauth_access_token', False)
token_type = raw_user_settings.get('oauth_token_type', False)
github_user_name = raw_user_settings.get('github_user', False)
if access_token and token_type and github_user_name:
if not dry:
gh = GitHub(access_token, token_type)
try:
github_user = gh.user()
except github3.models.GitHubError:
AddonGitHubUserSettings._storage[0].store.update(
{'_id': raw_user_settings['_id']},
{
'$unset': {
"oauth_access_token" : True,
"oauth_token_type" : True,
"github_user" : True,
},
}
)
inval_cred_handled += 1
print('invalidated credentials handled record: {}'.format(raw_user_settings['_id']))
continue
oauth_settings = AddonGitHubOauthSettings()
oauth_settings.github_user_id = str(github_user.id)
oauth_settings.save()
oauth_settings.oauth_access_token = access_token
oauth_settings.oauth_token_type = token_type
oauth_settings.github_user_name = github_user_name
oauth_settings.save()
AddonGitHubUserSettings._storage[0].store.update(
{'_id': raw_user_settings['_id']},
{
'$unset': {
'oauth_access_token': True,
'oauth_token_type': True,
'github_user': True,
},
'$set': {
'oauth_settings': oauth_settings.github_user_id,
}
}
)
AddonGitHubOauthSettings._storage[0].store.update(
{'github_user_id': oauth_settings.github_user_id},
{
'$push': {
'__backrefs.accessed.addongithubusersettings.oauth_settings': raw_user_settings['_id'],
}
}
)
print('Finished migrating AddonGithubUserSettings record: {}'.format(raw_user_settings['_id']))
count += 1
# Old fields have not yet been unset
elif None in set([access_token, token_type, github_user_name]):
if not dry:
AddonGitHubUserSettings._storage[0].store.update(
{'_id': raw_user_settings['_id']},
{
'$unset': {
'oauth_access_token': True,
'oauth_token_type': True,
'github_user': True,
},
}
)
print('Unset oauth_access_token and oauth_token_type: {0}'.format(raw_user_settings['_id']))
count += 1
return count, inval_cred_handled
def get_user_settings():
# ... return the StoredObjects to migrate ...
return database.addongithubusersettings.find()
def main():
init_app('website.settings', set_backends=True, routes=True) # Sets the storage backends on all models
user_settings = get_user_settings()
n_migrated, n_inval_cred_handled = do_migration(user_settings, dry='dry' in sys.argv)
print("Total migrated records: {}".format(n_migrated))
print("Total invalidated credentials handled records: {}".format(n_inval_cred_handled))
class TestMigrateGitHubOauthSettings(OsfTestCase):
def setUp(self):
super(TestMigrateGitHubOauthSettings, self).setUp()
self.mongo_collection = database.addongithubusersettings
self.user_settings = {
"__backrefs" : {
"authorized" : {
"addongithubnodesettings" : {
"user_settings" : [
"678910",
]
}
}
},
"_id" : "123456",
"_version" : 1,
"deletedAddonGitHubUserSettings" : False,
"github_user" : "testing user",
"oauth_access_token" : "testing acess token",
"oauth_state" : "no state",
"oauth_token_type" : "testing token type",
"owner" : "abcde"
}
self.mongo_collection.insert(self.user_settings)
def test_get_user_settings(self):
records = list(get_user_settings())
assert_equal(1, len(records))
assert_equal(
records[0]['github_user'],
self.user_settings['github_user']
)
assert_equal(
records[0]['oauth_state'],
self.user_settings['oauth_state']
)
assert_equal(
records[0]['oauth_access_token'],
self.user_settings['oauth_access_token']
)
assert_equal(
records[0]['oauth_token_type'],
self.user_settings['oauth_token_type']
)
@mock.patch('website.addons.github.api.GitHub.user')
def test_do_migration(self, mock_github_user):
user = mock.Mock()
user.id = "testing user id"
mock_github_user.return_value = user
do_migration(get_user_settings())
user_settings = AddonGitHubUserSettings.find()[0]
assert_true(user_settings.oauth_settings)
assert_true(user_settings.oauth_state)
assert_equal(
user_settings.oauth_settings.github_user_name,
"testing user"
)
assert_equal(
user_settings.oauth_settings.oauth_access_token,
"testing acess token"
)
assert_equal(
user_settings.oauth_settings.oauth_token_type,
"testing token type"
)
assert_equal(
user_settings.oauth_settings.github_user_id,
"testing user id"
)
def tearDown(self):
self.mongo_collection.remove()
if __name__ == '__main__':
main()
| apache-2.0 |
adityacs/ansible | lib/ansible/modules/notification/twilio.py | 38 | 5973 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2015, Matt Makai <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
version_added: "1.6"
module: twilio
short_description: Sends a text message to a mobile phone through Twilio.
description:
- Sends a text message to a phone number through the Twilio messaging API.
notes:
  - This module is non-idempotent because it sends a text message through the
    external API. It is idempotent only in the case that the module fails.
- Like the other notification modules, this one requires an external
dependency to work. In this case, you'll need a Twilio account with
a purchased or verified phone number to send the text message.
options:
account_sid:
description:
      user's Twilio account SID found on the account page
required: true
auth_token:
description: user's Twilio authentication token
required: true
msg:
description:
the body of the text message
required: true
to_number:
description:
one or more phone numbers to send the text message to,
format +15551112222
required: true
from_number:
description:
the Twilio number to send the text message from, format +15551112222
required: true
media_url:
description:
a URL with a picture, video or sound clip to send with an MMS
(multimedia message) instead of a plain SMS
required: false
author: "Matt Makai (@makaimc)"
'''
EXAMPLES = '''
# send an SMS about the build status to (555) 303 5681
# note: replace account_sid and auth_token values with your credentials
# and you have to have the 'from_number' on your Twilio account
- twilio:
msg: All servers with webserver role are now configured.
account_sid: ACXXXXXXXXXXXXXXXXX
auth_token: ACXXXXXXXXXXXXXXXXX
from_number: +15552014545
to_number: +15553035681
delegate_to: localhost
# send an SMS to multiple phone numbers about the deployment
# note: replace account_sid and auth_token values with your credentials
# and you have to have the 'from_number' on your Twilio account
- twilio:
msg: This server configuration is now complete.
account_sid: ACXXXXXXXXXXXXXXXXX
auth_token: ACXXXXXXXXXXXXXXXXX
from_number: +15553258899
to_number:
- +15551113232
- +12025551235
- +19735559010
delegate_to: localhost
# send an MMS to a single recipient with an update on the deployment
# and an image of the results
# note: replace account_sid and auth_token values with your credentials
# and you have to have the 'from_number' on your Twilio account
- twilio:
msg: Deployment complete!
account_sid: ACXXXXXXXXXXXXXXXXX
auth_token: ACXXXXXXXXXXXXXXXXX
from_number: +15552014545
to_number: +15553035681
media_url: https://demo.twilio.com/logo.png
delegate_to: localhost
'''
# =======================================
# twilio module support methods
#
import urllib
def post_twilio_api(module, account_sid, auth_token, msg, from_number,
to_number, media_url=None):
URI = "https://api.twilio.com/2010-04-01/Accounts/%s/Messages.json" \
% (account_sid,)
AGENT = "Ansible"
data = {'From':from_number, 'To':to_number, 'Body':msg}
if media_url:
data['MediaUrl'] = media_url
encoded_data = urllib.urlencode(data)
headers = {'User-Agent': AGENT,
'Content-type': 'application/x-www-form-urlencoded',
'Accept': 'application/json',
}
# Hack module params to have the Basic auth params that fetch_url expects
module.params['url_username'] = account_sid.replace('\n', '')
module.params['url_password'] = auth_token.replace('\n', '')
return fetch_url(module, URI, data=encoded_data, headers=headers)
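# For reference, the request built above is roughly equivalent to the
# following (account SID, token and numbers illustrative):
#   curl -u 'ACXXXX:auth_token' \
#        --data-urlencode 'From=+15552014545' \
#        --data-urlencode 'To=+15553035681' \
#        --data-urlencode 'Body=hello' \
#        https://api.twilio.com/2010-04-01/Accounts/ACXXXX/Messages.json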
# =======================================
# Main
#
def main():
module = AnsibleModule(
argument_spec=dict(
account_sid=dict(required=True),
auth_token=dict(required=True, no_log=True),
msg=dict(required=True),
from_number=dict(required=True),
to_number=dict(required=True),
media_url=dict(default=None, required=False),
),
supports_check_mode=True
)
account_sid = module.params['account_sid']
auth_token = module.params['auth_token']
msg = module.params['msg']
from_number = module.params['from_number']
to_number = module.params['to_number']
media_url = module.params['media_url']
if not isinstance(to_number, list):
to_number = [to_number]
for number in to_number:
r, info = post_twilio_api(module, account_sid, auth_token, msg,
from_number, number, media_url)
if info['status'] not in [200, 201]:
body_message = "unknown error"
if 'body' in info:
body = json.loads(info['body'])
body_message = body['message']
module.fail_json(msg="unable to send message to %s: %s" % (number, body_message))
module.exit_json(msg=msg, changed=False)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
if __name__ == '__main__':
main()
| gpl-3.0 |
Teamxrtc/webrtc-streaming-node | third_party/depot_tools/external_bin/gsutil/gsutil_4.15/gsutil/third_party/boto/boto/cloudhsm/__init__.py | 111 | 1654 | # Copyright (c) 2015 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.regioninfo import RegionInfo, get_regions
def regions():
"""
Get all available regions for the AWS CloudHSM service.
:rtype: list
:return: A list of :class:`boto.regioninfo.RegionInfo`
"""
from boto.cloudhsm.layer1 import CloudHSMConnection
return get_regions('cloudhsm', connection_cls=CloudHSMConnection)
def connect_to_region(region_name, **kw_params):
for region in regions():
if region.name == region_name:
return region.connect(**kw_params)
return None
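# Example usage (region name illustrative; requires valid AWS credentials):
#   conn = connect_to_region('us-east-1')
#   if conn is None:
#       raise ValueError('unsupported CloudHSM region')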
| mit |
xen0l/ansible | lib/ansible/module_utils/facts/hardware/freebsd.py | 101 | 7355 | # This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import json
import re
from ansible.module_utils.facts.hardware.base import Hardware, HardwareCollector
from ansible.module_utils.facts.timeout import TimeoutError, timeout
from ansible.module_utils.facts.utils import get_file_content, get_mount_size
class FreeBSDHardware(Hardware):
"""
FreeBSD-specific subclass of Hardware. Defines memory and CPU facts:
- memfree_mb
- memtotal_mb
- swapfree_mb
- swaptotal_mb
- processor (a list)
- processor_cores
- processor_count
- devices
"""
platform = 'FreeBSD'
DMESG_BOOT = '/var/run/dmesg.boot'
def populate(self, collected_facts=None):
hardware_facts = {}
cpu_facts = self.get_cpu_facts()
memory_facts = self.get_memory_facts()
dmi_facts = self.get_dmi_facts()
device_facts = self.get_device_facts()
mount_facts = {}
try:
mount_facts = self.get_mount_facts()
except TimeoutError:
pass
hardware_facts.update(cpu_facts)
hardware_facts.update(memory_facts)
hardware_facts.update(dmi_facts)
hardware_facts.update(device_facts)
hardware_facts.update(mount_facts)
return hardware_facts
def get_cpu_facts(self):
cpu_facts = {}
cpu_facts['processor'] = []
sysctl = self.module.get_bin_path('sysctl')
if sysctl:
rc, out, err = self.module.run_command("%s -n hw.ncpu" % sysctl, check_rc=False)
cpu_facts['processor_count'] = out.strip()
dmesg_boot = get_file_content(FreeBSDHardware.DMESG_BOOT)
if not dmesg_boot:
try:
rc, dmesg_boot, err = self.module.run_command(self.module.get_bin_path("dmesg"), check_rc=False)
except Exception:
dmesg_boot = ''
for line in dmesg_boot.splitlines():
if 'CPU:' in line:
cpu = re.sub(r'CPU:\s+', r"", line)
cpu_facts['processor'].append(cpu.strip())
if 'Logical CPUs per core' in line:
cpu_facts['processor_cores'] = line.split()[4]
return cpu_facts
def get_memory_facts(self):
memory_facts = {}
sysctl = self.module.get_bin_path('sysctl')
if sysctl:
rc, out, err = self.module.run_command("%s vm.stats" % sysctl, check_rc=False)
for line in out.splitlines():
data = line.split()
if 'vm.stats.vm.v_page_size' in line:
pagesize = int(data[1])
if 'vm.stats.vm.v_page_count' in line:
pagecount = int(data[1])
if 'vm.stats.vm.v_free_count' in line:
freecount = int(data[1])
memory_facts['memtotal_mb'] = pagesize * pagecount // 1024 // 1024
memory_facts['memfree_mb'] = pagesize * freecount // 1024 // 1024
swapinfo = self.module.get_bin_path('swapinfo')
if swapinfo:
# Get swapinfo. swapinfo output looks like:
# Device 1M-blocks Used Avail Capacity
# /dev/ada0p3 314368 0 314368 0%
#
rc, out, err = self.module.run_command("%s -k" % swapinfo)
lines = out.splitlines()
if len(lines[-1]) == 0:
lines.pop()
data = lines[-1].split()
if data[0] != 'Device':
memory_facts['swaptotal_mb'] = int(data[1]) // 1024
memory_facts['swapfree_mb'] = int(data[3]) // 1024
return memory_facts
@timeout()
def get_mount_facts(self):
mount_facts = {}
mount_facts['mounts'] = []
fstab = get_file_content('/etc/fstab')
if fstab:
for line in fstab.splitlines():
if line.startswith('#') or line.strip() == '':
continue
fields = re.sub(r'\s+', ' ', line).split()
mount_statvfs_info = get_mount_size(fields[1])
mount_info = {'mount': fields[1],
'device': fields[0],
'fstype': fields[2],
'options': fields[3]}
mount_info.update(mount_statvfs_info)
mount_facts['mounts'].append(mount_info)
return mount_facts
def get_device_facts(self):
device_facts = {}
sysdir = '/dev'
device_facts['devices'] = {}
drives = re.compile(r'(ada?\d+|da\d+|a?cd\d+)') # TODO: rc, disks, err = self.module.run_command("/sbin/sysctl kern.disks")
slices = re.compile(r'(ada?\d+s\d+\w*|da\d+s\d+\w*)')
if os.path.isdir(sysdir):
dirlist = sorted(os.listdir(sysdir))
for device in dirlist:
d = drives.match(device)
if d:
device_facts['devices'][d.group(1)] = []
s = slices.match(device)
if s:
device_facts['devices'][d.group(1)].append(s.group(1))
return device_facts
def get_dmi_facts(self):
''' learn dmi facts from system
Use dmidecode executable if available'''
dmi_facts = {}
# Fall back to using dmidecode, if available
dmi_bin = self.module.get_bin_path('dmidecode')
DMI_DICT = dict(
bios_date='bios-release-date',
bios_version='bios-version',
form_factor='chassis-type',
product_name='system-product-name',
product_serial='system-serial-number',
product_uuid='system-uuid',
product_version='system-version',
system_vendor='system-manufacturer'
)
for (k, v) in DMI_DICT.items():
if dmi_bin is not None:
(rc, out, err) = self.module.run_command('%s -s %s' % (dmi_bin, v))
if rc == 0:
# Strip out commented lines (specific dmidecode output)
# FIXME: why add the fact and then test if it is json?
dmi_facts[k] = ''.join([line for line in out.splitlines() if not line.startswith('#')])
try:
json.dumps(dmi_facts[k])
except UnicodeDecodeError:
dmi_facts[k] = 'NA'
else:
dmi_facts[k] = 'NA'
else:
dmi_facts[k] = 'NA'
return dmi_facts
class FreeBSDHardwareCollector(HardwareCollector):
_fact_class = FreeBSDHardware
_platform = 'FreeBSD'
| gpl-3.0 |
crosick/zhishu | ENV/lib/python2.7/site-packages/pip/vcs/subversion.py | 280 | 10468 | from __future__ import absolute_import
import logging
import os
import re
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip.index import Link
from pip.utils import rmtree, display_path
from pip.utils.logging import indent_log
from pip.vcs import vcs, VersionControl
_svn_xml_url_re = re.compile('url="([^"]+)"')
_svn_rev_re = re.compile('committed-rev="(\d+)"')
_svn_url_re = re.compile(r'URL: (.+)')
_svn_revision_re = re.compile(r'Revision: (.+)')
_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"')
_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>')
logger = logging.getLogger(__name__)
class Subversion(VersionControl):
name = 'svn'
dirname = '.svn'
repo_name = 'checkout'
schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn')
def get_info(self, location):
"""Returns (url, revision), where both are strings"""
assert not location.rstrip('/').endswith(self.dirname), \
'Bad directory: %s' % location
output = self.run_command(
['info', location],
show_stdout=False,
extra_environ={'LANG': 'C'},
)
match = _svn_url_re.search(output)
if not match:
logger.warning(
'Cannot determine URL of svn checkout %s',
display_path(location),
)
logger.debug('Output that cannot be parsed: \n%s', output)
return None, None
url = match.group(1).strip()
match = _svn_revision_re.search(output)
if not match:
logger.warning(
'Cannot determine revision of svn checkout %s',
display_path(location),
)
logger.debug('Output that cannot be parsed: \n%s', output)
return url, None
return url, match.group(1)
def export(self, location):
"""Export the svn repository at the url to the destination location"""
url, rev = self.get_url_rev()
rev_options = get_rev_options(url, rev)
logger.info('Exporting svn repository %s to %s', url, location)
with indent_log():
if os.path.exists(location):
# Subversion doesn't like to check out over an existing
# directory --force fixes this, but was only added in svn 1.5
rmtree(location)
self.run_command(
['export'] + rev_options + [url, location],
show_stdout=False)
def switch(self, dest, url, rev_options):
self.run_command(['switch'] + rev_options + [url, dest])
def update(self, dest, rev_options):
self.run_command(['update'] + rev_options + [dest])
def obtain(self, dest):
url, rev = self.get_url_rev()
rev_options = get_rev_options(url, rev)
if rev:
rev_display = ' (to revision %s)' % rev
else:
rev_display = ''
if self.check_destination(dest, url, rev_options, rev_display):
logger.info(
'Checking out %s%s to %s',
url,
rev_display,
display_path(dest),
)
self.run_command(['checkout', '-q'] + rev_options + [url, dest])
def get_location(self, dist, dependency_links):
for url in dependency_links:
egg_fragment = Link(url).egg_fragment
if not egg_fragment:
continue
if '-' in egg_fragment:
# FIXME: will this work when a package has - in the name?
key = '-'.join(egg_fragment.split('-')[:-1]).lower()
else:
key = egg_fragment
if key == dist.key:
return url.split('#', 1)[0]
return None
def get_revision(self, location):
"""
Return the maximum revision for all files under a given location
"""
# Note: taken from setuptools.command.egg_info
revision = 0
for base, dirs, files in os.walk(location):
if self.dirname not in dirs:
dirs[:] = []
continue # no sense walking uncontrolled subdirs
dirs.remove(self.dirname)
entries_fn = os.path.join(base, self.dirname, 'entries')
if not os.path.exists(entries_fn):
# FIXME: should we warn?
continue
dirurl, localrev = self._get_svn_url_rev(base)
if base == location:
base_url = dirurl + '/' # save the root url
elif not dirurl or not dirurl.startswith(base_url):
dirs[:] = []
continue # not part of the same svn tree, skip it
revision = max(revision, localrev)
return revision
def get_url_rev(self):
        # hotfix: the svn+ prefix was stripped from svn+ssh:// URLs; re-add it
url, rev = super(Subversion, self).get_url_rev()
if url.startswith('ssh://'):
url = 'svn+' + url
return url, rev
def get_url(self, location):
# In cases where the source is in a subdirectory, not alongside
# setup.py we have to look up in the location until we find a real
# setup.py
orig_location = location
while not os.path.exists(os.path.join(location, 'setup.py')):
last_location = location
location = os.path.dirname(location)
if location == last_location:
# We've traversed up to the root of the filesystem without
# finding setup.py
logger.warning(
"Could not find setup.py for directory %s (tried all "
"parent directories)",
orig_location,
)
return None
return self._get_svn_url_rev(location)[0]
def _get_svn_url_rev(self, location):
from pip.exceptions import InstallationError
with open(os.path.join(location, self.dirname, 'entries')) as f:
data = f.read()
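        # Plain-text 'entries' files carry a format number (8/9/10 for
        # svn 1.4/1.5/1.6); svn <= 1.3 used XML entries; svn >= 1.7 keeps
        # metadata in sqlite, hence the `svn info --xml` fallback below.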
if (data.startswith('8') or
data.startswith('9') or
data.startswith('10')):
data = list(map(str.splitlines, data.split('\n\x0c\n')))
del data[0][0] # get rid of the '8'
url = data[0][3]
revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0]
elif data.startswith('<?xml'):
match = _svn_xml_url_re.search(data)
if not match:
raise ValueError('Badly formatted data: %r' % data)
url = match.group(1) # get repository URL
revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0]
else:
try:
# subversion >= 1.7
xml = self.run_command(
['info', '--xml', location],
show_stdout=False,
)
url = _svn_info_xml_url_re.search(xml).group(1)
revs = [
int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml)
]
except InstallationError:
url, revs = None, []
if revs:
rev = max(revs)
else:
rev = 0
return url, rev
def get_tag_revs(self, svn_tag_url):
stdout = self.run_command(['ls', '-v', svn_tag_url], show_stdout=False)
results = []
for line in stdout.splitlines():
parts = line.split()
rev = int(parts[0])
tag = parts[-1].strip('/')
results.append((tag, rev))
return results
def find_tag_match(self, rev, tag_revs):
best_match_rev = None
best_tag = None
for tag, tag_rev in tag_revs:
if (tag_rev > rev and
(best_match_rev is None or best_match_rev > tag_rev)):
# FIXME: Is best_match > tag_rev really possible?
# or is it a sign something is wacky?
best_match_rev = tag_rev
best_tag = tag
return best_tag
def get_src_requirement(self, dist, location, find_tags=False):
repo = self.get_url(location)
if repo is None:
return None
parts = repo.split('/')
# FIXME: why not project name?
egg_project_name = dist.egg_name().split('-', 1)[0]
rev = self.get_revision(location)
if parts[-2] in ('tags', 'tag'):
# It's a tag, perfect!
full_egg_name = '%s-%s' % (egg_project_name, parts[-1])
elif parts[-2] in ('branches', 'branch'):
# It's a branch :(
full_egg_name = '%s-%s-r%s' % (dist.egg_name(), parts[-1], rev)
elif parts[-1] == 'trunk':
# Trunk :-/
full_egg_name = '%s-dev_r%s' % (dist.egg_name(), rev)
if find_tags:
tag_url = '/'.join(parts[:-1]) + '/tags'
tag_revs = self.get_tag_revs(tag_url)
match = self.find_tag_match(rev, tag_revs)
if match:
                    logger.info(
                        'trunk checkout %s seems to be equivalent to tag %s',
                        display_path(location),
                        match,
                    )
repo = '%s/%s' % (tag_url, match)
full_egg_name = '%s-%s' % (egg_project_name, match)
else:
# Don't know what it is
logger.warning(
'svn URL does not fit normal structure (tags/branches/trunk): '
'%s',
repo,
)
full_egg_name = '%s-dev_r%s' % (egg_project_name, rev)
return 'svn+%s@%s#egg=%s' % (repo, rev, full_egg_name)
def get_rev_options(url, rev):
if rev:
rev_options = ['-r', rev]
else:
rev_options = []
r = urllib_parse.urlsplit(url)
if hasattr(r, 'username'):
# >= Python-2.5
username, password = r.username, r.password
else:
netloc = r[1]
if '@' in netloc:
auth = netloc.split('@')[0]
if ':' in auth:
username, password = auth.split(':', 1)
else:
username, password = auth, None
else:
username, password = None, None
if username:
rev_options += ['--username', username]
if password:
rev_options += ['--password', password]
return rev_options
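# e.g. get_rev_options('https://user:[email protected]/repo', '1234')
# -> ['-r', '1234', '--username', 'user', '--password', 'pass']
# (host and credentials illustrative)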
vcs.register(Subversion)
| mit |
CodeNameGhost/shiva | thirdparty/scapy/layers/tls/automaton_cli.py | 1 | 32357 | ## This file is part of Scapy
## Copyright (C) 2007, 2008, 2009 Arnaud Ebalard
## 2015, 2016, 2017 Maxence Tury
## This program is published under a GPLv2 license
"""
TLS client automaton. This makes for a primitive TLS stack.
Obviously you need rights for network access.
We support versions SSLv2 to TLS 1.2, along with many features.
There is no session resumption mechanism for now.
In order to run a client to tcp/50000 with one cipher suite of your choice:
> from scapy.all import *
> ch = TLSClientHello(ciphers=<int code of the cipher suite>)
> t = TLSClientAutomaton(dport=50000, client_hello=ch)
> t.run()
"""
from __future__ import print_function
import socket
from scapy.utils import randstring
from scapy.automaton import ATMT
from scapy.layers.tls.automaton import _TLSAutomaton
from scapy.layers.tls.basefields import _tls_version, _tls_version_options
from scapy.layers.tls.session import tlsSession
from scapy.layers.tls.extensions import (TLS_Ext_SupportedGroups,
TLS_Ext_SupportedVersions,
TLS_Ext_SignatureAlgorithms,
TLS_Ext_ServerName, ServerName)
from scapy.layers.tls.handshake import *
from scapy.layers.tls.handshake_sslv2 import *
from scapy.layers.tls.keyexchange_tls13 import (TLS_Ext_KeyShare_CH,
KeyShareEntry)
from scapy.layers.tls.record import (TLS, TLSAlert, TLSChangeCipherSpec,
TLSApplicationData)
class TLSClientAutomaton(_TLSAutomaton):
"""
A simple TLS test client automaton. Try to overload some states or
conditions and see what happens on the other side.
    Rather than interrupting the process, the best way to stop this client
    is to type 'quit'. The word itself is not sent to the server.
_'mycert' and 'mykey' may be provided as filenames. They will be used in
the handshake, should the server ask for client authentication.
_'server_name' does not need to be set.
_'client_hello' may hold a TLSClientHello or SSLv2ClientHello to be sent
to the server. This is particularly useful for extensions tweaking.
_'version' is a quicker way to advertise a protocol version ("sslv2",
"tls1", "tls12", etc.) It may be overriden by the previous 'client_hello'.
_'data' is a list of raw data to be sent to the server once the handshake
has been completed. Both 'stop_server' and 'quit' will work this way.
"""
def parse_args(self, server="127.0.0.1", dport=4433, server_name=None,
mycert=None, mykey=None,
client_hello=None, version=None,
data=None,
**kargs):
super(TLSClientAutomaton, self).parse_args(mycert=mycert,
mykey=mykey,
**kargs)
tmp = socket.getaddrinfo(server, dport)
self.remote_name = None
try:
if ':' in server:
socket.inet_pton(socket.AF_INET6, server)
else:
socket.inet_pton(socket.AF_INET, server)
except:
self.remote_name = socket.getfqdn(server)
if self.remote_name != server:
tmp = socket.getaddrinfo(self.remote_name, dport)
if server_name:
self.remote_name = server_name
self.remote_family = tmp[0][0]
self.remote_ip = tmp[0][4][0]
self.remote_port = dport
self.local_ip = None
self.local_port = None
self.socket = None
self.client_hello = client_hello
self.advertised_tls_version = None
if version:
v = _tls_version_options.get(version, None)
if not v:
self.vprint("Unrecognized TLS version option.")
else:
self.advertised_tls_version = v
self.linebreak = False
if isinstance(data, str):
self.data_to_send = [data]
elif isinstance(data, list):
# parse_args is called two times, this is why we have to copy
# the data list for reversing it afterwards...
self.data_to_send = list(data)
self.data_to_send.reverse()
else:
self.data_to_send = []
def vprint_sessioninfo(self):
if self.verbose:
s = self.cur_session
v = _tls_version[s.tls_version]
self.vprint("Version : %s" % v)
cs = s.wcs.ciphersuite.name
self.vprint("Cipher suite : %s" % cs)
if s.tls_version >= 0x0304:
ms = s.tls13_master_secret
else:
ms = s.master_secret
self.vprint("Master secret : %s" % repr_hex(ms))
if s.server_certs:
self.vprint("Server certificate chain: %r" % s.server_certs)
self.vprint()
@ATMT.state(initial=True)
def INITIAL(self):
self.vprint("Starting TLS client automaton.")
raise self.INIT_TLS_SESSION()
@ATMT.state()
def INIT_TLS_SESSION(self):
self.cur_session = tlsSession(connection_end="client")
self.cur_session.client_certs = self.mycert
self.cur_session.client_key = self.mykey
v = self.advertised_tls_version
if v:
self.cur_session.advertised_tls_version = v
else:
default_version = self.cur_session.advertised_tls_version
self.advertised_tls_version = default_version
raise self.CONNECT()
@ATMT.state()
def CONNECT(self):
s = socket.socket(self.remote_family, socket.SOCK_STREAM)
self.vprint()
self.vprint("Trying to connect on %s:%d" % (self.remote_ip,
self.remote_port))
s.connect((self.remote_ip, self.remote_port))
self.socket = s
self.local_ip, self.local_port = self.socket.getsockname()[:2]
self.vprint()
if self.cur_session.advertised_tls_version in [0x0200, 0x0002]:
raise self.SSLv2_PREPARE_CLIENTHELLO()
elif self.cur_session.advertised_tls_version >= 0x0304:
raise self.TLS13_START()
else:
raise self.PREPARE_CLIENTFLIGHT1()
########################### TLS handshake #################################
@ATMT.state()
def PREPARE_CLIENTFLIGHT1(self):
self.add_record()
@ATMT.condition(PREPARE_CLIENTFLIGHT1)
def should_add_ClientHello(self):
self.add_msg(self.client_hello or TLSClientHello())
raise self.ADDED_CLIENTHELLO()
@ATMT.state()
def ADDED_CLIENTHELLO(self):
pass
@ATMT.condition(ADDED_CLIENTHELLO)
def should_send_ClientFlight1(self):
self.flush_records()
raise self.SENT_CLIENTFLIGHT1()
@ATMT.state()
def SENT_CLIENTFLIGHT1(self):
raise self.WAITING_SERVERFLIGHT1()
@ATMT.state()
def WAITING_SERVERFLIGHT1(self):
self.get_next_msg()
raise self.RECEIVED_SERVERFLIGHT1()
@ATMT.state()
def RECEIVED_SERVERFLIGHT1(self):
pass
@ATMT.condition(RECEIVED_SERVERFLIGHT1, prio=1)
def should_handle_ServerHello(self):
"""
XXX We should check the ServerHello attributes for discrepancies with
our own ClientHello.
"""
self.raise_on_packet(TLSServerHello,
self.HANDLED_SERVERHELLO)
@ATMT.state()
def HANDLED_SERVERHELLO(self):
pass
@ATMT.condition(RECEIVED_SERVERFLIGHT1, prio=2)
def missing_ServerHello(self):
raise self.MISSING_SERVERHELLO()
@ATMT.state()
def MISSING_SERVERHELLO(self):
self.vprint("Missing TLS ServerHello message!")
raise self.CLOSE_NOTIFY()
@ATMT.condition(HANDLED_SERVERHELLO, prio=1)
def should_handle_ServerCertificate(self):
if not self.cur_session.prcs.key_exchange.anonymous:
self.raise_on_packet(TLSCertificate,
self.HANDLED_SERVERCERTIFICATE)
raise self.HANDLED_SERVERCERTIFICATE()
@ATMT.state()
def HANDLED_SERVERCERTIFICATE(self):
pass
@ATMT.condition(HANDLED_SERVERHELLO, prio=2)
def missing_ServerCertificate(self):
raise self.MISSING_SERVERCERTIFICATE()
@ATMT.state()
def MISSING_SERVERCERTIFICATE(self):
self.vprint("Missing TLS Certificate message!")
raise self.CLOSE_NOTIFY()
@ATMT.state()
def HANDLED_CERTIFICATEREQUEST(self):
self.vprint("Server asked for a certificate...")
if not self.mykey or not self.mycert:
self.vprint("No client certificate to send!")
self.vprint("Will try and send an empty Certificate message...")
@ATMT.condition(HANDLED_SERVERCERTIFICATE, prio=1)
def should_handle_ServerKeyExchange_from_ServerCertificate(self):
"""
XXX We should check the ServerKeyExchange attributes for discrepancies
with our own ClientHello, along with the ServerHello and Certificate.
"""
self.raise_on_packet(TLSServerKeyExchange,
self.HANDLED_SERVERKEYEXCHANGE)
@ATMT.state(final=True)
def MISSING_SERVERKEYEXCHANGE(self):
pass
@ATMT.condition(HANDLED_SERVERCERTIFICATE, prio=2)
def missing_ServerKeyExchange(self):
if not self.cur_session.prcs.key_exchange.no_ske:
raise self.MISSING_SERVERKEYEXCHANGE()
@ATMT.state()
def HANDLED_SERVERKEYEXCHANGE(self):
pass
def should_handle_CertificateRequest(self):
"""
XXX We should check the CertificateRequest attributes for discrepancies
with the cipher suite, etc.
"""
self.raise_on_packet(TLSCertificateRequest,
self.HANDLED_CERTIFICATEREQUEST)
@ATMT.condition(HANDLED_SERVERKEYEXCHANGE, prio=2)
def should_handle_CertificateRequest_from_ServerKeyExchange(self):
self.should_handle_CertificateRequest()
@ATMT.condition(HANDLED_SERVERCERTIFICATE, prio=3)
def should_handle_CertificateRequest_from_ServerCertificate(self):
self.should_handle_CertificateRequest()
def should_handle_ServerHelloDone(self):
self.raise_on_packet(TLSServerHelloDone,
self.HANDLED_SERVERHELLODONE)
@ATMT.condition(HANDLED_SERVERKEYEXCHANGE, prio=1)
def should_handle_ServerHelloDone_from_ServerKeyExchange(self):
return self.should_handle_ServerHelloDone()
@ATMT.condition(HANDLED_CERTIFICATEREQUEST, prio=4)
def should_handle_ServerHelloDone_from_CertificateRequest(self):
return self.should_handle_ServerHelloDone()
@ATMT.condition(HANDLED_SERVERCERTIFICATE, prio=4)
def should_handle_ServerHelloDone_from_ServerCertificate(self):
return self.should_handle_ServerHelloDone()
@ATMT.state()
def HANDLED_SERVERHELLODONE(self):
raise self.PREPARE_CLIENTFLIGHT2()
@ATMT.state()
def PREPARE_CLIENTFLIGHT2(self):
self.add_record()
@ATMT.condition(PREPARE_CLIENTFLIGHT2, prio=1)
def should_add_ClientCertificate(self):
"""
If the server sent a CertificateRequest, we send a Certificate message.
If no certificate is available, an empty Certificate message is sent:
- this is a SHOULD in RFC 4346 (Section 7.4.6)
- this is a MUST in RFC 5246 (Section 7.4.6)
XXX We may want to add a complete chain.
"""
hs_msg = [type(m) for m in self.cur_session.handshake_messages_parsed]
if not TLSCertificateRequest in hs_msg:
return
certs = []
if self.mycert:
certs = [self.mycert]
self.add_msg(TLSCertificate(certs=certs))
raise self.ADDED_CLIENTCERTIFICATE()
@ATMT.state()
def ADDED_CLIENTCERTIFICATE(self):
pass
def should_add_ClientKeyExchange(self):
self.add_msg(TLSClientKeyExchange())
raise self.ADDED_CLIENTKEYEXCHANGE()
@ATMT.condition(PREPARE_CLIENTFLIGHT2, prio=2)
def should_add_ClientKeyExchange_from_ClientFlight2(self):
return self.should_add_ClientKeyExchange()
@ATMT.condition(ADDED_CLIENTCERTIFICATE)
def should_add_ClientKeyExchange_from_ClientCertificate(self):
return self.should_add_ClientKeyExchange()
@ATMT.state()
def ADDED_CLIENTKEYEXCHANGE(self):
pass
@ATMT.condition(ADDED_CLIENTKEYEXCHANGE, prio=1)
def should_add_ClientVerify(self):
"""
XXX Section 7.4.7.1 of RFC 5246 states that the CertificateVerify
message is only sent following a client certificate that has signing
capability (i.e. not those containing fixed DH params).
We should verify that before adding the message. We should also handle
the case when the Certificate message was empty.
"""
hs_msg = [type(m) for m in self.cur_session.handshake_messages_parsed]
if (not TLSCertificateRequest in hs_msg or
self.mycert is None or
self.mykey is None):
return
self.add_msg(TLSCertificateVerify())
raise self.ADDED_CERTIFICATEVERIFY()
@ATMT.state()
def ADDED_CERTIFICATEVERIFY(self):
pass
@ATMT.condition(ADDED_CERTIFICATEVERIFY)
def should_add_ChangeCipherSpec_from_CertificateVerify(self):
self.add_record()
self.add_msg(TLSChangeCipherSpec())
raise self.ADDED_CHANGECIPHERSPEC()
@ATMT.condition(ADDED_CLIENTKEYEXCHANGE, prio=2)
def should_add_ChangeCipherSpec_from_ClientKeyExchange(self):
self.add_record()
self.add_msg(TLSChangeCipherSpec())
raise self.ADDED_CHANGECIPHERSPEC()
@ATMT.state()
def ADDED_CHANGECIPHERSPEC(self):
pass
@ATMT.condition(ADDED_CHANGECIPHERSPEC)
def should_add_ClientFinished(self):
self.add_record()
self.add_msg(TLSFinished())
raise self.ADDED_CLIENTFINISHED()
@ATMT.state()
def ADDED_CLIENTFINISHED(self):
pass
@ATMT.condition(ADDED_CLIENTFINISHED)
def should_send_ClientFlight2(self):
self.flush_records()
raise self.SENT_CLIENTFLIGHT2()
@ATMT.state()
def SENT_CLIENTFLIGHT2(self):
raise self.WAITING_SERVERFLIGHT2()
@ATMT.state()
def WAITING_SERVERFLIGHT2(self):
self.get_next_msg()
raise self.RECEIVED_SERVERFLIGHT2()
@ATMT.state()
def RECEIVED_SERVERFLIGHT2(self):
pass
@ATMT.condition(RECEIVED_SERVERFLIGHT2)
def should_handle_ChangeCipherSpec(self):
self.raise_on_packet(TLSChangeCipherSpec,
self.HANDLED_CHANGECIPHERSPEC)
@ATMT.state()
def HANDLED_CHANGECIPHERSPEC(self):
pass
@ATMT.condition(HANDLED_CHANGECIPHERSPEC)
def should_handle_Finished(self):
self.raise_on_packet(TLSFinished,
self.HANDLED_SERVERFINISHED)
@ATMT.state()
def HANDLED_SERVERFINISHED(self):
self.vprint("TLS handshake completed!")
self.vprint_sessioninfo()
self.vprint("You may send data or use 'quit'.")
####################### end of TLS handshake ##############################
@ATMT.condition(HANDLED_SERVERFINISHED)
def should_wait_ClientData(self):
raise self.WAIT_CLIENTDATA()
@ATMT.state()
def WAIT_CLIENTDATA(self):
pass
@ATMT.condition(WAIT_CLIENTDATA, prio=1)
def add_ClientData(self):
"""
The user may type in:
GET / HTTP/1.1\r\nHost: testserver.com\r\n\r\n
Special characters are handled so that it becomes a valid HTTP request.
"""
if not self.data_to_send:
data = raw_input()
data = data.replace("\\r", "\r")
data = data.replace("\\n", "\n")
else:
data = self.data_to_send.pop()
if data == "quit":
return
if self.linebreak:
data += "\n"
self.add_record()
self.add_msg(TLSApplicationData(data=data))
raise self.ADDED_CLIENTDATA()
@ATMT.condition(WAIT_CLIENTDATA, prio=2)
def no_more_ClientData(self):
raise self.CLOSE_NOTIFY()
@ATMT.state()
def ADDED_CLIENTDATA(self):
pass
@ATMT.condition(ADDED_CLIENTDATA)
def should_send_ClientData(self):
self.flush_records()
raise self.SENT_CLIENTDATA()
@ATMT.state()
def SENT_CLIENTDATA(self):
raise self.WAITING_SERVERDATA()
@ATMT.state()
def WAITING_SERVERDATA(self):
self.get_next_msg(0.3, 1)
raise self.RECEIVED_SERVERDATA()
@ATMT.state()
def RECEIVED_SERVERDATA(self):
pass
@ATMT.condition(RECEIVED_SERVERDATA, prio=1)
def should_handle_ServerData(self):
if not self.buffer_in:
raise self.WAIT_CLIENTDATA()
p = self.buffer_in[0]
if isinstance(p, TLSApplicationData):
print("> Received: %s" % p.data)
elif isinstance(p, TLSAlert):
print("> Received: %r" % p)
raise self.CLOSE_NOTIFY()
else:
print("> Received: %r" % p)
self.buffer_in = self.buffer_in[1:]
raise self.HANDLED_SERVERDATA()
@ATMT.state()
def HANDLED_SERVERDATA(self):
raise self.WAIT_CLIENTDATA()
@ATMT.state()
def CLOSE_NOTIFY(self):
self.vprint()
self.vprint("Trying to send a TLSAlert to the server...")
@ATMT.condition(CLOSE_NOTIFY)
def close_session(self):
self.add_record()
self.add_msg(TLSAlert(level=1, descr=0))
try:
self.flush_records()
except:
self.vprint("Could not send termination Alert, maybe the server stopped?")
raise self.FINAL()
########################## SSLv2 handshake ################################
@ATMT.state()
def SSLv2_PREPARE_CLIENTHELLO(self):
pass
@ATMT.condition(SSLv2_PREPARE_CLIENTHELLO)
def sslv2_should_add_ClientHello(self):
self.add_record(is_sslv2=True)
p = self.client_hello or SSLv2ClientHello(challenge=randstring(16))
self.add_msg(p)
raise self.SSLv2_ADDED_CLIENTHELLO()
@ATMT.state()
def SSLv2_ADDED_CLIENTHELLO(self):
pass
@ATMT.condition(SSLv2_ADDED_CLIENTHELLO)
def sslv2_should_send_ClientHello(self):
self.flush_records()
raise self.SSLv2_SENT_CLIENTHELLO()
@ATMT.state()
def SSLv2_SENT_CLIENTHELLO(self):
raise self.SSLv2_WAITING_SERVERHELLO()
@ATMT.state()
def SSLv2_WAITING_SERVERHELLO(self):
self.get_next_msg()
raise self.SSLv2_RECEIVED_SERVERHELLO()
@ATMT.state()
def SSLv2_RECEIVED_SERVERHELLO(self):
pass
@ATMT.condition(SSLv2_RECEIVED_SERVERHELLO, prio=1)
def sslv2_should_handle_ServerHello(self):
self.raise_on_packet(SSLv2ServerHello,
self.SSLv2_HANDLED_SERVERHELLO)
@ATMT.state()
def SSLv2_HANDLED_SERVERHELLO(self):
pass
@ATMT.condition(SSLv2_RECEIVED_SERVERHELLO, prio=2)
def sslv2_missing_ServerHello(self):
raise self.SSLv2_MISSING_SERVERHELLO()
@ATMT.state()
def SSLv2_MISSING_SERVERHELLO(self):
self.vprint("Missing SSLv2 ServerHello message!")
raise self.SSLv2_CLOSE_NOTIFY()
@ATMT.condition(SSLv2_HANDLED_SERVERHELLO)
def sslv2_should_add_ClientMasterKey(self):
self.add_record(is_sslv2=True)
self.add_msg(SSLv2ClientMasterKey())
raise self.SSLv2_ADDED_CLIENTMASTERKEY()
@ATMT.state()
def SSLv2_ADDED_CLIENTMASTERKEY(self):
pass
@ATMT.condition(SSLv2_ADDED_CLIENTMASTERKEY)
def sslv2_should_send_ClientMasterKey(self):
self.flush_records()
raise self.SSLv2_SENT_CLIENTMASTERKEY()
@ATMT.state()
def SSLv2_SENT_CLIENTMASTERKEY(self):
raise self.SSLv2_WAITING_SERVERVERIFY()
@ATMT.state()
def SSLv2_WAITING_SERVERVERIFY(self):
        # We give the server 0.5 seconds to send its ServerVerify.
        # Otherwise we assume that it's waiting for our ClientFinished.
self.get_next_msg(0.5, 0)
raise self.SSLv2_RECEIVED_SERVERVERIFY()
@ATMT.state()
def SSLv2_RECEIVED_SERVERVERIFY(self):
pass
@ATMT.condition(SSLv2_RECEIVED_SERVERVERIFY, prio=1)
def sslv2_should_handle_ServerVerify(self):
self.raise_on_packet(SSLv2ServerVerify,
self.SSLv2_HANDLED_SERVERVERIFY,
get_next_msg=False)
@ATMT.state()
def SSLv2_HANDLED_SERVERVERIFY(self):
pass
def sslv2_should_add_ClientFinished(self):
hs_msg = [type(m) for m in self.cur_session.handshake_messages_parsed]
if SSLv2ClientFinished in hs_msg:
return
self.add_record(is_sslv2=True)
self.add_msg(SSLv2ClientFinished())
raise self.SSLv2_ADDED_CLIENTFINISHED()
@ATMT.condition(SSLv2_HANDLED_SERVERVERIFY, prio=1)
def sslv2_should_add_ClientFinished_from_ServerVerify(self):
return self.sslv2_should_add_ClientFinished()
@ATMT.condition(SSLv2_HANDLED_SERVERVERIFY, prio=2)
def sslv2_should_wait_ServerFinished_from_ServerVerify(self):
raise self.SSLv2_WAITING_SERVERFINISHED()
@ATMT.condition(SSLv2_RECEIVED_SERVERVERIFY, prio=2)
def sslv2_should_add_ClientFinished_from_NoServerVerify(self):
return self.sslv2_should_add_ClientFinished()
@ATMT.condition(SSLv2_RECEIVED_SERVERVERIFY, prio=3)
def sslv2_missing_ServerVerify(self):
raise self.SSLv2_MISSING_SERVERVERIFY()
@ATMT.state(final=True)
def SSLv2_MISSING_SERVERVERIFY(self):
self.vprint("Missing SSLv2 ServerVerify message!")
raise self.SSLv2_CLOSE_NOTIFY()
@ATMT.state()
def SSLv2_ADDED_CLIENTFINISHED(self):
pass
@ATMT.condition(SSLv2_ADDED_CLIENTFINISHED)
def sslv2_should_send_ClientFinished(self):
self.flush_records()
raise self.SSLv2_SENT_CLIENTFINISHED()
@ATMT.state()
def SSLv2_SENT_CLIENTFINISHED(self):
hs_msg = [type(m) for m in self.cur_session.handshake_messages_parsed]
if SSLv2ServerVerify in hs_msg:
raise self.SSLv2_WAITING_SERVERFINISHED()
else:
self.get_next_msg()
raise self.SSLv2_RECEIVED_SERVERVERIFY()
@ATMT.state()
def SSLv2_WAITING_SERVERFINISHED(self):
self.get_next_msg()
raise self.SSLv2_RECEIVED_SERVERFINISHED()
@ATMT.state()
def SSLv2_RECEIVED_SERVERFINISHED(self):
pass
@ATMT.condition(SSLv2_RECEIVED_SERVERFINISHED, prio=1)
def sslv2_should_handle_ServerFinished(self):
self.raise_on_packet(SSLv2ServerFinished,
self.SSLv2_HANDLED_SERVERFINISHED)
####################### SSLv2 client authentication #######################
@ATMT.condition(SSLv2_RECEIVED_SERVERFINISHED, prio=2)
def sslv2_should_handle_RequestCertificate(self):
self.raise_on_packet(SSLv2RequestCertificate,
self.SSLv2_HANDLED_REQUESTCERTIFICATE)
@ATMT.state()
def SSLv2_HANDLED_REQUESTCERTIFICATE(self):
self.vprint("Server asked for a certificate...")
if not self.mykey or not self.mycert:
self.vprint("No client certificate to send!")
raise self.SSLv2_CLOSE_NOTIFY()
@ATMT.condition(SSLv2_HANDLED_REQUESTCERTIFICATE)
def sslv2_should_add_ClientCertificate(self):
self.add_record(is_sslv2=True)
self.add_msg(SSLv2ClientCertificate(certdata=self.mycert))
raise self.SSLv2_ADDED_CLIENTCERTIFICATE()
@ATMT.state()
def SSLv2_ADDED_CLIENTCERTIFICATE(self):
pass
@ATMT.condition(SSLv2_ADDED_CLIENTCERTIFICATE)
def sslv2_should_send_ClientCertificate(self):
self.flush_records()
raise self.SSLv2_SENT_CLIENTCERTIFICATE()
@ATMT.state()
def SSLv2_SENT_CLIENTCERTIFICATE(self):
raise self.SSLv2_WAITING_SERVERFINISHED()
################### end of SSLv2 client authentication ####################
@ATMT.state()
def SSLv2_HANDLED_SERVERFINISHED(self):
self.vprint("SSLv2 handshake completed!")
self.vprint_sessioninfo()
self.vprint("You may send data or use 'quit'.")
@ATMT.condition(SSLv2_RECEIVED_SERVERFINISHED, prio=3)
def sslv2_missing_ServerFinished(self):
raise self.SSLv2_MISSING_SERVERFINISHED()
@ATMT.state()
def SSLv2_MISSING_SERVERFINISHED(self):
self.vprint("Missing SSLv2 ServerFinished message!")
raise self.SSLv2_CLOSE_NOTIFY()
######################## end of SSLv2 handshake ###########################
@ATMT.condition(SSLv2_HANDLED_SERVERFINISHED)
def sslv2_should_wait_ClientData(self):
raise self.SSLv2_WAITING_CLIENTDATA()
@ATMT.state()
def SSLv2_WAITING_CLIENTDATA(self):
pass
@ATMT.condition(SSLv2_WAITING_CLIENTDATA, prio=1)
def sslv2_add_ClientData(self):
if not self.data_to_send:
data = raw_input()
data = data.replace("\\r", "\r")
data = data.replace("\\n", "\n")
else:
data = self.data_to_send.pop()
self.vprint("> Read from list: %s" % data)
if data == "quit":
return
if self.linebreak:
data += "\n"
self.add_record(is_sslv2=True)
self.add_msg(Raw(data))
raise self.SSLv2_ADDED_CLIENTDATA()
@ATMT.condition(SSLv2_WAITING_CLIENTDATA, prio=2)
def sslv2_no_more_ClientData(self):
raise self.SSLv2_CLOSE_NOTIFY()
@ATMT.state()
def SSLv2_ADDED_CLIENTDATA(self):
pass
@ATMT.condition(SSLv2_ADDED_CLIENTDATA)
def sslv2_should_send_ClientData(self):
self.flush_records()
raise self.SSLv2_SENT_CLIENTDATA()
@ATMT.state()
def SSLv2_SENT_CLIENTDATA(self):
raise self.SSLv2_WAITING_SERVERDATA()
@ATMT.state()
def SSLv2_WAITING_SERVERDATA(self):
self.get_next_msg(0.3, 1)
raise self.SSLv2_RECEIVED_SERVERDATA()
@ATMT.state()
def SSLv2_RECEIVED_SERVERDATA(self):
pass
@ATMT.condition(SSLv2_RECEIVED_SERVERDATA)
def sslv2_should_handle_ServerData(self):
if not self.buffer_in:
raise self.SSLv2_WAITING_CLIENTDATA()
p = self.buffer_in[0]
print("> Received: %s" % p.load)
if p.load.startswith("goodbye"):
raise self.SSLv2_CLOSE_NOTIFY()
self.buffer_in = self.buffer_in[1:]
raise self.SSLv2_HANDLED_SERVERDATA()
@ATMT.state()
def SSLv2_HANDLED_SERVERDATA(self):
raise self.SSLv2_WAITING_CLIENTDATA()
@ATMT.state()
def SSLv2_CLOSE_NOTIFY(self):
"""
There is no proper way to end an SSLv2 session.
        We try to send a 'goodbye' message as a substitute.
"""
self.vprint()
self.vprint("Trying to send a 'goodbye' to the server...")
@ATMT.condition(SSLv2_CLOSE_NOTIFY)
def sslv2_close_session(self):
self.add_record()
self.add_msg(Raw('goodbye'))
try:
self.flush_records()
except:
self.vprint("Could not send our goodbye. The server probably stopped.")
self.socket.close()
raise self.FINAL()
######################### TLS 1.3 handshake ###############################
@ATMT.state()
def TLS13_START(self):
pass
@ATMT.condition(TLS13_START)
def tls13_should_add_ClientHello(self):
# we have to use the legacy, plaintext TLS record here
self.add_record(is_tls13=False)
if self.client_hello:
p = self.client_hello
else:
# When trying to connect to a public TLS 1.3 server,
# you will most likely need to provide an SNI extension.
#sn = ServerName(servername="<put server name here>")
ext = [TLS_Ext_SupportedGroups(groups=["secp256r1"]),
#TLS_Ext_ServerName(servernames=[sn]),
TLS_Ext_KeyShare_CH(client_shares=[KeyShareEntry(group=23)]),
TLS_Ext_SupportedVersions(versions=["TLS 1.3-d18"]),
TLS_Ext_SignatureAlgorithms(sig_algs=["sha256+rsapss",
"sha256+rsa"]) ]
p = TLSClientHello(ciphers=0x1301, ext=ext)
self.add_msg(p)
raise self.TLS13_ADDED_CLIENTHELLO()
@ATMT.state()
def TLS13_ADDED_CLIENTHELLO(self):
pass
@ATMT.condition(TLS13_ADDED_CLIENTHELLO)
def tls13_should_send_ClientHello(self):
self.flush_records()
raise self.TLS13_SENT_CLIENTHELLO()
@ATMT.state()
def TLS13_SENT_CLIENTHELLO(self):
raise self.TLS13_WAITING_SERVERHELLO()
@ATMT.state()
def TLS13_WAITING_SERVERHELLO(self):
self.get_next_msg()
@ATMT.condition(TLS13_WAITING_SERVERHELLO)
def tls13_should_handle_ServerHello(self):
self.raise_on_packet(TLS13ServerHello,
self.TLS13_WAITING_ENCRYPTEDEXTENSIONS)
@ATMT.state()
def TLS13_WAITING_ENCRYPTEDEXTENSIONS(self):
self.get_next_msg()
@ATMT.condition(TLS13_WAITING_ENCRYPTEDEXTENSIONS)
def tls13_should_handle_EncryptedExtensions(self):
self.raise_on_packet(TLSEncryptedExtensions,
self.TLS13_WAITING_CERTIFICATE)
@ATMT.state()
def TLS13_WAITING_CERTIFICATE(self):
self.get_next_msg()
@ATMT.condition(TLS13_WAITING_CERTIFICATE, prio=1)
def tls13_should_handle_Certificate(self):
self.raise_on_packet(TLS13Certificate,
self.TLS13_WAITING_CERTIFICATEVERIFY)
@ATMT.condition(TLS13_WAITING_CERTIFICATE, prio=2)
def tls13_should_handle_CertificateRequest(self):
hs_msg = [type(m) for m in self.cur_session.handshake_messages_parsed]
if TLSCertificateRequest in hs_msg:
self.vprint("TLSCertificateRequest already received!")
self.raise_on_packet(TLSCertificateRequest,
self.TLS13_WAITING_CERTIFICATE)
@ATMT.condition(TLS13_WAITING_CERTIFICATE, prio=3)
def tls13_should_handle_ServerFinished_from_EncryptedExtensions(self):
self.raise_on_packet(TLSFinished,
self.TLS13_CONNECTED)
@ATMT.condition(TLS13_WAITING_CERTIFICATE, prio=4)
def tls13_missing_Certificate(self):
self.vprint("Missing TLS 1.3 message after EncryptedExtensions!")
raise self.FINAL()
@ATMT.state()
def TLS13_WAITING_CERTIFICATEVERIFY(self):
self.get_next_msg()
@ATMT.condition(TLS13_WAITING_CERTIFICATEVERIFY)
def tls13_should_handle_CertificateVerify(self):
self.raise_on_packet(TLSCertificateVerify,
self.TLS13_WAITING_SERVERFINISHED)
@ATMT.state()
def TLS13_WAITING_SERVERFINISHED(self):
self.get_next_msg()
@ATMT.condition(TLS13_WAITING_SERVERFINISHED)
def tls13_should_handle_ServerFinished_from_CertificateVerify(self):
self.raise_on_packet(TLSFinished,
self.TLS13_PREPARE_CLIENTFLIGHT2)
@ATMT.state()
def TLS13_PREPARE_CLIENTFLIGHT2(self):
self.add_record(is_tls13=True)
#raise self.FINAL()
@ATMT.condition(TLS13_PREPARE_CLIENTFLIGHT2)
def tls13_should_add_ClientFinished(self):
self.add_msg(TLSFinished())
raise self.TLS13_ADDED_CLIENTFINISHED()
@ATMT.state()
def TLS13_ADDED_CLIENTFINISHED(self):
pass
@ATMT.condition(TLS13_ADDED_CLIENTFINISHED)
def tls13_should_send_ClientFlight2(self):
self.flush_records()
raise self.TLS13_SENT_CLIENTFLIGHT2()
@ATMT.state()
def TLS13_SENT_CLIENTFLIGHT2(self):
raise self.HANDLED_SERVERFINISHED()
@ATMT.state(final=True)
def FINAL(self):
# We might call shutdown, but it may happen that the server
# did not wait for us to shutdown after answering our data query.
#self.socket.shutdown(1)
self.vprint("Closing client socket...")
self.socket.close()
self.vprint("Ending TLS client automaton.")
| mit |
kioma/densenet | models/densenet121.py | 2 | 6939 | from keras.models import Model
from keras.layers import Input, merge, ZeroPadding2D
from keras.layers.core import Dense, Dropout, Activation
from keras.layers.convolutional import Convolution2D
from keras.layers.pooling import AveragePooling2D, GlobalAveragePooling2D, MaxPooling2D
from keras.layers.normalization import BatchNormalization
import keras.backend as K
from custom_layers import Scale
def DenseNet(nb_dense_block=4, growth_rate=32, nb_filter=64, reduction=0.0, dropout_rate=0.0, weight_decay=1e-4, classes=1000, weights_path=None):
    '''Instantiate the DenseNet 121 architecture.
# Arguments
nb_dense_block: number of dense blocks to add to end
growth_rate: number of filters to add per dense block
nb_filter: initial number of filters
reduction: reduction factor of transition blocks.
dropout_rate: dropout rate
weight_decay: weight decay factor
classes: optional number of classes to classify images
weights_path: path to pre-trained weights
# Returns
A Keras model instance.
'''
eps = 1.1e-5
# compute compression factor
compression = 1.0 - reduction
# Handle Dimension Ordering for different backends
global concat_axis
if K.image_dim_ordering() == 'tf':
concat_axis = 3
img_input = Input(shape=(224, 224, 3), name='data')
else:
concat_axis = 1
img_input = Input(shape=(3, 224, 224), name='data')
# From architecture for ImageNet (Table 1 in the paper)
nb_filter = 64
nb_layers = [6,12,24,16] # For DenseNet-121
# Initial convolution
x = ZeroPadding2D((3, 3), name='conv1_zeropadding')(img_input)
x = Convolution2D(nb_filter, 7, 7, subsample=(2, 2), name='conv1', bias=False)(x)
x = BatchNormalization(epsilon=eps, axis=concat_axis, name='conv1_bn')(x)
x = Scale(axis=concat_axis, name='conv1_scale')(x)
x = Activation('relu', name='relu1')(x)
x = ZeroPadding2D((1, 1), name='pool1_zeropadding')(x)
x = MaxPooling2D((3, 3), strides=(2, 2), name='pool1')(x)
# Add dense blocks
for block_idx in range(nb_dense_block - 1):
stage = block_idx+2
x, nb_filter = dense_block(x, stage, nb_layers[block_idx], nb_filter, growth_rate, dropout_rate=dropout_rate, weight_decay=weight_decay)
# Add transition_block
x = transition_block(x, stage, nb_filter, compression=compression, dropout_rate=dropout_rate, weight_decay=weight_decay)
nb_filter = int(nb_filter * compression)
final_stage = stage + 1
x, nb_filter = dense_block(x, final_stage, nb_layers[-1], nb_filter, growth_rate, dropout_rate=dropout_rate, weight_decay=weight_decay)
x = BatchNormalization(epsilon=eps, axis=concat_axis, name='conv'+str(final_stage)+'_blk_bn')(x)
x = Scale(axis=concat_axis, name='conv'+str(final_stage)+'_blk_scale')(x)
x = Activation('relu', name='relu'+str(final_stage)+'_blk')(x)
x = GlobalAveragePooling2D(name='pool'+str(final_stage))(x)
x = Dense(classes, name='fc6')(x)
x = Activation('softmax', name='prob')(x)
model = Model(img_input, x, name='densenet')
if weights_path is not None:
model.load_weights(weights_path)
return model
def conv_block(x, stage, branch, nb_filter, dropout_rate=None, weight_decay=1e-4):
    '''Apply BatchNorm, Relu, bottleneck 1x1 Conv2D, 3x3 Conv2D, and optional dropout
# Arguments
x: input tensor
stage: index for dense block
branch: layer index within each dense block
nb_filter: number of filters
dropout_rate: dropout rate
weight_decay: weight decay factor
'''
eps = 1.1e-5
conv_name_base = 'conv' + str(stage) + '_' + str(branch)
relu_name_base = 'relu' + str(stage) + '_' + str(branch)
# 1x1 Convolution (Bottleneck layer)
inter_channel = nb_filter * 4
x = BatchNormalization(epsilon=eps, axis=concat_axis, name=conv_name_base+'_x1_bn')(x)
x = Scale(axis=concat_axis, name=conv_name_base+'_x1_scale')(x)
x = Activation('relu', name=relu_name_base+'_x1')(x)
x = Convolution2D(inter_channel, 1, 1, name=conv_name_base+'_x1', bias=False)(x)
if dropout_rate:
x = Dropout(dropout_rate)(x)
# 3x3 Convolution
x = BatchNormalization(epsilon=eps, axis=concat_axis, name=conv_name_base+'_x2_bn')(x)
x = Scale(axis=concat_axis, name=conv_name_base+'_x2_scale')(x)
x = Activation('relu', name=relu_name_base+'_x2')(x)
x = ZeroPadding2D((1, 1), name=conv_name_base+'_x2_zeropadding')(x)
x = Convolution2D(nb_filter, 3, 3, name=conv_name_base+'_x2', bias=False)(x)
if dropout_rate:
x = Dropout(dropout_rate)(x)
return x
def transition_block(x, stage, nb_filter, compression=1.0, dropout_rate=None, weight_decay=1E-4):
    ''' Apply BatchNorm, 1x1 Convolution, AveragePooling, optional compression, dropout
# Arguments
x: input tensor
stage: index for dense block
nb_filter: number of filters
compression: calculated as 1 - reduction. Reduces the number of feature maps in the transition block.
dropout_rate: dropout rate
weight_decay: weight decay factor
'''
eps = 1.1e-5
conv_name_base = 'conv' + str(stage) + '_blk'
relu_name_base = 'relu' + str(stage) + '_blk'
pool_name_base = 'pool' + str(stage)
x = BatchNormalization(epsilon=eps, axis=concat_axis, name=conv_name_base+'_bn')(x)
x = Scale(axis=concat_axis, name=conv_name_base+'_scale')(x)
x = Activation('relu', name=relu_name_base)(x)
x = Convolution2D(int(nb_filter * compression), 1, 1, name=conv_name_base, bias=False)(x)
if dropout_rate:
x = Dropout(dropout_rate)(x)
x = AveragePooling2D((2, 2), strides=(2, 2), name=pool_name_base)(x)
return x
def dense_block(x, stage, nb_layers, nb_filter, growth_rate, dropout_rate=None, weight_decay=1e-4, grow_nb_filters=True):
''' Build a dense_block where the output of each conv_block is fed to subsequent ones
# Arguments
x: input tensor
stage: index for dense block
nb_layers: the number of layers of conv_block to append to the model.
nb_filter: number of filters
growth_rate: growth rate
dropout_rate: dropout rate
weight_decay: weight decay factor
grow_nb_filters: flag to decide to allow number of filters to grow
'''
eps = 1.1e-5
concat_feat = x
for i in range(nb_layers):
branch = i+1
x = conv_block(concat_feat, stage, branch, growth_rate, dropout_rate, weight_decay)
concat_feat = merge([concat_feat, x], mode='concat', concat_axis=concat_axis, name='concat_'+str(stage)+'_'+str(branch))
if grow_nb_filters:
nb_filter += growth_rate
return concat_feat, nb_filter
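# Illustrative usage (not part of the original file; assumes the same old
# Keras 1.x API imported above and the custom Scale layer on the path):
if __name__ == '__main__':
    model = DenseNet(reduction=0.5, classes=1000)
    model.compile(optimizer='sgd', loss='categorical_crossentropy',
                  metrics=['accuracy'])
    model.summary()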
| mit |
XiaosongWei/crosswalk-test-suite | webapi/webapi-vehicleinfo-ivi-tests/inst.xpk.py | 456 | 6809 | #!/usr/bin/env python
import os
import shutil
import glob
import time
import sys
import subprocess
import string
from optparse import OptionParser, make_option
SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__))
PKG_NAME = os.path.basename(SCRIPT_DIR)
PARAMETERS = None
#XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/5000/dbus/user_bus_socket"
SRC_DIR = ""
PKG_SRC_DIR = ""
def doCMD(cmd):
# Do not need handle timeout in this short script, let tool do it
print "-->> \"%s\"" % cmd
output = []
cmd_return_code = 1
cmd_proc = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=True)
while True:
output_line = cmd_proc.stdout.readline().strip("\r\n")
cmd_return_code = cmd_proc.poll()
if output_line == '' and cmd_return_code is not None:
break
sys.stdout.write("%s\n" % output_line)
sys.stdout.flush()
output.append(output_line)
return (cmd_return_code, output)
def updateCMD(cmd=None):
if "pkgcmd" in cmd:
cmd = "su - %s -c '%s;%s'" % (PARAMETERS.user, XW_ENV, cmd)
return cmd
def getUSERID():
if PARAMETERS.mode == "SDB":
cmd = "sdb -s %s shell id -u %s" % (
PARAMETERS.device, PARAMETERS.user)
else:
cmd = "ssh %s \"id -u %s\"" % (
PARAMETERS.device, PARAMETERS.user)
return doCMD(cmd)
def getPKGID(pkg_name=None):
if PARAMETERS.mode == "SDB":
cmd = "sdb -s %s shell %s" % (
PARAMETERS.device, updateCMD('pkgcmd -l'))
else:
cmd = "ssh %s \"%s\"" % (
PARAMETERS.device, updateCMD('pkgcmd -l'))
(return_code, output) = doCMD(cmd)
if return_code != 0:
return None
test_pkg_id = None
for line in output:
if line.find("[" + pkg_name + "]") != -1:
pkgidIndex = line.split().index("pkgid")
test_pkg_id = line.split()[pkgidIndex + 1].strip("[]")
break
return test_pkg_id
def doRemoteCMD(cmd=None):
if PARAMETERS.mode == "SDB":
cmd = "sdb -s %s shell %s" % (PARAMETERS.device, updateCMD(cmd))
else:
cmd = "ssh %s \"%s\"" % (PARAMETERS.device, updateCMD(cmd))
return doCMD(cmd)
def doRemoteCopy(src=None, dest=None):
if PARAMETERS.mode == "SDB":
cmd_prefix = "sdb -s %s push" % PARAMETERS.device
cmd = "%s %s %s" % (cmd_prefix, src, dest)
else:
cmd = "scp -r %s %s:/%s" % (src, PARAMETERS.device, dest)
(return_code, output) = doCMD(cmd)
doRemoteCMD("sync")
    # return True on success so callers can use "if not doRemoteCopy(...)"
    if return_code != 0:
        return False
    else:
        return True
def uninstPKGs():
action_status = True
for root, dirs, files in os.walk(SCRIPT_DIR):
if root.endswith("mediasrc"):
continue
for file in files:
if file.endswith(".xpk"):
pkg_id = getPKGID(os.path.basename(os.path.splitext(file)[0]))
if not pkg_id:
action_status = False
continue
(return_code, output) = doRemoteCMD(
"pkgcmd -u -t xpk -q -n %s" % pkg_id)
for line in output:
if "Failure" in line:
action_status = False
break
(return_code, output) = doRemoteCMD(
"rm -rf %s" % PKG_SRC_DIR)
if return_code != 0:
action_status = False
return action_status
def instPKGs():
action_status = True
(return_code, output) = doRemoteCMD(
"mkdir -p %s" % PKG_SRC_DIR)
if return_code != 0:
action_status = False
for root, dirs, files in os.walk(SCRIPT_DIR):
if root.endswith("mediasrc"):
continue
for file in files:
if file.endswith(".xpk"):
if not doRemoteCopy(
os.path.join(root, file), "%s/%s" % (SRC_DIR, file)):
action_status = False
(return_code, output) = doRemoteCMD(
"pkgcmd -i -t xpk -q -p %s/%s" % (SRC_DIR, file))
doRemoteCMD("rm -rf %s/%s" % (SRC_DIR, file))
for line in output:
if "Failure" in line:
action_status = False
break
# Do some special copy/delete... steps
'''
(return_code, output) = doRemoteCMD(
"mkdir -p %s/tests" % PKG_SRC_DIR)
if return_code != 0:
action_status = False
if not doRemoteCopy("specname/tests", "%s/tests" % PKG_SRC_DIR):
action_status = False
'''
return action_status
def main():
try:
usage = "usage: inst.py -i"
opts_parser = OptionParser(usage=usage)
opts_parser.add_option(
"-m", dest="mode", action="store", help="Specify mode")
opts_parser.add_option(
"-s", dest="device", action="store", help="Specify device")
opts_parser.add_option(
"-i", dest="binstpkg", action="store_true", help="Install package")
opts_parser.add_option(
"-u", dest="buninstpkg", action="store_true", help="Uninstall package")
opts_parser.add_option(
"-a", dest="user", action="store", help="User name")
global PARAMETERS
(PARAMETERS, args) = opts_parser.parse_args()
except Exception as e:
print "Got wrong option: %s, exit ..." % e
sys.exit(1)
if not PARAMETERS.user:
PARAMETERS.user = "app"
global SRC_DIR, PKG_SRC_DIR
SRC_DIR = "/home/%s/content" % PARAMETERS.user
PKG_SRC_DIR = "%s/tct/opt/%s" % (SRC_DIR, PKG_NAME)
if not PARAMETERS.mode:
PARAMETERS.mode = "SDB"
if PARAMETERS.mode == "SDB":
if not PARAMETERS.device:
(return_code, output) = doCMD("sdb devices")
for line in output:
if str.find(line, "\tdevice") != -1:
PARAMETERS.device = line.split("\t")[0]
break
else:
PARAMETERS.mode = "SSH"
if not PARAMETERS.device:
print "No device provided"
sys.exit(1)
user_info = getUSERID()
re_code = user_info[0]
if re_code == 0:
global XW_ENV
userid = user_info[1][0]
XW_ENV = "export DBUS_SESSION_BUS_ADDRESS=unix:path=/run/user/%s/dbus/user_bus_socket" % str(
userid)
else:
print "[Error] cmd commands error : %s" % str(user_info[1])
sys.exit(1)
if PARAMETERS.binstpkg and PARAMETERS.buninstpkg:
print "-i and -u are conflict"
sys.exit(1)
if PARAMETERS.buninstpkg:
if not uninstPKGs():
sys.exit(1)
else:
if not instPKGs():
sys.exit(1)
if __name__ == "__main__":
main()
sys.exit(0)
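# Example invocations (device id is a placeholder):
# ./inst.xpk.py -m SDB -s <device-id> -i # install the packaged .xpk files
# ./inst.xpk.py -m SDB -s <device-id> -u # uninstall them again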
| bsd-3-clause |
Danielweber7624/pybuilder | src/unittest/python/cli_tests.py | 7 | 9951 | # -*- coding: utf-8 -*-
#
# This file is part of PyBuilder
#
# Copyright 2011-2015 PyBuilder Team
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import pybuilder
from pybuilder.cli import (parse_options,
ColoredStdOutLogger,
CommandLineUsageException,
StdOutLogger,
length_of_longest_string,
print_list_of_tasks)
from pybuilder.core import Logger
from fluentmock import when, verify, Mock, ANY_STRING, UnitTests
class TaskListTests(UnitTests):
def set_up(self):
def __eq__(self, other):
return False
def __ne__(self, other):
return not self.__eq__(other)
def __lt__(self, other):
return False
self.mock_reactor = Mock()
self.mock_reactor.project.name = "any-project-name"
self.task_1 = Mock()
self.task_1.__eq__ = __eq__
self.task_1.__ne__ = __ne__
self.task_1.__lt__ = __lt__
self.task_1.name = "task-1"
self.task_1.description = ""
self.task_1.dependencies = []
self.task_2 = Mock()
self.task_2.__eq__ = __eq__
self.task_2.__ne__ = __ne__
self.task_2.__lt__ = __lt__
self.task_2.name = "task-2"
self.task_2.description = ""
self.task_2.dependencies = []
when(self.mock_reactor).get_tasks().then_return([self.task_1,
self.task_2])
when(pybuilder.cli).print_text_line(ANY_STRING).then_return(None)
def test_should_render_minimal_task_list_when_in_quiet_mode(self):
print_list_of_tasks(self.mock_reactor, quiet=True)
verify(pybuilder.cli).print_text_line('task-1:<no description available>\ntask-2:<no description available>')
def test_should_render_verbose_task_list_without_descriptions_and_dependencies(self):
print_list_of_tasks(self.mock_reactor, quiet=False)
verify(pybuilder.cli).print_text_line('Tasks found for project "any-project-name":')
verify(pybuilder.cli).print_text_line(' task-1 - <no description available>')
verify(pybuilder.cli).print_text_line(' task-2 - <no description available>')
def test_should_render_verbose_task_list_with_dependencies(self):
self.task_1.dependencies = ["any-dependency", "any-other-dependency"]
print_list_of_tasks(self.mock_reactor, quiet=False)
verify(pybuilder.cli).print_text_line('Tasks found for project "any-project-name":')
verify(pybuilder.cli).print_text_line(' task-1 - <no description available>')
verify(pybuilder.cli).print_text_line(' depends on tasks: any-dependency any-other-dependency')
verify(pybuilder.cli).print_text_line(' task-2 - <no description available>')
def test_should_render_verbose_task_list_with_descriptions(self):
self.task_1.description = ["any", "description", "for", "task", "1"]
self.task_2.description = ["any", "description", "for", "task", "2"]
print_list_of_tasks(self.mock_reactor, quiet=False)
verify(pybuilder.cli).print_text_line('Tasks found for project "any-project-name":')
verify(pybuilder.cli).print_text_line(' task-1 - any description for task 1')
verify(pybuilder.cli).print_text_line(' task-2 - any description for task 2')
class StdOutLoggerTest(unittest.TestCase):
def setUp(self):
self.stdout_logger = StdOutLogger(Logger)
def test_should_return_debug_message_when_debug_level_given(self):
actual_message = self.stdout_logger._level_to_string(Logger.DEBUG)
self.assertEqual(actual_message, "[DEBUG]")
def test_should_return_info_message_when_info_level_given(self):
actual_message = self.stdout_logger._level_to_string(Logger.INFO)
self.assertEqual(actual_message, "[INFO] ")
def test_should_return_warning_message_when_warning_level_given(self):
actual_message = self.stdout_logger._level_to_string(Logger.WARN)
self.assertEqual(actual_message, "[WARN] ")
def test_should_return_error_message_when_any_not_defined_level_given(self):
actual_message = self.stdout_logger._level_to_string(-1)
self.assertEqual(actual_message, "[ERROR]")
class ColoredStdOutLoggerTest(unittest.TestCase):
def setUp(self):
self.colored_stdout_logger = ColoredStdOutLogger(Logger)
def test_should_return_italic_debug_message_when_debug_level_given(self):
actual_message = self.colored_stdout_logger._level_to_string(Logger.DEBUG)
self.assertEqual(actual_message, "\x1b[2m[DEBUG]\x1b[0;0m")
def test_should_return_bold_info_message_when_info_level_given(self):
actual_message = self.colored_stdout_logger._level_to_string(Logger.INFO)
self.assertEqual(actual_message, "\x1b[1m[INFO] \x1b[0;0m")
def test_should_return_brown_and_bold_warning_message_when_warning_level_given(self):
actual_message = self.colored_stdout_logger._level_to_string(Logger.WARN)
self.assertEqual(actual_message, "\x1b[1;33m[WARN] \x1b[0;0m")
def test_should_return_bold_and_red_error_message_when_any_not_defined_level_given(self):
actual_message = self.colored_stdout_logger._level_to_string(-1)
self.assertEqual(actual_message, "\x1b[1;31m[ERROR]\x1b[0;0m")
class ParseOptionsTest(unittest.TestCase):
def assert_options(self, options, **overrides):
self.assertEquals(options.project_directory,
overrides.get("project_directory", "."))
self.assertEquals(options.debug,
overrides.get("debug", False))
self.assertEquals(options.quiet,
overrides.get("quiet", False))
self.assertEquals(options.list_tasks,
overrides.get("list_tasks", False))
self.assertEquals(options.no_color,
overrides.get("no_color", False))
self.assertEquals(options.property_overrides,
overrides.get("property_overrides", {}))
        self.assertEquals(options.start_project,
                          overrides.get("start_project", False))
        self.assertEquals(options.environments,
                          overrides.get("environments", []))
def test_should_parse_empty_arguments(self):
options, arguments = parse_options([])
self.assert_options(options)
self.assertEquals([], arguments)
def test_should_parse_task_list_without_options(self):
options, arguments = parse_options(["clean", "spam"])
self.assert_options(options)
self.assertEquals(["clean", "spam"], arguments)
def test_should_parse_start_project_without_options(self):
options, arguments = parse_options(["clean", "spam"])
self.assert_options(options)
self.assertEquals(["clean", "spam"], arguments)
def test_should_parse_empty_arguments_with_option(self):
options, arguments = parse_options(["-X"])
self.assert_options(options, debug=True)
self.assertEquals([], arguments)
def test_should_parse_arguments_and_option(self):
options, arguments = parse_options(["-X", "-D", "spam", "eggs"])
self.assert_options(options, debug=True, project_directory="spam")
self.assertEquals(["eggs"], arguments)
def test_should_set_property(self):
options, arguments = parse_options(["-P", "spam=eggs"])
self.assert_options(options, property_overrides={"spam": "eggs"})
self.assertEquals([], arguments)
def test_should_set_multiple_properties(self):
options, arguments = parse_options(["-P", "spam=eggs",
"-P", "foo=bar"])
self.assert_options(options, property_overrides={"spam": "eggs",
"foo": "bar"})
self.assertEquals([], arguments)
def test_should_abort_execution_when_property_definition_has_syntax_error(self):
self.assertRaises(
CommandLineUsageException, parse_options, ["-P", "spam"])
def test_should_parse_single_environment(self):
options, arguments = parse_options(["-E", "spam"])
self.assert_options(options, environments=["spam"])
self.assertEquals([], arguments)
def test_should_parse_multiple_environments(self):
options, arguments = parse_options(["-E", "spam", "-E", "eggs"])
self.assert_options(options, environments=["spam", "eggs"])
self.assertEquals([], arguments)
def test_should_parse_empty_environments(self):
options, arguments = parse_options([])
self.assert_options(options, environments=[])
self.assertEquals([], arguments)
class LengthOfLongestStringTests(unittest.TestCase):
def test_should_return_zero_when_list_is_empty(self):
self.assertEqual(0, length_of_longest_string([]))
def test_should_return_one_when_list_contains_string_with_single_character(self):
self.assertEqual(1, length_of_longest_string(['a']))
def test_should_return_four_when_list_contains_egg_and_spam(self):
self.assertEqual(4, length_of_longest_string(['egg', 'spam']))
def test_should_return_four_when_list_contains_foo_bar_egg_and_spam(self):
self.assertEqual(
4, length_of_longest_string(['egg', 'spam', 'foo', 'bar']))
| apache-2.0 |
SurfasJones/icecream-info | icecream/lib/python2.7/site-packages/django/contrib/gis/tests/test_spatialrefsys.py | 104 | 4758 | from django.contrib.gis.gdal import HAS_GDAL
from django.contrib.gis.tests.utils import (no_mysql, oracle, postgis,
spatialite, HAS_SPATIALREFSYS, SpatialRefSys)
from django.utils import six
from django.utils import unittest
test_srs = ({'srid' : 4326,
'auth_name' : ('EPSG', True),
'auth_srid' : 4326,
# Only the beginning, because there are differences depending on installed libs
'srtext' : 'GEOGCS["WGS 84",DATUM["WGS_1984",SPHEROID["WGS 84"',
# +ellps=WGS84 has been removed in the 4326 proj string in proj-4.8
'proj4_re' : r'\+proj=longlat (\+ellps=WGS84 )?\+datum=WGS84 \+no_defs ',
'spheroid' : 'WGS 84', 'name' : 'WGS 84',
'geographic' : True, 'projected' : False, 'spatialite' : True,
'ellipsoid' : (6378137.0, 6356752.3, 298.257223563), # From proj's "cs2cs -le" and Wikipedia (semi-minor only)
'eprec' : (1, 1, 9),
},
{'srid' : 32140,
'auth_name' : ('EPSG', False),
'auth_srid' : 32140,
'srtext' : 'PROJCS["NAD83 / Texas South Central",GEOGCS["NAD83",DATUM["North_American_Datum_1983",SPHEROID["GRS 1980"',
'proj4_re' : r'\+proj=lcc \+lat_1=30.28333333333333 \+lat_2=28.38333333333333 \+lat_0=27.83333333333333 '
r'\+lon_0=-99 \+x_0=600000 \+y_0=4000000 (\+ellps=GRS80 )?'
r'(\+datum=NAD83 |\+towgs84=0,0,0,0,0,0,0 )?\+units=m \+no_defs ',
'spheroid' : 'GRS 1980', 'name' : 'NAD83 / Texas South Central',
'geographic' : False, 'projected' : True, 'spatialite' : False,
'ellipsoid' : (6378137.0, 6356752.31414, 298.257222101), # From proj's "cs2cs -le" and Wikipedia (semi-minor only)
'eprec' : (1, 5, 10),
},
)
@unittest.skipUnless(HAS_GDAL and HAS_SPATIALREFSYS,
"SpatialRefSysTest needs gdal support and a spatial database")
class SpatialRefSysTest(unittest.TestCase):
@no_mysql
def test01_retrieve(self):
"Testing retrieval of SpatialRefSys model objects."
for sd in test_srs:
srs = SpatialRefSys.objects.get(srid=sd['srid'])
self.assertEqual(sd['srid'], srs.srid)
# Some of the authority names are borked on Oracle, e.g., SRID=32140.
            # also, Oracle Spatial seems to add extraneous info to fields, hence
            # the testing with the 'startswith' flag.
auth_name, oracle_flag = sd['auth_name']
if postgis or (oracle and oracle_flag):
self.assertEqual(True, srs.auth_name.startswith(auth_name))
self.assertEqual(sd['auth_srid'], srs.auth_srid)
# No proj.4 and different srtext on oracle backends :(
if postgis:
self.assertTrue(srs.wkt.startswith(sd['srtext']))
six.assertRegex(self, srs.proj4text, sd['proj4_re'])
@no_mysql
def test02_osr(self):
"Testing getting OSR objects from SpatialRefSys model objects."
for sd in test_srs:
sr = SpatialRefSys.objects.get(srid=sd['srid'])
self.assertEqual(True, sr.spheroid.startswith(sd['spheroid']))
self.assertEqual(sd['geographic'], sr.geographic)
self.assertEqual(sd['projected'], sr.projected)
if not (spatialite and not sd['spatialite']):
# Can't get 'NAD83 / Texas South Central' from PROJ.4 string
# on SpatiaLite
self.assertEqual(True, sr.name.startswith(sd['name']))
# Testing the SpatialReference object directly.
if postgis or spatialite:
srs = sr.srs
six.assertRegex(self, srs.proj4, sd['proj4_re'])
# No `srtext` field in the `spatial_ref_sys` table in SpatiaLite
if not spatialite:
self.assertTrue(srs.wkt.startswith(sd['srtext']))
@no_mysql
def test03_ellipsoid(self):
"Testing the ellipsoid property."
for sd in test_srs:
# Getting the ellipsoid and precision parameters.
ellps1 = sd['ellipsoid']
prec = sd['eprec']
# Getting our spatial reference and its ellipsoid
srs = SpatialRefSys.objects.get(srid=sd['srid'])
ellps2 = srs.ellipsoid
            for i in range(3):
                self.assertAlmostEqual(ellps1[i], ellps2[i], prec[i])
def suite():
s = unittest.TestSuite()
s.addTest(unittest.makeSuite(SpatialRefSysTest))
return s
def run(verbosity=2):
unittest.TextTestRunner(verbosity=verbosity).run(suite())
| mit |
google-research/google-research | robust_loss/util.py | 1 | 5609 | # coding=utf-8
# Copyright 2021 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper functions."""
import numpy as np
import tensorflow.compat.v2 as tf
def log_safe(x):
"""The same as tf.math.log(x), but clamps the input to prevent NaNs."""
return tf.math.log(tf.minimum(x, tf.cast(3e37, x.dtype)))
def log1p_safe(x):
"""The same as tf.math.log1p(x), but clamps the input to prevent NaNs."""
return tf.math.log1p(tf.minimum(x, tf.cast(3e37, x.dtype)))
def exp_safe(x):
"""The same as tf.math.exp(x), but clamps the input to prevent NaNs."""
return tf.math.exp(tf.minimum(x, tf.cast(87.5, x.dtype)))
def expm1_safe(x):
"""The same as tf.math.expm1(x), but clamps the input to prevent NaNs."""
return tf.math.expm1(tf.minimum(x, tf.cast(87.5, x.dtype)))
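# Note on the clamp constants above: exp(87.5) ~= 1e38 sits just below the
# float32 maximum (~3.4e38), and 3e37 keeps log()/log1p() inputs finite, so
# the "safe" ops never overflow in float32.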
def inv_softplus(y):
"""The inverse of tf.nn.softplus()."""
return tf.where(y > 87.5, y, tf.math.log(tf.math.expm1(y)))
def logit(y):
"""The inverse of tf.nn.sigmoid()."""
return -tf.math.log(1. / y - 1.)
def affine_sigmoid(real, lo=0, hi=1):
"""Maps reals to (lo, hi), where 0 maps to (lo+hi)/2."""
if not lo < hi:
raise ValueError('`lo` (%g) must be < `hi` (%g)' % (lo, hi))
alpha = tf.sigmoid(real) * (hi - lo) + lo
return alpha
def inv_affine_sigmoid(alpha, lo=0, hi=1):
"""The inverse of affine_sigmoid(., lo, hi)."""
if not lo < hi:
raise ValueError('`lo` (%g) must be < `hi` (%g)' % (lo, hi))
real = logit((alpha - lo) / (hi - lo))
return real
def affine_softplus(real, lo=0, ref=1):
"""Maps real numbers to (lo, infinity), where 0 maps to ref."""
if not lo < ref:
raise ValueError('`lo` (%g) must be < `ref` (%g)' % (lo, ref))
shift = inv_softplus(tf.cast(1., real.dtype))
scale = (ref - lo) * tf.nn.softplus(real + shift) + lo
return scale
def inv_affine_softplus(scale, lo=0, ref=1):
"""The inverse of affine_softplus(., lo, ref)."""
if not lo < ref:
raise ValueError('`lo` (%g) must be < `ref` (%g)' % (lo, ref))
shift = inv_softplus(tf.cast(1., scale.dtype))
real = inv_softplus((scale - lo) / (ref - lo)) - shift
return real
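# Round-trip sketch: each affine_* function and its inv_* counterpart are
# mutual inverses, and by construction affine_sigmoid(0.) == (lo + hi) / 2
# and affine_softplus(0.) == ref. For example (up to float error):
# inv_affine_softplus(affine_softplus(tf.constant(0.3))) # ~= 0.3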
def students_t_nll(x, df, scale):
"""The NLL of a Generalized Student's T distribution (w/o including TFP)."""
return 0.5 * ((df + 1.) * tf.math.log1p(
(x / scale)**2. / df) + tf.math.log(df)) + tf.math.log(
tf.abs(scale)) + tf.math.lgamma(
0.5 * df) - tf.math.lgamma(0.5 * df + 0.5) + 0.5 * np.log(np.pi)
# A constant scale that makes tf.image.rgb_to_yuv() volume preserving.
_VOLUME_PRESERVING_YUV_SCALE = 1.580227820074
def rgb_to_syuv(rgb):
"""A volume preserving version of tf.image.rgb_to_yuv().
By "volume preserving" we mean that rgb_to_syuv() is in the "special linear
group", or equivalently, that the Jacobian determinant of the transformation
is 1.
Args:
rgb: A tensor whose last dimension corresponds to RGB channels and is of
size 3.
Returns:
A scaled YUV version of the input tensor, such that this transformation is
volume-preserving.
"""
return _VOLUME_PRESERVING_YUV_SCALE * tf.image.rgb_to_yuv(rgb)
def syuv_to_rgb(yuv):
"""A volume preserving version of tf.image.yuv_to_rgb().
By "volume preserving" we mean that rgb_to_syuv() is in the "special linear
group", or equivalently, that the Jacobian determinant of the transformation
is 1.
Args:
yuv: A tensor whose last dimension corresponds to scaled YUV channels and is
of size 3 (ie, the output of rgb_to_syuv()).
Returns:
An RGB version of the input tensor, such that this transformation is
volume-preserving.
"""
return tf.image.yuv_to_rgb(yuv / _VOLUME_PRESERVING_YUV_SCALE)
def image_dct(image):
"""Does a type-II DCT (aka "The DCT") on axes 1 and 2 of a rank-3 tensor."""
dct_y = tf.transpose(
a=tf.signal.dct(image, type=2, norm='ortho'), perm=[0, 2, 1])
dct_x = tf.transpose(
a=tf.signal.dct(dct_y, type=2, norm='ortho'), perm=[0, 2, 1])
return dct_x
def image_idct(dct_x):
"""Inverts image_dct(), by performing a type-III DCT."""
dct_y = tf.signal.idct(
tf.transpose(dct_x, perm=[0, 2, 1]), type=2, norm='ortho')
image = tf.signal.idct(
tf.transpose(dct_y, perm=[0, 2, 1]), type=2, norm='ortho')
return image
def compute_jacobian(f, x):
"""Computes the Jacobian of function `f` with respect to input `x`."""
x = tf.convert_to_tensor(x)
with tf.GradientTape(persistent=True) as tape:
tape.watch(x)
vec = lambda x: tf.reshape(x, [-1])
jacobian = tf.stack(
[vec(tape.gradient(vec(f(x))[d], x)) for d in range(tf.size(x))])
return jacobian
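# Illustrative helper (not in the original module): numerically verify the
# "volume preserving" claim above by checking that the Jacobian determinant
# of rgb_to_syuv() is ~1. Assumes TF2 eager execution.
def _check_syuv_volume_preserving():
  rgb = tf.random.uniform((1, 1, 3))
  jacobian = compute_jacobian(rgb_to_syuv, rgb)
  return np.linalg.det(jacobian)  # expected to be ~1.0 up to float error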
def get_resource_as_file(path):
"""A uniform interface for internal/open-source files."""
class NullContextManager(object):
def __init__(self, dummy_resource=None):
self.dummy_resource = dummy_resource
def __enter__(self):
return self.dummy_resource
def __exit__(self, *args):
pass
return NullContextManager('./' + path)
def get_resource_filename(path):
"""A uniform interface for internal/open-source filenames."""
return './' + path
| apache-2.0 |
alexandretrentesaux/aws_automation | otec_aws.py | 1 | 12192 | import boto3
import collections
class BranchOffice:
def __init__(self, **kwargs):
self.pubIp = kwargs.get('public_ip', '89.225.243.226')
self.label = kwargs.get('label', 'MNGT SITE 1')
self.bgpAsn = kwargs.get('asn', 65000)
self.cidr = kwargs.get('cidr', '192.168.1.0/26')
self.wanIp = kwargs.get('wan_ip', '10.0.1.101')
self.wanMask = kwargs.get('wan_mask', '255.255.255.0')
self.wanGw = kwargs.get('wan_gw', '10.0.1.1')
class OtecAws:
def __init__(self, **kwargs):
self.client = boto3.client('ec2')
self.debug = True
self.project = kwargs.get('project', 'OTEC')
self.bpId = kwargs.get('bpid', '1002')
self.cidr = kwargs.get('cidr', '10.102.0.0/16')
self.vpcId = None
self.defRtTblId = None
self.defAclId = None
self.defSgId = None
self.igwId = None
self.cgwId = None
self.vpnGwId = None
self.vpnConnId = None
def otec_aws_tags(self, my_obj_id, my_tags):
try:
tag = self.client.create_tags(
Resources=[my_obj_id],
Tags=my_tags
)
except:
if self.debug:
print('Object tagging failed: {}'.format(my_obj_id))
return 0
return 1
def otec_aws_find_vpc(self):
my_filter = [{'Name': 'tag-key', 'Values': ['Project']}, {'Name': 'tag-value', 'Values': [self.project]},
{'Name': 'tag-key', 'Values': ['BPID']}, {'Name': 'tag-value', 'Values': [self.bpId]}]
try:
my_vpc = self.client.describe_vpcs(Filters=my_filter)
except:
if self.debug:
print('Cannot find any VPC matching filters')
return
self.vpcId = my_vpc['Vpcs'][0]['VpcId']
def otec_aws_create_vpc(self):
try:
if self.debug:
print('Creating VPC with following CIDR: {}'.format(self.cidr))
my_vpc = self.client.create_vpc(CidrBlock=self.cidr)
except:
if self.debug:
print('VPC creation failed')
return
if self.debug:
print('VPC successfully created: {}'.format(my_vpc['Vpc']['VpcId']))
# TAG VPC
self.otec_aws_tags(my_vpc['Vpc']['VpcId'],
[{'Key': 'Project', 'Value': self.project}, {'Key': 'BPID', 'Value': self.bpId},
{'Key': 'Type', 'Value': 'B'}, {'Key': 'Name', 'Value': self.project + ' B' + self.bpId}])
self.vpcId = my_vpc['Vpc']['VpcId']
# TAG OBJECTS AUTOMATICALLY CREATED WITH VPC
self.otec_aws_tag_def_rt()
self.otec_aws_tag_def_acl()
self.otec_aws_tag_def_sg()
def otec_aws_tag_def_rt(self):
try:
my_rt_tbl = self.client.describe_route_tables(Filters=[{'Name': 'vpc-id', 'Values': [self.vpcId]}])
except:
if self.debug:
print('Routing Table not found for VPC: {}'.format(self.vpcId))
return None
self.otec_aws_tags(my_rt_tbl['RouteTables'][0]['RouteTableId'],
[{'Key': 'Project', 'Value': self.project}, {'Key': 'BPID', 'Value': self.bpId},
{'Key': 'Type', 'Value': 'B'}, {'Key': 'Name', 'Value': self.project + ' B' + self.bpId}])
self.defRtTblId = my_rt_tbl['RouteTables'][0]['RouteTableId']
def otec_aws_tag_def_acl(self):
try:
my_acl = self.client.describe_network_acls(Filters=[{'Name': 'vpc-id', 'Values': [self.vpcId]}])
except:
if self.debug:
print('Default ACL not found for VPC: {}'.format(self.vpcId))
return None
self.otec_aws_tags(my_acl['NetworkAcls'][0]['NetworkAclId'],
[{'Key': 'Project', 'Value': self.project}, {'Key': 'BPID', 'Value': self.bpId},
{'Key': 'Type', 'Value': 'B'}, {'Key': 'Name', 'Value': self.project + ' B' + self.bpId}])
self.defAclId = my_acl['NetworkAcls'][0]['NetworkAclId']
def otec_aws_tag_def_sg(self):
try:
my_sg = self.client.describe_security_groups(Filters=[{'Name': 'vpc-id', 'Values': [self.vpcId]}])
except:
if self.debug:
print('Default SG not found for VPC: {}'.format(self.vpcId))
return None
self.otec_aws_tags(my_sg['SecurityGroups'][0]['GroupId'],
[{'Key': 'Project', 'Value': self.project}, {'Key': 'BPID', 'Value': self.bpId},
{'Key': 'Type', 'Value': 'B'}, {'Key': 'Name', 'Value': self.project + ' B' + self.bpId}])
self.defSgId = my_sg['SecurityGroups'][0]['GroupId']
def otec_aws_create_igw(self):
try:
my_igw = self.client.create_internet_gateway()
except:
if self.debug:
                print('Internet Gateway creation failed')
            return None
self.otec_aws_tags(my_igw['InternetGateway']['InternetGatewayId'],
[{'Key': 'Project', 'Value': self.project}, {'Key': 'BPID', 'Value': self.bpId},
{'Key': 'Type', 'Value': 'B'}, {'Key': 'Name', 'Value': self.project + ' B' + self.bpId}])
try:
self.client.attach_internet_gateway(InternetGatewayId=my_igw['InternetGateway']['InternetGatewayId'],
VpcId=self.vpcId)
except:
if self.debug:
print('Failed to attach IGW {} to VPC {}'.format(my_igw['InternetGateway']['InternetGatewayId'], self.vpcId))
return None
self.igwId = my_igw['InternetGateway']['InternetGatewayId']
def otec_aws_create_nets(self, my_subnets): # Modify to generate subnets from cidr block
for subn, label in my_subnets.items():
try:
my_cur_subnet = self.client.create_subnet(CidrBlock=str(subn), VpcId=self.vpcId)
except:
if self.debug:
                    print('Subnet creation failed: {}'.format(subn))
                continue
tmp_zone = 'UNKNOWN'
for key, value in label.items():
if key == 'label':
tmp_zone = value
self.otec_aws_tags(my_cur_subnet['Subnet']['SubnetId'],
[{'Key': 'Project', 'Value': self.project}, {'Key': 'BPID', 'Value': self.bpId},
{'Key': 'Type', 'Value': 'B'}, {'Key': 'Zone', 'Value': tmp_zone},
{'Key': 'Name', 'Value': self.project + ' B' + self.bpId + ' ' + tmp_zone}])
try:
self.client.associate_route_table(SubnetId=my_cur_subnet['Subnet']['SubnetId'],
RouteTableId=self.defRtTblId)
except:
if self.debug:
print('Failed to associate Subnet {} with Route Table {}'.format(my_cur_subnet['Subnet']['SubnetId'], self.defRtTblId))
def otec_aws_create_cgw(self, branch_office):
try:
my_cgw = self.client.create_customer_gateway(PublicIp=branch_office.pubIp, BgpAsn=branch_office.bgpAsn,
Type='ipsec.1')
except:
if self.debug:
                print('Failed to create Customer Gateway')
return None
self.otec_aws_tags(my_cgw['CustomerGateway']['CustomerGatewayId'],
[{'Key': 'Project', 'Value': self.project}, {'Key': 'BPID', 'Value': self.bpId},
{'Key': 'Type', 'Value': 'B'},
{'Key': 'Name', 'Value': self.project + ' B' + self.bpId + ' ' + branch_office.label}])
self.cgwId = my_cgw['CustomerGateway']['CustomerGatewayId']
def otec_aws_create_vpn_gw(self, branch_office):
try:
my_vpn_gw = self.client.create_vpn_gateway(Type='ipsec.1')
except:
if self.debug:
                print('Failed to create VPN Gateway')
return None
self.otec_aws_tags(my_vpn_gw['VpnGateway']['VpnGatewayId'],
[{'Key': 'Project', 'Value': self.project}, {'Key': 'BPID', 'Value': self.bpId},
{'Key': 'Type', 'Value': 'B'},
{'Key': 'Name', 'Value': self.project + ' B' + self.bpId + ' ' + branch_office.label}])
try:
self.client.attach_vpn_gateway(VpnGatewayId=my_vpn_gw['VpnGateway']['VpnGatewayId'], VpcId=self.vpcId)
except:
if self.debug:
                print('VPN GW attachment failed: {} to VPC {}'.format(my_vpn_gw['VpnGateway']['VpnGatewayId'], self.vpcId))
            return None
self.vpnGwId = my_vpn_gw['VpnGateway']['VpnGatewayId']
def otec_aws_create_vpn_conn(self, branch_office):
try:
my_vpn_conn = self.client.create_vpn_connection(VpnGatewayId=self.vpnGwId, CustomerGatewayId=self.cgwId,
Type='ipsec.1')
except:
if self.debug:
                print('Failed to create VPN Connection')
return None
self.otec_aws_tags(my_vpn_conn['VpnConnection']['VpnConnectionId'],
[{'Key': 'Project', 'Value': self.project}, {'Key': 'BPID', 'Value': self.bpId},
{'Key': 'Type', 'Value': 'B'},
{'Key': 'Name', 'Value': self.project + ' B' + self.bpId + ' ' + branch_office.label}])
if self.debug:
print(
'Waiting VpnConnection {} : still pending state'.format(my_vpn_conn['VpnConnection']['VpnConnectionId']))
waiter = self.client.get_waiter('vpn_connection_available')
waiter.wait(VpnConnectionIds=[my_vpn_conn['VpnConnection']['VpnConnectionId']])
if self.debug:
print('VpnConnection {} is available'.format(my_vpn_conn['VpnConnection']['VpnConnectionId']))
self.vpnConnId = my_vpn_conn['VpnConnection']['VpnConnectionId']
def otec_aws_get_fg_vpn_config(self):
try:
            # boto3 expects a list of VPN connection ids here
            my_vpn_conn = self.client.describe_vpn_connections(
                VpnConnectionIds=[self.vpnConnId])
        except:
            if self.debug:
                print('VpnConnection not found: {}'.format(self.vpnConnId))
            return None
if self.debug:
print('Fortigate Config Description: {}'.format(
my_vpn_conn['VpnConnections'][0]['CustomerGatewayConfiguration']))
return my_vpn_conn['VpnConnections'][0]['CustomerGatewayConfiguration']
def otec_aws_add_branch_rt(self, branch_office):
try:
self.client.create_route(DestinationCidrBlock=branch_office.cidr, GatewayId=self.vpnGwId,
RouteTableId=self.defRtTblId)
except:
if self.debug:
print('Failed adding branch cidr to routing table: {}'.format(self.defRtTblId))
def otec_aws_amis_list(self):
my_filter = [{'Name': 'tag-key', 'Values': ['Project']}, {'Name': 'tag-value', 'Values': [self.project]}]
my_amis = self.client.describe_images(Filters=my_filter)
my_amis_list = {}
for elem in my_amis['Images']:
my_amis_list[elem['Name']] = elem['ImageId']
return collections.OrderedDict(sorted(my_amis_list.items()))
# class OtecBp(OtecAws):
# def __init__(self, **kwargs):
# self.project = kwargs.get('project', 'OTEC')
# self.bpId = kwargs.get('bpid', '1002')
# self.cidr= kwargs.get('cidr', '10.102.0.0/16')
# self.vpcId = None
# self.defRtTblId = None
# self.defAclId = None
# self.defSgId = None
# self.igwId = None
# self.vpnConn = None
#
#
# class OtecCustomer(OtecAws):
# def __init__(self, **kwargs):
# self.project = kwargs.get('project', 'OTEC')
# self.clid = kwargs.get('clid', '1007')
# self.bpId = kwargs.get('bpid', '1002')
# self.vpcId = None
# self.defRtTblId = None
# self.defAclId = None
# self.defSgId = None
# self.igwId = None
# self.cgwId = None
# self.vpnGwId = None
# self.vpnConn = None
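# Illustrative end-to-end driver (not in the original module); the values
# below are simply the class defaults, shown explicitly:
if __name__ == '__main__':
    branch = BranchOffice(public_ip='89.225.243.226', asn=65000,
                          cidr='192.168.1.0/26')
    otec = OtecAws(project='OTEC', bpid='1002', cidr='10.102.0.0/16')
    otec.otec_aws_create_vpc()
    otec.otec_aws_create_igw()
    otec.otec_aws_create_cgw(branch)
    otec.otec_aws_create_vpn_gw(branch)
    otec.otec_aws_create_vpn_conn(branch)
    otec.otec_aws_add_branch_rt(branch)
    print(otec.otec_aws_get_fg_vpn_config())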
| gpl-3.0 |
huwenboshi/hess | misc/local_rhog_manhattan.py | 1 | 7113 | #!/usr/bin/python
# (c) 2016-2021 Huwenbo Shi
import numpy as np
import pandas as pd
import argparse, math, sys
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.ticker as mtick
from matplotlib import gridspec
import matplotlib.ticker as ticker
from matplotlib.ticker import FormatStrFormatter
matplotlib.rcParams['pdf.fonttype'] = 42
matplotlib.rcParams['ps.fonttype'] = 42
matplotlib.rcParams['ytick.labelsize'] = 14
matplotlib.rc('font', family='sans-serif')
alpha = 0.05
def main():
# get command line input
args = get_command_line()
gcov = pd.read_table(args.local_rhog_file)
hsq1 = pd.read_table(args.local_hsqg_file[0])
hsq2 = pd.read_table(args.local_hsqg_file[1])
trait1, trait2 = args.trait_names
trait1 = trait1.upper()
trait2 = trait2.upper()
num_loci = hsq1.shape[0]
thres = alpha / num_loci
# get the correlation estimates
gcor = np.array([0.0]*num_loci)
pos_idx = (hsq1['local_h2g']>0) & (hsq2['local_h2g']>0)
pos_idx = np.where(pos_idx == True)[0]
denom = np.multiply(hsq1['local_h2g'][pos_idx], hsq2['local_h2g'][pos_idx])
denom = np.sqrt(denom)
gcor[pos_idx] = np.divide(gcov['local_rhog'][pos_idx], denom)
# create indices
index = gcov.index.values
even_chr_idx = index[np.where(gcov['chr']%2 == 0)]
odd_chr_idx = index[np.where(gcov['chr']%2 == 1)]
sig_even_chr_idx = index[np.where((gcov['chr']%2 == 0)&(gcov['p']<thres))]
sig_odd_chr_idx = index[np.where((gcov['chr']%2 == 1)&(gcov['p']<thres))]
# set up canvas
fig = plt.figure(figsize=(20, 6))
gs = gridspec.GridSpec(4, 1, height_ratios=[2, 4, 1.0, 1.0])
ax0 = plt.subplot(gs[0])
ax1 = plt.subplot(gs[1])
ax2 = plt.subplot(gs[2])
ax3 = plt.subplot(gs[3])
fig.tight_layout()
# make bar plot for gcov #################################################
ax1.bar(even_chr_idx, gcov['local_rhog'][even_chr_idx], 1,
color='whitesmoke', edgecolor='whitesmoke', linewidth=0.1)
ax1.bar(odd_chr_idx, gcov['local_rhog'][odd_chr_idx], 1,
color='gainsboro', edgecolor='gainsboro', linewidth=0.1)
# plot significant loci
ax1.bar(sig_even_chr_idx, gcov['local_rhog'][sig_even_chr_idx], 1,
color='r', edgecolor='r', linewidth=0.1)
ax1.bar(sig_odd_chr_idx, gcov['local_rhog'][sig_odd_chr_idx], 1,
color='b', edgecolor='b', linewidth=0.1)
# get tick location
tick_index = [0]*22
for i in xrange(22):
tick_index[i] = int(np.mean(index[np.where(gcov['chr']==(i+1))[0]]))
# modify ticks
ax1.set_xticks(tick_index)
    ax1.set_xticklabels(range(1, 23))  # one label per chromosome
ax1.tick_params(axis=u'both', which=u'both',length=0)
# tuning figure
ax1.set_xlim(0, num_loci)
ax1.set_ylabel(trait1+' & '+trait2, fontsize=18)
ax1.yaxis.set_label_coords(-0.04,1.02)
ax1.set_title("local genetic covariance", fontsize=18)
tick_int = (np.max(gcov['local_rhog'])-np.min(gcov['local_rhog']))/5.0
ax1.yaxis.set_major_locator(ticker.MultipleLocator(tick_int))
ax1.yaxis.set_major_formatter(ticker.FormatStrFormatter('%.4f'))
plt.ticklabel_format(style='sci', axis='y', scilimits=(0,0))
# make bar plot for gcov #################################################
# make bar plot for gcor ################################################
ax0.bar(even_chr_idx, gcor[even_chr_idx], 1, color='whitesmoke',
edgecolor='whitesmoke', linewidth=0.1)
ax0.bar(odd_chr_idx, gcor[odd_chr_idx], 1, color='gainsboro',
edgecolor='gainsboro', linewidth=0.1)
# plot significant loci
ax0.bar(sig_even_chr_idx, gcor[sig_even_chr_idx], 1, color='r',
edgecolor='r', linewidth=0.1)
ax0.bar(sig_odd_chr_idx, gcor[sig_odd_chr_idx], 1, color='b',
edgecolor='b', linewidth=0.1)
# modify ticks
ax0.set_xticks(tick_index)
    ax0.set_xticklabels(range(1, 23))
ax0.tick_params(axis=u'both', which=u'both',length=0)
# tuning figure
ax0.set_xlim(0, num_loci)
ax0.set_ylim(-1, 1)
ax0.set_title("local genetic correlation", fontsize=18)
tick_int = 3.0/3.0
ax0.yaxis.set_major_locator(ticker.MultipleLocator(tick_int))
ax0.yaxis.set_major_formatter(ticker.FormatStrFormatter('%.1f'))
plt.ticklabel_format(style='sci', axis='y', scilimits=(0,0))
# make bar plot for gcor #################################################
# make bar plot for h2g ##################################################
ax2.bar(odd_chr_idx, hsq1['local_h2g'][odd_chr_idx], 1, color='b',
edgecolor='b', linewidth=0.1)
ax2.bar(even_chr_idx, hsq1['local_h2g'][even_chr_idx], 1, color='r',
edgecolor='r', linewidth=0.1)
# modify ticks
ax2.set_xticks(tick_index)
    ax2.set_xticklabels(range(1, 23))
ax2.tick_params(axis=u'both', which=u'both',length=0)
# tuning figure
ax2.set_xlim(0, num_loci)
ax2.set_ylabel(trait1, fontsize=18)
ax2.set_ylim(0.0, 0.002)
ax2.yaxis.set_major_locator(ticker.MultipleLocator(0.001))
ax2.set_title("local SNP-heritability", fontsize=18)
plt.ticklabel_format(style='plain', axis='y', scilimits=(0,0))
# make bar plot for h2g ##################################################
# make bar plot for h2g ##################################################
ax3.bar(odd_chr_idx, hsq2['local_h2g'][odd_chr_idx], 1, color='b',
edgecolor='b', linewidth=0.1)
ax3.bar(even_chr_idx, hsq2['local_h2g'][even_chr_idx], 1, color='r',
edgecolor='r', linewidth=0.1)
# modify ticks
ax3.set_xticks(tick_index)
    ax3.set_xticklabels(range(1, 23))
ax3.tick_params(axis=u'both', which=u'both',length=0)
# tuning figure
ax3.set_xlim(0, num_loci)
ax3.set_ylabel(trait2, fontsize=18)
ax3.set_ylim(0.0, 0.002)
ax3.yaxis.set_major_locator(ticker.MultipleLocator(0.001))
plt.ticklabel_format(style='plain', axis='y', scilimits=(0,0))
# make bar plot for h2g ##################################################
# save figure ###########################################################
plt.gca().yaxis.set_major_formatter(FormatStrFormatter('%g'))
plt.savefig(args.out, bbox_inches='tight')
# get command line
def get_command_line():
parser = argparse.ArgumentParser(description='Make manhattan plot '\
'for local genetic covariance estimates')
parser.add_argument('--local-rhog-est', dest='local_rhog_file', type=str,
help='Local genetic covariance estimation results', required=True)
parser.add_argument('--local-hsqg-est', dest='local_hsqg_file', type=str,
help='Local SNP-heritability estimation results',
nargs=2, required=True)
parser.add_argument('--trait-names', dest='trait_names', nargs=2, type=str,
help='Names of traits', required=True)
parser.add_argument('--out', type=str, dest='out', required=True,
help='Output file name')
args = parser.parse_args()
return args
if(__name__ == '__main__'):
main()
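# Example invocation (input file names are placeholders):
# python local_rhog_manhattan.py --local-rhog-est gcov.txt \
#     --local-hsqg-est trait1_hsq.txt trait2_hsq.txt \
#     --trait-names BMI HEIGHT --out manhattan.pdf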
| gpl-3.0 |
DPaaS-Raksha/raksha | raksha/virt/libvirt/utils.py | 1 | 1681 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright (c) 2010 Citrix Systems, Inc.
# Copyright (c) 2011 Piston Cloud Computing, Inc
# Copyright (c) 2011 OpenStack Foundation
# (c) Copyright 2013 Hewlett-Packard Development Company, L.P.
# Copyright (c) 2013 TrilioData
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from oslo.config import cfg
from raksha import exception
from raksha.openstack.common import log as logging
from raksha import utils
from raksha.virt import images
libvirt_opts = [
]
CONF = cfg.CONF
CONF.register_opts(libvirt_opts)
LOG = logging.getLogger(__name__)
def get_disk_backing_file(path, basename=True):
"""Get the backing file of a disk image
:param path: Path to the disk image
:returns: a path to the image's backing store
"""
backing_file = images.qemu_img_info(path).backing_file
if backing_file and basename:
backing_file = os.path.basename(backing_file)
return backing_file
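# Illustrative call (hypothetical path): for a qcow2 overlay disk,
# get_disk_backing_file('/var/lib/nova/instances/<uuid>/disk') returns just
# the backing image's filename; pass basename=False for the full path.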
| apache-2.0 |
mdrumond/tensorflow | tensorflow/python/keras/_impl/keras/datasets/cifar10.py | 15 | 2118 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""CIFAR10 small image classification dataset.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import numpy as np
from tensorflow.python.keras._impl.keras import backend as K
from tensorflow.python.keras._impl.keras.datasets.cifar import load_batch
from tensorflow.python.keras._impl.keras.utils.data_utils import get_file
def load_data():
"""Loads CIFAR10 dataset.
Returns:
Tuple of Numpy arrays: `(x_train, y_train), (x_test, y_test)`.
"""
dirname = 'cifar-10-batches-py'
origin = 'http://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz'
path = get_file(dirname, origin=origin, untar=True)
num_train_samples = 50000
x_train = np.zeros((num_train_samples, 3, 32, 32), dtype='uint8')
y_train = np.zeros((num_train_samples,), dtype='uint8')
for i in range(1, 6):
fpath = os.path.join(path, 'data_batch_' + str(i))
data, labels = load_batch(fpath)
x_train[(i - 1) * 10000:i * 10000, :, :, :] = data
y_train[(i - 1) * 10000:i * 10000] = labels
fpath = os.path.join(path, 'test_batch')
x_test, y_test = load_batch(fpath)
y_train = np.reshape(y_train, (len(y_train), 1))
y_test = np.reshape(y_test, (len(y_test), 1))
if K.image_data_format() == 'channels_last':
x_train = x_train.transpose(0, 2, 3, 1)
x_test = x_test.transpose(0, 2, 3, 1)
return (x_train, y_train), (x_test, y_test)
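# Minimal usage sketch: the first call downloads and caches the CIFAR-10
# archive via get_file(); later calls reuse the cached copy.
# (x_train, y_train), (x_test, y_test) = load_data()
# x_train.shape == (50000, 32, 32, 3) under 'channels_last' ordering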
| apache-2.0 |
nextgenusfs/funannotate | funannotate/aux_scripts/phobius-multiproc.py | 1 | 3842 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import uuid
import time
import multiprocessing
import argparse
import shutil
import funannotate.library as lib
# setup menu with argparse
class MyFormatter(argparse.ArgumentDefaultsHelpFormatter):
def __init__(self, prog):
super(MyFormatter, self).__init__(prog, max_help_position=48)
parser = argparse.ArgumentParser(
prog='phobius-multiproc.py',
usage="%(prog)s [options] -i proteome.fasta",
description='''Script that runs phobius remotely.''',
epilog="""Written by Jon Palmer (2016) [email protected]""",
formatter_class=MyFormatter)
parser.add_argument('-i', '--input', required=True, help='whole proteome')
parser.add_argument('-o', '--out', required=True, help='Phobius results')
parser.add_argument('-e', '--email', help='Email address for IPRSCAN server')
parser.add_argument('-l', '--logfile',
default='phobius-multiproc.log', help='Logfile')
parser.add_argument('--debug', action='store_true',
help='Keep intermediate files')
args = parser.parse_args()
def runPhobiusRemote(Input):
base = Input.split('/')[-1]
base = base.split('.fa')[0]
OUTPATH = os.path.join(TMPDIR, base)
cmd = ['perl', os.path.join(parentdir, 'phobius-remote.pl'),
'--email', args.email, '-f', 'short', '--outfile', base, Input]
lib.runSubprocess(cmd, TMPDIR, lib.log)
time.sleep(1) # make sure there is time for all files to show up
os.rename(OUTPATH+'.out.txt', OUTPATH+'.phobius')
os.remove(OUTPATH+'.sequence.txt')
def runPhobiusLocal(Input):
base = Input.split('/')[-1]
base = base.split('.fa')[0]
OUTPATH = os.path.join(TMPDIR, base+'.phobius')
cmd = ['phobius.pl', '-short', Input]
lib.runSubprocess2(cmd, TMPDIR, lib.log, OUTPATH)
global parentdir
parentdir = os.path.join(os.path.dirname(__file__))
# create log file
log_name = args.logfile
if os.path.isfile(log_name):
os.remove(log_name)
# initialize script, log system info and cmd issue at runtime
lib.setupLogging(log_name)
FNULL = open(os.devnull, 'w')
cmd_args = " ".join(sys.argv)+'\n'
lib.log.debug(cmd_args)
# create tmpdir to store fasta files and output files
TMPDIR = 'phobius_' + str(uuid.uuid4())
# split fasta
lib.splitFASTA(args.input, TMPDIR)
# now get list of files in tmpdir
proteins = []
for file in os.listdir(TMPDIR):
if file.endswith('.fa'):
proteins.append(file)
# now run the script
if lib.which('phobius.pl'):
lib.runMultiProgress(runPhobiusLocal, proteins,
multiprocessing.cpu_count())
else:
lib.runMultiProgress(runPhobiusRemote, proteins,
29) # max is 30 jobs at a time
# collect all results
phobius = []
for file in os.listdir(TMPDIR):
if file.endswith('.phobius'):
phobius.append(os.path.join(TMPDIR, file))
# write output
TMdomain = 0
SigPep = 0
with open(args.out, 'w') as output:
output.write("%s\t%s\t%s\t%s\n" % ('ID', 'TM', 'SP', 'Prediction'))
for x in phobius:
with open(x, 'r') as input:
line = input.readlines()
try:
result = line[1].split(' ')
result = [x for x in result if x]
if result[1] == 'prediction':
continue
if int(result[1]) > 0:
TMdomain += 1
if result[2] == 'Y':
SigPep += 1
output.write("%s\t%s\t%s\t%s\n" % (
result[0], result[1],
result[2], result[3].replace('\n', '')))
except IndexError:
pass
# clean
if not args.debug:
shutil.rmtree(TMPDIR)
lib.log.debug("%i total proteins, %i TMdomain, %i Signal Peptide" %
(len(phobius), TMdomain, SigPep))
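# Example invocation (file names and email are placeholders):
# phobius-multiproc.py -i proteome.fasta -o phobius.results -e [email protected]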
| bsd-2-clause |
neharejanjeva/techstitution | venv/lib/python2.7/site-packages/pip/__init__.py | 328 | 11348 | #!/usr/bin/env python
from __future__ import absolute_import
import locale
import logging
import os
import optparse
import warnings
import sys
import re
# 2016-06-17 [email protected]: urllib3 1.14 added optional support for socks,
# but if invoked (i.e. imported), it will issue a warning to stderr if socks
# isn't available. requests unconditionally imports urllib3's socks contrib
# module, triggering this warning. The warning breaks DEP-8 tests (because of
# the stderr output) and is just plain annoying in normal usage. I don't want
# to add socks as yet another dependency for pip, nor do I want to allow-stder
# in the DEP-8 tests, so just suppress the warning. pdb tells me this has to
# be done before the import of pip.vcs.
from pip._vendor.requests.packages.urllib3.exceptions import DependencyWarning
warnings.filterwarnings("ignore", category=DependencyWarning) # noqa
from pip.exceptions import InstallationError, CommandError, PipError
from pip.utils import get_installed_distributions, get_prog
from pip.utils import deprecation, dist_is_editable
from pip.vcs import git, mercurial, subversion, bazaar # noqa
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.commands import get_summaries, get_similar_commands
from pip.commands import commands_dict
from pip._vendor.requests.packages.urllib3.exceptions import (
InsecureRequestWarning,
)
# assignment for flake8 to be happy
# This fixes a peculiarity when importing via __import__ - as we are
# initialising the pip module, "from pip import cmdoptions" is recursive
# and appears not to work properly in that situation.
import pip.cmdoptions
cmdoptions = pip.cmdoptions
# The version as used in the setup.py and the docs conf.py
__version__ = "9.0.1"
logger = logging.getLogger(__name__)
# Hide the InsecureRequestWarning from urllib3
warnings.filterwarnings("ignore", category=InsecureRequestWarning)
def autocomplete():
"""Command and option completion for the main option parser (and options)
and its subcommands (and options).
Enable by sourcing one of the completion shell scripts (bash, zsh or fish).
"""
# Don't complete if user hasn't sourced bash_completion file.
if 'PIP_AUTO_COMPLETE' not in os.environ:
return
cwords = os.environ['COMP_WORDS'].split()[1:]
cword = int(os.environ['COMP_CWORD'])
try:
current = cwords[cword - 1]
except IndexError:
current = ''
subcommands = [cmd for cmd, summary in get_summaries()]
options = []
# subcommand
try:
subcommand_name = [w for w in cwords if w in subcommands][0]
except IndexError:
subcommand_name = None
parser = create_main_parser()
# subcommand options
if subcommand_name:
# special case: 'help' subcommand has no options
if subcommand_name == 'help':
sys.exit(1)
# special case: list locally installed dists for uninstall command
if subcommand_name == 'uninstall' and not current.startswith('-'):
installed = []
lc = current.lower()
for dist in get_installed_distributions(local_only=True):
if dist.key.startswith(lc) and dist.key not in cwords[1:]:
installed.append(dist.key)
# if there are no dists installed, fall back to option completion
if installed:
for dist in installed:
print(dist)
sys.exit(1)
subcommand = commands_dict[subcommand_name]()
options += [(opt.get_opt_string(), opt.nargs)
for opt in subcommand.parser.option_list_all
if opt.help != optparse.SUPPRESS_HELP]
# filter out previously specified options from available options
prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
options = [(x, v) for (x, v) in options if x not in prev_opts]
# filter options by current input
options = [(k, v) for k, v in options if k.startswith(current)]
for option in options:
opt_label = option[0]
# append '=' to options which require args
if option[1]:
opt_label += '='
print(opt_label)
else:
# show main parser options only when necessary
if current.startswith('-') or current.startswith('--'):
opts = [i.option_list for i in parser.option_groups]
opts.append(parser.option_list)
opts = (o for it in opts for o in it)
subcommands += [i.get_opt_string() for i in opts
if i.help != optparse.SUPPRESS_HELP]
print(' '.join([x for x in subcommands if x.startswith(current)]))
sys.exit(1)
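# Illustrative sketch: the shell completion scripts invoke pip with the
# environment variables read above. Simulating "pip ins<TAB>" (names taken
# from the code above; shown commented out because autocomplete() exits):
#
#   os.environ['PIP_AUTO_COMPLETE'] = '1'
#   os.environ['COMP_WORDS'] = 'pip ins'
#   os.environ['COMP_CWORD'] = '1'
#   autocomplete()   # prints subcommands starting with "ins" (e.g. install)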
def create_main_parser():
parser_kw = {
'usage': '\n%prog <command> [options]',
'add_help_option': False,
'formatter': UpdatingDefaultsHelpFormatter(),
'name': 'global',
'prog': get_prog(),
}
parser = ConfigOptionParser(**parser_kw)
parser.disable_interspersed_args()
pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
parser.version = 'pip %s from %s (python %s)' % (
__version__, pip_pkg_dir, sys.version[:3])
# add the general options
gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
parser.add_option_group(gen_opts)
parser.main = True # so the help formatter knows
# create command listing for description
command_summaries = get_summaries()
description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries]
parser.description = '\n'.join(description)
return parser
def parseopts(args):
parser = create_main_parser()
# Note: parser calls disable_interspersed_args(), so the result of this
# call is to split the initial args into the general options before the
# subcommand and everything else.
# For example:
# args: ['--timeout=5', 'install', '--user', 'INITools']
    # general_options: ['--timeout=5']
# args_else: ['install', '--user', 'INITools']
general_options, args_else = parser.parse_args(args)
# --version
if general_options.version:
sys.stdout.write(parser.version)
sys.stdout.write(os.linesep)
sys.exit()
# pip || pip help -> print_help()
if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
parser.print_help()
sys.exit()
# the subcommand name
cmd_name = args_else[0]
if cmd_name not in commands_dict:
guess = get_similar_commands(cmd_name)
msg = ['unknown command "%s"' % cmd_name]
if guess:
msg.append('maybe you meant "%s"' % guess)
raise CommandError(' - '.join(msg))
# all the args without the subcommand
cmd_args = args[:]
cmd_args.remove(cmd_name)
return cmd_name, cmd_args
def check_isolated(args):
isolated = False
if "--isolated" in args:
isolated = True
return isolated
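# Illustrative sketch of how parseopts() splits argv (mirrors the worked
# example in the comments above):
#
#   cmd_name, cmd_args = parseopts(['--timeout=5', 'install', '--user', 'pkg'])
#   # cmd_name == 'install'
#   # cmd_args == ['--timeout=5', '--user', 'pkg']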
def main(args=None):
if args is None:
args = sys.argv[1:]
# Configure our deprecation warnings to be sent through loggers
deprecation.install_warning_logger()
autocomplete()
try:
cmd_name, cmd_args = parseopts(args)
except PipError as exc:
sys.stderr.write("ERROR: %s" % exc)
sys.stderr.write(os.linesep)
sys.exit(1)
# Needed for locale.getpreferredencoding(False) to work
# in pip.utils.encoding.auto_decode
try:
locale.setlocale(locale.LC_ALL, '')
except locale.Error as e:
        # setlocale can apparently crash if locales are uninitialized
logger.debug("Ignoring error %s when setting locale", e)
command = commands_dict[cmd_name](isolated=check_isolated(cmd_args))
return command.main(cmd_args)
# ###########################################################
# # Writing freeze files
class FrozenRequirement(object):
def __init__(self, name, req, editable, comments=()):
self.name = name
self.req = req
self.editable = editable
self.comments = comments
_rev_re = re.compile(r'-r(\d+)$')
_date_re = re.compile(r'-(20\d\d\d\d\d\d)$')
@classmethod
def from_dist(cls, dist, dependency_links):
location = os.path.normcase(os.path.abspath(dist.location))
comments = []
from pip.vcs import vcs, get_src_requirement
if dist_is_editable(dist) and vcs.get_backend_name(location):
editable = True
try:
req = get_src_requirement(dist, location)
except InstallationError as exc:
logger.warning(
"Error when trying to get requirement for VCS system %s, "
"falling back to uneditable format", exc
)
req = None
if req is None:
logger.warning(
'Could not determine repository location of %s', location
)
comments.append(
'## !! Could not determine repository location'
)
req = dist.as_requirement()
editable = False
else:
editable = False
req = dist.as_requirement()
specs = req.specs
assert len(specs) == 1 and specs[0][0] in ["==", "==="], \
'Expected 1 spec with == or ===; specs = %r; dist = %r' % \
(specs, dist)
version = specs[0][1]
ver_match = cls._rev_re.search(version)
date_match = cls._date_re.search(version)
if ver_match or date_match:
svn_backend = vcs.get_backend('svn')
if svn_backend:
svn_location = svn_backend().get_location(
dist,
dependency_links,
)
if not svn_location:
logger.warning(
'Warning: cannot find svn location for %s', req)
comments.append(
'## FIXME: could not find svn URL in dependency_links '
'for this package:'
)
else:
comments.append(
'# Installing as editable to satisfy requirement %s:' %
req
)
if ver_match:
rev = ver_match.group(1)
else:
rev = '{%s}' % date_match.group(1)
editable = True
req = '%s@%s#egg=%s' % (
svn_location,
rev,
cls.egg_name(dist)
)
return cls(dist.project_name, req, editable, comments)
@staticmethod
def egg_name(dist):
name = dist.egg_name()
match = re.search(r'-py\d\.\d$', name)
if match:
name = name[:match.start()]
return name
def __str__(self):
req = self.req
if self.editable:
req = '-e %s' % req
return '\n'.join(list(self.comments) + [str(req)]) + '\n'
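# Illustrative sketch of the freeze-file output produced by __str__ above
# (requirement string and comment are hypothetical values):
#
#   fr = FrozenRequirement('Foo', 'svn+http://example.org/repo@1234#egg=Foo',
#                          editable=True,
#                          comments=['## FIXME: could not find svn URL in '
#                                    'dependency_links for this package:'])
#   str(fr)
#   # '## FIXME: ... for this package:\n'
#   # '-e svn+http://example.org/repo@1234#egg=Foo\n'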
if __name__ == '__main__':
sys.exit(main())
| cc0-1.0 |
praaline/Praaline | dependency-builds/sv/src/sord-0.12.0/.waf-1.7.9-070f761e9f6fd5d4df451bcef126793c/waflib/Tools/xlc.py | 330 | 1175 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
from waflib.Tools import ccroot,ar
from waflib.Configure import conf
@conf
def find_xlc(conf):
cc=conf.find_program(['xlc_r','xlc'],var='CC')
cc=conf.cmd_to_list(cc)
conf.get_xlc_version(cc)
conf.env.CC_NAME='xlc'
conf.env.CC=cc
@conf
def xlc_common_flags(conf):
v=conf.env
v['CC_SRC_F']=[]
v['CC_TGT_F']=['-c','-o']
if not v['LINK_CC']:v['LINK_CC']=v['CC']
v['CCLNK_SRC_F']=[]
v['CCLNK_TGT_F']=['-o']
v['CPPPATH_ST']='-I%s'
v['DEFINES_ST']='-D%s'
v['LIB_ST']='-l%s'
v['LIBPATH_ST']='-L%s'
v['STLIB_ST']='-l%s'
v['STLIBPATH_ST']='-L%s'
v['RPATH_ST']='-Wl,-rpath,%s'
v['SONAME_ST']=[]
v['SHLIB_MARKER']=[]
v['STLIB_MARKER']=[]
v['LINKFLAGS_cprogram']=['-Wl,-brtl']
v['cprogram_PATTERN']='%s'
v['CFLAGS_cshlib']=['-fPIC']
v['LINKFLAGS_cshlib']=['-G','-Wl,-brtl,-bexpfull']
v['cshlib_PATTERN']='lib%s.so'
v['LINKFLAGS_cstlib']=[]
v['cstlib_PATTERN']='lib%s.a'
def configure(conf):
conf.find_xlc()
conf.find_ar()
conf.xlc_common_flags()
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
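# Minimal usage sketch (hypothetical wscript; waf's standard tool mechanism
# loads this module and runs the configure() above):
#
#   def configure(conf):
#       conf.load('xlc')
#       conf.check_cc(fragment='int main(void){return 0;}\n')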
| gpl-3.0 |
viaict/viaduct | migrations/versions/2018_07_14_42930577deff_new_table_structure_for_authlib.py | 1 | 11895 | """New table structure for authlib.
Revision ID: 42930577deff
Revises: c0687b086ee3
Create Date: 2018-07-14 11:09:54.346801
"""
import sqlalchemy as sa
from alembic import op
from sqlalchemy.dialects import postgresql
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker, relationship
# revision identifiers, used by Alembic.
revision = '42930577deff'
down_revision = 'c0687b086ee3'
Base = declarative_base()
db = sa
db.Model = Base
db.relationship = relationship
def create_session():
connection = op.get_bind()
session_maker = sa.orm.sessionmaker()
session = session_maker(bind=connection)
db.session = session
def upgrade():
create_session()
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
'oauth_authorization_code',
sa.Column('code', sa.String(length=120), nullable=False),
sa.Column('client_id', sa.String(length=48),
nullable=True),
sa.Column('redirect_uri', sa.Text(), nullable=True),
sa.Column('response_type', sa.Text(), nullable=True),
sa.Column('scope', sa.Text(), nullable=True),
sa.Column('auth_time', sa.Integer(), nullable=False),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('user_id', sa.Integer(), nullable=True),
sa.ForeignKeyConstraint(
['user_id'], ['user.id'], name=op.f(
'fk_oauth_authorization_code_user_id_user'),
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name=op.f(
'pk_oauth_authorization_code')),
sa.UniqueConstraint('code', name=op.f(
'uq_oauth_authorization_code_code'))
)
op.drop_table('oauth_token_scope')
op.drop_table('oauth_client_scope')
op.drop_table('oauth_grant_scope')
op.drop_table('oauth_grant')
op.drop_table('oauth_client_redirect')
op.add_column('oauth_client',
sa.Column('client_name', sa.String(length=100),
nullable=True))
op.add_column('oauth_client',
sa.Column('client_uri', sa.Text(), nullable=True))
op.add_column('oauth_client',
sa.Column('contact', sa.Text(), nullable=True))
op.add_column('oauth_client',
sa.Column('expires_at', sa.Integer(), nullable=False,
server_default='0'))
op.add_column('oauth_client',
sa.Column('grant_type', sa.Text(), nullable=False,
server_default=''))
op.add_column('oauth_client',
sa.Column('i18n_metadata', sa.Text(), nullable=True))
op.add_column('oauth_client',
sa.Column('issued_at', sa.Integer(), nullable=False,
server_default='0'))
op.add_column('oauth_client',
sa.Column('jwks_text', sa.Text(), nullable=True))
op.add_column('oauth_client',
sa.Column('jwks_uri', sa.Text(), nullable=True))
op.add_column('oauth_client',
sa.Column('logo_uri', sa.Text(), nullable=True))
op.add_column('oauth_client',
sa.Column('policy_uri', sa.Text(), nullable=True))
op.add_column('oauth_client',
sa.Column('redirect_uri', sa.Text(), nullable=False,
server_default=''))
op.add_column('oauth_client',
sa.Column('response_type', sa.Text(), nullable=False,
server_default=''))
op.add_column('oauth_client',
sa.Column('scope', sa.Text(), nullable=False,
server_default=''))
op.add_column('oauth_client',
sa.Column('software_id', sa.String(length=36),
nullable=True))
op.add_column('oauth_client',
sa.Column('software_version', sa.String(length=48),
nullable=True))
op.add_column('oauth_client',
sa.Column('token_endpoint_auth_method', sa.String(length=48),
nullable=True))
op.add_column('oauth_client',
sa.Column('tos_uri', sa.Text(), nullable=True))
op.drop_index('ix_oauth_client_client_secret', table_name='oauth_client')
op.drop_column('oauth_client', 'confidential')
op.drop_column('oauth_client', 'name')
op.drop_column('oauth_client', 'description')
op.add_column('oauth_token',
sa.Column('expires_in', sa.Integer(), nullable=False,
server_default='0'))
op.add_column('oauth_token',
sa.Column('issued_at', sa.Integer(), nullable=False,
server_default='0'))
op.add_column('oauth_token',
sa.Column('revoked', sa.Boolean(), nullable=True))
op.add_column('oauth_token', sa.Column('scope', sa.Text(), nullable=True))
op.alter_column('oauth_token', 'access_token',
existing_type=sa.VARCHAR(length=255),
nullable=False, server_default='')
op.alter_column('oauth_token', 'client_id',
existing_type=sa.VARCHAR(length=64),
nullable=True)
op.create_index(op.f('ix_oauth_token_refresh_token'), 'oauth_token',
['refresh_token'], unique=False)
# op.drop_constraint('uq_oauth_token_refresh_token', 'oauth_token',
# type_='unique')
op.drop_constraint('fk_oauth_token_client_id_oauth_client', 'oauth_token',
type_='foreignkey')
op.drop_column('oauth_token', 'expires')
# ### end Alembic commands ###
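# Applying or reverting this revision (illustrative; the exact invocation
# depends on how the project wraps Alembic, e.g. via Flask-Migrate):
#
#   alembic upgrade 42930577deff     # runs upgrade() above
#   alembic downgrade c0687b086ee3   # runs downgrade() below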
def downgrade():
create_session()
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('oauth_token', sa.Column('expires', postgresql.TIMESTAMP(),
autoincrement=False, nullable=True))
op.create_foreign_key('fk_oauth_token_client_id_oauth_client',
'oauth_token', 'oauth_client', ['client_id'],
['client_id'], ondelete='CASCADE')
op.create_unique_constraint('uq_oauth_token_refresh_token', 'oauth_token',
['refresh_token'])
op.drop_index(op.f('ix_oauth_token_refresh_token'),
table_name='oauth_token')
op.alter_column('oauth_token', 'client_id',
existing_type=sa.VARCHAR(length=64),
nullable=False)
op.alter_column('oauth_token', 'access_token',
existing_type=sa.VARCHAR(length=255),
nullable=True)
op.drop_column('oauth_token', 'scope')
op.drop_column('oauth_token', 'revoked')
op.drop_column('oauth_token', 'issued_at')
op.drop_column('oauth_token', 'expires_in')
op.add_column('oauth_client',
sa.Column('description', sa.VARCHAR(length=512),
autoincrement=False, nullable=True))
op.add_column('oauth_client',
sa.Column('name', sa.VARCHAR(length=64), autoincrement=False,
nullable=True))
op.add_column('oauth_client',
sa.Column('confidential', sa.BOOLEAN(), autoincrement=False,
nullable=True))
op.create_index('ix_oauth_client_client_secret', 'oauth_client',
['client_secret'], unique=True)
op.drop_column('oauth_client', 'tos_uri')
op.drop_column('oauth_client', 'token_endpoint_auth_method')
op.drop_column('oauth_client', 'software_version')
op.drop_column('oauth_client', 'software_id')
op.drop_column('oauth_client', 'scope')
op.drop_column('oauth_client', 'response_type')
op.drop_column('oauth_client', 'redirect_uri')
op.drop_column('oauth_client', 'policy_uri')
op.drop_column('oauth_client', 'logo_uri')
op.drop_column('oauth_client', 'jwks_uri')
op.drop_column('oauth_client', 'jwks_text')
op.drop_column('oauth_client', 'issued_at')
op.drop_column('oauth_client', 'i18n_metadata')
op.drop_column('oauth_client', 'grant_type')
op.drop_column('oauth_client', 'expires_at')
op.drop_column('oauth_client', 'contact')
op.drop_column('oauth_client', 'client_uri')
op.drop_column('oauth_client', 'client_name')
op.create_table(
'oauth_client_redirect',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('client_id', sa.VARCHAR(length=64),
autoincrement=False, nullable=True),
sa.Column('redirect_uri', sa.VARCHAR(length=256),
autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(
['client_id'], ['oauth_client.client_id'],
name='fk_oauth_client_redirect_client_id_oauth_client',
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id',
name='pk_oauth_client_redirect')
)
op.create_table(
'oauth_grant_scope',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('client_id', sa.INTEGER(), autoincrement=False,
nullable=True),
sa.Column('scope', sa.VARCHAR(length=256),
autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(
['client_id'], ['oauth_grant.id'],
name='fk_oauth_grant_scope_client_id_oauth_grant',
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='pk_oauth_grant_scope')
)
op.create_table(
'oauth_client_scope',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('client_id', sa.VARCHAR(length=64),
autoincrement=False, nullable=True),
sa.Column('scope', sa.VARCHAR(length=256),
autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(
['client_id'], ['oauth_client.client_id'],
name='fk_oauth_client_scope_client_id_oauth_client',
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='pk_oauth_client_scope')
)
op.create_table(
'oauth_token_scope',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('token_id', sa.INTEGER(), autoincrement=False,
nullable=True),
sa.Column('scope', sa.VARCHAR(length=256),
autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(
['token_id'], ['oauth_token.id'],
name='fk_oauth_token_scope_token_id_oauth_token',
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='pk_oauth_token_scope')
)
op.create_table(
'oauth_grant',
sa.Column('id', sa.INTEGER(), nullable=False),
sa.Column('user_id', sa.INTEGER(), autoincrement=False,
nullable=True),
sa.Column('client_id', sa.VARCHAR(length=64),
autoincrement=False, nullable=False),
sa.Column('code', sa.VARCHAR(length=255),
autoincrement=False, nullable=False),
sa.Column('redirect_uri', sa.VARCHAR(length=255),
autoincrement=False, nullable=True),
sa.Column('expires', postgresql.TIMESTAMP(),
autoincrement=False, nullable=True),
sa.ForeignKeyConstraint(['client_id'],
['oauth_client.client_id'],
name='fk_oauth_grant_client_id_oauth_client',
ondelete='CASCADE'),
sa.ForeignKeyConstraint(['user_id'], ['user.id'],
name='fk_oauth_grant_user_id_user',
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', name='pk_oauth_grant')
)
op.drop_table('oauth_authorization_code')
# ### end Alembic commands ###
# vim: ft=python
| mit |
UCL-INGI/INGInious | inginious/frontend/pages/api/authentication.py | 1 | 1615 | # -*- coding: utf-8 -*-
#
# This file is part of INGInious. See the LICENSE and the COPYRIGHTS files for
# more information about the licensing of this file.
""" Authentication """
import flask
from inginious.frontend.pages.api._api_page import APIPage, APIInvalidArguments
class APIAuthentication(APIPage):
"""
Endpoint /api/v0/authentication
"""
def API_GET(self): # pylint: disable=arguments-differ
"""
Returns {"authenticated": false} or {"authenticated": true, "username": "your_username"} (always 200 OK)
"""
if self.user_manager.session_logged_in():
return 200, {"authenticated": True, "username": self.user_manager.session_username()}
else:
return 200, {"authenticated": False}
def API_POST(self): # pylint: disable=arguments-differ
"""
Authenticates the remote client. Takes as input:
login
the INGInious account login
password
the associated password
Response: a dict in the form {"status": "success"} (200 OK) or {"status": "error"} (403 Forbidden)
"""
user_input = flask.request.form
if "login" not in user_input or "password" not in user_input:
raise APIInvalidArguments()
try:
if self.user_manager.auth_user(user_input["login"].strip(), user_input["password"]) is not None:
return 200, {"status": "success", "username": self.user_manager.session_username()}
        except Exception:
pass
return 403, {"status": "error"}
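# Illustrative client-side use of this endpoint (host and credentials are
# hypothetical; response shapes come from the docstrings above):
#
#   import requests
#   r = requests.post('https://inginious.example.org/api/v0/authentication',
#                     data={'login': 'jdoe', 'password': 'secret'})
#   # 200 -> {"status": "success", "username": "jdoe"}
#   # 403 -> {"status": "error"}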
| agpl-3.0 |
tempbottle/servo | tests/wpt/css-tests/tools/html5lib/html5lib/treeadapters/sax.py | 1835 | 1661 | from __future__ import absolute_import, division, unicode_literals
from xml.sax.xmlreader import AttributesNSImpl
from ..constants import adjustForeignAttributes, unadjustForeignAttributes
prefix_mapping = {}
for prefix, localName, namespace in adjustForeignAttributes.values():
if prefix is not None:
prefix_mapping[prefix] = namespace
def to_sax(walker, handler):
"""Call SAX-like content handler based on treewalker walker"""
handler.startDocument()
for prefix, namespace in prefix_mapping.items():
handler.startPrefixMapping(prefix, namespace)
for token in walker:
type = token["type"]
if type == "Doctype":
continue
elif type in ("StartTag", "EmptyTag"):
attrs = AttributesNSImpl(token["data"],
unadjustForeignAttributes)
handler.startElementNS((token["namespace"], token["name"]),
token["name"],
attrs)
if type == "EmptyTag":
handler.endElementNS((token["namespace"], token["name"]),
token["name"])
elif type == "EndTag":
handler.endElementNS((token["namespace"], token["name"]),
token["name"])
elif type in ("Characters", "SpaceCharacters"):
handler.characters(token["data"])
elif type == "Comment":
pass
else:
assert False, "Unknown token type"
for prefix, namespace in prefix_mapping.items():
handler.endPrefixMapping(prefix)
handler.endDocument()
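# Usage sketch (assumes html5lib's parse/getTreeWalker API; the base
# xml.sax ContentHandler simply ignores the events it receives):
#
#   import html5lib
#   from xml.sax.handler import ContentHandler
#   tree = html5lib.parse('<p>hello</p>')
#   walker = html5lib.getTreeWalker('etree')
#   to_sax(walker(tree), ContentHandler())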
| mpl-2.0 |
ktaneishi/deepchem | examples/delaney/delaney_textcnn.py | 2 | 1177 | """
Script that trains TextCNN models on the Delaney solubility dataset.
"""
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import numpy as np
np.random.seed(123)
import tensorflow as tf
tf.set_random_seed(123)
import deepchem as dc
# Load Delaney dataset
delaney_tasks, delaney_datasets, transformers = dc.molnet.load_delaney(
featurizer='Raw', split='index')
train_dataset, valid_dataset, test_dataset = delaney_datasets
# Fit models
metric = dc.metrics.Metric(dc.metrics.pearson_r2_score, np.mean)
char_dict, length = dc.models.TextCNNModel.build_char_dict(train_dataset)
# Batch size of models
batch_size = 64
model = dc.models.TextCNNModel(
len(delaney_tasks),
char_dict,
seq_length=length,
mode='regression',
learning_rate=1e-3,
batch_size=batch_size,
use_queue=False)
# Fit trained model
model.fit(train_dataset, nb_epoch=100)
print("Evaluating model")
train_scores = model.evaluate(train_dataset, [metric], transformers)
valid_scores = model.evaluate(valid_dataset, [metric], transformers)
print("Train scores")
print(train_scores)
print("Validation scores")
print(valid_scores)
| mit |
googleads/google-ads-python | google/ads/googleads/v7/enums/types/user_list_access_status.py | 1 | 1155 | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v7.enums",
marshal="google.ads.googleads.v7",
manifest={"UserListAccessStatusEnum",},
)
class UserListAccessStatusEnum(proto.Message):
r"""Indicates if this client still has access to the list. """
class UserListAccessStatus(proto.Enum):
r"""Enum containing possible user list access statuses."""
UNSPECIFIED = 0
UNKNOWN = 1
ENABLED = 2
DISABLED = 3
__all__ = tuple(sorted(__protobuf__.manifest))
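# Usage sketch (value names from the enum above; proto-plus enums behave
# like standard Python IntEnums):
#
#   status = UserListAccessStatusEnum.UserListAccessStatus.ENABLED
#   status.name    # 'ENABLED'
#   status.value   # 2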
| apache-2.0 |
veloutin/python-flaskext.babel | setup.py | 12 | 1313 | """
Flask-Babel
-----------
Adds i18n/l10n support to Flask applications with the help of the
`Babel`_ library.
Links
`````
* `documentation <http://packages.python.org/Flask-Babel>`_
* `development version
<http://github.com/mitsuhiko/flask-babel/zipball/master#egg=Flask-Babel-dev>`_
.. _Babel: http://babel.edgewall.org/
"""
from setuptools import setup
setup(
name='Flask-Babel',
version='0.8',
url='http://github.com/mitsuhiko/flask-babel',
license='BSD',
author='Armin Ronacher',
author_email='[email protected]',
description='Adds i18n/l10n support to Flask applications',
long_description=__doc__,
packages=['flaskext'],
namespace_packages=['flaskext'],
zip_safe=False,
platforms='any',
install_requires=[
'Flask',
'Babel',
'pytz',
'speaklater>=1.2',
'Jinja2>=2.5'
],
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Internet :: WWW/HTTP :: Dynamic Content',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
| bsd-3-clause |
adsorensen/girder | girder/api/v1/group.py | 1 | 16668 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright 2013 Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
from ..describe import Description, autoDescribeRoute
from ..rest import Resource, filtermodel
from girder.api import access
from girder.constants import AccessType, SettingKey
from girder.models.model_base import AccessException
from girder.utility import mail_utils
from girder.utility.model_importer import ModelImporter
class Group(Resource):
"""API Endpoint for groups."""
def __init__(self):
super(Group, self).__init__()
self.resourceName = 'group'
self.route('DELETE', (':id',), self.deleteGroup)
self.route('DELETE', (':id', 'member'), self.removeFromGroup)
self.route('DELETE', (':id', 'moderator'), self.demote)
self.route('DELETE', (':id', 'admin'), self.demote)
self.route('GET', (), self.find)
self.route('GET', (':id',), self.getGroup)
self.route('GET', (':id', 'access'), self.getGroupAccess)
self.route('GET', (':id', 'invitation'), self.getGroupInvitations)
self.route('GET', (':id', 'member'), self.listMembers)
self.route('POST', (), self.createGroup)
self.route('POST', (':id', 'invitation'), self.inviteToGroup)
self.route('POST', (':id', 'member'), self.joinGroup)
self.route('POST', (':id', 'moderator'), self.promoteToModerator)
self.route('POST', (':id', 'admin'), self.promoteToAdmin)
self.route('PUT', (':id',), self.updateGroup)
@access.public
@filtermodel(model='group')
@autoDescribeRoute(
Description('Search for groups or list all groups.')
.param('text', 'Pass this to perform a full-text search for groups.', required=False)
.param('exact', 'If true, only return exact name matches. This is '
'case sensitive.', required=False, dataType='boolean', default=False)
.pagingParams(defaultSort='name')
.errorResponse()
)
def find(self, text, exact, limit, offset, sort):
user = self.getCurrentUser()
if text is not None:
if exact:
groupList = self.model('group').find(
{'name': text}, offset=offset, limit=limit, sort=sort)
else:
groupList = self.model('group').textSearch(
text, user=user, offset=offset, limit=limit, sort=sort)
else:
groupList = self.model('group').list(
user=user, offset=offset, limit=limit, sort=sort)
return list(groupList)
@access.user
@filtermodel(model='group')
@autoDescribeRoute(
Description('Create a new group.')
.responseClass('Group')
.notes('Must be logged in.')
.param('name', 'Unique name for the group.', strip=True)
.param('description', 'Description of the group.', required=False,
default='', strip=True)
.param('public', 'Whether the group should be publicly visible.',
required=False, dataType='boolean', default=False)
.errorResponse()
.errorResponse('Write access was denied on the parent', 403)
)
def createGroup(self, name, description, public):
return self.model('group').createGroup(
name=name, creator=self.getCurrentUser(), description=description, public=public)
@access.public
@filtermodel(model='group')
@autoDescribeRoute(
Description('Get a group by ID.')
.responseClass('Group')
.modelParam('id', model='group', level=AccessType.READ)
.errorResponse('ID was invalid.')
.errorResponse('Read access was denied for the group.', 403)
)
def getGroup(self, group):
# Add in the current setting for adding to groups
group['_addToGroupPolicy'] = self.model('setting').get(SettingKey.ADD_TO_GROUP_POLICY)
return group
@access.public
@filtermodel(model='group', addFields={'access', 'requests'})
@autoDescribeRoute(
Description('Get the access control list for a group.')
.responseClass('Group')
.modelParam('id', model='group', level=AccessType.READ)
.errorResponse('ID was invalid.')
.errorResponse('Read access was denied for the group.', 403)
)
def getGroupAccess(self, group):
groupModel = self.model('group')
group['access'] = groupModel.getFullAccessList(group)
group['requests'] = list(groupModel.getFullRequestList(group))
return group
@access.public
@filtermodel(model='user')
@autoDescribeRoute(
Description('Show outstanding invitations for a group.')
.responseClass('Group')
.modelParam('id', model='group', level=AccessType.READ)
.pagingParams(defaultSort='lastName')
.errorResponse()
.errorResponse('Read access was denied for the group.', 403)
)
def getGroupInvitations(self, group, limit, offset, sort):
return list(self.model('group').getInvites(group, limit, offset, sort))
@access.user
@filtermodel(model='group')
@autoDescribeRoute(
Description('Update a group by ID.')
.modelParam('id', model='group', level=AccessType.WRITE)
.param('name', 'The name to set on the group.', required=False, strip=True)
.param('description', 'Description for the group.', required=False, strip=True)
.param('public', 'Whether the group should be publicly visible', dataType='boolean',
required=False)
.param('addAllowed', 'Can admins or moderators directly add members '
'to this group? Only system administrators are allowed to '
'set this field', required=False,
enum=['default', 'no', 'yesmod', 'yesadmin'])
.errorResponse()
.errorResponse('Write access was denied for the group.', 403)
)
def updateGroup(self, group, name, description, public, addAllowed):
if public is not None:
self.model('group').setPublic(group, public)
if name is not None:
group['name'] = name
if description is not None:
group['description'] = description
if addAllowed is not None:
self.requireAdmin(self.getCurrentUser())
group['addAllowed'] = addAllowed
return self.model('group').updateGroup(group)
@access.user
@filtermodel(model='group', addFields={'access', 'requests'})
@autoDescribeRoute(
Description('Request to join a group, or accept an invitation to join.')
.responseClass('Group')
.modelParam('id', model='group', level=AccessType.READ)
.errorResponse('ID was invalid.')
.errorResponse('You were not invited to this group, or do not have '
'read access to it.', 403)
)
def joinGroup(self, group):
groupModel = self.model('group')
group = groupModel.joinGroup(group, self.getCurrentUser())
group['access'] = groupModel.getFullAccessList(group)
group['requests'] = list(groupModel.getFullRequestList(group))
return group
@access.public
@filtermodel(model='user')
@autoDescribeRoute(
Description('List members of a group.')
.modelParam('id', model='group', level=AccessType.READ)
.pagingParams(defaultSort='lastName')
.errorResponse('ID was invalid.')
.errorResponse('Read access was denied for the group.', 403)
)
def listMembers(self, group, limit, offset, sort):
return list(self.model('group').listMembers(group, offset=offset, limit=limit, sort=sort))
@access.user
@filtermodel(model='group', addFields={'access', 'requests'})
@autoDescribeRoute(
Description("Invite a user to join a group, or accept a user's request to join.")
.responseClass('Group')
.notes('The "force" option to this endpoint is only available to '
'administrators and can be used to bypass the invitation process'
' and instead add the user directly to the group.')
.modelParam('id', model='group', level=AccessType.WRITE)
.modelParam('userId', 'The ID of the user to invite or accept.',
destName='userToInvite', level=AccessType.READ, paramType='form')
.param('level', 'The access level the user will be given when they accept the invitation.',
required=False, dataType='integer', default=AccessType.READ)
.param('quiet', 'If you do not want this action to send an email to '
'the target user, set this to true.', dataType='boolean',
required=False, default=False)
.param('force', 'Add user directly rather than sending an invitation '
'(admin-only option).', dataType='boolean', required=False, default=False)
.errorResponse()
.errorResponse('Write access was denied for the group.', 403)
)
def inviteToGroup(self, group, userToInvite, level, quiet, force):
groupModel = self.model('group')
user = self.getCurrentUser()
if force:
if not user['admin']:
mustBeAdmin = True
addPolicy = self.model('setting').get(SettingKey.ADD_TO_GROUP_POLICY)
addGroup = group.get('addAllowed', 'default')
if addGroup not in ['no', 'yesadmin', 'yesmod']:
addGroup = addPolicy
if (groupModel.hasAccess(
group, user, AccessType.ADMIN) and
('mod' in addPolicy or 'admin' in addPolicy) and
addGroup.startswith('yes')):
mustBeAdmin = False
elif (groupModel.hasAccess(
group, user, AccessType.WRITE) and
'mod' in addPolicy and
addGroup == 'yesmod'):
mustBeAdmin = False
if mustBeAdmin:
self.requireAdmin(user)
groupModel.addUser(group, userToInvite, level=level)
else:
# Can only invite into access levels that you yourself have
groupModel.requireAccess(group, user, level)
groupModel.inviteUser(group, userToInvite, level)
if not quiet:
html = mail_utils.renderTemplate('groupInvite.mako', {
'userToInvite': userToInvite,
'user': user,
'group': group
})
mail_utils.sendEmail(
to=userToInvite['email'], text=html,
subject="%s: You've been invited to a group"
% ModelImporter.model('setting').get(SettingKey.BRAND_NAME)
)
group['access'] = groupModel.getFullAccessList(group)
group['requests'] = list(groupModel.getFullRequestList(group))
return group
@access.user
@filtermodel(model='group', addFields={'access'})
@autoDescribeRoute(
Description('Promote a member to be a moderator of the group.')
.responseClass('Group')
.modelParam('id', model='group', level=AccessType.ADMIN)
.modelParam('userId', 'The ID of the user to promote.',
level=AccessType.READ, paramType='formData')
.errorResponse('ID was invalid.')
.errorResponse("You don't have permission to promote users.", 403)
)
def promoteToModerator(self, group, user):
return self._promote(group, user, AccessType.WRITE)
@access.user
@filtermodel(model='group', addFields={'access'})
@autoDescribeRoute(
Description('Promote a member to be an administrator of the group.')
.responseClass('Group')
.modelParam('id', model='group', level=AccessType.ADMIN)
.modelParam('userId', 'The ID of the user to promote.',
level=AccessType.READ, paramType='formData')
.errorResponse('ID was invalid.')
.errorResponse("You don't have permission to promote users.", 403)
)
def promoteToAdmin(self, group, user):
return self._promote(group, user, AccessType.ADMIN)
def _promote(self, group, user, level):
"""
Promote a user to moderator or administrator.
:param group: The group to promote within.
:param user: The user to promote.
:param level: Either WRITE or ADMIN, for moderator or administrator.
:type level: AccessType
:returns: The updated group document.
"""
if not group['_id'] in user.get('groups', []):
raise AccessException('That user is not a group member.')
group = self.model('group').setUserAccess(group, user, level=level, save=True)
group['access'] = self.model('group').getFullAccessList(group)
return group
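    # Illustrative REST calls that reach _promote via the routes registered
    # in __init__ (group and user IDs hypothetical):
    #
    #   POST /api/v1/group/<groupId>/moderator  userId=<userId>  -> WRITE
    #   POST /api/v1/group/<groupId>/admin      userId=<userId>  -> ADMIN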
@access.user
@filtermodel(model='group', addFields={'access', 'requests'})
@autoDescribeRoute(
Description('Demote a user to a normal group member.')
.responseClass('Group')
.modelParam('id', model='group', level=AccessType.ADMIN)
.modelParam('userId', 'The ID of the user to demote.',
level=AccessType.READ, paramType='formData')
.errorResponse()
.errorResponse("You don't have permission to demote users.", 403)
)
def demote(self, group, user):
groupModel = self.model('group')
group = groupModel.setUserAccess(group, user, level=AccessType.READ, save=True)
group['access'] = groupModel.getFullAccessList(group)
group['requests'] = list(groupModel.getFullRequestList(group))
return group
@access.user
@filtermodel(model='group', addFields={'access', 'requests'})
@autoDescribeRoute(
Description('Remove a user from a group, or uninvite them.')
.responseClass('Group')
.notes('If the specified user is not yet a member of the group, this '
'will delete any outstanding invitation or membership request for '
'the user. Passing no userId parameter will assume that the '
               'current user is removing themselves.')
.modelParam('id', model='group', level=AccessType.READ)
.modelParam('userId', 'The ID of the user to remove. If not passed, will '
'remove yourself from the group.', required=False,
level=AccessType.READ, destName='userToRemove', paramType='formData')
.errorResponse()
.errorResponse("You don't have permission to remove that user.", 403)
)
def removeFromGroup(self, group, userToRemove):
user = self.getCurrentUser()
groupModel = self.model('group')
if userToRemove is None:
            # Assume user is removing themselves from the group
userToRemove = user
# If removing someone else, you must have at least as high an
# access level as they do, and you must have at least write access
# to remove any user other than yourself.
if user['_id'] != userToRemove['_id']:
if groupModel.hasAccess(group, userToRemove, AccessType.ADMIN):
groupModel.requireAccess(group, user, AccessType.ADMIN)
else:
groupModel.requireAccess(group, user, AccessType.WRITE)
group = groupModel.removeUser(group, userToRemove)
group['access'] = groupModel.getFullAccessList(group)
group['requests'] = list(groupModel.getFullRequestList(group))
return group
@access.user
@autoDescribeRoute(
Description('Delete a group by ID.')
.modelParam('id', model='group', level=AccessType.ADMIN)
.errorResponse('ID was invalid.')
.errorResponse('Admin access was denied for the group.', 403)
)
def deleteGroup(self, group):
self.model('group').remove(group)
return {'message': 'Deleted the group %s.' % group['name']}
| apache-2.0 |
rahushen/ansible | contrib/vault/vault-keyring-client.py | 40 | 5169 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# (c) 2014, Matt Martz <[email protected]>
# (c) 2016, Justin Mayer <https://justinmayer.com/>
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# =============================================================================
#
# This script is to be used with ansible-vault's --vault-id arg
# to retrieve the vault password via your OS's native keyring application.
#
# This file *MUST* be saved with executable permissions. Otherwise, Ansible
# will try to parse as a password file and display: "ERROR! Decryption failed"
#
# The `keyring` Python module is required: https://pypi.python.org/pypi/keyring
#
# By default, this script will store the specified password in the keyring of
# the user that invokes the script. To specify a user keyring, add a [vault]
# section to your ansible.cfg file with a 'username' option. Example:
#
# [vault]
# username = 'ansible-vault'
#
# In usage like:
#
# ansible-vault --vault-id keyring_id@contrib/vault/vault-keyring-client.py view some_encrypted_file
#
# --vault-id will call this script like:
#
# contrib/vault/vault-keyring-client.py --vault-id keyring_id
#
# That will retrieve the password from the user's keyring for the
# keyring service 'keyring_id'. The equivalent of:
#
# keyring get keyring_id $USER
#
# If no vault-id name is specified on the ansible command line, the vault-keyring-client.py
# script will be called without a '--vault-id' and will default to the keyring service 'ansible'.
# This is equivalent to:
#
# keyring get ansible $USER
#
# You can configure the `vault_password_file` option in ansible.cfg:
#
# [defaults]
# ...
# vault_password_file = /path/to/vault-keyring-client.py
# ...
#
# To set your password, `cd` to your project directory and run:
#
# # will use default keyring service / vault-id of 'ansible'
# /path/to/vault-keyring-client.py --set
#
# or to specify the keyring service / vault-id of 'my_ansible_secret':
#
# /path/to/vault-keyring-client.py --vault-id my_ansible_secret --set
#
# If you choose not to configure the path to `vault_password_file` in
# ansible.cfg, your `ansible-playbook` command might look like:
#
# ansible-playbook --vault-id=keyring_id@/path/to/vault-keyring-client.py site.yml
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
import argparse
import sys
import getpass
import keyring
from ansible.config.manager import ConfigManager
KEYNAME_UNKNOWN_RC = 2
def build_arg_parser():
parser = argparse.ArgumentParser(description='Get a vault password from user keyring')
parser.add_argument('--vault-id', action='store', default=None,
dest='vault_id',
help='name of the vault secret to get from keyring')
parser.add_argument('--username', action='store', default=None,
help='the username whose keyring is queried')
parser.add_argument('--set', action='store_true', default=False,
dest='set_password',
help='set the password instead of getting it')
return parser
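# Equivalent raw keyring CLI calls (mirroring the header comments above;
# service and user names are hypothetical):
#
#   keyring set my_ansible_secret jdoe    # what --set does, interactively
#   keyring get my_ansible_secret jdoe    # what a plain invocation prints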
def main():
config_manager = ConfigManager()
username = config_manager.data.get_setting('vault.username')
if not username:
username = getpass.getuser()
keyname = config_manager.data.get_setting('vault.keyname')
if not keyname:
keyname = 'ansible'
arg_parser = build_arg_parser()
args = arg_parser.parse_args()
username = args.username or username
keyname = args.vault_id or keyname
# print('username: %s keyname: %s' % (username, keyname))
if args.set_password:
intro = 'Storing password in "{}" user keyring using key name: {}\n'
sys.stdout.write(intro.format(username, keyname))
password = getpass.getpass()
confirm = getpass.getpass('Confirm password: ')
if password == confirm:
keyring.set_password(keyname, username, password)
else:
sys.stderr.write('Passwords do not match\n')
sys.exit(1)
else:
secret = keyring.get_password(keyname, username)
if secret is None:
sys.stderr.write('vault-keyring-client could not find key="%s" for user="%s" via backend="%s"\n' %
(keyname, username, keyring.get_keyring().name))
sys.exit(KEYNAME_UNKNOWN_RC)
# print('secret: %s' % secret)
sys.stdout.write('%s\n' % secret)
sys.exit(0)
if __name__ == '__main__':
main()
| gpl-3.0 |
rousseab/pymatgen | pymatgen/io/xyz.py | 4 | 2657 | # coding: utf-8
from __future__ import division, unicode_literals
"""
Module implementing an XYZ file object class.
"""
__author__ = "Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "[email protected]"
__date__ = "Apr 17, 2012"
import re
from pymatgen.core.structure import Molecule
from monty.io import zopen
class XYZ(object):
"""
Basic class for importing and exporting Molecules or Structures in XYZ
format.
Args:
mol: Input molecule
.. note::
Exporting periodic structures in the XYZ format will lose information
about the periodicity. Essentially, only cartesian coordinates are
written in this format and no information is retained about the
lattice.
"""
def __init__(self, mol, coord_precision=6):
self._mol = mol
self.precision = coord_precision
@property
def molecule(self):
"""
Returns molecule associated with this XYZ.
"""
return self._mol
@staticmethod
def from_string(contents):
"""
Creates XYZ object from a string.
Args:
contents: String representing an XYZ file.
Returns:
XYZ object
"""
lines = contents.split("\n")
num_sites = int(lines[0])
coords = []
sp = []
coord_patt = re.compile(
"(\w+)\s+([0-9\-\.e]+)\s+([0-9\-\.e]+)\s+([0-9\-\.e]+)"
)
for i in range(2, 2 + num_sites):
m = coord_patt.search(lines[i])
if m:
                sp.append(m.group(1))  # regex match groups are 1-indexed
                # m.groups() returns a 0-indexed tuple of all groups
                coords.append([float(j) for j in m.groups()[1:4]])
return XYZ(Molecule(sp, coords))
@staticmethod
def from_file(filename):
"""
Creates XYZ object from a file.
Args:
filename: XYZ filename
Returns:
XYZ object
"""
with zopen(filename) as f:
return XYZ.from_string(f.read())
def __str__(self):
output = [str(len(self._mol)), self._mol.composition.formula]
fmtstr = "{{}} {{:.{0}f}} {{:.{0}f}} {{:.{0}f}}".format(self.precision)
for site in self._mol:
output.append(fmtstr.format(site.specie, site.x, site.y, site.z))
return "\n".join(output)
def write_file(self, filename):
"""
Writes XYZ to file.
Args:
filename: File name of output file.
"""
with zopen(filename, "wt") as f:
f.write(self.__str__())
| mit |
wangyum/tensorflow | tensorflow/python/training/momentum_test.py | 74 | 22862 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for Momentum."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
from tensorflow.python.training import momentum as momentum_lib
class MomentumOptimizerTest(test.TestCase):
def _update_nesterov_momentum_numpy(self, var, accum, g, lr, momentum):
var = var + accum * lr * momentum
accum = accum * momentum + g
var = var - lr * accum
var = var - accum * lr * momentum
return var, accum
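  # The numpy helper above is algebraically equivalent to the standard
  # Nesterov momentum update (expand the +/- accum * lr * momentum terms):
  #   accum_{t+1} = momentum * accum_t + g_t
  #   var_{t+1}   = var_t - lr * (g_t + momentum * accum_{t+1})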
def doTestBasic(self, use_resource=False):
for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
with self.test_session():
if use_resource:
var0 = resource_variable_ops.ResourceVariable([1.0, 2.0], dtype=dtype)
var1 = resource_variable_ops.ResourceVariable([3.0, 4.0], dtype=dtype)
else:
var0 = variables.Variable([1.0, 2.0], dtype=dtype)
var1 = variables.Variable([3.0, 4.0], dtype=dtype)
grads0 = constant_op.constant([0.1, 0.1], dtype=dtype)
grads1 = constant_op.constant([0.01, 0.01], dtype=dtype)
mom_opt = momentum_lib.MomentumOptimizer(
learning_rate=2.0, momentum=0.9)
mom_update = mom_opt.apply_gradients(
zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
# Check we have slots
self.assertEqual(["momentum"], mom_opt.get_slot_names())
slot0 = mom_opt.get_slot(var0, "momentum")
self.assertEquals(slot0.get_shape(), var0.get_shape())
self.assertFalse(slot0 in variables.trainable_variables())
slot1 = mom_opt.get_slot(var1, "momentum")
self.assertEquals(slot1.get_shape(), var1.get_shape())
self.assertFalse(slot1 in variables.trainable_variables())
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], var0.eval())
self.assertAllClose([3.0, 4.0], var1.eval())
        # Step 1: the momentum accumulators were 0, so we should see a normal
# update: v -= grad * learning_rate
mom_update.run()
# Check that the momentum accumulators have been updated.
self.assertAllCloseAccordingToType(np.array([0.1, 0.1]), slot0.eval())
self.assertAllCloseAccordingToType(np.array([0.01, 0.01]), slot1.eval())
# Check that the parameters have been updated.
self.assertAllCloseAccordingToType(
np.array([1.0 - (0.1 * 2.0), 2.0 - (0.1 * 2.0)]), var0.eval())
self.assertAllCloseAccordingToType(
np.array([3.0 - (0.01 * 2.0), 4.0 - (0.01 * 2.0)]), var1.eval())
# Step 2: the momentum accumulators contain the previous update.
mom_update.run()
# Check that the momentum accumulators have been updated.
self.assertAllCloseAccordingToType(
np.array([(0.9 * 0.1 + 0.1), (0.9 * 0.1 + 0.1)]), slot0.eval())
self.assertAllCloseAccordingToType(
np.array([(0.9 * 0.01 + 0.01), (0.9 * 0.01 + 0.01)]), slot1.eval())
# Check that the parameters have been updated.
self.assertAllCloseAccordingToType(
np.array([
1.0 - (0.1 * 2.0) - ((0.9 * 0.1 + 0.1) * 2.0),
2.0 - (0.1 * 2.0) - ((0.9 * 0.1 + 0.1) * 2.0)
]), var0.eval())
self.assertAllCloseAccordingToType(
np.array([
2.98 - ((0.9 * 0.01 + 0.01) * 2.0), 3.98 - (
(0.9 * 0.01 + 0.01) * 2.0)
]), var1.eval())
def testBasic(self):
self.doTestBasic(use_resource=False)
def testResourceBasic(self):
self.doTestBasic(use_resource=True)
def testNesterovMomentum(self):
for dtype in [dtypes.float32, dtypes.float64]:
with self.test_session():
var0 = variables.Variable([1.0, 2.0], dtype=dtype)
var1 = variables.Variable([3.0, 4.0], dtype=dtype)
var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype)
var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype)
accum0_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype)
accum1_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype)
cost = 5 * var0 * var0 + 3 * var1
global_step = variables.Variable(
array_ops.zeros([], dtypes.int64), name="global_step")
mom_op = momentum_lib.MomentumOptimizer(
learning_rate=2.0, momentum=0.9, use_nesterov=True)
opt_op = mom_op.minimize(cost, global_step, [var0, var1])
variables.global_variables_initializer().run()
for t in range(1, 5):
opt_op.run()
var0_np, accum0_np = self._update_nesterov_momentum_numpy(
var0_np, accum0_np, var0_np * 10, 2.0, 0.9)
var1_np, accum1_np = self._update_nesterov_momentum_numpy(var1_np,
accum1_np,
3, 2.0, 0.9)
self.assertAllClose(var0_np, var0.eval())
self.assertAllClose(var1_np, var1.eval())
def testSparseNesterovMomentum(self):
for dtype in [dtypes.float32, dtypes.float64]:
with self.test_session():
var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype)
var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype)
accum0_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype)
accum1_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype)
grads = []
for t in range(1, 5):
grads.append(var0_np * 10)
var0_np, accum0_np = self._update_nesterov_momentum_numpy(
var0_np, accum0_np, var0_np * 10, 2.0, 0.9)
var1_np, accum1_np = self._update_nesterov_momentum_numpy(var1_np,
accum1_np,
3, 2.0, 0.9)
var0_np = np.array([1.0, 2.0], dtype=dtype.as_numpy_dtype)
var1_np = np.array([3.0, 4.0], dtype=dtype.as_numpy_dtype)
accum0_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype)
accum1_np = np.array([0.0, 0.0], dtype=dtype.as_numpy_dtype)
var0 = variables.Variable(var0_np)
var1 = variables.Variable(var1_np)
loss = 5 * var0 * var0 + 3 * var1
mom_op = momentum_lib.MomentumOptimizer(
learning_rate=2.0, momentum=0.9, use_nesterov=True)
x_feed = array_ops.placeholder(dtype)
y_feed = ops.IndexedSlices(
x_feed, constant_op.constant([0, 1]), constant_op.constant([2]))
grads_and_vars = [(y_feed, var0), (constant_op.constant(
[3.0, 3.0], dtype=dtype), var1)]
opt_update = mom_op.apply_gradients(grads_and_vars)
variables.global_variables_initializer().run()
for t in range(1, 5):
opt_update.run(feed_dict={x_feed: grads[t - 1]})
var0_np, accum0_np = self._update_nesterov_momentum_numpy(
var0_np, accum0_np, var0_np * 10, 2.0, 0.9)
var1_np, accum1_np = self._update_nesterov_momentum_numpy(var1_np,
accum1_np,
3, 2.0, 0.9)
self.assertAllClose(var0_np, var0.eval())
self.assertAllClose(var1_np, var1.eval())
def testMinimizeSparseResourceVariable(self):
for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
with self.test_session():
var0 = resource_variable_ops.ResourceVariable([[1.0, 2.0]], dtype=dtype)
x = constant_op.constant([[4.0], [5.0]], dtype=dtype)
pred = math_ops.matmul(embedding_ops.embedding_lookup([var0], [0]), x)
loss = pred * pred
sgd_op = momentum_lib.MomentumOptimizer(
learning_rate=1.0, momentum=0.0).minimize(loss)
variables.global_variables_initializer().run()
# Fetch params to validate initial values
self.assertAllCloseAccordingToType([[1.0, 2.0]], var0.eval())
# Run 1 step of sgd
sgd_op.run()
# Validate updated params
self.assertAllCloseAccordingToType(
[[-111, -138]], var0.eval())
def testTensorLearningRateAndMomentum(self):
for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
with self.test_session():
var0 = variables.Variable([1.0, 2.0], dtype=dtype)
var1 = variables.Variable([3.0, 4.0], dtype=dtype)
grads0 = constant_op.constant([0.1, 0.1], dtype=dtype)
grads1 = constant_op.constant([0.01, 0.01], dtype=dtype)
mom_opt = momentum_lib.MomentumOptimizer(
learning_rate=constant_op.constant(2.0),
momentum=constant_op.constant(0.9))
mom_update = mom_opt.apply_gradients(
zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
# Check we have slots
self.assertEqual(["momentum"], mom_opt.get_slot_names())
slot0 = mom_opt.get_slot(var0, "momentum")
self.assertEquals(slot0.get_shape(), var0.get_shape())
self.assertFalse(slot0 in variables.trainable_variables())
slot1 = mom_opt.get_slot(var1, "momentum")
self.assertEquals(slot1.get_shape(), var1.get_shape())
self.assertFalse(slot1 in variables.trainable_variables())
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], var0.eval())
self.assertAllClose([3.0, 4.0], var1.eval())
        # Step 1: the momentum accumulators were 0, so we should see a normal
# update: v -= grad * learning_rate
mom_update.run()
# Check that the momentum accumulators have been updated.
self.assertAllCloseAccordingToType(np.array([0.1, 0.1]), slot0.eval())
self.assertAllCloseAccordingToType(np.array([0.01, 0.01]), slot1.eval())
# Check that the parameters have been updated.
self.assertAllCloseAccordingToType(
np.array([1.0 - (0.1 * 2.0), 2.0 - (0.1 * 2.0)]), var0.eval())
self.assertAllCloseAccordingToType(
np.array([3.0 - (0.01 * 2.0), 4.0 - (0.01 * 2.0)]), var1.eval())
# Step 2: the momentum accumulators contain the previous update.
mom_update.run()
# Check that the momentum accumulators have been updated.
self.assertAllCloseAccordingToType(
np.array([(0.9 * 0.1 + 0.1), (0.9 * 0.1 + 0.1)]), slot0.eval())
self.assertAllCloseAccordingToType(
np.array([(0.9 * 0.01 + 0.01), (0.9 * 0.01 + 0.01)]), slot1.eval())
# Check that the parameters have been updated.
self.assertAllCloseAccordingToType(
np.array([
1.0 - (0.1 * 2.0) - ((0.9 * 0.1 + 0.1) * 2.0),
2.0 - (0.1 * 2.0) - ((0.9 * 0.1 + 0.1) * 2.0)
]), var0.eval())
self.assertAllCloseAccordingToType(
np.array([
2.98 - ((0.9 * 0.01 + 0.01) * 2.0), 3.98 - (
(0.9 * 0.01 + 0.01) * 2.0)
]), var1.eval())
def _dbParamsMom01(self):
"""Return dist-belief momentum values.
    These values were generated by the dist-belief momentum unittest,
    run with a learning rate of 0.1 and a momentum of 0.1.
These values record how a parameter vector of size 10, initialized with 0.0,
gets updated with 10 consecutive momentum steps. It uses random gradients.
Returns:
db_grad: The gradients to apply
db_out: The parameters after the momentum update.
"""
db_grad = [[]] * 10
db_out = [[]] * 10
# pylint: disable=line-too-long
db_grad[0] = [
0.00096264342, 0.17914793, 0.93945462, 0.41396621, 0.53037018,
0.93197989, 0.78648776, 0.50036013, 0.55345792, 0.96722615
]
db_out[0] = [
-9.6264346e-05, -0.017914793, -0.093945466, -0.041396622, -0.053037018,
-0.093197994, -0.078648776, -0.050036013, -0.055345792, -0.096722618
]
db_grad[1] = [
0.17075552, 0.88821375, 0.20873757, 0.25236958, 0.57578111, 0.15312378,
0.5513742, 0.94687688, 0.16012503, 0.22159521
]
db_out[1] = [
-0.017181443, -0.10852765, -0.12421377, -0.070773244, -0.11591884,
-0.11783017, -0.14165108, -0.14972731, -0.076892875, -0.1285544
]
db_grad[2] = [
0.35077485, 0.47304362, 0.44412705, 0.44368884, 0.078527533, 0.81223965,
0.31168157, 0.43203235, 0.16792089, 0.24644311
]
db_out[2] = [
-0.053967446, -0.1648933, -0.1716533, -0.1180798, -0.13005978,
-0.20151734, -0.17911947, -0.20289968, -0.095839672, -0.15638189
]
db_grad[3] = [
0.9694621, 0.75035888, 0.28171822, 0.83813518, 0.53807181, 0.3728098,
0.81454384, 0.03848977, 0.89759839, 0.93665648
]
db_out[3] = [
-0.15459226, -0.24556576, -0.20456907, -0.20662397, -0.18528105,
-0.24716705, -0.2643207, -0.21206589, -0.18749419, -0.2528303
]
db_grad[4] = [
0.38578293, 0.8536852, 0.88722926, 0.66276771, 0.13678469, 0.94036359,
0.69107032, 0.81897682, 0.5433259, 0.67860287
]
db_out[4] = [
-0.20323303, -0.33900154, -0.29658359, -0.28175515, -0.20448165,
-0.34576839, -0.34194785, -0.29488021, -0.25099224, -0.33033544
]
db_grad[5] = [
0.27885768, 0.76100707, 0.24625534, 0.81354135, 0.18959245, 0.48038563,
0.84163809, 0.41172323, 0.83259648, 0.44941229
]
db_out[5] = [
-0.23598288, -0.42444581, -0.33041057, -0.3706224, -0.22536094,
-0.40366709, -0.43387437, -0.34433398, -0.34060168, -0.38302717
]
db_grad[6] = [
0.27233034, 0.056316052, 0.5039115, 0.24105175, 0.35697976, 0.75913221,
0.73577434, 0.16014607, 0.57500273, 0.071136251
]
db_out[6] = [
-0.26649091, -0.43862185, -0.38418442, -0.40361428, -0.26314685,
-0.48537019, -0.51664448, -0.36529395, -0.40706289, -0.39540997
]
db_grad[7] = [
0.58697265, 0.2494842, 0.08106143, 0.39954534, 0.15892942, 0.12683646,
0.74053431, 0.16033, 0.66625422, 0.73515922
]
db_out[7] = [
-0.32823896, -0.46498787, -0.39766794, -0.446868, -0.28281838,
-0.50622416, -0.59897494, -0.38342294, -0.48033443, -0.47016418
]
db_grad[8] = [
0.8215279, 0.41994119, 0.95172721, 0.68000203, 0.79439718, 0.43384039,
0.55561525, 0.22567581, 0.93331909, 0.29438227
]
db_out[8] = [
-0.41656655, -0.50961858, -0.49418902, -0.51919359, -0.36422527,
-0.55169362, -0.6627695, -0.40780342, -0.58099347, -0.50707781
]
db_grad[9] = [
0.68297005, 0.67758518, 0.1748755, 0.13266537, 0.70697063, 0.055731893,
0.68593478, 0.50580865, 0.12602448, 0.093537711
]
db_out[9] = [
-0.49369633, -0.58184016, -0.52132869, -0.5396927, -0.44306302,
-0.56181377, -0.73774242, -0.46082234, -0.60366184, -0.52012295
]
# pylint: enable=line-too-long
return db_grad, db_out
def testLikeDistBeliefMom01(self):
with self.test_session():
db_grad, db_out = self._dbParamsMom01()
num_samples = len(db_grad)
var0 = variables.Variable([0.0] * num_samples)
grads0 = constant_op.constant([0.0] * num_samples)
mom_opt = momentum_lib.MomentumOptimizer(learning_rate=0.1, momentum=0.1)
mom_update = mom_opt.apply_gradients(zip([grads0], [var0]))
variables.global_variables_initializer().run()
for i in xrange(num_samples):
mom_update.run(feed_dict={grads0: db_grad[i]})
self.assertAllClose(np.array(db_out[i]), var0.eval())
def testSparse(self):
for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
with self.test_session():
var0 = variables.Variable(array_ops.zeros([4, 2], dtype=dtype))
var1 = variables.Variable(constant_op.constant(1.0, dtype, [4, 2]))
grads0 = ops.IndexedSlices(
constant_op.constant(
[[.1, .1]], dtype=dtype),
constant_op.constant([1]),
constant_op.constant([4, 2]))
grads1 = ops.IndexedSlices(
constant_op.constant(
[[.01, .01], [.01, .01]], dtype=dtype),
constant_op.constant([2, 3]),
constant_op.constant([4, 2]))
mom_opt = momentum_lib.MomentumOptimizer(
learning_rate=2.0, momentum=0.9)
mom_update = mom_opt.apply_gradients(
zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
# Check we have slots
self.assertEqual(["momentum"], mom_opt.get_slot_names())
slot0 = mom_opt.get_slot(var0, "momentum")
self.assertEquals(slot0.get_shape(), var0.get_shape())
slot1 = mom_opt.get_slot(var1, "momentum")
self.assertEquals(slot1.get_shape(), var1.get_shape())
# Fetch params to validate initial values
self.assertAllClose([0, 0], var0.eval()[0])
self.assertAllClose([0, 0], var0.eval()[1])
self.assertAllClose([1, 1], var1.eval()[2])
# Step 1: the momentum accumulators are 0. So we should see a normal
# update: v -= grad * learning_rate
mom_update.run()
# Check that the momentum accumulators have been updated.
self.assertAllCloseAccordingToType(np.array([0, 0]), slot0.eval()[0])
self.assertAllCloseAccordingToType(np.array([.1, .1]), slot0.eval()[1])
self.assertAllCloseAccordingToType(
np.array([.01, .01]), slot1.eval()[2])
# Check that the parameters have been updated.
self.assertAllCloseAccordingToType(np.array([0, 0]), var0.eval()[0])
self.assertAllCloseAccordingToType(
np.array([-(0.1 * 2.0), -(0.1 * 2.0)]), var0.eval()[1])
self.assertAllCloseAccordingToType(
np.array([1.0 - (0.01 * 2.0), 1.0 - (0.01 * 2.0)]), var1.eval()[2])
# Step 2: the momentum accumulators contain the previous update.
mom_update.run()
# Check that the momentum accumulators have been updated.
self.assertAllClose(np.array([0, 0]), slot0.eval()[0])
self.assertAllCloseAccordingToType(
np.array([(0.9 * 0.1 + 0.1), (0.9 * 0.1 + 0.1)]), slot0.eval()[1])
self.assertAllCloseAccordingToType(
np.array([(0.9 * 0.01 + 0.01), (0.9 * 0.01 + 0.01)]),
slot1.eval()[2])
# Check that the parameters have been updated.
self.assertAllClose(np.array([0, 0]), var0.eval()[0])
self.assertAllCloseAccordingToType(
np.array([
-(0.1 * 2.0) - ((0.9 * 0.1 + 0.1) * 2.0), -(0.1 * 2.0) - (
(0.9 * 0.1 + 0.1) * 2.0)
]), var0.eval()[1])
self.assertAllCloseAccordingToType(
np.array([
0.98 - ((0.9 * 0.01 + 0.01) * 2.0), 0.98 - (
(0.9 * 0.01 + 0.01) * 2.0)
]), var1.eval()[2])
def testSharing(self):
for dtype in [dtypes.half, dtypes.float32, dtypes.float64]:
with self.test_session():
var0 = variables.Variable([1.0, 2.0], dtype=dtype)
var1 = variables.Variable([3.0, 4.0], dtype=dtype)
grads0 = constant_op.constant([0.1, 0.1], dtype=dtype)
grads1 = constant_op.constant([0.01, 0.01], dtype=dtype)
mom_opt = momentum_lib.MomentumOptimizer(
learning_rate=2.0, momentum=0.9)
mom_update1 = mom_opt.apply_gradients(
zip([grads0, grads1], [var0, var1]))
mom_update2 = mom_opt.apply_gradients(
zip([grads0, grads1], [var0, var1]))
variables.global_variables_initializer().run()
self.assertEqual(["momentum"], mom_opt.get_slot_names())
slot0 = mom_opt.get_slot(var0, "momentum")
self.assertEquals(slot0.get_shape(), var0.get_shape())
slot1 = mom_opt.get_slot(var1, "momentum")
self.assertEquals(slot1.get_shape(), var1.get_shape())
# Fetch params to validate initial values
self.assertAllClose([1.0, 2.0], var0.eval())
self.assertAllClose([3.0, 4.0], var1.eval())
        # Step 1: the momentum accumulators were 0. So we should see a normal
# update: v -= grad * learning_rate
mom_update1.run()
# Check that the momentum accumulators have been updated.
self.assertAllCloseAccordingToType(np.array([0.1, 0.1]), slot0.eval())
self.assertAllCloseAccordingToType(np.array([0.01, 0.01]), slot1.eval())
# Check that the parameters have been updated.
self.assertAllCloseAccordingToType(
np.array([1.0 - (0.1 * 2.0), 2.0 - (0.1 * 2.0)]), var0.eval())
self.assertAllCloseAccordingToType(
np.array([3.0 - (0.01 * 2.0), 4.0 - (0.01 * 2.0)]), var1.eval())
# Step 2: the second momentum accumulators contain the previous update.
mom_update2.run()
# Check that the momentum accumulators have been updated.
self.assertAllCloseAccordingToType(
np.array([(0.9 * 0.1 + 0.1), (0.9 * 0.1 + 0.1)]), slot0.eval())
self.assertAllCloseAccordingToType(
np.array([(0.9 * 0.01 + 0.01), (0.9 * 0.01 + 0.01)]), slot1.eval())
# Check that the parameters have been updated.
self.assertAllCloseAccordingToType(
np.array([
1.0 - (0.1 * 2.0) - ((0.9 * 0.1 + 0.1) * 2.0),
2.0 - (0.1 * 2.0) - ((0.9 * 0.1 + 0.1) * 2.0)
]), var0.eval())
self.assertAllCloseAccordingToType(
np.array([
2.98 - ((0.9 * 0.01 + 0.01) * 2.0), 3.98 - (
(0.9 * 0.01 + 0.01) * 2.0)
]), var1.eval())
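# A minimal arithmetic sketch (not part of the original test file) of the
# update rule the momentum assertions above encode:
#   accum <- momentum * accum + grad;  var <- var - learning_rate * accum
# The helper name is illustrative only and is not TensorFlow API.
def _momentum_reference_step(var, accum, grad, learning_rate, momentum):
  # Fold the new gradient into the accumulator, then take the scaled step.
  accum = momentum * accum + grad
  var = var - learning_rate * accum
  return var, accum
# For example, with learning_rate=2.0, momentum=0.9 and a constant grad of
# 0.1, two steps from var=1.0 give 1.0 - 0.2 and then
# 1.0 - 0.2 - 2.0 * (0.9 * 0.1 + 0.1), the values asserted above.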
if __name__ == "__main__":
test.main()
| apache-2.0 |
51itclub/pelican | pelican/tests/test_importer.py | 24 | 15914 | # -*- coding: utf-8 -*-
from __future__ import print_function, unicode_literals
import locale
import os
import re
from codecs import open
from pelican.tests.support import (mute, skipIfNoExecutable, temporary_folder,
unittest)
from pelican.tools.pelican_import import (build_header, build_markdown_header,
decode_wp_content,
download_attachments, fields2pelican,
get_attachments, wp2fields)
from pelican.utils import path_to_file_url, slugify
CUR_DIR = os.path.abspath(os.path.dirname(__file__))
WORDPRESS_XML_SAMPLE = os.path.join(CUR_DIR, 'content', 'wordpressexport.xml')
WORDPRESS_ENCODED_CONTENT_SAMPLE = os.path.join(CUR_DIR,
'content',
'wordpress_content_encoded')
WORDPRESS_DECODED_CONTENT_SAMPLE = os.path.join(CUR_DIR,
'content',
'wordpress_content_decoded')
try:
from bs4 import BeautifulSoup
except ImportError:
BeautifulSoup = False # NOQA
try:
import bs4.builder._lxml as LXML
except ImportError:
LXML = False
@skipIfNoExecutable(['pandoc', '--version'])
@unittest.skipUnless(BeautifulSoup, 'Needs BeautifulSoup module')
class TestWordpressXmlImporter(unittest.TestCase):
def setUp(self):
self.old_locale = locale.setlocale(locale.LC_ALL)
locale.setlocale(locale.LC_ALL, str('C'))
self.posts = list(wp2fields(WORDPRESS_XML_SAMPLE))
self.custposts = list(wp2fields(WORDPRESS_XML_SAMPLE, True))
def tearDown(self):
locale.setlocale(locale.LC_ALL, self.old_locale)
def test_ignore_empty_posts(self):
self.assertTrue(self.posts)
for (title, content, fname, date, author,
categ, tags, status, kind, format) in self.posts:
self.assertTrue(title.strip())
def test_recognise_page_kind(self):
""" Check that we recognise pages in wordpress, as opposed to posts """
self.assertTrue(self.posts)
# Collect (title, filename, kind) of non-empty posts recognised as page
pages_data = []
for (title, content, fname, date, author,
categ, tags, status, kind, format) in self.posts:
if kind == 'page':
pages_data.append((title, fname))
self.assertEqual(2, len(pages_data))
self.assertEqual(('Page', 'contact'), pages_data[0])
self.assertEqual(('Empty Page', 'empty'), pages_data[1])
def test_dirpage_directive_for_page_kind(self):
silent_f2p = mute(True)(fields2pelican)
test_post = filter(lambda p: p[0].startswith("Empty Page"), self.posts)
with temporary_folder() as temp:
fname = list(silent_f2p(test_post, 'markdown',
temp, dirpage=True))[0]
self.assertTrue(fname.endswith('pages%sempty.md' % os.path.sep))
def test_dircat(self):
silent_f2p = mute(True)(fields2pelican)
test_posts = []
for post in self.posts:
# check post kind
if len(post[5]) > 0: # Has a category
test_posts.append(post)
with temporary_folder() as temp:
fnames = list(silent_f2p(test_posts, 'markdown',
temp, dircat=True))
index = 0
for post in test_posts:
name = post[2]
category = slugify(post[5][0])
name += '.md'
filename = os.path.join(category, name)
out_name = fnames[index]
self.assertTrue(out_name.endswith(filename))
index += 1
def test_unless_custom_post_all_items_should_be_pages_or_posts(self):
self.assertTrue(self.posts)
pages_data = []
for (title, content, fname, date, author, categ,
tags, status, kind, format) in self.posts:
if kind == 'page' or kind == 'article':
pass
else:
pages_data.append((title, fname))
self.assertEqual(0, len(pages_data))
def test_recognise_custom_post_type(self):
self.assertTrue(self.custposts)
cust_data = []
for (title, content, fname, date, author, categ,
tags, status, kind, format) in self.custposts:
if kind == 'article' or kind == 'page':
pass
else:
cust_data.append((title, kind))
self.assertEqual(3, len(cust_data))
self.assertEqual(
('A custom post in category 4', 'custom1'),
cust_data[0])
self.assertEqual(
('A custom post in category 5', 'custom1'),
cust_data[1])
self.assertEqual(
('A 2nd custom post type also in category 5', 'custom2'),
cust_data[2])
def test_custom_posts_put_in_own_dir(self):
silent_f2p = mute(True)(fields2pelican)
test_posts = []
for post in self.custposts:
# check post kind
if post[8] == 'article' or post[8] == 'page':
pass
else:
test_posts.append(post)
with temporary_folder() as temp:
fnames = list(silent_f2p(test_posts, 'markdown',
temp, wp_custpost=True))
index = 0
for post in test_posts:
name = post[2]
kind = post[8]
name += '.md'
filename = os.path.join(kind, name)
out_name = fnames[index]
self.assertTrue(out_name.endswith(filename))
index += 1
def test_custom_posts_put_in_own_dir_and_catagory_sub_dir(self):
silent_f2p = mute(True)(fields2pelican)
test_posts = []
for post in self.custposts:
# check post kind
if post[8] == 'article' or post[8] == 'page':
pass
else:
test_posts.append(post)
with temporary_folder() as temp:
fnames = list(silent_f2p(test_posts, 'markdown', temp,
wp_custpost=True, dircat=True))
index = 0
for post in test_posts:
name = post[2]
kind = post[8]
category = slugify(post[5][0])
name += '.md'
filename = os.path.join(kind, category, name)
out_name = fnames[index]
self.assertTrue(out_name.endswith(filename))
index += 1
def test_wp_custpost_true_dirpage_false(self):
# pages should only be put in their own directory when dirpage = True
silent_f2p = mute(True)(fields2pelican)
test_posts = []
for post in self.custposts:
# check post kind
if post[8] == 'page':
test_posts.append(post)
with temporary_folder() as temp:
fnames = list(silent_f2p(test_posts, 'markdown', temp,
wp_custpost=True, dirpage=False))
index = 0
for post in test_posts:
name = post[2]
name += '.md'
filename = os.path.join('pages', name)
out_name = fnames[index]
self.assertFalse(out_name.endswith(filename))
def test_can_toggle_raw_html_code_parsing(self):
def r(f):
with open(f, encoding='utf-8') as infile:
return infile.read()
silent_f2p = mute(True)(fields2pelican)
with temporary_folder() as temp:
rst_files = (r(f) for f
in silent_f2p(self.posts, 'markdown', temp))
self.assertTrue(any('<iframe' in rst for rst in rst_files))
rst_files = (r(f) for f
in silent_f2p(self.posts, 'markdown',
temp, strip_raw=True))
self.assertFalse(any('<iframe' in rst for rst in rst_files))
# no effect in rst
rst_files = (r(f) for f in silent_f2p(self.posts, 'rst', temp))
self.assertFalse(any('<iframe' in rst for rst in rst_files))
rst_files = (r(f) for f in silent_f2p(self.posts, 'rst', temp,
strip_raw=True))
self.assertFalse(any('<iframe' in rst for rst in rst_files))
def test_decode_html_entities_in_titles(self):
test_posts = [post for post
in self.posts if post[2] == 'html-entity-test']
self.assertEqual(len(test_posts), 1)
post = test_posts[0]
title = post[0]
        self.assertEqual(title, "A normal post with some <html> entities in "
                                "the title. You can't miss them.")
self.assertNotIn('&', title)
def test_decode_wp_content_returns_empty(self):
""" Check that given an empty string we return an empty string."""
self.assertEqual(decode_wp_content(""), "")
def test_decode_wp_content(self):
""" Check that we can decode a wordpress content string."""
with open(WORDPRESS_ENCODED_CONTENT_SAMPLE, 'r') as encoded_file:
encoded_content = encoded_file.read()
with open(WORDPRESS_DECODED_CONTENT_SAMPLE, 'r') as decoded_file:
decoded_content = decoded_file.read()
self.assertEqual(
decode_wp_content(encoded_content, br=False),
decoded_content)
def test_preserve_verbatim_formatting(self):
def r(f):
with open(f, encoding='utf-8') as infile:
return infile.read()
silent_f2p = mute(True)(fields2pelican)
test_post = filter(
lambda p: p[0].startswith("Code in List"),
self.posts)
with temporary_folder() as temp:
md = [r(f) for f in silent_f2p(test_post, 'markdown', temp)][0]
self.assertTrue(re.search(r'\s+a = \[1, 2, 3\]', md))
self.assertTrue(re.search(r'\s+b = \[4, 5, 6\]', md))
for_line = re.search(r'\s+for i in zip\(a, b\):', md).group(0)
print_line = re.search(r'\s+print i', md).group(0)
self.assertTrue(
for_line.rindex('for') < print_line.rindex('print'))
def test_code_in_list(self):
def r(f):
with open(f, encoding='utf-8') as infile:
return infile.read()
silent_f2p = mute(True)(fields2pelican)
test_post = filter(
lambda p: p[0].startswith("Code in List"),
self.posts)
with temporary_folder() as temp:
md = [r(f) for f in silent_f2p(test_post, 'markdown', temp)][0]
sample_line = re.search(r'- This is a code sample', md).group(0)
code_line = re.search(r'\s+a = \[1, 2, 3\]', md).group(0)
self.assertTrue(sample_line.rindex('This') < code_line.rindex('a'))
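# Readability aid (not part of the original suite): the positional indices
# used throughout the tests above, e.g. post[2], post[5] and post[8], follow
# the tuple order unpacked in test_ignore_empty_posts:
#   (title, content, fname, date, author, categ, tags, status, kind, format)
#      0       1       2      3      4      5     6      7      8      9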
class TestBuildHeader(unittest.TestCase):
def test_build_header(self):
header = build_header('test', None, None, None, None, None)
self.assertEqual(header, 'test\n####\n\n')
def test_build_header_with_fields(self):
header_data = [
'Test Post',
'2014-11-04',
'Alexis Métaireau',
['Programming'],
['Pelican', 'Python'],
'test-post',
]
expected_docutils = '\n'.join([
'Test Post',
'#########',
':date: 2014-11-04',
':author: Alexis Métaireau',
':category: Programming',
':tags: Pelican, Python',
':slug: test-post',
'\n',
])
expected_md = '\n'.join([
'Title: Test Post',
'Date: 2014-11-04',
'Author: Alexis Métaireau',
'Category: Programming',
'Tags: Pelican, Python',
'Slug: test-post',
'\n',
])
self.assertEqual(build_header(*header_data), expected_docutils)
self.assertEqual(build_markdown_header(*header_data), expected_md)
def test_build_header_with_east_asian_characters(self):
header = build_header('これは広い幅の文字だけで構成されたタイトルです',
None, None, None, None, None)
self.assertEqual(header,
('これは広い幅の文字だけで構成されたタイトルです\n'
'##############################################'
'\n\n'))
def test_galleries_added_to_header(self):
header = build_header('test', None, None, None, None, None,
attachments=['output/test1', 'output/test2'])
self.assertEqual(header, ('test\n####\n'
':attachments: output/test1, '
'output/test2\n\n'))
def test_galleries_added_to_markdown_header(self):
header = build_markdown_header('test', None, None, None, None, None,
attachments=['output/test1',
'output/test2'])
self.assertEqual(
header,
'Title: test\nAttachments: output/test1, output/test2\n\n')
@unittest.skipUnless(BeautifulSoup, 'Needs BeautifulSoup module')
@unittest.skipUnless(LXML, 'Needs lxml module')
class TestWordpressXMLAttachements(unittest.TestCase):
def setUp(self):
self.old_locale = locale.setlocale(locale.LC_ALL)
locale.setlocale(locale.LC_ALL, str('C'))
self.attachments = get_attachments(WORDPRESS_XML_SAMPLE)
def tearDown(self):
locale.setlocale(locale.LC_ALL, self.old_locale)
def test_recognise_attachments(self):
self.assertTrue(self.attachments)
        self.assertEqual(3, len(self.attachments))
def test_attachments_associated_with_correct_post(self):
self.assertTrue(self.attachments)
for post in self.attachments.keys():
if post is None:
expected = ('https://upload.wikimedia.org/wikipedia/commons/'
'thumb/2/2c/Pelican_lakes_entrance02.jpg/'
'240px-Pelican_lakes_entrance02.jpg')
self.assertEqual(self.attachments[post][0], expected)
elif post == 'with-excerpt':
expected_invalid = ('http://thisurlisinvalid.notarealdomain/'
'not_an_image.jpg')
expected_pelikan = ('http://en.wikipedia.org/wiki/'
'File:Pelikan_Walvis_Bay.jpg')
self.assertEqual(self.attachments[post][0], expected_invalid)
self.assertEqual(self.attachments[post][1], expected_pelikan)
elif post == 'with-tags':
expected_invalid = ('http://thisurlisinvalid.notarealdomain')
self.assertEqual(self.attachments[post][0], expected_invalid)
else:
self.fail('all attachments should match to a '
'filename or None, {}'
.format(post))
def test_download_attachments(self):
real_file = os.path.join(CUR_DIR, 'content/article.rst')
good_url = path_to_file_url(real_file)
bad_url = 'http://localhost:1/not_a_file.txt'
silent_da = mute()(download_attachments)
with temporary_folder() as temp:
locations = list(silent_da(temp, [good_url, bad_url]))
self.assertEqual(1, len(locations))
directory = locations[0]
self.assertTrue(
directory.endswith(os.path.join('content', 'article.rst')),
directory)
| agpl-3.0 |
ronniebhatti/googletest | test/gtest_xml_test_utils.py | 1815 | 8876 | #!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Unit test utilities for gtest_xml_output"""
__author__ = '[email protected] (Sean Mcafee)'
import re
from xml.dom import minidom, Node
import gtest_test_utils
GTEST_OUTPUT_FLAG = '--gtest_output'
GTEST_DEFAULT_OUTPUT_FILE = 'test_detail.xml'
class GTestXMLTestCase(gtest_test_utils.TestCase):
"""
Base class for tests of Google Test's XML output functionality.
"""
def AssertEquivalentNodes(self, expected_node, actual_node):
"""
Asserts that actual_node (a DOM node object) is equivalent to
expected_node (another DOM node object), in that either both of
them are CDATA nodes and have the same value, or both are DOM
elements and actual_node meets all of the following conditions:
* It has the same tag name as expected_node.
* It has the same set of attributes as expected_node, each with
the same value as the corresponding attribute of expected_node.
Exceptions are any attribute named "time", which needs only be
convertible to a floating-point number and any attribute named
"type_param" which only has to be non-empty.
* It has an equivalent set of child nodes (including elements and
CDATA sections) as expected_node. Note that we ignore the
order of the children as they are not guaranteed to be in any
particular order.
"""
if expected_node.nodeType == Node.CDATA_SECTION_NODE:
self.assertEquals(Node.CDATA_SECTION_NODE, actual_node.nodeType)
self.assertEquals(expected_node.nodeValue, actual_node.nodeValue)
return
self.assertEquals(Node.ELEMENT_NODE, actual_node.nodeType)
self.assertEquals(Node.ELEMENT_NODE, expected_node.nodeType)
self.assertEquals(expected_node.tagName, actual_node.tagName)
expected_attributes = expected_node.attributes
    actual_attributes = actual_node.attributes
self.assertEquals(
expected_attributes.length, actual_attributes.length,
'attribute numbers differ in element %s:\nExpected: %r\nActual: %r' % (
actual_node.tagName, expected_attributes.keys(),
actual_attributes.keys()))
for i in range(expected_attributes.length):
expected_attr = expected_attributes.item(i)
actual_attr = actual_attributes.get(expected_attr.name)
self.assert_(
actual_attr is not None,
'expected attribute %s not found in element %s' %
(expected_attr.name, actual_node.tagName))
self.assertEquals(
expected_attr.value, actual_attr.value,
        'values of attribute %s in element %s differ: %s vs %s' %
(expected_attr.name, actual_node.tagName,
expected_attr.value, actual_attr.value))
expected_children = self._GetChildren(expected_node)
actual_children = self._GetChildren(actual_node)
self.assertEquals(
len(expected_children), len(actual_children),
'number of child elements differ in element ' + actual_node.tagName)
for child_id, child in expected_children.iteritems():
self.assert_(child_id in actual_children,
'<%s> is not in <%s> (in element %s)' %
(child_id, actual_children, actual_node.tagName))
self.AssertEquivalentNodes(child, actual_children[child_id])
identifying_attribute = {
'testsuites': 'name',
'testsuite': 'name',
'testcase': 'name',
'failure': 'message',
}
def _GetChildren(self, element):
"""
Fetches all of the child nodes of element, a DOM Element object.
Returns them as the values of a dictionary keyed by the IDs of the
children. For <testsuites>, <testsuite> and <testcase> elements, the ID
is the value of their "name" attribute; for <failure> elements, it is
the value of the "message" attribute; CDATA sections and non-whitespace
text nodes are concatenated into a single CDATA section with ID
"detail". An exception is raised if any element other than the above
four is encountered, if two child elements with the same identifying
attributes are encountered, or if any other type of node is encountered.
"""
children = {}
for child in element.childNodes:
if child.nodeType == Node.ELEMENT_NODE:
self.assert_(child.tagName in self.identifying_attribute,
'Encountered unknown element <%s>' % child.tagName)
childID = child.getAttribute(self.identifying_attribute[child.tagName])
self.assert_(childID not in children)
children[childID] = child
elif child.nodeType in [Node.TEXT_NODE, Node.CDATA_SECTION_NODE]:
if 'detail' not in children:
if (child.nodeType == Node.CDATA_SECTION_NODE or
not child.nodeValue.isspace()):
children['detail'] = child.ownerDocument.createCDATASection(
child.nodeValue)
else:
children['detail'].nodeValue += child.nodeValue
else:
self.fail('Encountered unexpected node type %d' % child.nodeType)
return children
def NormalizeXml(self, element):
"""
Normalizes Google Test's XML output to eliminate references to transient
information that may change from run to run.
* The "time" attribute of <testsuites>, <testsuite> and <testcase>
elements is replaced with a single asterisk, if it contains
      only digits, optionally with a fractional part.
* The "timestamp" attribute of <testsuites> elements is replaced with a
single asterisk, if it contains a valid ISO8601 datetime value.
* The "type_param" attribute of <testcase> elements is replaced with a
      single asterisk (if it is non-empty) as it is the type name returned
by the compiler and is platform dependent.
* The line info reported in the first line of the "message"
attribute and CDATA section of <failure> elements is replaced with the
file's basename and a single asterisk for the line number.
* The directory names in file paths are removed.
* The stack traces are removed.
"""
if element.tagName == 'testsuites':
timestamp = element.getAttributeNode('timestamp')
timestamp.value = re.sub(r'^\d{4}-\d\d-\d\dT\d\d:\d\d:\d\d$',
'*', timestamp.value)
if element.tagName in ('testsuites', 'testsuite', 'testcase'):
time = element.getAttributeNode('time')
time.value = re.sub(r'^\d+(\.\d+)?$', '*', time.value)
type_param = element.getAttributeNode('type_param')
if type_param and type_param.value:
type_param.value = '*'
elif element.tagName == 'failure':
source_line_pat = r'^.*[/\\](.*:)\d+\n'
# Replaces the source line information with a normalized form.
message = element.getAttributeNode('message')
message.value = re.sub(source_line_pat, '\\1*\n', message.value)
for child in element.childNodes:
if child.nodeType == Node.CDATA_SECTION_NODE:
# Replaces the source line information with a normalized form.
cdata = re.sub(source_line_pat, '\\1*\n', child.nodeValue)
# Removes the actual stack trace.
child.nodeValue = re.sub(r'\nStack trace:\n(.|\n)*',
'', cdata)
for child in element.childNodes:
if child.nodeType == Node.ELEMENT_NODE:
self.NormalizeXml(child)
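# Hedged illustration (not used by the suite): the "time" regular expression
# from NormalizeXml above stars out plain numeric values and leaves anything
# else untouched.
def _normalize_time_example():
  assert re.sub(r'^\d+(\.\d+)?$', '*', '0.25') == '*'
  assert re.sub(r'^\d+(\.\d+)?$', '*', 'not-a-number') == 'not-a-number'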
| bsd-3-clause |
solos/pylons | tests/test_webapps/filestotest/functional_controller_cache_decorator.py | 4 | 1874 | import time
from projectname.tests import *
class TestCacheController(TestController):
def test_default_cache_decorator(self):
response = self.app.get(url(controller='cache', action='test_default_cache_decorator'))
assert 'Counter=1' in response
response = self.app.get(url(controller='cache', action='test_default_cache_decorator'))
assert 'Counter=1' in response
response = self.app.get(url(controller='cache', action='test_get_cache_decorator', param="123"))
assert 'Counter=2' in response
response = self.app.get(url(controller='cache', action='test_get_cache_decorator', param="123"))
assert 'Counter=2' in response
response = self.app.get(url(controller='cache', action='test_expire_cache_decorator'))
assert 'Counter=3' in response
response = self.app.get(url(controller='cache', action='test_expire_cache_decorator'))
assert 'Counter=3' in response
time.sleep(8)
response = self.app.get(url(controller='cache', action='test_expire_cache_decorator'))
assert 'Counter=4' in response
response = self.app.get(url(controller='cache', action='test_key_cache_decorator', id=1))
assert 'Counter=5' in response
response = self.app.get(url(controller='cache', action='test_key_cache_decorator', id=2))
assert 'Counter=6' in response
response = self.app.get(url(controller='cache', action='test_key_cache_decorator', id=1))
assert 'Counter=5' in response
response = self.app.get(url(controller='cache', action='test_keyslist_cache_decorator', id=1, id2=2))
assert 'Counter=7' in response
response = self.app.get(url(controller='cache', action='test_keyslist_cache_decorator', id=1, id2=2))
assert 'Counter=7' in response
| bsd-3-clause |
tanyaweaver/data-structures | src/binary_heap.py | 1 | 3042 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import unicode_literals, division
class BinaryHeap(object):
"""Binary Heap data structure class object"""
def __init__(self, iterable=None):
"""Init an instance with the option to pass in an iterable"""
self._list = []
try:
for element in iterable:
self._list.append(element)
self._list.sort()
except TypeError:
if iterable is not None:
self._list.append(iterable)
def _find_parent_index(self, child_index):
"""Returns parent's index of a value given its index"""
return (child_index - 1) // 2
def _find_smaller_child_index(self, parent_index):
"""Returns a value's smaller child's index given its index,
returns left child's index if right child does not exist"""
left_child_index = parent_index * 2 + 1
left_child_value = self._list[left_child_index]
right_child_index = parent_index * 2 + 2
try:
right_child_value = self._list[right_child_index]
except IndexError:
return left_child_index
return left_child_index if left_child_value <=\
right_child_value else right_child_index
def _swap_indexes(self, index1, index2):
"""Helper function that swap 2 indexes of _list"""
self._list[index1], self._list[index2] =\
self._list[index2], self._list[index1]
def push(self, value):
"""Push to the end of heap and compare with its parent's value.
swap if needed"""
self._list.append(value)
if len(self._list) > 1:
child_index = len(self._list) - 1
parent_index = self._find_parent_index(child_index)
while self._list[child_index] < self._list[parent_index]:
self._swap_indexes(child_index, parent_index)
if parent_index > 0:
child_index = parent_index
else:
break
parent_index = self._find_parent_index(child_index)
def pop(self):
"""Pop the top value off the heap and replace with the last value
Then compare with its children value and swap if needed"""
try:
self._swap_indexes(0, -1)
except IndexError:
raise IndexError("can't pop off an empty heap")
popped_value = self._list.pop()
if len(self._list) > 1:
parent_index = 0
smaller_child_index = self._find_smaller_child_index(parent_index)
while self._list[parent_index] > self._list[smaller_child_index]:
self._swap_indexes(smaller_child_index, parent_index)
parent_index = smaller_child_index
try:
smaller_child_index = self._find_smaller_child_index(
parent_index
)
except IndexError:
break
return popped_value
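# Minimal usage sketch (illustrative; not part of the original module):
# values come back out of the min-heap in ascending order.
def _binary_heap_example():
    heap = BinaryHeap([5, 3, 8])
    heap.push(1)
    assert [heap.pop() for _ in range(4)] == [1, 3, 5, 8]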
| mit |
enddo/HatKey | Lib/web/wsgiserver/wsgiserver3.py | 1 | 81717 | """A high-speed, production ready, thread pooled, generic HTTP server.
Simplest example on how to use this module directly
(without using CherryPy's application machinery)::
from cherrypy import wsgiserver
def my_crazy_app(environ, start_response):
status = '200 OK'
response_headers = [('Content-type','text/plain')]
start_response(status, response_headers)
return ['Hello world!']
server = wsgiserver.CherryPyWSGIServer(
('0.0.0.0', 8070), my_crazy_app,
server_name='www.cherrypy.example')
server.start()
The CherryPy WSGI server can serve as many WSGI applications
as you want in one instance by using a WSGIPathInfoDispatcher::
d = WSGIPathInfoDispatcher({'/': my_crazy_app, '/blog': my_blog_app})
server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 80), d)
Want SSL support? Just set server.ssl_adapter to an SSLAdapter instance.
This won't call the CherryPy engine (application side) at all, only the
HTTP server, which is independent from the rest of CherryPy. Don't
let the name "CherryPyWSGIServer" throw you; the name merely reflects
its origin, not its coupling.
For those of you wanting to understand internals of this module, here's the
basic call flow. The server's listening thread runs a very tight loop,
sticking incoming connections onto a Queue::
server = CherryPyWSGIServer(...)
server.start()
while True:
tick()
# This blocks until a request comes in:
child = socket.accept()
conn = HTTPConnection(child, ...)
server.requests.put(conn)
Worker threads are kept in a pool and poll the Queue, popping off and then
handling each connection in turn. Each connection can consist of an arbitrary
number of requests and their responses, so we run a nested loop::
while True:
conn = server.requests.get()
conn.communicate()
-> while True:
req = HTTPRequest(...)
req.parse_request()
-> # Read the Request-Line, e.g. "GET /page HTTP/1.1"
req.rfile.readline()
read_headers(req.rfile, req.inheaders)
req.respond()
-> response = app(...)
try:
for chunk in response:
if chunk:
req.write(chunk)
finally:
if hasattr(response, "close"):
response.close()
if req.close_connection:
return
"""
__all__ = ['HTTPRequest', 'HTTPConnection', 'HTTPServer',
'SizeCheckWrapper', 'KnownLengthRFile', 'ChunkedRFile',
'CP_makefile',
'MaxSizeExceeded', 'NoSSLError', 'FatalSSLAlert',
'WorkerThread', 'ThreadPool', 'SSLAdapter',
'CherryPyWSGIServer',
'Gateway', 'WSGIGateway', 'WSGIGateway_10', 'WSGIGateway_u0',
'WSGIPathInfoDispatcher', 'get_ssl_adapter_class',
'socket_errors_to_ignore']
import os
try:
import queue
except ImportError:
import Queue as queue
import re
import email.utils
import socket
import sys
import threading
import time
import traceback as traceback_
import errno
import logging
from urllib.parse import urlparse
try:
# prefer slower Python-based io module
import _pyio as io
except ImportError:
# Python 2.6
import io
try:
import pkg_resources
except ImportError:
pass
if 'win' in sys.platform and hasattr(socket, "AF_INET6"):
if not hasattr(socket, 'IPPROTO_IPV6'):
socket.IPPROTO_IPV6 = 41
if not hasattr(socket, 'IPV6_V6ONLY'):
socket.IPV6_V6ONLY = 27
DEFAULT_BUFFER_SIZE = io.DEFAULT_BUFFER_SIZE
try:
cp_version = pkg_resources.require('cherrypy')[0].version
except Exception:
cp_version = 'unknown'
if sys.version_info >= (3, 0):
unicodestr = str
basestring = (bytes, str)
def ntob(n, encoding='ISO-8859-1'):
"""Return the given native string as a byte string in the given
encoding.
"""
# In Python 3, the native string type is unicode
return n.encode(encoding)
else:
unicodestr = unicode
basestring = basestring
def ntob(n, encoding='ISO-8859-1'):
"""Return the given native string as a byte string in the given
encoding.
"""
# In Python 2, the native string type is bytes. Assume it's already
# in the given encoding, which for ISO-8859-1 is almost always what
# was intended.
return n
LF = ntob('\n')
CRLF = ntob('\r\n')
TAB = ntob('\t')
SPACE = ntob(' ')
COLON = ntob(':')
SEMICOLON = ntob(';')
EMPTY = ntob('')
NUMBER_SIGN = ntob('#')
QUESTION_MARK = ntob('?')
ASTERISK = ntob('*')
FORWARD_SLASH = ntob('/')
quoted_slash = re.compile(ntob("(?i)%2F"))
def plat_specific_errors(*errnames):
"""Return error numbers for all errors in errnames on this platform.
The 'errno' module contains different global constants depending on
the specific platform (OS). This function will return the list of
numeric values for a given list of potential names.
"""
errno_names = dir(errno)
nums = [getattr(errno, k) for k in errnames if k in errno_names]
# de-dupe the list
return list(dict.fromkeys(nums).keys())
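# Hedged illustration (not used by the server): names missing from the local
# errno module are skipped, so only EINTR's number is returned here.
def _plat_specific_errors_example():
    assert plat_specific_errors("EINTR", "NO_SUCH_ERRNO") == [errno.EINTR]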
socket_error_eintr = plat_specific_errors("EINTR", "WSAEINTR")
socket_errors_to_ignore = plat_specific_errors(
"EPIPE",
"EBADF", "WSAEBADF",
"ENOTSOCK", "WSAENOTSOCK",
"ETIMEDOUT", "WSAETIMEDOUT",
"ECONNREFUSED", "WSAECONNREFUSED",
"ECONNRESET", "WSAECONNRESET",
"ECONNABORTED", "WSAECONNABORTED",
"ENETRESET", "WSAENETRESET",
"EHOSTDOWN", "EHOSTUNREACH",
)
socket_errors_to_ignore.append("timed out")
socket_errors_to_ignore.append("The read operation timed out")
if sys.platform == 'darwin':
    socket_errors_to_ignore.extend(plat_specific_errors("EPROTOTYPE"))
socket_errors_nonblocking = plat_specific_errors(
'EAGAIN', 'EWOULDBLOCK', 'WSAEWOULDBLOCK')
comma_separated_headers = [
ntob(h) for h in
['Accept', 'Accept-Charset', 'Accept-Encoding',
'Accept-Language', 'Accept-Ranges', 'Allow', 'Cache-Control',
'Connection', 'Content-Encoding', 'Content-Language', 'Expect',
'If-Match', 'If-None-Match', 'Pragma', 'Proxy-Authenticate', 'TE',
'Trailer', 'Transfer-Encoding', 'Upgrade', 'Vary', 'Via', 'Warning',
'WWW-Authenticate']
]
if not hasattr(logging, 'statistics'):
logging.statistics = {}
def read_headers(rfile, hdict=None):
"""Read headers from the given stream into the given header dict.
If hdict is None, a new header dict is created. Returns the populated
header dict.
Headers which are repeated are folded together using a comma if their
specification so dictates.
This function raises ValueError when the read bytes violate the HTTP spec.
You should probably return "400 Bad Request" if this happens.
"""
if hdict is None:
hdict = {}
while True:
line = rfile.readline()
if not line:
# No more data--illegal end of headers
raise ValueError("Illegal end of headers.")
if line == CRLF:
# Normal end of headers
break
if not line.endswith(CRLF):
raise ValueError("HTTP requires CRLF terminators")
if line[0] in (SPACE, TAB):
# It's a continuation line.
v = line.strip()
else:
try:
k, v = line.split(COLON, 1)
except ValueError:
raise ValueError("Illegal header line.")
# TODO: what about TE and WWW-Authenticate?
k = k.strip().title()
v = v.strip()
hname = k
if k in comma_separated_headers:
existing = hdict.get(hname)
if existing:
v = b", ".join((existing, v))
hdict[hname] = v
return hdict
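def _read_headers_example():
    """Hedged illustration (not used by the server): headers listed in
    comma_separated_headers fold repeated values into a single entry."""
    rfile = io.BytesIO(b"Accept: a\r\nAccept: b\r\n\r\n")
    assert read_headers(rfile) == {b"Accept": b"a, b"}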
class MaxSizeExceeded(Exception):
pass
class SizeCheckWrapper(object):
"""Wraps a file-like object, raising MaxSizeExceeded if too large."""
def __init__(self, rfile, maxlen):
self.rfile = rfile
self.maxlen = maxlen
self.bytes_read = 0
def _check_length(self):
if self.maxlen and self.bytes_read > self.maxlen:
raise MaxSizeExceeded()
def read(self, size=None):
data = self.rfile.read(size)
self.bytes_read += len(data)
self._check_length()
return data
def readline(self, size=None):
if size is not None:
data = self.rfile.readline(size)
self.bytes_read += len(data)
self._check_length()
return data
# User didn't specify a size ...
# We read the line in chunks to make sure it's not a 100MB line !
res = []
while True:
data = self.rfile.readline(256)
self.bytes_read += len(data)
self._check_length()
res.append(data)
# See https://github.com/cherrypy/cherrypy/issues/421
if len(data) < 256 or data[-1:] == LF:
return EMPTY.join(res)
def readlines(self, sizehint=0):
# Shamelessly stolen from StringIO
total = 0
lines = []
line = self.readline()
while line:
lines.append(line)
total += len(line)
if 0 < sizehint <= total:
break
line = self.readline()
return lines
def close(self):
self.rfile.close()
def __iter__(self):
return self
def __next__(self):
data = next(self.rfile)
self.bytes_read += len(data)
self._check_length()
return data
def next(self):
data = self.rfile.next()
self.bytes_read += len(data)
self._check_length()
return data
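def _size_check_example():
    """Hedged illustration (not used by the server): once more than maxlen
    bytes have been read, MaxSizeExceeded is raised."""
    wrapped = SizeCheckWrapper(io.BytesIO(b"abcdef"), maxlen=4)
    assert wrapped.read(3) == b"abc"
    try:
        wrapped.read(3)  # pushes bytes_read to 6, past maxlen=4
    except MaxSizeExceeded:
        pass
    else:
        raise AssertionError("expected MaxSizeExceeded")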
class KnownLengthRFile(object):
"""Wraps a file-like object, returning an empty string when exhausted."""
def __init__(self, rfile, content_length):
self.rfile = rfile
self.remaining = content_length
def read(self, size=None):
if self.remaining == 0:
return b''
if size is None:
size = self.remaining
else:
size = min(size, self.remaining)
data = self.rfile.read(size)
self.remaining -= len(data)
return data
def readline(self, size=None):
if self.remaining == 0:
return b''
if size is None:
size = self.remaining
else:
size = min(size, self.remaining)
data = self.rfile.readline(size)
self.remaining -= len(data)
return data
def readlines(self, sizehint=0):
# Shamelessly stolen from StringIO
total = 0
lines = []
line = self.readline(sizehint)
while line:
lines.append(line)
total += len(line)
if 0 < sizehint <= total:
break
line = self.readline(sizehint)
return lines
def close(self):
self.rfile.close()
def __iter__(self):
return self
def __next__(self):
data = next(self.rfile)
self.remaining -= len(data)
return data
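def _known_length_example():
    """Hedged illustration (not used by the server): reads stop after
    content_length bytes and then return an empty byte string."""
    rfile = KnownLengthRFile(io.BytesIO(b"bodyEXTRA"), 4)
    assert rfile.read() == b"body"
    assert rfile.read() == b""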
class ChunkedRFile(object):
"""Wraps a file-like object, returning an empty string when exhausted.
This class is intended to provide a conforming wsgi.input value for
request entities that have been encoded with the 'chunked' transfer
encoding.
"""
def __init__(self, rfile, maxlen, bufsize=8192):
self.rfile = rfile
self.maxlen = maxlen
self.bytes_read = 0
self.buffer = EMPTY
self.bufsize = bufsize
self.closed = False
def _fetch(self):
if self.closed:
return
line = self.rfile.readline()
self.bytes_read += len(line)
if self.maxlen and self.bytes_read > self.maxlen:
raise MaxSizeExceeded("Request Entity Too Large", self.maxlen)
line = line.strip().split(SEMICOLON, 1)
try:
chunk_size = line.pop(0)
chunk_size = int(chunk_size, 16)
except ValueError:
raise ValueError("Bad chunked transfer size: " + repr(chunk_size))
if chunk_size <= 0:
self.closed = True
return
## if line: chunk_extension = line[0]
if self.maxlen and self.bytes_read + chunk_size > self.maxlen:
raise IOError("Request Entity Too Large")
chunk = self.rfile.read(chunk_size)
self.bytes_read += len(chunk)
self.buffer += chunk
crlf = self.rfile.read(2)
if crlf != CRLF:
raise ValueError(
"Bad chunked transfer coding (expected '\\r\\n', "
"got " + repr(crlf) + ")")
def read(self, size=None):
data = EMPTY
while True:
if size and len(data) >= size:
return data
if not self.buffer:
self._fetch()
if not self.buffer:
# EOF
return data
if size:
remaining = size - len(data)
data += self.buffer[:remaining]
self.buffer = self.buffer[remaining:]
            else:
                # Consume the whole buffer so the loop can fetch more data
                # instead of re-appending the same bytes forever.
                data += self.buffer
                self.buffer = EMPTY
def readline(self, size=None):
data = EMPTY
while True:
if size and len(data) >= size:
return data
if not self.buffer:
self._fetch()
if not self.buffer:
# EOF
return data
newline_pos = self.buffer.find(LF)
if size:
if newline_pos == -1:
remaining = size - len(data)
data += self.buffer[:remaining]
self.buffer = self.buffer[remaining:]
else:
remaining = min(size - len(data), newline_pos)
data += self.buffer[:remaining]
self.buffer = self.buffer[remaining:]
            else:
                if newline_pos == -1:
                    data += self.buffer
                    self.buffer = EMPTY
                else:
                    # Consume through the LF and return the completed line.
                    data += self.buffer[:newline_pos + 1]
                    self.buffer = self.buffer[newline_pos + 1:]
                    return data
def readlines(self, sizehint=0):
# Shamelessly stolen from StringIO
total = 0
lines = []
line = self.readline(sizehint)
while line:
lines.append(line)
total += len(line)
if 0 < sizehint <= total:
break
line = self.readline(sizehint)
return lines
def read_trailer_lines(self):
if not self.closed:
raise ValueError(
"Cannot read trailers until the request body has been read.")
while True:
line = self.rfile.readline()
if not line:
# No more data--illegal end of headers
raise ValueError("Illegal end of headers.")
self.bytes_read += len(line)
if self.maxlen and self.bytes_read > self.maxlen:
raise IOError("Request Entity Too Large")
if line == CRLF:
# Normal end of headers
break
if not line.endswith(CRLF):
raise ValueError("HTTP requires CRLF terminators")
yield line
def close(self):
self.rfile.close()
    def __iter__(self):
        # Yield whole lines until the chunked stream is exhausted.
        line = self.readline()
        while line:
            yield line
            line = self.readline()
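def _chunked_rfile_example():
    """Hedged illustration (not used by the server): decoding one 'chunked'
    body followed by the terminating zero-size chunk."""
    raw = io.BytesIO(b"5\r\nhello\r\n0\r\n\r\n")
    assert ChunkedRFile(raw, maxlen=None).read() == b"hello"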
class HTTPRequest(object):
"""An HTTP Request (and response).
A single HTTP connection may consist of multiple request/response pairs.
"""
server = None
"""The HTTPServer object which is receiving this request."""
conn = None
"""The HTTPConnection object on which this request connected."""
inheaders = {}
"""A dict of request headers."""
outheaders = []
"""A list of header tuples to write in the response."""
ready = False
"""When True, the request has been parsed and is ready to begin generating
the response. When False, signals the calling Connection that the response
should not be generated and the connection should close."""
close_connection = False
"""Signals the calling Connection that the request should close. This does
not imply an error! The client and/or server may each request that the
connection be closed."""
chunked_write = False
"""If True, output will be encoded with the "chunked" transfer-coding.
This value is set automatically inside send_headers."""
def __init__(self, server, conn):
self.server = server
self.conn = conn
self.ready = False
self.started_request = False
self.scheme = ntob("http")
if self.server.ssl_adapter is not None:
self.scheme = ntob("https")
# Use the lowest-common protocol in case read_request_line errors.
self.response_protocol = 'HTTP/1.0'
self.inheaders = {}
self.status = ""
self.outheaders = []
self.sent_headers = False
self.close_connection = self.__class__.close_connection
self.chunked_read = False
self.chunked_write = self.__class__.chunked_write
def parse_request(self):
"""Parse the next HTTP request start-line and message-headers."""
self.rfile = SizeCheckWrapper(self.conn.rfile,
self.server.max_request_header_size)
try:
success = self.read_request_line()
except MaxSizeExceeded:
self.simple_response(
"414 Request-URI Too Long",
"The Request-URI sent with the request exceeds the maximum "
"allowed bytes.")
return
else:
if not success:
return
try:
success = self.read_request_headers()
except MaxSizeExceeded:
self.simple_response(
"413 Request Entity Too Large",
"The headers sent with the request exceed the maximum "
"allowed bytes.")
return
else:
if not success:
return
self.ready = True
def read_request_line(self):
# HTTP/1.1 connections are persistent by default. If a client
# requests a page, then idles (leaves the connection open),
# then rfile.readline() will raise socket.error("timed out").
# Note that it does this based on the value given to settimeout(),
# and doesn't need the client to request or acknowledge the close
# (although your TCP stack might suffer for it: cf Apache's history
# with FIN_WAIT_2).
request_line = self.rfile.readline()
# Set started_request to True so communicate() knows to send 408
# from here on out.
self.started_request = True
if not request_line:
return False
if request_line == CRLF:
# RFC 2616 sec 4.1: "...if the server is reading the protocol
# stream at the beginning of a message and receives a CRLF
# first, it should ignore the CRLF."
# But only ignore one leading line! else we enable a DoS.
request_line = self.rfile.readline()
if not request_line:
return False
if not request_line.endswith(CRLF):
self.simple_response(
"400 Bad Request", "HTTP requires CRLF terminators")
return False
try:
method, uri, req_protocol = request_line.strip().split(SPACE, 2)
# The [x:y] slicing is necessary for byte strings to avoid getting
# ord's
rp = int(req_protocol[5:6]), int(req_protocol[7:8])
except ValueError:
self.simple_response("400 Bad Request", "Malformed Request-Line")
return False
self.uri = uri
self.method = method
# uri may be an abs_path (including "http://host.domain.tld");
scheme, authority, path = self.parse_request_uri(uri)
if path is None:
self.simple_response("400 Bad Request",
"Invalid path in Request-URI.")
return False
if NUMBER_SIGN in path:
self.simple_response("400 Bad Request",
"Illegal #fragment in Request-URI.")
return False
if scheme:
self.scheme = scheme
qs = EMPTY
if QUESTION_MARK in path:
path, qs = path.split(QUESTION_MARK, 1)
# Unquote the path+params (e.g. "/this%20path" -> "/this path").
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
#
# But note that "...a URI must be separated into its components
# before the escaped characters within those components can be
# safely decoded." http://www.ietf.org/rfc/rfc2396.txt, sec 2.4.2
# Therefore, "/this%2Fpath" becomes "/this%2Fpath", not "/this/path".
try:
atoms = [self.unquote_bytes(x) for x in quoted_slash.split(path)]
except ValueError:
ex = sys.exc_info()[1]
self.simple_response("400 Bad Request", ex.args[0])
return False
path = b"%2F".join(atoms)
self.path = path
# Note that, like wsgiref and most other HTTP servers,
# we "% HEX HEX"-unquote the path but not the query string.
self.qs = qs
# Compare request and server HTTP protocol versions, in case our
# server does not support the requested protocol. Limit our output
# to min(req, server). We want the following output:
# request server actual written supported response
# protocol protocol response protocol feature set
# a 1.0 1.0 1.0 1.0
# b 1.0 1.1 1.1 1.0
# c 1.1 1.0 1.0 1.0
# d 1.1 1.1 1.1 1.1
# Notice that, in (b), the response will be "HTTP/1.1" even though
# the client only understands 1.0. RFC 2616 10.5.6 says we should
# only return 505 if the _major_ version is different.
# The [x:y] slicing is necessary for byte strings to avoid getting
# ord's
sp = int(self.server.protocol[5:6]), int(self.server.protocol[7:8])
if sp[0] != rp[0]:
self.simple_response("505 HTTP Version Not Supported")
return False
self.request_protocol = req_protocol
self.response_protocol = "HTTP/%s.%s" % min(rp, sp)
return True
def read_request_headers(self):
"""Read self.rfile into self.inheaders. Return success."""
# then all the http headers
try:
read_headers(self.rfile, self.inheaders)
except ValueError:
ex = sys.exc_info()[1]
self.simple_response("400 Bad Request", ex.args[0])
return False
mrbs = self.server.max_request_body_size
if mrbs and int(self.inheaders.get(b"Content-Length", 0)) > mrbs:
self.simple_response(
"413 Request Entity Too Large",
"The entity sent with the request exceeds the maximum "
"allowed bytes.")
return False
# Persistent connection support
if self.response_protocol == "HTTP/1.1":
# Both server and client are HTTP/1.1
if self.inheaders.get(b"Connection", b"") == b"close":
self.close_connection = True
else:
# Either the server or client (or both) are HTTP/1.0
if self.inheaders.get(b"Connection", b"") != b"Keep-Alive":
self.close_connection = True
# Transfer-Encoding support
te = None
if self.response_protocol == "HTTP/1.1":
te = self.inheaders.get(b"Transfer-Encoding")
if te:
te = [x.strip().lower() for x in te.split(b",") if x.strip()]
self.chunked_read = False
if te:
for enc in te:
if enc == b"chunked":
self.chunked_read = True
else:
# Note that, even if we see "chunked", we must reject
# if there is an extension we don't recognize.
self.simple_response("501 Unimplemented")
self.close_connection = True
return False
# From PEP 333:
# "Servers and gateways that implement HTTP 1.1 must provide
# transparent support for HTTP 1.1's "expect/continue" mechanism.
# This may be done in any of several ways:
# 1. Respond to requests containing an Expect: 100-continue request
# with an immediate "100 Continue" response, and proceed normally.
# 2. Proceed with the request normally, but provide the application
# with a wsgi.input stream that will send the "100 Continue"
# response if/when the application first attempts to read from
# the input stream. The read request must then remain blocked
# until the client responds.
# 3. Wait until the client decides that the server does not support
# expect/continue, and sends the request body on its own.
# (This is suboptimal, and is not recommended.)
#
# We used to do 3, but are now doing 1. Maybe we'll do 2 someday,
# but it seems like it would be a big slowdown for such a rare case.
if self.inheaders.get(b"Expect", b"") == b"100-continue":
# Don't use simple_response here, because it emits headers
# we don't want. See
# https://github.com/cherrypy/cherrypy/issues/951
msg = self.server.protocol.encode(
'ascii') + b" 100 Continue\r\n\r\n"
try:
self.conn.wfile.write(msg)
except socket.error:
x = sys.exc_info()[1]
if x.args[0] not in socket_errors_to_ignore:
raise
return True
def parse_request_uri(self, uri):
"""Parse a Request-URI into (scheme, authority, path).
Note that Request-URI's must be one of::
Request-URI = "*" | absoluteURI | abs_path | authority
Therefore, a Request-URI which starts with a double forward-slash
cannot be a "net_path"::
net_path = "//" authority [ abs_path ]
Instead, it must be interpreted as an "abs_path" with an empty first
path segment::
abs_path = "/" path_segments
path_segments = segment *( "/" segment )
segment = *pchar *( ";" param )
param = *pchar
"""
if uri == ASTERISK:
return None, None, uri
scheme, authority, path, params, query, fragment = urlparse(uri)
if scheme and QUESTION_MARK not in scheme:
# An absoluteURI.
# If there's a scheme (and it must be http or https), then:
# http_URL = "http:" "//" host [ ":" port ] [ abs_path [ "?" query
# ]]
return scheme, authority, path
if uri.startswith(FORWARD_SLASH):
# An abs_path.
return None, None, uri
else:
# An authority.
return None, uri, None
def unquote_bytes(self, path):
"""takes quoted string and unquotes % encoded values"""
res = path.split(b'%')
for i in range(1, len(res)):
item = res[i]
try:
res[i] = bytes([int(item[:2], 16)]) + item[2:]
except ValueError:
raise
return b''.join(res)
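    def _uri_helper_examples(self):
        """Hedged illustration (never called by the server) of the two
        helpers above, using only behavior visible in this file."""
        # The three Request-URI forms named in parse_request_uri's docstring:
        assert self.parse_request_uri(b"*") == (None, None, b"*")
        assert self.parse_request_uri(b"/p?x=1") == (None, None, b"/p?x=1")
        assert self.parse_request_uri(b"http://h/p") == (b"http", b"h", b"/p")
        # "% HEX HEX" unquoting of a single escaped byte:
        assert self.unquote_bytes(b"/this%20path") == b"/this path"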
def respond(self):
"""Call the gateway and write its iterable output."""
mrbs = self.server.max_request_body_size
if self.chunked_read:
self.rfile = ChunkedRFile(self.conn.rfile, mrbs)
else:
cl = int(self.inheaders.get(b"Content-Length", 0))
if mrbs and mrbs < cl:
if not self.sent_headers:
self.simple_response(
"413 Request Entity Too Large",
"The entity sent with the request exceeds the "
"maximum allowed bytes.")
return
self.rfile = KnownLengthRFile(self.conn.rfile, cl)
self.server.gateway(self).respond()
if (self.ready and not self.sent_headers):
self.sent_headers = True
self.send_headers()
if self.chunked_write:
self.conn.wfile.write(b"0\r\n\r\n")
def simple_response(self, status, msg=""):
"""Write a simple response back to the client."""
status = str(status)
buf = [bytes(self.server.protocol, "ascii") + SPACE +
bytes(status, "ISO-8859-1") + CRLF,
bytes("Content-Length: %s\r\n" % len(msg), "ISO-8859-1"),
b"Content-Type: text/plain\r\n"]
if status[:3] in ("413", "414"):
# Request Entity Too Large / Request-URI Too Long
self.close_connection = True
if self.response_protocol == 'HTTP/1.1':
# This will not be true for 414, since read_request_line
# usually raises 414 before reading the whole line, and we
# therefore cannot know the proper response_protocol.
buf.append(b"Connection: close\r\n")
else:
# HTTP/1.0 had no 413/414 status nor Connection header.
# Emit 400 instead and trust the message body is enough.
status = "400 Bad Request"
buf.append(CRLF)
if msg:
if isinstance(msg, unicodestr):
msg = msg.encode("ISO-8859-1")
buf.append(msg)
try:
self.conn.wfile.write(b"".join(buf))
except socket.error:
x = sys.exc_info()[1]
if x.args[0] not in socket_errors_to_ignore:
raise
def write(self, chunk):
"""Write unbuffered data to the client."""
if self.chunked_write and chunk:
buf = [bytes(hex(len(chunk)), 'ASCII')[2:], CRLF, chunk, CRLF]
self.conn.wfile.write(EMPTY.join(buf))
else:
self.conn.wfile.write(chunk)
def send_headers(self):
"""Assert, process, and send the HTTP response message-headers.
You must set self.status, and self.outheaders before calling this.
"""
hkeys = [key.lower() for key, value in self.outheaders]
status = int(self.status[:3])
if status == 413:
# Request Entity Too Large. Close conn to avoid garbage.
self.close_connection = True
elif b"content-length" not in hkeys:
# "All 1xx (informational), 204 (no content),
# and 304 (not modified) responses MUST NOT
# include a message-body." So no point chunking.
if status < 200 or status in (204, 205, 304):
pass
else:
if (self.response_protocol == 'HTTP/1.1'
and self.method != b'HEAD'):
# Use the chunked transfer-coding
self.chunked_write = True
self.outheaders.append((b"Transfer-Encoding", b"chunked"))
else:
# Closing the conn is the only way to determine len.
self.close_connection = True
if b"connection" not in hkeys:
if self.response_protocol == 'HTTP/1.1':
# Both server and client are HTTP/1.1 or better
if self.close_connection:
self.outheaders.append((b"Connection", b"close"))
else:
# Server and/or client are HTTP/1.0
if not self.close_connection:
self.outheaders.append((b"Connection", b"Keep-Alive"))
if (not self.close_connection) and (not self.chunked_read):
# Read any remaining request body data on the socket.
# "If an origin server receives a request that does not include an
# Expect request-header field with the "100-continue" expectation,
# the request includes a request body, and the server responds
# with a final status code before reading the entire request body
# from the transport connection, then the server SHOULD NOT close
# the transport connection until it has read the entire request,
# or until the client closes the connection. Otherwise, the client
# might not reliably receive the response message. However, this
# requirement is not be construed as preventing a server from
# defending itself against denial-of-service attacks, or from
# badly broken client implementations."
remaining = getattr(self.rfile, 'remaining', 0)
if remaining > 0:
self.rfile.read(remaining)
if b"date" not in hkeys:
self.outheaders.append((
b"Date",
email.utils.formatdate(usegmt=True).encode('ISO-8859-1')
))
if b"server" not in hkeys:
self.outheaders.append(
(b"Server", self.server.server_name.encode('ISO-8859-1')))
buf = [self.server.protocol.encode(
'ascii') + SPACE + self.status + CRLF]
for k, v in self.outheaders:
buf.append(k + COLON + SPACE + v + CRLF)
buf.append(CRLF)
self.conn.wfile.write(EMPTY.join(buf))
class NoSSLError(Exception):
"""Exception raised when a client speaks HTTP to an HTTPS socket."""
pass
class FatalSSLAlert(Exception):
"""Exception raised when the SSL implementation signals a fatal alert."""
pass
class CP_BufferedWriter(io.BufferedWriter):
"""Faux file object attached to a socket object."""
def write(self, b):
self._checkClosed()
if isinstance(b, str):
raise TypeError("can't write str to binary stream")
with self._write_lock:
self._write_buf.extend(b)
self._flush_unlocked()
return len(b)
def _flush_unlocked(self):
self._checkClosed("flush of closed file")
while self._write_buf:
try:
                # ssl sockets only accept 'bytes', not bytearrays,
                # so perhaps we should conditionally wrap this for perf?
n = self.raw.write(bytes(self._write_buf))
except io.BlockingIOError as e:
n = e.characters_written
del self._write_buf[:n]
def CP_makefile(sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
if 'r' in mode:
return io.BufferedReader(socket.SocketIO(sock, mode), bufsize)
else:
return CP_BufferedWriter(socket.SocketIO(sock, mode), bufsize)
class HTTPConnection(object):
"""An HTTP connection (active socket).
server: the Server object which received this connection.
socket: the raw socket object (usually TCP) for this connection.
makefile: a fileobject class for reading from the socket.
"""
remote_addr = None
remote_port = None
ssl_env = None
rbufsize = DEFAULT_BUFFER_SIZE
wbufsize = DEFAULT_BUFFER_SIZE
RequestHandlerClass = HTTPRequest
def __init__(self, server, sock, makefile=CP_makefile):
self.server = server
self.socket = sock
self.rfile = makefile(sock, "rb", self.rbufsize)
self.wfile = makefile(sock, "wb", self.wbufsize)
self.requests_seen = 0
def communicate(self):
"""Read each request and respond appropriately."""
request_seen = False
try:
while True:
# (re)set req to None so that if something goes wrong in
# the RequestHandlerClass constructor, the error doesn't
# get written to the previous request.
req = None
req = self.RequestHandlerClass(self.server, self)
# This order of operations should guarantee correct pipelining.
req.parse_request()
if self.server.stats['Enabled']:
self.requests_seen += 1
if not req.ready:
# Something went wrong in the parsing (and the server has
# probably already made a simple_response). Return and
# let the conn close.
return
request_seen = True
req.respond()
if req.close_connection:
return
except socket.error:
e = sys.exc_info()[1]
errnum = e.args[0]
            # sadly SSL sockets return a different (longer) timeout string
if (
errnum == 'timed out' or
errnum == 'The read operation timed out'
):
# Don't error if we're between requests; only error
# if 1) no request has been started at all, or 2) we're
# in the middle of a request.
# See https://github.com/cherrypy/cherrypy/issues/853
if (not request_seen) or (req and req.started_request):
# Don't bother writing the 408 if the response
# has already started being written.
if req and not req.sent_headers:
try:
req.simple_response("408 Request Timeout")
except FatalSSLAlert:
# Close the connection.
return
elif errnum not in socket_errors_to_ignore:
self.server.error_log("socket.error %s" % repr(errnum),
level=logging.WARNING, traceback=True)
if req and not req.sent_headers:
try:
req.simple_response("500 Internal Server Error")
except FatalSSLAlert:
# Close the connection.
return
return
except (KeyboardInterrupt, SystemExit):
raise
except FatalSSLAlert:
# Close the connection.
return
except NoSSLError:
if req and not req.sent_headers:
# Unwrap our wfile
self.wfile = CP_makefile(
self.socket._sock, "wb", self.wbufsize)
req.simple_response(
"400 Bad Request",
"The client sent a plain HTTP request, but this server "
"only speaks HTTPS on this port.")
self.linger = True
except Exception:
e = sys.exc_info()[1]
self.server.error_log(repr(e), level=logging.ERROR, traceback=True)
if req and not req.sent_headers:
try:
req.simple_response("500 Internal Server Error")
except FatalSSLAlert:
# Close the connection.
return
linger = False
def close(self):
"""Close the socket underlying this connection."""
self.rfile.close()
if not self.linger:
# Python's socket module does NOT call close on the kernel
# socket when you call socket.close(). We do so manually here
# because we want this server to send a FIN TCP segment
# immediately. Note this must be called *before* calling
# socket.close(), because the latter drops its reference to
# the kernel socket.
# Python 3 *probably* fixed this with socket._real_close;
# hard to tell.
# self.socket._sock.close()
self.socket.close()
else:
# On the other hand, sometimes we want to hang around for a bit
# to make sure the client has a chance to read our entire
# response. Skipping the close() calls here delays the FIN
# packet until the socket object is garbage-collected later.
# Someday, perhaps, we'll do the full lingering_close that
# Apache does, but not today.
pass
class TrueyZero(object):
"""An object which equals and does math like the integer 0 but evals True.
"""
def __add__(self, other):
return other
def __radd__(self, other):
return other
trueyzero = TrueyZero()
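# Why trueyzero instead of a plain 0: the worker stats below use the legacy
# `cond and x or y` idiom, which only works when x is truthy. trueyzero is
# truthy yet adds like 0, and the short-circuit keeps `self.conn.*` from
# being dereferenced while a worker is idle (self.conn is None then).
# A quick illustration with placeholder names:
#
#     stat = (start_time is None) and trueyzero or conn.bytes_read
#     total + stat    # adds 0 while idle; conn is never touched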
_SHUTDOWNREQUEST = None
class WorkerThread(threading.Thread):
"""Thread which continuously polls a Queue for Connection objects.
Due to the timing issues of polling a Queue, a WorkerThread does not
check its own 'ready' flag after it has started. To stop the thread,
it is necessary to stick a _SHUTDOWNREQUEST object onto the Queue
(one for each running WorkerThread).
"""
conn = None
"""The current connection pulled off the Queue, or None."""
server = None
"""The HTTP Server which spawned this thread, and which owns the
Queue and is placing active connections into it."""
ready = False
"""A simple flag for the calling server to know when this thread
has begun polling the Queue."""
def __init__(self, server):
self.ready = False
self.server = server
self.requests_seen = 0
self.bytes_read = 0
self.bytes_written = 0
self.start_time = None
self.work_time = 0
self.stats = {
'Requests': lambda s: self.requests_seen + (
(self.start_time is None) and
trueyzero or
self.conn.requests_seen
),
'Bytes Read': lambda s: self.bytes_read + (
(self.start_time is None) and
trueyzero or
self.conn.rfile.bytes_read
),
'Bytes Written': lambda s: self.bytes_written + (
(self.start_time is None) and
trueyzero or
self.conn.wfile.bytes_written
),
'Work Time': lambda s: self.work_time + (
(self.start_time is None) and
trueyzero or
time.time() - self.start_time
),
'Read Throughput': lambda s: s['Bytes Read'](s) / (
s['Work Time'](s) or 1e-6),
'Write Throughput': lambda s: s['Bytes Written'](s) / (
s['Work Time'](s) or 1e-6),
}
threading.Thread.__init__(self)
def run(self):
self.server.stats['Worker Threads'][self.getName()] = self.stats
try:
self.ready = True
while True:
conn = self.server.requests.get()
if conn is _SHUTDOWNREQUEST:
return
self.conn = conn
if self.server.stats['Enabled']:
self.start_time = time.time()
try:
conn.communicate()
finally:
conn.close()
if self.server.stats['Enabled']:
self.requests_seen += self.conn.requests_seen
self.bytes_read += self.conn.rfile.bytes_read
self.bytes_written += self.conn.wfile.bytes_written
self.work_time += time.time() - self.start_time
self.start_time = None
self.conn = None
except (KeyboardInterrupt, SystemExit):
exc = sys.exc_info()[1]
self.server.interrupt = exc
class ThreadPool(object):
"""A Request Queue for an HTTPServer which pools threads.
ThreadPool objects must provide min, get(), put(obj), start()
and stop(timeout) attributes.
"""
def __init__(self, server, min=10, max=-1,
accepted_queue_size=-1, accepted_queue_timeout=10):
self.server = server
self.min = min
self.max = max
self._threads = []
self._queue = queue.Queue(maxsize=accepted_queue_size)
self._queue_put_timeout = accepted_queue_timeout
self.get = self._queue.get
def start(self):
"""Start the pool of threads."""
for i in range(self.min):
self._threads.append(WorkerThread(self.server))
for worker in self._threads:
worker.setName("CP Server " + worker.getName())
worker.start()
for worker in self._threads:
while not worker.ready:
time.sleep(.1)
def _get_idle(self):
"""Number of worker threads which are idle. Read-only."""
return len([t for t in self._threads if t.conn is None])
idle = property(_get_idle, doc=_get_idle.__doc__)
def put(self, obj):
self._queue.put(obj, block=True, timeout=self._queue_put_timeout)
if obj is _SHUTDOWNREQUEST:
return
def grow(self, amount):
"""Spawn new worker threads (not above self.max)."""
if self.max > 0:
budget = max(self.max - len(self._threads), 0)
else:
# self.max <= 0 indicates no maximum
budget = float('inf')
n_new = min(amount, budget)
workers = [self._spawn_worker() for i in range(n_new)]
while not all(worker.ready for worker in workers):
time.sleep(.1)
self._threads.extend(workers)
def _spawn_worker(self):
worker = WorkerThread(self.server)
worker.setName("CP Server " + worker.getName())
worker.start()
return worker
def shrink(self, amount):
"""Kill off worker threads (not below self.min)."""
# Grow/shrink the pool if necessary.
# Remove any dead threads from our list
for t in self._threads:
if not t.isAlive():
self._threads.remove(t)
amount -= 1
# calculate the number of threads above the minimum
n_extra = max(len(self._threads) - self.min, 0)
# don't remove more than amount
n_to_remove = min(amount, n_extra)
# put shutdown requests on the queue equal to the number of threads
# to remove. As each request is processed by a worker, that worker
# will terminate and be culled from the list.
for n in range(n_to_remove):
self._queue.put(_SHUTDOWNREQUEST)
def stop(self, timeout=5):
# Must shut down threads here so the code that calls
# this method can know when all threads are stopped.
for worker in self._threads:
self._queue.put(_SHUTDOWNREQUEST)
# Don't join currentThread (when stop is called inside a request).
current = threading.currentThread()
if timeout and timeout >= 0:
endtime = time.time() + timeout
while self._threads:
worker = self._threads.pop()
if worker is not current and worker.isAlive():
try:
if timeout is None or timeout < 0:
worker.join()
else:
remaining_time = endtime - time.time()
if remaining_time > 0:
worker.join(remaining_time)
if worker.isAlive():
# We exhausted the timeout.
# Forcibly shut down the socket.
c = worker.conn
if c and not c.rfile.closed:
try:
c.socket.shutdown(socket.SHUT_RD)
except TypeError:
# pyOpenSSL sockets don't take an arg
c.socket.shutdown()
worker.join()
except (AssertionError,
# Ignore repeated Ctrl-C.
# See
# https://github.com/cherrypy/cherrypy/issues/691.
KeyboardInterrupt):
pass
def _get_qsize(self):
return self._queue.qsize()
qsize = property(_get_qsize)
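# Rough lifecycle sketch for ThreadPool (illustrative only -- HTTPServer
# builds and drives its own pool in __init__/start/stop, so user code
# rarely does this directly; `server` and `conn` are placeholders):
#
#     pool = ThreadPool(server, min=5, max=20)
#     pool.start()            # blocks until all 5 workers are ready
#     pool.put(conn)          # hand an HTTPConnection to a worker
#     pool.stop(timeout=5)    # drain via _SHUTDOWNREQUEST sentinels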
try:
import fcntl
except ImportError:
try:
from ctypes import windll, WinError
import ctypes.wintypes
_SetHandleInformation = windll.kernel32.SetHandleInformation
_SetHandleInformation.argtypes = [
ctypes.wintypes.HANDLE,
ctypes.wintypes.DWORD,
ctypes.wintypes.DWORD,
]
_SetHandleInformation.restype = ctypes.wintypes.BOOL
except ImportError:
def prevent_socket_inheritance(sock):
"""Dummy function, since neither fcntl nor ctypes are available."""
pass
else:
def prevent_socket_inheritance(sock):
"""Mark the given socket fd as non-inheritable (Windows)."""
if not _SetHandleInformation(sock.fileno(), 1, 0):
raise WinError()
else:
def prevent_socket_inheritance(sock):
"""Mark the given socket fd as non-inheritable (POSIX)."""
fd = sock.fileno()
old_flags = fcntl.fcntl(fd, fcntl.F_GETFD)
fcntl.fcntl(fd, fcntl.F_SETFD, old_flags | fcntl.FD_CLOEXEC)
class SSLAdapter(object):
"""Base class for SSL driver library adapters.
Required methods:
* ``wrap(sock) -> (wrapped socket, ssl environ dict)``
* ``makefile(sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE) ->
socket file object``
"""
def __init__(self, certificate, private_key, certificate_chain=None):
self.certificate = certificate
self.private_key = private_key
self.certificate_chain = certificate_chain
def wrap(self, sock):
        raise NotImplementedError
def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
        raise NotImplementedError
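# Minimal shape of a concrete adapter (a sketch, not a real driver; the
# shipped implementations live in ssl_builtin and ssl_pyopenssl). Note that
# HTTPServer.bind() also calls an adapter's bind() hook:
#
#     class ExampleSSLAdapter(SSLAdapter):
#         def bind(self, sock):
#             return sock
#         def wrap(self, sock):
#             tls_sock = ...  # negotiate TLS here
#             return tls_sock, {'SSL_PROTOCOL': 'TLSv1.2'}
#         def makefile(self, sock, mode='r', bufsize=DEFAULT_BUFFER_SIZE):
#             return CP_makefile(sock, mode, bufsize)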
class HTTPServer(object):
"""An HTTP server."""
_bind_addr = "127.0.0.1"
_interrupt = None
gateway = None
"""A Gateway instance."""
minthreads = None
"""The minimum number of worker threads to create (default 10)."""
maxthreads = None
"""The maximum number of worker threads to create (default -1 = no limit).
"""
server_name = None
"""The name of the server; defaults to socket.gethostname()."""
protocol = "HTTP/1.1"
"""The version string to write in the Status-Line of all HTTP responses.
For example, "HTTP/1.1" is the default. This also limits the supported
features used in the response."""
request_queue_size = 5
"""The 'backlog' arg to socket.listen(); max queued connections
(default 5).
"""
shutdown_timeout = 5
"""The total time, in seconds, to wait for worker threads to cleanly exit.
"""
timeout = 10
"""The timeout in seconds for accepted connections (default 10)."""
version = "CherryPy/" + cp_version
"""A version string for the HTTPServer."""
software = None
"""The value to set for the SERVER_SOFTWARE entry in the WSGI environ.
If None, this defaults to ``'%s Server' % self.version``."""
ready = False
"""An internal flag which marks whether the socket is accepting
connections.
"""
max_request_header_size = 0
"""The maximum size, in bytes, for request headers, or 0 for no limit."""
max_request_body_size = 0
"""The maximum size, in bytes, for request bodies, or 0 for no limit."""
nodelay = True
"""If True (the default since 3.1), sets the TCP_NODELAY socket option."""
ConnectionClass = HTTPConnection
"""The class to use for handling HTTP connections."""
ssl_adapter = None
"""An instance of SSLAdapter (or a subclass).
You must have the corresponding SSL driver library installed."""
def __init__(self, bind_addr, gateway, minthreads=10, maxthreads=-1,
server_name=None):
self.bind_addr = bind_addr
self.gateway = gateway
self.requests = ThreadPool(self, min=minthreads or 1, max=maxthreads)
if not server_name:
server_name = socket.gethostname()
self.server_name = server_name
self.clear_stats()
def clear_stats(self):
self._start_time = None
self._run_time = 0
self.stats = {
'Enabled': False,
'Bind Address': lambda s: repr(self.bind_addr),
'Run time': lambda s: (not s['Enabled']) and -1 or self.runtime(),
'Accepts': 0,
'Accepts/sec': lambda s: s['Accepts'] / self.runtime(),
'Queue': lambda s: getattr(self.requests, "qsize", None),
'Threads': lambda s: len(getattr(self.requests, "_threads", [])),
'Threads Idle': lambda s: getattr(self.requests, "idle", None),
'Socket Errors': 0,
'Requests': lambda s: (not s['Enabled']) and -1 or sum(
[w['Requests'](w) for w in s['Worker Threads'].values()], 0),
'Bytes Read': lambda s: (not s['Enabled']) and -1 or sum(
[w['Bytes Read'](w) for w in s['Worker Threads'].values()], 0),
'Bytes Written': lambda s: (not s['Enabled']) and -1 or sum(
[w['Bytes Written'](w) for w in s['Worker Threads'].values()],
0),
'Work Time': lambda s: (not s['Enabled']) and -1 or sum(
[w['Work Time'](w) for w in s['Worker Threads'].values()], 0),
'Read Throughput': lambda s: (not s['Enabled']) and -1 or sum(
[w['Bytes Read'](w) / (w['Work Time'](w) or 1e-6)
for w in s['Worker Threads'].values()], 0),
'Write Throughput': lambda s: (not s['Enabled']) and -1 or sum(
[w['Bytes Written'](w) / (w['Work Time'](w) or 1e-6)
for w in s['Worker Threads'].values()], 0),
'Worker Threads': {},
}
logging.statistics["CherryPy HTTPServer %d" % id(self)] = self.stats
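    # Stats are opt-in. To sample them, flip the flag and call the lambda
    # entries with the stats dict itself (illustrative, assuming `server`
    # is an HTTPServer instance):
    #
    #     server.stats['Enabled'] = True
    #     reqs = server.stats['Requests'](server.stats)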
def runtime(self):
if self._start_time is None:
return self._run_time
else:
return self._run_time + (time.time() - self._start_time)
def __str__(self):
return "%s.%s(%r)" % (self.__module__, self.__class__.__name__,
self.bind_addr)
def _get_bind_addr(self):
return self._bind_addr
def _set_bind_addr(self, value):
if isinstance(value, tuple) and value[0] in ('', None):
# Despite the socket module docs, using '' does not
# allow AI_PASSIVE to work. Passing None instead
# returns '0.0.0.0' like we want. In other words:
            #     host    AI_PASSIVE    result
            #     ''          Y         192.168.x.y
            #     ''          N         192.168.x.y
            #     None        Y         0.0.0.0
            #     None        N         127.0.0.1
# But since you can get the same effect with an explicit
# '0.0.0.0', we deny both the empty string and None as values.
raise ValueError("Host values of '' or None are not allowed. "
"Use '0.0.0.0' (IPv4) or '::' (IPv6) instead "
"to listen on all active interfaces.")
self._bind_addr = value
bind_addr = property(
_get_bind_addr,
_set_bind_addr,
doc="""The interface on which to listen for connections.
For TCP sockets, a (host, port) tuple. Host values may be any IPv4
or IPv6 address, or any valid hostname. The string 'localhost' is a
synonym for '127.0.0.1' (or '::1', if your hosts file prefers IPv6).
The string '0.0.0.0' is a special IPv4 entry meaning "any active
interface" (INADDR_ANY), and '::' is the similar IN6ADDR_ANY for
IPv6. The empty string or None are not allowed.
For UNIX sockets, supply the filename as a string.""")
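    # Examples of accepted values (illustrative):
    #
    #     server.bind_addr = ('0.0.0.0', 8080)   # all IPv4 interfaces
    #     server.bind_addr = ('::', 8080)        # all IPv6 interfaces
    #     server.bind_addr = '/tmp/cp.sock'      # UNIX domain socket
    #     server.bind_addr = ('', 8080)          # raises ValueError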
def start(self):
"""Run the server forever."""
# We don't have to trap KeyboardInterrupt or SystemExit here,
        # because cherrypy.server already does so, calling self.stop() for us.
# If you're using this server with another framework, you should
# trap those exceptions in whatever code block calls start().
self._interrupt = None
if self.software is None:
self.software = "%s Server" % self.version
# Select the appropriate socket
if isinstance(self.bind_addr, basestring):
# AF_UNIX socket
# So we can reuse the socket...
try:
os.unlink(self.bind_addr)
            except OSError:
pass
# So everyone can access the socket...
try:
os.chmod(self.bind_addr, 0o777)
            except OSError:
pass
info = [
(socket.AF_UNIX, socket.SOCK_STREAM, 0, "", self.bind_addr)]
else:
# AF_INET or AF_INET6 socket
# Get the correct address family for our host (allows IPv6
# addresses)
host, port = self.bind_addr
try:
info = socket.getaddrinfo(host, port, socket.AF_UNSPEC,
socket.SOCK_STREAM, 0,
socket.AI_PASSIVE)
except socket.gaierror:
if ':' in self.bind_addr[0]:
info = [(socket.AF_INET6, socket.SOCK_STREAM,
0, "", self.bind_addr + (0, 0))]
else:
info = [(socket.AF_INET, socket.SOCK_STREAM,
0, "", self.bind_addr)]
self.socket = None
msg = "No socket could be created"
for res in info:
af, socktype, proto, canonname, sa = res
try:
self.bind(af, socktype, proto)
except socket.error as serr:
msg = "%s -- (%s: %s)" % (msg, sa, serr)
if self.socket:
self.socket.close()
self.socket = None
continue
break
if not self.socket:
raise socket.error(msg)
# Timeout so KeyboardInterrupt can be caught on Win32
self.socket.settimeout(1)
self.socket.listen(self.request_queue_size)
# Create worker threads
self.requests.start()
self.ready = True
self._start_time = time.time()
while self.ready:
try:
self.tick()
except (KeyboardInterrupt, SystemExit):
raise
except:
self.error_log("Error in HTTPServer.tick", level=logging.ERROR,
traceback=True)
if self.interrupt:
while self.interrupt is True:
# Wait for self.stop() to complete. See _set_interrupt.
time.sleep(0.1)
if self.interrupt:
raise self.interrupt
def error_log(self, msg="", level=20, traceback=False):
# Override this in subclasses as desired
sys.stderr.write(msg + '\n')
sys.stderr.flush()
if traceback:
tblines = traceback_.format_exc()
sys.stderr.write(tblines)
sys.stderr.flush()
def bind(self, family, type, proto=0):
"""Create (or recreate) the actual socket object."""
self.socket = socket.socket(family, type, proto)
prevent_socket_inheritance(self.socket)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if self.nodelay and not isinstance(self.bind_addr, str):
self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
if self.ssl_adapter is not None:
self.socket = self.ssl_adapter.bind(self.socket)
# If listening on the IPV6 any address ('::' = IN6ADDR_ANY),
# activate dual-stack. See
# https://github.com/cherrypy/cherrypy/issues/871.
if (hasattr(socket, 'AF_INET6') and family == socket.AF_INET6
and self.bind_addr[0] in ('::', '::0', '::0.0.0.0')):
try:
self.socket.setsockopt(
socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
except (AttributeError, socket.error):
# Apparently, the socket option is not available in
# this machine's TCP stack
pass
self.socket.bind(self.bind_addr)
def tick(self):
"""Accept a new connection and put it on the Queue."""
try:
s, addr = self.socket.accept()
if self.stats['Enabled']:
self.stats['Accepts'] += 1
if not self.ready:
return
prevent_socket_inheritance(s)
if hasattr(s, 'settimeout'):
s.settimeout(self.timeout)
makefile = CP_makefile
ssl_env = {}
# if ssl cert and key are set, we try to be a secure HTTP server
if self.ssl_adapter is not None:
try:
s, ssl_env = self.ssl_adapter.wrap(s)
except NoSSLError:
msg = ("The client sent a plain HTTP request, but "
"this server only speaks HTTPS on this port.")
buf = ["%s 400 Bad Request\r\n" % self.protocol,
"Content-Length: %s\r\n" % len(msg),
"Content-Type: text/plain\r\n\r\n",
msg]
wfile = makefile(s, "wb", DEFAULT_BUFFER_SIZE)
try:
wfile.write("".join(buf).encode('ISO-8859-1'))
except socket.error:
x = sys.exc_info()[1]
if x.args[0] not in socket_errors_to_ignore:
raise
return
if not s:
return
makefile = self.ssl_adapter.makefile
# Re-apply our timeout since we may have a new socket object
if hasattr(s, 'settimeout'):
s.settimeout(self.timeout)
conn = self.ConnectionClass(self, s, makefile)
if not isinstance(self.bind_addr, basestring):
# optional values
# Until we do DNS lookups, omit REMOTE_HOST
if addr is None: # sometimes this can happen
# figure out if AF_INET or AF_INET6.
if len(s.getsockname()) == 2:
# AF_INET
addr = ('0.0.0.0', 0)
else:
# AF_INET6
addr = ('::', 0)
conn.remote_addr = addr[0]
conn.remote_port = addr[1]
conn.ssl_env = ssl_env
try:
self.requests.put(conn)
except queue.Full:
# Just drop the conn. TODO: write 503 back?
conn.close()
return
except socket.timeout:
# The only reason for the timeout in start() is so we can
# notice keyboard interrupts on Win32, which don't interrupt
# accept() by default
return
except socket.error:
x = sys.exc_info()[1]
if self.stats['Enabled']:
self.stats['Socket Errors'] += 1
if x.args[0] in socket_error_eintr:
# I *think* this is right. EINTR should occur when a signal
# is received during the accept() call; all docs say retry
# the call, and I *think* I'm reading it right that Python
# will then go ahead and poll for and handle the signal
# elsewhere. See
# https://github.com/cherrypy/cherrypy/issues/707.
return
if x.args[0] in socket_errors_nonblocking:
# Just try again. See
# https://github.com/cherrypy/cherrypy/issues/479.
return
if x.args[0] in socket_errors_to_ignore:
# Our socket was closed.
# See https://github.com/cherrypy/cherrypy/issues/686.
return
raise
def _get_interrupt(self):
return self._interrupt
def _set_interrupt(self, interrupt):
self._interrupt = True
self.stop()
self._interrupt = interrupt
interrupt = property(_get_interrupt, _set_interrupt,
doc="Set this to an Exception instance to "
"interrupt the server.")
def stop(self):
"""Gracefully shutdown a server that is serving forever."""
self.ready = False
if self._start_time is not None:
self._run_time += (time.time() - self._start_time)
self._start_time = None
sock = getattr(self, "socket", None)
if sock:
if not isinstance(self.bind_addr, basestring):
# Touch our own socket to make accept() return immediately.
try:
host, port = sock.getsockname()[:2]
except socket.error:
x = sys.exc_info()[1]
if x.args[0] not in socket_errors_to_ignore:
# Changed to use error code and not message
# See
# https://github.com/cherrypy/cherrypy/issues/860.
raise
else:
# Note that we're explicitly NOT using AI_PASSIVE,
# here, because we want an actual IP to touch.
# localhost won't work if we've bound to a public IP,
# but it will if we bound to '0.0.0.0' (INADDR_ANY).
for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC,
socket.SOCK_STREAM):
af, socktype, proto, canonname, sa = res
s = None
try:
s = socket.socket(af, socktype, proto)
# See
# http://groups.google.com/group/cherrypy-users/
# browse_frm/thread/bbfe5eb39c904fe0
s.settimeout(1.0)
s.connect((host, port))
s.close()
except socket.error:
if s:
s.close()
if hasattr(sock, "close"):
sock.close()
self.socket = None
self.requests.stop(self.shutdown_timeout)
class Gateway(object):
"""A base class to interface HTTPServer with other systems, such as WSGI.
"""
def __init__(self, req):
self.req = req
def respond(self):
"""Process the current request. Must be overridden in a subclass."""
        raise NotImplementedError
# These may either be wsgiserver.SSLAdapter subclasses or the string names
# of such classes (in which case they will be lazily loaded).
ssl_adapters = {
'builtin': 'cherrypy.wsgiserver.ssl_builtin.BuiltinSSLAdapter',
'pyopenssl': 'cherrypy.wsgiserver.ssl_pyopenssl.pyOpenSSLAdapter',
}
def get_ssl_adapter_class(name='builtin'):
"""Return an SSL adapter class for the given name."""
adapter = ssl_adapters[name.lower()]
if isinstance(adapter, basestring):
last_dot = adapter.rfind(".")
attr_name = adapter[last_dot + 1:]
mod_path = adapter[:last_dot]
try:
mod = sys.modules[mod_path]
if mod is None:
raise KeyError()
except KeyError:
# The last [''] is important.
mod = __import__(mod_path, globals(), locals(), [''])
# Let an AttributeError propagate outward.
try:
adapter = getattr(mod, attr_name)
except AttributeError:
raise AttributeError("'%s' object has no attribute '%s'"
% (mod_path, attr_name))
return adapter
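# Typical use (a sketch; the certificate paths are placeholders):
#
#     adapter_cls = get_ssl_adapter_class('builtin')
#     server.ssl_adapter = adapter_cls('/etc/ssl/cert.pem',
#                                      '/etc/ssl/privkey.pem')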
# ------------------------------- WSGI Stuff -------------------------------- #
class CherryPyWSGIServer(HTTPServer):
"""A subclass of HTTPServer which calls a WSGI application."""
wsgi_version = (1, 0)
"""The version of WSGI to produce."""
def __init__(self, bind_addr, wsgi_app, numthreads=10, server_name=None,
max=-1, request_queue_size=5, timeout=10, shutdown_timeout=5,
accepted_queue_size=-1, accepted_queue_timeout=10):
self.requests = ThreadPool(self, min=numthreads or 1, max=max,
accepted_queue_size=accepted_queue_size,
accepted_queue_timeout=accepted_queue_timeout)
self.wsgi_app = wsgi_app
self.gateway = wsgi_gateways[self.wsgi_version]
self.bind_addr = bind_addr
if not server_name:
server_name = socket.gethostname()
self.server_name = server_name
self.request_queue_size = request_queue_size
self.timeout = timeout
self.shutdown_timeout = shutdown_timeout
self.clear_stats()
def _get_numthreads(self):
return self.requests.min
def _set_numthreads(self, value):
self.requests.min = value
numthreads = property(_get_numthreads, _set_numthreads)
class WSGIGateway(Gateway):
"""A base class to interface HTTPServer with WSGI."""
def __init__(self, req):
self.req = req
self.started_response = False
self.env = self.get_environ()
self.remaining_bytes_out = None
def get_environ(self):
"""Return a new environ dict targeting the given wsgi.version"""
        raise NotImplementedError
def respond(self):
"""Process the current request."""
response = self.req.server.wsgi_app(self.env, self.start_response)
try:
for chunk in response:
# "The start_response callable must not actually transmit
# the response headers. Instead, it must store them for the
# server or gateway to transmit only after the first
# iteration of the application return value that yields
# a NON-EMPTY string, or upon the application's first
# invocation of the write() callable." (PEP 333)
if chunk:
if isinstance(chunk, unicodestr):
chunk = chunk.encode('ISO-8859-1')
self.write(chunk)
finally:
if hasattr(response, "close"):
response.close()
def start_response(self, status, headers, exc_info=None):
"""WSGI callable to begin the HTTP response."""
# "The application may call start_response more than once,
# if and only if the exc_info argument is provided."
if self.started_response and not exc_info:
raise AssertionError("WSGI start_response called a second "
"time with no exc_info.")
self.started_response = True
# "if exc_info is provided, and the HTTP headers have already been
# sent, start_response must raise an error, and should raise the
# exc_info tuple."
if self.req.sent_headers:
try:
raise exc_info[0](exc_info[1]).with_traceback(exc_info[2])
finally:
exc_info = None
# According to PEP 3333, when using Python 3, the response status
# and headers must be bytes masquerading as unicode; that is, they
# must be of type "str" but are restricted to code points in the
# "latin-1" set.
if not isinstance(status, str):
raise TypeError("WSGI response status is not of type str.")
self.req.status = status.encode('ISO-8859-1')
for k, v in headers:
if not isinstance(k, str):
raise TypeError(
"WSGI response header key %r is not of type str." % k)
if not isinstance(v, str):
raise TypeError(
"WSGI response header value %r is not of type str." % v)
if k.lower() == 'content-length':
self.remaining_bytes_out = int(v)
self.req.outheaders.append(
(k.encode('ISO-8859-1'), v.encode('ISO-8859-1')))
return self.write
def write(self, chunk):
"""WSGI callable to write unbuffered data to the client.
This method is also used internally by start_response (to write
data from the iterable returned by the WSGI application).
"""
if not self.started_response:
raise AssertionError("WSGI write called before start_response.")
chunklen = len(chunk)
rbo = self.remaining_bytes_out
if rbo is not None and chunklen > rbo:
if not self.req.sent_headers:
# Whew. We can send a 500 to the client.
self.req.simple_response("500 Internal Server Error",
"The requested resource returned "
"more bytes than the declared "
"Content-Length.")
else:
# Dang. We have probably already sent data. Truncate the chunk
# to fit (so the client doesn't hang) and raise an error later.
chunk = chunk[:rbo]
if not self.req.sent_headers:
self.req.sent_headers = True
self.req.send_headers()
self.req.write(chunk)
if rbo is not None:
rbo -= chunklen
if rbo < 0:
raise ValueError(
"Response body exceeds the declared Content-Length.")
class WSGIGateway_10(WSGIGateway):
"""A Gateway class to interface HTTPServer with WSGI 1.0.x."""
def get_environ(self):
"""Return a new environ dict targeting the given wsgi.version"""
req = self.req
env = {
# set a non-standard environ entry so the WSGI app can know what
# the *real* server protocol is (and what features to support).
# See http://www.faqs.org/rfcs/rfc2145.html.
'ACTUAL_SERVER_PROTOCOL': req.server.protocol,
'PATH_INFO': req.path.decode('ISO-8859-1'),
'QUERY_STRING': req.qs.decode('ISO-8859-1'),
'REMOTE_ADDR': req.conn.remote_addr or '',
'REMOTE_PORT': str(req.conn.remote_port or ''),
'REQUEST_METHOD': req.method.decode('ISO-8859-1'),
'REQUEST_URI': req.uri.decode('ISO-8859-1'),
'SCRIPT_NAME': '',
'SERVER_NAME': req.server.server_name,
# Bah. "SERVER_PROTOCOL" is actually the REQUEST protocol.
'SERVER_PROTOCOL': req.request_protocol.decode('ISO-8859-1'),
'SERVER_SOFTWARE': req.server.software,
'wsgi.errors': sys.stderr,
'wsgi.input': req.rfile,
'wsgi.multiprocess': False,
'wsgi.multithread': True,
'wsgi.run_once': False,
'wsgi.url_scheme': req.scheme.decode('ISO-8859-1'),
'wsgi.version': (1, 0),
}
if isinstance(req.server.bind_addr, basestring):
# AF_UNIX. This isn't really allowed by WSGI, which doesn't
# address unix domain sockets. But it's better than nothing.
env["SERVER_PORT"] = ""
else:
env["SERVER_PORT"] = str(req.server.bind_addr[1])
# Request headers
for k, v in req.inheaders.items():
k = k.decode('ISO-8859-1').upper().replace("-", "_")
env["HTTP_" + k] = v.decode('ISO-8859-1')
# CONTENT_TYPE/CONTENT_LENGTH
ct = env.pop("HTTP_CONTENT_TYPE", None)
if ct is not None:
env["CONTENT_TYPE"] = ct
cl = env.pop("HTTP_CONTENT_LENGTH", None)
if cl is not None:
env["CONTENT_LENGTH"] = cl
if req.conn.ssl_env:
env.update(req.conn.ssl_env)
return env
class WSGIGateway_u0(WSGIGateway_10):
"""A Gateway class to interface HTTPServer with WSGI u.0.
WSGI u.0 is an experimental protocol, which uses unicode for keys
and values in both Python 2 and Python 3.
"""
def get_environ(self):
"""Return a new environ dict targeting the given wsgi.version"""
req = self.req
env_10 = WSGIGateway_10.get_environ(self)
env = env_10.copy()
env['wsgi.version'] = ('u', 0)
# Request-URI
env.setdefault('wsgi.url_encoding', 'utf-8')
try:
# SCRIPT_NAME is the empty string, who cares what encoding it is?
env["PATH_INFO"] = req.path.decode(env['wsgi.url_encoding'])
env["QUERY_STRING"] = req.qs.decode(env['wsgi.url_encoding'])
except UnicodeDecodeError:
# Fall back to latin 1 so apps can transcode if needed.
env['wsgi.url_encoding'] = 'ISO-8859-1'
env["PATH_INFO"] = env_10["PATH_INFO"]
env["QUERY_STRING"] = env_10["QUERY_STRING"]
return env
wsgi_gateways = {
(1, 0): WSGIGateway_10,
('u', 0): WSGIGateway_u0,
}
class WSGIPathInfoDispatcher(object):
"""A WSGI dispatcher for dispatch based on the PATH_INFO.
apps: a dict or list of (path_prefix, app) pairs.
"""
def __init__(self, apps):
try:
apps = list(apps.items())
except AttributeError:
pass
        # Sort the apps by len(path), descending, so the longest
        # (most specific) prefix is tried first in __call__.
        apps.sort(key=lambda app: len(app[0]), reverse=True)
# The path_prefix strings must start, but not end, with a slash.
# Use "" instead of "/".
self.apps = [(p.rstrip("/"), a) for p, a in apps]
def __call__(self, environ, start_response):
path = environ["PATH_INFO"] or "/"
for p, app in self.apps:
# The apps list should be sorted by length, descending.
if path.startswith(p + "/") or path == p:
environ = environ.copy()
environ["SCRIPT_NAME"] = environ["SCRIPT_NAME"] + p
environ["PATH_INFO"] = path[len(p):]
return app(environ, start_response)
start_response('404 Not Found', [('Content-Type', 'text/plain'),
('Content-Length', '0')])
return ['']
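# Example wiring (the app objects are placeholders):
#
#     d = WSGIPathInfoDispatcher([('/api', api_app), ('/static', st_app)])
#     server = CherryPyWSGIServer(('0.0.0.0', 8080), d)
#
# A request for /api/users reaches api_app with SCRIPT_NAME '/api' and
# PATH_INFO '/users'; a path matching no prefix gets the empty 404 above.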
| gpl-3.0 |
bwhitelock/garmon-ng | scripts/elm_sim.py | 1 | 7050 | #!/usr/bin/python
#
# elm_sim.py
#
# Copyright (C) Ben Van Mechelen 2007 <[email protected]>
#
# elm_sim.py is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Library General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor Boston, MA 02110-1301, USA
import serial
commands = {'atz' : 'ELM327 v1.2',
'ate0' : 'OK',
'atrv' : '12.4V',
'atdp' : 'Some Protocol',
'0100' : '41 00 FFFFFFFF',
'0101' : '41 01 82 07 65 04',
'0102' : '41 02 00 00',
'0103' : '41 03 02 04',
'0104' : '41 04 99',
'0105' : '41 05 88',
'0106' : '41 06 99',
'0107' : '41 07 88',
'0108' : '41 08 AA',
'0109' : '41 09 AB',
'010A' : '41 0A 56',
'010B' : '41 0B 50',
'010C' : '41 0C 55 44',
'010D' : '41 0D 78',
'010E' : '41 0E 70',
'010F' : '41 0F 36',
'0110' : '41 10 75 A6',
'0111' : '41 11 26',
'0112' : '41 12 01',
'0113' : '41 13 88',
'0114' : '41 14 55 99',
'0115' : '41 15 55 98',
'0116' : '41 16 55 99',
'0117' : '41 17 55 98',
'0118' : '41 18 55 99',
'0119' : '41 19 55 98',
'011A' : '41 1A 55 99',
'011B' : '41 1B 55 97',
'011C' : '41 1C 06',
'011D' : '41 1D 45',
'011E' : '41 1E 80',
'011F' : '41 1F 45 78',
'0120' : '41 20 00180000',
'012C' : '41 2C 97',
            '012D' : '41 2D AA',
'03' : '43 07 04 06 34 05 23',
'04' : '44',
'0200' : '42 00 FFFFFFFF',
            '0220' : '42 20 00000000',
'0201' : '42 01 82 07 65 04',
'0202' : '42 02 01 67',
'0203' : '42 03 02 04',
'0204' : '42 04 99',
'0205' : '42 05 88',
'0206' : '42 06 99',
'0207' : '42 07 88',
'0208' : '42 08 AA',
'0209' : '42 09 AB',
'020A' : '42 0A 56',
'020B' : '42 0B 50',
'020C' : '42 0C 55 44',
'020D' : '42 0D 78',
'020E' : '42 0E 70',
'020F' : '42 0F 36',
'0210' : '42 10 75 A6',
'0211' : '42 11 26',
'0212' : '42 12 01',
'0213' : '42 13 88',
'0214' : '42 14 55 AA',
'0215' : '42 15 55 BA',
'0216' : '42 16 55 BB',
'0217' : '42 17 55 AB',
'0218' : '42 18 55 AC',
'0219' : '42 19 55 A3',
'021A' : '42 1A 55 B3',
'021B' : '42 1B 55 AA',
'021C' : '42 1C 06',
'021D' : '42 1D 45',
'021E' : '42 1E 80',
'021F' : '42 1F 45 78',
}
alternate_commands = {'atz' : 'ELM327 v1.2',
'ate0' : 'OK',
'atrv' : '14.0V',
'atdp' : 'other Protocol',
'0100' : '41 00 FFFFFFFF',
                      '0120' : '41 20 00180000',
'0101' : '41 01 82 07 65 04',
'0102' : '41 02 00 00',
'0103' : '41 03 02 04',
'0104' : '41 04 77',
'0105' : '41 05 66',
'0106' : '41 06 45',
'0107' : '41 07 66',
'0108' : '41 08 55',
'0109' : '41 09 BA',
'010A' : '41 0A 99',
'010B' : '41 0B 99',
'010C' : '41 0C 44 55',
'010D' : '41 0D 65',
'010E' : '41 0E 60',
'010F' : '41 0F 77',
'0110' : '41 10 66 99',
'0111' : '41 11 45',
'0112' : '41 12 22',
'0113' : '41 13 55',
'0114' : '41 14 55 AA',
'0115' : '41 15 55 AB',
'0116' : '41 16 55 AA',
'0117' : '41 17 55 AC',
'0118' : '41 18 55 A9',
'0119' : '41 19 55 AA',
'011A' : '41 1A 55 AB',
'011B' : '41 1B 55 AA',
'011C' : '41 1C 06',
'011D' : '41 1D 23',
'011E' : '41 1E 80',
'011F' : '41 1F 34 56',
'012C' : '41 2C 55',
                      '012D' : '41 2D BB',
'03' : '43 05 35 03 23 03 34',
'04' : '44',
}
class ElmSimulator(object):
def __init__(self, port, baudrate=9600,
size=serial.EIGHTBITS,
parity=serial.PARITY_NONE,
stopbits=serial.STOPBITS_ONE ):
try:
self.port = serial.Serial(port,
baudrate,
size,
parity,
stopbits,
timeout = 0)
        except serial.SerialException, e:
            print 'Failed to open serial port: %s' % port
            print e
            # Make the failure visible to start(), which checks self.port.
            self.port = None
def start(self):
alternate = False
if not self.port:
print 'No serial port open'
return
while True:
buf = ""
while True:
ch = self.port.read(1)
if ch == '\r' and len(buf) > 0:
break
else:
buf = buf + ch
print 'received %s' % buf
self.port.flushOutput()
self.port.flushInput()
if commands.has_key(buf):
if buf[:2] == '02':
ret = commands[buf]
else:
                    if alternate and alternate_commands.has_key(buf):
ret = alternate_commands[buf]
else:
ret = commands[buf]
alternate = not alternate
print 'sending %s' % ret
if not ret == 'Nothing':
self.port.write(ret + '\r\r>')
else:
print 'unknown command'
self.port.write('?\r\r>')
if __name__ == "__main__":
sim = ElmSimulator('/dev/ttyUSB1')
sim.start()
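# Driving the simulator by hand (a sketch): pair two pseudo-terminals, e.g.
# with `socat -d -d pty,raw,echo=0 pty,raw,echo=0`, point ElmSimulator at
# one end and a client at the other (device names are placeholders):
#
#     import serial
#     port = serial.Serial('/dev/pts/4', 9600, timeout=1)
#     port.write('atz\r')
#     print port.read(64)    # -> 'ELM327 v1.2\r\r>'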
| gpl-3.0 |
elijah513/django | tests/httpwrappers/tests.py | 63 | 27550 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import copy
import json
import os
import pickle
import unittest
import uuid
from django.core.exceptions import SuspiciousOperation
from django.core.serializers.json import DjangoJSONEncoder
from django.core.signals import request_finished
from django.db import close_old_connections
from django.http import (
BadHeaderError, HttpResponse, HttpResponseNotAllowed,
HttpResponseNotModified, HttpResponsePermanentRedirect,
HttpResponseRedirect, JsonResponse, QueryDict, SimpleCookie,
StreamingHttpResponse, parse_cookie,
)
from django.test import SimpleTestCase
from django.utils import six
from django.utils._os import upath
from django.utils.encoding import force_text, smart_str
from django.utils.functional import lazy
lazystr = lazy(force_text, six.text_type)
class QueryDictTests(unittest.TestCase):
def test_create_with_no_args(self):
self.assertEqual(QueryDict(), QueryDict(str('')))
def test_missing_key(self):
q = QueryDict()
self.assertRaises(KeyError, q.__getitem__, 'foo')
def test_immutability(self):
q = QueryDict()
self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar'])
self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
self.assertRaises(AttributeError, q.pop, 'foo')
self.assertRaises(AttributeError, q.popitem)
self.assertRaises(AttributeError, q.clear)
def test_immutable_get_with_default(self):
q = QueryDict()
self.assertEqual(q.get('foo', 'default'), 'default')
def test_immutable_basic_operations(self):
q = QueryDict()
self.assertEqual(q.getlist('foo'), [])
if six.PY2:
self.assertEqual(q.has_key('foo'), False)
self.assertEqual('foo' in q, False)
self.assertEqual(list(six.iteritems(q)), [])
self.assertEqual(list(six.iterlists(q)), [])
self.assertEqual(list(six.iterkeys(q)), [])
self.assertEqual(list(six.itervalues(q)), [])
self.assertEqual(len(q), 0)
self.assertEqual(q.urlencode(), '')
def test_single_key_value(self):
"""Test QueryDict with one key/value pair"""
q = QueryDict(str('foo=bar'))
self.assertEqual(q['foo'], 'bar')
self.assertRaises(KeyError, q.__getitem__, 'bar')
self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
self.assertEqual(q.get('foo', 'default'), 'bar')
self.assertEqual(q.get('bar', 'default'), 'default')
self.assertEqual(q.getlist('foo'), ['bar'])
self.assertEqual(q.getlist('bar'), [])
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar'])
self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
if six.PY2:
self.assertTrue(q.has_key('foo'))
self.assertIn('foo', q)
if six.PY2:
self.assertFalse(q.has_key('bar'))
self.assertNotIn('bar', q)
self.assertEqual(list(six.iteritems(q)), [('foo', 'bar')])
self.assertEqual(list(six.iterlists(q)), [('foo', ['bar'])])
self.assertEqual(list(six.iterkeys(q)), ['foo'])
self.assertEqual(list(six.itervalues(q)), ['bar'])
self.assertEqual(len(q), 1)
self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
self.assertRaises(AttributeError, q.pop, 'foo')
self.assertRaises(AttributeError, q.popitem)
self.assertRaises(AttributeError, q.clear)
self.assertRaises(AttributeError, q.setdefault, 'foo', 'bar')
self.assertEqual(q.urlencode(), 'foo=bar')
def test_urlencode(self):
q = QueryDict(mutable=True)
q['next'] = '/a&b/'
self.assertEqual(q.urlencode(), 'next=%2Fa%26b%2F')
self.assertEqual(q.urlencode(safe='/'), 'next=/a%26b/')
q = QueryDict(mutable=True)
q['next'] = '/t\xebst&key/'
self.assertEqual(q.urlencode(), 'next=%2Ft%C3%ABst%26key%2F')
self.assertEqual(q.urlencode(safe='/'), 'next=/t%C3%ABst%26key/')
def test_mutable_copy(self):
"""A copy of a QueryDict is mutable."""
q = QueryDict().copy()
self.assertRaises(KeyError, q.__getitem__, "foo")
q['name'] = 'john'
self.assertEqual(q['name'], 'john')
def test_mutable_delete(self):
q = QueryDict(mutable=True)
q['name'] = 'john'
del q['name']
self.assertNotIn('name', q)
def test_basic_mutable_operations(self):
q = QueryDict(mutable=True)
q['name'] = 'john'
self.assertEqual(q.get('foo', 'default'), 'default')
self.assertEqual(q.get('name', 'default'), 'john')
self.assertEqual(q.getlist('name'), ['john'])
self.assertEqual(q.getlist('foo'), [])
q.setlist('foo', ['bar', 'baz'])
self.assertEqual(q.get('foo', 'default'), 'baz')
self.assertEqual(q.getlist('foo'), ['bar', 'baz'])
q.appendlist('foo', 'another')
self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another'])
self.assertEqual(q['foo'], 'another')
if six.PY2:
self.assertTrue(q.has_key('foo'))
self.assertIn('foo', q)
self.assertListEqual(sorted(six.iteritems(q)),
[('foo', 'another'), ('name', 'john')])
self.assertListEqual(sorted(six.iterlists(q)),
[('foo', ['bar', 'baz', 'another']), ('name', ['john'])])
self.assertListEqual(sorted(six.iterkeys(q)),
['foo', 'name'])
self.assertListEqual(sorted(six.itervalues(q)),
['another', 'john'])
q.update({'foo': 'hello'})
self.assertEqual(q['foo'], 'hello')
self.assertEqual(q.get('foo', 'not available'), 'hello')
self.assertEqual(q.getlist('foo'), ['bar', 'baz', 'another', 'hello'])
self.assertEqual(q.pop('foo'), ['bar', 'baz', 'another', 'hello'])
self.assertEqual(q.pop('foo', 'not there'), 'not there')
self.assertEqual(q.get('foo', 'not there'), 'not there')
self.assertEqual(q.setdefault('foo', 'bar'), 'bar')
self.assertEqual(q['foo'], 'bar')
self.assertEqual(q.getlist('foo'), ['bar'])
self.assertIn(q.urlencode(), ['foo=bar&name=john', 'name=john&foo=bar'])
q.clear()
self.assertEqual(len(q), 0)
def test_multiple_keys(self):
"""Test QueryDict with two key/value pairs with same keys."""
q = QueryDict(str('vote=yes&vote=no'))
self.assertEqual(q['vote'], 'no')
self.assertRaises(AttributeError, q.__setitem__, 'something', 'bar')
self.assertEqual(q.get('vote', 'default'), 'no')
self.assertEqual(q.get('foo', 'default'), 'default')
self.assertEqual(q.getlist('vote'), ['yes', 'no'])
self.assertEqual(q.getlist('foo'), [])
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar', 'baz'])
self.assertRaises(AttributeError, q.setlist, 'foo', ['bar', 'baz'])
self.assertRaises(AttributeError, q.appendlist, 'foo', ['bar'])
if six.PY2:
self.assertEqual(q.has_key('vote'), True)
self.assertEqual('vote' in q, True)
if six.PY2:
self.assertEqual(q.has_key('foo'), False)
self.assertEqual('foo' in q, False)
self.assertEqual(list(six.iteritems(q)), [('vote', 'no')])
self.assertEqual(list(six.iterlists(q)), [('vote', ['yes', 'no'])])
self.assertEqual(list(six.iterkeys(q)), ['vote'])
self.assertEqual(list(six.itervalues(q)), ['no'])
self.assertEqual(len(q), 1)
self.assertRaises(AttributeError, q.update, {'foo': 'bar'})
self.assertRaises(AttributeError, q.pop, 'foo')
self.assertRaises(AttributeError, q.popitem)
self.assertRaises(AttributeError, q.clear)
self.assertRaises(AttributeError, q.setdefault, 'foo', 'bar')
self.assertRaises(AttributeError, q.__delitem__, 'vote')
if six.PY2:
def test_invalid_input_encoding(self):
"""
QueryDicts must be able to handle invalid input encoding (in this
case, bad UTF-8 encoding), falling back to ISO-8859-1 decoding.
This test doesn't apply under Python 3 because the URL is a string
and not a bytestring.
"""
q = QueryDict(str(b'foo=bar&foo=\xff'))
self.assertEqual(q['foo'], '\xff')
self.assertEqual(q.getlist('foo'), ['bar', '\xff'])
def test_pickle(self):
q = QueryDict()
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q == q1, True)
q = QueryDict(str('a=b&c=d'))
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q == q1, True)
q = QueryDict(str('a=b&c=d&a=1'))
q1 = pickle.loads(pickle.dumps(q, 2))
self.assertEqual(q == q1, True)
def test_update_from_querydict(self):
"""Regression test for #8278: QueryDict.update(QueryDict)"""
x = QueryDict(str("a=1&a=2"), mutable=True)
y = QueryDict(str("a=3&a=4"))
x.update(y)
self.assertEqual(x.getlist('a'), ['1', '2', '3', '4'])
def test_non_default_encoding(self):
"""#13572 - QueryDict with a non-default encoding"""
q = QueryDict(str('cur=%A4'), encoding='iso-8859-15')
self.assertEqual(q.encoding, 'iso-8859-15')
self.assertEqual(list(six.iteritems(q)), [('cur', '€')])
self.assertEqual(q.urlencode(), 'cur=%A4')
q = q.copy()
self.assertEqual(q.encoding, 'iso-8859-15')
self.assertEqual(list(six.iteritems(q)), [('cur', '€')])
self.assertEqual(q.urlencode(), 'cur=%A4')
self.assertEqual(copy.copy(q).encoding, 'iso-8859-15')
self.assertEqual(copy.deepcopy(q).encoding, 'iso-8859-15')
class HttpResponseTests(unittest.TestCase):
def test_headers_type(self):
r = HttpResponse()
# The following tests explicitly test types in addition to values
# because in Python 2 u'foo' == b'foo'.
# ASCII unicode or bytes values are converted to native strings.
r['key'] = 'test'
self.assertEqual(r['key'], str('test'))
self.assertIsInstance(r['key'], str)
r['key'] = 'test'.encode('ascii')
self.assertEqual(r['key'], str('test'))
self.assertIsInstance(r['key'], str)
self.assertIn(b'test', r.serialize_headers())
# Latin-1 unicode or bytes values are also converted to native strings.
r['key'] = 'café'
self.assertEqual(r['key'], smart_str('café', 'latin-1'))
self.assertIsInstance(r['key'], str)
r['key'] = 'café'.encode('latin-1')
self.assertEqual(r['key'], smart_str('café', 'latin-1'))
self.assertIsInstance(r['key'], str)
self.assertIn('café'.encode('latin-1'), r.serialize_headers())
# Other unicode values are MIME-encoded (there's no way to pass them as bytes).
r['key'] = '†'
self.assertEqual(r['key'], str('=?utf-8?b?4oCg?='))
self.assertIsInstance(r['key'], str)
self.assertIn(b'=?utf-8?b?4oCg?=', r.serialize_headers())
# The response also converts unicode or bytes keys to strings, but requires
# them to contain ASCII
r = HttpResponse()
del r['Content-Type']
r['foo'] = 'bar'
l = list(r.items())
self.assertEqual(len(l), 1)
self.assertEqual(l[0], ('foo', 'bar'))
self.assertIsInstance(l[0][0], str)
r = HttpResponse()
del r['Content-Type']
r[b'foo'] = 'bar'
l = list(r.items())
self.assertEqual(len(l), 1)
self.assertEqual(l[0], ('foo', 'bar'))
self.assertIsInstance(l[0][0], str)
r = HttpResponse()
self.assertRaises(UnicodeError, r.__setitem__, 'føø', 'bar')
self.assertRaises(UnicodeError, r.__setitem__, 'føø'.encode('utf-8'), 'bar')
def test_long_line(self):
# Bug #20889: long lines trigger newlines to be added to headers
# (which is not allowed due to bug #10188)
h = HttpResponse()
f = 'zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz a\xcc\x88'.encode('latin-1')
f = f.decode('utf-8')
h['Content-Disposition'] = 'attachment; filename="%s"' % f
# This one is triggering http://bugs.python.org/issue20747, that is Python
# will itself insert a newline in the header
        h['Content-Disposition'] = 'attachment; filename="EdelRot_Blu\u0308te (3)-0.JPG"'
def test_newlines_in_headers(self):
# Bug #10188: Do not allow newlines in headers (CR or LF)
r = HttpResponse()
self.assertRaises(BadHeaderError, r.__setitem__, 'test\rstr', 'test')
self.assertRaises(BadHeaderError, r.__setitem__, 'test\nstr', 'test')
def test_dict_behavior(self):
"""
Test for bug #14020: Make HttpResponse.get work like dict.get
"""
r = HttpResponse()
self.assertEqual(r.get('test'), None)
def test_non_string_content(self):
# Bug 16494: HttpResponse should behave consistently with non-strings
r = HttpResponse(12345)
self.assertEqual(r.content, b'12345')
# test content via property
r = HttpResponse()
r.content = 12345
self.assertEqual(r.content, b'12345')
def test_iter_content(self):
r = HttpResponse(['abc', 'def', 'ghi'])
self.assertEqual(r.content, b'abcdefghi')
# test iter content via property
r = HttpResponse()
r.content = ['idan', 'alex', 'jacob']
self.assertEqual(r.content, b'idanalexjacob')
r = HttpResponse()
r.content = [1, 2, 3]
self.assertEqual(r.content, b'123')
# test odd inputs
r = HttpResponse()
r.content = ['1', '2', 3, '\u079e']
# '\xde\x9e' == unichr(1950).encode('utf-8')
self.assertEqual(r.content, b'123\xde\x9e')
# .content can safely be accessed multiple times.
r = HttpResponse(iter(['hello', 'world']))
self.assertEqual(r.content, r.content)
self.assertEqual(r.content, b'helloworld')
# __iter__ can safely be called multiple times (#20187).
self.assertEqual(b''.join(r), b'helloworld')
self.assertEqual(b''.join(r), b'helloworld')
# Accessing .content still works.
self.assertEqual(r.content, b'helloworld')
# Accessing .content also works if the response was iterated first.
r = HttpResponse(iter(['hello', 'world']))
self.assertEqual(b''.join(r), b'helloworld')
self.assertEqual(r.content, b'helloworld')
# Additional content can be written to the response.
r = HttpResponse(iter(['hello', 'world']))
self.assertEqual(r.content, b'helloworld')
r.write('!')
self.assertEqual(r.content, b'helloworld!')
def test_iterator_isnt_rewound(self):
# Regression test for #13222
r = HttpResponse('abc')
i = iter(r)
self.assertEqual(list(i), [b'abc'])
self.assertEqual(list(i), [])
def test_lazy_content(self):
r = HttpResponse(lazystr('helloworld'))
self.assertEqual(r.content, b'helloworld')
def test_file_interface(self):
r = HttpResponse()
r.write(b"hello")
self.assertEqual(r.tell(), 5)
r.write("привет")
self.assertEqual(r.tell(), 17)
r = HttpResponse(['abc'])
r.write('def')
self.assertEqual(r.tell(), 6)
self.assertEqual(r.content, b'abcdef')
# with Content-Encoding header
r = HttpResponse()
r['Content-Encoding'] = 'winning'
r.write(b'abc')
r.write(b'def')
self.assertEqual(r.content, b'abcdef')
def test_stream_interface(self):
r = HttpResponse('asdf')
self.assertEqual(r.getvalue(), b'asdf')
r = HttpResponse()
self.assertEqual(r.writable(), True)
r.writelines(['foo\n', 'bar\n', 'baz\n'])
self.assertEqual(r.content, b'foo\nbar\nbaz\n')
def test_unsafe_redirect(self):
bad_urls = [
'data:text/html,<script>window.alert("xss")</script>',
'mailto:[email protected]',
'file:///etc/passwd',
]
for url in bad_urls:
self.assertRaises(SuspiciousOperation,
HttpResponseRedirect, url)
self.assertRaises(SuspiciousOperation,
HttpResponsePermanentRedirect, url)
class HttpResponseSubclassesTests(SimpleTestCase):
def test_redirect(self):
response = HttpResponseRedirect('/redirected/')
self.assertEqual(response.status_code, 302)
# Test that standard HttpResponse init args can be used
response = HttpResponseRedirect('/redirected/',
content='The resource has temporarily moved',
content_type='text/html')
self.assertContains(response, 'The resource has temporarily moved', status_code=302)
# Test that url attribute is right
self.assertEqual(response.url, response['Location'])
def test_redirect_lazy(self):
"""Make sure HttpResponseRedirect works with lazy strings."""
r = HttpResponseRedirect(lazystr('/redirected/'))
self.assertEqual(r.url, '/redirected/')
def test_redirect_repr(self):
response = HttpResponseRedirect('/redirected/')
expected = '<HttpResponseRedirect status_code=302, "text/html; charset=utf-8", url="/redirected/">'
self.assertEqual(repr(response), expected)
def test_not_modified(self):
response = HttpResponseNotModified()
self.assertEqual(response.status_code, 304)
# 304 responses should not have content/content-type
with self.assertRaises(AttributeError):
response.content = "Hello dear"
self.assertNotIn('content-type', response)
def test_not_allowed(self):
response = HttpResponseNotAllowed(['GET'])
self.assertEqual(response.status_code, 405)
# Test that standard HttpResponse init args can be used
response = HttpResponseNotAllowed(['GET'],
content='Only the GET method is allowed',
content_type='text/html')
self.assertContains(response, 'Only the GET method is allowed', status_code=405)
def test_not_allowed_repr(self):
response = HttpResponseNotAllowed(['GET', 'OPTIONS'], content_type='text/plain')
expected = '<HttpResponseNotAllowed [GET, OPTIONS] status_code=405, "text/plain">'
self.assertEqual(repr(response), expected)
class JsonResponseTests(SimpleTestCase):
def test_json_response_non_ascii(self):
data = {'key': 'łóżko'}
response = JsonResponse(data)
self.assertEqual(json.loads(response.content.decode()), data)
def test_json_response_raises_type_error_with_default_setting(self):
with self.assertRaisesMessage(TypeError,
'In order to allow non-dict objects to be serialized set the '
'safe parameter to False'):
JsonResponse([1, 2, 3])
def test_json_response_text(self):
response = JsonResponse('foobar', safe=False)
self.assertEqual(json.loads(response.content.decode()), 'foobar')
def test_json_response_list(self):
response = JsonResponse(['foo', 'bar'], safe=False)
self.assertEqual(json.loads(response.content.decode()), ['foo', 'bar'])
def test_json_response_uuid(self):
u = uuid.uuid4()
response = JsonResponse(u, safe=False)
self.assertEqual(json.loads(response.content.decode()), str(u))
def test_json_response_custom_encoder(self):
class CustomDjangoJSONEncoder(DjangoJSONEncoder):
def encode(self, o):
return json.dumps({'foo': 'bar'})
response = JsonResponse({}, encoder=CustomDjangoJSONEncoder)
self.assertEqual(json.loads(response.content.decode()), {'foo': 'bar'})
class StreamingHttpResponseTests(SimpleTestCase):
def test_streaming_response(self):
r = StreamingHttpResponse(iter(['hello', 'world']))
# iterating over the response itself yields bytestring chunks.
chunks = list(r)
self.assertEqual(chunks, [b'hello', b'world'])
for chunk in chunks:
self.assertIsInstance(chunk, six.binary_type)
# and the response can only be iterated once.
self.assertEqual(list(r), [])
# even when a sequence that can be iterated many times, like a list,
# is given as content.
r = StreamingHttpResponse(['abc', 'def'])
self.assertEqual(list(r), [b'abc', b'def'])
self.assertEqual(list(r), [])
# iterating over Unicode strings still yields bytestring chunks.
r.streaming_content = iter(['hello', 'café'])
chunks = list(r)
# '\xc3\xa9' == unichr(233).encode('utf-8')
self.assertEqual(chunks, [b'hello', b'caf\xc3\xa9'])
for chunk in chunks:
self.assertIsInstance(chunk, six.binary_type)
# streaming responses don't have a `content` attribute.
self.assertFalse(hasattr(r, 'content'))
# and you can't accidentally assign to a `content` attribute.
with self.assertRaises(AttributeError):
r.content = 'xyz'
# but they do have a `streaming_content` attribute.
self.assertTrue(hasattr(r, 'streaming_content'))
# that exists so we can check if a response is streaming, and wrap or
# replace the content iterator.
r.streaming_content = iter(['abc', 'def'])
r.streaming_content = (chunk.upper() for chunk in r.streaming_content)
self.assertEqual(list(r), [b'ABC', b'DEF'])
# coercing a streaming response to bytes doesn't return a complete HTTP
# message like a regular response does. it only gives us the headers.
r = StreamingHttpResponse(iter(['hello', 'world']))
self.assertEqual(
six.binary_type(r), b'Content-Type: text/html; charset=utf-8')
# and this won't consume its content.
self.assertEqual(list(r), [b'hello', b'world'])
# additional content cannot be written to the response.
r = StreamingHttpResponse(iter(['hello', 'world']))
with self.assertRaises(Exception):
r.write('!')
# and we can't tell the current position.
with self.assertRaises(Exception):
r.tell()
r = StreamingHttpResponse(iter(['hello', 'world']))
self.assertEqual(r.getvalue(), b'helloworld')
class FileCloseTests(SimpleTestCase):
def setUp(self):
# Disable the request_finished signal during this test
# to avoid interfering with the database connection.
request_finished.disconnect(close_old_connections)
def tearDown(self):
request_finished.connect(close_old_connections)
def test_response(self):
filename = os.path.join(os.path.dirname(upath(__file__)), 'abc.txt')
# file isn't closed until we close the response.
file1 = open(filename)
r = HttpResponse(file1)
self.assertFalse(file1.closed)
r.close()
self.assertTrue(file1.closed)
        # the file isn't closed automatically when we finish iterating the response.
file1 = open(filename)
r = HttpResponse(file1)
self.assertFalse(file1.closed)
list(r)
self.assertFalse(file1.closed)
r.close()
self.assertTrue(file1.closed)
        # when multiple files are assigned as content, make sure they are all
# closed with the response.
file1 = open(filename)
file2 = open(filename)
r = HttpResponse(file1)
r.content = file2
self.assertFalse(file1.closed)
self.assertFalse(file2.closed)
r.close()
self.assertTrue(file1.closed)
self.assertTrue(file2.closed)
def test_streaming_response(self):
filename = os.path.join(os.path.dirname(upath(__file__)), 'abc.txt')
# file isn't closed until we close the response.
file1 = open(filename)
r = StreamingHttpResponse(file1)
self.assertFalse(file1.closed)
r.close()
self.assertTrue(file1.closed)
        # when multiple files are assigned as content, make sure they are all
# closed with the response.
file1 = open(filename)
file2 = open(filename)
r = StreamingHttpResponse(file1)
r.streaming_content = file2
self.assertFalse(file1.closed)
self.assertFalse(file2.closed)
r.close()
self.assertTrue(file1.closed)
self.assertTrue(file2.closed)
class CookieTests(unittest.TestCase):
def test_encode(self):
"""
Test that we don't output tricky characters in encoded value
"""
c = SimpleCookie()
c['test'] = "An,awkward;value"
self.assertNotIn(";", c.output().rstrip(';')) # IE compat
self.assertNotIn(",", c.output().rstrip(';')) # Safari compat
def test_decode(self):
"""
Test that we can still preserve semi-colons and commas
"""
c = SimpleCookie()
c['test'] = "An,awkward;value"
c2 = SimpleCookie()
c2.load(c.output()[12:])
self.assertEqual(c['test'].value, c2['test'].value)
def test_decode_2(self):
"""
Test that we haven't broken normal encoding
"""
c = SimpleCookie()
c['test'] = b"\xf0"
c2 = SimpleCookie()
c2.load(c.output()[12:])
self.assertEqual(c['test'].value, c2['test'].value)
def test_nonstandard_keys(self):
"""
Test that a single non-standard cookie name doesn't affect all cookies. Ticket #13007.
"""
self.assertIn('good_cookie', parse_cookie('good_cookie=yes;bad:cookie=yes').keys())
def test_repeated_nonstandard_keys(self):
"""
Test that a repeated non-standard name doesn't affect all cookies. Ticket #15852
"""
self.assertIn('good_cookie', parse_cookie('a:=b; a:=c; good_cookie=yes').keys())
def test_httponly_after_load(self):
"""
Test that we can use httponly attribute on cookies that we load
"""
c = SimpleCookie()
c.load("name=val")
c['name']['httponly'] = True
self.assertTrue(c['name']['httponly'])
def test_load_dict(self):
c = SimpleCookie()
c.load({'name': 'val'})
self.assertEqual(c['name'].value, 'val')
@unittest.skipUnless(six.PY2, "PY3 throws an exception on invalid cookie keys.")
def test_bad_cookie(self):
"""
Regression test for #18403
"""
r = HttpResponse()
r.set_cookie("a:.b/", 1)
self.assertEqual(len(r.cookies.bad_cookies), 1)
def test_pickle(self):
rawdata = 'Customer="WILE_E_COYOTE"; Path=/acme; Version=1'
expected_output = 'Set-Cookie: %s' % rawdata
C = SimpleCookie()
C.load(rawdata)
self.assertEqual(C.output(), expected_output)
for proto in range(pickle.HIGHEST_PROTOCOL + 1):
C1 = pickle.loads(pickle.dumps(C, protocol=proto))
self.assertEqual(C1.output(), expected_output)
| bsd-3-clause |
jwalgran/otm-core | opentreemap/opentreemap/context_processors.py | 4 | 3606 | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import copy
from datetime import datetime
from django.conf import settings
from django.contrib.staticfiles import finders
from django.utils.timezone import now
from django.utils.translation import ugettext as _
from opentreemap.util import request_is_embedded
from treemap.units import Convertible
from treemap.util import get_last_visited_instance, leaf_models_of_class
from treemap.models import InstanceUser
REPLACEABLE_TERMS = {
'Resource': {'singular': _('Resource'),
'plural': _('Resources')}
}
def global_settings(request):
last_instance = get_last_visited_instance(request)
if hasattr(request, 'user') and request.user.is_authenticated():
last_effective_instance_user =\
request.user.get_effective_instance_user(last_instance)
_update_last_seen(last_effective_instance_user)
else:
if hasattr(request, 'instance'):
instance = request.instance
default_role = instance.default_role
last_effective_instance_user = InstanceUser(
role=default_role, instance=instance)
else:
last_effective_instance_user = None
if hasattr(request, 'instance') and request.instance.logo:
logo_url = request.instance.logo.url
else:
logo_url = settings.STATIC_URL + "img/logo.png"
try:
comment_file_path = finders.find('version.txt')
with open(comment_file_path, 'r') as f:
header_comment = f.read()
    except Exception:  # version.txt may be missing or unreadable
header_comment = "Version information not available\n"
term = copy.copy(REPLACEABLE_TERMS)
if hasattr(request, 'instance'):
term.update(request.instance.config.get('terms', {}))
# config.get('terms') above populates the term context variable with
# model terminology provided it has been customized for the treemap
# instance, but fails to populate it with the default terminology. The
# for loop below ensures that term is populated with model terminology
# whether it has been customized or not.
# Convertible is the base class where the terminology class property is
# defined, so its leaf subclasses are the ones with default terminology
# we might care about.
# leaf_models_of_class uses recursive descent through the
# clz.__subclasses__ attributes, but it only iterates through a total
# of around ten nodes at present, so it is unlikely to be a performance
# problem.
for clz in leaf_models_of_class(Convertible):
term.update({
clz.__name__: clz.terminology(request.instance)})
ctx = {
'SITE_ROOT': settings.SITE_ROOT,
'settings': settings,
'last_instance': last_instance,
'last_effective_instance_user': last_effective_instance_user,
'logo_url': logo_url,
'header_comment': header_comment,
'term': term,
'embed': request_is_embedded(request),
'datepicker_start_date': datetime.min.replace(year=1900),
}
return ctx
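# Illustrative sketch of the recursive descent mentioned in the comment
# above. The real leaf_models_of_class lives in treemap.util; this
# hypothetical stand-in only shows the idea: a "leaf" is a subclass with
# no subclasses of its own.
def _leaf_models_sketch(clz):
    subclasses = clz.__subclasses__()
    if not subclasses:
        return {clz}
    leaves = set()
    for sub in subclasses:
        leaves |= _leaf_models_sketch(sub)
    return leaves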
def _update_last_seen(last_effective_instance_user):
# Update the instance user's "last seen" date if necessary.
# Done here instead of in middleware to avoid looking up
# the request's InstanceUser again.
iu = last_effective_instance_user
today = now().date()
if iu and iu.id and (not iu.last_seen or iu.last_seen < today):
iu.last_seen = today
iu.save_base()
| gpl-3.0 |
kcleung/pyreform | pyreform/pddl_planner.py | 1 | 3754 | '''
PDDL Merge and Translator - Planner Module
Author: Dr. Patricia Riddle @ 2013
Contact: [email protected]
Functions for calling an external planner in order to validate and process the intermediate problem files
-------
Copyright (C) 2013 Dr. Patricia Riddle, University of Auckland
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import os
import sys
import subprocess
from pddl_debug import _DEBUG, _D
from pddl_debug import _PRINT, _P
import pddl_config as default_config
from pddl_timeit import timeit
def execute(local_config=None):
config = default_config
    if local_config is not None:
config = local_config
os.system("bash %s/bin/planner.sh %s %s" % (config.APP_HOME, config.APP_HOME, config.PLANNER_HOME))
myfileplan = open("downwardtmp-0/sas_plan")
plan = myfileplan.read()
myfileplan.close()
return plan
def giveup(domain, prob, domain_file, problem_file, local_config=None):
_DEBUG("OH NO")
new_file_name = "newdomain.pddl"
text_file = open(new_file_name, "w") #!write prob string into a file
text_file.write(domain)
text_file.close()
new_file_name = "newprob.pddl"
text_file = open(new_file_name, "w") #!write prob string into a file
text_file.write(prob)
text_file.close()
timer = timeit("timing.txt", "elapsed")
timer.start(domain_file, problem_file)
timer.capture()
plan = execute(local_config=local_config)
timer.capture()
solution_file = open("sas_plan_new", "w")
solution_file.write(plan)
solution_file.close()
timer.capture()
timer.stop()
return []
def runvalidation(new_plan, local_config=None): #! code that ran the validator with the final solution....also not run anymore
config = default_config
    if local_config is not None:
config = local_config
_DEBUG("new_plan_yea",new_plan)
solutionstring = tostring(new_plan)
_DEBUG("solutionstring",solutionstring)
_DEBUG("test",solutionstring[1:-1])
solution_file = open("sas_plan_new", "w")
solution_file.write(solutionstring[1:-1])
solution_file.close()
test_file = open("tester","w")
subprocess.call(["%s/src/validate newdomain.pddl newprob.pddl sas_plan" % config.PLANNER_HOME], shell=True, stdout=test_file)
_DEBUG("subprocess",subprocess)
#! omp_cmd = '/Users/prid013/Documents/Fast-Downward-8ea549f76262/src/validate /Users/prid013/Documents/IPCdomains/benchmarks3/gripper/domain.pddl /Users/prid013/Documents/IPCdomains/benchmarks3/gripper/prob01.pddl /Users/prid013/Documents/IPCdomains/sas_plan_new'
#!with open(test_file) as stdout:
#! xmlResult = Popen(shlex.split(omp_cmd), stdin=stdin, stdout=PIPE, stderr=STDOUT)
test_file.close()
test_file = open("tester","r")
output = test_file.read()
_DEBUG("output",output)
if "Successful" in output:
return new_plan
else:
return False
| gpl-3.0 |
SchweizerischeBundesbahnen/cimon_controller | tests/test_collector.py | 1 | 5893 | __author__ = 'florianseidl'
from collector import *
from urllib.error import HTTPError
from unittest import TestCase
from unittest.mock import Mock, DEFAULT
from types import SimpleNamespace
from concurrent import futures
class TestHttpClient(TestCase):
json_str = '{ "foo": "bar" }'
def test_ok(self):
h = self.create_http_client("foobar42")
self.assertEqual(h.open_and_read("http://irgendw.as"), "foobar42")
self.assertEqual(h.__open__.call_count, 1)
def test_http_exception_500(self):
h = self.create_http_client(http_error_codes=[500]*99)
with self.assertRaises(HTTPError):
h.open_and_read("/mypath")
self.assertEqual(h.__open__.call_count, 4)
def test_http_exception_500_then_OK(self):
h = self.create_http_client(http_error_codes=[500, None])
h.open_and_read("/mypath")
self.assertEqual(h.__open__.call_count, 2)
def test_http_exception_500_2times_OK(self):
h = self.create_http_client(http_error_codes=[500, 500, None])
h.open_and_read("/mypath")
self.assertEqual(h.__open__.call_count, 3)
def test_http_exception_500_3times_OK(self):
h = self.create_http_client(http_error_codes=[500, 500, 500, None])
h.open_and_read("/mypath")
self.assertEqual(h.__open__.call_count, 4)
def test_http_exception_401_no_retry(self):
h = self.create_http_client(http_error_codes=[401]*99)
with self.assertRaises(HTTPError):
h.open_and_read("/mypath")
self.assertEqual(h.__open__.call_count, 1)
def test_basic_auth(self):
h = self.create_http_client(authentication_handler=BasicAuthenticationHandler("bla", "blo"))
h.open_and_read()
self.assertEqual(h.__open__.call_count, 1)
request = self.__get_request__(h.__open__)
self.assertTrue(request.has_header("Authorization"))
def test_basic_auth_http_exception_401_retry_ok(self):
h = self.create_http_client(http_error_codes=(401,None), authentication_handler=BasicAuthenticationHandler("bla", "blo"))
h.open_and_read("/mypath")
self.assertEqual(h.__open__.call_count, 2)
def test_basic_auth_http_exception_401_retry_fail(self):
h = self.create_http_client(http_error_codes=[401]*99, authentication_handler=BasicAuthenticationHandler("bla", "blo"))
with self.assertRaises(HTTPError):
h.open_and_read("/mypath")
self.assertEqual(h.__open__.call_count, 4)
def test_saml(self):
saml = SamlAuthenticationHandler("irgendwer", "geheim", "")
saml.login_http_client = self.create_http_client(header="bla")
h = self.create_http_client(http_error_codes=(401,None), response_str="hallo", authentication_handler=saml)
h.open_and_read("/mypath")
self.assertEqual(h.__open__.call_count, 2)
self.assertEqual(saml.login_http_client.__open__.call_count, 2)
request = self.__get_request__(h.__open__)
self.assertEqual(request.get_header("Cookie"), "bla")
def test_saml_mulitthreading(self):
saml = SamlAuthenticationHandler("irgendwer", "geheim", "")
saml.login_http_client = self.create_http_client(header="bla")
h = self.create_http_client(http_error_codes=[401] + [None]*43, response_str="hallo", authentication_handler=saml)
with futures.ThreadPoolExecutor(max_workers=42) as executor:
future_requests = ({executor.submit(h.open_and_read, "/mypath"):
i for i in range(0,42)})
futures.wait(future_requests)
self.assertEqual(h.__open__.call_count, 43)
self.assertEqual(saml.login_http_client.__open__.call_count, 2)
request = self.__get_request__(h.__open__)
self.assertEqual(request.get_header("Cookie"), "bla")
def test_saml_http_exception_401_saml_no_cookie_sent(self):
saml = SamlAuthenticationHandler("irgendwer", "geheim", "")
saml.login_http_client = self.create_http_client()
h = self.create_http_client(http_error_codes=[401]*99, authentication_handler=saml)
with self.assertRaises(HTTPError):
h.open_and_read("/mypath")
self.assertEqual(h.__open__.call_count, 4)
self.assertEqual(saml.login_http_client.__open__.call_count, 4)
def test_jwt(self):
jwt = JwtAuthenticationHandler("irgendwer", "geheim", "")
jwt.login_http_client = self.create_http_client(header="bla")
h = self.create_http_client(http_error_codes=(401,None), response_str="hallo", authentication_handler=jwt)
h.open_and_read("/mypath")
self.assertEqual(h.__open__.call_count, 2)
self.assertEqual(jwt.login_http_client.__open__.call_count, 2)
request = self.__get_request__(h.__open__)
self.assertEqual(request.get_header("Authorization"), "bla")
def create_http_client(self, response_str="", http_error_codes=None, authentication_handler=EmptyAuthenticationHandler(), header=None):
h = HttpClient(base_url="http://irgendw.as",
authentication_handler= authentication_handler,
retry_delay_sec=0)
response = SimpleNamespace()
response.read = Mock(spec=(""), return_value=response_str.encode("UTF-8"))
response.headers = SimpleNamespace()
response.headers.get_content_charset= Mock(spec=(""), return_value="UTF-8")
response.getheader = Mock(spec=(""), return_value=header)
side_effects = [HTTPError("http://foo.bar", code, None, None, None) if code else DEFAULT for code in http_error_codes] if http_error_codes else None
h.__open__ = Mock(spec=(""),
return_value=response,
side_effect=side_effects)
return h
def __get_request__(self, open):
        return open.call_args[0][0]
| apache-2.0 |
yongshengwang/hue | desktop/core/ext-py/Pygments-1.3.1/pygments/formatter.py | 75 | 2790 | # -*- coding: utf-8 -*-
"""
pygments.formatter
~~~~~~~~~~~~~~~~~~
Base formatter class.
:copyright: Copyright 2006-2010 by the Pygments team, see AUTHORS.
:license: BSD, see LICENSE for details.
"""
import codecs
from pygments.util import get_bool_opt
from pygments.styles import get_style_by_name
__all__ = ['Formatter']
def _lookup_style(style):
if isinstance(style, basestring):
return get_style_by_name(style)
return style
class Formatter(object):
"""
Converts a token stream to text.
Options accepted:
``style``
The style to use, can be a string or a Style subclass
(default: "default"). Not used by e.g. the
TerminalFormatter.
``full``
Tells the formatter to output a "full" document, i.e.
a complete self-contained document. This doesn't have
any effect for some formatters (default: false).
``title``
If ``full`` is true, the title that should be used to
caption the document (default: '').
``encoding``
If given, must be an encoding name. This will be used to
convert the Unicode token strings to byte strings in the
output. If it is "" or None, Unicode strings will be written
to the output file, which most file-like objects do not
support (default: None).
``outencoding``
Overrides ``encoding`` if given.
"""
#: Name of the formatter
name = None
#: Shortcuts for the formatter
aliases = []
#: fn match rules
filenames = []
#: If True, this formatter outputs Unicode strings when no encoding
#: option is given.
unicodeoutput = True
def __init__(self, **options):
self.style = _lookup_style(options.get('style', 'default'))
self.full = get_bool_opt(options, 'full', False)
self.title = options.get('title', '')
self.encoding = options.get('encoding', None) or None
self.encoding = options.get('outencoding', None) or self.encoding
self.options = options
def get_style_defs(self, arg=''):
"""
Return the style definitions for the current style as a string.
``arg`` is an additional argument whose meaning depends on the
formatter used. Note that ``arg`` can also be a list or tuple
for some formatters like the html formatter.
"""
return ''
def format(self, tokensource, outfile):
"""
Format ``tokensource``, an iterable of ``(tokentype, tokenstring)``
tuples and write it into ``outfile``.
"""
if self.encoding:
# wrap the outfile in a StreamWriter
outfile = codecs.lookup(self.encoding)[3](outfile)
return self.format_unencoded(tokensource, outfile)
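# Minimal subclass sketch (hypothetical; mirrors the docstring above): a
# formatter that writes token values verbatim and ignores token types.
# Overriding format_unencoded lets the base format() handle the encoding
# option.
class _NullFormatterExample(Formatter):
    name = 'Null example'
    aliases = []
    def format_unencoded(self, tokensource, outfile):
        for ttype, value in tokensource:
            outfile.write(value)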
| apache-2.0 |
rickerbh/tictactoe_py | tictactoe/game_state.py | 1 | 2357 | from tictactoe.game_board import GameBoard
from collections import Counter
import functools
class GameState():
def __init__(self, board):
self._board = board
def is_draw(self):
def is_not_empty(item):
return not item == ""
if self.has_winner():
return False
return len(list(filter(is_not_empty, self._board.positions))) == len(self._board.positions)
def has_winner(self):
return any([self._row_check(),
self._column_check(),
self._diagonal_check()])
def _row_check(self):
return self.check_items(self._board.rows)
def check_items(self, items):
checked_items = map(self._all_same, items)
return any(checked_items)
def _all_same(self, items):
return len(set(items)) == 1 and not items[0] == ""
def _column_check(self):
return self.check_items(self._board.columns)
def _diagonal_check(self):
return self.check_items(self._board.diagonals)
def corner_available(self):
return "" in self._board.corners
def edge_available(self):
return "" in self._board.edges
def nearly_won_check(self, symbol, items):
freqs = Counter(items)
return freqs[symbol] == 2 and freqs[""] == 1
def win_available(self, symbol):
winnables = self._board.all_winnables
win_determinator = functools.partial(self.is_winnable, symbol)
return any(map(win_determinator, winnables))
def is_winnable(self, symbol, items):
return self.nearly_won_check(symbol, items)
def block_opposite_fork_opportunity(self, symbol):
if self._board.center[0] == symbol:
other_symbol_filter = lambda x: x != "" and x != symbol
return 2 == len(list(filter(other_symbol_filter, self._board.corners)))
return False
def block_corner_fork_opportunity(self, symbol):
edges = self._board.edges
pairs = [(0, 1), (0, 2), (1, 3), (2, 3)]
def is_other_symbol(position):
return position != "" and position != symbol
def find_fork(pair):
first = pair[0]
second = pair[1]
return is_other_symbol(edges[first]) and is_other_symbol(edges[second])
return any(map(find_fork, pairs))
| mit |
EmadMokhtar/Django | tests/model_fields/test_imagefield.py | 40 | 16219 | import os
import shutil
from unittest import skipIf
from django.core.exceptions import ImproperlyConfigured
from django.core.files import File
from django.core.files.images import ImageFile
from django.test import TestCase
from django.test.testcases import SerializeMixin
try:
from .models import Image
except ImproperlyConfigured:
Image = None
if Image:
from .models import (
Person, PersonWithHeight, PersonWithHeightAndWidth,
PersonDimensionsFirst, PersonTwoImages, TestImageFieldFile,
)
from .models import temp_storage_dir
else:
# Pillow not available, create dummy classes (tests will be skipped anyway)
class Person:
pass
PersonWithHeight = PersonWithHeightAndWidth = PersonDimensionsFirst = Person
PersonTwoImages = Person
class ImageFieldTestMixin(SerializeMixin):
"""
Mixin class to provide common functionality to ImageField test classes.
"""
lockfile = __file__
# Person model to use for tests.
PersonModel = PersonWithHeightAndWidth
# File class to use for file instances.
File = ImageFile
def setUp(self):
"""
Creates a pristine temp directory (or deletes and recreates if it
already exists) that the model uses as its storage directory.
Sets up two ImageFile instances for use in tests.
"""
if os.path.exists(temp_storage_dir):
shutil.rmtree(temp_storage_dir)
os.mkdir(temp_storage_dir)
file_path1 = os.path.join(os.path.dirname(__file__), '4x8.png')
self.file1 = self.File(open(file_path1, 'rb'), name='4x8.png')
file_path2 = os.path.join(os.path.dirname(__file__), '8x4.png')
self.file2 = self.File(open(file_path2, 'rb'), name='8x4.png')
def tearDown(self):
"""
Removes temp directory and all its contents.
"""
self.file1.close()
self.file2.close()
shutil.rmtree(temp_storage_dir)
def check_dimensions(self, instance, width, height, field_name='mugshot'):
"""
Asserts that the given width and height values match both the
field's height and width attributes and the height and width fields
(if defined) the image field is caching to.
Note, this method will check for dimension fields named by adding
"_width" or "_height" to the name of the ImageField. So, the
models used in these tests must have their fields named
accordingly.
By default, we check the field named "mugshot", but this can be
specified by passing the field_name parameter.
"""
field = getattr(instance, field_name)
# Check height/width attributes of field.
if width is None and height is None:
with self.assertRaises(ValueError):
getattr(field, 'width')
with self.assertRaises(ValueError):
getattr(field, 'height')
else:
self.assertEqual(field.width, width)
self.assertEqual(field.height, height)
# Check height/width fields of model, if defined.
width_field_name = field_name + '_width'
if hasattr(instance, width_field_name):
self.assertEqual(getattr(instance, width_field_name), width)
height_field_name = field_name + '_height'
if hasattr(instance, height_field_name):
self.assertEqual(getattr(instance, height_field_name), height)
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldTests(ImageFieldTestMixin, TestCase):
"""
Tests for ImageField that don't need to be run with each of the
different test model classes.
"""
def test_equal_notequal_hash(self):
"""
Bug #9786: Ensure '==' and '!=' work correctly.
Bug #9508: make sure hash() works as expected (equal items must
hash to the same value).
"""
# Create two Persons with different mugshots.
p1 = self.PersonModel(name="Joe")
p1.mugshot.save("mug", self.file1)
p2 = self.PersonModel(name="Bob")
p2.mugshot.save("mug", self.file2)
self.assertIs(p1.mugshot == p2.mugshot, False)
self.assertIs(p1.mugshot != p2.mugshot, True)
# Test again with an instance fetched from the db.
p1_db = self.PersonModel.objects.get(name="Joe")
self.assertIs(p1_db.mugshot == p2.mugshot, False)
self.assertIs(p1_db.mugshot != p2.mugshot, True)
# Instance from db should match the local instance.
self.assertIs(p1_db.mugshot == p1.mugshot, True)
self.assertEqual(hash(p1_db.mugshot), hash(p1.mugshot))
self.assertIs(p1_db.mugshot != p1.mugshot, False)
def test_instantiate_missing(self):
"""
        If the underlying file is unavailable, the object can still be
        instantiated without error.
"""
p = self.PersonModel(name="Joan")
p.mugshot.save("shot", self.file1)
p = self.PersonModel.objects.get(name="Joan")
path = p.mugshot.path
shutil.move(path, path + '.moved')
self.PersonModel.objects.get(name="Joan")
def test_delete_when_missing(self):
"""
Bug #8175: correctly delete an object where the file no longer
exists on the file system.
"""
p = self.PersonModel(name="Fred")
p.mugshot.save("shot", self.file1)
os.remove(p.mugshot.path)
p.delete()
def test_size_method(self):
"""
Bug #8534: FileField.size should not leave the file open.
"""
p = self.PersonModel(name="Joan")
p.mugshot.save("shot", self.file1)
# Get a "clean" model instance
p = self.PersonModel.objects.get(name="Joan")
# It won't have an opened file.
self.assertIs(p.mugshot.closed, True)
# After asking for the size, the file should still be closed.
p.mugshot.size
self.assertIs(p.mugshot.closed, True)
def test_pickle(self):
"""
        ImageField can be pickled and unpickled, and the image of the
        unpickled version is the same as the original.
"""
import pickle
p = Person(name="Joe")
p.mugshot.save("mug", self.file1)
dump = pickle.dumps(p)
p2 = Person(name="Bob")
p2.mugshot = self.file1
loaded_p = pickle.loads(dump)
self.assertEqual(p.mugshot, loaded_p.mugshot)
def test_defer(self):
self.PersonModel.objects.create(name='Joe', mugshot=self.file1)
with self.assertNumQueries(1):
qs = list(self.PersonModel.objects.defer('mugshot'))
with self.assertNumQueries(0):
self.assertEqual(qs[0].name, 'Joe')
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldTwoDimensionsTests(ImageFieldTestMixin, TestCase):
"""
Tests behavior of an ImageField and its dimensions fields.
"""
def test_constructor(self):
"""
Tests assigning an image field through the model's constructor.
"""
p = self.PersonModel(name='Joe', mugshot=self.file1)
self.check_dimensions(p, 4, 8)
p.save()
self.check_dimensions(p, 4, 8)
def test_image_after_constructor(self):
"""
Tests behavior when image is not passed in constructor.
"""
p = self.PersonModel(name='Joe')
# TestImageField value will default to being an instance of its
# attr_class, a TestImageFieldFile, with name == None, which will
# cause it to evaluate as False.
self.assertIsInstance(p.mugshot, TestImageFieldFile)
self.assertFalse(p.mugshot)
# Test setting a fresh created model instance.
p = self.PersonModel(name='Joe')
p.mugshot = self.file1
self.check_dimensions(p, 4, 8)
def test_create(self):
"""
Tests assigning an image in Manager.create().
"""
p = self.PersonModel.objects.create(name='Joe', mugshot=self.file1)
self.check_dimensions(p, 4, 8)
def test_default_value(self):
"""
The default value for an ImageField is an instance of
the field's attr_class (TestImageFieldFile in this case) with no
name (name set to None).
"""
p = self.PersonModel()
self.assertIsInstance(p.mugshot, TestImageFieldFile)
self.assertFalse(p.mugshot)
def test_assignment_to_None(self):
"""
Assigning ImageField to None clears dimensions.
"""
p = self.PersonModel(name='Joe', mugshot=self.file1)
self.check_dimensions(p, 4, 8)
# If image assigned to None, dimension fields should be cleared.
p.mugshot = None
self.check_dimensions(p, None, None)
p.mugshot = self.file2
self.check_dimensions(p, 8, 4)
def test_field_save_and_delete_methods(self):
"""
Tests assignment using the field's save method and deletion using
the field's delete method.
"""
p = self.PersonModel(name='Joe')
p.mugshot.save("mug", self.file1)
self.check_dimensions(p, 4, 8)
# A new file should update dimensions.
p.mugshot.save("mug", self.file2)
self.check_dimensions(p, 8, 4)
# Field and dimensions should be cleared after a delete.
p.mugshot.delete(save=False)
self.assertEqual(p.mugshot, None)
self.check_dimensions(p, None, None)
def test_dimensions(self):
"""
Dimensions are updated correctly in various situations.
"""
p = self.PersonModel(name='Joe')
# Dimensions should get set if file is saved.
p.mugshot.save("mug", self.file1)
self.check_dimensions(p, 4, 8)
# Test dimensions after fetching from database.
p = self.PersonModel.objects.get(name='Joe')
# Bug 11084: Dimensions should not get recalculated if file is
# coming from the database. We test this by checking if the file
# was opened.
self.assertIs(p.mugshot.was_opened, False)
self.check_dimensions(p, 4, 8)
# After checking dimensions on the image field, the file will have
# opened.
self.assertIs(p.mugshot.was_opened, True)
# Dimensions should now be cached, and if we reset was_opened and
# check dimensions again, the file should not have opened.
p.mugshot.was_opened = False
self.check_dimensions(p, 4, 8)
self.assertIs(p.mugshot.was_opened, False)
# If we assign a new image to the instance, the dimensions should
# update.
p.mugshot = self.file2
self.check_dimensions(p, 8, 4)
# Dimensions were recalculated, and hence file should have opened.
self.assertIs(p.mugshot.was_opened, True)
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldNoDimensionsTests(ImageFieldTwoDimensionsTests):
"""
Tests behavior of an ImageField with no dimension fields.
"""
PersonModel = Person
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldOneDimensionTests(ImageFieldTwoDimensionsTests):
"""
Tests behavior of an ImageField with one dimensions field.
"""
PersonModel = PersonWithHeight
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldDimensionsFirstTests(ImageFieldTwoDimensionsTests):
"""
Tests behavior of an ImageField where the dimensions fields are
defined before the ImageField.
"""
PersonModel = PersonDimensionsFirst
@skipIf(Image is None, "Pillow is required to test ImageField")
class ImageFieldUsingFileTests(ImageFieldTwoDimensionsTests):
"""
Tests behavior of an ImageField when assigning it a File instance
rather than an ImageFile instance.
"""
PersonModel = PersonDimensionsFirst
File = File
@skipIf(Image is None, "Pillow is required to test ImageField")
class TwoImageFieldTests(ImageFieldTestMixin, TestCase):
"""
Tests a model with two ImageFields.
"""
PersonModel = PersonTwoImages
def test_constructor(self):
p = self.PersonModel(mugshot=self.file1, headshot=self.file2)
self.check_dimensions(p, 4, 8, 'mugshot')
self.check_dimensions(p, 8, 4, 'headshot')
p.save()
self.check_dimensions(p, 4, 8, 'mugshot')
self.check_dimensions(p, 8, 4, 'headshot')
def test_create(self):
p = self.PersonModel.objects.create(mugshot=self.file1, headshot=self.file2)
self.check_dimensions(p, 4, 8)
self.check_dimensions(p, 8, 4, 'headshot')
def test_assignment(self):
p = self.PersonModel()
self.check_dimensions(p, None, None, 'mugshot')
self.check_dimensions(p, None, None, 'headshot')
p.mugshot = self.file1
self.check_dimensions(p, 4, 8, 'mugshot')
self.check_dimensions(p, None, None, 'headshot')
p.headshot = self.file2
self.check_dimensions(p, 4, 8, 'mugshot')
self.check_dimensions(p, 8, 4, 'headshot')
# Clear the ImageFields one at a time.
p.mugshot = None
self.check_dimensions(p, None, None, 'mugshot')
self.check_dimensions(p, 8, 4, 'headshot')
p.headshot = None
self.check_dimensions(p, None, None, 'mugshot')
self.check_dimensions(p, None, None, 'headshot')
def test_field_save_and_delete_methods(self):
p = self.PersonModel(name='Joe')
p.mugshot.save("mug", self.file1)
self.check_dimensions(p, 4, 8, 'mugshot')
self.check_dimensions(p, None, None, 'headshot')
p.headshot.save("head", self.file2)
self.check_dimensions(p, 4, 8, 'mugshot')
self.check_dimensions(p, 8, 4, 'headshot')
# We can use save=True when deleting the image field with null=True
# dimension fields and the other field has an image.
p.headshot.delete(save=True)
self.check_dimensions(p, 4, 8, 'mugshot')
self.check_dimensions(p, None, None, 'headshot')
p.mugshot.delete(save=False)
self.check_dimensions(p, None, None, 'mugshot')
self.check_dimensions(p, None, None, 'headshot')
def test_dimensions(self):
"""
Dimensions are updated correctly in various situations.
"""
p = self.PersonModel(name='Joe')
# Dimensions should get set for the saved file.
p.mugshot.save("mug", self.file1)
p.headshot.save("head", self.file2)
self.check_dimensions(p, 4, 8, 'mugshot')
self.check_dimensions(p, 8, 4, 'headshot')
# Test dimensions after fetching from database.
p = self.PersonModel.objects.get(name='Joe')
# Bug 11084: Dimensions should not get recalculated if file is
# coming from the database. We test this by checking if the file
# was opened.
self.assertIs(p.mugshot.was_opened, False)
self.assertIs(p.headshot.was_opened, False)
self.check_dimensions(p, 4, 8, 'mugshot')
self.check_dimensions(p, 8, 4, 'headshot')
# After checking dimensions on the image fields, the files will
# have been opened.
self.assertIs(p.mugshot.was_opened, True)
self.assertIs(p.headshot.was_opened, True)
# Dimensions should now be cached, and if we reset was_opened and
# check dimensions again, the file should not have opened.
p.mugshot.was_opened = False
p.headshot.was_opened = False
self.check_dimensions(p, 4, 8, 'mugshot')
self.check_dimensions(p, 8, 4, 'headshot')
self.assertIs(p.mugshot.was_opened, False)
self.assertIs(p.headshot.was_opened, False)
# If we assign a new image to the instance, the dimensions should
# update.
p.mugshot = self.file2
p.headshot = self.file1
self.check_dimensions(p, 8, 4, 'mugshot')
self.check_dimensions(p, 4, 8, 'headshot')
# Dimensions were recalculated, and hence file should have opened.
self.assertIs(p.mugshot.was_opened, True)
self.assertIs(p.headshot.was_opened, True)
| mit |
Danielhiversen/home-assistant | homeassistant/components/sensor/tcp.py | 3 | 4829 | """
Support for TCP socket based sensors.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.tcp/
"""
import logging
import socket
import select
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_NAME, CONF_HOST, CONF_PORT, CONF_PAYLOAD, CONF_TIMEOUT,
CONF_UNIT_OF_MEASUREMENT, CONF_VALUE_TEMPLATE)
from homeassistant.exceptions import TemplateError
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
CONF_BUFFER_SIZE = 'buffer_size'
CONF_VALUE_ON = 'value_on'
DEFAULT_BUFFER_SIZE = 1024
DEFAULT_NAME = 'TCP Sensor'
DEFAULT_TIMEOUT = 10
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Required(CONF_HOST): cv.string,
vol.Required(CONF_PORT): cv.port,
vol.Required(CONF_PAYLOAD): cv.string,
vol.Optional(CONF_BUFFER_SIZE, default=DEFAULT_BUFFER_SIZE):
cv.positive_int,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_TIMEOUT, default=DEFAULT_TIMEOUT): cv.positive_int,
vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string,
vol.Optional(CONF_VALUE_ON): cv.string,
vol.Optional(CONF_VALUE_TEMPLATE): cv.template,
})
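# A hypothetical configuration.yaml entry for this platform (host, port and
# payload values are illustrative only):
#
# sensor:
#   - platform: tcp
#     name: TCP Sensor
#     host: 127.0.0.1
#     port: 8888
#     payload: "status\n"
#     value_template: "{{ value }}"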
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the TCP Sensor."""
add_entities([TcpSensor(hass, config)])
class TcpSensor(Entity):
"""Implementation of a TCP socket based sensor."""
required = tuple()
def __init__(self, hass, config):
"""Set all the config values if they exist and get initial state."""
value_template = config.get(CONF_VALUE_TEMPLATE)
if value_template is not None:
value_template.hass = hass
self._hass = hass
self._config = {
CONF_NAME: config.get(CONF_NAME),
CONF_HOST: config.get(CONF_HOST),
CONF_PORT: config.get(CONF_PORT),
CONF_TIMEOUT: config.get(CONF_TIMEOUT),
CONF_PAYLOAD: config.get(CONF_PAYLOAD),
CONF_UNIT_OF_MEASUREMENT: config.get(CONF_UNIT_OF_MEASUREMENT),
CONF_VALUE_TEMPLATE: value_template,
CONF_VALUE_ON: config.get(CONF_VALUE_ON),
CONF_BUFFER_SIZE: config.get(CONF_BUFFER_SIZE),
}
self._state = None
self.update()
@property
def name(self):
"""Return the name of this sensor."""
name = self._config[CONF_NAME]
if name is not None:
return name
return super(TcpSensor, self).name
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity."""
return self._config[CONF_UNIT_OF_MEASUREMENT]
def update(self):
"""Get the latest value for this sensor."""
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
sock.settimeout(self._config[CONF_TIMEOUT])
try:
sock.connect(
(self._config[CONF_HOST], self._config[CONF_PORT]))
except socket.error as err:
_LOGGER.error(
"Unable to connect to %s on port %s: %s",
self._config[CONF_HOST], self._config[CONF_PORT], err)
return
try:
sock.send(self._config[CONF_PAYLOAD].encode())
except socket.error as err:
_LOGGER.error(
"Unable to send payload %r to %s on port %s: %s",
self._config[CONF_PAYLOAD], self._config[CONF_HOST],
self._config[CONF_PORT], err)
return
readable, _, _ = select.select(
[sock], [], [], self._config[CONF_TIMEOUT])
if not readable:
_LOGGER.warning(
"Timeout (%s second(s)) waiting for a response after "
"sending %r to %s on port %s.",
self._config[CONF_TIMEOUT], self._config[CONF_PAYLOAD],
self._config[CONF_HOST], self._config[CONF_PORT])
return
value = sock.recv(self._config[CONF_BUFFER_SIZE]).decode()
if self._config[CONF_VALUE_TEMPLATE] is not None:
try:
self._state = self._config[CONF_VALUE_TEMPLATE].render(
value=value)
return
except TemplateError as err:
_LOGGER.error(
                    "Unable to render template of %r with value %r: %s",
                    self._config[CONF_VALUE_TEMPLATE], value, err)
return
self._state = value
| mit |
ukanga/SickRage | lib/rebulk/test/rules_module.py | 21 | 1177 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# pylint: disable=no-self-use, pointless-statement, missing-docstring, invalid-name
from ..match import Match
from ..rules import Rule
class Rule3(Rule):
def when(self, matches, context):
return context.get('when')
def then(self, matches, when_response, context):
assert when_response in [True, False]
matches.append(Match(3, 4))
class Rule2(Rule):
dependency = Rule3
def when(self, matches, context):
return True
def then(self, matches, when_response, context):
assert when_response
matches.append(Match(3, 4))
class Rule1(Rule):
dependency = Rule2
def when(self, matches, context):
return True
def then(self, matches, when_response, context):
assert when_response
matches.clear()
class Rule0(Rule):
dependency = Rule1
def when(self, matches, context):
return True
def then(self, matches, when_response, context):
assert when_response
matches.append(Match(3, 4))
class Rule1Disabled(Rule1):
name = "Disabled Rule1"
def enabled(self, context):
return False
| gpl-3.0 |
zhjunlang/kbengine | kbe/src/lib/python/Lib/mailcap.py | 100 | 7437 | """Mailcap file handling. See RFC 1524."""
import os
__all__ = ["getcaps","findmatch"]
# Part 1: top-level interface.
def getcaps():
"""Return a dictionary containing the mailcap database.
The dictionary maps a MIME type (in all lowercase, e.g. 'text/plain')
to a list of dictionaries corresponding to mailcap entries. The list
collects all the entries for that MIME type from all available mailcap
files. Each dictionary contains key-value pairs for that MIME type,
where the viewing command is stored with the key "view".
"""
caps = {}
for mailcap in listmailcapfiles():
try:
fp = open(mailcap, 'r')
except OSError:
continue
with fp:
morecaps = readmailcapfile(fp)
for key, value in morecaps.items():
                if key not in caps:
caps[key] = value
else:
caps[key] = caps[key] + value
return caps
def listmailcapfiles():
"""Return a list of all mailcap files found on the system."""
# This is mostly a Unix thing, but we use the OS path separator anyway
if 'MAILCAPS' in os.environ:
pathstr = os.environ['MAILCAPS']
mailcaps = pathstr.split(os.pathsep)
else:
if 'HOME' in os.environ:
home = os.environ['HOME']
else:
# Don't bother with getpwuid()
home = '.' # Last resort
mailcaps = [home + '/.mailcap', '/etc/mailcap',
'/usr/etc/mailcap', '/usr/local/etc/mailcap']
return mailcaps
# Part 2: the parser.
def readmailcapfile(fp):
"""Read a mailcap file and return a dictionary keyed by MIME type.
Each MIME type is mapped to an entry consisting of a list of
dictionaries; the list will contain more than one such dictionary
if a given MIME type appears more than once in the mailcap file.
Each dictionary contains key-value pairs for that MIME type, where
the viewing command is stored with the key "view".
"""
caps = {}
while 1:
line = fp.readline()
if not line: break
# Ignore comments and blank lines
if line[0] == '#' or line.strip() == '':
continue
nextline = line
# Join continuation lines
while nextline[-2:] == '\\\n':
nextline = fp.readline()
if not nextline: nextline = '\n'
line = line[:-2] + nextline
# Parse the line
key, fields = parseline(line)
if not (key and fields):
continue
# Normalize the key
types = key.split('/')
for j in range(len(types)):
types[j] = types[j].strip()
key = '/'.join(types).lower()
# Update the database
if key in caps:
caps[key].append(fields)
else:
caps[key] = [fields]
return caps
def parseline(line):
"""Parse one entry in a mailcap file and return a dictionary.
The viewing command is stored as the value with the key "view",
and the rest of the fields produce key-value pairs in the dict.
"""
fields = []
i, n = 0, len(line)
while i < n:
field, i = parsefield(line, i, n)
fields.append(field)
i = i+1 # Skip semicolon
if len(fields) < 2:
return None, None
key, view, rest = fields[0], fields[1], fields[2:]
fields = {'view': view}
for field in rest:
i = field.find('=')
if i < 0:
fkey = field
fvalue = ""
else:
fkey = field[:i].strip()
fvalue = field[i+1:].strip()
if fkey in fields:
# Ignore it
pass
else:
fields[fkey] = fvalue
return key, fields
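# For illustration, a hypothetical entry such as
#     'video/mpeg; xmpeg %s; test=test -n "$DISPLAY"\n'
# parses to the pair
#     ('video/mpeg', {'view': 'xmpeg %s', 'test': 'test -n "$DISPLAY"'})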
def parsefield(line, i, n):
"""Separate one key-value pair in a mailcap entry."""
start = i
while i < n:
c = line[i]
if c == ';':
break
elif c == '\\':
i = i+2
else:
i = i+1
return line[start:i].strip(), i
# Part 3: using the database.
def findmatch(caps, MIMEtype, key='view', filename="/dev/null", plist=[]):
"""Find a match for a mailcap entry.
Return a tuple containing the command line, and the mailcap entry
used; (None, None) if no match is found. This may invoke the
'test' command of several matching entries before deciding which
entry to use.
"""
entries = lookup(caps, MIMEtype, key)
# XXX This code should somehow check for the needsterminal flag.
for e in entries:
if 'test' in e:
test = subst(e['test'], filename, plist)
if test and os.system(test) != 0:
continue
command = subst(e[key], MIMEtype, filename, plist)
return command, e
return None, None
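# For example (hypothetical database entry): if caps maps 'video/mpeg' to
# [{'view': 'xmpeg %s'}], then findmatch(caps, 'video/mpeg',
# filename='/tmp/tmp1234') returns
# ('xmpeg /tmp/tmp1234', {'view': 'xmpeg %s'}).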
def lookup(caps, MIMEtype, key=None):
entries = []
if MIMEtype in caps:
entries = entries + caps[MIMEtype]
MIMEtypes = MIMEtype.split('/')
MIMEtype = MIMEtypes[0] + '/*'
if MIMEtype in caps:
entries = entries + caps[MIMEtype]
if key is not None:
entries = [e for e in entries if key in e]
return entries
def subst(field, MIMEtype, filename, plist=[]):
# XXX Actually, this is Unix-specific
res = ''
i, n = 0, len(field)
while i < n:
c = field[i]; i = i+1
if c != '%':
if c == '\\':
c = field[i:i+1]; i = i+1
res = res + c
else:
c = field[i]; i = i+1
if c == '%':
res = res + c
elif c == 's':
res = res + filename
elif c == 't':
res = res + MIMEtype
elif c == '{':
start = i
while i < n and field[i] != '}':
i = i+1
name = field[start:i]
i = i+1
res = res + findparam(name, plist)
# XXX To do:
# %n == number of parts if type is multipart/*
# %F == list of alternating type and filename for parts
else:
res = res + '%' + c
return res
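# For example (hypothetical values):
#     subst('xmpeg %s', 'video/mpeg', '/tmp/tmp1234') == 'xmpeg /tmp/tmp1234'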
def findparam(name, plist):
name = name.lower() + '='
n = len(name)
for p in plist:
if p[:n].lower() == name:
return p[n:]
return ''
# Part 4: test program.
def test():
import sys
caps = getcaps()
if not sys.argv[1:]:
show(caps)
return
for i in range(1, len(sys.argv), 2):
args = sys.argv[i:i+2]
if len(args) < 2:
print("usage: mailcap [MIMEtype file] ...")
return
MIMEtype = args[0]
file = args[1]
command, e = findmatch(caps, MIMEtype, 'view', file)
if not command:
print("No viewer found for", type)
else:
print("Executing:", command)
sts = os.system(command)
if sts:
print("Exit status:", sts)
def show(caps):
print("Mailcap files:")
for fn in listmailcapfiles(): print("\t" + fn)
print()
if not caps: caps = getcaps()
print("Mailcap entries:")
print()
ckeys = sorted(caps)
for type in ckeys:
print(type)
entries = caps[type]
for e in entries:
keys = sorted(e)
for k in keys:
print(" %-15s" % k, e[k])
print()
if __name__ == '__main__':
test()
| lgpl-3.0 |
idaholab/civet | ci/recipe/RecipeWriter.py | 2 | 3316 |
# Copyright 2016 Battelle Energy Alliance, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import unicode_literals, absolute_import
import os
from ci.recipe import file_utils
from django.utils.six import StringIO
try:
import configparser
except ImportError:
import ConfigParser as configparser
def add_list(config, recipe, recipe_key, section, prefix):
l = recipe.get(recipe_key, [])
if l:
config.add_section(section)
for i, dep in enumerate(l):
config.set(section, "%s%s" % (prefix, i), dep)
def write_recipe_to_string(recipe):
config = configparser.ConfigParser()
config.optionxform = str
config.add_section("Main")
sections = ["steps",
"global_sources",
"global_env",
"pullrequest_dependencies",
"manual_dependencies",
"push_dependencies",
]
for key, value in recipe.items():
if key not in sections:
if isinstance(value, list):
config.set("Main", key, ','.join(value))
else:
config.set("Main", key, str(value))
add_list(config, recipe, "pullrequest_dependencies", "PullRequest Dependencies", "recipe")
add_list(config, recipe, "push_dependencies", "Push Dependencies", "recipe")
add_list(config, recipe, "manual_dependencies", "Manual Dependencies", "recipe")
add_list(config, recipe, "global_sources", "Global Sources", "source")
global_env = recipe.get("global_env", {})
if global_env:
config.add_section("Global Environment")
for key, value in global_env.items():
config.set("Global Environment", key, str(value))
steps = recipe.get("steps", [])
for step in steps:
name = step["name"]
config.add_section(name)
for key, value in step.items():
if key != "name" and key != "environment" and key != "position":
config.set(name, key, str(value))
for key, value in step["environment"].items():
config.set(name, key, str(value))
output = StringIO()
config.write(output)
return output.getvalue()
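def _example_recipe_string():
    # Illustrative use only: this hypothetical recipe dict mimics the shape
    # produced by RecipeReader. Top-level scalars land in [Main], dependency
    # and source lists get numbered keys, and each step gets its own section
    # holding its options and environment.
    recipe = {
        'name': 'Test recipe',
        'steps': [
            {'name': 'Compile', 'position': 0, 'script': 'build.sh',
             'environment': {'METHOD': 'opt'}},
        ],
    }
    return write_recipe_to_string(recipe)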
def write_recipe_to_repo(repo_dir, recipe, filename):
"""
    Write a recipe dictionary as a .cfg file inside the recipe repo.
Input:
repo_dir: str: path to recipe repo dir
recipe: dict of values as created by RecipeReader
filename: .cfg file to write
Return:
bool: True on success, else False
"""
full_path = os.path.join(repo_dir, filename)
if not file_utils.is_subdir(full_path, repo_dir):
print("Not a valid recipe filename: %s" % filename)
return False
data = write_recipe_to_string(recipe)
with open(full_path, "w") as f:
f.write(data)
return True
| apache-2.0 |
swordqiu/ASPathInference | script/inferRelation.py | 1 | 10819 | #!/usr/bin/env python
import sys
import string
import bgplib
import argparse
R = 60
L = 1
g_asgraph = {}
g_transfreq = {}
g_prefixfreq = {}
g_maxfreq = 0
g_asexit = {}
def addExit(src, exitas):
global g_asexit
if not g_asexit.has_key(src):
g_asexit[src] = {}
g_asexit[src][exitas] = g_asexit[src].get(exitas, 0) + 1
def isPopularPrefix(prefix):
global g_prefixfreq, g_maxfreq
if g_prefixfreq.has_key(prefix) and g_prefixfreq[prefix] >= g_maxfreq*2/3:
return True
else:
return False
def addEdge(as1, as2):
if not g_asgraph.has_key(as1):
g_asgraph[as1] = {}
if not g_asgraph[as1].has_key(as2):
g_asgraph[as1][as2] = [0, True]
if not g_asgraph.has_key(as2):
g_asgraph[as2] = {}
if not g_asgraph[as2].has_key(as1):
g_asgraph[as2][as1] = [0, True]
def addPrefix(prefix):
global g_prefixfreq, g_maxfreq;
g_prefixfreq[prefix] = g_prefixfreq.get(prefix, 0) + 1;
if g_maxfreq < g_prefixfreq[prefix]:
g_maxfreq = g_prefixfreq[prefix];
def constructGraph(prefix, nexthop, path):
global g_transfreq;
if not bgplib.validRoute(prefix, path):
return;
addPrefix(prefix);
if len(path) >= 2:
for i in range(len(path) - 1):
addEdge(path[i], path[i+1]);
if i != 0:
g_transfreq[path[i]] = g_transfreq.get(path[i], 0) + 1;
addExit(int(path[-1]), int(path[-2]));
def getDegree(asn):
global g_asgraph;
if g_asgraph.has_key(asn):
return len(g_asgraph[asn]);
else:
return 0;
def getTransfreq(asn):
global g_transfreq;
if g_transfreq.has_key(asn):
return g_transfreq[asn];
else:
return 0;
def getMetric(asn):
return getDegree(asn);
#if getTransfreq(asn) == 0:
# return 0;
#else:
# return getDegree(asn);
filternum = 0;
totalnum = 0;
def peakresearch(prefix, path):
global filternum, totalnum;
totalnum = totalnum + 1;
if not isPopularPrefix(prefix):
filternum = filternum + 1;
return;
isUp = True;
peak = [];
for i in range(len(path)-1):
if getDegree(path[i]) < getDegree(path[i+1]):
isUp = True;
else:
if isUp:
peak.append(path[i]);
isUp = False;
if len(peak) > 1:
tmpstr = "";
for i in range(len(path)):
tmpstr = tmpstr + str(path[i]) + "(" + str(getDegree(path[i])) + ") ";
print tmpstr;
def findMaxDegree(path):
maxidx = -1
maxdeg = 0
for i in range(len(path)):
if maxdeg < getMetric(path[i]):
maxdeg = getMetric(path[i])
maxidx = i
return maxidx
def statisTransitNumber(prefix, nexthop, path):
global g_asgraph;
if not bgplib.validRoute(prefix, path):
return;
if not isPopularPrefix(prefix):
return;
#print path;
if len(path) >= 2:
topidx = findMaxDegree(path);
if topidx > 0:
for i in range(topidx):
g_asgraph[path[i]][path[i+1]][0] = g_asgraph[path[i]][path[i+1]][0] + 1;
#print path[i], path[i+1];
#if path[i] == 32756 and path[i+1] == 26677:
# print path[i], path[i+1], g_asgraph[path[i]][path[i+1]];
if topidx + 1 < len(path):
for i in range(topidx+1, len(path)):
g_asgraph[path[i]][path[i-1]][0] = g_asgraph[path[i]][path[i-1]][0] + 1;
#if path[i] == 32756 and path[i-1] == 26677:
# print path[i], path[i-1], g_asgraph[path[i]][path[i-1]];
def isSibling(as1, as2):
tr12 = g_asgraph[as1][as2][0];
tr21 = g_asgraph[as2][as1][0];
if (tr12 > L and tr21 > L) or (tr12 > 0 and tr12 <= L and tr21 > 0 and tr21 <= L):
return True;
else:
return False;
def identifyNotPeerLink(prefix, nexthop, path):
global g_asgraph;
if not bgplib.validRoute(prefix, path):
return;
if not isPopularPrefix(prefix):
return;
if len(path) >= 2:
topidx = findMaxDegree(path);
if topidx >= 2:
for i in range(topidx-1):
g_asgraph[path[i]][path[i+1]][1] = False;
if topidx + 2 < len(path):
for i in range(topidx+1, len(path) - 1):
g_asgraph[path[i]][path[i+1]][1] = False;
if topidx > 0 and topidx < len(path) - 1 and not isSibling(path[topidx-1], path[topidx]) and not isSibling(path[topidx], path[topidx+1]):
if getDegree(path[topidx-1]) > getDegree(path[topidx+1]):
g_asgraph[path[topidx]][path[topidx+1]][1] = False;
else:
g_asgraph[path[topidx-1]][path[topidx]][1] = False;
def printLinkPref(filename):
with open(filename, 'w') as f:
for i in g_asexit.keys():
for j in g_asexit[i].keys():
f.write(str(i) + ":" + str(j) + ":" + str(g_asexit[i][j]) + "\n");
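# The classification below is a degree-based heuristic in the spirit of
# Gao's relationship-inference algorithm: tr12/tr21 count how often each
# side appeared to provide transit for the other (from statisTransitNumber),
# p12/p21 record whether the link may still be a peering link (from
# identifyNotPeerLink), and R and L are the degree-ratio and transit-count
# thresholds defined at the top of this file.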
def getRelationship(as1, as2):
tr12 = g_asgraph[as1][as2][0];
p12 = g_asgraph[as1][as2][1];
tr21 = g_asgraph[as2][as1][0];
p21 = g_asgraph[as2][as1][1];
d1 = getDegree(as1);
d2 = getDegree(as2);
if p12 and p21 and d1*1.0/d2 < R and d2*1.0/d1 > 1.0/R and tr12+tr21 > 0:
return bgplib.PEER_TO_PEER;
elif (tr21 > L and tr12 > 0 and tr12 <= L) or (tr21 > 0 and tr12 == 0):
return bgplib.PROVIDER_TO_CUSTOMER;
elif (tr12 > L and tr21 > 0 and tr21 <= L) or (tr12 > 0 and tr21 == 0):
return bgplib.CUSTOMER_TO_PROVIDER;
#elif (tr12 > L and tr21 > L) or (tr12 > 0 and tr12 <= L and tr21 > 0 and tr21 <= L):
# return bgplib.SIBLING_TO_SIBLING;
else:
return bgplib.SIBLING_TO_SIBLING;
def printRelationship(filename):
p2c = 0
p2p = 0
s2s = 0
with open(filename, 'w') as f:
for asn in sorted(g_asgraph.keys()):
nblist = g_asgraph[asn]
provider = []
customer = []
peer = []
sibling = []
for asn2 in nblist.keys():
rel = getRelationship(asn, asn2)
if rel == bgplib.CUSTOMER_TO_PROVIDER:
provider.append(asn2)
elif rel == bgplib.PROVIDER_TO_CUSTOMER:
customer.append(asn2)
elif rel == bgplib.PEER_TO_PEER:
peer.append(asn2)
else:
sibling.append(asn2)
f.write("AS" + str(asn) + ":" + "\n")
f.write("Providers:#" + str(len(provider)) + "::" + bgplib.array2str(provider, ":") + "\n")
f.write("Customers:#" + str(len(customer)) + "::" + bgplib.array2str(customer, ":") + "\n")
f.write("Peers:#" + str(len(peer)) + "::" + bgplib.array2str(peer, ":") + "\n")
f.write("Siblings:#" + str(len(sibling)) + "::" + bgplib.array2str(sibling, ":") + "\n")
p2c = p2c + len(provider) + len(customer)
p2p = p2p + len(peer)
s2s = s2s + len(sibling)
print "P2C: " + str(p2c/2)
print "P2P: " + str(p2p/2)
print "S2S: " + str(s2s/2)
def printRelationship2(filename):
try:
f = open(filename, 'w');
for asn in g_asgraph.keys():
nblist = g_asgraph[asn];
for asn2 in nblist.keys():
if asn < asn2:
rel = getRelationship(asn, asn2);
#if g_asgraph[asn][asn2][0]*g_asgraph[asn2][asn][0] != 0:
# mark = "*";
# if g_asgraph[asn][asn2][3]*g_asgraph[asn2][asn][3] != 0:
# mark = mark + "&";
#else:
# mark = "";
#f.write(str(asn) + " " + str(asn2) + " " + relationCode2Str(rel) + " " + str(g_asgraph[asn][asn2]) + " " + str(g_asgraph[asn2][asn]) + " " + str(getPeerStatistics(asn)) + " " + str(getPeerStatistics(asn2)) + " " + str(asaccess.get(asn, 0)) + " " + str(asaccess.get(asn2, 0)) + " " + mark + "\n");
f.write(str(asn) + " " + str(asn2) + " " + str(rel) + " " + str(g_asgraph[asn][asn2]) + " " + str(g_asgraph[asn2][asn]) + "\n");
f.close();
except:
print "Exception: ", sys.exc_info()[0];
raise;
def readTableList(filename):
try:
f = open(filename, "r");
line = f.readline();
tlt = [];
while len(line) > 0:
if line[0] != '#':
tlt.append(tuple(line[:-1].split(" ")));
line = f.readline();
f.close();
return tlt;
except:
print "Exception", sys.exc_info()[0];
raise;
def main():
parser = argparse.ArgumentParser(description='AS Relationship Inference')
parser.add_argument('--tablelist', metavar='<TABLE_LIST>',
help='File that contains a list of tables')
parser.add_argument('--table', metavar='<TABLE>',
help='File of a BGP routing table')
parser.add_argument('--as-relationship', metavar='<RELATION>',
required=True, help='Output file of AS relationship')
parser.add_argument('--link-preference', metavar='<PREFERENCE>',
required=True, help='Output file of Prefix list')
args = parser.parse_args()
if args.tablelist:
tablelist = readTableList(args.tablelist)
elif args.table:
tablelist = [(args.table, "TABLE")]
else:
raise Exception('Missing either --tablelist or --table arguments')
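    # Three passes over every table: pass 0 builds the AS graph and degree
    # counts (constructGraph), pass 1 counts transit appearances on each
    # side of a path's highest-degree AS (statisTransitNumber), and pass 2
    # rules out peering for links that cannot sit at the top of a
    # valley-free path (identifyNotPeerLink).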
for i in range(3):
for table in tablelist:
if table[1] == "TABLE":
if i == 0:
bgplib.readBGPTable(table[0], constructGraph)
elif i == 1:
bgplib.readBGPTable(table[0], statisTransitNumber)
else:
bgplib.readBGPTable(table[0], identifyNotPeerLink)
elif table[1] == "DUMP":
if i == 0:
bgplib.readBGPDump(table[0], constructGraph)
elif i == 1:
bgplib.readBGPDump(table[0], statisTransitNumber)
else:
bgplib.readBGPDump(table[0], identifyNotPeerLink)
else:
print "unsupported format"
sys.exit(-1)
printRelationship(args.as_relationship)
printLinkPref(args.link_preference)
#printRelationship2(sys.argv[4])
if __name__ == '__main__':
main()
| gpl-2.0 |
anryko/ansible | lib/ansible/modules/cloud/amazon/ec2_vpc_vpn_info.py | 6 | 7525 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: ec2_vpc_vpn_info
short_description: Gather information about VPN Connections in AWS.
description:
- Gather information about VPN Connections in AWS.
- This module was called C(ec2_vpc_vpn_facts) before Ansible 2.9. The usage did not change.
version_added: "2.6"
requirements: [ boto3 ]
author: Madhura Naniwadekar (@Madhura-CSI)
options:
filters:
description:
- A dict of filters to apply. Each dict item consists of a filter key and a filter value.
See U(https://docs.aws.amazon.com/AWSEC2/latest/APIReference/API_DescribeVpnConnections.html) for possible filters.
required: false
type: dict
vpn_connection_ids:
description:
- Get details of a specific VPN connections using vpn connection ID/IDs. This value should be provided as a list.
required: false
type: list
elements: str
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# # Note: These examples do not set authentication details, see the AWS Guide for details.
- name: Gather information about all vpn connections
ec2_vpc_vpn_info:
- name: Gather information about a filtered list of vpn connections, based on tags
ec2_vpc_vpn_info:
filters:
"tag:Name": test-connection
register: vpn_conn_info
- name: Gather information about vpn connections by specifying connection IDs.
ec2_vpc_vpn_info:
filters:
vpn-gateway-id: vgw-cbe66beb
register: vpn_conn_info
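# A hedged sketch based on the vpn_connection_ids option documented above
# (the ID is a placeholder, not a real connection):
- name: Gather information about vpn connections using connection IDs
ec2_vpc_vpn_info:
vpn_connection_ids:
- vpn-f700d5c0
register: vpn_conn_info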
'''
RETURN = '''
vpn_connections:
description: List of one or more VPN Connections.
returned: always
type: complex
contains:
category:
description: The category of the VPN connection.
returned: always
type: str
sample: VPN
customer_gateway_configuration:
description: The configuration information for the VPN connection's customer gateway (in the native XML format).
returned: always
type: str
customer_gateway_id:
description: The ID of the customer gateway at your end of the VPN connection.
returned: always
type: str
sample: cgw-17a53c37
options:
description: The VPN connection options.
returned: always
type: dict
sample: {
"static_routes_only": false
}
routes:
description: List of static routes associated with the VPN connection.
returned: always
type: complex
contains:
destination_cidr_block:
description: The CIDR block associated with the local subnet of the customer data center.
returned: always
type: str
sample: 10.0.0.0/16
state:
description: The current state of the static route.
returned: always
type: str
sample: available
state:
description: The current state of the VPN connection.
returned: always
type: str
sample: available
tags:
description: Any tags assigned to the VPN connection.
returned: always
type: dict
sample: {
"Name": "test-conn"
}
type:
description: The type of VPN connection.
returned: always
type: str
sample: ipsec.1
vgw_telemetry:
description: Information about the VPN tunnel.
returned: always
type: complex
contains:
accepted_route_count:
description: The number of accepted routes.
returned: always
type: int
sample: 0
last_status_change:
description: The date and time of the last change in status.
returned: always
type: str
sample: "2018-02-09T14:35:27+00:00"
outside_ip_address:
description: The Internet-routable IP address of the virtual private gateway's outside interface.
returned: always
type: str
sample: 13.127.79.191
status:
description: The status of the VPN tunnel.
returned: always
type: str
sample: DOWN
status_message:
description: If an error occurs, a description of the error.
returned: always
type: str
sample: IPSEC IS DOWN
certificate_arn:
description: The Amazon Resource Name of the virtual private gateway tunnel endpoint certificate.
returned: when a private certificate is used for authentication
type: str
sample: "arn:aws:acm:us-east-1:123456789101:certificate/c544d8ce-20b8-4fff-98b0-example"
vpn_connection_id:
description: The ID of the VPN connection.
returned: always
type: str
sample: vpn-f700d5c0
vpn_gateway_id:
description: The ID of the virtual private gateway at the AWS side of the VPN connection.
returned: always
type: str
sample: vgw-cbe56bfb
'''
import json
try:
from botocore.exceptions import ClientError, BotoCoreError
except ImportError:
pass # caught by AnsibleAWSModule
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import ansible_dict_to_boto3_filter_list, boto3_tag_list_to_ansible_dict, camel_dict_to_snake_dict
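# describe_vpn_connections() returns datetime objects that json.dumps cannot
# serialize directly; date_handler below coerces anything exposing
# isoformat() to a string so the JSON round-trip succeeds.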
def date_handler(obj):
return obj.isoformat() if hasattr(obj, 'isoformat') else obj
def list_vpn_connections(connection, module):
params = dict()
params['Filters'] = ansible_dict_to_boto3_filter_list(module.params.get('filters'))
params['VpnConnectionIds'] = module.params.get('vpn_connection_ids')
try:
result = json.loads(json.dumps(connection.describe_vpn_connections(**params), default=date_handler))
except ValueError as e:
module.fail_json_aws(e, msg="Cannot validate JSON data")
except (ClientError, BotoCoreError) as e:
module.fail_json_aws(e, msg="Could not describe customer gateways")
snaked_vpn_connections = [camel_dict_to_snake_dict(vpn_connection) for vpn_connection in result['VpnConnections']]
if snaked_vpn_connections:
for vpn_connection in snaked_vpn_connections:
vpn_connection['tags'] = boto3_tag_list_to_ansible_dict(vpn_connection.get('tags', []))
module.exit_json(changed=False, vpn_connections=snaked_vpn_connections)
def main():
argument_spec = dict(
vpn_connection_ids=dict(default=[], type='list'),
filters=dict(default={}, type='dict')
)
module = AnsibleAWSModule(argument_spec=argument_spec,
mutually_exclusive=[['vpn_connection_ids', 'filters']],
supports_check_mode=True)
if module._module._name == 'ec2_vpc_vpn_facts':
module._module.deprecate("The 'ec2_vpc_vpn_facts' module has been renamed to 'ec2_vpc_vpn_info'", version='2.13')
connection = module.client('ec2')
list_vpn_connections(connection, module)
if __name__ == '__main__':
main()
| gpl-3.0 |
lidavidm/mathics-heroku | venv/lib/python2.7/site-packages/sympy/combinatorics/prufer.py | 1 | 11827 | from sympy.core import Basic
from sympy.core.compatibility import iterable, as_int
from sympy.utilities.iterables import flatten
from collections import defaultdict
class Prufer(Basic):
"""
The Prufer correspondence is an algorithm that describes the
bijection between labeled trees and the Prufer code. A Prufer
code of a labeled tree is unique up to isomorphism and has
a length of n - 2.
Prufer sequences were first used by Heinz Prufer to give a
proof of Cayley's formula.
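Examples
========
A quick sanity check (mirroring the method doctests below):
>>> from sympy.combinatorics.prufer import Prufer
>>> Prufer([[0, 1], [0, 2], [0, 3]]).prufer_repr
[0, 0]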
References
==========
.. [1] http://mathworld.wolfram.com/LabeledTree.html
"""
_prufer_repr = None
_tree_repr = None
_nodes = None
_rank = None
@property
def prufer_repr(self):
"""Returns Prufer sequence for the Prufer object.
This sequence is found by repeatedly removing the lowest-numbered leaf,
recording the node it was attached to, and continuing until only
two vertices remain. The Prufer sequence is the list of recorded nodes.
Examples
========
>>> from sympy.combinatorics.prufer import Prufer
>>> Prufer([[0, 3], [1, 3], [2, 3], [3, 4], [4, 5]]).prufer_repr
[3, 3, 3, 4]
>>> Prufer([1, 0, 0]).prufer_repr
[1, 0, 0]
See Also
========
to_prufer
"""
if self._prufer_repr is None:
self._prufer_repr = self.to_prufer(self._tree_repr[:], self.nodes)
return self._prufer_repr
@property
def tree_repr(self):
"""Returns the tree representation of the Prufer object.
Examples
========
>>> from sympy.combinatorics.prufer import Prufer
>>> Prufer([[0, 3], [1, 3], [2, 3], [3, 4], [4, 5]]).tree_repr
[[0, 3], [1, 3], [2, 3], [3, 4], [4, 5]]
>>> Prufer([1, 0, 0]).tree_repr
[[1, 2], [0, 1], [0, 3], [0, 4]]
See Also
========
to_tree
"""
if self._tree_repr is None:
self._tree_repr = self.to_tree(self._prufer_repr[:])
return self._tree_repr
@property
def nodes(self):
"""Returns the number of nodes in the tree.
Examples
========
>>> from sympy.combinatorics.prufer import Prufer
>>> Prufer([[0, 3], [1, 3], [2, 3], [3, 4], [4, 5]]).nodes
6
>>> Prufer([1, 0, 0]).nodes
5
"""
return self._nodes
@property
def rank(self):
"""Returns the rank of the Prufer sequence.
Examples
========
>>> from sympy.combinatorics.prufer import Prufer
>>> p = Prufer([[0, 3], [1, 3], [2, 3], [3, 4], [4, 5]])
>>> p.rank
778
>>> p.next(1).rank
779
>>> p.prev().rank
777
See Also
========
prufer_rank, next, prev, size
"""
if self._rank is None:
self._rank = self.prufer_rank()
return self._rank
@property
def size(self):
"""Return the number of possible trees of this Prufer object.
Examples
========
>>> from sympy.combinatorics.prufer import Prufer
>>> Prufer([0]*4).size == Prufer([6]*4).size == 1296
True
See Also
========
prufer_rank, rank, next, prev
"""
return self.prev(self.rank).prev().rank + 1
@staticmethod
def to_prufer(tree, n):
"""Return the Prufer sequence for a tree given as a list of edges where
``n`` is the number of nodes in the tree.
Examples
========
>>> from sympy.combinatorics.prufer import Prufer
>>> a = Prufer([[0, 1], [0, 2], [0, 3]])
>>> a.prufer_repr
[0, 0]
>>> Prufer.to_prufer([[0, 1], [0, 2], [0, 3]], 4)
[0, 0]
See Also
========
prufer_repr: returns Prufer sequence of a Prufer object.
"""
d = defaultdict(int)
L = []
for edge in tree:
# Increment the value of the corresponding
# node in the degree list as we encounter an
# edge involving it.
d[edge[0]] += 1
d[edge[1]] += 1
for i in xrange(n - 2):
# find the smallest leaf
for x in xrange(n):
if d[x] == 1:
break
# find the node it was connected to
y = None
for edge in tree:
if x == edge[0]:
y = edge[1]
elif x == edge[1]:
y = edge[0]
if y is not None:
break
# record and update
L.append(y)
for j in (x, y):
d[j] -= 1
if not d[j]:
d.pop(j)
tree.remove(edge)
return L
@staticmethod
def to_tree(prufer):
"""Return the tree (as a list of edges) of the given Prufer sequence.
Examples
========
>>> from sympy.combinatorics.prufer import Prufer
>>> a = Prufer([0, 2], 4)
>>> a.tree_repr
[[0, 1], [0, 2], [2, 3]]
>>> Prufer.to_tree([0, 2])
[[0, 1], [0, 2], [2, 3]]
References
==========
.. [1] http://hamberg.no/erlend/2010/11/06/prufer-sequence/
See Also
========
tree_repr: returns tree representation of a Prufer object.
"""
tree = []
last = []
n = len(prufer) + 2
d = defaultdict(lambda: 1)
for p in prufer:
d[p] += 1
for i in prufer:
for j in xrange(n):
# find the smallest leaf (degree = 1)
if d[j] == 1:
break
# (i, j) is the new edge that we append to the tree
# and remove from the degree dictionary
d[i] -= 1
d[j] -= 1
tree.append(sorted([i, j]))
last = [i for i in xrange(n) if d[i] == 1] or [0, 1]
tree.append(last)
return tree
@staticmethod
def edges(*runs):
"""Return a list of edges and the number of nodes from the given runs
that connect nodes in an integer-labelled tree.
All node numbers will be shifted so that the minimum node is 0. It is
not a problem if edges are repeated in the runs; only unique edges are
returned. There is no assumption made about what the range of the node
labels should be, but all nodes from the smallest through the largest
must be present.
Examples
========
>>> from sympy.combinatorics.prufer import Prufer
>>> Prufer.edges([1, 2, 3], [2, 4, 5]) # a T
([[0, 1], [1, 2], [1, 3], [3, 4]], 5)
Duplicate edges are removed:
>>> Prufer.edges([0, 1, 2, 3], [1, 4, 5], [1, 4, 6]) # a K
([[0, 1], [1, 2], [1, 4], [2, 3], [4, 5], [4, 6]], 7)
"""
e = set()
nmin = runs[0][0]
for r in runs:
for i in range(len(r) - 1):
a, b = r[i: i + 2]
if b < a:
a, b = b, a
e.add((a, b))
rv = []
got = set()
nmin = nmax = None
for ei in e:
for i in ei:
got.add(i)
nmin = min(ei[0], nmin) if nmin is not None else ei[0]
nmax = max(ei[1], nmax) if nmax is not None else ei[1]
rv.append(list(ei))
missing = set(range(nmin, nmax + 1)) - got
if missing:
missing = [i + nmin for i in missing]
if len(missing) == 1:
msg = 'Node %s is missing.' % missing.pop()
else:
msg = 'Nodes %s are missing.' % list(sorted(missing))
raise ValueError(msg)
if nmin != 0:
for i, ei in enumerate(rv):
rv[i] = [n - nmin for n in ei]
nmax -= nmin
return sorted(rv), nmax + 1
def prufer_rank(self):
"""Computes the rank of a Prufer sequence.
Examples
========
>>> from sympy.combinatorics.prufer import Prufer
>>> a = Prufer([[0, 1], [0, 2], [0, 3]])
>>> a.prufer_rank()
0
See Also
========
rank, next, prev, size
"""
r = 0
p = 1
for i in xrange(self.nodes - 3, -1, -1):
r += p*self.prufer_repr[i]
p *= self.nodes
return r
@classmethod
def unrank(self, rank, n):
"""Finds the unranked Prufer sequence.
Examples
========
>>> from sympy.combinatorics.prufer import Prufer
>>> Prufer.unrank(0, 4)
Prufer([0, 0])
"""
n, rank = as_int(n), as_int(rank)
L = defaultdict(int)
for i in xrange(n - 3, -1, -1):
L[i] = rank % n
rank = (rank - L[i])//n
return Prufer([L[i] for i in xrange(len(L))])
def __new__(cls, *args, **kw_args):
"""The constructor for the Prufer object.
Examples
========
>>> from sympy.combinatorics.prufer import Prufer
A Prufer object can be constructed from a list of edges:
>>> a = Prufer([[0, 1], [0, 2], [0, 3]])
>>> a.prufer_repr
[0, 0]
If the number of nodes is given, no checking of the nodes will
be performed; it will be assumed that nodes 0 through n - 1 are
present:
>>> Prufer([[0, 1], [0, 2], [0, 3]], 4)
Prufer([[0, 1], [0, 2], [0, 3]], 4)
A Prufer object can be constructed from a Prufer sequence:
>>> b = Prufer([1, 3])
>>> b.tree_repr
[[0, 1], [1, 3], [2, 3]]
"""
ret_obj = Basic.__new__(cls, *args, **kw_args)
args = [list(args[0])]
if args[0] and iterable(args[0][0]):
if not args[0][0]:
raise ValueError(
'Prufer expects at least one edge in the tree.')
if len(args) > 1:
nnodes = args[1]
else:
nodes = set(flatten(args[0]))
nnodes = max(nodes) + 1
if nnodes != len(nodes):
missing = set(range(nnodes)) - nodes
if len(missing) == 1:
msg = 'Node %s is missing.' % missing.pop()
else:
msg = 'Nodes %s are missing.' % list(sorted(missing))
raise ValueError(msg)
ret_obj._tree_repr = [list(i) for i in args[0]]
ret_obj._nodes = nnodes
else:
ret_obj._prufer_repr = args[0]
ret_obj._nodes = len(ret_obj._prufer_repr) + 2
return ret_obj
def next(self, delta=1):
"""Generates the Prufer sequence that is delta beyond the current one.
Examples
========
>>> from sympy.combinatorics.prufer import Prufer
>>> a = Prufer([[0, 1], [0, 2], [0, 3]])
>>> b = a.next(1) # == a.next()
>>> b.tree_repr
[[0, 2], [0, 1], [1, 3]]
>>> b.rank
1
See Also
========
prufer_rank, rank, prev, size
"""
return Prufer.unrank(self.rank + delta, self.nodes)
def prev(self, delta=1):
"""Generates the Prufer sequence that is -delta before the current one.
Examples
========
>>> from sympy.combinatorics.prufer import Prufer
>>> a = Prufer([[0, 1], [1, 2], [2, 3], [1, 4]])
>>> a.rank
36
>>> b = a.prev()
>>> b
Prufer([1, 2, 0])
>>> b.rank
35
See Also
========
prufer_rank, rank, next, size
"""
return Prufer.unrank(self.rank - delta, self.nodes)
| gpl-3.0 |
ininex/geofire-python | resource/lib/python2.7/site-packages/werkzeug/__init__.py | 36 | 6920 | # -*- coding: utf-8 -*-
"""
werkzeug
~~~~~~~~
Werkzeug is the Swiss Army knife of Python web development.
It provides useful classes and functions for any WSGI application to make
the life of a python web developer much easier. All of the provided
classes are independent from each other so you can mix it with any other
library.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
from types import ModuleType
import sys
from werkzeug._compat import iteritems
# the version. Usually set automatically by a script.
__version__ = '0.11.15'
# This import magic raises concerns quite often, which is why the implementation
# and motivation are explained here in detail now.
#
# The majority of the functions and classes provided by Werkzeug work on the
# HTTP and WSGI layer. There is no useful grouping for those which is why
# they are all importable from "werkzeug" instead of the modules where they are
# implemented. The downside of that is that everything would then be loaded at
# once, even if unused.
#
# The implementation of a lazy-loading module in this file replaces the
# werkzeug package when imported from within. Attribute access to the werkzeug
# module will then lazily import from the modules that implement the objects.
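# A minimal illustration (not part of the original comments; secure_filename
# is just one of the names mapped below):
#
#     import werkzeug
#     werkzeug.secure_filename   # triggers a lazy import of werkzeug.utils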
# import mapping to objects in other modules
all_by_module = {
'werkzeug.debug': ['DebuggedApplication'],
'werkzeug.local': ['Local', 'LocalManager', 'LocalProxy', 'LocalStack',
'release_local'],
'werkzeug.serving': ['run_simple'],
'werkzeug.test': ['Client', 'EnvironBuilder', 'create_environ',
'run_wsgi_app'],
'werkzeug.testapp': ['test_app'],
'werkzeug.exceptions': ['abort', 'Aborter'],
'werkzeug.urls': ['url_decode', 'url_encode', 'url_quote',
'url_quote_plus', 'url_unquote', 'url_unquote_plus',
'url_fix', 'Href', 'iri_to_uri', 'uri_to_iri'],
'werkzeug.formparser': ['parse_form_data'],
'werkzeug.utils': ['escape', 'environ_property', 'append_slash_redirect',
'redirect', 'cached_property', 'import_string',
'dump_cookie', 'parse_cookie', 'unescape',
'format_string', 'find_modules', 'header_property',
'html', 'xhtml', 'HTMLBuilder', 'validate_arguments',
'ArgumentValidationError', 'bind_arguments',
'secure_filename'],
'werkzeug.wsgi': ['get_current_url', 'get_host', 'pop_path_info',
'peek_path_info', 'SharedDataMiddleware',
'DispatcherMiddleware', 'ClosingIterator', 'FileWrapper',
'make_line_iter', 'LimitedStream', 'responder',
'wrap_file', 'extract_path_info'],
'werkzeug.datastructures': ['MultiDict', 'CombinedMultiDict', 'Headers',
'EnvironHeaders', 'ImmutableList',
'ImmutableDict', 'ImmutableMultiDict',
'TypeConversionDict',
'ImmutableTypeConversionDict', 'Accept',
'MIMEAccept', 'CharsetAccept',
'LanguageAccept', 'RequestCacheControl',
'ResponseCacheControl', 'ETags', 'HeaderSet',
'WWWAuthenticate', 'Authorization',
'FileMultiDict', 'CallbackDict', 'FileStorage',
'OrderedMultiDict', 'ImmutableOrderedMultiDict'
],
'werkzeug.useragents': ['UserAgent'],
'werkzeug.http': ['parse_etags', 'parse_date', 'http_date', 'cookie_date',
'parse_cache_control_header', 'is_resource_modified',
'parse_accept_header', 'parse_set_header', 'quote_etag',
'unquote_etag', 'generate_etag', 'dump_header',
'parse_list_header', 'parse_dict_header',
'parse_authorization_header',
'parse_www_authenticate_header', 'remove_entity_headers',
'is_entity_header', 'remove_hop_by_hop_headers',
'parse_options_header', 'dump_options_header',
'is_hop_by_hop_header', 'unquote_header_value',
'quote_header_value', 'HTTP_STATUS_CODES'],
'werkzeug.wrappers': ['BaseResponse', 'BaseRequest', 'Request', 'Response',
'AcceptMixin', 'ETagRequestMixin',
'ETagResponseMixin', 'ResponseStreamMixin',
'CommonResponseDescriptorsMixin', 'UserAgentMixin',
'AuthorizationMixin', 'WWWAuthenticateMixin',
'CommonRequestDescriptorsMixin'],
'werkzeug.security': ['generate_password_hash', 'check_password_hash'],
# the undocumented easteregg ;-)
'werkzeug._internal': ['_easteregg']
}
# modules that should be imported when accessed as attributes of werkzeug
attribute_modules = frozenset(['exceptions', 'routing', 'script'])
object_origins = {}
for module, items in iteritems(all_by_module):
for item in items:
object_origins[item] = module
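# object_origins inverts all_by_module: it maps each exported name to the
# module that implements it, so the lazy __getattr__ below resolves an
# attribute with a single dict lookup.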
class module(ModuleType):
"""Automatically import objects from the modules."""
def __getattr__(self, name):
if name in object_origins:
module = __import__(object_origins[name], None, None, [name])
for extra_name in all_by_module[module.__name__]:
setattr(self, extra_name, getattr(module, extra_name))
return getattr(module, name)
elif name in attribute_modules:
__import__('werkzeug.' + name)
return ModuleType.__getattribute__(self, name)
def __dir__(self):
"""Just show what we want to show."""
result = list(new_module.__all__)
result.extend(('__file__', '__path__', '__doc__', '__all__',
'__docformat__', '__name__',
'__package__', '__version__'))
return result
# keep a reference to this module so that it's not garbage collected
old_module = sys.modules['werkzeug']
# setup the new module and patch it into the dict of loaded modules
new_module = sys.modules['werkzeug'] = module('werkzeug')
new_module.__dict__.update({
'__file__': __file__,
'__package__': 'werkzeug',
'__path__': __path__,
'__doc__': __doc__,
'__version__': __version__,
'__all__': tuple(object_origins) + tuple(attribute_modules),
'__docformat__': 'restructuredtext en'
})
# Due to bootstrapping issues we need to import exceptions here.
# Don't ask :-(
__import__('werkzeug.exceptions')
| mit |
virgree/odoo | openerp/addons/test_new_api/tests/test_new_fields.py | 69 | 14924 | #
# test cases for new-style fields
#
from datetime import date, datetime
from collections import defaultdict
from openerp.tests import common
from openerp.exceptions import except_orm
class TestNewFields(common.TransactionCase):
def test_00_basics(self):
""" test accessing new fields """
# find a discussion
discussion = self.env.ref('test_new_api.discussion_0')
# read field as a record attribute or as a record item
self.assertIsInstance(discussion.name, basestring)
self.assertIsInstance(discussion['name'], basestring)
self.assertEqual(discussion['name'], discussion.name)
# read it with method read()
values = discussion.read(['name'])[0]
self.assertEqual(values['name'], discussion.name)
def test_01_basic_get_assertion(self):
""" test item getter """
# field access works on single record
record = self.env.ref('test_new_api.message_0_0')
self.assertEqual(len(record), 1)
ok = record.body
# field access fails on multiple records
records = self.env['test_new_api.message'].search([])
assert len(records) > 1
with self.assertRaises(except_orm):
faulty = records.body
def test_01_basic_set_assertion(self):
""" test item setter """
# field assignment works on single record
record = self.env.ref('test_new_api.message_0_0')
self.assertEqual(len(record), 1)
record.body = 'OK'
# field assignment fails on multiple records
records = self.env['test_new_api.message'].search([])
assert len(records) > 1
with self.assertRaises(except_orm):
records.body = 'Faulty'
def test_10_computed(self):
""" check definition of computed fields """
# by default function fields are not stored and readonly
field = self.env['test_new_api.message']._fields['size']
self.assertFalse(field.store)
self.assertTrue(field.readonly)
field = self.env['test_new_api.message']._fields['name']
self.assertTrue(field.store)
self.assertTrue(field.readonly)
def test_10_non_stored(self):
""" test non-stored fields """
# find messages
for message in self.env['test_new_api.message'].search([]):
# check definition of field
self.assertEqual(message.size, len(message.body or ''))
# check recomputation after record is modified
size = message.size
message.write({'body': (message.body or '') + "!!!"})
self.assertEqual(message.size, size + 3)
# special case: computed field without dependency must be computed
record = self.env['test_new_api.mixed'].create({})
self.assertTrue(record.now)
def test_11_stored(self):
""" test stored fields """
# find the demo discussion
discussion = self.env.ref('test_new_api.discussion_0')
self.assertTrue(len(discussion.messages) > 0)
# check messages
name0 = discussion.name or ""
for message in discussion.messages:
self.assertEqual(message.name, "[%s] %s" % (name0, message.author.name))
# modify discussion name, and check again messages
discussion.name = name1 = 'Talking about stuff...'
for message in discussion.messages:
self.assertEqual(message.name, "[%s] %s" % (name1, message.author.name))
# switch message from discussion, and check again
name2 = 'Another discussion'
discussion2 = discussion.copy({'name': name2})
message2 = discussion.messages[0]
message2.discussion = discussion2
for message in discussion2.messages:
self.assertEqual(message.name, "[%s] %s" % (name2, message.author.name))
def test_12_recursive(self):
""" test recursively dependent fields """
Category = self.env['test_new_api.category']
abel = Category.create({'name': 'Abel'})
beth = Category.create({'name': 'Bethany'})
cath = Category.create({'name': 'Catherine'})
dean = Category.create({'name': 'Dean'})
ewan = Category.create({'name': 'Ewan'})
finn = Category.create({'name': 'Finnley'})
gabe = Category.create({'name': 'Gabriel'})
cath.parent = finn.parent = gabe
abel.parent = beth.parent = cath
dean.parent = ewan.parent = finn
self.assertEqual(abel.display_name, "Gabriel / Catherine / Abel")
self.assertEqual(beth.display_name, "Gabriel / Catherine / Bethany")
self.assertEqual(cath.display_name, "Gabriel / Catherine")
self.assertEqual(dean.display_name, "Gabriel / Finnley / Dean")
self.assertEqual(ewan.display_name, "Gabriel / Finnley / Ewan")
self.assertEqual(finn.display_name, "Gabriel / Finnley")
self.assertEqual(gabe.display_name, "Gabriel")
ewan.parent = cath
self.assertEqual(ewan.display_name, "Gabriel / Catherine / Ewan")
cath.parent = finn
self.assertEqual(ewan.display_name, "Gabriel / Finnley / Catherine / Ewan")
def test_12_cascade(self):
""" test computed field depending on computed field """
message = self.env.ref('test_new_api.message_0_0')
message.invalidate_cache()
double_size = message.double_size
self.assertEqual(double_size, message.size)
def test_13_inverse(self):
""" test inverse computation of fields """
Category = self.env['test_new_api.category']
abel = Category.create({'name': 'Abel'})
beth = Category.create({'name': 'Bethany'})
cath = Category.create({'name': 'Catherine'})
dean = Category.create({'name': 'Dean'})
ewan = Category.create({'name': 'Ewan'})
finn = Category.create({'name': 'Finnley'})
gabe = Category.create({'name': 'Gabriel'})
self.assertEqual(ewan.display_name, "Ewan")
ewan.display_name = "Abel / Bethany / Catherine / Erwan"
self.assertEqual(beth.parent, abel)
self.assertEqual(cath.parent, beth)
self.assertEqual(ewan.parent, cath)
self.assertEqual(ewan.name, "Erwan")
def test_14_search(self):
""" test search on computed fields """
discussion = self.env.ref('test_new_api.discussion_0')
# determine message sizes
sizes = set(message.size for message in discussion.messages)
# search for messages based on their size
for size in sizes:
messages0 = self.env['test_new_api.message'].search(
[('discussion', '=', discussion.id), ('size', '<=', size)])
messages1 = self.env['test_new_api.message'].browse()
for message in discussion.messages:
if message.size <= size:
messages1 += message
self.assertEqual(messages0, messages1)
def test_15_constraint(self):
""" test new-style Python constraints """
discussion = self.env.ref('test_new_api.discussion_0')
# remove oneself from discussion participants: we can no longer create
# messages in discussion
discussion.participants -= self.env.user
with self.assertRaises(Exception):
self.env['test_new_api.message'].create({'discussion': discussion.id, 'body': 'Whatever'})
# make sure that assertRaises() does not leave fields to recompute
self.assertFalse(self.env.has_todo())
# put back oneself into discussion participants: now we can create
# messages in discussion
discussion.participants += self.env.user
self.env['test_new_api.message'].create({'discussion': discussion.id, 'body': 'Whatever'})
def test_20_float(self):
""" test float fields """
record = self.env['test_new_api.mixed'].create({})
# assign value, and expect rounding
record.write({'number': 2.4999999999999996})
self.assertEqual(record.number, 2.50)
# same with field setter
record.number = 2.4999999999999996
self.assertEqual(record.number, 2.50)
def test_21_date(self):
""" test date fields """
record = self.env['test_new_api.mixed'].create({})
# one may assign False or None
record.date = None
self.assertFalse(record.date)
# one may assign date and datetime objects
record.date = date(2012, 05, 01)
self.assertEqual(record.date, '2012-05-01')
record.date = datetime(2012, 05, 01, 10, 45, 00)
self.assertEqual(record.date, '2012-05-01')
# one may assign dates in the default format, and it must be checked
record.date = '2012-05-01'
self.assertEqual(record.date, '2012-05-01')
with self.assertRaises(ValueError):
record.date = '12-5-1'
def test_22_selection(self):
""" test selection fields """
record = self.env['test_new_api.mixed'].create({})
# one may assign False or None
record.lang = None
self.assertFalse(record.lang)
# one may assign a value, and it must be checked
for language in self.env['res.lang'].search([]):
record.lang = language.code
with self.assertRaises(ValueError):
record.lang = 'zz_ZZ'
def test_23_relation(self):
""" test relation fields """
demo = self.env.ref('base.user_demo')
message = self.env.ref('test_new_api.message_0_0')
# check environment of record and related records
self.assertEqual(message.env, self.env)
self.assertEqual(message.discussion.env, self.env)
demo_env = self.env(user=demo)
self.assertNotEqual(demo_env, self.env)
# check environment of record and related records
self.assertEqual(message.env, self.env)
self.assertEqual(message.discussion.env, self.env)
# "migrate" message into demo_env, and check again
demo_message = message.sudo(demo)
self.assertEqual(demo_message.env, demo_env)
self.assertEqual(demo_message.discussion.env, demo_env)
# assign record's parent to a record in demo_env
message.discussion = message.discussion.copy({'name': 'Copy'})
# both message and its parent field must be in self.env
self.assertEqual(message.env, self.env)
self.assertEqual(message.discussion.env, self.env)
def test_24_reference(self):
""" test reference fields. """
record = self.env['test_new_api.mixed'].create({})
# one may assign False or None
record.reference = None
self.assertFalse(record.reference)
# one may assign a user or a partner...
record.reference = self.env.user
self.assertEqual(record.reference, self.env.user)
record.reference = self.env.user.partner_id
self.assertEqual(record.reference, self.env.user.partner_id)
# ... but no record from a model that starts with 'ir.'
with self.assertRaises(ValueError):
record.reference = self.env['ir.model'].search([], limit=1)
def test_25_related(self):
""" test related fields. """
message = self.env.ref('test_new_api.message_0_0')
discussion = message.discussion
# by default related fields are not stored
field = message._fields['discussion_name']
self.assertFalse(field.store)
self.assertFalse(field.readonly)
# check value of related field
self.assertEqual(message.discussion_name, discussion.name)
# change discussion name, and check result
discussion.name = 'Foo'
self.assertEqual(message.discussion_name, 'Foo')
# change discussion name via related field, and check result
message.discussion_name = 'Bar'
self.assertEqual(discussion.name, 'Bar')
self.assertEqual(message.discussion_name, 'Bar')
# search on related field, and check result
search_on_related = self.env['test_new_api.message'].search([('discussion_name', '=', 'Bar')])
search_on_regular = self.env['test_new_api.message'].search([('discussion.name', '=', 'Bar')])
self.assertEqual(search_on_related, search_on_regular)
# check that field attributes are copied
message_field = message.fields_get(['discussion_name'])['discussion_name']
discussion_field = discussion.fields_get(['name'])['name']
self.assertEqual(message_field['help'], discussion_field['help'])
def test_26_inherited(self):
""" test inherited fields. """
# a bunch of fields are inherited from res_partner
for user in self.env['res.users'].search([]):
partner = user.partner_id
for field in ('is_company', 'name', 'email', 'country_id'):
self.assertEqual(getattr(user, field), getattr(partner, field))
self.assertEqual(user[field], partner[field])
def test_30_read(self):
""" test computed fields as returned by read(). """
discussion = self.env.ref('test_new_api.discussion_0')
for message in discussion.messages:
display_name = message.display_name
size = message.size
data = message.read(['display_name', 'size'])[0]
self.assertEqual(data['display_name'], display_name)
self.assertEqual(data['size'], size)
def test_40_new(self):
""" test new records. """
discussion = self.env.ref('test_new_api.discussion_0')
# create a new message
message = self.env['test_new_api.message'].new()
self.assertFalse(message.id)
# assign some fields; should have no side effect
message.discussion = discussion
message.body = BODY = "May the Force be with you."
self.assertEqual(message.discussion, discussion)
self.assertEqual(message.body, BODY)
self.assertFalse(message.author)
self.assertNotIn(message, discussion.messages)
# check computed values of fields
self.assertEqual(message.name, "[%s] %s" % (discussion.name, ''))
self.assertEqual(message.size, len(BODY))
def test_41_defaults(self):
""" test default values. """
fields = ['discussion', 'body', 'author', 'size']
defaults = self.env['test_new_api.message'].default_get(fields)
self.assertEqual(defaults, {'author': self.env.uid})
defaults = self.env['test_new_api.mixed'].default_get(['number'])
self.assertEqual(defaults, {'number': 3.14})
class TestMagicFields(common.TransactionCase):
def test_write_date(self):
record = self.env['test_new_api.discussion'].create({'name': 'Booba'})
self.assertEqual(record.create_uid, self.env.user)
self.assertEqual(record.write_uid, self.env.user)
| agpl-3.0 |
n0trax/ansible | lib/ansible/modules/cloud/ovirt/ovirt_hosts.py | 22 | 21713 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: ovirt_hosts
short_description: Module to manage hosts in oVirt/RHV
version_added: "2.3"
author: "Ondra Machacek (@machacekondra)"
description:
- "Module to manage hosts in oVirt/RHV"
options:
name:
description:
- "Name of the host to manage."
required: true
state:
description:
- "State which should a host to be in after successful completion."
- "I(iscsilogin) and I(iscsidiscover) are supported since version 2.4."
choices: [
'present', 'absent', 'maintenance', 'upgraded', 'started',
'restarted', 'stopped', 'reinstalled', 'iscsidiscover', 'iscsilogin'
]
default: present
comment:
description:
- "Description of the host."
cluster:
description:
- "Name of the cluster, where host should be created."
address:
description:
- "Host address. It can be either FQDN (preferred) or IP address."
password:
description:
- "Password of the root. It's required in case C(public_key) is set to I(False)."
public_key:
description:
- "I(True) if the public key should be used to authenticate to host."
- "It's required in case C(password) is not set."
default: False
aliases: ['ssh_public_key']
kdump_integration:
description:
- "Specify if host will have enabled Kdump integration."
choices: ['enabled', 'disabled']
default: enabled
spm_priority:
description:
- "SPM priority of the host. Integer value from 1 to 10, where higher number means higher priority."
override_iptables:
description:
- "If True host iptables will be overridden by host deploy script."
- "Note that C(override_iptables) is I(false) by default in oVirt/RHV."
force:
description:
- "If True host will be forcibly moved to desired state."
default: False
override_display:
description:
- "Override the display address of all VMs on this host with specified address."
kernel_params:
description:
- "List of kernel boot parameters."
- "Following are most common kernel parameters used for host:"
- "Hostdev Passthrough & SR-IOV: intel_iommu=on"
- "Nested Virtualization: kvm-intel.nested=1"
- "Unsafe Interrupts: vfio_iommu_type1.allow_unsafe_interrupts=1"
- "PCI Reallocation: pci=realloc"
- "C(Note:)"
- "Modifying kernel boot parameters settings can lead to a host boot failure.
Please consult the product documentation before doing any changes."
- "Kernel boot parameters changes require host deploy and restart. The host needs
to be I(reinstalled) suceesfully and then to be I(rebooted) for kernel boot parameters
to be applied."
hosted_engine:
description:
- "If I(deploy) it means this host should deploy also hosted engine
components."
- "If I(undeploy) it means this host should un-deploy hosted engine
components and this host will not function as part of the High
Availability cluster."
power_management_enabled:
description:
- "Enable or disable power management of the host."
- "For more comprehensive setup of PM use C(ovirt_host_pm) module."
version_added: 2.4
activate:
description:
- "If C(state) is I(present) activate the host."
- "This parameter is good to disable, when you don't want to change
the state of host when using I(present) C(state)."
default: True
version_added: 2.4
iscsi:
description:
- "If C(state) is I(iscsidiscover) it means that the iscsi attribute is being
used to discover targets"
- "If C(state) is I(iscsilogin) it means that the iscsi attribute is being
used to login to the specified targets passed as part of the iscsi attribute"
version_added: "2.4"
check_upgrade:
description:
- "If I(true) and C(state) is I(upgraded) run check for upgrade
action before executing upgrade action."
default: True
version_added: 2.4
extends_documentation_fragment: ovirt
'''
EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:
# Add host with username/password supporting SR-IOV.
# Note that override_iptables is false by default in oVirt/RHV:
- ovirt_hosts:
cluster: Default
name: myhost
address: 10.34.61.145
password: secret
override_iptables: true
kernel_params:
- intel_iommu=on
# Add host using public key
- ovirt_hosts:
public_key: true
cluster: Default
name: myhost2
address: 10.34.61.145
override_iptables: true
# Deploy hosted engine host
- ovirt_hosts:
cluster: Default
name: myhost2
password: secret
address: 10.34.61.145
override_iptables: true
hosted_engine: deploy
# Maintenance
- ovirt_hosts:
state: maintenance
name: myhost
# Restart host using power management:
- ovirt_hosts:
state: restarted
name: myhost
# Upgrade host
- ovirt_hosts:
state: upgraded
name: myhost
# discover iscsi targets
- ovirt_hosts:
state: iscsidiscover
name: myhost
iscsi:
username: iscsi_user
password: secret
address: 10.34.61.145
port: 3260
# login to iscsi targets
- ovirt_hosts:
state: iscsilogin
name: myhost
iscsi:
username: iscsi_user
password: secret
address: 10.34.61.145
target: "iqn.2015-07.com.mlipchuk2.redhat:444"
port: 3260
# Reinstall host using public key
- ovirt_hosts:
state: reinstalled
name: myhost
public_key: true
# Remove host
- ovirt_hosts:
state: absent
name: myhost
force: True
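# A hedged sketch (power_management_enabled is documented above, but this
# example is not part of the original module docs):
- ovirt_hosts:
name: myhost
power_management_enabled: true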
'''
RETURN = '''
id:
description: ID of the host which is managed
returned: On success if host is found.
type: str
sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c
host:
description: "Dictionary of all the host attributes. Host attributes can be found on your oVirt/RHV instance
at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/host."
returned: On success if host is found.
type: dict
iscsi_targets:
description: "List of host iscsi targets"
returned: On success if host is found and state is iscsidiscover.
type: list
'''
import time
import traceback
try:
import ovirtsdk4.types as otypes
from ovirtsdk4.types import HostStatus as hoststate
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
BaseModule,
check_sdk,
create_connection,
equal,
get_id_by_name,
ovirt_full_argument_spec,
wait,
)
class HostsModule(BaseModule):
def build_entity(self):
return otypes.Host(
name=self.param('name'),
cluster=otypes.Cluster(
name=self.param('cluster')
) if self.param('cluster') else None,
comment=self.param('comment'),
address=self.param('address'),
root_password=self.param('password'),
ssh=otypes.Ssh(
authentication_method=otypes.SshAuthenticationMethod.PUBLICKEY,
) if self.param('public_key') else None,
kdump_status=otypes.KdumpStatus(
self.param('kdump_integration')
) if self.param('kdump_integration') else None,
spm=otypes.Spm(
priority=self.param('spm_priority'),
) if self.param('spm_priority') else None,
override_iptables=self.param('override_iptables'),
display=otypes.Display(
address=self.param('override_display'),
) if self.param('override_display') else None,
os=otypes.OperatingSystem(
custom_kernel_cmdline=' '.join(self.param('kernel_params')),
) if self.param('kernel_params') else None,
power_management=otypes.PowerManagement(
enabled=self.param('power_management_enabled'),
) if self.param('power_management_enabled') is not None else None,
)
def update_check(self, entity):
kernel_params = self.param('kernel_params')
return (
equal(self.param('comment'), entity.comment) and
equal(self.param('kdump_integration'), entity.kdump_status) and
equal(self.param('spm_priority'), entity.spm.priority) and
equal(self.param('power_management_enabled'), entity.power_management.enabled) and
equal(self.param('override_display'), getattr(entity.display, 'address', None)) and
equal(
sorted(kernel_params) if kernel_params else None,
sorted(entity.os.custom_kernel_cmdline.split(' '))
)
)
def pre_remove(self, entity):
self.action(
entity=entity,
action='deactivate',
action_condition=lambda h: h.status != hoststate.MAINTENANCE,
wait_condition=lambda h: h.status == hoststate.MAINTENANCE,
)
def post_reinstall(self, host):
wait(
service=self._service.service(host.id),
condition=lambda h: h.status != hoststate.MAINTENANCE,
fail_condition=failed_state,
wait=self.param('wait'),
timeout=self.param('timeout'),
)
def failed_state(host):
return host.status in [
hoststate.ERROR,
hoststate.INSTALL_FAILED,
hoststate.NON_RESPONSIVE,
hoststate.NON_OPERATIONAL,
]
def control_state(host_module):
host = host_module.search_entity()
if host is None:
return
state = host_module._module.params['state']
host_service = host_module._service.service(host.id)
if failed_state(host):
# In case host is in INSTALL_FAILED status, we can reinstall it:
if hoststate.INSTALL_FAILED == host.status and state != 'reinstalled':
raise Exception(
"Not possible to manage host '%s' in state '%s'." % (
host.name,
host.status
)
)
elif host.status in [
hoststate.REBOOT,
hoststate.CONNECTING,
hoststate.INITIALIZING,
hoststate.INSTALLING,
hoststate.INSTALLING_OS,
]:
wait(
service=host_service,
condition=lambda host: host.status == hoststate.UP,
fail_condition=failed_state,
)
elif host.status == hoststate.PREPARING_FOR_MAINTENANCE:
wait(
service=host_service,
condition=lambda host: host.status == hoststate.MAINTENANCE,
fail_condition=failed_state,
)
return host
def main():
argument_spec = ovirt_full_argument_spec(
state=dict(
choices=[
'present', 'absent', 'maintenance', 'upgraded', 'started',
'restarted', 'stopped', 'reinstalled', 'iscsidiscover', 'iscsilogin'
],
default='present',
),
name=dict(required=True),
comment=dict(default=None),
cluster=dict(default=None),
address=dict(default=None),
password=dict(default=None, no_log=True),
public_key=dict(default=False, type='bool', aliases=['ssh_public_key']),
kdump_integration=dict(default=None, choices=['enabled', 'disabled']),
spm_priority=dict(default=None, type='int'),
override_iptables=dict(default=None, type='bool'),
force=dict(default=False, type='bool'),
timeout=dict(default=600, type='int'),
override_display=dict(default=None),
kernel_params=dict(default=None, type='list'),
hosted_engine=dict(default=None, choices=['deploy', 'undeploy']),
power_management_enabled=dict(default=None, type='bool'),
activate=dict(default=True, type='bool'),
iscsi=dict(default=None, type='dict'),
check_upgrade=dict(default=True, type='bool'),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['state', 'iscsidiscover', ['iscsi']],
['state', 'iscsilogin', ['iscsi']]
]
)
check_sdk(module)
try:
auth = module.params.pop('auth')
connection = create_connection(auth)
hosts_service = connection.system_service().hosts_service()
hosts_module = HostsModule(
connection=connection,
module=module,
service=hosts_service,
)
state = module.params['state']
host = control_state(hosts_module)
if state == 'present':
ret = hosts_module.create(
deploy_hosted_engine=(
module.params.get('hosted_engine') == 'deploy'
) if module.params.get('hosted_engine') is not None else None,
result_state=hoststate.UP if host is None else None,
fail_condition=failed_state if host is None else lambda h: False,
)
if module.params['activate'] and host is not None:
ret = hosts_module.action(
action='activate',
action_condition=lambda h: h.status != hoststate.UP,
wait_condition=lambda h: h.status == hoststate.UP,
fail_condition=failed_state,
)
elif state == 'absent':
ret = hosts_module.remove()
elif state == 'maintenance':
hosts_module.action(
action='deactivate',
action_condition=lambda h: h.status != hoststate.MAINTENANCE,
wait_condition=lambda h: h.status == hoststate.MAINTENANCE,
fail_condition=failed_state,
)
ret = hosts_module.create()
elif state == 'upgraded':
result_state = hoststate.MAINTENANCE if host.status == hoststate.MAINTENANCE else hoststate.UP
events_service = connection.system_service().events_service()
last_event = events_service.list(max=1)[0]
if module.params['check_upgrade']:
hosts_module.action(
action='upgrade_check',
action_condition=lambda host: not host.update_available,
wait_condition=lambda host: host.update_available or (
len([
event
for event in events_service.list(
from_=int(last_event.id),
search='type=885 and host.name=%s' % host.name,
)
]) > 0
),
fail_condition=lambda host: len([
event
for event in events_service.list(
from_=int(last_event.id),
search='type=839 or type=887 and host.name=%s' % host.name,
)
]) > 0,
)
# Set to False, because upgrade_check isn't a 'changing' action:
hosts_module._changed = False
ret = hosts_module.action(
action='upgrade',
action_condition=lambda h: h.update_available,
wait_condition=lambda h: h.status == result_state,
post_action=lambda h: time.sleep(module.params['poll_interval']),
fail_condition=failed_state,
)
elif state == 'iscsidiscover':
host_id = get_id_by_name(hosts_service, module.params['name'])
iscsi_targets = hosts_service.service(host_id).iscsi_discover(
iscsi=otypes.IscsiDetails(
port=int(module.params['iscsi']['port']) if module.params['iscsi']['port'].isdigit() else None,
username=module.params['iscsi']['username'],
password=module.params['iscsi']['password'],
address=module.params['iscsi']['address'],
),
)
ret = {
'changed': False,
'id': host_id,
'iscsi_targets': iscsi_targets,
}
elif state == 'iscsilogin':
host_id = get_id_by_name(hosts_service, module.params['name'])
ret = hosts_module.action(
action='iscsi_login',
iscsi=otypes.IscsiDetails(
port=int(module.params['iscsi']['port']) if module.params['iscsi']['port'].isdigit() else None,
username=module.params['iscsi']['username'],
password=module.params['iscsi']['password'],
address=module.params['iscsi']['address'],
target=module.params['iscsi']['target'],
),
)
elif state == 'started':
ret = hosts_module.action(
action='fence',
action_condition=lambda h: h.status == hoststate.DOWN,
wait_condition=lambda h: h.status in [hoststate.UP, hoststate.MAINTENANCE],
fail_condition=failed_state,
fence_type='start',
)
elif state == 'stopped':
hosts_module.action(
action='deactivate',
action_condition=lambda h: h.status not in [hoststate.MAINTENANCE, hoststate.DOWN],
wait_condition=lambda h: h.status in [hoststate.MAINTENANCE, hoststate.DOWN],
fail_condition=failed_state,
)
ret = hosts_module.action(
action='fence',
action_condition=lambda h: h.status != hoststate.DOWN,
wait_condition=lambda h: h.status == hoststate.DOWN if module.params['wait'] else True,
fail_condition=failed_state,
fence_type='stop',
)
elif state == 'restarted':
ret = hosts_module.action(
action='fence',
wait_condition=lambda h: h.status == hoststate.UP,
fail_condition=failed_state,
fence_type='restart',
)
elif state == 'reinstalled':
# Deactivate host if not in maintenance:
hosts_module.action(
action='deactivate',
action_condition=lambda h: h.status not in [hoststate.MAINTENANCE, hoststate.DOWN],
wait_condition=lambda h: h.status in [hoststate.MAINTENANCE, hoststate.DOWN],
fail_condition=failed_state,
)
# Reinstall host:
hosts_module.action(
action='install',
action_condition=lambda h: h.status == hoststate.MAINTENANCE,
post_action=hosts_module.post_reinstall,
wait_condition=lambda h: h.status == hoststate.MAINTENANCE,
fail_condition=failed_state,
host=otypes.Host(
override_iptables=module.params['override_iptables'],
) if module.params['override_iptables'] else None,
root_password=module.params['password'],
ssh=otypes.Ssh(
authentication_method=otypes.SshAuthenticationMethod.PUBLICKEY,
) if module.params['public_key'] else None,
deploy_hosted_engine=(
module.params.get('hosted_engine') == 'deploy'
) if module.params.get('hosted_engine') is not None else None,
undeploy_hosted_engine=(
module.params.get('hosted_engine') == 'undeploy'
) if module.params.get('hosted_engine') is not None else None,
)
# Activate host after reinstall:
ret = hosts_module.action(
action='activate',
action_condition=lambda h: h.status == hoststate.MAINTENANCE,
wait_condition=lambda h: h.status == hoststate.UP,
fail_condition=failed_state,
)
module.exit_json(**ret)
except Exception as e:
module.fail_json(msg=str(e), exception=traceback.format_exc())
finally:
connection.close(logout=auth.get('token') is None)
if __name__ == "__main__":
main()
| gpl-3.0 |
raydive/kame | kame.py | 1 | 9919 | #!/usr/bin/env python
# encoding: utf-8
class Kame(object):
"""
A turtle that moves along the road.
"""
class Node(object):
"""
A node that holds a single character.
Each node has top, bottom, left, and right branches.
"""
def __init__(self, value):
"""
Node constructor.
"""
self.value = value
self.top = None
self.bottom = None
self.right = None
self.left = None
def set_nodes(self, top=None, bottom=None, right=None, left=None):
"""
Set the top, bottom, left, and right neighbour nodes.
"""
self.top = top
self.bottom = bottom
self.right = right
self.left = left
def next_node(self, num):
"""
Get the next node to move to.
"""
# Python has no switch-case statement
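# Direction encoding (inferred from the branches below): 0 = right,
# 1 = down, 2 = left, 3 = up; -1..-4 are the same headings reached by
# turning left past zero (-4 behaves like 0, -3 like 1, and so on).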
if num == 0 or num == -4:
return self.right
elif num == 1 or num == -3:
return self.bottom
elif num == 2 or num == -2:
return self.left
elif num == 3 or num == -1:
return self.top
else:
raise ValueError('invalid value')
@staticmethod
def make_nodes():
"""
Build the map used in this puzzle.
"""
data1 = [Kame.Node(x) for x in "ABCDEFGHIJK"]
data2 = [Kame.Node(x) for x in "LMNOPQRSTUV"]
data3 = [Kame.Node(x) for x in "WXYZabcdefg"]
data4 = [Kame.Node(x) for x in "hij"]
data5 = [Kame.Node(x) for x in "klm"]
data6 = [Kame.Node(x) for x in "nop"]
data7 = [Kame.Node(x) for x in "qrs"]
data8 = [Kame.Node(x) for x in "tuv"]
data9 = [Kame.Node(x) for x in "wxy"]
data10 = [Kame.Node(x) for x in "z01"]
data11 = [Kame.Node(x) for x in "234"]
data12 = [Kame.Node(x) for x in "567"]
for i in range(0, len(data1)):
left = None if i == 0 else data1[i-1]
right = None if (i+1) == len(data1) else data1[i+1]
data1[i].set_nodes(left=left, bottom=data2[i], right=right)
for i in range(0, len(data2)):
left = None if i == 0 else data2[i-1]
right = None if (i+1) == len(data2) else data2[i+1]
data2[i].set_nodes(top=data1[i], left=left, bottom=data3[i], right=right)
for i in range(0, len(data3)):
left = None if i == 0 else data3[i-1]
right = None if (i+1) == len(data3) else data3[i+1]
bottom = None
if i in range(0, 3):
bottom = data4[i]
elif i in range(len(data3)-3, len(data3)):
bottom = data12[len(data3)-i-1]
data3[i].set_nodes(top=data2[i], left=left, bottom=bottom, right=right)
for i in range(0, len(data4)):
left = None if i == 0 else data4[i-1]
right = None if (i+1) == len(data4) else data4[i+1]
bottom = data5[i]
top = data3[i]
data4[i].set_nodes(top=top, left=left, bottom=bottom, right=right)
for i in range(0, len(data5)):
left = None if i == 0 else data5[i-1]
right = None if (i+1) == len(data5) else data5[i+1]
bottom = data6[i]
top = data4[i]
data5[i].set_nodes(top=top, left=left, bottom=bottom, right=right)
for i in range(0, len(data6)):
left = None if i == 0 else data6[i-1]
right = None if (i+1) == len(data6) else data6[i+1]
bottom = data7[i]
top = data5[i]
data6[i].set_nodes(top=top, left=left, bottom=bottom, right=right)
for i in range(0, len(data7)):
left = None if i == 0 else data7[i-1]
right = None if (i+1) == len(data7) else data7[i+1]
bottom = data8[i]
top = data6[i]
data7[i].set_nodes(top=top, left=left, bottom=bottom, right=right)
for i in range(0, len(data8)):
left = None if i == 0 else data8[i-1]
right = None if (i+1) == len(data8) else data8[i+1]
bottom = data9[i]
top = data7[i]
data8[i].set_nodes(top=top, left=left, bottom=bottom, right=right)
for i in range(0, len(data9)):
left = None if i == 0 else data9[i-1]
right = None if (i+1) == len(data9) else data9[i+1]
bottom = data10[i]
top = data8[i]
data9[i].set_nodes(top=top, left=left, bottom=bottom, right=right)
for i in range(0, len(data10)):
left = None if i == 0 else data10[i-1]
right = None if (i+1) == len(data10) else data10[i+1]
bottom = data11[i]
top = data9[i]
data10[i].set_nodes(top=top, left=left, bottom=bottom, right=right)
for i in range(0, len(data11)):
left = None if i == 0 else data11[i-1]
right = None if (i+1) == len(data11) else data11[i+1]
bottom = data12[i]
top = data10[i]
data11[i].set_nodes(top=top, left=left, bottom=bottom, right=right)
for i in range(0, len(data12)):
left = None if i == 0 else data12[i-1]
right = None if (i+1) == len(data12) else data12[i+1]
bottom = data3[len(data3)-1 - i]
top = data11[i]
data12[i].set_nodes(top=top, left=left, bottom=bottom, right=right)
return data1[0]
# Kame クラス
def __init__(self):
"""
Constructor.
"""
self.now_node = Kame.Node.make_nodes()
self.next_direction = 0
def execute(self, commands):
"""
Execute the commands one at a time.
"""
result = [self.now_node.value]
for c in commands:
if c == 'L':
self.L()
elif c == 'R':
self.R()
else:
i = int(c, 16)
result += self.forward(i)
if '?' in result:
break
return "".join(result)
def forward(self, i):
"""
Move forward i steps in the current direction.
"""
result = []
for num in range(0, i):
prev_node = self.now_node
self.now_node = self.now_node.next_node(self.next_direction)
if self.now_node is None:
result.append('?')
break
result.append(self.now_node.value)
if self.__is_bottom_to_bottom(prev_node):
# If this node is joined to the previous node through its bottom link, reverse the direction of travel
self.__turn()
return result
def R(self):
"""
Turn 90 degrees to the right.
"""
self.next_direction += 1
if self.next_direction == 4:
self.next_direction = 0
self.next_node = self.now_node.next_node(self.next_direction)
def L(self):
"""
Turn 90 degrees to the left.
"""
self.next_direction -= 1
if self.next_direction == -5:
self.next_direction = 3
self.next_node = self.now_node.next_node(self.next_direction)
def __turn(self):
"""
Reverse the current direction of travel.
"""
self.L()
self.L()
def __is_bottom_to_bottom(self, prev_node):
"""
Return True when the move was between two nodes whose bottom links are joined to each other.
"""
return self.now_node is not None \
and self.now_node.value in ['e','f','g','5','6','7'] \
and self.now_node.bottom is prev_node
def test(commands, answer):
kame = Kame()
print(commands)
result = kame.execute(commands)
if result == answer:
print(answer + " OK")
else:
print(answer + ' ' + result + ' NG')
if __name__ == "__main__":
test( "2RcL3LL22", "ABCNYjmpsvy147edcbcdef" )
test( "L3R4L5RR5R3L5", "A?" )
test( "2ReLLe", "ABCNYjmpsvy147eTITe741yvspmjYNC" )
test( "1ReRRe", "ABMXilorux036fUJUf630xuroliXMB" )
test( "ReRRe", "ALWhknqtwz25gVKVg52zwtqnkhWLA" )
test( "f", "ABCDEFGHIJK?" )
test( "Rf", "ALWhknqtwz25gVK?" )
test( "1Rf", "ABMXilorux036fUJ?" )
test( "2Rf", "ABCNYjmpsvy147eTI?" )
test( "aR1RaL1LaR1R2L1L2", "ABCDEFGHIJKVUTSRQPONMLWXYZabcdefg567432" )
test( "2R1R2L1L2R1R2L1L2R1R2L1L2R1R2L1L2", "ABCNMLWXYjihklmponqrsvutwxy" )
test( "2R4R2L4L2R4R2L4L2R4R2L4L2", "ABCNYjmlknqtwxy147efgVK?" )
test( "R1L2R4R2L4L2R4R2L4L2R4R2L4L2", "ALMNYjmponqtwz0147eTUVK?" )
test( "R2L2R4R2L4L2R4R2L4L2R4R2L4L2", "ALWXYjmpsrqtwz2347eTIJK?" )
test( "R3L2R4R2L4L2R4R2L4L2R4R2L4L2", "ALWhijmpsvutwz2567eTI?" )
test( "R5L2L5L1LaR1L4L5", "ALWhknopmjYNCBMXilorux0325gVKJIHGF" )
test( "1R2L4L2R4R2L4L2R4", "ABMXYZabQFGHIJUfg?" )
test( "2R2L4L2R4R2L4L2R4", "ABCNYZabcRGHIJKVg?" )
test( "3R2L4L2R4R2L4L2R4", "ABCDOZabcdSHIJK?" )
test( "4R2L4L2R4R2L4L2R4", "ABCDEPabcdeTIJK?" )
test( "5R2L4L2R4R2L4L2R4", "ABCDEFQbcdefUJK?" )
test( "LLL1RRR1LLL1RRR2R1", "ALMXYZ?" )
test( "R3RRR3", "ALWhij?" )
test( "1LLL4RRR1LR1RL1", "ABMXilm?" )
test( "R2L1R2L1R3R4", "ALWXilmpsvut?" )
test( "7R4f47LLLc6R9L", "ABCDEFGHSd?" )
test( "5RR868L8448LL4R6", "ABCDEFEDCBA?" )
test( "42Rd1RLLa7L5", "ABCDEFGRc?" )
test( "RRLL6RLR1L5d12LaLRRL529L", "ABCDEFGRSTUV?" )
test( "RLR7L6LL1LRRRcRL52R", "ALWhknqtuv?" )
test( "1RLR8RLR1R437L99636R", "ABMXiloruxwtqnkhWLA?" )
test( "LLL2L3La9Le5LRR", "ALWXYZOD?" )
test( "R1LcRR491", "ALMNOPQRSTUV?" )
test( "R8L1R1R512L8RLLReRf", "ALWhknqtwx0z?" )
test( "1RcL8f1L29a5", "ABMXilorux036fedcbaZYXW?" )
test( "R822LeL46LL39LL", "ALWhknqtwz25gfedcbaZYXW?" )
test( "9R3L5LRRLb5R3L7cLLLR4L", "ABCDEFGHIJUf65?" )
test( "7LLRRR2R3R69Lf76eR2L", "ABCDEFGHSdcbaPE?" )
test( "8RRRLL3Le", "ABCDEFGHITe765?" )
test( "8R5RLL6LbL4LL5bL", "ABCDEFGHITe7410z?" )
test( "6LR2R1LR5LRLRL484L63", "ABCDEFGHITe741yxw?" )
| mit |
ocampocj/cloud-custodian | tests/test_health.py | 6 | 2197 | # Copyright 2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function, unicode_literals
from .common import BaseTest
class HealthResource(BaseTest):
def test_health_query(self):
session_factory = self.replay_flight_data("test_health_query")
p = self.load_policy(
{"name": "account-health-query", "resource": "health-event"},
session_factory=session_factory,
)
resources = p.run()
self.assertEqual(len(resources), 0)
def test_health_resource_query(self):
session_factory = self.replay_flight_data("test_health_resource_query")
p = self.load_policy(
{
"name": "account-health-ec2-query",
"resource": "health-event",
"query": [{"services": "EC2"}],
},
session_factory=session_factory,
)
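        # The policy dict above corresponds to a standalone policy file of
        # roughly this shape (illustrative YAML, not loaded by the test):
        #
        #   policies:
        #     - name: account-health-ec2-query
        #       resource: health-event
        #       query:
        #         - services: EC2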
resources = p.run()
self.assertEqual(len(resources), 1)
self.assertEqual(resources[0]["service"], "EC2")
def test_health_augment(self):
session_factory = self.replay_flight_data("test_health_augment")
p = self.load_policy(
{
"name": "account-health-augment",
"resource": "health-event",
"query": [{"services": ["BILLING", "IAM"]}],
},
session_factory=session_factory,
)
resources = p.run()
for r in resources:
self.assertTrue("Description" in r)
self.assertTrue(
(r["eventTypeCategory"] == "accountNotification") ^ ("AffectedEntities" in r)
)
| apache-2.0 |
pombredanne/pyjs | examples/mail/AboutDialog.py | 6 | 2286 | from pyjamas.ui.Button import Button
from pyjamas.ui.DialogBox import DialogBox
from pyjamas.ui.DockPanel import DockPanel
from pyjamas.ui.HorizontalPanel import HorizontalPanel
from pyjamas.ui.HTML import HTML
from pyjamas.ui.Image import Image
from pyjamas.ui import KeyboardListener
from pyjamas.ui.Widget import Widget
from pyjamas.ui import HasAlignment
class AboutDialog(DialogBox):
LOGO_IMAGE = "http://trac.pyworks.org/pyjamas/chrome/site/pyjamas-logo-small.png"
def __init__(self):
DialogBox.__init__(self)
# Use this opportunity to set the dialog's caption.
self.setText("About the Mail Sample")
# Create a DockPanel to contain the 'about' label and the 'OK' button.
outer = DockPanel()
outer.setSpacing(4)
outer.add(Image(AboutDialog.LOGO_IMAGE), DockPanel.WEST)
# Create the 'OK' button, along with a listener that hides the dialog
# when the button is clicked. Adding it to the 'south' position within
# the dock causes it to be placed at the bottom.
buttonPanel = HorizontalPanel()
buttonPanel.setHorizontalAlignment(HasAlignment.ALIGN_RIGHT)
buttonPanel.add(Button("Close", self))
outer.add(buttonPanel, DockPanel.SOUTH)
# Create the 'about' label. Placing it in the 'rest' position within the
# dock causes it to take up any remaining space after the 'OK' button
# has been laid out.
textplain = "This sample application demonstrates the construction "
textplain += "of a complex user interface using pyjamas' built-in widgets. Have a look "
textplain += "at the code to see how easy it is to build your own apps!"
text = HTML(textplain)
text.setStyleName("mail-AboutText")
outer.add(text, DockPanel.CENTER)
# Add a bit of spacing and margin to the dock to keep the components from
# being placed too closely together.
outer.setSpacing(8)
self.setWidget(outer)
def onClick(self, sender):
self.hide()
def onKeyDownPreview(self, key, modifiers):
# Use the popup's key preview hooks to close the dialog when either
# enter or escape is pressed.
if (key == KeyboardListener.KEY_ESCAPE or key == KeyboardListener.KEY_ENTER):
self.hide()
return True
| apache-2.0 |
benesch/pip | pip/_vendor/packaging/requirements.py | 5 | 4319 | # This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import string
import re
from pip._vendor.pyparsing import stringStart, stringEnd, originalTextFor, ParseException
from pip._vendor.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
from pip._vendor.pyparsing import Literal as L # noqa
from pip._vendor.six.moves.urllib import parse as urlparse
from .markers import MARKER_EXPR, Marker
from .specifiers import LegacySpecifier, Specifier, SpecifierSet
class InvalidRequirement(ValueError):
"""
An invalid requirement was found, users should refer to PEP 508.
"""
ALPHANUM = Word(string.ascii_letters + string.digits)
LBRACKET = L("[").suppress()
RBRACKET = L("]").suppress()
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
COMMA = L(",").suppress()
SEMICOLON = L(";").suppress()
AT = L("@").suppress()
PUNCTUATION = Word("-_.")
IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER
URI = Regex(r'[^ ]+')("url")
URL = (AT + URI)
EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE),
joinString=",", adjacent=False)("_raw_spec")
_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '')
VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
lambda s, l, t: Marker(s[t._original_start:t._original_end])
)
MARKER_SEPERATOR = SEMICOLON
MARKER = MARKER_SEPERATOR + MARKER_EXPR
VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)
NAMED_REQUIREMENT = \
NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
class Requirement(object):
"""Parse a requirement.
Parse a given requirement string into its parts, such as name, specifier,
URL, and extras. Raises InvalidRequirement on a badly-formed requirement
string.
"""
# TODO: Can we test whether something is contained within a requirement?
# If so how do we do that? Do we need to test against the _name_ of
# the thing as well as the version? What about the markers?
# TODO: Can we normalize the name and extra name?
def __init__(self, requirement_string):
try:
req = REQUIREMENT.parseString(requirement_string)
except ParseException as e:
raise InvalidRequirement(
"Invalid requirement, parse error at \"{0!r}\"".format(
requirement_string[e.loc:e.loc + 8]))
self.name = req.name
if req.url:
parsed_url = urlparse.urlparse(req.url)
if not (parsed_url.scheme and parsed_url.netloc) or (
not parsed_url.scheme and not parsed_url.netloc):
raise InvalidRequirement("Invalid URL given")
self.url = req.url
else:
self.url = None
self.extras = set(req.extras.asList() if req.extras else [])
self.specifier = SpecifierSet(req.specifier)
self.marker = req.marker if req.marker else None
def __str__(self):
parts = [self.name]
if self.extras:
parts.append("[{0}]".format(",".join(sorted(self.extras))))
if self.specifier:
parts.append(str(self.specifier))
if self.url:
parts.append("@ {0}".format(self.url))
if self.marker:
parts.append("; {0}".format(self.marker))
return "".join(parts)
def __repr__(self):
return "<Requirement({0!r})>".format(str(self))
| mit |
makielab/django-oscar | oscar/apps/offer/migrations/0019_auto__del_shippingbenefit__add_field_benefit_proxy_class__chg_field_be.py | 17 | 16530 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Benefit.proxy_class'
db.add_column('offer_benefit', 'proxy_class',
self.gf('django.db.models.fields.CharField')(default=None, max_length=255, unique=True, null=True, blank=True),
keep_default=False)
# Changing field 'Benefit.type'
db.alter_column('offer_benefit', 'type', self.gf('django.db.models.fields.CharField')(max_length=128, null=True))
def backwards(self, orm):
# Deleting field 'Benefit.proxy_class'
db.delete_column('offer_benefit', 'proxy_class')
# Changing field 'Benefit.type'
db.alter_column('offer_benefit', 'type', self.gf('django.db.models.fields.CharField')(default='', max_length=128))
models = {
'catalogue.attributeentity': {
'Meta': {'object_name': 'AttributeEntity'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'}),
'type': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'entities'", 'to': "orm['catalogue.AttributeEntityType']"})
},
'catalogue.attributeentitytype': {
'Meta': {'object_name': 'AttributeEntityType'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'blank': 'True'})
},
'catalogue.attributeoption': {
'Meta': {'object_name': 'AttributeOption'},
'group': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'options'", 'to': "orm['catalogue.AttributeOptionGroup']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'option': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'catalogue.attributeoptiongroup': {
'Meta': {'object_name': 'AttributeOptionGroup'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'catalogue.category': {
'Meta': {'ordering': "['full_name']", 'object_name': 'Category'},
'depth': ('django.db.models.fields.PositiveIntegerField', [], {}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'full_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.files.ImageField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'numchild': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'path': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'})
},
'catalogue.option': {
'Meta': {'object_name': 'Option'},
'code': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'Required'", 'max_length': '128'})
},
'catalogue.product': {
'Meta': {'ordering': "['-date_created']", 'object_name': 'Product'},
'attributes': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.ProductAttribute']", 'through': "orm['catalogue.ProductAttributeValue']", 'symmetrical': 'False'}),
'categories': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Category']", 'through': "orm['catalogue.ProductCategory']", 'symmetrical': 'False'}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'date_updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_discountable': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'parent': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'variants'", 'null': 'True', 'to': "orm['catalogue.Product']"}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductClass']", 'null': 'True'}),
'product_options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'recommended_products': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Product']", 'symmetrical': 'False', 'through': "orm['catalogue.ProductRecommendation']", 'blank': 'True'}),
'related_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'relations'", 'blank': 'True', 'to': "orm['catalogue.Product']"}),
'score': ('django.db.models.fields.FloatField', [], {'default': '0.0', 'db_index': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255'}),
'status': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '128', 'null': 'True', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'upc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True', 'blank': 'True'})
},
'catalogue.productattribute': {
'Meta': {'ordering': "['code']", 'object_name': 'ProductAttribute'},
'code': ('django.db.models.fields.SlugField', [], {'max_length': '128'}),
'entity_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntityType']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'option_group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOptionGroup']", 'null': 'True', 'blank': 'True'}),
'product_class': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'attributes'", 'null': 'True', 'to': "orm['catalogue.ProductClass']"}),
'required': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'text'", 'max_length': '20'})
},
'catalogue.productattributevalue': {
'Meta': {'object_name': 'ProductAttributeValue'},
'attribute': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.ProductAttribute']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'attribute_values'", 'to': "orm['catalogue.Product']"}),
'value_boolean': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'value_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'value_entity': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeEntity']", 'null': 'True', 'blank': 'True'}),
'value_float': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value_integer': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'value_option': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.AttributeOption']", 'null': 'True', 'blank': 'True'}),
'value_richtext': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'value_text': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
},
'catalogue.productcategory': {
'Meta': {'ordering': "['-is_canonical']", 'object_name': 'ProductCategory'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Category']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_canonical': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'product': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'catalogue.productclass': {
'Meta': {'ordering': "['name']", 'object_name': 'ProductClass'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'options': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['catalogue.Option']", 'symmetrical': 'False', 'blank': 'True'}),
'requires_shipping': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '128'}),
'track_stock': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'catalogue.productrecommendation': {
'Meta': {'object_name': 'ProductRecommendation'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'primary': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'primary_recommendations'", 'to': "orm['catalogue.Product']"}),
'ranking': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'}),
'recommendation': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['catalogue.Product']"})
},
'offer.benefit': {
'Meta': {'object_name': 'Benefit'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_affected_items': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'proxy_class': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'range': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['offer.Range']", 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'value': ('oscar.models.fields.PositiveDecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'})
},
'offer.condition': {
'Meta': {'object_name': 'Condition'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'proxy_class': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'}),
'range': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['offer.Range']", 'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'blank': 'True'}),
'value': ('oscar.models.fields.PositiveDecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'})
},
'offer.conditionaloffer': {
'Meta': {'ordering': "['-priority']", 'object_name': 'ConditionalOffer'},
'benefit': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['offer.Benefit']"}),
'condition': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['offer.Condition']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'end_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_basket_applications': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'max_discount': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '12', 'decimal_places': '2', 'blank': 'True'}),
'max_global_applications': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'max_user_applications': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'num_applications': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'num_orders': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'offer_type': ('django.db.models.fields.CharField', [], {'default': "'Site'", 'max_length': '128'}),
'priority': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'redirect_url': ('oscar.models.fields.ExtendedURLField', [], {'max_length': '200', 'blank': 'True'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '128', 'unique': 'True', 'null': 'True'}),
'start_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'status': ('django.db.models.fields.CharField', [], {'default': "'Open'", 'max_length': '64'}),
'total_discount': ('django.db.models.fields.DecimalField', [], {'default': "'0.00'", 'max_digits': '12', 'decimal_places': '2'})
},
'offer.range': {
'Meta': {'object_name': 'Range'},
'classes': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'classes'", 'blank': 'True', 'to': "orm['catalogue.ProductClass']"}),
'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'excluded_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'excludes'", 'blank': 'True', 'to': "orm['catalogue.Product']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'included_categories': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'includes'", 'blank': 'True', 'to': "orm['catalogue.Category']"}),
'included_products': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'includes'", 'blank': 'True', 'to': "orm['catalogue.Product']"}),
'includes_all_products': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'}),
'proxy_class': ('django.db.models.fields.CharField', [], {'default': 'None', 'max_length': '255', 'unique': 'True', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['offer']
| bsd-3-clause |
sappjw/gourmet | gourmet/plugins/field_editor/fieldEditor.py | 6 | 11240 | import gtk, gobject, os.path
import gourmet.gglobals as gglobals
import gourmet.convert as convert
from gourmet.gtk_extras import cb_extras as cb
from gourmet.gtk_extras import dialog_extras as de
from gettext import ngettext
from gettext import gettext as _
class FieldEditor:
"""A generic "value" editor for mucking about with the database.
"""
values = 'category','cuisine','source','link'
def __init__ (self, rd, rg):
self.field = None; self.other_field = None
self.rd = rd; self.rg = rg
self.ui = gtk.Builder()
self.ui.add_from_file(os.path.join(gglobals.uibase,'valueEditor.ui'))
self.__setup_widgets__()
self.__setup_treeview__()
self.ui.connect_signals({
'on_changeValueButton_toggled':self.changeValueButtonToggledCB,
'on_fieldToEditCombo_changed':self.fieldChangedCB,
'on_otherChangeCheckButton_toggled':self.otherChangeToggleCB,
'on_otherExpander_activate':self.otherChangeToggleCB,
'on_otherFieldCombo_changed':self.otherFieldComboChangedCB,
})
def __setup_widgets__ (self):
for w in [
'valueDialog',
'treeview',
'fieldToEditCombo','newValueComboBoxEntry',
'newValueEntry','changeValueButton',
'deleteValueButton','forEachLabel',
'otherExpander','otherFieldCombo',
'otherNewValueEntry','otherNewValueComboBoxEntry',
'otherValueBlurbLabel','otherChangeCheckButton',
'leaveValueButton'
]:
setattr(self,w,self.ui.get_object(w))
self.act_on_selection_widgets = [
self.deleteValueButton, self.changeValueButton,
self.newValueEntry,self.otherChangeCheckButton,
self.leaveValueButton
]
        # Set up the combo-widget at the top with the list of fields that can be edited
self.fields = [gglobals.REC_ATTR_DIC[v] for v in self.values]
cb.set_model_from_list(
self.fieldToEditCombo,
self.fields
)
cb.set_model_from_list(
self.otherFieldCombo,
self.fields
)
self.newValueComboBoxEntry.set_sensitive(False)
self.otherValueBlurbLabel.hide()
self.newValueEntryCompletion = gtk.EntryCompletion()
self.newValueEntry.set_completion(self.newValueEntryCompletion)
self.otherNewValueEntryCompletion = gtk.EntryCompletion()
self.otherNewValueEntry.set_completion(
self.otherNewValueEntryCompletion
)
self.valueDialog.connect('response',self.dialog_response_cb)
self.valueDialog.set_response_sensitive(gtk.RESPONSE_APPLY,False)
def __setup_treeview__ (self):
renderer = gtk.CellRendererText()
# If we have gtk > 2.8, set up text-wrapping
try:
renderer.get_property('wrap-width')
except TypeError:
pass
else:
renderer.set_property('wrap-mode',gtk.WRAP_WORD)
renderer.set_property('wrap-width',400)
col = gtk.TreeViewColumn('Value',
renderer,
text=0)
self.treeview.append_column(col)
self.treeview.get_selection().connect('changed',self.treeViewSelectionChanged)
self.treeview.get_selection().set_mode(gtk.SELECTION_MULTIPLE)
def changeValueButtonToggledCB (self, tb):
if tb.get_active():
self.newValueComboBoxEntry.set_sensitive(True)
else:
self.newValueComboBoxEntry.set_sensitive(False)
def treeViewSelectionChanged (self, tvSelection):
vals = self.get_selected_values(tvSelection)
if len(vals) == 1: val_string = vals[0]
elif len(vals) == 2: val_string = ' or '.join(vals)
elif len(vals) > 0:
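            # e.g. three selected values render as "a, b or c"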
val_string = ' or '.join([', '.join(vals[:-1]),vals[-1]])
else: # len(vals)==0
self.forEachLabel.set_text(_('For each selected value'))
if vals:
self.val_string = val_string
self.forEachLabel.set_text(
_('Where %(field)s is %(value)s')%{'value':val_string,
'field':self.field}
)
self.valueDialog.set_response_sensitive(gtk.RESPONSE_APPLY,(vals and True or False))
def fieldChangedCB (self, combobox):
name = cb.cb_get_active_text(combobox)
self.field = gglobals.NAME_TO_ATTR.get(name,name)
self.populate_treeview()
other_fields = self.fields[:]
if self.field != 'category':
other_fields.remove(gglobals.REC_ATTR_DIC[self.field])
cb.set_model_from_list(
self.otherFieldCombo,
other_fields
)
def otherFieldComboChangedCB (self, combobox):
name = cb.cb_get_active_text(combobox)
self.other_field = gglobals.NAME_TO_ATTR.get(name,name)
if self.other_field == 'category':
self.otherValueBlurbLabel.hide()
else:
self.otherValueBlurbLabel.show()
mod = self.make_model_for_field(self.other_field)
self.otherNewValueComboBoxEntry.set_model(mod)
if self.otherNewValueComboBoxEntry.get_text_column()==-1:
self.otherNewValueComboBoxEntry.set_text_column(0)
self.otherNewValueEntryCompletion.set_model(mod)
self.otherNewValueEntryCompletion.set_text_column(0)
def populate_treeview (self):
"""Assume that self.field is set"""
mod = self.make_model_for_field(self.field)
self.treeview.set_model(mod)
self.newValueComboBoxEntry.set_model(mod)
if self.newValueComboBoxEntry.get_text_column()==-1:
self.newValueComboBoxEntry.set_text_column(0)
self.newValueEntryCompletion.set_model(mod)
self.newValueEntryCompletion.set_text_column(0)
def make_model_for_field (self, field):
vals = self.rd.get_unique_values(field)
mod = gtk.ListStore(str)
for v in vals: mod.append((v,))
return mod
def run (self): return self.valueDialog.run()
def show (self): return self.valueDialog.show()
def hide (self): return self.valueDialog.hide()
def dialog_response_cb (self, dialog, response_id):
if response_id == gtk.RESPONSE_CLOSE:
self.valueDialog.hide()
if response_id == gtk.RESPONSE_APPLY:
criteria,table = self.get_criteria_and_table()
count = self.rd.fetch_len(table,**criteria)
count_text = ngettext('Change will affect %s recipe',
'Change will affect %s recipes',
count)%count
if self.deleteValueButton.get_active():
label = _('Delete %s where it is %s?')%(self.field,self.val_string)
yes = gtk.STOCK_DELETE
else:
label = _('Change %s from %s to "%s"?')%(self.field,self.val_string,
self.newValueEntry.get_text())
yes = '_Change'
if de.getBoolean(label=label,
sublabel='\n\n'.join([
count_text,
                             _('<i>This change is not reversible.</i>')
]),
custom_yes=yes,
custom_no=gtk.STOCK_CANCEL,
cancel=False):
self.apply_changes(criteria,table)
self.populate_treeview()
def otherChangeToggleCB (self, widg):
if widg!=self.otherChangeCheckButton:
self.otherChangeCheckButton.activate()
if self.otherChangeCheckButton.get_active():
self.otherExpander.set_expanded(True)
else:
self.otherExpander.set_expanded(False)
def get_changes (self):
if self.deleteValueButton.get_active():
value = None
elif self.changeValueButton.get_active():
value = self.newValueEntry.get_text()
return {self.field:value}
def get_other_changes (self):
if self.otherChangeCheckButton.get_active():
return {self.other_field:self.otherNewValueEntry.get_text()}
else:
return {}
def get_selected_values (self, ts=None):
if not ts:
ts = self.treeview.get_selection()
mod,paths = ts.get_selected_rows()
values = []
for p in paths:
values.append(
mod.get_value(mod.get_iter(p),0)
)
return values
def get_criteria_and_table (self):
values = self.get_selected_values()
if len(values) > 1:
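            # e.g. two selected categories yield
            #   {'category': ('==', ('or', ['Dessert', 'Soup']))}
            # (illustrative values for the backend's query mini-language)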
criteria = {self.field:('==',('or',values))}
elif len(values)==1:
criteria = {self.field:values[0]}
if self.field == 'category':
table = self.rd.categories_table
else:
table = self.rd.recipe_table
return criteria,table
def apply_changes (self, criteria, table):
changes = self.get_changes()
other_changes = self.get_other_changes()
if self.field != 'category' and self.other_field != 'category':
changes.update(other_changes)
elif other_changes:
if self.other_field == 'category':
# Inefficient, but works with our current backend
# interface... and shouldn't be called often, so we'll
# deal with the ugliness for now
for r in self.rd.fetch_all(self.rd.recipe_table,**criteria):
if not self.rd.fetch_one(self.rd.categories_table,{'id':r.id}):
self.rd.do_add_cat({'id':r.id,'category':other_changes['category']})
else:
if self.field=='category':
IDs = [r.id for r in self.rd.fetch_all(self.rd.categories_table,**criteria)]
new_criteria = {'id':('==',('or',IDs))}
self.rd.update_by_criteria(
self.rd.recipe_table,
new_criteria,
other_changes
)
else:
self.rd.update_by_criteria(
self.rd.recipe_table,
criteria,
other_changes
)
if self.field=='category' and not changes['category']:
self.rd.delete_by_criteria(table,criteria)
else:
if self.field=='category':
table = self.rd.categories_table
else:
table = self.rd.recipe_table
self.rd.update_by_criteria(table,criteria,changes)
self.rg.reset_search()
if __name__ == '__main__':
import recipeManager
rm = recipeManager.default_rec_manager()
class DummyRG:
        def reset_search (self): pass
w = gtk.Window()
b = gtk.Button('edit me now')
w.add(b); w.show_all()
ve = FieldEditor(rm,DummyRG())
b.connect('clicked',lambda *args: ve.run())
w.connect('delete-event',gtk.main_quit)
gtk.main()
| gpl-2.0 |
apple/swift-lldb | packages/Python/lldbsuite/test/tools/lldb-server/TestGdbRemoteKill.py | 1 | 2105 | from __future__ import print_function
import gdbremote_testcase
import lldbgdbserverutils
from lldbsuite.test.decorators import *
from lldbsuite.test.lldbtest import *
from lldbsuite.test import lldbutil
class TestGdbRemoteKill(gdbremote_testcase.GdbRemoteTestCaseBase):
mydir = TestBase.compute_mydir(__file__)
@skipIfDarwinEmbedded # <rdar://problem/34539270> lldb-server tests not updated to work on ios etc yet
def attach_commandline_kill_after_initial_stop(self):
reg_expr = r"^\$[XW][0-9a-fA-F]+([^#]*)#[0-9A-Fa-f]{2}"
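        # Matches a gdb-remote stop-reply packet: '$' + payload + '#' + two
        # hex checksum digits, e.g. "$W00#b7" (inferior exited) or
        # "$X09#c1" (killed by signal 9) -- illustrative payloads.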
procs = self.prep_debug_monitor_and_inferior()
self.test_sequence.add_log_lines([
"read packet: $k#6b",
{"direction": "send", "regex": reg_expr},
], True)
if self.stub_sends_two_stop_notifications_on_kill:
# Add an expectation for a second X result for stubs that send two
# of these.
self.test_sequence.add_log_lines([
{"direction": "send", "regex": reg_expr},
], True)
self.expect_gdbremote_sequence()
# Wait a moment for completed and now-detached inferior process to
# clear.
time.sleep(1)
if not lldb.remote_platform:
# Process should be dead now. Reap results.
poll_result = procs["inferior"].poll()
self.assertIsNotNone(poll_result)
# Where possible, verify at the system level that the process is not
# running.
self.assertFalse(
lldbgdbserverutils.process_is_running(
procs["inferior"].pid, False))
@debugserver_test
def test_attach_commandline_kill_after_initial_stop_debugserver(self):
self.init_debugserver_test()
self.build()
self.set_inferior_startup_attach()
self.attach_commandline_kill_after_initial_stop()
@llgs_test
def test_attach_commandline_kill_after_initial_stop_llgs(self):
self.init_llgs_test()
self.build()
self.set_inferior_startup_attach()
self.attach_commandline_kill_after_initial_stop()
| apache-2.0 |
zak-k/iris | lib/iris/tests/unit/analysis/test_COUNT.py | 12 | 2160 | # (C) British Crown Copyright 2013 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for the :data:`iris.analysis.COUNT` aggregator."""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
import numpy.ma as ma
from iris.analysis import COUNT
import iris.cube
from iris.coords import DimCoord
class Test_units_func(tests.IrisTest):
def test(self):
self.assertIsNotNone(COUNT.units_func)
new_units = COUNT.units_func(None)
self.assertEqual(new_units, 1)
class Test_masked(tests.IrisTest):
def setUp(self):
self.cube = iris.cube.Cube(ma.masked_equal([1, 2, 3, 4, 5], 3))
self.cube.add_dim_coord(DimCoord([6, 7, 8, 9, 10], long_name='foo'), 0)
self.func = lambda x: x >= 3
def test_ma(self):
cube = self.cube.collapsed("foo", COUNT, function=self.func)
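        # 3 is masked in setUp, so only 4 and 5 satisfy x >= 3 -> count == 2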
self.assertArrayEqual(cube.data, [2])
class Test_name(tests.IrisTest):
def test(self):
self.assertEqual(COUNT.name(), 'count')
class Test_aggregate_shape(tests.IrisTest):
def test(self):
shape = ()
kwargs = dict()
self.assertTupleEqual(COUNT.aggregate_shape(**kwargs), shape)
kwargs = dict(wibble='wobble')
self.assertTupleEqual(COUNT.aggregate_shape(**kwargs), shape)
if __name__ == "__main__":
tests.main()
| gpl-3.0 |
ales-erjavec/orange | Orange/utils/collections.py | 6 | 4276 | import bisect
import array
from itertools import izip
class TypedDict(object):
""" An space efficient dictionary like object with typed keys and
values and O(log(n)) item lookup.
Example ::
>>> d = TypedDict({1:'a', 2:'b', 3:'c'}, keytype="i", valuetype="c")
"""
__slots__ = ["keytype", "valuetype", "_key_array", "_value_array"]
def __init__(self, mapping=None, keytype="I", valuetype="B", _key_array=None, _value_array=None):
"""
:param mapping: If given initialize the TypedDict object from this
dict like object
:param keytype: A string type code for keys (see `array` module for
details)
:param valuetype: A string type code for values (see `array` module
for details)
"""
self.keytype = keytype
self.valuetype = valuetype
if _key_array is not None and _value_array is not None:
assert(len(_key_array) == len(_value_array))
self._key_array = _key_array
self._value_array = _value_array
elif mapping:
items = []
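            # `mapping` may be a dict or an iterable of (key, value) pairs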
for key in mapping:
if isinstance(key, tuple) and len(key) == 2:
items.append(key)
else:
items.append((key, mapping[key]))
items.sort()
self._key_array = array.array(self.keytype, [i[0] for i in items])
self._value_array = array.array(self.valuetype, [i[1] for i in items])
else:
self._key_array = array.array(keytype)
self._value_array = array.array(valuetype)
def __getitem__(self, key):
i = bisect.bisect_left(self._key_array, key)
if i == len(self._key_array):
raise KeyError(key)
elif self._key_array[i] == key:
return self._value_array[i]
else:
raise KeyError(key)
def __setitem__(self, key, value):
i = bisect.bisect_left(self._key_array, key)
if i == len(self._key_array):
self._key_array.insert(i, key)
self._value_array.insert(i, value)
elif self._key_array[i] == key:
self._value_array[i] = value
else:
self._key_array.insert(i, key)
self._value_array.insert(i, value)
def keys(self):
return self._key_array.tolist()
def values(self):
return self._value_array.tolist()
def items(self):
return zip(self.iterkeys(), self.itervalues())
def iterkeys(self):
return iter(self._key_array)
def itervalues(self):
return iter(self._value_array)
def iteritems(self):
return izip(self.iterkeys(), self.itervalues())
def get(self, key, default=None):
i = bisect.bisect_left(self._key_array, key)
if i == len(self._key_array):
return default
elif self._key_array[i] == key:
return self._value_array[i]
else:
return default
def has_key(self, key):
return self.__contains__(key)
def update(self, mapping):
raise NotImplementedError
def __len__(self):
return len(self._key_array)
def __iter__(self):
return self.iterkeys()
def __contains__(self, key):
i = bisect.bisect_left(self._key_array, key)
if i == len(self._key_array) or self._key_array[i] != key:
return False
else:
return True
def __delitem__(self, key):
raise NotImplementedError
def clear(self):
raise NotImplementedError
def todict(self):
""" Return a regular dict object initialized from this TypedDict.
"""
return dict(self.iteritems())
def __repr__(self):
return "TypedDict({0!r})".format(self.todict())
def __reduce_ex__(self, protocol):
return TypedDict, (), self.__getstate__()
def __getstate__(self):
return [getattr(self, slot) for slot in self.__slots__]
def __setstate__(self, state):
for slot, value in zip(self.__slots__, state):
setattr(self, slot, value)
| gpl-3.0 |
petebachant/pyqtgraph | pyqtgraph/canvas/CanvasTemplate_pyqt.py | 35 | 5126 | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'acq4/pyqtgraph/canvas/CanvasTemplate.ui'
#
# Created: Thu Jan 2 11:13:07 2014
# by: PyQt4 UI code generator 4.9
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
_fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
_fromUtf8 = lambda s: s
class Ui_Form(object):
def setupUi(self, Form):
Form.setObjectName(_fromUtf8("Form"))
Form.resize(490, 414)
self.gridLayout = QtGui.QGridLayout(Form)
self.gridLayout.setMargin(0)
self.gridLayout.setSpacing(0)
self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
self.splitter = QtGui.QSplitter(Form)
self.splitter.setOrientation(QtCore.Qt.Horizontal)
self.splitter.setObjectName(_fromUtf8("splitter"))
self.view = GraphicsView(self.splitter)
self.view.setObjectName(_fromUtf8("view"))
self.layoutWidget = QtGui.QWidget(self.splitter)
self.layoutWidget.setObjectName(_fromUtf8("layoutWidget"))
self.gridLayout_2 = QtGui.QGridLayout(self.layoutWidget)
self.gridLayout_2.setMargin(0)
self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
self.autoRangeBtn = QtGui.QPushButton(self.layoutWidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(1)
sizePolicy.setHeightForWidth(self.autoRangeBtn.sizePolicy().hasHeightForWidth())
self.autoRangeBtn.setSizePolicy(sizePolicy)
self.autoRangeBtn.setObjectName(_fromUtf8("autoRangeBtn"))
self.gridLayout_2.addWidget(self.autoRangeBtn, 2, 0, 1, 2)
self.horizontalLayout = QtGui.QHBoxLayout()
self.horizontalLayout.setSpacing(0)
self.horizontalLayout.setObjectName(_fromUtf8("horizontalLayout"))
self.redirectCheck = QtGui.QCheckBox(self.layoutWidget)
self.redirectCheck.setObjectName(_fromUtf8("redirectCheck"))
self.horizontalLayout.addWidget(self.redirectCheck)
self.redirectCombo = CanvasCombo(self.layoutWidget)
self.redirectCombo.setObjectName(_fromUtf8("redirectCombo"))
self.horizontalLayout.addWidget(self.redirectCombo)
self.gridLayout_2.addLayout(self.horizontalLayout, 5, 0, 1, 2)
self.itemList = TreeWidget(self.layoutWidget)
sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(100)
sizePolicy.setHeightForWidth(self.itemList.sizePolicy().hasHeightForWidth())
self.itemList.setSizePolicy(sizePolicy)
self.itemList.setHeaderHidden(True)
self.itemList.setObjectName(_fromUtf8("itemList"))
self.itemList.headerItem().setText(0, _fromUtf8("1"))
self.gridLayout_2.addWidget(self.itemList, 6, 0, 1, 2)
self.ctrlLayout = QtGui.QGridLayout()
self.ctrlLayout.setSpacing(0)
self.ctrlLayout.setObjectName(_fromUtf8("ctrlLayout"))
self.gridLayout_2.addLayout(self.ctrlLayout, 10, 0, 1, 2)
self.resetTransformsBtn = QtGui.QPushButton(self.layoutWidget)
self.resetTransformsBtn.setObjectName(_fromUtf8("resetTransformsBtn"))
self.gridLayout_2.addWidget(self.resetTransformsBtn, 7, 0, 1, 1)
self.mirrorSelectionBtn = QtGui.QPushButton(self.layoutWidget)
self.mirrorSelectionBtn.setObjectName(_fromUtf8("mirrorSelectionBtn"))
self.gridLayout_2.addWidget(self.mirrorSelectionBtn, 3, 0, 1, 1)
self.reflectSelectionBtn = QtGui.QPushButton(self.layoutWidget)
self.reflectSelectionBtn.setObjectName(_fromUtf8("reflectSelectionBtn"))
self.gridLayout_2.addWidget(self.reflectSelectionBtn, 3, 1, 1, 1)
self.gridLayout.addWidget(self.splitter, 0, 0, 1, 1)
self.retranslateUi(Form)
QtCore.QMetaObject.connectSlotsByName(Form)
def retranslateUi(self, Form):
Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
self.autoRangeBtn.setText(QtGui.QApplication.translate("Form", "Auto Range", None, QtGui.QApplication.UnicodeUTF8))
self.redirectCheck.setToolTip(QtGui.QApplication.translate("Form", "Check to display all local items in a remote canvas.", None, QtGui.QApplication.UnicodeUTF8))
self.redirectCheck.setText(QtGui.QApplication.translate("Form", "Redirect", None, QtGui.QApplication.UnicodeUTF8))
self.resetTransformsBtn.setText(QtGui.QApplication.translate("Form", "Reset Transforms", None, QtGui.QApplication.UnicodeUTF8))
self.mirrorSelectionBtn.setText(QtGui.QApplication.translate("Form", "Mirror Selection", None, QtGui.QApplication.UnicodeUTF8))
self.reflectSelectionBtn.setText(QtGui.QApplication.translate("Form", "MirrorXY", None, QtGui.QApplication.UnicodeUTF8))
from ..widgets.TreeWidget import TreeWidget
from CanvasManager import CanvasCombo
from ..widgets.GraphicsView import GraphicsView
| mit |
zhengzhihust/tablib | tablib/packages/openpyxl3/drawing.py | 55 | 10472 | '''
Copyright (c) 2010 openpyxl
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
@license: http://www.opensource.org/licenses/mit-license.php
@author: Eric Gazoni
'''
import math
from .style import Color
from .shared.units import pixels_to_EMU, EMU_to_pixels, short_color
class Shadow(object):
SHADOW_BOTTOM = 'b'
SHADOW_BOTTOM_LEFT = 'bl'
SHADOW_BOTTOM_RIGHT = 'br'
SHADOW_CENTER = 'ctr'
SHADOW_LEFT = 'l'
SHADOW_TOP = 't'
SHADOW_TOP_LEFT = 'tl'
SHADOW_TOP_RIGHT = 'tr'
def __init__(self):
self.visible = False
self.blurRadius = 6
self.distance = 2
self.direction = 0
self.alignment = self.SHADOW_BOTTOM_RIGHT
self.color = Color(Color.BLACK)
self.alpha = 50
class Drawing(object):
""" a drawing object - eg container for shapes or charts
we assume user specifies dimensions in pixels; units are
converted to EMU in the drawing part
"""
count = 0
def __init__(self):
self.name = ''
self.description = ''
self.coordinates = ((1,2), (16,8))
self.left = 0
self.top = 0
self._width = EMU_to_pixels(200000)
self._height = EMU_to_pixels(1828800)
self.resize_proportional = False
self.rotation = 0
# self.shadow = Shadow()
def _set_width(self, w):
if self.resize_proportional and w:
ratio = self._height / self._width
self._height = round(ratio * w)
self._width = w
def _get_width(self):
return self._width
width = property(_get_width, _set_width)
def _set_height(self, h):
if self.resize_proportional and h:
ratio = self._width / self._height
self._width = round(ratio * h)
self._height = h
def _get_height(self):
return self._height
height = property(_get_height, _set_height)
def set_dimension(self, w=0, h=0):
xratio = w / self._width
yratio = h / self._height
if self.resize_proportional and w and h:
if (xratio * self._height) < h:
self._height = math.ceil(xratio * self._height)
                self._width = w
else:
self._width = math.ceil(yratio * self._width)
                self._height = h
def get_emu_dimensions(self):
""" return (x, y, w, h) in EMU """
return (pixels_to_EMU(self.left), pixels_to_EMU(self.top),
pixels_to_EMU(self._width), pixels_to_EMU(self._height))
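    # Rough usage sketch (assumes the usual 9525 EMU-per-pixel factor used
    # by shared.units, i.e. 914400 EMU/inch at 96 dpi; values illustrative):
    #
    #   d = Drawing()
    #   d.left, d.top = 10, 20
    #   d.width, d.height = 100, 50
    #   d.get_emu_dimensions()   # -> (95250, 190500, 952500, 476250)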
class Shape(object):
""" a drawing inside a chart
    coordinates are specified by the user in the axis units
"""
MARGIN_LEFT = 6 + 13 + 1
MARGIN_BOTTOM = 17 + 11
FONT_WIDTH = 7
FONT_HEIGHT = 8
ROUND_RECT = 'roundRect'
RECT = 'rect'
# other shapes to define :
'''
"line"
"lineInv"
"triangle"
"rtTriangle"
"diamond"
"parallelogram"
"trapezoid"
"nonIsoscelesTrapezoid"
"pentagon"
"hexagon"
"heptagon"
"octagon"
"decagon"
"dodecagon"
"star4"
"star5"
"star6"
"star7"
"star8"
"star10"
"star12"
"star16"
"star24"
"star32"
"roundRect"
"round1Rect"
"round2SameRect"
"round2DiagRect"
"snipRoundRect"
"snip1Rect"
"snip2SameRect"
"snip2DiagRect"
"plaque"
"ellipse"
"teardrop"
"homePlate"
"chevron"
"pieWedge"
"pie"
"blockArc"
"donut"
"noSmoking"
"rightArrow"
"leftArrow"
"upArrow"
"downArrow"
"stripedRightArrow"
"notchedRightArrow"
"bentUpArrow"
"leftRightArrow"
"upDownArrow"
"leftUpArrow"
"leftRightUpArrow"
"quadArrow"
"leftArrowCallout"
"rightArrowCallout"
"upArrowCallout"
"downArrowCallout"
"leftRightArrowCallout"
"upDownArrowCallout"
"quadArrowCallout"
"bentArrow"
"uturnArrow"
"circularArrow"
"leftCircularArrow"
"leftRightCircularArrow"
"curvedRightArrow"
"curvedLeftArrow"
"curvedUpArrow"
"curvedDownArrow"
"swooshArrow"
"cube"
"can"
"lightningBolt"
"heart"
"sun"
"moon"
"smileyFace"
"irregularSeal1"
"irregularSeal2"
"foldedCorner"
"bevel"
"frame"
"halfFrame"
"corner"
"diagStripe"
"chord"
"arc"
"leftBracket"
"rightBracket"
"leftBrace"
"rightBrace"
"bracketPair"
"bracePair"
"straightConnector1"
"bentConnector2"
"bentConnector3"
"bentConnector4"
"bentConnector5"
"curvedConnector2"
"curvedConnector3"
"curvedConnector4"
"curvedConnector5"
"callout1"
"callout2"
"callout3"
"accentCallout1"
"accentCallout2"
"accentCallout3"
"borderCallout1"
"borderCallout2"
"borderCallout3"
"accentBorderCallout1"
"accentBorderCallout2"
"accentBorderCallout3"
"wedgeRectCallout"
"wedgeRoundRectCallout"
"wedgeEllipseCallout"
"cloudCallout"
"cloud"
"ribbon"
"ribbon2"
"ellipseRibbon"
"ellipseRibbon2"
"leftRightRibbon"
"verticalScroll"
"horizontalScroll"
"wave"
"doubleWave"
"plus"
"flowChartProcess"
"flowChartDecision"
"flowChartInputOutput"
"flowChartPredefinedProcess"
"flowChartInternalStorage"
"flowChartDocument"
"flowChartMultidocument"
"flowChartTerminator"
"flowChartPreparation"
"flowChartManualInput"
"flowChartManualOperation"
"flowChartConnector"
"flowChartPunchedCard"
"flowChartPunchedTape"
"flowChartSummingJunction"
"flowChartOr"
"flowChartCollate"
"flowChartSort"
"flowChartExtract"
"flowChartMerge"
"flowChartOfflineStorage"
"flowChartOnlineStorage"
"flowChartMagneticTape"
"flowChartMagneticDisk"
"flowChartMagneticDrum"
"flowChartDisplay"
"flowChartDelay"
"flowChartAlternateProcess"
"flowChartOffpageConnector"
"actionButtonBlank"
"actionButtonHome"
"actionButtonHelp"
"actionButtonInformation"
"actionButtonForwardNext"
"actionButtonBackPrevious"
"actionButtonEnd"
"actionButtonBeginning"
"actionButtonReturn"
"actionButtonDocument"
"actionButtonSound"
"actionButtonMovie"
"gear6"
"gear9"
"funnel"
"mathPlus"
"mathMinus"
"mathMultiply"
"mathDivide"
"mathEqual"
"mathNotEqual"
"cornerTabs"
"squareTabs"
"plaqueTabs"
"chartX"
"chartStar"
"chartPlus"
'''
def __init__(self, coordinates=((0,0), (1,1)), text=None, scheme="accent1"):
self.coordinates = coordinates # in axis unit
self.text = text
self.scheme = scheme
self.style = Shape.RECT
self._border_width = 3175 # in EMU
self._border_color = Color.BLACK[2:] #"F3B3C5"
self._color = Color.WHITE[2:]
self._text_color = Color.BLACK[2:]
def _get_border_color(self):
return self._border_color
def _set_border_color(self, color):
self._border_color = short_color(color)
border_color = property(_get_border_color, _set_border_color)
def _get_color(self):
return self._color
def _set_color(self, color):
self._color = short_color(color)
color = property(_get_color, _set_color)
def _get_text_color(self):
return self._text_color
def _set_text_color(self, color):
self._text_color = short_color(color)
text_color = property(_get_text_color, _set_text_color)
def _get_border_width(self):
return EMU_to_pixels(self._border_width)
def _set_border_width(self, w):
self._border_width = pixels_to_EMU(w)
border_width = property(_get_border_width, _set_border_width)
def get_coordinates(self):
""" return shape coordinates in percentages (left, top, right, bottom) """
(x1, y1), (x2, y2) = self.coordinates
drawing_width = pixels_to_EMU(self._chart.drawing.width)
drawing_height = pixels_to_EMU(self._chart.drawing.height)
plot_width = drawing_width * self._chart.width
plot_height = drawing_height * self._chart.height
margin_left = self._chart._get_margin_left() * drawing_width
xunit = plot_width / self._chart.get_x_units()
margin_top = self._chart._get_margin_top() * drawing_height
yunit = self._chart.get_y_units()
x_start = (margin_left + (float(x1) * xunit)) / drawing_width
y_start = (margin_top + plot_height - (float(y1) * yunit)) / drawing_height
x_end = (margin_left + (float(x2) * xunit)) / drawing_width
y_end = (margin_top + plot_height - (float(y2) * yunit)) / drawing_height
def _norm_pct(pct):
""" force shapes to appear by truncating too large sizes """
if pct>1: pct = 1
elif pct<0: pct = 0
return pct
# allow user to specify y's in whatever order
# excel expect y_end to be lower
if y_end < y_start:
y_end, y_start = y_start, y_end
return (_norm_pct(x_start), _norm_pct(y_start),
_norm_pct(x_end), _norm_pct(y_end))
| mit |
inares/edx-platform | common/lib/xmodule/xmodule/modulestore/tests/sample_courses.py | 100 | 9544 | # encoding: utf-8
"""
The data type and use of it for declaratively creating test courses.
"""
# used to create course subtrees in ModuleStoreTestCase.create_test_course
# adds to self properties w/ the given block_id which hold the UsageKey for easy retrieval.
# fields is a dictionary of keys and values. sub_tree is a collection of BlockInfo
from collections import namedtuple
import datetime
BlockInfo = namedtuple('BlockInfo', 'block_id, category, fields, sub_tree') # pylint: disable=invalid-name
default_block_info_tree = [ # pylint: disable=invalid-name
BlockInfo(
'chapter_x', 'chapter', {}, [
BlockInfo(
'sequential_x1', 'sequential', {}, [
BlockInfo(
'vertical_x1a', 'vertical', {}, [
BlockInfo('problem_x1a_1', 'problem', {}, []),
BlockInfo('problem_x1a_2', 'problem', {}, []),
BlockInfo('problem_x1a_3', 'problem', {}, []),
BlockInfo('html_x1a_1', 'html', {}, []),
]
)
]
)
]
),
BlockInfo(
'chapter_y', 'chapter', {}, [
BlockInfo(
'sequential_y1', 'sequential', {}, [
BlockInfo(
'vertical_y1a', 'vertical', {}, [
BlockInfo('problem_y1a_1', 'problem', {}, []),
BlockInfo('problem_y1a_2', 'problem', {}, []),
BlockInfo('problem_y1a_3', 'problem', {}, []),
]
)
]
)
]
)
]
# equivalent to toy course in xml
TOY_BLOCK_INFO_TREE = [
BlockInfo(
'Overview', "chapter", {"display_name": "Overview"}, [
BlockInfo(
"Toy_Videos", "videosequence", {
"xml_attributes": {"filename": ["", None]}, "display_name": "Toy Videos", "format": "Lecture Sequence"
}, [
BlockInfo(
"secret:toylab", "html", {
"data": "<b>Lab 2A: Superposition Experiment</b>\n\n\n<p>Isn't the toy course great?</p>\n\n<p>Let's add some markup that uses non-ascii characters.\n'For example, we should be able to write words like encyclopædia, or foreign words like français.\nLooking beyond latin-1, we should handle math symbols: πr² ≤ ∞.\nAnd it shouldn't matter if we use entities or numeric codes — Ω ≠ π ≡ Ω ≠ π.\n</p>\n\n", # pylint: disable=line-too-long
"xml_attributes": {"filename": ["html/secret/toylab.xml", "html/secret/toylab.xml"]},
"display_name": "Toy lab"
}, []
),
BlockInfo(
"toyjumpto", "html", {
"data": "<a href=\"/jump_to_id/vertical_test\">This is a link to another page and some Chinese 四節比分和七年前</a> <p>Some more Chinese 四節比分和七年前</p>\n",
"xml_attributes": {"filename": ["html/toyjumpto.xml", "html/toyjumpto.xml"]}
}, []),
BlockInfo(
"toyhtml", "html", {
"data": "<a href='/static/handouts/sample_handout.txt'>Sample</a>",
"xml_attributes": {"filename": ["html/toyhtml.xml", "html/toyhtml.xml"]}
}, []),
BlockInfo(
"nonportable", "html", {
"data": "<a href=\"/static/foo.jpg\">link</a>\n",
"xml_attributes": {"filename": ["html/nonportable.xml", "html/nonportable.xml"]}
}, []),
BlockInfo(
"nonportable_link", "html", {
"data": "<a href=\"/jump_to_id/nonportable_link\">link</a>\n\n",
"xml_attributes": {"filename": ["html/nonportable_link.xml", "html/nonportable_link.xml"]}
}, []),
BlockInfo(
"badlink", "html", {
"data": "<img src=\"/static//file.jpg\" />\n",
"xml_attributes": {"filename": ["html/badlink.xml", "html/badlink.xml"]}
}, []),
BlockInfo(
"with_styling", "html", {
"data": "<p style=\"font:italic bold 72px/30px Georgia, serif; color: red; \">Red text here</p>",
"xml_attributes": {"filename": ["html/with_styling.xml", "html/with_styling.xml"]}
}, []),
BlockInfo(
"just_img", "html", {
"data": "<img src=\"/static/foo_bar.jpg\" />",
"xml_attributes": {"filename": ["html/just_img.xml", "html/just_img.xml"]}
}, []),
BlockInfo(
"Video_Resources", "video", {
"youtube_id_1_0": "1bK-WdDi6Qw", "display_name": "Video Resources"
}, []),
]),
BlockInfo(
"Welcome", "video", {"data": "", "youtube_id_1_0": "p2Q6BrNhdh8", "display_name": "Welcome"}, []
),
BlockInfo(
"video_123456789012", "video", {"data": "", "youtube_id_1_0": "p2Q6BrNhdh8", "display_name": "Test Video"}, []
),
BlockInfo(
"video_4f66f493ac8f", "video", {"youtube_id_1_0": "p2Q6BrNhdh8"}, []
)
]
),
BlockInfo(
"secret:magic", "chapter", {
"xml_attributes": {"filename": ["chapter/secret/magic.xml", "chapter/secret/magic.xml"]}
}, [
BlockInfo(
"toyvideo", "video", {"youtube_id_1_0": "OEoXaMPEzfMA", "display_name": "toyvideo"}, []
)
]
),
BlockInfo(
"poll_test", "chapter", {}, [
BlockInfo(
"T1_changemind_poll_foo", "poll_question", {
"question": "<p>Have you changed your mind? ’</p>",
"answers": [{"text": "Yes", "id": "yes"}, {"text": "No", "id": "no"}],
"xml_attributes": {"reset": "false", "filename": ["", None]},
"display_name": "Change your answer"
}, [])]
),
BlockInfo(
"vertical_container", "chapter", {
"xml_attributes": {"filename": ["chapter/vertical_container.xml", "chapter/vertical_container.xml"]}
}, [
BlockInfo("vertical_sequential", "sequential", {}, [
BlockInfo("vertical_test", "vertical", {
"xml_attributes": {"filename": ["vertical/vertical_test.xml", "vertical_test"]}
}, [
BlockInfo(
"sample_video", "video", {
"youtube_id_1_25": "AKqURZnYqpk",
"youtube_id_0_75": "JMD_ifUUfsU",
"youtube_id_1_0": "OEoXaMPEzfM",
"display_name": "default",
"youtube_id_1_5": "DYpADpL7jAY"
}, []),
BlockInfo(
"separate_file_video", "video", {
"youtube_id_1_25": "AKqURZnYqpk",
"youtube_id_0_75": "JMD_ifUUfsU",
"youtube_id_1_0": "OEoXaMPEzfM",
"display_name": "default",
"youtube_id_1_5": "DYpADpL7jAY"
}, []),
BlockInfo(
"video_with_end_time", "video", {
"youtube_id_1_25": "AKqURZnYqpk",
"display_name": "default",
"youtube_id_1_0": "OEoXaMPEzfM",
"end_time": datetime.timedelta(seconds=10),
"youtube_id_1_5": "DYpADpL7jAY",
"youtube_id_0_75": "JMD_ifUUfsU"
}, []),
BlockInfo(
"T1_changemind_poll_foo_2", "poll_question", {
"question": "<p>Have you changed your mind?</p>",
"answers": [{"text": "Yes", "id": "yes"}, {"text": "No", "id": "no"}],
"xml_attributes": {"reset": "false", "filename": ["", None]},
"display_name": "Change your answer"
}, []),
]),
BlockInfo("unicode", "html", {
"data": "…", "xml_attributes": {"filename": ["", None]}
}, [])
]),
]
),
BlockInfo(
"handout_container", "chapter", {
"xml_attributes": {"filename": ["chapter/handout_container.xml", "chapter/handout_container.xml"]}
}, [
BlockInfo(
"html_7e5578f25f79", "html", {
"data": "<a href=\"/static/handouts/sample_handout.txt\"> handouts</a>",
"xml_attributes": {"filename": ["", None]}
}, []
),
]
)
]
| agpl-3.0 |
jhoenicke/python-trezor | trezorlib/tests/device_tests/test_msg_tezos_sign_tx.py | 3 | 8381 | # This file is part of the Trezor project.
#
# Copyright (C) 2012-2018 SatoshiLabs and contributors
#
# This library is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License version 3
# as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the License along with this library.
# If not, see <https://www.gnu.org/licenses/lgpl-3.0.html>.
import pytest
from trezorlib import messages, tezos
from trezorlib.protobuf import dict_to_proto
from trezorlib.tools import parse_path
from .common import TrezorTest
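# m/44'/1729'/0': BIP-44 purpose, SLIP-44 coin type 1729 (Tezos), account 0.
# All three components are hardened, as ed25519 (SLIP-10) derivation only
# supports hardened child keys.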
TEZOS_PATH = parse_path("m/44'/1729'/0'")
@pytest.mark.tezos
@pytest.mark.skip_t1
class TestMsgTezosSignTx(TrezorTest):
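    # Each test signs a fixed operation with the shared "all all all ..." test
    # mnemonic and checks three results from tezos.sign_tx: the base58 "edsig"
    # signature, sig_op_contents (the forged operation bytes with the raw
    # 64-byte signature appended, i.e. what would be injected into a node),
    # and the operation hash derived from those bytes.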
def test_tezos_sign_tx_transaction(self):
self.setup_mnemonic_allallall()
resp = tezos.sign_tx(
self.client,
TEZOS_PATH,
dict_to_proto(
messages.TezosSignTx,
{
"branch": "f2ae0c72fdd41d7a89bebfe8d6dd6d38e0fcd0782adb8194717176eb70366f64",
"transaction": {
"source": {
"tag": 0,
"hash": "00001e65c88ae6317cd62a638c8abd1e71c83c8475",
},
"fee": 0,
"counter": 108925,
"gas_limit": 200,
"storage_limit": 0,
"amount": 10000,
"destination": {
"tag": 0,
"hash": "0004115bce5af2f977acbb900f449c14c53e1d89cf",
},
},
},
),
)
assert (
resp.signature
== "edsigtfmAbUJtZMAJRGMppvDzPtiWBBQiZKf7G15dV9tgkHQefwiV4JeSw5Rj57ZK54FHEthpyzCpfGvAjU8YqhHxMwZP9Z2Jmt"
)
assert (
resp.sig_op_contents.hex()
== "f2ae0c72fdd41d7a89bebfe8d6dd6d38e0fcd0782adb8194717176eb70366f64080000001e65c88ae6317cd62a638c8abd1e71c83c847500fdd206c80100904e000004115bce5af2f977acbb900f449c14c53e1d89cf003cce7e6dfe3f79a8bd39f77d738fd79140da1a9e762b7d156eca2cf945aae978436cf68c1ec11889e4f2cf074c9642e05b3d65cc2896809af1fbdab0b126f90c"
)
assert (
resp.operation_hash == "opNeGBdgbM5jN2ykz4o8NdsCuJfqNZ6WBEFVbBUmYH8gp45CJvH"
)
def test_tezos_sign_reveal_transaction(self):
self.setup_mnemonic_allallall()
resp = tezos.sign_tx(
self.client,
TEZOS_PATH,
dict_to_proto(
messages.TezosSignTx,
{
"branch": "03cbce9a5ea1fae2566f7f244a01edc5869f5ada9d0bf21c1098017c59be98e0",
"reveal": {
"source": {
"tag": 0,
"hash": "00001e65c88ae6317cd62a638c8abd1e71c83c8475",
},
"fee": 0,
"counter": 108923,
"gas_limit": 200,
"storage_limit": 0,
"public_key": "00200da2c0200927dd8168b2b62e1322637521fcefb3184e61c1c3123c7c00bb95",
},
"transaction": {
"source": {
"tag": 0,
"hash": "00001e65c88ae6317cd62a638c8abd1e71c83c8475",
},
"fee": 0,
"counter": 108924,
"gas_limit": 200,
"storage_limit": 0,
"amount": 10000,
"destination": {
"tag": 0,
"hash": "0004115bce5af2f977acbb900f449c14c53e1d89cf",
},
},
},
),
)
assert (
resp.signature
== "edsigtheQQ78dZM9Sir78T3TNdfnyHrbFw8w3hiGMaLD5mPbGrUiD1jvy5fpsNJW9T5o7qrWBe7y7bai6vZ5KhwJ5HKZ8UnoCbh"
)
assert (
resp.sig_op_contents.hex()
== "03cbce9a5ea1fae2566f7f244a01edc5869f5ada9d0bf21c1098017c59be98e0070000001e65c88ae6317cd62a638c8abd1e71c83c847500fbd206c8010000200da2c0200927dd8168b2b62e1322637521fcefb3184e61c1c3123c7c00bb95080000001e65c88ae6317cd62a638c8abd1e71c83c847500fcd206c80100904e000004115bce5af2f977acbb900f449c14c53e1d89cf004b33e241c90b828c31cf44a28c123aee3f161049c3cb4c42ec71dd96fbbf8dae9963bdadb33f51d7c6f11ff0e74f0baad742352d980a1899f69c3c65c70fe40f"
)
assert (
resp.operation_hash == "opQHu93L8juNm2VjmsMKioFowWNyMvGzopcuoVcuzFV1bJMhJef"
)
def test_tezos_sign_tx_origination(self):
self.setup_mnemonic_allallall()
resp = tezos.sign_tx(
self.client,
TEZOS_PATH,
dict_to_proto(
messages.TezosSignTx,
{
"branch": "5e556181029c4ce5e54c9ffcbba2fc0966ed4d880ddeb0849bf6387438a7a877",
"origination": {
"source": {
"tag": 0,
"hash": "00001e65c88ae6317cd62a638c8abd1e71c83c8475",
},
"fee": 0,
"counter": 108929,
"gas_limit": 10000,
"storage_limit": 100,
"manager_pubkey": "00001e65c88ae6317cd62a638c8abd1e71c83c8475",
"balance": 2000000,
"spendable": True,
"delegatable": True,
"delegate": "0049a35041e4be130977d51419208ca1d487cfb2e7",
},
},
),
)
assert (
resp.signature
== "edsigu46YtcVthQQQ2FTcuayNwTcYY1Mpo6BmwCu83qGovi4kHM9CL5h4NaV4NQw8RTEP1VgraR6Kiv5J6RQsDLMzG17V6fcYwp"
)
assert (
resp.sig_op_contents.hex()
== "5e556181029c4ce5e54c9ffcbba2fc0966ed4d880ddeb0849bf6387438a7a877090000001e65c88ae6317cd62a638c8abd1e71c83c84750081d306904e6400001e65c88ae6317cd62a638c8abd1e71c83c847580897affffff0049a35041e4be130977d51419208ca1d487cfb2e700e785342fd2258277741f93c17c5022ea1be059f47f3e343600e83c50ca191e8318da9e5ec237be9657d0fc6aba654f476c945430239a3c6dfeca21e06be98706"
)
assert (
resp.operation_hash == "onuKkBtP4K2JMGg7YMv7qs869B8aHCEUQecvuiL71aKkY8iPCb6"
)
def test_tezos_sign_tx_delegation(self):
self.setup_mnemonic_allallall()
resp = tezos.sign_tx(
self.client,
TEZOS_PATH,
dict_to_proto(
messages.TezosSignTx,
{
"branch": "9b8b8bc45d611a3ada20ad0f4b6f0bfd72ab395cc52213a57b14d1fb75b37fd0",
"delegation": {
"source": {
"tag": 0,
"hash": "00001e65c88ae6317cd62a638c8abd1e71c83c8475",
},
"fee": 0,
"counter": 108927,
"gas_limit": 200,
"storage_limit": 0,
"delegate": "0049a35041e4be130977d51419208ca1d487cfb2e7",
},
},
),
)
assert (
resp.signature
== "edsigu3qGseaB2MghcGQWNWUhPtWgM9rC62FTEVrYWGtzFTHShDxGGmLFfEpJyToRCeRqcgGm3pyXY3NdyATkjmFTtUvJKvb3rX"
)
assert (
resp.sig_op_contents.hex()
== "9b8b8bc45d611a3ada20ad0f4b6f0bfd72ab395cc52213a57b14d1fb75b37fd00a0000001e65c88ae6317cd62a638c8abd1e71c83c847500ffd206c80100ff0049a35041e4be130977d51419208ca1d487cfb2e7e581d41daf8cab833d5b99151a0303fd04472eb990f7338d7be57afe21c26e779ff4341511694aebd901a0d74d183bbcb726a9be4b873d3b47298f99f2b7e80c"
)
assert (
resp.operation_hash == "oocgc3hyKsGHPsw6WFWJpWT8jBwQLtebQAXF27KNisThkzoj635"
)
| lgpl-3.0 |
bbc/kamaelia | Sketches/MH/MobileReframe/RangeFilter.py | 3 | 7339 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------
"""\
======================================
Filter items out that are not in range
======================================
RangeFilter passes through items received on its "inbox" inbox where item[0]
lies within one or more of a specified set of value ranges. Items that don't
match are discarded.
Example Usage
-------------
Reading all video frames from a YUV4MPEG format video file, but only passing on
video frames 25-49 and 100-199 inclusive further along the pipeline::
Pipeline( RateControlledFileReader("myvideo.yuv4mpeg",readmode="bytes"),
YUV4MPEGToFrame(),
TagWithSequenceNumber(),
RangeFilter(ranges=[ (25,49), (100,199) ]),
...
).run()
Behaviour
---------
At initialisation, specify a list of value ranges that RangeFilter should allow.
The list should be of the form::
[ (low,high), (low,high), (low, high), ... ]
The ranges specified are inclusive.
Send an item to the "inbox" inbox of the form (value, ....). If the value
matches one or more of the ranges specified, then the whole item (including the
value) will immediately be sent on out of the "outbox" outbox.
RangeFilter can therefore be used to select slices through sequence numbered or
timestamped data.
If the destination's size-limited inbox is full, this component will pause
until it is able to send out the data.
If a producerFinished message is received on the "control" inbox, this component
will finish processing any data pending in its inbox, and finish sending any
resulting data to its outbox. It will then send the producerFinished message on
out of its "signal" outbox and terminate.
If a shutdownMicroprocess message is received on the "control" inbox, this
component will immediately send it on out of its "signal" outbox and immediately
terminate. It will not complete processing, or sending on any pending data.
"""
from Axon.Component import component
from Axon.AxonExceptions import noSpaceInBox
from Axon.Ipc import producerFinished, shutdownMicroprocess
class StopComponent(Exception):
    """Raised internally to unwind the component's main loop at shutdown."""
    pass
class RangeFilter(component):
"""\
RangeFilter(ranges) -> new RangeFilter component.
    Filters out items of the form (value, ...) whose value is not within at
    least one of a specified set of value ranges. Items within range are
    passed through.
    Keyword arguments::
    - ranges -- list of (low,high) pairs representing ranges of values. Ranges are inclusive.
"""
Outboxes = { "outbox" : "items in range",
"signal" : "Shutdown signalling"
}
def __init__(self, ranges):
"""x.__init__(...) initializes x; see x.__class__.__doc__ for signature"""
super(RangeFilter,self).__init__()
self.ranges=ranges
def inRange(self,index):
"""\
Returns one of the ranges that the specified index falls within,
otherwise returns None.
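        For example (illustrative doctest; these ranges match the self-test
        at the bottom of this file)::
            >>> RangeFilter(ranges=[ (1,5), (8,12) ]).inRange(3)
            (1, 5)
            >>> RangeFilter(ranges=[ (1,5), (8,12) ]).inRange(6) is None
            True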
"""
for (start,end) in self.ranges:
if index>=start and index<=end:
return (start,end)
return None
def main(self):
"""Main loop"""
self.shutdownMsg = None
try:
while 1:
while self.dataReady("inbox"):
item = self.recv("inbox")
index = item[0]
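                    # item[0] is the sequence number / timestamp; if it is in
                    # range the whole item, value included, is passed through.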
if self.inRange(index):
for _ in self.waitSend(item,"outbox"): yield _
if self.canStop():
raise "STOP"
self.pause()
yield 1
except "STOP":
self.send(self.shutdownMsg,"signal")
# shutdown handling
def handleControl(self):
"""\
Collects any new shutdown messages arriving at the "control" inbox, and
ensures self.shutdownMsg contains the highest priority one encountered
so far.
"""
while self.dataReady("control"):
msg = self.recv("control")
if isinstance(msg, producerFinished) and not isinstance(self.shutdownMsg, shutdownMicroprocess):
self.shutdownMsg = msg
elif isinstance(msg, shutdownMicroprocess):
self.shutdownMsg = msg
def canStop(self):
"""\
Checks for any shutdown messages arriving at the "control" inbox, and
returns true if the component should terminate when it has finished
processing any pending data.
"""
self.handleControl()
return isinstance(self.shutdownMsg, (producerFinished,shutdownMicroprocess))
def mustStop(self):
"""\
Checks for any shutdown messages arriving at the "control" inbox, and
returns true if the component should terminate immediately.
"""
self.handleControl()
return isinstance(self.shutdownMsg, shutdownMicroprocess)
# data sending
def waitSend(self,data,boxname):
"""\
Generator.
Sends data out of the "outbox" outbox. If the destination is full
(noSpaceInBox exception) then it waits until there is space. It keeps
retrying until it succeeds.
        If the component is ordered to immediately terminate then
        StopComponent is raised.
"""
while 1:
try:
self.send(data,boxname)
return
except noSpaceInBox:
if self.mustStop():
raise "STOP"
self.pause()
yield 1
if self.mustStop():
raise "STOP"
__kamaelia_components__ = ( RangeFilter, )
if __name__=="__main__":
from Kamaelia.Util.DataSource import DataSource
from Kamaelia.Util.Console import ConsoleEchoer
from Kamaelia.Chassis.Pipeline import Pipeline
print "Only items in ranges 1-5 and 8-12 should be output...\n\n"
data = [
(0, "shouldn't pass through"),
(1, "YES!"),
(2, "YES!"),
(5, "YES!"),
(6, "shouldn't pass through"),
(7, "shouldn't pass through"),
(8, "YES!"),
(11, "YES!"),
(12, "YES!"),
(13, "shouldn't pass through"),
(29, "shouldn't pass through"),
(3, "YES!"),
]
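    # Only the seven items tagged "YES!" (indices 1, 2, 5, 8, 11, 12 and 3)
    # fall inside (1,5) or (8,12) inclusive; the other five are dropped.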
Pipeline( DataSource( data ),
RangeFilter( [ (1,5), (8,12) ] ),
ConsoleEchoer(),
).run()
print | apache-2.0 |
tangtang2013/zerorpc-python | tests/test_reqstream.py | 77 | 1992 | # -*- coding: utf-8 -*-
# Open Source Initiative OSI - The MIT License (MIT):Licensing
#
# The MIT License (MIT)
# Copyright (c) 2012 DotCloud Inc ([email protected])
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of
# this software and associated documentation files (the "Software"), to deal in
# the Software without restriction, including without limitation the rights to
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is furnished to do
# so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import gevent
import zerorpc
from testutils import teardown, random_ipc_endpoint
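# @zerorpc.rep methods return their whole result in a single reply message,
# while @zerorpc.stream methods send each element of the returned iterable as
# a separate message, giving the client a lazy iterator. The 4 second sleep
# below is longer than the heartbeat interval (heartbeat=2), so the test also
# checks that heartbeats keep an idle stream alive.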
def test_rpc_streaming():
endpoint = random_ipc_endpoint()
class MySrv(zerorpc.Server):
@zerorpc.rep
def range(self, max):
return range(max)
@zerorpc.stream
def xrange(self, max):
return xrange(max)
srv = MySrv(heartbeat=2)
srv.bind(endpoint)
gevent.spawn(srv.run)
client = zerorpc.Client(heartbeat=2)
client.connect(endpoint)
r = client.range(10)
assert list(r) == list(range(10))
r = client.xrange(10)
assert getattr(r, 'next', None) is not None
l = []
print 'wait 4s for fun'
gevent.sleep(4)
for x in r:
l.append(x)
assert l == range(10)
| mit |