code | repo_name | path | language | license | size
---|---|---|---|---|---
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import itertools
import socket
import ssl
import time
import uuid
import eventlet
import greenlet
import kombu
import kombu.connection
import kombu.entity
import kombu.messaging
from oslo.config import cfg
from trove.openstack.common import excutils
from trove.openstack.common.gettextutils import _ # noqa
from trove.openstack.common import network_utils
from trove.openstack.common.rpc import amqp as rpc_amqp
from trove.openstack.common.rpc import common as rpc_common
from trove.openstack.common import sslutils
kombu_opts = [
cfg.StrOpt('kombu_ssl_version',
default='',
help='SSL version to use (valid only if SSL enabled). '
'Valid values are TLSv1, SSLv23 and SSLv3. SSLv2 may '
'be available on some distributions'
),
cfg.StrOpt('kombu_ssl_keyfile',
default='',
help='SSL key file (valid only if SSL enabled)'),
cfg.StrOpt('kombu_ssl_certfile',
default='',
help='SSL cert file (valid only if SSL enabled)'),
cfg.StrOpt('kombu_ssl_ca_certs',
default='',
help=('SSL certification authority file '
'(valid only if SSL enabled)')),
cfg.StrOpt('rabbit_host',
default='localhost',
help='The RabbitMQ broker address where a single node is used'),
cfg.IntOpt('rabbit_port',
default=5672,
help='The RabbitMQ broker port where a single node is used'),
cfg.ListOpt('rabbit_hosts',
default=['$rabbit_host:$rabbit_port'],
help='RabbitMQ HA cluster host:port pairs'),
cfg.BoolOpt('rabbit_use_ssl',
default=False,
help='connect over SSL for RabbitMQ'),
cfg.StrOpt('rabbit_userid',
default='guest',
help='the RabbitMQ userid'),
cfg.StrOpt('rabbit_password',
default='guest',
help='the RabbitMQ password',
secret=True),
cfg.StrOpt('rabbit_virtual_host',
default='/',
help='the RabbitMQ virtual host'),
cfg.IntOpt('rabbit_retry_interval',
default=1,
help='how frequently to retry connecting with RabbitMQ'),
cfg.IntOpt('rabbit_retry_backoff',
default=2,
help='how long to backoff for between retries when connecting '
'to RabbitMQ'),
cfg.IntOpt('rabbit_max_retries',
default=0,
help='maximum retries with trying to connect to RabbitMQ '
'(the default of 0 implies an infinite retry count)'),
cfg.BoolOpt('rabbit_ha_queues',
default=False,
help='use H/A queues in RabbitMQ (x-ha-policy: all). '
'You need to wipe RabbitMQ database when '
'changing this option.'),
]
cfg.CONF.register_opts(kombu_opts)
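# Illustrative (hypothetical) configuration exercising the options above;
# values are examples only:
#   [DEFAULT]
#   rabbit_hosts = rabbit1:5672,rabbit2:5672
#   rabbit_use_ssl = True
#   kombu_ssl_ca_certs = /etc/ssl/certs/rabbit-ca.pem
#   rabbit_ha_queues = True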
LOG = rpc_common.LOG
def _get_queue_arguments(conf):
"""Construct the arguments for declaring a queue.
If the rabbit_ha_queues option is set, we declare a mirrored queue
as described here:
http://www.rabbitmq.com/ha.html
Setting x-ha-policy to all means that the queue will be mirrored
to all nodes in the cluster.
"""
return {'x-ha-policy': 'all'} if conf.rabbit_ha_queues else {}
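# For illustration: with rabbit_ha_queues=True this returns {'x-ha-policy': 'all'},
# which the consumer classes below pass through as the queue_arguments of every
# declared queue; otherwise an empty dict is used.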
class ConsumerBase(object):
"""Consumer base class."""
def __init__(self, channel, callback, tag, **kwargs):
"""Declare a queue on an amqp channel.
'channel' is the amqp channel to use
'callback' is the callback to call when messages are received
'tag' is a unique ID for the consumer on the channel
queue name, exchange name, and other kombu options are
passed in here as a dictionary.
"""
self.callback = callback
self.tag = str(tag)
self.kwargs = kwargs
self.queue = None
self.ack_on_error = kwargs.get('ack_on_error', True)
self.reconnect(channel)
def reconnect(self, channel):
"""Re-declare the queue after a rabbit reconnect."""
self.channel = channel
self.kwargs['channel'] = channel
self.queue = kombu.entity.Queue(**self.kwargs)
self.queue.declare()
def _callback_handler(self, message, callback):
"""Call callback with deserialized message.
Messages that are processed without exception are ack'ed.
If the message processing generates an exception, it will be
ack'ed if ack_on_error=True. Otherwise it will be .requeue()'ed.
"""
try:
msg = rpc_common.deserialize_msg(message.payload)
callback(msg)
except Exception:
if self.ack_on_error:
LOG.exception(_("Failed to process message"
" ... skipping it."))
message.ack()
else:
LOG.exception(_("Failed to process message"
" ... will requeue."))
message.requeue()
else:
message.ack()
def consume(self, *args, **kwargs):
"""Actually declare the consumer on the amqp channel. This will
start the flow of messages from the queue. Using the
Connection.iterconsume() iterator will process the messages,
calling the appropriate callback.
If a callback is specified in kwargs, use that. Otherwise,
use the callback passed during __init__()
If kwargs['nowait'] is False, this call will block until the broker
has confirmed the consumer; it does not wait for a message to arrive.
"""
options = {'consumer_tag': self.tag}
options['nowait'] = kwargs.get('nowait', False)
callback = kwargs.get('callback', self.callback)
if not callback:
raise ValueError("No callback defined")
def _callback(raw_message):
message = self.channel.message_to_python(raw_message)
self._callback_handler(message, callback)
self.queue.consume(*args, callback=_callback, **options)
def cancel(self):
"""Cancel the consuming from the queue, if it has started."""
try:
self.queue.cancel(self.tag)
except KeyError as e:
# NOTE(comstud): Kludge to get around a amqplib bug
if str(e) != "u'%s'" % self.tag:
raise
self.queue = None
class DirectConsumer(ConsumerBase):
"""Queue/consumer class for 'direct'."""
def __init__(self, conf, channel, msg_id, callback, tag, **kwargs):
"""Init a 'direct' queue.
'channel' is the amqp channel to use
'msg_id' is the msg_id to listen on
'callback' is the callback to call when messages are received
'tag' is a unique ID for the consumer on the channel
Other kombu options may be passed
"""
# Default options
options = {'durable': False,
'queue_arguments': _get_queue_arguments(conf),
'auto_delete': True,
'exclusive': False}
options.update(kwargs)
exchange = kombu.entity.Exchange(name=msg_id,
type='direct',
durable=options['durable'],
auto_delete=options['auto_delete'])
super(DirectConsumer, self).__init__(channel,
callback,
tag,
name=msg_id,
exchange=exchange,
routing_key=msg_id,
**options)
class TopicConsumer(ConsumerBase):
"""Consumer class for 'topic'."""
def __init__(self, conf, channel, topic, callback, tag, name=None,
exchange_name=None, **kwargs):
"""Init a 'topic' queue.
:param channel: the amqp channel to use
:param topic: the topic to listen on
:paramtype topic: str
:param callback: the callback to call when messages are received
:param tag: a unique ID for the consumer on the channel
:param name: optional queue name, defaults to topic
:paramtype name: str
Other kombu options may be passed as keyword arguments
"""
# Default options
options = {'durable': conf.amqp_durable_queues,
'queue_arguments': _get_queue_arguments(conf),
'auto_delete': conf.amqp_auto_delete,
'exclusive': False}
options.update(kwargs)
exchange_name = exchange_name or rpc_amqp.get_control_exchange(conf)
exchange = kombu.entity.Exchange(name=exchange_name,
type='topic',
durable=options['durable'],
auto_delete=options['auto_delete'])
super(TopicConsumer, self).__init__(channel,
callback,
tag,
name=name or topic,
exchange=exchange,
routing_key=topic,
**options)
class FanoutConsumer(ConsumerBase):
"""Consumer class for 'fanout'."""
def __init__(self, conf, channel, topic, callback, tag, **kwargs):
"""Init a 'fanout' queue.
'channel' is the amqp channel to use
'topic' is the topic to listen on
'callback' is the callback to call when messages are received
'tag' is a unique ID for the consumer on the channel
Other kombu options may be passed
"""
unique = uuid.uuid4().hex
exchange_name = '%s_fanout' % topic
queue_name = '%s_fanout_%s' % (topic, unique)
# Default options
options = {'durable': False,
'queue_arguments': _get_queue_arguments(conf),
'auto_delete': True,
'exclusive': False}
options.update(kwargs)
exchange = kombu.entity.Exchange(name=exchange_name, type='fanout',
durable=options['durable'],
auto_delete=options['auto_delete'])
super(FanoutConsumer, self).__init__(channel, callback, tag,
name=queue_name,
exchange=exchange,
routing_key=topic,
**options)
class Publisher(object):
"""Base Publisher class."""
def __init__(self, channel, exchange_name, routing_key, **kwargs):
"""Init the Publisher class with the exchange_name, routing_key,
and other options
"""
self.exchange_name = exchange_name
self.routing_key = routing_key
self.kwargs = kwargs
self.reconnect(channel)
def reconnect(self, channel):
"""Re-establish the Producer after a rabbit reconnection."""
self.exchange = kombu.entity.Exchange(name=self.exchange_name,
**self.kwargs)
self.producer = kombu.messaging.Producer(exchange=self.exchange,
channel=channel,
routing_key=self.routing_key)
def send(self, msg, timeout=None):
"""Send a message."""
if timeout:
#
# AMQP TTL is in milliseconds when set in the header.
#
self.producer.publish(msg, headers={'ttl': (timeout * 1000)})
else:
self.producer.publish(msg)
class DirectPublisher(Publisher):
"""Publisher class for 'direct'."""
def __init__(self, conf, channel, msg_id, **kwargs):
"""init a 'direct' publisher.
Kombu options may be passed as keyword args to override defaults
"""
options = {'durable': False,
'auto_delete': True,
'exclusive': False}
options.update(kwargs)
super(DirectPublisher, self).__init__(channel, msg_id, msg_id,
type='direct', **options)
class TopicPublisher(Publisher):
"""Publisher class for 'topic'."""
def __init__(self, conf, channel, topic, **kwargs):
"""init a 'topic' publisher.
Kombu options may be passed as keyword args to override defaults
"""
options = {'durable': conf.amqp_durable_queues,
'auto_delete': conf.amqp_auto_delete,
'exclusive': False}
options.update(kwargs)
exchange_name = rpc_amqp.get_control_exchange(conf)
super(TopicPublisher, self).__init__(channel,
exchange_name,
topic,
type='topic',
**options)
class FanoutPublisher(Publisher):
"""Publisher class for 'fanout'."""
def __init__(self, conf, channel, topic, **kwargs):
"""init a 'fanout' publisher.
Kombu options may be passed as keyword args to override defaults
"""
options = {'durable': False,
'auto_delete': True,
'exclusive': False}
options.update(kwargs)
super(FanoutPublisher, self).__init__(channel, '%s_fanout' % topic,
None, type='fanout', **options)
class NotifyPublisher(TopicPublisher):
"""Publisher class for 'notify'."""
def __init__(self, conf, channel, topic, **kwargs):
self.durable = kwargs.pop('durable', conf.amqp_durable_queues)
self.queue_arguments = _get_queue_arguments(conf)
super(NotifyPublisher, self).__init__(conf, channel, topic, **kwargs)
def reconnect(self, channel):
super(NotifyPublisher, self).reconnect(channel)
# NOTE(jerdfelt): Normally the consumer would create the queue, but
# we do this to ensure that messages don't get dropped if the
# consumer is started after we do
queue = kombu.entity.Queue(channel=channel,
exchange=self.exchange,
durable=self.durable,
name=self.routing_key,
routing_key=self.routing_key,
queue_arguments=self.queue_arguments)
queue.declare()
class Connection(object):
"""Connection object."""
pool = None
def __init__(self, conf, server_params=None):
self.consumers = []
self.consumer_thread = None
self.proxy_callbacks = []
self.conf = conf
self.max_retries = self.conf.rabbit_max_retries
# Try forever?
if self.max_retries <= 0:
self.max_retries = None
self.interval_start = self.conf.rabbit_retry_interval
self.interval_stepping = self.conf.rabbit_retry_backoff
# max retry-interval = 30 seconds
self.interval_max = 30
self.memory_transport = False
if server_params is None:
server_params = {}
# Keys to translate from server_params to kombu params
server_params_to_kombu_params = {'username': 'userid'}
ssl_params = self._fetch_ssl_params()
params_list = []
for adr in self.conf.rabbit_hosts:
hostname, port = network_utils.parse_host_port(
adr, default_port=self.conf.rabbit_port)
params = {
'hostname': hostname,
'port': port,
'userid': self.conf.rabbit_userid,
'password': self.conf.rabbit_password,
'virtual_host': self.conf.rabbit_virtual_host,
}
for sp_key, value in server_params.iteritems():
p_key = server_params_to_kombu_params.get(sp_key, sp_key)
params[p_key] = value
if self.conf.fake_rabbit:
params['transport'] = 'memory'
if self.conf.rabbit_use_ssl:
params['ssl'] = ssl_params
params_list.append(params)
self.params_list = params_list
self.memory_transport = self.conf.fake_rabbit
self.connection = None
self.reconnect()
def _fetch_ssl_params(self):
"""Handles fetching what ssl params should be used for the connection
(if any).
"""
ssl_params = dict()
# http://docs.python.org/library/ssl.html - ssl.wrap_socket
if self.conf.kombu_ssl_version:
ssl_params['ssl_version'] = sslutils.validate_ssl_version(
self.conf.kombu_ssl_version)
if self.conf.kombu_ssl_keyfile:
ssl_params['keyfile'] = self.conf.kombu_ssl_keyfile
if self.conf.kombu_ssl_certfile:
ssl_params['certfile'] = self.conf.kombu_ssl_certfile
if self.conf.kombu_ssl_ca_certs:
ssl_params['ca_certs'] = self.conf.kombu_ssl_ca_certs
# We might want to allow variations in the
# future with this?
ssl_params['cert_reqs'] = ssl.CERT_REQUIRED
# Return the extended behavior or just have the default behavior
return ssl_params or True
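# Illustrative result (hypothetical option values): with only
# kombu_ssl_ca_certs=/etc/ssl/certs/rabbit-ca.pem set, this returns
# {'ca_certs': '/etc/ssl/certs/rabbit-ca.pem', 'cert_reqs': ssl.CERT_REQUIRED}.
# With no kombu_ssl_* options set it returns True, which simply asks kombu to
# enable SSL with the library defaults.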
def _connect(self, params):
"""Connect to rabbit. Re-establish any queues that may have
been declared before if we are reconnecting. Exceptions should
be handled by the caller.
"""
if self.connection:
LOG.info(_("Reconnecting to AMQP server on "
"%(hostname)s:%(port)d") % params)
try:
self.connection.release()
except self.connection_errors:
pass
# Setting this in case the next statement fails, though
# it shouldn't be doing any network operations, yet.
self.connection = None
self.connection = kombu.connection.BrokerConnection(**params)
self.connection_errors = self.connection.connection_errors
if self.memory_transport:
# Kludge to speed up tests.
self.connection.transport.polling_interval = 0.0
self.consumer_num = itertools.count(1)
self.connection.connect()
self.channel = self.connection.channel()
# work around 'memory' transport bug in 1.1.3
if self.memory_transport:
self.channel._new_queue('ae.undeliver')
for consumer in self.consumers:
consumer.reconnect(self.channel)
LOG.info(_('Connected to AMQP server on %(hostname)s:%(port)d') %
params)
def reconnect(self):
"""Handles reconnecting and re-establishing queues.
Will retry up to self.max_retries number of times.
self.max_retries = 0 means to retry forever.
Sleep between tries, starting at self.interval_start
seconds, backing off self.interval_stepping number of seconds
each attempt.
"""
attempt = 0
while True:
params = self.params_list[attempt % len(self.params_list)]
attempt += 1
try:
self._connect(params)
return
except (IOError, self.connection_errors) as e:
pass
except Exception as e:
# NOTE(comstud): Unfortunately it's possible for amqplib
# to return an error not covered by its transport
# connection_errors in the case of a timeout waiting for
# a protocol response. (See paste link in LP888621)
# So, we check all exceptions for 'timeout' in them
# and try to reconnect in this case.
if 'timeout' not in str(e):
raise
log_info = {}
log_info['err_str'] = str(e)
log_info['max_retries'] = self.max_retries
log_info.update(params)
if self.max_retries and attempt == self.max_retries:
msg = _('Unable to connect to AMQP server on '
'%(hostname)s:%(port)d after %(max_retries)d '
'tries: %(err_str)s') % log_info
LOG.error(msg)
raise rpc_common.RPCException(msg)
if attempt == 1:
sleep_time = self.interval_start or 1
elif attempt > 1:
sleep_time += self.interval_stepping
if self.interval_max:
sleep_time = min(sleep_time, self.interval_max)
log_info['sleep_time'] = sleep_time
LOG.error(_('AMQP server on %(hostname)s:%(port)d is '
'unreachable: %(err_str)s. Trying again in '
'%(sleep_time)d seconds.') % log_info)
time.sleep(sleep_time)
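# For illustration, with the default rabbit_retry_interval=1 and
# rabbit_retry_backoff=2 the sleep between failed attempts grows as
# 1, 3, 5, 7, ... seconds, capped at interval_max (30 seconds).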
def ensure(self, error_callback, method, *args, **kwargs):
while True:
try:
return method(*args, **kwargs)
except (self.connection_errors, socket.timeout, IOError) as e:
if error_callback:
error_callback(e)
except Exception as e:
# NOTE(comstud): Unfortunately it's possible for amqplib
# to return an error not covered by its transport
# connection_errors in the case of a timeout waiting for
# a protocol response. (See paste link in LP888621)
# So, we check all exceptions for 'timeout' in them
# and try to reconnect in this case.
if 'timeout' not in str(e):
raise
if error_callback:
error_callback(e)
self.reconnect()
def get_channel(self):
"""Convenience call for bin/clear_rabbit_queues."""
return self.channel
def close(self):
"""Close/release this connection."""
self.cancel_consumer_thread()
self.wait_on_proxy_callbacks()
self.connection.release()
self.connection = None
def reset(self):
"""Reset a connection so it can be used again."""
self.cancel_consumer_thread()
self.wait_on_proxy_callbacks()
self.channel.close()
self.channel = self.connection.channel()
# work around 'memory' transport bug in 1.1.3
if self.memory_transport:
self.channel._new_queue('ae.undeliver')
self.consumers = []
def declare_consumer(self, consumer_cls, topic, callback):
"""Create a Consumer using the class that was passed in and
add it to our list of consumers
"""
def _connect_error(exc):
log_info = {'topic': topic, 'err_str': str(exc)}
LOG.error(_("Failed to declare consumer for topic '%(topic)s': "
"%(err_str)s") % log_info)
def _declare_consumer():
consumer = consumer_cls(self.conf, self.channel, topic, callback,
self.consumer_num.next())
self.consumers.append(consumer)
return consumer
return self.ensure(_connect_error, _declare_consumer)
def iterconsume(self, limit=None, timeout=None):
"""Return an iterator that will consume from all queues/consumers."""
info = {'do_consume': True}
def _error_callback(exc):
if isinstance(exc, socket.timeout):
LOG.debug(_('Timed out waiting for RPC response: %s') %
str(exc))
raise rpc_common.Timeout()
else:
LOG.exception(_('Failed to consume message from queue: %s') %
str(exc))
info['do_consume'] = True
def _consume():
if info['do_consume']:
queues_head = self.consumers[:-1] # not fanout.
queues_tail = self.consumers[-1] # fanout
for queue in queues_head:
queue.consume(nowait=True)
queues_tail.consume(nowait=False)
info['do_consume'] = False
return self.connection.drain_events(timeout=timeout)
for iteration in itertools.count(0):
if limit and iteration >= limit:
raise StopIteration
yield self.ensure(_error_callback, _consume)
def cancel_consumer_thread(self):
"""Cancel a consumer thread."""
if self.consumer_thread is not None:
self.consumer_thread.kill()
try:
self.consumer_thread.wait()
except greenlet.GreenletExit:
pass
self.consumer_thread = None
def wait_on_proxy_callbacks(self):
"""Wait for all proxy callback threads to exit."""
for proxy_cb in self.proxy_callbacks:
proxy_cb.wait()
def publisher_send(self, cls, topic, msg, timeout=None, **kwargs):
"""Send to a publisher based on the publisher class."""
def _error_callback(exc):
log_info = {'topic': topic, 'err_str': str(exc)}
LOG.exception(_("Failed to publish message to topic "
"'%(topic)s': %(err_str)s") % log_info)
def _publish():
publisher = cls(self.conf, self.channel, topic, **kwargs)
publisher.send(msg, timeout)
self.ensure(_error_callback, _publish)
def declare_direct_consumer(self, topic, callback):
"""Create a 'direct' queue.
In nova's use, this is generally a msg_id queue used for
responses for call/multicall
"""
self.declare_consumer(DirectConsumer, topic, callback)
def declare_topic_consumer(self, topic, callback=None, queue_name=None,
exchange_name=None, ack_on_error=True):
"""Create a 'topic' consumer."""
self.declare_consumer(functools.partial(TopicConsumer,
name=queue_name,
exchange_name=exchange_name,
ack_on_error=ack_on_error,
),
topic, callback)
def declare_fanout_consumer(self, topic, callback):
"""Create a 'fanout' consumer."""
self.declare_consumer(FanoutConsumer, topic, callback)
def direct_send(self, msg_id, msg):
"""Send a 'direct' message."""
self.publisher_send(DirectPublisher, msg_id, msg)
def topic_send(self, topic, msg, timeout=None):
"""Send a 'topic' message."""
self.publisher_send(TopicPublisher, topic, msg, timeout)
def fanout_send(self, topic, msg):
"""Send a 'fanout' message."""
self.publisher_send(FanoutPublisher, topic, msg)
def notify_send(self, topic, msg, **kwargs):
"""Send a notify message on a topic."""
self.publisher_send(NotifyPublisher, topic, msg, None, **kwargs)
def consume(self, limit=None):
"""Consume from all queues/consumers."""
it = self.iterconsume(limit=limit)
while True:
try:
it.next()
except StopIteration:
return
def consume_in_thread(self):
"""Consumer from all queues/consumers in a greenthread."""
@excutils.forever_retry_uncaught_exceptions
def _consumer_thread():
try:
self.consume()
except greenlet.GreenletExit:
return
if self.consumer_thread is None:
self.consumer_thread = eventlet.spawn(_consumer_thread)
return self.consumer_thread
def create_consumer(self, topic, proxy, fanout=False):
"""Create a consumer that calls a method in a proxy object."""
proxy_cb = rpc_amqp.ProxyCallback(
self.conf, proxy,
rpc_amqp.get_connection_pool(self.conf, Connection))
self.proxy_callbacks.append(proxy_cb)
if fanout:
self.declare_fanout_consumer(topic, proxy_cb)
else:
self.declare_topic_consumer(topic, proxy_cb)
def create_worker(self, topic, proxy, pool_name):
"""Create a worker that calls a method in a proxy object."""
proxy_cb = rpc_amqp.ProxyCallback(
self.conf, proxy,
rpc_amqp.get_connection_pool(self.conf, Connection))
self.proxy_callbacks.append(proxy_cb)
self.declare_topic_consumer(topic, proxy_cb, pool_name)
def join_consumer_pool(self, callback, pool_name, topic,
exchange_name=None, ack_on_error=True):
"""Register as a member of a group of consumers for a given topic from
the specified exchange.
Exactly one member of a given pool will receive each message.
A message will be delivered to multiple pools, if more than
one is created.
"""
callback_wrapper = rpc_amqp.CallbackWrapper(
conf=self.conf,
callback=callback,
connection_pool=rpc_amqp.get_connection_pool(self.conf,
Connection),
)
self.proxy_callbacks.append(callback_wrapper)
self.declare_topic_consumer(
queue_name=pool_name,
topic=topic,
exchange_name=exchange_name,
callback=callback_wrapper,
ack_on_error=ack_on_error,
)
def create_connection(conf, new=True):
"""Create a connection."""
return rpc_amqp.create_connection(
conf, new,
rpc_amqp.get_connection_pool(conf, Connection))
def multicall(conf, context, topic, msg, timeout=None):
"""Make a call that returns multiple times."""
return rpc_amqp.multicall(
conf, context, topic, msg, timeout,
rpc_amqp.get_connection_pool(conf, Connection))
def call(conf, context, topic, msg, timeout=None):
"""Sends a message on a topic and wait for a response."""
return rpc_amqp.call(
conf, context, topic, msg, timeout,
rpc_amqp.get_connection_pool(conf, Connection))
def cast(conf, context, topic, msg):
"""Sends a message on a topic without waiting for a response."""
return rpc_amqp.cast(
conf, context, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection))
def fanout_cast(conf, context, topic, msg):
"""Sends a message on a fanout exchange without waiting for a response."""
return rpc_amqp.fanout_cast(
conf, context, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection))
def cast_to_server(conf, context, server_params, topic, msg):
"""Sends a message on a topic to a specific server."""
return rpc_amqp.cast_to_server(
conf, context, server_params, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection))
def fanout_cast_to_server(conf, context, server_params, topic, msg):
"""Sends a message on a fanout exchange to a specific server."""
return rpc_amqp.fanout_cast_to_server(
conf, context, server_params, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection))
def notify(conf, context, topic, msg, envelope):
"""Sends a notification event on a topic."""
return rpc_amqp.notify(
conf, context, topic, msg,
rpc_amqp.get_connection_pool(conf, Connection),
envelope)
def cleanup():
return rpc_amqp.cleanup(Connection.pool)
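# Illustrative usage of the module-level API (topic and payload are
# hypothetical; conf and context come from the calling service):
#   conn = create_connection(cfg.CONF)
#   cast(cfg.CONF, context, 'compute.node1', {'method': 'ping', 'args': {}})
#   result = call(cfg.CONF, context, 'compute.node1',
#                 {'method': 'ping', 'args': {}}, timeout=10)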
| citrix-openstack-build/trove | trove/openstack/common/rpc/impl_kombu.py | Python | apache-2.0 | 32,685 |
input = """
:- not b.
b :- a, not a.
a v c.
"""
output = """
"""
| Yarrick13/hwasp | tests/wasp1/AllAnswerSets/choice_30.test.py | Python | apache-2.0 | 66 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Extract the net parameters from the pytorch file and store them
as python dict using cPickle. Must install pytorch.
'''
import torch.utils.model_zoo as model_zoo
import numpy as np
from argparse import ArgumentParser
import model
try:
import cPickle as pickle
except ModuleNotFoundError:
import pickle
URL_PREFIX = 'https://download.pytorch.org/models/'
model_urls = {
'densenet121': URL_PREFIX + 'densenet121-a639ec97.pth',
'densenet169': URL_PREFIX + 'densenet169-b2777c0a.pth',
'densenet201': URL_PREFIX + 'densenet201-c1103571.pth',
'densenet161': URL_PREFIX + 'densenet161-8d451a50.pth',
}
def rename(pname):
p1 = pname.find('/')
p2 = pname.rfind('/')
assert p1 != -1 and p2 != -1, 'param name = %s is not correct' % pname
if 'gamma' in pname:
suffix = 'weight'
elif 'beta' in pname:
suffix = 'bias'
elif 'mean' in pname:
suffix = 'running_mean'
elif 'var' in pname:
suffix = 'running_var'
else:
suffix = pname[p2 + 1:]
return pname[p1+1:p2] + '.' + suffix
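# For illustration (hypothetical torch parameter name):
#   rename('features/denseblock1/norm1/gamma') -> 'denseblock1/norm1.weight'
# i.e. the leading path segment is dropped and the trailing segment is mapped
# to the matching torch suffix.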
if __name__ == '__main__':
parser = ArgumentParser(description='Convert params from torch to python '
'dict. ')
parser.add_argument("depth", type=int, choices=[121, 169, 201, 161])
parser.add_argument("outfile")
parser.add_argument('nb_classes', default=1000, type=int)
args = parser.parse_args()
net = model.create_net(args.depth, args.nb_classes)
url = 'densenet%d' % args.depth
torch_dict = model_zoo.load_url(model_urls[url])
params = {'SINGA_VERSION': 1101}
# resolve dict keys name mismatch problem
print(len(net.param_names()), len(torch_dict.keys()))
for pname, pval, torch_name in\
zip(net.param_names(), net.param_values(), torch_dict.keys()):
#torch_name = rename(pname)
ary = torch_dict[torch_name].numpy()
ary = np.array(ary, dtype=np.float32)
if len(ary.shape) == 4:
params[pname] = np.reshape(ary, (ary.shape[0], -1))
else:
params[pname] = np.transpose(ary)
#pdb.set_trace()
assert pval.shape == params[pname].shape, 'shape mismatch for {0}, \
expected {1} in torch model, got {2} in singa model'.\
format(pname, params[pname].shape, pval.shape)
with open(args.outfile, 'wb') as fd:
pickle.dump(params, fd) | kaiping/incubator-singa | examples/imagenet/densenet/convert.py | Python | apache-2.0 | 3,187 |
''' author@esilgard '''
# Copyright (c) 2013-2016 Fred Hutchinson Cancer Research Center
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys, os, codecs
import output_results, make_text_output_directory, metadata
from datetime import datetime
import global_strings as gb
'''
initial script of the Argos/NLP engine to deal with command line parsing and module outputs
should exit with a non-zero status for any fatal errors and
output warnings and results in json format to CWD in the file provided in cmd line args
'''
## declare output dictionary for values, warnings, and metadata
OUTPUT_DICTIONARY = {}
OUTPUT_DICTIONARY[gb.ERRS] = []
## path to the nlp_engine.py script ##
NLP_ENGINE_PATH = os.path.dirname(os.path.realpath(__file__)) + os.path.sep
ORIGINAL_WD = os.getcwd()
## timeit variable for performance testing ##
BEGIN = datetime.today()
try:
## grab version number from txt file which updates with git post-commit hook script
##(assume utf-8, but back up to utf-16)
__version__ = codecs.open(NLP_ENGINE_PATH + 'version', 'rb', encoding='utf-8')\
.readlines()[0].strip()
except UnicodeError:
try:
__version__ = codecs.open(NLP_ENGINE_PATH + 'version', 'rb', encoding='utf-16')\
.readlines()[0].strip()
except IOError:
sys.stderr.write('FATAL ERROR: could not locate or parse version file.')
sys.exit(1)
## path to file containing command line flags and descriptions ##
## in the format -char<tab>required-or-optional<tab>description<tab>verbose_description (for help and error messages) ##
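## e.g. a hypothetical entry: -f<tab>required<tab>input file<tab>full path of the file (or directory of files) to process ##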
try:
COMMAND_LINE_FLAG_FILE = open(NLP_ENGINE_PATH + 'command_line_flags.txt', 'r')
try:
## set of required flags for program to run successfully ##
REQUIRED_FLAGS = set([])
## dictionary of actual flags:argument values ##
ARGUMENTS = {}
## dictionary of flag:tuple(flag description,verbose flag description) ##
COMMAND_LINE_FLAGS = {}
for line in COMMAND_LINE_FLAG_FILE.readlines():
line = line.strip().split('\t')
if line[1] == 'required':
REQUIRED_FLAGS.add(line[0])
COMMAND_LINE_FLAGS[line[0]] = (line[2], line[3])
COMMAND_LINE_FLAG_FILE.close()
ARGS = sys.argv[1:]
except IOError:
sys.stderr.write('FATAL ERROR: command line flag dictionary could not be established \
from file, potential formatting error. program aborted.')
sys.exit(1)
except EnvironmentError:
sys.stderr.write('FATAL ERROR: command line flag file not found. program aborted.')
sys.exit(1)
## parse the ARGUMENTS from arg1 on into a dictionary - notify user of unrecognized flags
## NOTE - this does assume that flags start in the first position
## and every other argument is a flag
for index in range(0, len(ARGS)-1, 2):
if ARGS[index] in COMMAND_LINE_FLAGS:
ARGUMENTS[ARGS[index]] = ARGS[index+1]
else:
OUTPUT_DICTIONARY[gb.ERRS].append({gb.ERR_TYPE: 'Warning', gb.ERR_STR: 'nonfatal error: \
unrecognized flag: ' + ARGS[index] + ', this flag will be ignored. Refer to ' + \
NLP_ENGINE_PATH + 'command_line_flags.txt for a complete list and description of command line flags'})
## build the dictionary for the json output ##
OUTPUT_DICTIONARY[gb.CNTL] = {}
OUTPUT_DICTIONARY[gb.CNTL]["engineVersion"] = __version__
OUTPUT_DICTIONARY[gb.CNTL]["referenceId"] = "12345"
OUTPUT_DICTIONARY[gb.CNTL]["docVersion"] = "document version"
OUTPUT_DICTIONARY[gb.CNTL]["source"] = "document source"
OUTPUT_DICTIONARY[gb.CNTL]["docDate"] = "doc date"
OUTPUT_DICTIONARY[gb.CNTL]["processDate"] = str(datetime.today().isoformat())
metadata = metadata.get(NLP_ENGINE_PATH, ARGUMENTS)
OUTPUT_DICTIONARY[gb.CNTL]["metadata"] = metadata
OUTPUT_DICTIONARY[gb.REPORTS] = []
## add in flag info to the json output dictionary
OUTPUT_DICTIONARY[gb.CNTL]["docName"] = ARGUMENTS.get('-f')
OUTPUT_DICTIONARY[gb.CNTL]["docType"] = ARGUMENTS.get('-t')
OUTPUT_DICTIONARY[gb.CNTL]["diseaseGroup"] = ARGUMENTS.get('-g')
## ERR out for missing flags that are required ##
MISSING_FLAGS = REQUIRED_FLAGS-set(ARGUMENTS.keys())
if len(MISSING_FLAGS) > 0:
for each_flag in MISSING_FLAGS:
sys.stderr.write('FATAL ERROR: missing required flag: ' + each_flag + ' ' + COMMAND_LINE_FLAGS[each_flag][1])
sys.exit(1)
else:
## import and call appropriate module ##
try:
DOCUMENT_PROCESSER = __import__('fhcrc_'+ARGUMENTS.get('-t'), globals(), locals(), ['process'])
except ImportError:
sys.stderr.write('FATAL ERROR: could not import module ' + ARGUMENTS.get('-t'))
sys.exit(1)
MKDIR_ERRORS = make_text_output_directory.main(ARGUMENTS.get('-f'))
if MKDIR_ERRORS[0] == Exception:
sys.stderr.write(MKDIR_ERRORS[1])
sys.exit(1)
OUTPUT, ERRORS, RETURN_TYPE = DOCUMENT_PROCESSER.process.main(ARGUMENTS)
if RETURN_TYPE == Exception:
print ERRORS
sys.stderr.write('\n'.join([err[gb.ERR_STR] for err in ERRORS]))
sys.exit(1)
else:
OUTPUT_DICTIONARY[gb.REPORTS] = OUTPUT
OUTPUT_DICTIONARY[gb.ERRS] = ERRORS
if MKDIR_ERRORS[0] == dict:
OUTPUT_DICTIONARY[gb.ERRS].append(MKDIR_ERRORS[1])
## iterate through errors - CRASH for Exceptions and output Warnings
if OUTPUT_DICTIONARY[gb.ERRS]:
CRASH = False
for error_dictionary in OUTPUT_DICTIONARY[gb.ERRS]:
if error_dictionary and error_dictionary[gb.ERR_TYPE] == 'Exception':
CRASH = True
sys.stderr.write(error_dictionary[gb.ERR_STR])
if CRASH == True:
sys.exit(1)
## output results to file ##
OUTPUT_RETURN = output_results.main(ARGUMENTS.get('-o'), OUTPUT_DICTIONARY)
if OUTPUT_RETURN:
sys.exit(1)
## timeit - print out the amount of time it took to process all the reports ##
## print (datetime.today()-BEGIN).days * 86400 + (datetime.today()-BEGIN).seconds, \
##'seconds to process '+str(len(OUTPUT_DICTIONARY["reports"]))+' reports'
| LabKey/argos_nlp | nlp_engine.py | Python | apache-2.0 | 6,574 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log
from neutron.agent import agent_extensions_manager as agent_ext_manager
from neutron.conf.agent import agent_extensions_manager as agent_ext_mgr_config
LOG = log.getLogger(__name__)
L2_AGENT_EXT_MANAGER_NAMESPACE = 'neutron.agent.l2.extensions'
def register_opts(conf):
agent_ext_mgr_config.register_agent_ext_manager_opts(conf)
class L2AgentExtensionsManager(agent_ext_manager.AgentExtensionsManager):
"""Manage l2 agent extensions. The handle_port and delete_port methods are
guaranteed to be attributes of each extension because they have been
marked as abc.abstractmethod in the extensions' abstract class.
"""
def __init__(self, conf):
super(L2AgentExtensionsManager, self).__init__(conf,
L2_AGENT_EXT_MANAGER_NAMESPACE)
def handle_port(self, context, data):
"""Notify all agent extensions to handle port."""
for extension in self:
if hasattr(extension.obj, 'handle_port'):
extension.obj.handle_port(context, data)
else:
LOG.error(
"Agent Extension '%(name)s' does not "
"implement method handle_port",
{'name': extension.name}
)
def delete_port(self, context, data):
"""Notify all agent extensions to delete port."""
for extension in self:
if hasattr(extension.obj, 'delete_port'):
extension.obj.delete_port(context, data)
else:
LOG.error(
"Agent Extension '%(name)s' does not "
"implement method delete_port",
{'name': extension.name}
)
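# Illustrative usage (a sketch; `conf` is the oslo.config object loaded by the
# calling agent, and `context`/`port_data` are hypothetical):
#   register_opts(conf)
#   ext_mgr = L2AgentExtensionsManager(conf)
#   ext_mgr.handle_port(context, port_data)   # dispatched to every loaded extension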
| noironetworks/neutron | neutron/agent/l2/l2_agent_extensions_manager.py | Python | apache-2.0 | 2,311 |
from Foundation import *
from PyObjCTools.TestSupport import *
try:
unicode
except NameError:
unicode = str
class TestNSMetaData (TestCase):
def testConstants(self):
self.assertIsInstance(NSMetadataQueryDidStartGatheringNotification, unicode)
self.assertIsInstance(NSMetadataQueryGatheringProgressNotification, unicode)
self.assertIsInstance(NSMetadataQueryDidFinishGatheringNotification, unicode)
self.assertIsInstance(NSMetadataQueryDidUpdateNotification, unicode)
self.assertIsInstance(NSMetadataQueryResultContentRelevanceAttribute, unicode)
self.assertIsInstance(NSMetadataQueryUserHomeScope, unicode)
self.assertIsInstance(NSMetadataQueryLocalComputerScope, unicode)
self.assertIsInstance(NSMetadataQueryNetworkScope, unicode)
@min_os_level('10.7')
def testConstants10_7(self):
self.assertIsInstance(NSMetadataQueryLocalDocumentsScope, unicode)
self.assertIsInstance(NSMetadataQueryUbiquitousDocumentsScope, unicode)
self.assertIsInstance(NSMetadataQueryUbiquitousDataScope, unicode)
self.assertIsInstance(NSMetadataItemFSNameKey, unicode)
self.assertIsInstance(NSMetadataItemDisplayNameKey, unicode)
self.assertIsInstance(NSMetadataItemURLKey, unicode)
self.assertIsInstance(NSMetadataItemPathKey, unicode)
self.assertIsInstance(NSMetadataItemFSSizeKey, unicode)
self.assertIsInstance(NSMetadataItemFSCreationDateKey, unicode)
self.assertIsInstance(NSMetadataItemFSContentChangeDateKey, unicode)
self.assertIsInstance(NSMetadataItemIsUbiquitousKey, unicode)
self.assertIsInstance(NSMetadataUbiquitousItemHasUnresolvedConflictsKey, unicode)
self.assertIsInstance(NSMetadataUbiquitousItemIsDownloadedKey, unicode)
self.assertIsInstance(NSMetadataUbiquitousItemIsDownloadingKey, unicode)
self.assertIsInstance(NSMetadataUbiquitousItemIsUploadedKey, unicode)
self.assertIsInstance(NSMetadataUbiquitousItemIsUploadingKey, unicode)
self.assertIsInstance(NSMetadataUbiquitousItemPercentDownloadedKey, unicode)
self.assertIsInstance(NSMetadataUbiquitousItemPercentUploadedKey, unicode)
def testMethods(self):
self.assertResultIsBOOL(NSMetadataQuery.startQuery)
self.assertResultIsBOOL(NSMetadataQuery.isStarted)
self.assertResultIsBOOL(NSMetadataQuery.isGathering)
self.assertResultIsBOOL(NSMetadataQuery.isStopped)
if __name__ == "__main__":
main()
| albertz/music-player | mac/pyobjc-framework-Cocoa/PyObjCTest/test_nsmetadata.py | Python | bsd-2-clause | 2,524 |
import pydev_log
import traceback
import pydevd_resolver
from pydevd_constants import * #@UnusedWildImport
from types import * #@UnusedWildImport
try:
from urllib import quote
except:
from urllib.parse import quote #@UnresolvedImport
try:
from xml.sax.saxutils import escape
def makeValidXmlValue(s):
return escape(s, {'"': '&quot;'})
except:
#Simple replacement if it's not there.
def makeValidXmlValue(s):
return s.replace('<', '&lt;').replace('>', '&gt;').replace('"', '&quot;')
class ExceptionOnEvaluate:
def __init__(self, result):
self.result = result
#------------------------------------------------------------------------------------------------------ resolvers in map
if not sys.platform.startswith("java"):
typeMap = [
#None means that it should not be treated as a compound variable
#isintance does not accept a tuple on some versions of python, so, we must declare it expanded
(type(None), None,),
(int, None),
(float, None),
(complex, None),
(str, None),
(tuple, pydevd_resolver.tupleResolver),
(list, pydevd_resolver.tupleResolver),
(dict, pydevd_resolver.dictResolver),
]
try:
typeMap.append((long, None))
except:
pass #not available on all python versions
try:
typeMap.append((unicode, None))
except:
pass #not available on all python versions
try:
typeMap.append((set, pydevd_resolver.setResolver))
except:
pass #not available on all python versions
try:
typeMap.append((frozenset, pydevd_resolver.setResolver))
except:
pass #not available on all python versions
else: #platform is java
from org.python import core #@UnresolvedImport
typeMap = [
(core.PyNone, None),
(core.PyInteger, None),
(core.PyLong, None),
(core.PyFloat, None),
(core.PyComplex, None),
(core.PyString, None),
(core.PyTuple, pydevd_resolver.tupleResolver),
(core.PyList, pydevd_resolver.tupleResolver),
(core.PyDictionary, pydevd_resolver.dictResolver),
(core.PyStringMap, pydevd_resolver.dictResolver),
]
if hasattr(core, 'PyJavaInstance'):
#Jython 2.5b3 removed it.
typeMap.append((core.PyJavaInstance, pydevd_resolver.instanceResolver))
def getType(o):
""" returns a triple (typeObject, typeString, resolver
resolver != None means that variable is a container,
and should be displayed as a hierarchy.
Use the resolver to get its attributes.
All container objects should have a resolver.
"""
try:
type_object = type(o)
type_name = type_object.__name__
except:
#This happens for org.python.core.InitModule
return 'Unable to get Type', 'Unable to get Type', None
try:
if type_name == 'org.python.core.PyJavaInstance':
return type_object, type_name, pydevd_resolver.instanceResolver
if type_name == 'org.python.core.PyArray':
return type_object, type_name, pydevd_resolver.jyArrayResolver
for t in typeMap:
if isinstance(o, t[0]):
return type_object, type_name, t[1]
except:
traceback.print_exc()
#no match return default
return type_object, type_name, pydevd_resolver.defaultResolver
def frameVarsToXML(frame_f_locals):
""" dumps frame variables to XML
<var name="var_name" scope="local" type="type" value="value"/>
"""
xml = ""
keys = frame_f_locals.keys()
if hasattr(keys, 'sort'):
keys.sort() #Python 3.0 does not have it
else:
keys = sorted(keys) #Jython 2.1 does not have it
for k in keys:
try:
v = frame_f_locals[k]
xml += varToXML(v, str(k))
except Exception:
traceback.print_exc()
pydev_log.error("Unexpected error, recovered safely.\n")
return xml
def varToXML(val, name, doTrim=True):
""" single variable or dictionary to xml representation """
is_exception_on_eval = isinstance(val, ExceptionOnEvaluate)
if is_exception_on_eval:
v = val.result
else:
v = val
type, typeName, resolver = getType(v)
try:
if hasattr(v, '__class__'):
try:
cName = str(v.__class__)
if cName.find('.') != -1:
cName = cName.split('.')[-1]
elif cName.find("'") != -1: #does not have '.' (could be something like <type 'int'>)
cName = cName[cName.index("'") + 1:]
if cName.endswith("'>"):
cName = cName[:-2]
except:
cName = str(v.__class__)
value = '%s: %s' % (cName, v)
else:
value = str(v)
except:
try:
value = repr(v)
except:
value = 'Unable to get repr for %s' % v.__class__
try:
name = quote(name, '/>_= ') #TODO: Fix PY-5834 without using quote
except:
pass
xml = '<var name="%s" type="%s"' % (makeValidXmlValue(name), makeValidXmlValue(typeName))
if value:
#cannot be too big... communication may not handle it.
if len(value) > MAXIMUM_VARIABLE_REPRESENTATION_SIZE and doTrim:
value = value[0:MAXIMUM_VARIABLE_REPRESENTATION_SIZE]
value += '...'
#fix to work with unicode values
try:
if not IS_PY3K:
if isinstance(value, unicode):
value = value.encode('utf-8')
else:
if isinstance(value, bytes):
value = value.encode('utf-8')
except TypeError: #in java, unicode is a function
pass
xmlValue = ' value="%s"' % (makeValidXmlValue(quote(value, '/>_= ')))
else:
xmlValue = ''
if is_exception_on_eval:
xmlCont = ' isErrorOnEval="True"'
else:
if resolver is not None:
xmlCont = ' isContainer="True"'
else:
xmlCont = ''
return ''.join((xml, xmlValue, xmlCont, ' />\n'))
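# For illustration, varToXML(3, 'x') yields roughly:
#   <var name="x" type="int" value="int%3A 3" />
# (the value is URL-quoted and XML-escaped before being embedded).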
| akiokio/centralfitestoque | src/.pycharm_helpers/pydev/pydevd_xml.py | Python | bsd-2-clause | 6,287 |
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
| DataDog/integrations-extras | traefik/tests/__init__.py | Python | bsd-3-clause | 107 |
from .. utils import TranspileTestCase, BuiltinFunctionTestCase
class AbsTests(TranspileTestCase):
def test_abs_not_implemented(self):
self.assertCodeExecution("""
class NotAbsLike:
pass
x = NotAbsLike()
try:
print(abs(x))
except TypeError as err:
print(err)
""")
class BuiltinAbsFunctionTests(BuiltinFunctionTestCase, TranspileTestCase):
functions = ["abs"]
not_implemented = [
'test_class',
'test_frozenset',
]
| pombredanne/voc | tests/builtins/test_abs.py | Python | bsd-3-clause | 566 |
from __future__ import absolute_import
import six
from base64 import b64encode
from django.core.urlresolvers import reverse
from sentry.models import UserAvatar
from sentry.testutils import APITestCase
class UserAvatarTest(APITestCase):
def test_get(self):
user = self.create_user(email='[email protected]')
self.login_as(user=user)
url = reverse('sentry-api-0-user-avatar', kwargs={
'user_id': 'me',
})
response = self.client.get(url, format='json')
assert response.status_code == 200, response.content
assert response.data['id'] == six.text_type(user.id)
assert response.data['avatar']['avatarType'] == 'letter_avatar'
assert response.data['avatar']['avatarUuid'] is None
def test_gravatar(self):
user = self.create_user(email='[email protected]')
self.login_as(user=user)
url = reverse('sentry-api-0-user-avatar', kwargs={
'user_id': 'me',
})
response = self.client.put(url, data={'avatar_type': 'gravatar'}, format='json')
avatar = UserAvatar.objects.get(user=user)
assert response.status_code == 200, response.content
assert avatar.get_avatar_type_display() == 'gravatar'
def test_upload(self):
user = self.create_user(email='[email protected]')
self.login_as(user=user)
url = reverse('sentry-api-0-user-avatar', kwargs={
'user_id': 'me',
})
response = self.client.put(url, data={
'avatar_type': 'upload',
'avatar_photo': b64encode(self.load_fixture('avatar.jpg')),
}, format='json')
avatar = UserAvatar.objects.get(user=user)
assert response.status_code == 200, response.content
assert avatar.get_avatar_type_display() == 'upload'
assert avatar.file
def test_put_bad(self):
user = self.create_user(email='[email protected]')
UserAvatar.objects.create(user=user)
self.login_as(user=user)
url = reverse('sentry-api-0-user-avatar', kwargs={
'user_id': 'me',
})
response = self.client.put(url, data={'avatar_type': 'upload'}, format='json')
avatar = UserAvatar.objects.get(user=user)
assert response.status_code == 400
assert avatar.get_avatar_type_display() == 'letter_avatar'
response = self.client.put(url, data={'avatar_type': 'foo'}, format='json')
assert response.status_code == 400
assert avatar.get_avatar_type_display() == 'letter_avatar'
def test_put_forbidden(self):
user = self.create_user(email='[email protected]')
user2 = self.create_user(email='[email protected]')
self.login_as(user=user)
url = reverse('sentry-api-0-user-avatar', kwargs={
'user_id': user2.id,
})
response = self.client.put(url, data={'avatar_type': 'gravatar'}, format='json')
assert response.status_code == 403
| JackDanger/sentry | tests/sentry/api/endpoints/test_user_avatar.py | Python | bsd-3-clause | 2,972 |
import json
import httplib
from conpaas.core import https
def _check(response):
code, body = response
if code != httplib.OK: raise Exception('Received http response code %d' % (code))
data = json.loads(body)
if data['error']: raise Exception(data['error'])
else: return data['result']
def check_agent_process(host, port):
method = 'check_agent_process'
return _check(https.client.jsonrpc_get(host, port, '/', method))
def startup(host, port):
method = 'startup'
return _check(https.client.jsonrpc_post(host, port, '/', method))
def get_helloworld(host, port):
method = 'get_helloworld'
return _check(https.client.jsonrpc_get(host, port, '/', method))
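# Example usage (hypothetical agent address); _check() raises if the agent
# reports an error:
#   print get_helloworld('192.168.1.10', 5555)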
| ema/conpaas | conpaas-services/src/conpaas/services/helloworld/agent/client.py | Python | bsd-3-clause | 679 |
from __future__ import absolute_import, unicode_literals
import datetime
import pytest
from case import MagicMock, call, patch, skip
from kombu import Connection
from kombu.five import Empty
def _create_mock_connection(url='', **kwargs):
from kombu.transport import mongodb # noqa
class _Channel(mongodb.Channel):
# reset _fanout_queues for each instance
_fanout_queues = {}
collections = {}
now = datetime.datetime.utcnow()
def _create_client(self):
mock = MagicMock(name='client')
# we need new mock object for every collection
def get_collection(name):
try:
return self.collections[name]
except KeyError:
mock = self.collections[name] = MagicMock(
name='collection:%s' % name)
return mock
mock.__getitem__.side_effect = get_collection
return mock
def get_now(self):
return self.now
class Transport(mongodb.Transport):
Channel = _Channel
return Connection(url, transport=Transport, **kwargs)
@skip.unless_module('pymongo')
class test_mongodb_uri_parsing:
def test_defaults(self):
url = 'mongodb://'
channel = _create_mock_connection(url).default_channel
hostname, dbname, options = channel._parse_uri()
assert dbname == 'kombu_default'
assert hostname == 'mongodb://127.0.0.1'
def test_custom_host(self):
url = 'mongodb://localhost'
channel = _create_mock_connection(url).default_channel
hostname, dbname, options = channel._parse_uri()
assert dbname == 'kombu_default'
def test_custom_database(self):
url = 'mongodb://localhost/dbname'
channel = _create_mock_connection(url).default_channel
hostname, dbname, options = channel._parse_uri()
assert dbname == 'dbname'
def test_custom_credentials(self):
url = 'mongodb://localhost/dbname'
channel = _create_mock_connection(
url, userid='foo', password='bar').default_channel
hostname, dbname, options = channel._parse_uri()
assert hostname == 'mongodb://foo:bar@localhost/dbname'
assert dbname == 'dbname'
def test_correct_readpreference(self):
url = 'mongodb://localhost/dbname?readpreference=nearest'
channel = _create_mock_connection(url).default_channel
hostname, dbname, options = channel._parse_uri()
assert options['readpreference'] == 'nearest'
class BaseMongoDBChannelCase:
def _get_method(self, cname, mname):
collection = getattr(self.channel, cname)
method = getattr(collection, mname.split('.', 1)[0])
for bit in mname.split('.')[1:]:
method = getattr(method.return_value, bit)
return method
def set_operation_return_value(self, cname, mname, *values):
method = self._get_method(cname, mname)
if len(values) == 1:
method.return_value = values[0]
else:
method.side_effect = values
def declare_droadcast_queue(self, queue):
self.channel.exchange_declare('fanout_exchange', type='fanout')
self.channel._queue_bind('fanout_exchange', 'foo', '*', queue)
assert queue in self.channel._broadcast_cursors
def get_broadcast(self, queue):
return self.channel._broadcast_cursors[queue]
def set_broadcast_return_value(self, queue, *values):
self.declare_droadcast_queue(queue)
cursor = MagicMock(name='cursor')
cursor.__iter__.return_value = iter(values)
self.channel._broadcast_cursors[queue]._cursor = iter(cursor)
def assert_collection_accessed(self, *collections):
self.channel.client.__getitem__.assert_has_calls(
[call(c) for c in collections], any_order=True)
def assert_operation_has_calls(self, cname, mname, calls, any_order=False):
method = self._get_method(cname, mname)
method.assert_has_calls(calls, any_order=any_order)
def assert_operation_called_with(self, cname, mname, *args, **kwargs):
self.assert_operation_has_calls(cname, mname, [call(*args, **kwargs)])
@skip.unless_module('pymongo')
class test_mongodb_channel(BaseMongoDBChannelCase):
def setup(self):
self.connection = _create_mock_connection()
self.channel = self.connection.default_channel
# Tests for "public" channel interface
def test_new_queue(self):
self.channel._new_queue('foobar')
self.channel.client.assert_not_called()
def test_get(self):
import pymongo
self.set_operation_return_value('messages', 'find_and_modify', {
'_id': 'docId', 'payload': '{"some": "data"}',
})
event = self.channel._get('foobar')
self.assert_collection_accessed('messages')
self.assert_operation_called_with(
'messages', 'find_and_modify',
query={'queue': 'foobar'},
remove=True,
sort=[
('priority', pymongo.ASCENDING),
],
)
assert event == {'some': 'data'}
self.set_operation_return_value('messages', 'find_and_modify', None)
with pytest.raises(Empty):
self.channel._get('foobar')
def test_get_fanout(self):
self.set_broadcast_return_value('foobar', {
'_id': 'docId1', 'payload': '{"some": "data"}',
})
event = self.channel._get('foobar')
self.assert_collection_accessed('messages.broadcast')
assert event == {'some': 'data'}
with pytest.raises(Empty):
self.channel._get('foobar')
def test_put(self):
self.channel._put('foobar', {'some': 'data'})
self.assert_collection_accessed('messages')
self.assert_operation_called_with('messages', 'insert', {
'queue': 'foobar',
'priority': 9,
'payload': '{"some": "data"}',
})
def test_put_fanout(self):
self.declare_droadcast_queue('foobar')
self.channel._put_fanout('foobar', {'some': 'data'}, 'foo')
self.assert_collection_accessed('messages.broadcast')
self.assert_operation_called_with('broadcast', 'insert', {
'queue': 'foobar', 'payload': '{"some": "data"}',
})
def test_size(self):
self.set_operation_return_value('messages', 'find.count', 77)
result = self.channel._size('foobar')
self.assert_collection_accessed('messages')
self.assert_operation_called_with(
'messages', 'find', {'queue': 'foobar'},
)
assert result == 77
def test_size_fanout(self):
self.declare_droadcast_queue('foobar')
cursor = MagicMock(name='cursor')
cursor.get_size.return_value = 77
self.channel._broadcast_cursors['foobar'] = cursor
result = self.channel._size('foobar')
assert result == 77
def test_purge(self):
self.set_operation_return_value('messages', 'find.count', 77)
result = self.channel._purge('foobar')
self.assert_collection_accessed('messages')
self.assert_operation_called_with(
'messages', 'remove', {'queue': 'foobar'},
)
assert result == 77
def test_purge_fanout(self):
self.declare_droadcast_queue('foobar')
cursor = MagicMock(name='cursor')
cursor.get_size.return_value = 77
self.channel._broadcast_cursors['foobar'] = cursor
result = self.channel._purge('foobar')
cursor.purge.assert_any_call()
assert result == 77
def test_get_table(self):
state_table = [('foo', '*', 'foo')]
stored_table = [('bar', '*', 'bar')]
self.channel.exchange_declare('test_exchange')
self.channel.state.exchanges['test_exchange']['table'] = state_table
self.set_operation_return_value('routing', 'find', [{
'_id': 'docId',
'routing_key': stored_table[0][0],
'pattern': stored_table[0][1],
'queue': stored_table[0][2],
}])
result = self.channel.get_table('test_exchange')
self.assert_collection_accessed('messages.routing')
self.assert_operation_called_with(
'routing', 'find', {'exchange': 'test_exchange'},
)
assert set(result) == frozenset(state_table) | frozenset(stored_table)
def test_queue_bind(self):
self.channel._queue_bind('test_exchange', 'foo', '*', 'foo')
self.assert_collection_accessed('messages.routing')
self.assert_operation_called_with(
'routing', 'update',
{'queue': 'foo', 'pattern': '*',
'routing_key': 'foo', 'exchange': 'test_exchange'},
{'queue': 'foo', 'pattern': '*',
'routing_key': 'foo', 'exchange': 'test_exchange'},
upsert=True,
)
def test_queue_delete(self):
self.channel.queue_delete('foobar')
self.assert_collection_accessed('messages.routing')
self.assert_operation_called_with(
'routing', 'remove', {'queue': 'foobar'},
)
def test_queue_delete_fanout(self):
self.declare_droadcast_queue('foobar')
cursor = MagicMock(name='cursor')
self.channel._broadcast_cursors['foobar'] = cursor
self.channel.queue_delete('foobar')
cursor.close.assert_any_call()
assert 'foobar' not in self.channel._broadcast_cursors
assert 'foobar' not in self.channel._fanout_queues
# Tests for channel internals
def test_create_broadcast(self):
self.channel._create_broadcast(self.channel.client)
self.channel.client.create_collection.assert_called_with(
'messages.broadcast', capped=True, size=100000,
)
def test_ensure_indexes(self):
self.channel._ensure_indexes(self.channel.client)
self.assert_operation_called_with(
'messages', 'ensure_index',
[('queue', 1), ('priority', 1), ('_id', 1)],
background=True,
)
self.assert_operation_called_with(
'broadcast', 'ensure_index',
[('queue', 1)],
)
self.assert_operation_called_with(
'routing', 'ensure_index', [('queue', 1), ('exchange', 1)],
)
def test_create_broadcast_cursor(self):
import pymongo
with patch.object(pymongo, 'version_tuple', (2, )):
self.channel._create_broadcast_cursor(
'fanout_exchange', 'foo', '*', 'foobar',
)
self.assert_collection_accessed('messages.broadcast')
self.assert_operation_called_with(
'broadcast', 'find',
tailable=True,
query={'queue': 'fanout_exchange'},
)
if pymongo.version_tuple >= (3, ):
self.channel._create_broadcast_cursor(
'fanout_exchange1', 'foo', '*', 'foobar',
)
self.assert_collection_accessed('messages.broadcast')
self.assert_operation_called_with(
'broadcast', 'find',
cursor_type=pymongo.CursorType.TAILABLE,
filter={'queue': 'fanout_exchange1'},
)
def test_open_rc_version(self):
import pymongo
def server_info(self):
return {'version': '3.6.0-rc'}
with patch.object(pymongo.MongoClient, 'server_info', server_info):
self.channel._open()
@skip.unless_module('pymongo')
class test_mongodb_channel_ttl(BaseMongoDBChannelCase):
def setup(self):
self.connection = _create_mock_connection(
transport_options={'ttl': True},
)
self.channel = self.connection.default_channel
self.expire_at = (
self.channel.get_now() + datetime.timedelta(milliseconds=777))
# Tests
def test_new_queue(self):
self.channel._new_queue('foobar')
self.assert_operation_called_with(
'queues', 'update',
{'_id': 'foobar'},
{'_id': 'foobar', 'options': {}, 'expire_at': None},
upsert=True,
)
def test_get(self):
import pymongo
self.set_operation_return_value('queues', 'find_one', {
'_id': 'docId', 'options': {'arguments': {'x-expires': 777}},
})
self.set_operation_return_value('messages', 'find_and_modify', {
'_id': 'docId', 'payload': '{"some": "data"}',
})
self.channel._get('foobar')
self.assert_collection_accessed('messages', 'messages.queues')
self.assert_operation_called_with(
'messages', 'find_and_modify',
query={'queue': 'foobar'},
remove=True,
sort=[
('priority', pymongo.ASCENDING),
],
)
self.assert_operation_called_with(
'routing', 'update',
{'queue': 'foobar'},
{'$set': {'expire_at': self.expire_at}},
multiple=True,
)
def test_put(self):
self.set_operation_return_value('queues', 'find_one', {
'_id': 'docId', 'options': {'arguments': {'x-message-ttl': 777}},
})
self.channel._put('foobar', {'some': 'data'})
self.assert_collection_accessed('messages')
self.assert_operation_called_with('messages', 'insert', {
'queue': 'foobar',
'priority': 9,
'payload': '{"some": "data"}',
'expire_at': self.expire_at,
})
def test_queue_bind(self):
self.set_operation_return_value('queues', 'find_one', {
'_id': 'docId', 'options': {'arguments': {'x-expires': 777}},
})
self.channel._queue_bind('test_exchange', 'foo', '*', 'foo')
self.assert_collection_accessed('messages.routing')
self.assert_operation_called_with(
'routing', 'update',
{'queue': 'foo', 'pattern': '*',
'routing_key': 'foo', 'exchange': 'test_exchange'},
{'queue': 'foo', 'pattern': '*',
'routing_key': 'foo', 'exchange': 'test_exchange',
'expire_at': self.expire_at},
upsert=True,
)
def test_queue_delete(self):
self.channel.queue_delete('foobar')
self.assert_collection_accessed('messages.queues')
self.assert_operation_called_with(
'queues', 'remove', {'_id': 'foobar'})
def test_ensure_indexes(self):
self.channel._ensure_indexes(self.channel.client)
self.assert_operation_called_with(
'messages', 'ensure_index', [('expire_at', 1)],
expireAfterSeconds=0)
self.assert_operation_called_with(
'routing', 'ensure_index', [('expire_at', 1)],
expireAfterSeconds=0)
self.assert_operation_called_with(
'queues', 'ensure_index', [('expire_at', 1)], expireAfterSeconds=0)
def test_get_expire(self):
result = self.channel._get_expire(
{'arguments': {'x-expires': 777}}, 'x-expires')
self.channel.client.assert_not_called()
assert result == self.expire_at
self.set_operation_return_value('queues', 'find_one', {
'_id': 'docId', 'options': {'arguments': {'x-expires': 777}},
})
result = self.channel._get_expire('foobar', 'x-expires')
assert result == self.expire_at
def test_update_queues_expire(self):
self.set_operation_return_value('queues', 'find_one', {
'_id': 'docId', 'options': {'arguments': {'x-expires': 777}},
})
self.channel._update_queues_expire('foobar')
self.assert_collection_accessed('messages.routing', 'messages.queues')
self.assert_operation_called_with(
'routing', 'update',
{'queue': 'foobar'},
{'$set': {'expire_at': self.expire_at}},
multiple=True,
)
self.assert_operation_called_with(
'queues', 'update',
{'_id': 'foobar'},
{'$set': {'expire_at': self.expire_at}},
multiple=True,
)
@skip.unless_module('pymongo')
class test_mongodb_channel_calc_queue_size(BaseMongoDBChannelCase):
def setup(self):
self.connection = _create_mock_connection(
transport_options={'calc_queue_size': False})
self.channel = self.connection.default_channel
self.expire_at = (
self.channel.get_now() + datetime.timedelta(milliseconds=777))
# Tests
def test_size(self):
self.set_operation_return_value('messages', 'find.count', 77)
result = self.channel._size('foobar')
self.assert_operation_has_calls('messages', 'find', [])
assert result == 0
| pexip/os-kombu | t/unit/transport/test_mongodb.py | Python | bsd-3-clause | 16,952 |
# -*- coding: utf-8 -*-
"""
.. _plot_montage:
Plotting sensor layouts of EEG systems
======================================
This example illustrates how to load all the EEG system montages
shipped in MNE-Python, and display them on the fsaverage template.
""" # noqa: D205, D400
# Authors: Alexandre Gramfort <[email protected]>
# Joan Massich <[email protected]>
#
# License: BSD Style.
import os.path as op
import mne
from mne.channels.montage import get_builtin_montages
from mne.datasets import fetch_fsaverage
from mne.viz import set_3d_title, set_3d_view
###############################################################################
# Check all montages against a sphere
for current_montage in get_builtin_montages():
montage = mne.channels.make_standard_montage(current_montage)
info = mne.create_info(
ch_names=montage.ch_names, sfreq=100., ch_types='eeg')
info.set_montage(montage)
sphere = mne.make_sphere_model(r0='auto', head_radius='auto', info=info)
fig = mne.viz.plot_alignment(
# Plot options
show_axes=True, dig='fiducials', surfaces='head',
bem=sphere, info=info)
set_3d_view(figure=fig, azimuth=135, elevation=80)
set_3d_title(figure=fig, title=current_montage)
###############################################################################
# Check all montages against fsaverage
subjects_dir = op.dirname(fetch_fsaverage())
for current_montage in get_builtin_montages():
montage = mne.channels.make_standard_montage(current_montage)
# Create dummy info
info = mne.create_info(
ch_names=montage.ch_names, sfreq=100., ch_types='eeg')
info.set_montage(montage)
fig = mne.viz.plot_alignment(
# Plot options
show_axes=True, dig='fiducials', surfaces='head', mri_fiducials=True,
subject='fsaverage', subjects_dir=subjects_dir, info=info,
coord_frame='mri',
trans='fsaverage', # transform from head coords to fsaverage's MRI
)
set_3d_view(figure=fig, azimuth=135, elevation=80)
set_3d_title(figure=fig, title=current_montage)
| Teekuningas/mne-python | examples/visualization/plot_montage.py | Python | bsd-3-clause | 2,108 |
# Copyright (c) 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import posixpath
import re
from collections import defaultdict
def uniform_path_format(native_path):
"""Alters the path if needed to be separated by forward slashes."""
return posixpath.normpath(native_path.replace(os.sep, posixpath.sep))
def parse(filename):
"""Searches the file for lines that start with `# TEAM:` or `# COMPONENT:`.
Args:
filename (str): path to the file to parse.
Returns:
a dict with the following format, with any subset of the listed keys:
{
'component': 'component>name',
'team': '[email protected]',
'os': 'Linux|Windows|Mac|Android|Chrome|Fuchsia'
}
"""
  team_regex = re.compile(r'\s*#\s*TEAM\s*:\s*(\S+)')
  component_regex = re.compile(r'\s*#\s*COMPONENT\s*:\s*(\S+)')
  os_regex = re.compile(r'\s*#\s*OS\s*:\s*(\S+)')
result = {}
with open(filename) as f:
for line in f:
team_matches = team_regex.match(line)
if team_matches:
result['team'] = team_matches.group(1)
component_matches = component_regex.match(line)
if component_matches:
result['component'] = component_matches.group(1)
os_matches = os_regex.match(line)
if os_matches:
result['os'] = os_matches.group(1)
return result
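# Illustrative example (added comment, not part of the original module; the
# team address and component name are hypothetical): for an OWNERS file
# containing the lines
#   # TEAM: [email protected]
#   # COMPONENT: Tools>Tests
# parse() returns {'team': '[email protected]', 'component': 'Tools>Tests'}.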
def aggregate_components_from_owners(all_owners_data, root):
"""Converts the team/component/os tags parsed from OWNERS into mappings.
Args:
all_owners_data (dict): A mapping from relative path to a dir to a dict
mapping the tag names to their values. See docstring for scrape_owners.
root (str): the path to the src directory.
Returns:
A tuple (data, warnings, stats) where data is a dict of the form
{'component-to-team': {'Component1': 'team1@chr...', ...},
     'teams-per-component': {'Component1': ['team1@chr...', 'team2@chr...']},
     'dir-to-component': {'/path/to/1': 'Component1', ...},
     'dir-to-team': {'/path/to/1': 'team1@', ...}}
    , warnings is a list of strings, stats is a dict of the form
    {'OWNERS-count': total number of OWNERS files,
     'OWNERS-with-component-only-count': number of OWNERS files that have
                                         # COMPONENT,
     'OWNERS-with-team-and-component-count': number of OWNERS files that
                                             have both TEAM and COMPONENT,
     'OWNERS-count-by-depth': {directory depth: number of OWNERS files},
     'OWNERS-with-component-only-count-by-depth': {directory depth: number
         of OWNERS files that have COMPONENT at this depth},
     'OWNERS-with-team-and-component-count-by-depth': {directory depth: ...}}
"""
stats = {}
num_total = 0
num_with_component = 0
num_with_team_component = 0
num_total_by_depth = defaultdict(int)
num_with_component_by_depth = defaultdict(int)
num_with_team_component_by_depth = defaultdict(int)
warnings = []
teams_per_component = defaultdict(set)
topmost_team = {}
dir_to_component = {}
dir_missing_info_by_depth = defaultdict(list)
dir_to_team = {}
for rel_dirname, owners_data in all_owners_data.iteritems():
# Normalize this relative path to posix-style to make counting separators
# work correctly as a means of obtaining the file_depth.
rel_path = uniform_path_format(os.path.relpath(rel_dirname, root))
file_depth = 0 if rel_path == '.' else rel_path.count(posixpath.sep) + 1
num_total += 1
num_total_by_depth[file_depth] += 1
component = owners_data.get('component')
team = owners_data.get('team')
os_tag = owners_data.get('os')
if os_tag and component:
component = '%s(%s)' % (component, os_tag)
if team:
dir_to_team[rel_dirname] = team
if component:
num_with_component += 1
num_with_component_by_depth[file_depth] += 1
dir_to_component[rel_dirname] = component
if team:
num_with_team_component += 1
num_with_team_component_by_depth[file_depth] += 1
teams_per_component[component].add(team)
if component not in topmost_team or file_depth < topmost_team[
component]['depth']:
topmost_team[component] = {'depth': file_depth, 'team': team}
else:
rel_owners_path = uniform_path_format(os.path.join(rel_dirname, 'OWNERS'))
warnings.append('%s has no COMPONENT tag' % rel_owners_path)
if not team and not os_tag:
dir_missing_info_by_depth[file_depth].append(rel_owners_path)
mappings = {
'component-to-team': {
k: v['team'] for k, v in topmost_team.iteritems()
},
'teams-per-component': {
k: sorted(list(v)) for k, v in teams_per_component.iteritems()
},
'dir-to-component': dir_to_component,
'dir-to-team': dir_to_team,
}
warnings += validate_one_team_per_component(mappings)
stats = {'OWNERS-count': num_total,
'OWNERS-with-component-only-count': num_with_component,
'OWNERS-with-team-and-component-count': num_with_team_component,
'OWNERS-count-by-depth': num_total_by_depth,
'OWNERS-with-component-only-count-by-depth':
num_with_component_by_depth,
'OWNERS-with-team-and-component-count-by-depth':
num_with_team_component_by_depth,
'OWNERS-missing-info-by-depth':
dir_missing_info_by_depth}
return mappings, warnings, stats
def validate_one_team_per_component(m):
"""Validates that each component is associated with at most 1 team."""
warnings = []
# TODO(robertocn): Validate the component names: crbug.com/679540
teams_per_component = m['teams-per-component']
for c in teams_per_component:
if len(teams_per_component[c]) > 1:
warnings.append('Component %s has the following teams assigned: %s.\n'
'Team %s is being used, as it is defined at the OWNERS '
'file at the topmost dir'
% (
c,
', '.join(teams_per_component[c]),
m['component-to-team'][c]
))
return warnings
def scrape_owners(root, include_subdirs):
"""Recursively parse OWNERS files for tags.
Args:
root (str): The directory where to start parsing.
include_subdirs (bool): Whether to generate entries for subdirs with no
own OWNERS files based on the parent dir's tags.
Returns a dict in the form below.
{
'/path/to/dir': {
'component': 'component>name',
'team': '[email protected]',
'os': 'Linux|Windows|Mac|Android|Chrome|Fuchsia'
},
'/path/to/dir/inside/dir': {
'component': ...
}
}
"""
data = {}
def nearest_ancestor_tag(dirname, tag):
""" Find the value of tag in the nearest ancestor that defines it."""
ancestor = os.path.dirname(dirname)
while ancestor:
rel_ancestor = uniform_path_format(os.path.relpath(ancestor, root))
if rel_ancestor in data and data[rel_ancestor].get(tag):
return data[rel_ancestor][tag]
if rel_ancestor == '.':
break
ancestor = os.path.dirname(ancestor)
return
for dirname, _, files in os.walk(root):
# Proofing against windows casing oddities.
owners_file_names = [f for f in files if f.upper() == 'OWNERS']
rel_dirname = uniform_path_format(os.path.relpath(dirname, root))
if owners_file_names or include_subdirs:
if owners_file_names:
owners_full_path = os.path.join(dirname, owners_file_names[0])
data[rel_dirname] = parse(owners_full_path)
else:
data[rel_dirname] = {}
for tag in ('component', 'os', 'team'):
        if tag not in data[rel_dirname]:
ancestor_tag = nearest_ancestor_tag(dirname, tag)
if ancestor_tag:
data[rel_dirname][tag] = ancestor_tag
return data
| nwjs/chromium.src | tools/checkteamtags/owners_file_tags.py | Python | bsd-3-clause | 7,900 |
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Verify that the Entry() global function and environment method work
correctly, and that the former does not try to expand construction
variables.
"""
import TestSCons
test = TestSCons.TestSCons()
test.write('SConstruct', """
env = Environment(FOO = 'fff', BAR = 'bbb')
print Entry('ddd')
print Entry('$FOO')
print Entry('${BAR}_$BAR')
print env.Entry('eee')
print env.Entry('$FOO')
print env.Entry('${BAR}_$BAR')
""")
test.run(stdout = test.wrap_stdout(read_str = """\
ddd
$FOO
${BAR}_$BAR
eee
fff
bbb_bbb
""", build_str = """\
scons: `.' is up to date.
"""))
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| Distrotech/scons | test/Entry.py | Python | mit | 1,860 |
import optparse
import os
import shutil
import sys
import unittest
from itertools import izip
from . import util
from . import stats
#=============================================================================
# common utility functions for testing
def clean_dir(path):
if os.path.exists(path):
shutil.rmtree(path)
def makedirs(path):
if not os.path.exists(path):
os.makedirs(path)
def make_clean_dir(path):
if os.path.exists(path):
shutil.rmtree(path)
os.makedirs(path)
def fequal(f1, f2, rel=.0001, eabs=1e-12):
"""assert whether two floats are approximately equal"""
if f1 == f2:
return
if f2 == 0:
err = f1
elif f1 == 0:
err = f2
else:
err = abs(f1 - f2) / abs(f2)
x = (err < rel)
if abs(f1 - f2) < eabs:
return
assert x, "%e != %e [rel=%f, abs=%f]" % (f1, f2, err, abs(f1 - f2))
def fequals(f1, f2, rel=.0001, eabs=1e-12):
for i, j in izip(f1, f2):
fequal(i, j, rel=rel, eabs=eabs)
def integrate(func, a, b, step):
return sum(func(i) * step for i in util.frange(a, b, step))
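# (Added note: `integrate` above is a simple left Riemann-sum approximation of
# the integral of `func` over [a, b) with the given step size.)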
def eq_sample_pdf(samples, pdf,
ndivs=20, start=-util.INF, end=util.INF, pval=.05,
step=None):
"""Asserts a sample matches a probability density distribution"""
if step is None:
step = (max(samples) - min(samples)) / float(ndivs)
cdf = lambda x, params: integrate(pdf, x, x+step, step/10.0)
chi2, p = stats.chi_square_fit(cdf, [], samples,
ndivs=ndivs, start=start, end=end)
assert p >= pval, p
def eq_sample_pmf(samples, pmf, pval=.05):
"""Asserts a sample matches a probability mass distribution"""
import scipy.stats
hist = util.hist_dict(samples)
total = sum(hist.itervalues())
observed = []
expected = []
for sample, count in hist.iteritems():
if count >= 5:
observed.append(count)
expected.append(pmf(sample) * total)
chi2, p = scipy.stats.chisquare(
scipy.array(observed), scipy.array(expected))
assert p >= pval, p
_do_pause = True
def pause(text="press enter to continue: "):
"""Pause until the user presses enter"""
if _do_pause:
sys.stderr.write(text)
raw_input()
def set_pausing(enabled=True):
global _do_pause
_do_pause = enabled
#=============================================================================
# common unittest functions
def list_tests(stack=0):
# get environment
var = __import__("__main__").__dict__
for name, obj in var.iteritems():
if isinstance(obj, type) and issubclass(obj, unittest.TestCase):
for attr in dir(obj):
if attr.startswith("test"):
print "%s.%s" % (name, attr),
doc = getattr(obj, attr).__doc__
if doc:
print "--", doc.split("\n")[0]
else:
print
def test_main():
o = optparse.OptionParser()
o.add_option("-v", "--verbose", action="store_true",
help="Verbose output")
o.add_option("-q", "--quiet", action="store_true",
help="Minimal output")
o.add_option("-l", "--list_tests", action="store_true")
o.add_option("-p", "--pause", action="store_true")
conf, args = o.parse_args()
if conf.list_tests:
list_tests(1)
return
if conf.pause:
set_pausing(True)
else:
set_pausing(False)
# process unittest arguments
argv = [sys.argv[0]]
if conf.verbose:
argv.append("-v")
if conf.quiet:
argv.append("-q")
argv.extend(args)
# run unittest
unittest.main(argv=argv)
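# Usage sketch (added comment, not part of the original module): a test module
# built on these helpers would typically define unittest.TestCase subclasses
# and finish with
#
#   if __name__ == "__main__":
#       test_main()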
| wutron/compbio | rasmus/testing.py | Python | mit | 3,789 |
#!/usr/bin/env python
import argparse
import errno
import hashlib
import os
import shutil
import subprocess
import sys
import tempfile
from io import StringIO
from lib.config import PLATFORM, get_target_arch, get_env_var, s3_config, \
get_zip_name
from lib.util import electron_gyp, execute, get_electron_version, \
parse_version, scoped_cwd, s3put
from lib.github import GitHub
ELECTRON_REPO = 'electron/electron'
ELECTRON_VERSION = get_electron_version()
PROJECT_NAME = electron_gyp()['project_name%']
PRODUCT_NAME = electron_gyp()['product_name%']
SOURCE_ROOT = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
OUT_DIR = os.path.join(SOURCE_ROOT, 'out', 'R')
DIST_DIR = os.path.join(SOURCE_ROOT, 'dist')
DIST_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION)
SYMBOLS_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'symbols')
DSYM_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'dsym')
PDB_NAME = get_zip_name(PROJECT_NAME, ELECTRON_VERSION, 'pdb')
def main():
args = parse_args()
if not args.publish_release:
if not dist_newer_than_head():
run_python_script('create-dist.py')
build_version = get_electron_build_version()
if not ELECTRON_VERSION.startswith(build_version):
error = 'Tag name ({0}) should match build version ({1})\n'.format(
ELECTRON_VERSION, build_version)
sys.stderr.write(error)
sys.stderr.flush()
return 1
github = GitHub(auth_token())
releases = github.repos(ELECTRON_REPO).releases.get()
tag_exists = False
for release in releases:
if not release['draft'] and release['tag_name'] == args.version:
tag_exists = True
break
release = create_or_get_release_draft(github, releases, args.version,
tag_exists)
if args.publish_release:
# Upload the Node SHASUMS*.txt.
run_python_script('upload-node-checksums.py', '-v', ELECTRON_VERSION)
# Upload the index.json.
run_python_script('upload-index-json.py')
# Create and upload the Electron SHASUMS*.txt
release_electron_checksums(github, release)
# Press the publish button.
publish_release(github, release['id'])
# Do not upload other files when passed "-p".
return
# Upload Electron with GitHub Releases API.
upload_electron(github, release, os.path.join(DIST_DIR, DIST_NAME))
upload_electron(github, release, os.path.join(DIST_DIR, SYMBOLS_NAME))
if PLATFORM == 'darwin':
upload_electron(github, release, os.path.join(DIST_DIR,
'electron-api.json'))
upload_electron(github, release, os.path.join(DIST_DIR, 'electron.d.ts'))
upload_electron(github, release, os.path.join(DIST_DIR, DSYM_NAME))
elif PLATFORM == 'win32':
upload_electron(github, release, os.path.join(DIST_DIR, PDB_NAME))
# Upload free version of ffmpeg.
ffmpeg = get_zip_name('ffmpeg', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, ffmpeg))
# Upload chromedriver and mksnapshot for minor version update.
if parse_version(args.version)[2] == '0':
chromedriver = get_zip_name('chromedriver', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, chromedriver))
mksnapshot = get_zip_name('mksnapshot', ELECTRON_VERSION)
upload_electron(github, release, os.path.join(DIST_DIR, mksnapshot))
if PLATFORM == 'win32' and not tag_exists:
# Upload PDBs to Windows symbol server.
run_python_script('upload-windows-pdb.py')
# Upload node headers.
run_python_script('create-node-headers.py', '-v', args.version)
run_python_script('upload-node-headers.py', '-v', args.version)
def parse_args():
parser = argparse.ArgumentParser(description='upload distribution file')
parser.add_argument('-v', '--version', help='Specify the version',
default=ELECTRON_VERSION)
parser.add_argument('-p', '--publish-release',
help='Publish the release',
action='store_true')
return parser.parse_args()
def run_python_script(script, *args):
script_path = os.path.join(SOURCE_ROOT, 'script', script)
return execute([sys.executable, script_path] + list(args))
def get_electron_build_version():
if get_target_arch() == 'arm' or os.environ.has_key('CI'):
# In CI we just build as told.
return ELECTRON_VERSION
if PLATFORM == 'darwin':
electron = os.path.join(SOURCE_ROOT, 'out', 'R',
'{0}.app'.format(PRODUCT_NAME), 'Contents',
'MacOS', PRODUCT_NAME)
elif PLATFORM == 'win32':
electron = os.path.join(SOURCE_ROOT, 'out', 'R',
'{0}.exe'.format(PROJECT_NAME))
else:
electron = os.path.join(SOURCE_ROOT, 'out', 'R', PROJECT_NAME)
return subprocess.check_output([electron, '--version']).strip()
def dist_newer_than_head():
with scoped_cwd(SOURCE_ROOT):
try:
head_time = subprocess.check_output(['git', 'log', '--pretty=format:%at',
'-n', '1']).strip()
dist_time = os.path.getmtime(os.path.join(DIST_DIR, DIST_NAME))
except OSError as e:
if e.errno != errno.ENOENT:
raise
return False
return dist_time > int(head_time)
def get_text_with_editor(name):
editor = os.environ.get('EDITOR', 'nano')
initial_message = '\n# Please enter the body of your release note for %s.' \
% name
t = tempfile.NamedTemporaryFile(suffix='.tmp', delete=False)
t.write(initial_message)
t.close()
subprocess.call([editor, t.name])
text = ''
for line in open(t.name, 'r'):
if len(line) == 0 or line[0] != '#':
text += line
os.unlink(t.name)
return text
def create_or_get_release_draft(github, releases, tag, tag_exists):
# Search for existing draft.
for release in releases:
if release['draft']:
return release
if tag_exists:
tag = 'do-not-publish-me'
return create_release_draft(github, tag)
def create_release_draft(github, tag):
name = '{0} {1}'.format(PROJECT_NAME, tag)
if os.environ.has_key('CI'):
body = '(placeholder)'
else:
body = get_text_with_editor(name)
if body == '':
sys.stderr.write('Quit due to empty release note.\n')
sys.exit(0)
data = dict(tag_name=tag, name=name, body=body, draft=True)
r = github.repos(ELECTRON_REPO).releases.post(data=data)
return r
def release_electron_checksums(github, release):
checksums = run_python_script('merge-electron-checksums.py',
'-v', ELECTRON_VERSION)
upload_io_to_github(github, release, 'SHASUMS256.txt',
StringIO(checksums.decode('utf-8')), 'text/plain')
def upload_electron(github, release, file_path):
# Delete the original file before uploading in CI.
filename = os.path.basename(file_path)
if os.environ.has_key('CI'):
try:
for asset in release['assets']:
if asset['name'] == filename:
github.repos(ELECTRON_REPO).releases.assets(asset['id']).delete()
except Exception:
pass
# Upload the file.
with open(file_path, 'rb') as f:
upload_io_to_github(github, release, filename, f, 'application/zip')
# Upload the checksum file.
upload_sha256_checksum(release['tag_name'], file_path)
# Upload ARM assets without the v7l suffix for backwards compatibility
# TODO Remove for 2.0
if 'armv7l' in filename:
arm_filename = filename.replace('armv7l', 'arm')
arm_file_path = os.path.join(os.path.dirname(file_path), arm_filename)
shutil.copy2(file_path, arm_file_path)
upload_electron(github, release, arm_file_path)
def upload_io_to_github(github, release, name, io, content_type):
params = {'name': name}
headers = {'Content-Type': content_type}
github.repos(ELECTRON_REPO).releases(release['id']).assets.post(
params=params, headers=headers, data=io, verify=False)
def upload_sha256_checksum(version, file_path):
bucket, access_key, secret_key = s3_config()
checksum_path = '{}.sha256sum'.format(file_path)
sha256 = hashlib.sha256()
with open(file_path, 'rb') as f:
sha256.update(f.read())
filename = os.path.basename(file_path)
with open(checksum_path, 'w') as checksum:
checksum.write('{} *{}'.format(sha256.hexdigest(), filename))
s3put(bucket, access_key, secret_key, os.path.dirname(checksum_path),
'atom-shell/tmp/{0}'.format(version), [checksum_path])
def publish_release(github, release_id):
data = dict(draft=False)
github.repos(ELECTRON_REPO).releases(release_id).patch(data=data)
def auth_token():
token = get_env_var('GITHUB_TOKEN')
message = ('Error: Please set the $ELECTRON_GITHUB_TOKEN '
'environment variable, which is your personal token')
assert token, message
return token
if __name__ == '__main__':
import sys
sys.exit(main())
| rreimann/electron | script/upload.py | Python | mit | 8,894 |
"""
Add an excerpt field to the page.
"""
from __future__ import absolute_import, unicode_literals
from django.db import models
from django.utils.translation import ugettext_lazy as _
from feincms import extensions
class Extension(extensions.Extension):
def handle_model(self):
self.model.add_to_class(
'excerpt',
models.TextField(
_('excerpt'),
blank=True,
help_text=_(
'Add a brief excerpt summarizing the content'
' of this page.')))
def handle_modeladmin(self, modeladmin):
modeladmin.add_extension_options(_('Excerpt'), {
'fields': ('excerpt',),
'classes': ('collapse',),
})
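# Activation sketch (added comment, not part of the original module; the
# registration call below is an assumption about how this extension is
# typically enabled in a FeinCMS project):
#
#   Page.register_extensions('feincms.module.page.extensions.excerpt')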
| mcmaxwell/idea_digital_agency | idea/feincms/module/page/extensions/excerpt.py | Python | mit | 754 |
# $Id$
# -*- coding: utf8 -*-
# Authors: David Goodger <[email protected]>; Ueli Schlaepfer; Günter Milde
# Maintainer: [email protected]
# Copyright: This module has been placed in the public domain.
"""
Transforms needed by most or all documents:
- `Decorations`: Generate a document's header & footer.
- `Messages`: Placement of system messages stored in
`nodes.document.transform_messages`.
- `TestMessages`: Like `Messages`, used on test runs.
- `FinalReferences`: Resolve remaining references.
"""
__docformat__ = 'reStructuredText'
import re
import sys
import time
from docutils import nodes, utils
from docutils.transforms import TransformError, Transform
from docutils.utils import smartquotes
class Decorations(Transform):
"""
Populate a document's decoration element (header, footer).
"""
default_priority = 820
def apply(self):
header_nodes = self.generate_header()
if header_nodes:
decoration = self.document.get_decoration()
header = decoration.get_header()
header.extend(header_nodes)
footer_nodes = self.generate_footer()
if footer_nodes:
decoration = self.document.get_decoration()
footer = decoration.get_footer()
footer.extend(footer_nodes)
def generate_header(self):
return None
def generate_footer(self):
# @@@ Text is hard-coded for now.
# Should be made dynamic (language-dependent).
settings = self.document.settings
if settings.generator or settings.datestamp or settings.source_link \
or settings.source_url:
text = []
if settings.source_link and settings._source \
or settings.source_url:
if settings.source_url:
source = settings.source_url
else:
source = utils.relative_path(settings._destination,
settings._source)
text.extend([
nodes.reference('', 'View document source',
refuri=source),
nodes.Text('.\n')])
if settings.datestamp:
datestamp = time.strftime(settings.datestamp, time.gmtime())
text.append(nodes.Text('Generated on: ' + datestamp + '.\n'))
if settings.generator:
text.extend([
nodes.Text('Generated by '),
nodes.reference('', 'Docutils', refuri=
'http://docutils.sourceforge.net/'),
nodes.Text(' from '),
nodes.reference('', 'reStructuredText', refuri='http://'
'docutils.sourceforge.net/rst.html'),
nodes.Text(' source.\n')])
return [nodes.paragraph('', '', *text)]
else:
return None
class ExposeInternals(Transform):
"""
Expose internal attributes if ``expose_internals`` setting is set.
"""
default_priority = 840
def not_Text(self, node):
return not isinstance(node, nodes.Text)
def apply(self):
if self.document.settings.expose_internals:
for node in self.document.traverse(self.not_Text):
for att in self.document.settings.expose_internals:
value = getattr(node, att, None)
if value is not None:
node['internal:' + att] = value
class Messages(Transform):
"""
Place any system messages generated after parsing into a dedicated section
of the document.
"""
default_priority = 860
def apply(self):
unfiltered = self.document.transform_messages
threshold = self.document.reporter.report_level
messages = []
for msg in unfiltered:
if msg['level'] >= threshold and not msg.parent:
messages.append(msg)
if messages:
section = nodes.section(classes=['system-messages'])
# @@@ get this from the language module?
section += nodes.title('', 'Docutils System Messages')
section += messages
self.document.transform_messages[:] = []
self.document += section
class FilterMessages(Transform):
"""
Remove system messages below verbosity threshold.
"""
default_priority = 870
def apply(self):
for node in self.document.traverse(nodes.system_message):
if node['level'] < self.document.reporter.report_level:
node.parent.remove(node)
class TestMessages(Transform):
"""
Append all post-parse system messages to the end of the document.
Used for testing purposes.
"""
default_priority = 880
def apply(self):
for msg in self.document.transform_messages:
if not msg.parent:
self.document += msg
class StripComments(Transform):
"""
Remove comment elements from the document tree (only if the
``strip_comments`` setting is enabled).
"""
default_priority = 740
def apply(self):
if self.document.settings.strip_comments:
for node in self.document.traverse(nodes.comment):
node.parent.remove(node)
class StripClassesAndElements(Transform):
"""
Remove from the document tree all elements with classes in
`self.document.settings.strip_elements_with_classes` and all "classes"
attribute values in `self.document.settings.strip_classes`.
"""
default_priority = 420
def apply(self):
if not (self.document.settings.strip_elements_with_classes
or self.document.settings.strip_classes):
return
# prepare dicts for lookup (not sets, for Python 2.2 compatibility):
self.strip_elements = dict(
[(key, None)
for key in (self.document.settings.strip_elements_with_classes
or [])])
self.strip_classes = dict(
[(key, None) for key in (self.document.settings.strip_classes
or [])])
for node in self.document.traverse(self.check_classes):
node.parent.remove(node)
def check_classes(self, node):
if isinstance(node, nodes.Element):
for class_value in node['classes'][:]:
if class_value in self.strip_classes:
node['classes'].remove(class_value)
if class_value in self.strip_elements:
return 1
class SmartQuotes(Transform):
"""
Replace ASCII quotation marks with typographic form.
Also replace multiple dashes with em-dash/en-dash characters.
"""
default_priority = 850
def __init__(self, document, startnode):
Transform.__init__(self, document, startnode=startnode)
self.unsupported_languages = set()
def get_tokens(self, txtnodes):
# A generator that yields ``(texttype, nodetext)`` tuples for a list
# of "Text" nodes (interface to ``smartquotes.educate_tokens()``).
texttype = {True: 'literal', # "literal" text is not changed:
False: 'plain'}
for txtnode in txtnodes:
nodetype = texttype[isinstance(txtnode.parent,
(nodes.literal,
nodes.math,
nodes.image,
nodes.raw,
nodes.problematic))]
yield (nodetype, txtnode.astext())
def apply(self):
smart_quotes = self.document.settings.smart_quotes
if not smart_quotes:
return
try:
alternative = smart_quotes.startswith('alt')
except AttributeError:
alternative = False
# print repr(alternative)
document_language = self.document.settings.language_code
# "Educate" quotes in normal text. Handle each block of text
# (TextElement node) as a unit to keep context around inline nodes:
for node in self.document.traverse(nodes.TextElement):
# skip preformatted text blocks and special elements:
if isinstance(node, (nodes.FixedTextElement, nodes.Special)):
continue
# nested TextElements are not "block-level" elements:
if isinstance(node.parent, nodes.TextElement):
continue
# list of text nodes in the "text block":
txtnodes = [txtnode for txtnode in node.traverse(nodes.Text)
if not isinstance(txtnode.parent,
nodes.option_string)]
# language: use typographical quotes for language "lang"
lang = node.get_language_code(document_language)
# use alternative form if `smart-quotes` setting starts with "alt":
if alternative:
if '-x-altquot' in lang:
lang = lang.replace('-x-altquot', '')
else:
lang += '-x-altquot'
# drop subtags missing in quotes:
for tag in utils.normalize_language_tag(lang):
if tag in smartquotes.smartchars.quotes:
lang = tag
break
else: # language not supported: (keep ASCII quotes)
if lang not in self.unsupported_languages:
self.document.reporter.warning('No smart quotes '
'defined for language "%s".'%lang, base_node=node)
self.unsupported_languages.add(lang)
lang = ''
# Iterator educating quotes in plain text:
# '2': set all, using old school en- and em- dash shortcuts
teacher = smartquotes.educate_tokens(self.get_tokens(txtnodes),
attr='2', language=lang)
for txtnode, newtext in zip(txtnodes, teacher):
txtnode.parent.replace(txtnode, nodes.Text(newtext))
self.unsupported_languages = set() # reset
| Lyleo/OmniMarkupPreviewer | OmniMarkupLib/Renderers/libs/python3/docutils/transforms/universal.py | Python | mit | 10,307 |
from __future__ import unicode_literals, division, absolute_import
import logging
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
from flexget import plugin
from flexget.event import event
try:
# NOTE: Importing other plugins is discouraged!
from flexget.components.parsing.parsers import parser_common as plugin_parser_common
except ImportError:
raise plugin.DependencyError(issued_by=__name__, missing='parser_common')
log = logging.getLogger('metainfo_movie')
class MetainfoMovie(object):
"""
Check if entry appears to be a movie, and populate movie info if so.
"""
schema = {'type': 'boolean'}
def on_task_metainfo(self, task, config):
# Don't run if we are disabled
if config is False:
return
for entry in task.entries:
# If movie parser already parsed this, don't touch it.
if entry.get('id'):
continue
self.guess_entry(entry)
@staticmethod
def guess_entry(entry):
"""
Populates movie_* fields for entries that are successfully parsed.
:param entry: Entry that's being processed
:return: True for successful parse
"""
if entry.get('movie_guessed'):
# Return true if we already parsed this
return True
parser = plugin.get('parsing', 'metainfo_movie').parse_movie(data=entry['title'])
if parser and parser.valid:
parser.name = plugin_parser_common.normalize_name(
plugin_parser_common.remove_dirt(parser.name)
)
for field, value in parser.fields.items():
if not entry.is_lazy(field) and not entry.get(field):
entry[field] = value
return True
return False
@event('plugin.register')
def register_plugin():
plugin.register(MetainfoMovie, 'metainfo_movie', api_ver=2)
| gazpachoking/Flexget | flexget/plugins/metainfo/metainfo_movie.py | Python | mit | 1,939 |
"""
Dtella - Google Spreadsheets Puller Module
Copyright (C) 2008 Dtella Labs (http://dtella.org)
Copyright (C) 2008 Paul Marks (http://pmarks.net)
$Id$
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
"""
from twisted.internet import reactor
from twisted.internet.threads import deferToThread
import urllib
import xml.dom.minidom
PAGE_TEMPLATE = ("https://spreadsheets.google.com/feeds/cells/"
"%s/1/public/basic?max-col=1&max-row=10")
class GDataPuller(object):
# Tell our py2exe script to let XML/SSL be included.
needs_xml = True
needs_ssl = True
def __init__(self, sheet_key):
self.sheet_key = sheet_key
def startText(self):
return "Requesting config data from Google Spreadsheet..."
def query(self):
def f(url):
return urllib.urlopen(url).read()
d = deferToThread(f, PAGE_TEMPLATE % self.sheet_key)
def cb(result):
config_list = []
doc = xml.dom.minidom.parseString(result)
for c in doc.getElementsByTagName("content"):
if c.firstChild:
config_list.append(str(c.firstChild.nodeValue))
return config_list
d.addCallback(cb)
return d
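# Usage sketch (added comment, not part of the original module; the sheet key
# is a placeholder):
#
#   puller = GDataPuller('example_sheet_key')
#   d = puller.query()
#   d.addCallback(lambda config_list: ...)  # list of cell strings from column 1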
| pmarks-net/dtella | dtella/modules/pull_gdata.py | Python | gpl-2.0 | 1,879 |
# -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2005, 2006, 2007, 2008, 2009, 2010, 2011, 2012 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
from invenio.legacy.dbquery import run_sql, IntegrityError
# Number of retries to insert a value in the DB storage
MAX_DB_RETRY = 10
class SequenceGenerator(object):
seq_name = None
def __init__(self):
assert self.seq_name
def _value_exists(self, value):
"""
Checks if the value exists in the storage
@param value: value to be checked in storage
@type value: string
@return: result of select SQL query
@rtype: tuple
"""
return run_sql("""SELECT seq_value FROM seqSTORE
WHERE seq_value=%s AND seq_name=%s""",
(value, self.seq_name))
def _insert_value(self, value):
"""
Inserts value into storage
@param value: value to be stored
@type value: string
@return: result of insert SQL query
@rtype: tuple
"""
run_sql("""INSERT INTO seqSTORE (seq_name, seq_value)
VALUES (%s, %s)""",
(self.seq_name, value))
def _next_value(self, *args, **kwargs):
"""
Internal implementation to calculate next value in sequence
"""
raise NotImplementedError
def next_value(self, *args, **kwargs):
"""
Get the next value in the sequence
@return: next value in sequence
@rtype: string
"""
db_retries = 0
value = None
while MAX_DB_RETRY > db_retries:
value = self._next_value(*args, **kwargs)
try:
self._insert_value(value)
break
except IntegrityError:
# The value is already in the storage, get next one
db_retries += 1
return value
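# Subclass sketch (added comment, not part of the original module; the class
# and argument names are hypothetical). A concrete sequence only needs a
# seq_name and an implementation of _next_value():
#
#   class ReportNumberSequence(SequenceGenerator):
#       seq_name = 'report_number'
#
#       def _next_value(self, last_value):
#           return str(int(last_value) + 1)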
| egabancho/invenio | invenio/modules/sequencegenerator/backend.py | Python | gpl-2.0 | 2,606 |
# -*- coding: utf-8 -*-
# Copyright (C) 2014 - Garrett Regier
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc. 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA.
from gi.repository import GLib
import abc
import collections
import queue
import threading
import traceback
from .debug import debug
class WorkerThread(threading.Thread):
__metaclass__ = abc.ABCMeta
__sentinel = object()
def __init__(self, callback, chunk_size=1, *args, **kwargs):
super().__init__(*args, **kwargs)
self.__callback = callback
self.__chunk_size = chunk_size
self.__quit = threading.Event()
self.__has_idle = threading.Event()
self.__tasks = queue.Queue()
self.__results = collections.deque()
@abc.abstractmethod
def handle_task(self, *args, **kwargs):
raise NotImplementedError
# TODO: add, put, push?
def push(self, *args, **kwargs):
self.__tasks.put((args, kwargs))
def __close(self, process_results):
self.__quit.set()
# Prevent the queue.get() from blocking forever
self.__tasks.put(self.__sentinel)
super().join()
if not process_results:
self.__results.clear()
else:
while self.__in_idle() is GLib.SOURCE_CONTINUE:
pass
def terminate(self):
self.__close(False)
def join(self):
self.__close(True)
def clear(self):
old_tasks = self.__tasks
self.__tasks = queue.Queue(1)
# Prevent the queue.get() from blocking forever
old_tasks.put(self.__sentinel)
# Block until the old queue has finished, otherwise
        # an old result could be added to the new results queue
self.__tasks.put(self.__sentinel)
self.__tasks.put(self.__sentinel)
old_tasks = self.__tasks
self.__tasks = queue.Queue()
# Switch to the new queue
old_tasks.put(self.__sentinel)
# Finally, we can now create a new deque without
# the possibility of any old results being added to it
self.__results.clear()
def run(self):
while not self.__quit.is_set():
task = self.__tasks.get()
if task is self.__sentinel:
continue
args, kwargs = task
try:
result = self.handle_task(*args, **kwargs)
except Exception:
traceback.print_exc()
continue
self.__results.append(result)
# Avoid having an idle for every result
if not self.__has_idle.is_set():
self.__has_idle.set()
debug('%s<%s>: result callback idle started' %
(type(self).__name__, self.name))
GLib.source_set_name_by_id(GLib.idle_add(self.__in_idle),
'[gedit] git %s result callback idle' %
(type(self).__name__,))
def __in_idle(self):
try:
for i in range(self.__chunk_size):
result = self.__results.popleft()
try:
self.__callback(result)
except Exception:
traceback.print_exc()
except IndexError:
# Must be cleared before we check the results length
self.__has_idle.clear()
# Only remove the idle when there are no more items,
# some could have been added after the IndexError was raised
if len(self.__results) == 0:
debug('%s<%s>: result callback idle finished' %
(type(self).__name__, self.name))
return GLib.SOURCE_REMOVE
return GLib.SOURCE_CONTINUE
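# Subclass sketch (added comment, not part of the original plugin; the names
# are illustrative). A worker implements handle_task(); each returned value is
# later handed to the result callback from a GLib idle handler:
#
#   class LineCountThread(WorkerThread):
#       def handle_task(self, path):
#           with open(path) as f:
#               return path, sum(1 for _ in f)
#
#   thread = LineCountThread(lambda result: print(result))
#   thread.start()
#   thread.push('/some/file')
#   ...
#   thread.join()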
# ex:ts=4:et:
| VIVEKLUCKY1848/gedit-plugins-1 | plugins/git/git/workerthread.py | Python | gpl-2.0 | 4,422 |
###############################################################################
# ilastik: interactive learning and segmentation toolkit
#
# Copyright (C) 2011-2014, the ilastik developers
# <[email protected]>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# In addition, as a special exception, the copyright holders of
# ilastik give you permission to combine ilastik with applets,
# workflows and plugins which are not covered under the GNU
# General Public License.
#
# See the LICENSE file for details. License information is also available
# on the ilastik web site at:
# http://ilastik.org/license.html
###############################################################################
import ilastik.ilastik_logging
ilastik.ilastik_logging.default_config.init()
import unittest
import numpy
import vigra
from lazyflow.graph import Graph
from ilastik.applets.objectClassification.opObjectClassification import \
OpRelabelSegmentation, OpObjectTrain, OpObjectPredict, OpObjectClassification
from ilastik.applets import objectExtraction
from ilastik.applets.objectExtraction.opObjectExtraction import \
OpRegionFeatures, OpAdaptTimeListRoi, OpObjectExtraction
import h5py
class TestWithCube(object):
def setUp(self):
self.features = {"Standard Object Features": {\
"Count":{}, \
"Mean":{}, \
"Mean in neighborhood":{"margin":(5, 5, 2)}}}
self.rawimg = numpy.zeros((200, 200, 20, 1), dtype=numpy.uint8)
self.binimg = numpy.zeros((200, 200, 20, 1), dtype=numpy.uint8)
self.rawimg[0:100, :, :, :] = 50
#small white
self.rawimg[5:10, 5:10, 9:11, :] = 255
self.rawimg[20:25, 20:25, 9:11, :] = 255
self.rawimg[35:40, 30:35, 9:11, :] = 255
self.rawimg[50:55, 50:55, 9:11, :] = 255
self.rawimg[105:110, 5:10, 9:11, :] = 255
self.rawimg[120:125, 20:25, 9:11, :] = 255
self.rawimg[135:140, 30:35, 9:11, :] = 255
self.rawimg[150:155, 150:155, 9:11, :] = 255
#small grey
self.rawimg[145:150, 5:10, 9:11, :] = 150
self.rawimg[160:165, 40:45, 9:11, :] = 150
self.rawimg[175:180, 20:25, 9:11, :] = 150
self.rawimg[175:180, 5:10, 9:11, :] = 150
self.rawimg[45:50, 5:10, 9:11, :] = 150
self.rawimg[60:65, 40:45, 9:11, :] = 150
self.rawimg[75:80, 20:25, 9:11, :] = 150
self.rawimg[75:80, 5:10, 9:11, :] = 150
#large white
self.rawimg[50:70, 150:170, 9:11, :] = 255
self.rawimg[5:25, 60:80, 9:11, :] = 255
self.rawimg[5:25, 170:190, 9:11, :] = 255
self.rawimg[70:90, 90:110, 9:11, :] = 255
self.rawimg[150:170, 150:170, 9:11, :] = 255
self.rawimg[105:125, 60:80, 9:11, :] = 255
self.rawimg[105:125, 170:190, 9:11, :] = 255
self.rawimg[170:190, 90:110, 9:11, :] = 255
#large grey
self.rawimg[5:25, 90:110, 9:11, :] = 150
self.rawimg[30:50, 90:110, 9:11, :] = 150
self.rawimg[5:25, 120:140, 9:11, :] = 150
self.rawimg[30:50, 120:140, 9:11, :] = 150
self.rawimg[105:125, 90:110, 9:11, :] = 150
self.rawimg[130:150, 90:110, 9:11, :] = 150
self.rawimg[105:125, 120:140, 9:11, :] = 150
self.rawimg[130:150, 120:140, 9:11, :] = 150
self.binimg = (self.rawimg>55).astype(numpy.uint8)
#make one with multiple repeating time steps
self.rawimgt = numpy.zeros((4,)+self.rawimg.shape, dtype=self.rawimg.dtype)
self.binimgt = numpy.zeros(self.rawimgt.shape, dtype=numpy.uint8)
for t in range(self.rawimgt.shape[0]):
self.rawimgt[t, :] = self.rawimg[:]
self.binimgt[t, :] = self.binimg[:]
self.rawimgt = vigra.taggedView(self.rawimgt, 'txyzc')
self.binimgt = vigra.taggedView(self.binimgt, 'txyzc')
#make 5d, the workflow has a make 5d in the beginning of the pipeline
self.rawimg = self.rawimg.reshape((1,)+self.rawimg.shape)
self.rawimg = vigra.taggedView(self.rawimg, 'txyzc')
self.binimg = self.binimg.reshape((1,)+self.binimg.shape)
self.binimg = vigra.taggedView(self.binimg, 'txyzc')
def testNoTime(self):
gr = Graph()
opExtract = OpObjectExtraction(graph=gr)
opPredict = OpObjectClassification(graph=gr)
opExtract.RawImage.setValue(self.rawimg)
opExtract.BinaryImage.setValue(self.binimg)
opExtract.Features.setValue(self.features)
opPredict.RawImages.setValues([self.rawimg])
opPredict.BinaryImages.setValues([self.binimg])
opPredict.SegmentationImages.resize(1)
opPredict.SegmentationImages[0].connect(opExtract.LabelImage)
opPredict.ObjectFeatures.resize(1)
opPredict.ObjectFeatures[0].connect(opExtract.RegionFeatures)
opPredict.ComputedFeatureNames.connect(opExtract.ComputedFeatureNames)
#run the workflow with the test blocks in the gui,
#if you want to see why these labels are chosen
#object 11 -small white square
#object 27 -large grey square
labelArray = numpy.zeros((28,))
labelArray[11] = 1
labelArray[27] = 2
labelDict = {0: labelArray}
opPredict.LabelInputs.setValues([labelDict])
#Predict by size
selFeatures = {"Standard Object Features": {"Count":{}}}
opPredict.SelectedFeatures.setValue(selFeatures)
#[0][0] - first image, first time slice
predictions = opPredict.Predictions[0][0].wait()
predicted_labels = predictions[0]
assert predicted_labels[0]==0
assert numpy.all(predicted_labels[1:16]==1)
assert numpy.all(predicted_labels[16:]==2)
#Predict by color
selFeatures = {"Standard Object Features": {"Mean":{}}}
opPredict.SelectedFeatures.setValue(selFeatures)
predictions = opPredict.Predictions[0][0].wait()
predicted_labels = predictions[0]
assert predicted_labels[0]==0
assert predicted_labels[1]==1
assert predicted_labels[11]==1
assert predicted_labels[16]==1
assert predicted_labels[23]==1
assert predicted_labels[2]==2
assert predicted_labels[18]==2
assert predicted_labels[24]==2
assert predicted_labels[26]==2
#Predict by neighborhood color
selFeatures = {"Standard Object Features": {"Mean in neighborhood":{"margin": (5, 5, 2)}}}
opPredict.SelectedFeatures.setValue(selFeatures)
predictions = opPredict.Predictions[0][0].wait()
predicted_labels = predictions[0]
assert predicted_labels[0]==0
assert predicted_labels[1]==1
assert predicted_labels[8]==1
assert predicted_labels[24]==1
assert predicted_labels[28]==1
assert predicted_labels[9]==2
assert predicted_labels[14]==2
assert predicted_labels[26]==2
assert predicted_labels[29]==2
def testTime(self):
# Move the labels around different time steps.
# Assert the same results
gr = Graph()
opExtract = OpObjectExtraction(graph=gr)
opPredict = OpObjectClassification(graph=gr)
opExtract.RawImage.setValue(self.rawimg)
opExtract.BinaryImage.setValue(self.binimg)
opExtract.Features.setValue(self.features)
opPredict.RawImages.setValues([self.rawimg])
opPredict.BinaryImages.setValues([self.binimg])
opPredict.SegmentationImages.resize(1)
opPredict.SegmentationImages[0].connect(opExtract.LabelImage)
opPredict.ObjectFeatures.resize(1)
opPredict.ObjectFeatures[0].connect(opExtract.RegionFeatures)
opPredict.ComputedFeatureNames.connect(opExtract.ComputedFeatureNames)
grT = Graph()
opExtractT = OpObjectExtraction(graph=grT)
opPredictT = OpObjectClassification(graph=grT)
opExtractT.RawImage.setValue(self.rawimgt)
opExtractT.BinaryImage.setValue(self.binimgt)
opExtractT.Features.setValue(self.features)
opPredictT.RawImages.setValues([self.rawimgt])
opPredictT.BinaryImages.setValues([self.binimgt])
opPredictT.SegmentationImages.resize(1)
opPredictT.SegmentationImages[0].connect(opExtractT.LabelImage)
opPredictT.ObjectFeatures.resize(1)
opPredictT.ObjectFeatures[0].connect(opExtractT.RegionFeatures)
opPredictT.ComputedFeatureNames.connect(opExtractT.ComputedFeatureNames)
#run the workflow with the test blocks in the gui,
#if you want to see why these labels are chosen
#object 11 -small white square
#object 27 -large grey square
labelArray = numpy.zeros((28,))
labelArray[11] = 1
labelArray[27] = 2
labelDict = {0: labelArray}
opPredict.LabelInputs.setValues([labelDict])
labelArray11 = numpy.zeros((12,))
labelArray11[11] = 1
labelArray27 = numpy.zeros((28,))
labelArray27[27] = 2
labelArray0 = numpy.zeros((2,))
labelDictT = {0: labelArray0, 1:labelArray11, 2:labelArray0, 3:labelArray27}
opPredictT.LabelInputs.setValues([labelDictT])
#Predict by size
selFeatures = {"Standard Object Features": {"Count":{}}}
opPredict.SelectedFeatures.setValue(selFeatures)
#[0][0] - first image, first time slice
predictions = opPredict.Predictions[0][0].wait()
predicted_labels = predictions[0]
assert predicted_labels[0]==0
assert numpy.all(predicted_labels[1:16]==1)
assert numpy.all(predicted_labels[16:]==2)
opPredictT.SelectedFeatures.setValue(selFeatures)
predictionsT = opPredictT.Predictions[0][1].wait()
predicted_labels_T = predictionsT[1]
assert predicted_labels_T[0]==0
assert numpy.all(predicted_labels_T[1:16]==1)
assert numpy.all(predicted_labels_T[16:]==2)
#Predict by color
selFeatures = {"Standard Object Features": {"Mean":{}}}
opPredict.SelectedFeatures.setValue(selFeatures)
predictions = opPredict.Predictions[0][0].wait()
predicted_labels = predictions[0]
assert predicted_labels[0]==0
assert predicted_labels[1]==1
assert predicted_labels[11]==1
assert predicted_labels[16]==1
assert predicted_labels[23]==1
assert predicted_labels[2]==2
assert predicted_labels[18]==2
assert predicted_labels[24]==2
assert predicted_labels[26]==2
opPredictT.SelectedFeatures.setValue(selFeatures)
predictionsT = opPredictT.Predictions[0][2].wait()
predicted_labels_T = predictionsT[2]
assert predicted_labels_T[0]==0
assert predicted_labels_T[1]==1
assert predicted_labels_T[11]==1
assert predicted_labels_T[16]==1
assert predicted_labels_T[23]==1
assert predicted_labels_T[2]==2
assert predicted_labels_T[18]==2
assert predicted_labels_T[24]==2
assert predicted_labels_T[26]==2
#Predict by neighborhood color
selFeatures = {"Standard Object Features": {"Mean in neighborhood":{"margin": (5, 5, 2)}}}
opPredict.SelectedFeatures.setValue(selFeatures)
predictions = opPredict.Predictions[0][0].wait()
predicted_labels = predictions[0]
assert predicted_labels[0]==0
assert predicted_labels[1]==1
assert predicted_labels[8]==1
assert predicted_labels[24]==1
assert predicted_labels[28]==1
assert predicted_labels[9]==2
assert predicted_labels[14]==2
assert predicted_labels[26]==2
assert predicted_labels[29]==2
opPredictT.SelectedFeatures.setValue(selFeatures)
predictionsT = opPredictT.Predictions[0][3].wait()
predicted_labels_T = predictionsT[3]
assert predicted_labels_T[0]==0
assert predicted_labels_T[1]==1
assert predicted_labels_T[8]==1
assert predicted_labels_T[24]==1
assert predicted_labels_T[28]==1
assert predicted_labels_T[9]==2
assert predicted_labels_T[14]==2
assert predicted_labels_T[26]==2
assert predicted_labels_T[29]==2
def testMultipleImages(self):
# Now add the images multiple times and distribute labels
# between different copies. Assert same results
gr = Graph()
opPredict = OpObjectClassification(graph=gr)
bin_orig = self.binimg.squeeze()
segimg = vigra.analysis.labelVolumeWithBackground(bin_orig)
vfeats = vigra.analysis.extractRegionFeatures(segimg.astype(numpy.float32), segimg, ["Count"])
counts = vfeats["Count"]
counts[0] = 0
counts = counts.reshape(counts.shape+(1,))
feats = {0: {"Standard Object Features": {"Count": counts}}}
featnames = {'Standard Object Features': {'Count': {}}}
segimg = segimg.reshape((1,)+segimg.shape+(1,))
segimg = vigra.taggedView(segimg, 'txyzc')
opPredict.RawImages.setValues([self.rawimg, self.rawimg, self.rawimg])
opPredict.BinaryImages.setValues([self.binimg, self.binimg, self.binimg])
opPredict.SegmentationImages.setValues([segimg, segimg, segimg])
opPredict.ObjectFeatures.setValues([feats, feats, feats])
opPredict.ComputedFeatureNames.setValue(featnames)
#run the workflow with the test blocks in the gui,
#if you want to see why these labels are chosen
#object 11 -small white square
#object 27 -large grey square
labelArray11 = numpy.zeros((12,))
labelArray11[11] = 1
labelArray27 = numpy.zeros((28,))
labelArray27[27] = 2
labelArray0 = numpy.zeros((2,))
labelDict11 = {0: labelArray11}
labelDict27 = {0: labelArray27}
labelDict0 = {0: labelArray0}
opPredict.LabelInputs.setValues([labelDict11, labelDict0, labelDict27])
#Predict by size
selFeatures = {"Standard Object Features": {"Count":{}}}
opPredict.SelectedFeatures.setValue(selFeatures)
#[0][0] - first image, first time slice
predictions = opPredict.Predictions[0][0].wait()
predicted_labels = predictions[0]
assert predicted_labels[0]==0
assert numpy.all(predicted_labels[1:16]==1)
assert numpy.all(predicted_labels[16:]==2)
if __name__ == '__main__':
import sys
import nose
# Don't steal stdout. Show it on the console as usual.
sys.argv.append("--nocapture")
# Don't set the logging level to DEBUG. Leave it alone.
sys.argv.append("--nologcapture")
nose.run(defaultTest=__file__)
| nielsbuwen/ilastik | tests/test_applets/objectClassification/testPrediction.py | Python | gpl-3.0 | 15,423 |
# Copyright (C) 2009 by Ning Shi and Andy Pavlo
# Brown University
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT
# IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
# ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
import sys
import os
from urlparse import urlparse
from xml.sax import make_parser
from xml.sax.handler import ContentHandler
from xml.sax.handler import feature_namespaces
from xml.sax.xmlreader import InputSource
try:
from cStringIO import StringIO as _StringIO
except ImportError:
from StringIO import StringIO as _StringIO
# Detects the directory which contains this file so that we can import httplib2.
cwd = os.getcwd()
realpath = os.path.realpath(__file__)
basedir = os.path.dirname(realpath)
basename = os.path.basename(realpath)
if not os.path.exists(realpath):
if os.path.exists(os.path.join(cwd, basename)):
basedir = cwd
sys.path.append(os.path.dirname(basedir))
from exceptions import *
class ContentParser(ContentHandler):
"""XML handler class.
This class is used by the SAX XML parser.
"""
__parallels = ("Statements",)
__terminals = ("SQL", "Status", "Info", "Result", "Seed")
def __init__(self, l):
"""Constructor.
'l': An empty dictionary to be filled with request pairs.
"""
ContentHandler.__init__(self)
self.rp_list = l
self.__current_key = []
self.__current = [self.rp_list]
def startElement(self, name, attrs):
"""Marks the start of an element.
This is the callback used by the SAX XML parser to notify us that a new
element begins.
'name': The name of the element.
'attrs': The attributes of the element.
"""
name = name.encode("utf-8")
if name in self.__terminals:
self.__current_key.append(name)
return
elif name in self.__parallels:
self.__current[-1][name] = []
self.__current.append(self.__current[-1][name])
else:
if type(self.__current[-1]) is list:
self.__current[-1].append({})
self.__current.append(self.__current[-1][-1])
else:
self.__current[-1][name] = {}
self.__current.append(self.__current[-1][name])
self.__current_key.append(name)
for n in attrs.getNames():
self.__current[-1][n.encode("utf-8")] = attrs.getValue(n)
def endElement(self, name):
"""Marks the end of an element.
This is the callback used by the SAX XML parser to notify us that an
opened element ends.
'name': The name of the element.
"""
name = name.encode("utf-8")
if self.__current_key[-1] != name:
raise InvalidXML("Start tag does not match end tag.")
if name not in self.__terminals:
self.__current.pop()
self.__current_key.pop()
def characters(self, content):
"""Marks the inner content of an element.
This is the callback used by the SAX XML parser to notify us about the
inner content of an element.
'content': The inner content.
"""
content = content.strip()
if content:
if len(self.__current_key) == 0:
raise InvalidXML("No tag opened.")
if type(self.__current[-1]) is list:
self.__current[-1].append(content)
else:
if self.__current_key[-1] in self.__current[-1]:
content = self.__current[-1][self.__current_key[-1]] + \
content
else:
content = content
self.__current[-1][self.__current_key[-1]] = content
class XMLParser:
"""The XML parser.
TODO: We should validate the XML before parsing.
"""
def __init__(self, url_file_string):
"""Constructor.
        'url_file_string': A URL, a file handle, or a filename pointing to the
        actual XML document, or the actual XML data itself.
"""
self.url_file_string = url_file_string
self.rp_list = {}
self.__data = None
self.__parser = make_parser()
self.__parser.setFeature(feature_namespaces, 0)
self.__parser.setContentHandler(ContentParser(self.rp_list))
def __open_resource__(self):
"""Opens the resource depends on the type of information given.
If it is a file handle, nothing needs to be done; if it is the XML data,
make it readable like a file; if it is a filename, open it and return
the file handle.
Return: A handle to read from by calling the method 'read()' of the
handle.
"""
if hasattr(self.url_file_string, 'read'):
return self.url_file_string
if self.url_file_string == "-":
return sys.stdin
if self.url_file_string[0] == "<":
return _StringIO(self.url_file_string.encode("utf-8"))
try:
return open(self.url_file_string)
except:
pass
def __read_data__(self):
"""Reads the XML document.
"""
if self.__data:
return
fd = self.__open_resource__()
if fd:
data = fd.read()
fd.close()
self.__data = InputSource()
self.__data.setByteStream(_StringIO(data))
def __parse__(self):
"""Parses the XML document.
"""
if not self.__data:
self.__read_data__()
if self.__data:
self.__parser.parse(self.__data)
def get_data(self):
"""Gets the request pairs dictionary.
Return: The request pairs dictionary.
"""
if not self.__data:
self.__parse__()
return self.rp_list["SQLGenerator"]
| ifcharming/original2.0 | tests/scripts/xml2/xmlparser.py | Python | gpl-3.0 | 6,723 |
from docutils.parsers.rst import Directive, directives
from docutils import nodes
from docutils.parsers.rst.directives.admonitions import BaseAdmonition
from sphinx.util import compat
compat.make_admonition = BaseAdmonition
from sphinx import addnodes
from sphinx.locale import _
class bestpractice(nodes.Admonition, nodes.Element):
pass
class BestPractice(Directive):
has_content = True
required_arguments = 0
optional_arguments = 1
final_argument_whitespace = True
option_spec = {}
def run(self):
        ret = compat.make_admonition(
bestpractice, self.name, [_('Best Practice')], self.options,
self.content, self.lineno, self.content_offset, self.block_text,
self.state, self.state_machine)
if self.arguments:
argnodes, msgs = self.state.inline_text(self.arguments[0],
self.lineno)
para = nodes.paragraph()
para += argnodes
para += msgs
ret[0].insert(1, para)
return ret
def visit_bestpractice_node(self, node):
self.body.append(self.starttag(node, 'div', CLASS=('admonition best-practice')))
self.set_first_last(node)
def depart_bestpractice_node(self, node):
self.depart_admonition(node)
def setup(app):
app.add_node(bestpractice, html=(visit_bestpractice_node, depart_bestpractice_node))
app.add_directive('best-practice', BestPractice)
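# Usage sketch (illustrative reStructuredText): once this module is listed in
# a Sphinx project's ``extensions``, documents can use the directive like::
#
#     .. best-practice:: Keep configuration out of templates
#
#        Body text rendered inside the "best-practice" admonition box.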
| takeit/web-publisher | docs/_extensions/sensio/sphinx/bestpractice.py | Python | agpl-3.0 | 1,458 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyAzureMgmtDeploymentmanager(PythonPackage):
"""Microsoft Azure Deployment Manager Client Library for Python."""
homepage = "https://github.com/Azure/azure-sdk-for-python"
pypi = "azure-mgmt-deploymentmanager/azure-mgmt-deploymentmanager-0.2.0.zip"
version('0.2.0', sha256='46e342227993fc9acab1dda42f2eb566b522a8c945ab9d0eea56276b46f6d730')
depends_on('py-setuptools', type='build')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:1', type=('build', 'run'))
depends_on('[email protected]:1', type=('build', 'run'))
depends_on('py-azure-mgmt-nspkg', when='^python@:2', type=('build', 'run'))
| LLNL/spack | var/spack/repos/builtin/packages/py-azure-mgmt-deploymentmanager/package.py | Python | lgpl-2.1 | 877 |
##############################################################################
# Copyright (c) 2017, Los Alamos National Security, LLC
# Produced at the Los Alamos National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class Yajl(CMakePackage):
"""Yet Another JSON Library (YAJL)"""
homepage = "http://lloyd.github.io/yajl/"
url = "https://github.com/lloyd/yajl/archive/2.1.0.zip"
version('develop', git='https://github.com/lloyd/yajl.git', branch='master')
version('2.1.0', '5eb9c16539bf354b937fcb20e263d1eb')
| skosukhin/spack | var/spack/repos/builtin/packages/yajl/package.py | Python | lgpl-2.1 | 1,554 |
"""Test suite for abdt_branch."""
# =============================================================================
# TEST PLAN
# -----------------------------------------------------------------------------
# Here we detail the things we are concerned to test and specify which tests
# cover those concerns.
#
# Concerns:
# [XB] can test is_abandoned, is_null, is_new
# [XC] can move between all states without error
# [XD] can set and retrieve repo name, branch link
# [ C] can move bad_pre_review -> 'new' states without duplicating branches
# [ D] unique names and emails are returned in the order of most recent first
# [ E] all commits are shown when no arguments are supplied
# [ E] number of commits can be limited by max_commits argument
# [ E] number of commits can be limited by max_size argument
# [ ] can detect if review branch has new commits (after ff, merge, rebase)
# [ ] can get raw diff from branch
# [ ] can get author names and emails from branch
# [ ] raise if get author names and emails from branch with no history
# [ ] raise if get author names and emails from branch with invalid base
# [ ] can 'get_any_author_emails', raise if no emails ever
# [ ] bad unicode chars in diffs
# [ ] bad unicode chars in commit messages
# [ ] can land an uncomplicated review
# [ ] XXX: withReservedBranch
# [ ] XXX: emptyMergeWorkflow
# [ ] XXX: mergeConflictWorkflow
# [ ] XXX: changeAlreadyMergedOnBase
# [ ] XXX: commandeeredLand
# [ ] XXX: createHugeReview
# [ ] XXX: hugeUpdateToReview
# [ ] XXX: empty repository, no history
# [ ] XXX: landing when origin has been updated underneath us
# [ ] XXX: moving tracker branches when there's something in the way
# -----------------------------------------------------------------------------
# Tests:
# [ A] test_A_Breathing
# [ B] test_B_Empty
# [ C] test_C_BadPreReviewToNew
# [ D] test_D_AlternatingAuthors
# [ E] test_E_NewCommitsDescription
# [XB] test_XB_UntrackedBranch
# [XC] test_XC_MoveBetweenAllMarkedStates
# [XD] check_XD_SetRetrieveRepoNameBranchLink
# =============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import unittest
import phlgit_branch
import phlgit_push
import phlgit_revparse
import phlgitu_fixture
import phlgitx_refcache
import abdt_branch
import abdt_branchtester
import abdt_classicnaming
import abdt_differresultcache
import abdt_git
import abdt_naming
class Test(unittest.TestCase):
def __init__(self, data):
super(Test, self).__init__(data)
self.repos = None
self.repo_central = None
self.repo_dev = None
self.repo_arcyd = None
def setUp(self):
self.repos = phlgitu_fixture.CentralisedWithTwoWorkers()
self.repo_central = self.repos.central_repo
self.repo_dev = self.repos.w0.repo
sys_repo = self.repos.w1.repo
refcache_repo = phlgitx_refcache.Repo(sys_repo)
differ_cache = abdt_differresultcache.Cache(refcache_repo)
self.repo_arcyd = abdt_git.Repo(
refcache_repo, differ_cache, 'origin', 'myrepo')
def tearDown(self):
self.repos.close()
def test_A_Breathing(self):
pass
def test_B_Empty(self):
pass
def test_C_BadPreReviewToNew(self):
# can move bad_pre_review -> 'new' states without duplicating branches
base, branch_name, branch = self._setup_for_untracked_branch()
transition_list = [
branch.mark_ok_new_review, branch.mark_new_bad_in_review
]
for do_transition in transition_list:
branches = phlgit_branch.get_remote(self.repo_arcyd, 'origin')
branch.mark_bad_pre_review()
branches_bad_pre = phlgit_branch.get_remote(
self.repo_arcyd, 'origin')
do_transition(102)
branches_new = phlgit_branch.get_remote(self.repo_arcyd, 'origin')
# we expect to have gained one branch when starting to track as
# 'bad_pre_review'.
self.assertEqual(len(branches_bad_pre), len(branches) + 1)
# we expect to have the same number of branches after moving with
# 'mark_ok_new_review'
self.assertEqual(len(branches_bad_pre), len(branches_new))
# remove the tracking branch and make sure the count has gone down
branch.clear_mark()
branches_cleared = phlgit_branch.get_remote(
self.repo_arcyd, 'origin')
self.assertEqual(len(branches_cleared), len(branches))
def test_D_AlternatingAuthors(self):
base, branch_name, branch = self._setup_for_untracked_branch()
alice_user = 'Alice'
alice_email = '[email protected]'
bob_user = 'Bob'
bob_email = '[email protected]'
self._dev_commit_new_empty_file('ALICE1', alice_user, alice_email)
self._dev_commit_new_empty_file('BOB1', bob_user, bob_email)
self._dev_commit_new_empty_file('ALICE2', alice_user, alice_email)
phlgit_push.push(self.repo_dev, branch_name, 'origin')
self.repo_arcyd('fetch', 'origin')
author_names_emails = branch.get_author_names_emails()
self.assertTupleEqual(
author_names_emails[0],
(bob_user, bob_email))
self.assertTupleEqual(
author_names_emails[1],
(alice_user, alice_email))
# any_author_emails = branch.get_any_author_emails()
# self.assertEqual(any_author_emails[-1], alice_email)
# self.assertEqual(any_author_emails[-2], bob_email)
def test_E_NewCommitsDescription(self):
base, branch_name, branch = self._setup_for_untracked_branch()
user = 'Alice'
email = '[email protected]'
self._dev_commit_new_empty_file('Commit 1', user, email)
self._dev_commit_new_empty_file('Commit 2', user, email)
self._dev_commit_new_empty_file('Commit 3', user, email)
self._dev_commit_new_empty_file('Commit 4', user, email)
phlgit_push.push(self.repo_dev, branch_name, 'origin')
self.repo_arcyd('fetch', 'origin')
# [ E] all commits are shown when no arguments are supplied
new_commits_str = branch.describe_new_commits()
new_commits = new_commits_str.splitlines()
self.assertEqual(4, len(new_commits))
count = 4
for line in new_commits:
self.assertTrue(line.endswith('Commit {}'.format(count)))
count -= 1
# [ E] number of commits can be limited by max_commits argument
new_commits_str = branch.describe_new_commits(2)
new_commits = new_commits_str.splitlines()
self.assertEqual(3, len(new_commits))
self.assertTrue(new_commits[0].endswith('Commit 4'))
self.assertTrue(new_commits[1].endswith('Commit 3'))
self.assertEqual(new_commits[2], '...2 commits not shown.')
# [ E] number of commits can be limited by max_size argument
new_commits_str = branch.describe_new_commits(3, 20)
new_commits = new_commits_str.splitlines()
self.assertEqual(2, len(new_commits))
self.assertTrue(new_commits[0].endswith('Commit 4'))
self.assertEqual(new_commits[1], '...3 commits not shown.')
def _dev_commit_new_empty_file(self, filename, user, email):
self._create_new_file(self.repo_dev, filename)
self.repo_dev('add', filename)
self.repo_dev(
'commit',
'-m',
filename,
'--author=' + '{} <{}>'.format(user, email))
def test_XB_UntrackedBranch(self):
abdt_branchtester.check_XB_UntrackedBranch(self)
def test_XC_MoveBetweenAllMarkedStates(self):
abdt_branchtester.check_XC_MoveBetweenAllMarkedStates(self)
def check_D_SetRetrieveRepoNameBranchLink(self):
abdt_branchtester.check_XD_SetRetrieveRepoNameBranchLink(self)
def _create_new_file(self, repo, filename):
self.assertFalse(os.path.isfile(filename))
open(os.path.join(repo.working_dir, filename), 'a').close()
def _setup_for_tracked_branch(self):
base, branch_name, branch = self._setup_for_untracked_branch()
branch.mark_ok_new_review(101)
return base, branch_name, branch
def _setup_for_untracked_branch(self, repo_name='name', branch_url=None):
base = abdt_naming.EXAMPLE_REVIEW_BRANCH_BASE
naming = abdt_classicnaming.Naming()
branch_name = abdt_classicnaming.EXAMPLE_REVIEW_BRANCH_NAME
self.repo_dev('checkout', '-b', branch_name)
phlgit_push.push(self.repo_dev, branch_name, 'origin')
self.repo_arcyd('fetch', 'origin')
review_branch = naming.make_review_branch_from_name(branch_name)
review_hash = phlgit_revparse.get_sha1_or_none(
self.repo_arcyd, review_branch.branch)
branch = abdt_branch.Branch(
self.repo_arcyd,
review_branch,
review_hash,
None,
None,
None,
repo_name,
branch_url)
# should not raise
branch.verify_review_branch_base()
return base, branch_name, branch
# -----------------------------------------------------------------------------
# Copyright (C) 2013-2015 Bloomberg Finance L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------ END-OF-FILE ----------------------------------
| cs-shadow/phabricator-tools | py/abd/abdt_branch__t.py | Python | apache-2.0 | 10,066 |
from extractors.extract_website import ExtractWebsite
from datawakestreams.extractors.extractor_bolt import ExtractorBolt
class WebsiteBolt(ExtractorBolt):
name ='website_extractor'
def __init__(self):
ExtractorBolt.__init__(self)
self.extractor = ExtractWebsite()
| Sotera/Datawake-Legacy | memex-datawake-stream/src/datawakestreams/extractors/website_bolt.py | Python | apache-2.0 | 293 |
#!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Spout for Apache Pulsar: """
import os
import tempfile
import pulsar
import heronpy.api.src.python.api_constants as api_constants
from heronpy.api.src.python.spout.spout import Spout
from heronpy.streamlet.src.python.streamletboltbase import StreamletBoltBase
def GenerateLogConfContents(logFileName):
return """
# Define the root logger with appender file
log4j.rootLogger = INFO, FILE
# Define the file appender
log4j.appender.FILE=org.apache.log4j.DailyRollingFileAppender
log4j.appender.FILE.File=%s""" % logFileName + """
log4j.appender.FILE.Threshold=INFO
log4j.appender.FILE.DatePattern='.' yyyy-MM-dd-a
log4j.appender.FILE.layout=org.apache.log4j.PatternLayout
log4j.appender.FILE.layout.ConversionPattern=%d{yy-MM-dd HH:mm:ss.SSS} %X{pname}:%X{pid} %-5p %l- %m%n
"""
def GenerateLogConfig(context):
namePrefix = str(context.get_component_id()) + "-" + str(context.get_task_id())
logFileName = os.getcwd() + "/" + namePrefix
flHandler = tempfile.NamedTemporaryFile(prefix=namePrefix, suffix='.conf',
dir=os.getcwd(), delete=False)
flHandler.write(GenerateLogConfContents(logFileName))
flHandler.flush()
flHandler.close()
return flHandler.name
class PulsarSpout(Spout, StreamletBoltBase):
"""PulsarSpout: reads from a pulsar topic"""
# pylint: disable=too-many-instance-attributes
# pylint: disable=no-self-use
def default_deserializer(self, msg):
return [str(msg)]
# TopologyBuilder uses these constants to set
# cluster/topicname
serviceUrl = "PULSAR_SERVICE_URL"
topicName = "PULSAR_TOPIC"
receiveTimeoutMs = "PULSAR_RECEIVE_TIMEOUT_MS"
deserializer = "PULSAR_MESSAGE_DESERIALIZER"
def initialize(self, config, context):
"""Implements Pulsar Spout's initialize method"""
self.logger.info("Initializing PulsarSpout with the following")
self.logger.info("Component-specific config: \n%s" % str(config))
self.logger.info("Context: \n%s" % str(context))
self.emit_count = 0
self.ack_count = 0
self.fail_count = 0
if not PulsarSpout.serviceUrl in config or not PulsarSpout.topicName in config:
self.logger.fatal("Need to specify both serviceUrl and topicName")
self.pulsar_cluster = str(config[PulsarSpout.serviceUrl])
self.topic = str(config[PulsarSpout.topicName])
mode = config[api_constants.TOPOLOGY_RELIABILITY_MODE]
if mode == api_constants.TopologyReliabilityMode.ATLEAST_ONCE:
self.acking_timeout = 1000 * int(config[api_constants.TOPOLOGY_MESSAGE_TIMEOUT_SECS])
else:
self.acking_timeout = 30000
if PulsarSpout.receiveTimeoutMs in config:
self.receive_timeout_ms = config[PulsarSpout.receiveTimeoutMs]
else:
self.receive_timeout_ms = 10
if PulsarSpout.deserializer in config:
self.deserializer = config[PulsarSpout.deserializer]
if not callable(self.deserializer):
self.logger.fatal("Pulsar Message Deserializer needs to be callable")
else:
self.deserializer = self.default_deserializer
# First generate the config
self.logConfFileName = GenerateLogConfig(context)
self.logger.info("Generated LogConf at %s" % self.logConfFileName)
# We currently use the high level consumer API
# For supporting effectively once, we will need to switch
# to using lower level Reader API, when it becomes
# available in python
self.client = pulsar.Client(self.pulsar_cluster, log_conf_file_path=self.logConfFileName)
self.logger.info("Setup Client with cluster %s" % self.pulsar_cluster)
try:
self.consumer = self.client.subscribe(self.topic, context.get_topology_name(),
consumer_type=pulsar.ConsumerType.Failover,
unacked_messages_timeout_ms=self.acking_timeout)
except Exception as e:
self.logger.fatal("Pulsar client subscription failed: %s" % str(e))
self.logger.info("Subscribed to topic %s" % self.topic)
def next_tuple(self):
try:
msg = self.consumer.receive(timeout_millis=self.receive_timeout_ms)
except Exception as e:
self.logger.debug("Exception during recieve: %s" % str(e))
return
try:
self.emit(self.deserializer(msg.data()), tup_id=msg.message_id())
self.emit_count += 1
except Exception as e:
self.logger.info("Exception during emit: %s" % str(e))
def ack(self, tup_id):
self.ack_count += 1
self.consumer.acknowledge(tup_id)
def fail(self, tup_id):
self.fail_count += 1
self.logger.debug("Failed tuple %s" % str(tup_id))
| mycFelix/heron | heronpy/connectors/pulsar/pulsarspout.py | Python | apache-2.0 | 5,438 |
# Copyright (c) 2010-2012 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Internal client library for making calls directly to the servers rather than
through the proxy.
"""
import socket
from httplib import HTTPException
from time import time
from urllib import quote as _quote
from eventlet import sleep, Timeout
from swift.common.bufferedhttp import http_connect
from swiftclient import ClientException, json_loads
from swift.common.utils import normalize_timestamp
from swift.common.http import HTTP_NO_CONTENT, HTTP_INSUFFICIENT_STORAGE, \
is_success, is_server_error
def quote(value, safe='/'):
if isinstance(value, unicode):
value = value.encode('utf8')
return _quote(value, safe)
def direct_get_account(node, part, account, marker=None, limit=None,
prefix=None, delimiter=None, conn_timeout=5,
response_timeout=15):
"""
Get listings directly from the account server.
:param node: node dictionary from the ring
:param part: partition the account is on
:param account: account name
:param marker: marker query
:param limit: query limit
:param prefix: prefix query
    :param delimiter: delimiter for the query
:param conn_timeout: timeout in seconds for establishing the connection
:param response_timeout: timeout in seconds for getting the response
    :returns: a tuple of (response headers, a list of containers). The response
headers will be a dict and all header names will be lowercase.
"""
path = '/' + account
qs = 'format=json'
if marker:
qs += '&marker=%s' % quote(marker)
if limit:
qs += '&limit=%d' % limit
if prefix:
qs += '&prefix=%s' % quote(prefix)
if delimiter:
qs += '&delimiter=%s' % quote(delimiter)
with Timeout(conn_timeout):
conn = http_connect(node['ip'], node['port'], node['device'], part,
'GET', path, query_string=qs)
with Timeout(response_timeout):
resp = conn.getresponse()
if not is_success(resp.status):
resp.read()
raise ClientException(
'Account server %s:%s direct GET %s gave status %s' % (node['ip'],
node['port'], repr('/%s/%s%s' % (node['device'], part, path)),
resp.status),
http_host=node['ip'], http_port=node['port'],
http_device=node['device'], http_status=resp.status,
http_reason=resp.reason)
resp_headers = {}
for header, value in resp.getheaders():
resp_headers[header.lower()] = value
if resp.status == HTTP_NO_CONTENT:
resp.read()
return resp_headers, []
return resp_headers, json_loads(resp.read())
def direct_head_container(node, part, account, container, conn_timeout=5,
response_timeout=15):
"""
Request container information directly from the container server.
:param node: node dictionary from the ring
:param part: partition the container is on
:param account: account name
:param container: container name
:param conn_timeout: timeout in seconds for establishing the connection
:param response_timeout: timeout in seconds for getting the response
:returns: a dict containing the response's headers (all header names will
be lowercase)
"""
path = '/%s/%s' % (account, container)
with Timeout(conn_timeout):
conn = http_connect(node['ip'], node['port'], node['device'], part,
'HEAD', path)
with Timeout(response_timeout):
resp = conn.getresponse()
resp.read()
if not is_success(resp.status):
raise ClientException(
'Container server %s:%s direct HEAD %s gave status %s' %
(node['ip'], node['port'],
repr('/%s/%s%s' % (node['device'], part, path)),
resp.status),
http_host=node['ip'], http_port=node['port'],
http_device=node['device'], http_status=resp.status,
http_reason=resp.reason)
resp_headers = {}
for header, value in resp.getheaders():
resp_headers[header.lower()] = value
return resp_headers
def direct_get_container(node, part, account, container, marker=None,
limit=None, prefix=None, delimiter=None,
conn_timeout=5, response_timeout=15):
"""
Get container listings directly from the container server.
:param node: node dictionary from the ring
:param part: partition the container is on
:param account: account name
:param container: container name
:param marker: marker query
:param limit: query limit
:param prefix: prefix query
    :param delimiter: delimiter for the query
:param conn_timeout: timeout in seconds for establishing the connection
:param response_timeout: timeout in seconds for getting the response
    :returns: a tuple of (response headers, a list of objects). The response
headers will be a dict and all header names will be lowercase.
"""
path = '/%s/%s' % (account, container)
qs = 'format=json'
if marker:
qs += '&marker=%s' % quote(marker)
if limit:
qs += '&limit=%d' % limit
if prefix:
qs += '&prefix=%s' % quote(prefix)
if delimiter:
qs += '&delimiter=%s' % quote(delimiter)
with Timeout(conn_timeout):
conn = http_connect(node['ip'], node['port'], node['device'], part,
'GET', path, query_string=qs)
with Timeout(response_timeout):
resp = conn.getresponse()
if not is_success(resp.status):
resp.read()
raise ClientException(
            'Container server %s:%s direct GET %s gave status %s' % (node['ip'],
node['port'], repr('/%s/%s%s' % (node['device'], part, path)),
resp.status),
http_host=node['ip'], http_port=node['port'],
http_device=node['device'], http_status=resp.status,
http_reason=resp.reason)
resp_headers = {}
for header, value in resp.getheaders():
resp_headers[header.lower()] = value
if resp.status == HTTP_NO_CONTENT:
resp.read()
return resp_headers, []
return resp_headers, json_loads(resp.read())
def direct_delete_container(node, part, account, container, conn_timeout=5,
response_timeout=15, headers={}):
path = '/%s/%s' % (account, container)
headers['X-Timestamp'] = normalize_timestamp(time())
with Timeout(conn_timeout):
conn = http_connect(node['ip'], node['port'], node['device'], part,
'DELETE', path, headers)
with Timeout(response_timeout):
resp = conn.getresponse()
resp.read()
if not is_success(resp.status):
raise ClientException(
'Container server %s:%s direct DELETE %s gave status %s' %
(node['ip'], node['port'],
repr('/%s/%s%s' % (node['device'], part, path)),
resp.status),
http_host=node['ip'], http_port=node['port'],
http_device=node['device'], http_status=resp.status,
http_reason=resp.reason)
def direct_head_object(node, part, account, container, obj, conn_timeout=5,
response_timeout=15):
"""
Request object information directly from the object server.
:param node: node dictionary from the ring
:param part: partition the container is on
:param account: account name
:param container: container name
:param obj: object name
:param conn_timeout: timeout in seconds for establishing the connection
:param response_timeout: timeout in seconds for getting the response
:returns: a dict containing the response's headers (all header names will
be lowercase)
"""
path = '/%s/%s/%s' % (account, container, obj)
with Timeout(conn_timeout):
conn = http_connect(node['ip'], node['port'], node['device'], part,
'HEAD', path)
with Timeout(response_timeout):
resp = conn.getresponse()
resp.read()
if not is_success(resp.status):
raise ClientException(
'Object server %s:%s direct HEAD %s gave status %s' %
(node['ip'], node['port'],
repr('/%s/%s%s' % (node['device'], part, path)),
resp.status),
http_host=node['ip'], http_port=node['port'],
http_device=node['device'], http_status=resp.status,
http_reason=resp.reason)
resp_headers = {}
for header, value in resp.getheaders():
resp_headers[header.lower()] = value
return resp_headers
def direct_get_object(node, part, account, container, obj, conn_timeout=5,
response_timeout=15, resp_chunk_size=None, headers={}):
"""
Get object directly from the object server.
:param node: node dictionary from the ring
:param part: partition the container is on
:param account: account name
:param container: container name
:param obj: object name
:param conn_timeout: timeout in seconds for establishing the connection
:param response_timeout: timeout in seconds for getting the response
:param resp_chunk_size: if defined, chunk size of data to read.
:param headers: dict to be passed into HTTPConnection headers
    :returns: a tuple of (response headers, the object's contents). The response
headers will be a dict and all header names will be lowercase.
"""
path = '/%s/%s/%s' % (account, container, obj)
with Timeout(conn_timeout):
conn = http_connect(node['ip'], node['port'], node['device'], part,
'GET', path, headers=headers)
with Timeout(response_timeout):
resp = conn.getresponse()
if not is_success(resp.status):
resp.read()
raise ClientException(
'Object server %s:%s direct GET %s gave status %s' %
(node['ip'], node['port'],
repr('/%s/%s%s' % (node['device'], part, path)),
resp.status),
http_host=node['ip'], http_port=node['port'],
http_device=node['device'], http_status=resp.status,
http_reason=resp.reason)
if resp_chunk_size:
def _object_body():
buf = resp.read(resp_chunk_size)
while buf:
yield buf
buf = resp.read(resp_chunk_size)
object_body = _object_body()
else:
object_body = resp.read()
resp_headers = {}
for header, value in resp.getheaders():
resp_headers[header.lower()] = value
return resp_headers, object_body
def direct_put_object(node, part, account, container, name, contents,
content_length=None, etag=None, content_type=None,
headers=None, conn_timeout=5, response_timeout=15,
resp_chunk_size=None):
"""
    Put object directly to the object server.
:param node: node dictionary from the ring
:param part: partition the container is on
:param account: account name
:param container: container name
:param name: object name
:param contents: a string to read object data from
:param content_length: value to send as content-length header
:param etag: etag of contents
:param content_type: value to send as content-type header
:param headers: additional headers to include in the request
:param conn_timeout: timeout in seconds for establishing the connection
:param response_timeout: timeout in seconds for getting the response
    :param resp_chunk_size: if defined, chunk size of data to send.
:returns: etag from the server response
"""
# TODO: Add chunked puts
path = '/%s/%s/%s' % (account, container, name)
if headers is None:
headers = {}
if etag:
headers['ETag'] = etag.strip('"')
if content_length is not None:
headers['Content-Length'] = str(content_length)
if content_type is not None:
headers['Content-Type'] = content_type
else:
headers['Content-Type'] = 'application/octet-stream'
if not contents:
headers['Content-Length'] = '0'
headers['X-Timestamp'] = normalize_timestamp(time())
with Timeout(conn_timeout):
conn = http_connect(node['ip'], node['port'], node['device'], part,
'PUT', path, headers=headers)
conn.send(contents)
with Timeout(response_timeout):
resp = conn.getresponse()
resp.read()
if not is_success(resp.status):
raise ClientException(
'Object server %s:%s direct PUT %s gave status %s' %
(node['ip'], node['port'],
repr('/%s/%s%s' % (node['device'], part, path)),
resp.status),
http_host=node['ip'], http_port=node['port'],
http_device=node['device'], http_status=resp.status,
http_reason=resp.reason)
return resp.getheader('etag').strip('"')
def direct_post_object(node, part, account, container, name, headers,
conn_timeout=5, response_timeout=15):
"""
Direct update to object metadata on object server.
:param node: node dictionary from the ring
:param part: partition the container is on
:param account: account name
:param container: container name
:param name: object name
:param headers: headers to store as metadata
:param conn_timeout: timeout in seconds for establishing the connection
:param response_timeout: timeout in seconds for getting the response
:raises ClientException: HTTP POST request failed
"""
path = '/%s/%s/%s' % (account, container, name)
headers['X-Timestamp'] = normalize_timestamp(time())
with Timeout(conn_timeout):
conn = http_connect(node['ip'], node['port'], node['device'], part,
'POST', path, headers=headers)
with Timeout(response_timeout):
resp = conn.getresponse()
resp.read()
if not is_success(resp.status):
raise ClientException(
'Object server %s:%s direct POST %s gave status %s' %
(node['ip'], node['port'],
repr('/%s/%s%s' % (node['device'], part, path)),
resp.status),
http_host=node['ip'], http_port=node['port'],
http_device=node['device'], http_status=resp.status,
http_reason=resp.reason)
def direct_delete_object(node, part, account, container, obj,
conn_timeout=5, response_timeout=15, headers={}):
"""
Delete object directly from the object server.
:param node: node dictionary from the ring
:param part: partition the container is on
:param account: account name
:param container: container name
:param obj: object name
:param conn_timeout: timeout in seconds for establishing the connection
:param response_timeout: timeout in seconds for getting the response
:returns: response from server
"""
path = '/%s/%s/%s' % (account, container, obj)
headers['X-Timestamp'] = normalize_timestamp(time())
with Timeout(conn_timeout):
conn = http_connect(node['ip'], node['port'], node['device'], part,
'DELETE', path, headers)
with Timeout(response_timeout):
resp = conn.getresponse()
resp.read()
if not is_success(resp.status):
raise ClientException(
'Object server %s:%s direct DELETE %s gave status %s' %
(node['ip'], node['port'],
repr('/%s/%s%s' % (node['device'], part, path)),
resp.status),
http_host=node['ip'], http_port=node['port'],
http_device=node['device'], http_status=resp.status,
http_reason=resp.reason)
def retry(func, *args, **kwargs):
"""
Helper function to retry a given function a number of times.
:param func: callable to be called
:param retries: number of retries
:param error_log: logger for errors
:param args: arguments to send to func
    :param kwargs: keyword arguments to send to func (if retries or
error_log are sent, they will be deleted from kwargs
before sending on to func)
    :returns: result of func
"""
retries = 5
if 'retries' in kwargs:
retries = kwargs['retries']
del kwargs['retries']
error_log = None
if 'error_log' in kwargs:
error_log = kwargs['error_log']
del kwargs['error_log']
attempts = 0
backoff = 1
while attempts <= retries:
attempts += 1
try:
return attempts, func(*args, **kwargs)
except (socket.error, HTTPException, Timeout), err:
if error_log:
error_log(err)
if attempts > retries:
raise
except ClientException, err:
if error_log:
error_log(err)
if attempts > retries or not is_server_error(err.http_status) or \
err.http_status == HTTP_INSUFFICIENT_STORAGE:
raise
sleep(backoff)
backoff *= 2
# Shouldn't actually get down here, but just in case.
if args and 'ip' in args[0]:
raise ClientException('Raise too many retries',
http_host=args[0]['ip'], http_port=args[0]['port'],
http_device=args[0]['device'])
else:
raise ClientException('Raise too many retries')
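# Usage sketch (illustrative; the node, partition and names below are
# placeholders): retry() wraps any of the direct_* helpers above and retries
# on socket/server errors with exponential backoff, e.g.
#
#   node = {'ip': '127.0.0.1', 'port': 6000, 'device': 'sda1'}
#   attempts, headers = retry(direct_head_object, node, 0,
#                             'AUTH_test', 'photos', 'cat.jpg', retries=3)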
| VictorLowther/swift | swift/common/direct_client.py | Python | apache-2.0 | 18,186 |
import sys
from Boxing import Box, dump_exception, load_exception
from ModuleNetProxy import RootImporter
from Lib import raise_exception, AttrFrontend
FRAME_REQUEST = 1
FRAME_RESULT = 2
FRAME_EXCEPTION = 3
class Connection(object):
"""
the rpyc connection layer (protocol and APIs). generally speaking, the only
things you'll need to access directly from this object are:
    * modules - represents the remote python interpreter's modules namespace
* execute - executes the given code on the other side of the connection
* namespace - the namespace in which the code you `execute` resides
    the rest of the attributes should be of no interest to you, except maybe
    for `remote_conn`, which represents the other side of the connection. it is
    unlikely, however, you'll need to use it (it is used internally).
when you are done using a connection, and wish to release the resources it
holds, you should call close(). you don't have to, but if you don't, the gc
can't release the memory because of cyclic references.
"""
__slots__ = ["_closed", "_local_namespace", "channel", "box", "async_replies",
"sync_replies", "module_cache", "remote_conn", "modules", "namespace"]
def __init__(self, channel):
self._closed = False
self._local_namespace = {}
self.channel = channel
self.box = Box(self)
self.async_replies = {}
self.sync_replies = {}
self.module_cache = {}
self.remote_conn = self.sync_request("handle_getconn")
# user APIs:
self.modules = RootImporter(self)
self.namespace = AttrFrontend(self.remote_conn._local_namespace)
self.execute("")
def __repr__(self):
if self._closed:
return "<%s.%s(closed)>" % (self.__class__.__module__, self.__class__.__name__)
else:
return "<%s.%s(%r)>" % (self.__class__.__module__, self.__class__.__name__, self.channel)
#
# file api layer
#
def close(self):
"""closes down the connection and releases all cyclic dependecies"""
if not self._closed:
self.box.close()
self.channel.close()
self._closed = True
self._local_namespace = None
self.channel = None
self.box = None
self.async_replies = None
self.sync_replies = None
self.module_cache = None
self.modules = None
self.remote_conn = None
self.namespace = None
def fileno(self):
"""connections are select()able"""
return self.channel.fileno()
#
# protocol
#
def send(self, type, seq, obj):
if self._closed:
raise EOFError("the connection is closed")
return self.channel.send(type, seq, self.box.pack(obj))
def send_request(self, handlername, *args):
return self.send(FRAME_REQUEST, None, (handlername, args))
def send_exception(self, seq, exc_info):
self.send(FRAME_EXCEPTION, seq, dump_exception(*exc_info))
def send_result(self, seq, obj):
self.send(FRAME_RESULT, seq, obj)
#
# dispatching
#
def dispatch_result(self, seq, obj):
if seq in self.async_replies:
self.async_replies.pop(seq)(obj, False)
else:
self.sync_replies[seq] = obj
def dispatch_exception(self, seq, obj):
excobj = load_exception(obj)
if seq in self.async_replies:
self.async_replies.pop(seq)(excobj, True)
else:
raise_exception(*excobj)
def dispatch_request(self, seq, handlername, args):
try:
res = getattr(self, handlername)(*args)
except SystemExit:
raise
except:
self.send_exception(seq, sys.exc_info())
else:
self.send_result(seq, res)
def poll(self):
"""if available, serves a single request, otherwise returns (non-blocking serve)"""
if self.channel.is_available():
self.serve()
return True
else:
return False
def serve(self):
"""serves a single request (may block)"""
type, seq, data = self.channel.recv()
if type == FRAME_RESULT:
self.dispatch_result(seq, self.box.unpack(data))
elif type == FRAME_REQUEST:
self.dispatch_request(seq, *self.box.unpack(data))
elif type == FRAME_EXCEPTION:
self.dispatch_exception(seq, self.box.unpack(data))
else:
raise ValueError("invalid frame type (%d)" % (type,))
#
# requests
#
def sync_request(self, handlername, *args):
"""performs a synchronous (blocking) request"""
seq = self.send_request(handlername, *args)
while seq not in self.sync_replies:
self.serve()
return self.sync_replies.pop(seq)
def async_request(self, callback, handlername, *args):
"""performs an asynchronous (non-blocking) request"""
seq = self.send_request(handlername, *args)
self.async_replies[seq] = callback
#
# root requests (not through NetProxies)
#
def rimport(self, modulename):
"""imports a module by name (as a string)"""
if modulename not in self.module_cache:
module = self.sync_request("handle_import", modulename)
self.module_cache[modulename] = module
return self.module_cache[modulename]
def execute(self, expr, mode = "exec"):
"""executes the given code at the remote side of the connection"""
return self.sync_request("handle_execute", expr, mode)
#
# handlers
#
def handle_decref(self, oid):
self.box.decref(oid)
def handle_delattr(self, oid, name):
delattr(self.box[oid], name)
def handle_getattr(self, oid, name):
return getattr(self.box[oid], name)
def handle_setattr(self, oid, name, value):
setattr(self.box[oid], name, value)
def handle_delitem(self, oid, index):
del self.box[oid][index]
def handle_getitem(self, oid, index):
return self.box[oid][index]
def handle_setitem(self, oid, index, value):
self.box[oid][index] = value
def handle_call(self, oid, args, kwargs):
return self.box[oid](*args, **kwargs)
def handle_repr(self, oid):
return repr(self.box[oid])
def handle_str(self, oid):
return str(self.box[oid])
def handle_bool(self, oid):
return bool(self.box[oid])
def handle_import(self, modulename):
return __import__(modulename, None, None, modulename.split(".")[-1])
def handle_getconn(self):
return self
def handle_execute(self, expr, mode):
codeobj = compile(expr, "<from %s>" % (self,), mode)
return eval(codeobj, self._local_namespace)
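# Usage sketch (illustrative; obtaining `channel` is the job of Rpyc's
# transport/factory layer, which is outside this module):
#
#   conn = Connection(channel)
#   remote_os = conn.modules.os       # import a module on the remote side
#   conn.execute("x = 40 + 2")        # run code in the remote namespace
#   print conn.namespace.x            # prints 42
#   conn.close()                      # release cyclic references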
| eBay/restcommander | play-1.2.4/python/Lib/site-packages/Rpyc/Connection.py | Python | apache-2.0 | 6,944 |
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
from nova.tests.functional.api_sample_tests import api_sample_base
CONF = cfg.CONF
CONF.import_opt('osapi_compute_extension',
'nova.api.openstack.compute.legacy_v2.extensions')
class QuotaSetsSampleJsonTests(api_sample_base.ApiSampleTestBaseV3):
ADMIN_API = True
extension_name = "os-quota-sets"
def _get_flags(self):
f = super(QuotaSetsSampleJsonTests, self)._get_flags()
f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.server_group_quotas.'
'Server_group_quotas')
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.quotas.Quotas')
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.extended_quotas.Extended_quotas')
f['osapi_compute_extension'].append('nova.api.openstack.compute.'
'contrib.user_quotas.User_quotas')
return f
def test_show_quotas(self):
# Get api sample to show quotas.
response = self._do_get('os-quota-sets/fake_tenant')
self._verify_response('quotas-show-get-resp', {}, response, 200)
def test_show_quotas_defaults(self):
# Get api sample to show quotas defaults.
response = self._do_get('os-quota-sets/fake_tenant/defaults')
self._verify_response('quotas-show-defaults-get-resp',
{}, response, 200)
def test_update_quotas(self):
# Get api sample to update quotas.
response = self._do_put('os-quota-sets/fake_tenant',
'quotas-update-post-req',
{})
self._verify_response('quotas-update-post-resp', {}, response, 200)
def test_delete_quotas(self):
# Get api sample to delete quota.
response = self._do_delete('os-quota-sets/fake_tenant')
self.assertEqual(response.status_code, 202)
self.assertEqual(response.content, '')
def test_update_quotas_force(self):
# Get api sample to update quotas.
response = self._do_put('os-quota-sets/fake_tenant',
'quotas-update-force-post-req',
{})
return self._verify_response('quotas-update-force-post-resp', {},
response, 200)
def test_show_quotas_for_user(self):
# Get api sample to show quotas for user.
response = self._do_get('os-quota-sets/fake_tenant?user_id=1')
self._verify_response('user-quotas-show-get-resp', {}, response, 200)
def test_delete_quotas_for_user(self):
response = self._do_delete('os-quota-sets/fake_tenant?user_id=1')
self.assertEqual(response.status_code, 202)
self.assertEqual(response.content, '')
def test_update_quotas_for_user(self):
# Get api sample to update quotas for user.
response = self._do_put('os-quota-sets/fake_tenant?user_id=1',
'user-quotas-update-post-req',
{})
return self._verify_response('user-quotas-update-post-resp', {},
response, 200)
| whitepages/nova | nova/tests/functional/api_sample_tests/test_quota_sets.py | Python | apache-2.0 | 4,046 |
from __future__ import print_function
import time
import argparse
import grpc
from jaeger_client import Config
from grpc_opentracing import open_tracing_client_interceptor
from grpc_opentracing.grpcext import intercept_channel
import command_line_pb2
def run():
parser = argparse.ArgumentParser()
parser.add_argument(
'--log_payloads',
action='store_true',
help='log request/response objects to open-tracing spans')
args = parser.parse_args()
config = Config(
config={
'sampler': {
'type': 'const',
'param': 1,
},
'logging': True,
},
service_name='trivial-client')
tracer = config.initialize_tracer()
tracer_interceptor = open_tracing_client_interceptor(
tracer, log_payloads=args.log_payloads)
channel = grpc.insecure_channel('localhost:50051')
channel = intercept_channel(channel, tracer_interceptor)
stub = command_line_pb2.CommandLineStub(channel)
response = stub.Echo(command_line_pb2.CommandRequest(text='Hello, hello'))
print(response.text)
time.sleep(2)
tracer.close()
time.sleep(2)
if __name__ == '__main__':
run()
| johnbelamaric/themis | vendor/github.com/grpc-ecosystem/grpc-opentracing/python/examples/trivial/trivial_client.py | Python | apache-2.0 | 1,218 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright 2013 Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import cherrypy
import os
import six
from girder.constants import PACKAGE_DIR
def _mergeConfig(filename):
"""
Load `filename` into the cherrypy config.
Also, handle global options by putting them in the root.
"""
cherrypy._cpconfig.merge(cherrypy.config, filename)
    # When in Sphinx, cherrypy may be mocked and return None
global_config = cherrypy.config.pop('global', {}) or {}
for option, value in six.viewitems(global_config):
cherrypy.config[option] = value
def _loadConfigsByPrecedent():
"""
Load configuration in reverse order of precedent.
"""
configPaths = [os.path.join(PACKAGE_DIR, 'conf', 'girder.dist.cfg')]
if 'GIRDER_TEST_DB' not in os.environ:
# we don't want to load the local config file if we are running tests
configPaths.append(os.path.join(PACKAGE_DIR, 'conf', 'girder.local.cfg'))
configPaths.append(os.path.join('/etc', 'girder.cfg'))
configPaths.append(os.path.join(os.path.expanduser('~'), '.girder', 'girder.cfg'))
if 'GIRDER_CONFIG' in os.environ:
configPaths.append(os.environ['GIRDER_CONFIG'])
for curConfigPath in configPaths:
if os.path.exists(curConfigPath):
_mergeConfig(curConfigPath)
def loadConfig():
_loadConfigsByPrecedent()
if 'GIRDER_PORT' in os.environ:
port = int(os.environ['GIRDER_PORT'])
cherrypy.config['server.socket_port'] = port
if 'GIRDER_MONGO_URI' in os.environ:
if 'database' not in cherrypy.config:
cherrypy.config['database'] = {}
cherrypy.config['database']['uri'] = os.getenv('GIRDER_MONGO_URI')
if 'GIRDER_TEST_DB' in os.environ:
cherrypy.config['database']['uri'] =\
os.environ['GIRDER_TEST_DB'].replace('.', '_')
def getConfig():
if 'database' not in cherrypy.config:
loadConfig()
    # When in Sphinx, cherrypy may be mocked and return None
return cherrypy.config or {}
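# Usage sketch (illustrative): application code is expected to read settings
# through getConfig() rather than touching cherrypy.config directly, e.g.
#
#   from girder.utility import config
#   db_uri = config.getConfig()['database']['uri']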
| manthey/girder | girder/utility/config.py | Python | apache-2.0 | 2,764 |
################################################################################
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
################################################################################
import os
import shutil
import socket
import subprocess
import sys
import tempfile
import time
import unittest
import grpc
from apache_beam.portability.api.beam_provision_api_pb2 import (ProvisionInfo,
GetProvisionInfoResponse)
from apache_beam.portability.api.beam_provision_api_pb2_grpc import (
ProvisionServiceServicer, add_ProvisionServiceServicer_to_server)
from concurrent import futures
from google.protobuf import json_format
from pyflink.java_gateway import get_gateway
from pyflink.pyflink_gateway_server import on_windows
from pyflink.testing.test_case_utils import PyFlinkTestCase
class PythonBootTests(PyFlinkTestCase):
def setUp(self):
provision_info = json_format.Parse('{"retrievalToken": "test_token"}', ProvisionInfo())
response = GetProvisionInfoResponse(info=provision_info)
def get_unused_port():
sock = socket.socket()
sock.bind(('', 0))
port = sock.getsockname()[1]
sock.close()
return port
class ProvisionService(ProvisionServiceServicer):
def GetProvisionInfo(self, request, context):
return response
def start_test_provision_server():
server = grpc.server(futures.ThreadPoolExecutor(max_workers=1))
add_ProvisionServiceServicer_to_server(ProvisionService(), server)
port = get_unused_port()
server.add_insecure_port('[::]:' + str(port))
server.start()
return server, port
self.provision_server, self.provision_port = start_test_provision_server()
self.env = dict(os.environ)
self.env["python"] = sys.executable
self.env["FLINK_BOOT_TESTING"] = "1"
self.env["BOOT_LOG_DIR"] = os.path.join(self.env["FLINK_HOME"], "log")
self.tmp_dir = tempfile.mkdtemp(str(time.time()), dir=self.tempdir)
# assume that this file is in flink-python source code directory.
flink_python_source_root = os.path.dirname(
os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
runner_script = "pyflink-udf-runner.bat" if on_windows() else \
"pyflink-udf-runner.sh"
self.runner_path = os.path.join(
flink_python_source_root, "bin", runner_script)
def run_boot_py(self):
args = [self.runner_path, "--id", "1",
"--logging_endpoint", "localhost:0000",
"--artifact_endpoint", "whatever",
"--provision_endpoint", "localhost:%d" % self.provision_port,
"--control_endpoint", "localhost:0000",
"--semi_persist_dir", self.tmp_dir]
return subprocess.call(args, env=self.env)
def test_python_boot(self):
exit_code = self.run_boot_py()
self.assertTrue(exit_code == 0, "the boot.py exited with non-zero code.")
@unittest.skipIf(on_windows(), "'subprocess.check_output' in Windows always return empty "
"string, skip this test.")
def test_param_validation(self):
args = [self.runner_path]
exit_message = subprocess.check_output(args, env=self.env).decode("utf-8")
self.assertIn("No id provided.", exit_message)
args = [self.runner_path, "--id", "1"]
exit_message = subprocess.check_output(args, env=self.env).decode("utf-8")
self.assertIn("No logging endpoint provided.", exit_message)
args = [self.runner_path, "--id", "1",
"--logging_endpoint", "localhost:0000"]
exit_message = subprocess.check_output(args, env=self.env).decode("utf-8")
self.assertIn("No provision endpoint provided.", exit_message)
args = [self.runner_path, "--id", "1",
"--logging_endpoint", "localhost:0000",
"--provision_endpoint", "localhost:%d" % self.provision_port]
exit_message = subprocess.check_output(args, env=self.env).decode("utf-8")
self.assertIn("No control endpoint provided.", exit_message)
def test_set_working_directory(self):
JProcessPythonEnvironmentManager = \
get_gateway().jvm.org.apache.flink.python.env.ProcessPythonEnvironmentManager
output_file = os.path.join(self.tmp_dir, "output.txt")
pyflink_dir = os.path.join(self.tmp_dir, "pyflink")
os.mkdir(pyflink_dir)
# just create an empty file
open(os.path.join(pyflink_dir, "__init__.py"), 'a').close()
fn_execution_dir = os.path.join(pyflink_dir, "fn_execution")
os.mkdir(fn_execution_dir)
open(os.path.join(fn_execution_dir, "__init__.py"), 'a').close()
with open(os.path.join(fn_execution_dir, "boot.py"), "w") as f:
f.write("import os\nwith open(r'%s', 'w') as f:\n f.write(os.getcwd())" %
output_file)
        # test if the name of the working directory variable of the udf runner is consistent with
# ProcessPythonEnvironmentManager.
self.env[JProcessPythonEnvironmentManager.PYTHON_WORKING_DIR] = self.tmp_dir
self.env["python"] = sys.executable
args = [self.runner_path]
subprocess.check_output(args, env=self.env)
process_cwd = None
if os.path.exists(output_file):
with open(output_file, 'r') as f:
process_cwd = f.read()
self.assertEqual(os.path.realpath(self.tmp_dir),
process_cwd,
"setting working directory variable is not work!")
def tearDown(self):
self.provision_server.stop(0)
try:
if self.tmp_dir is not None:
shutil.rmtree(self.tmp_dir)
except:
pass
| jinglining/flink | flink-python/pyflink/fn_execution/tests/test_process_mode_boot.py | Python | apache-2.0 | 6,732 |
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import multiprocessing
import os
import signal
import unittest
from datetime import timedelta
from time import sleep
from dateutil.relativedelta import relativedelta
from numpy.testing import assert_array_almost_equal
from airflow import DAG, exceptions, settings
from airflow.exceptions import AirflowException
from airflow.hooks.base_hook import BaseHook
from airflow.jobs.local_task_job import LocalTaskJob
from airflow.models import DagBag, DagRun, TaskFail, TaskInstance
from airflow.models.baseoperator import BaseOperator
from airflow.operators.bash import BashOperator
from airflow.operators.check_operator import CheckOperator, ValueCheckOperator
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators.python import PythonOperator
from airflow.settings import Session
from airflow.utils.dates import infer_time_unit, round_time, scale_time_units
from airflow.utils.state import State
from airflow.utils.timezone import datetime
from tests.test_utils.config import conf_vars
DEV_NULL = '/dev/null'
DEFAULT_DATE = datetime(2015, 1, 1)
TEST_DAG_ID = 'unit_tests'
class OperatorSubclass(BaseOperator):
"""
An operator to test template substitution
"""
template_fields = ['some_templated_field']
def __init__(self, some_templated_field, *args, **kwargs):
super().__init__(*args, **kwargs)
self.some_templated_field = some_templated_field
def execute(self, context):
pass
class TestCore(unittest.TestCase):
default_scheduler_args = {"num_runs": 1}
def setUp(self):
self.dagbag = DagBag(
dag_folder=DEV_NULL, include_examples=True)
self.args = {'owner': 'airflow', 'start_date': DEFAULT_DATE}
self.dag = DAG(TEST_DAG_ID, default_args=self.args)
self.dag_bash = self.dagbag.dags['example_bash_operator']
self.runme_0 = self.dag_bash.get_task('runme_0')
self.run_after_loop = self.dag_bash.get_task('run_after_loop')
self.run_this_last = self.dag_bash.get_task('run_this_last')
def tearDown(self):
session = Session()
session.query(DagRun).filter(
DagRun.dag_id == TEST_DAG_ID).delete(
synchronize_session=False)
session.query(TaskInstance).filter(
TaskInstance.dag_id == TEST_DAG_ID).delete(
synchronize_session=False)
session.query(TaskFail).filter(
TaskFail.dag_id == TEST_DAG_ID).delete(
synchronize_session=False)
session.commit()
session.close()
def test_check_operators(self):
conn_id = "sqlite_default"
captain_hook = BaseHook.get_hook(conn_id=conn_id) # quite funny :D
captain_hook.run("CREATE TABLE operator_test_table (a, b)")
captain_hook.run("insert into operator_test_table values (1,2)")
op = CheckOperator(
task_id='check',
sql="select count(*) from operator_test_table",
conn_id=conn_id,
dag=self.dag)
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
op = ValueCheckOperator(
task_id='value_check',
pass_value=95,
tolerance=0.1,
conn_id=conn_id,
sql="SELECT 100",
dag=self.dag)
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
captain_hook.run("drop table operator_test_table")
def test_clear_api(self):
task = self.dag_bash.tasks[0]
task.clear(
start_date=DEFAULT_DATE, end_date=DEFAULT_DATE,
upstream=True, downstream=True)
ti = TaskInstance(task=task, execution_date=DEFAULT_DATE)
ti.are_dependents_done()
def test_illegal_args(self):
"""
Tests that Operators reject illegal arguments
"""
msg = 'Invalid arguments were passed to BashOperator (task_id: test_illegal_args).'
with conf_vars({('operators', 'allow_illegal_arguments'): 'True'}):
with self.assertWarns(PendingDeprecationWarning) as warning:
BashOperator(
task_id='test_illegal_args',
bash_command='echo success',
dag=self.dag,
illegal_argument_1234='hello?')
assert any(msg in str(w) for w in warning.warnings)
def test_illegal_args_forbidden(self):
"""
Tests that operators raise exceptions on illegal arguments when
illegal arguments are not allowed.
"""
with self.assertRaises(AirflowException) as ctx:
BashOperator(
task_id='test_illegal_args',
bash_command='echo success',
dag=self.dag,
illegal_argument_1234='hello?')
self.assertIn(
('Invalid arguments were passed to BashOperator '
'(task_id: test_illegal_args).'),
str(ctx.exception))
def test_bash_operator(self):
op = BashOperator(
task_id='test_bash_operator',
bash_command="echo success",
dag=self.dag)
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_bash_operator_multi_byte_output(self):
op = BashOperator(
task_id='test_multi_byte_bash_operator',
bash_command="echo \u2600",
dag=self.dag,
output_encoding='utf-8')
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_bash_operator_kill(self):
import psutil
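        # Embed this test's pid in the sleep duration so the spawned `sleep`
        # process can be identified unambiguously in the psutil scan below.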
sleep_time = "100%d" % os.getpid()
op = BashOperator(
task_id='test_bash_operator_kill',
execution_timeout=timedelta(seconds=1),
bash_command="/bin/bash -c 'sleep %s'" % sleep_time,
dag=self.dag)
self.assertRaises(
exceptions.AirflowTaskTimeout,
op.run,
start_date=DEFAULT_DATE, end_date=DEFAULT_DATE)
sleep(2)
pid = -1
for proc in psutil.process_iter():
if proc.cmdline() == ['sleep', sleep_time]:
pid = proc.pid
if pid != -1:
os.kill(pid, signal.SIGTERM)
self.fail("BashOperator's subprocess still running after stopping on timeout!")
def test_on_failure_callback(self):
# Annoying workaround for nonlocal not existing in python 2
data = {'called': False}
def check_failure(context, test_case=self):
data['called'] = True
error = context.get('exception')
test_case.assertIsInstance(error, AirflowException)
op = BashOperator(
task_id='check_on_failure_callback',
bash_command="exit 1",
dag=self.dag,
on_failure_callback=check_failure)
self.assertRaises(
exceptions.AirflowException,
op.run,
start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
self.assertTrue(data['called'])
def test_dryrun(self):
op = BashOperator(
task_id='test_dryrun',
bash_command="echo success",
dag=self.dag)
op.dry_run()
def test_sqlite(self):
import airflow.providers.sqlite.operators.sqlite
op = airflow.providers.sqlite.operators.sqlite.SqliteOperator(
task_id='time_sqlite',
sql="CREATE TABLE IF NOT EXISTS unitest (dummy VARCHAR(20))",
dag=self.dag)
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_timeout(self):
op = PythonOperator(
task_id='test_timeout',
execution_timeout=timedelta(seconds=1),
python_callable=lambda: sleep(5),
dag=self.dag)
self.assertRaises(
exceptions.AirflowTaskTimeout,
op.run,
start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_python_op(self):
def test_py_op(templates_dict, ds, **kwargs):
if not templates_dict['ds'] == ds:
raise Exception("failure")
op = PythonOperator(
task_id='test_py_op',
python_callable=test_py_op,
templates_dict={'ds': "{{ ds }}"},
dag=self.dag)
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_complex_template(self):
def verify_templated_field(context):
self.assertEqual(context['ti'].task.some_templated_field['bar'][1],
context['ds'])
op = OperatorSubclass(
task_id='test_complex_template',
some_templated_field={
'foo': '123',
'bar': ['baz', '{{ ds }}']
},
dag=self.dag)
op.execute = verify_templated_field
op.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
def test_template_non_bool(self):
"""
Test templates can handle objects with no sense of truthiness
"""
class NonBoolObject:
def __len__(self): # pylint: disable=invalid-length-returned
return NotImplemented
def __bool__(self):
return NotImplemented
op = OperatorSubclass(
task_id='test_bad_template_obj',
some_templated_field=NonBoolObject(),
dag=self.dag)
op.resolve_template_files()
def test_task_get_template(self):
TI = TaskInstance
ti = TI(
task=self.runme_0, execution_date=DEFAULT_DATE)
ti.dag = self.dag_bash
ti.run(ignore_ti_state=True)
context = ti.get_template_context()
# DEFAULT DATE is 2015-01-01
self.assertEqual(context['ds'], '2015-01-01')
self.assertEqual(context['ds_nodash'], '20150101')
# next_ds is 2015-01-02 as the dag interval is daily
self.assertEqual(context['next_ds'], '2015-01-02')
self.assertEqual(context['next_ds_nodash'], '20150102')
# prev_ds is 2014-12-31 as the dag interval is daily
self.assertEqual(context['prev_ds'], '2014-12-31')
self.assertEqual(context['prev_ds_nodash'], '20141231')
self.assertEqual(context['ts'], '2015-01-01T00:00:00+00:00')
self.assertEqual(context['ts_nodash'], '20150101T000000')
self.assertEqual(context['ts_nodash_with_tz'], '20150101T000000+0000')
self.assertEqual(context['yesterday_ds'], '2014-12-31')
self.assertEqual(context['yesterday_ds_nodash'], '20141231')
self.assertEqual(context['tomorrow_ds'], '2015-01-02')
self.assertEqual(context['tomorrow_ds_nodash'], '20150102')
def test_local_task_job(self):
TI = TaskInstance
ti = TI(
task=self.runme_0, execution_date=DEFAULT_DATE)
job = LocalTaskJob(task_instance=ti, ignore_ti_state=True)
job.run()
def test_raw_job(self):
TI = TaskInstance
ti = TI(
task=self.runme_0, execution_date=DEFAULT_DATE)
ti.dag = self.dag_bash
ti.run(ignore_ti_state=True)
def test_round_time(self):
rt1 = round_time(datetime(2015, 1, 1, 6), timedelta(days=1))
self.assertEqual(datetime(2015, 1, 1, 0, 0), rt1)
rt2 = round_time(datetime(2015, 1, 2), relativedelta(months=1))
self.assertEqual(datetime(2015, 1, 1, 0, 0), rt2)
rt3 = round_time(datetime(2015, 9, 16, 0, 0), timedelta(1), datetime(
2015, 9, 14, 0, 0))
self.assertEqual(datetime(2015, 9, 16, 0, 0), rt3)
rt4 = round_time(datetime(2015, 9, 15, 0, 0), timedelta(1), datetime(
2015, 9, 14, 0, 0))
self.assertEqual(datetime(2015, 9, 15, 0, 0), rt4)
rt5 = round_time(datetime(2015, 9, 14, 0, 0), timedelta(1), datetime(
2015, 9, 14, 0, 0))
self.assertEqual(datetime(2015, 9, 14, 0, 0), rt5)
rt6 = round_time(datetime(2015, 9, 13, 0, 0), timedelta(1), datetime(
2015, 9, 14, 0, 0))
self.assertEqual(datetime(2015, 9, 14, 0, 0), rt6)
def test_infer_time_unit(self):
self.assertEqual('minutes', infer_time_unit([130, 5400, 10]))
self.assertEqual('seconds', infer_time_unit([110, 50, 10, 100]))
self.assertEqual('hours', infer_time_unit([100000, 50000, 10000, 20000]))
self.assertEqual('days', infer_time_unit([200000, 100000]))
def test_scale_time_units(self):
# use assert_almost_equal from numpy.testing since we are comparing
# floating point arrays
arr1 = scale_time_units([130, 5400, 10], 'minutes')
assert_array_almost_equal(arr1, [2.167, 90.0, 0.167], decimal=3)
arr2 = scale_time_units([110, 50, 10, 100], 'seconds')
assert_array_almost_equal(arr2, [110.0, 50.0, 10.0, 100.0], decimal=3)
arr3 = scale_time_units([100000, 50000, 10000, 20000], 'hours')
assert_array_almost_equal(arr3, [27.778, 13.889, 2.778, 5.556],
decimal=3)
arr4 = scale_time_units([200000, 100000], 'days')
assert_array_almost_equal(arr4, [2.315, 1.157], decimal=3)
def test_bad_trigger_rule(self):
with self.assertRaises(AirflowException):
DummyOperator(
task_id='test_bad_trigger',
trigger_rule="non_existent",
dag=self.dag)
def test_terminate_task(self):
"""If a task instance's db state get deleted, it should fail"""
from airflow.executors.sequential_executor import SequentialExecutor
TI = TaskInstance
dag = self.dagbag.dags.get('test_utils')
task = dag.task_dict.get('sleeps_forever')
ti = TI(task=task, execution_date=DEFAULT_DATE)
job = LocalTaskJob(
task_instance=ti, ignore_ti_state=True, executor=SequentialExecutor())
# Running task instance asynchronously
proc = multiprocessing.Process(target=job.run)
proc.start()
sleep(5)
settings.engine.dispose()
session = settings.Session()
ti.refresh_from_db(session=session)
# making sure it's actually running
self.assertEqual(State.RUNNING, ti.state)
ti = session.query(TI).filter_by(
dag_id=task.dag_id,
task_id=task.task_id,
execution_date=DEFAULT_DATE
).one()
# deleting the instance should result in a failure
session.delete(ti)
session.commit()
# waiting for the async task to finish
proc.join()
# making sure that the task ended up as failed
ti.refresh_from_db(session=session)
self.assertEqual(State.FAILED, ti.state)
session.close()
def test_task_fail_duration(self):
"""If a task fails, the duration should be recorded in TaskFail"""
op1 = BashOperator(
task_id='pass_sleepy',
bash_command='sleep 3',
dag=self.dag)
op2 = BashOperator(
task_id='fail_sleepy',
bash_command='sleep 5',
execution_timeout=timedelta(seconds=3),
retry_delay=timedelta(seconds=0),
dag=self.dag)
session = settings.Session()
try:
op1.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
except Exception: # pylint: disable=broad-except
pass
try:
op2.run(start_date=DEFAULT_DATE, end_date=DEFAULT_DATE, ignore_ti_state=True)
except Exception: # pylint: disable=broad-except
pass
op1_fails = session.query(TaskFail).filter_by(
task_id='pass_sleepy',
dag_id=self.dag.dag_id,
execution_date=DEFAULT_DATE).all()
op2_fails = session.query(TaskFail).filter_by(
task_id='fail_sleepy',
dag_id=self.dag.dag_id,
execution_date=DEFAULT_DATE).all()
self.assertEqual(0, len(op1_fails))
self.assertEqual(1, len(op2_fails))
self.assertGreaterEqual(sum([f.duration for f in op2_fails]), 3)
def test_externally_triggered_dagrun(self):
TI = TaskInstance
# Create the dagrun between two "scheduled" execution dates of the DAG
execution_date = DEFAULT_DATE + timedelta(days=2)
execution_ds = execution_date.strftime('%Y-%m-%d')
execution_ds_nodash = execution_ds.replace('-', '')
dag = DAG(
TEST_DAG_ID,
default_args=self.args,
schedule_interval=timedelta(weeks=1),
start_date=DEFAULT_DATE)
task = DummyOperator(task_id='test_externally_triggered_dag_context',
dag=dag)
dag.create_dagrun(run_id=DagRun.id_for_date(execution_date),
execution_date=execution_date,
state=State.RUNNING,
external_trigger=True)
task.run(
start_date=execution_date, end_date=execution_date)
ti = TI(task=task, execution_date=execution_date)
context = ti.get_template_context()
# next_ds/prev_ds should be the execution date for manually triggered runs
self.assertEqual(context['next_ds'], execution_ds)
self.assertEqual(context['next_ds_nodash'], execution_ds_nodash)
self.assertEqual(context['prev_ds'], execution_ds)
self.assertEqual(context['prev_ds_nodash'], execution_ds_nodash)
if __name__ == '__main__':
unittest.main()
| spektom/incubator-airflow | tests/test_core.py | Python | apache-2.0 | 18,421 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-short-docstring-punctuation
"""Image processing and decoding ops.
See the @{$python/image} guide.
@@decode_bmp
@@decode_gif
@@decode_jpeg
@@decode_and_crop_jpeg
@@encode_jpeg
@@extract_jpeg_shape
@@decode_png
@@encode_png
@@is_jpeg
@@decode_image
@@resize_images
@@resize_area
@@resize_bicubic
@@resize_bilinear
@@resize_nearest_neighbor
@@resize_image_with_crop_or_pad
@@central_crop
@@pad_to_bounding_box
@@crop_to_bounding_box
@@extract_glimpse
@@crop_and_resize
@@flip_up_down
@@random_flip_up_down
@@flip_left_right
@@random_flip_left_right
@@transpose_image
@@rot90
@@rgb_to_grayscale
@@grayscale_to_rgb
@@hsv_to_rgb
@@rgb_to_hsv
@@rgb_to_yiq
@@yiq_to_rgb
@@rgb_to_yuv
@@yuv_to_rgb
@@convert_image_dtype
@@adjust_brightness
@@random_brightness
@@adjust_contrast
@@random_contrast
@@adjust_hue
@@random_hue
@@adjust_gamma
@@adjust_saturation
@@random_saturation
@@per_image_standardization
@@draw_bounding_boxes
@@non_max_suppression
@@sample_distorted_bounding_box
@@total_variation
@@psnr
@@ssim
@@ssim_multiscale
@@image_gradients
@@sobel_edges
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# go/tf-wildcard-import
# pylint: disable=wildcard-import
from tensorflow.python.ops.gen_image_ops import *
from tensorflow.python.ops.image_ops_impl import *
# pylint: enable=wildcard-import
# TODO(drpng): remove these once internal use has discontinued.
# pylint: disable=unused-import
from tensorflow.python.ops.image_ops_impl import _Check3DImage
from tensorflow.python.ops.image_ops_impl import _ImageDimensions
# pylint: enable=unused-import
from tensorflow.python.util.all_util import remove_undocumented
_allowed_symbols = [
# ResizeMethod is not documented, but is documented in functions
# that use it.
'ResizeMethod',
]
remove_undocumented(__name__, _allowed_symbols)
| allenlavoie/tensorflow | tensorflow/python/ops/image_ops.py | Python | apache-2.0 | 2,576 |
"""
author: Lalit Jain, [email protected]
modified: Chris Fernandez, [email protected]
last updated: 05/27/2015
A module for replicating the tuple bandits pure exploration experiments from the
NEXT paper, with 25 total arms and 8 arms shown at a time.
Usage:
python experiment_tuple_n25k8.py
"""
import os, sys
# The line below imports launch_experiment.py.
# We assume that it is located in next/examples
# This function is used at the very bottom of this file
sys.path.append("../")
from launch_experiment import *
# List of Algorithms currently available for TupleBandits
curr_dir = os.path.dirname(os.path.abspath(__file__))
experiment_list = []
supported_alg_ids = ['RandomSampling']
# Algorithm List. These algorithms are independent (no inter-connectedness
# between algorithms) and each algorithm gets `proportion` number of queries
# (i.e., if proportions is set to 0.33 for each algorithm, each algorithm will
# sample 1/3 of the time)
alg_list = []
for alg_id in supported_alg_ids:
alg_item = {}
alg_item['alg_id'] = alg_id
alg_item['alg_label'] = alg_id
alg_item['params'] = {}
alg_list.append(alg_item)
# Run algorithms here in fixed proportions
# The number of queries sampled is the ones we specify, rather than using some
# more complicated scheme.
algorithm_management_settings = {}
params = {}
params['proportions'] = []
for algorithm in alg_list:
params['proportions'].append( { 'alg_label': algorithm['alg_label'] , 'proportion':1./len(alg_list) } )
# Algorithms are run here in fixed proportions
algorithm_management_settings['mode'] = 'fixed_proportions'
algorithm_management_settings['params'] = params
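# With the single algorithm in supported_alg_ids this reduces to, roughly:
#   {'mode': 'fixed_proportions',
#    'params': {'proportions': [{'alg_label': 'RandomSampling', 'proportion': 1.0}]}}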
# Select some cartoons from the curr_dir
cap_list = ['cap436']
# Create experiment dictionary
for cap in cap_list:
initExp = {}
initExp['args'] = {}
# if cap == cap_list[2]:
# initExp['args']['n'] = 25
# else:
# initExp['args']['n'] = 8
initExp['args']['n'] = 25 # number of targets
initExp['args']['k'] = 8 # how many choices does the user have to choose among?
# probability of error. similar to "significant because p < 0.05"
initExp['args']['failure_probability'] = .01
    # one participant sees many algorithms? 'one_to_many' means one participant
# will see many algorithms
initExp['args']['participant_to_algorithm_management'] = 'one_to_many'
initExp['args']['algorithm_management_settings'] = algorithm_management_settings
initExp['args']['alg_list'] = alg_list
# What does the user see at start and finish? These are the
    # instructions/debrief (by default they have default values)
# initExp['args']['instructions'] = ''
# initExp['args']['debrief'] =''
initExp['args']['num_tries'] = 1 # how many questions does the user see?
# Which app are we running? (examples of other algorithms are in examples/
# (this is another TupleBandits example)
initExp['app_id'] = 'TupleBanditsPureExploration'
# Set the context
experiment = {}
experiment['initExp'] = initExp
experiment['primary_type'] = 'text'
experiment['primary_target_file'] = curr_dir+"/"+cap+".txt"
experiment['target_file'] = curr_dir+"/"+cap+".txt"
experiment['context'] = curr_dir+"/"+cap+".jpg"
experiment['context_type'] = 'image'
experiment_list.append(experiment)
# Launch the experiment
try:
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET_ACCESS_KEY']
AWS_ACCESS_ID = os.environ['AWS_ACCESS_KEY_ID']
AWS_BUCKET_NAME = os.environ['AWS_BUCKET_NAME']
host = os.environ['NEXT_BACKEND_GLOBAL_HOST'] + \
":" + os.environ.get('NEXT_BACKEND_GLOBAL_PORT', '8000')
except:
print 'The following environment variables must be defined:'
for key in ['AWS_ACCESS_KEY_ID', 'AWS_SECRET_ACCESS_KEY',
'AWS_BUCKET_NAME', 'NEXT_BACKEND_GLOBAL_HOST']:
if key not in os.environ:
print ' ' + key
sys.exit()
# Call launch_experiment module found in NEXT/lauch_experiment.py
exp_uid_list, exp_key_list, widget_key_list = launch_experiment(host, experiment_list, AWS_ACCESS_ID, AWS_SECRET_ACCESS_KEY, AWS_BUCKET_NAME)
# Update the cartoon_dueling.html file with the exp_uid_list and widget_key_list
# with open('cartoon_tuple_n25k8.html','r') as page:
# print "opended file"
# page_string = page.read()
# page_string = page_string.replace("{{exp_uid_list}}", str(exp_uid_list))
# page_string = page_string.replace("{{widget_key_list}}", str(widget_key_list))
# with open('../../next_frontend_base/next_frontend_base/templates/cartoon_tuple_n25k8.html','w') as out:
# out.write(page_string)
# out.flush()
# out.close()
| kgjamieson/NEXT-psych | examples/cartoon_tuple_n25k8/experiment_tuple_n25k8.py | Python | apache-2.0 | 4,671 |
#!/usr/bin/python
#
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example updates a single placement to allow for AdSense targeting.
To determine which placements exist, run get_all_placements.py.
"""
__author__ = ('Nicholas Chen',
'Joseph DiLallo')
# Import appropriate modules from the client library.
from googleads import dfp
PLACEMENT_ID = 'INSERT_PLACEMENT_ID_HERE'
def main(client, placement_id):
# Initialize appropriate service.
placement_service = client.GetService('PlacementService', version='v201411')
# Create query.
values = [{
'key': 'placementId',
'value': {
'xsi_type': 'NumberValue',
'value': placement_id
}
}]
query = 'WHERE id = :placementId'
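  # The values list above binds the :placementId placeholder in this PQL query;
  # FilterStatement's third argument caps the statement at a single result.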
statement = dfp.FilterStatement(query, values, 1)
# Get placements by statement.
placements = placement_service.getPlacementsByStatement(
statement.ToStatement())
for placement in placements:
if not placement['targetingDescription']:
placement['targetingDescription'] = 'Generic description'
placement['targetingAdLocation'] = 'All images on sports pages.'
placement['targetingSiteName'] = 'http://code.google.com'
placement['isAdSenseTargetingEnabled'] = 'true'
# Update placements remotely.
placements = placement_service.updatePlacements(placements)
for placement in placements:
print ('Placement with id \'%s\', name \'%s\', and AdSense targeting '
'enabled \'%s\' was updated.'
% (placement['id'], placement['name'],
placement['isAdSenseTargetingEnabled']))
if __name__ == '__main__':
# Initialize client object.
dfp_client = dfp.DfpClient.LoadFromStorage()
main(dfp_client, PLACEMENT_ID)
| coxmediagroup/googleads-python-lib | examples/dfp/v201411/placement_service/update_placements.py | Python | apache-2.0 | 2,284 |
# Copyright 2013-2016 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import unittest2 as unittest
except ImportError:
import unittest # noqa
import sys
from datetime import datetime, timedelta, date, tzinfo
from decimal import Decimal as D
from uuid import uuid4, uuid1
from cassandra import InvalidRequest
from cassandra.cqlengine.columns import TimeUUID
from cassandra.cqlengine.columns import Ascii
from cassandra.cqlengine.columns import Text
from cassandra.cqlengine.columns import Integer
from cassandra.cqlengine.columns import BigInt
from cassandra.cqlengine.columns import VarInt
from cassandra.cqlengine.columns import DateTime
from cassandra.cqlengine.columns import Date
from cassandra.cqlengine.columns import UUID
from cassandra.cqlengine.columns import Boolean
from cassandra.cqlengine.columns import Decimal
from cassandra.cqlengine.columns import Inet
from cassandra.cqlengine.connection import execute
from cassandra.cqlengine.management import sync_table, drop_table
from cassandra.cqlengine.models import Model, ValidationError
from cassandra import util
from tests.integration import PROTOCOL_VERSION
from tests.integration.cqlengine.base import BaseCassEngTestCase
class TestDatetime(BaseCassEngTestCase):
class DatetimeTest(Model):
test_id = Integer(primary_key=True)
created_at = DateTime()
@classmethod
def setUpClass(cls):
sync_table(cls.DatetimeTest)
@classmethod
def tearDownClass(cls):
drop_table(cls.DatetimeTest)
def test_datetime_io(self):
now = datetime.now()
self.DatetimeTest.objects.create(test_id=0, created_at=now)
dt2 = self.DatetimeTest.objects(test_id=0).first()
assert dt2.created_at.timetuple()[:6] == now.timetuple()[:6]
def test_datetime_tzinfo_io(self):
class TZ(tzinfo):
def utcoffset(self, date_time):
return timedelta(hours=-1)
def dst(self, date_time):
return None
now = datetime(1982, 1, 1, tzinfo=TZ())
dt = self.DatetimeTest.objects.create(test_id=1, created_at=now)
dt2 = self.DatetimeTest.objects(test_id=1).first()
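        # The column stores UTC: 1982-01-01 00:00 at UTC-1 is 01:00 UTC, so the
        # naive datetime read back equals `now` shifted forward by one hour.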
assert dt2.created_at.timetuple()[:6] == (now + timedelta(hours=1)).timetuple()[:6]
def test_datetime_date_support(self):
today = date.today()
self.DatetimeTest.objects.create(test_id=2, created_at=today)
dt2 = self.DatetimeTest.objects(test_id=2).first()
assert dt2.created_at.isoformat() == datetime(today.year, today.month, today.day).isoformat()
def test_datetime_none(self):
dt = self.DatetimeTest.objects.create(test_id=3, created_at=None)
dt2 = self.DatetimeTest.objects(test_id=3).first()
assert dt2.created_at is None
dts = self.DatetimeTest.objects.filter(test_id=3).values_list('created_at')
assert dts[0][0] is None
def test_datetime_invalid(self):
dt_value= 'INVALID'
with self.assertRaises(TypeError):
self.DatetimeTest.objects.create(test_id=4, created_at=dt_value)
def test_datetime_timestamp(self):
dt_value = 1454520554
self.DatetimeTest.objects.create(test_id=5, created_at=dt_value)
dt2 = self.DatetimeTest.objects(test_id=5).first()
assert dt2.created_at == datetime.utcfromtimestamp(dt_value)
def test_datetime_large(self):
dt_value = datetime(2038, 12, 31, 10, 10, 10, 123000)
self.DatetimeTest.objects.create(test_id=6, created_at=dt_value)
dt2 = self.DatetimeTest.objects(test_id=6).first()
assert dt2.created_at == dt_value
def test_datetime_truncate_microseconds(self):
"""
Test to ensure that truncate microseconds works as expected.
This will be default behavior in the future and we will need to modify the tests to comply
with new behavior
@since 3.2
@jira_ticket PYTHON-273
@expected_result microseconds should be to the nearest thousand when truncate is set.
@test_category object_mapper
"""
DateTime.truncate_microseconds = True
try:
dt_value = datetime(2024, 12, 31, 10, 10, 10, 923567)
dt_truncated = datetime(2024, 12, 31, 10, 10, 10, 923000)
self.DatetimeTest.objects.create(test_id=6, created_at=dt_value)
dt2 = self.DatetimeTest.objects(test_id=6).first()
self.assertEqual(dt2.created_at,dt_truncated)
finally:
# We need to always return behavior to default
DateTime.truncate_microseconds = False
class TestBoolDefault(BaseCassEngTestCase):
class BoolDefaultValueTest(Model):
test_id = Integer(primary_key=True)
stuff = Boolean(default=True)
@classmethod
def setUpClass(cls):
sync_table(cls.BoolDefaultValueTest)
def test_default_is_set(self):
tmp = self.BoolDefaultValueTest.create(test_id=1)
self.assertEqual(True, tmp.stuff)
tmp2 = self.BoolDefaultValueTest.get(test_id=1)
self.assertEqual(True, tmp2.stuff)
class TestBoolValidation(BaseCassEngTestCase):
class BoolValidationTest(Model):
test_id = Integer(primary_key=True)
bool_column = Boolean()
@classmethod
def setUpClass(cls):
sync_table(cls.BoolValidationTest)
def test_validation_preserves_none(self):
test_obj = self.BoolValidationTest(test_id=1)
test_obj.validate()
self.assertIsNone(test_obj.bool_column)
class TestVarInt(BaseCassEngTestCase):
class VarIntTest(Model):
test_id = Integer(primary_key=True)
bignum = VarInt(primary_key=True)
@classmethod
def setUpClass(cls):
sync_table(cls.VarIntTest)
@classmethod
def tearDownClass(cls):
sync_table(cls.VarIntTest)
def test_varint_io(self):
        # TODO: this is a weird test. I changed the number from sys.maxint (which doesn't exist in python 3)
        # to the giant number below and it broke between runs.
long_int = 92834902384092834092384028340283048239048203480234823048230482304820348239
int1 = self.VarIntTest.objects.create(test_id=0, bignum=long_int)
int2 = self.VarIntTest.objects(test_id=0).first()
self.assertEqual(int1.bignum, int2.bignum)
class TestDate(BaseCassEngTestCase):
class DateTest(Model):
test_id = Integer(primary_key=True)
created_at = Date()
@classmethod
def setUpClass(cls):
if PROTOCOL_VERSION < 4:
return
sync_table(cls.DateTest)
@classmethod
def tearDownClass(cls):
if PROTOCOL_VERSION < 4:
return
drop_table(cls.DateTest)
def setUp(self):
if PROTOCOL_VERSION < 4:
raise unittest.SkipTest("Protocol v4 datatypes require native protocol 4+, currently using: {0}".format(PROTOCOL_VERSION))
def test_date_io(self):
today = date.today()
self.DateTest.objects.create(test_id=0, created_at=today)
result = self.DateTest.objects(test_id=0).first()
self.assertEqual(result.created_at, util.Date(today))
def test_date_io_using_datetime(self):
now = datetime.utcnow()
self.DateTest.objects.create(test_id=0, created_at=now)
result = self.DateTest.objects(test_id=0).first()
self.assertIsInstance(result.created_at, util.Date)
self.assertEqual(result.created_at, util.Date(now))
def test_date_none(self):
self.DateTest.objects.create(test_id=1, created_at=None)
dt2 = self.DateTest.objects(test_id=1).first()
assert dt2.created_at is None
dts = self.DateTest.objects(test_id=1).values_list('created_at')
assert dts[0][0] is None
class TestDecimal(BaseCassEngTestCase):
class DecimalTest(Model):
test_id = Integer(primary_key=True)
dec_val = Decimal()
@classmethod
def setUpClass(cls):
sync_table(cls.DecimalTest)
@classmethod
def tearDownClass(cls):
drop_table(cls.DecimalTest)
def test_decimal_io(self):
dt = self.DecimalTest.objects.create(test_id=0, dec_val=D('0.00'))
dt2 = self.DecimalTest.objects(test_id=0).first()
assert dt2.dec_val == dt.dec_val
dt = self.DecimalTest.objects.create(test_id=0, dec_val=5)
dt2 = self.DecimalTest.objects(test_id=0).first()
assert dt2.dec_val == D('5')
class TestUUID(BaseCassEngTestCase):
class UUIDTest(Model):
test_id = Integer(primary_key=True)
a_uuid = UUID(default=uuid4())
@classmethod
def setUpClass(cls):
sync_table(cls.UUIDTest)
@classmethod
def tearDownClass(cls):
drop_table(cls.UUIDTest)
def test_uuid_str_with_dashes(self):
a_uuid = uuid4()
t0 = self.UUIDTest.create(test_id=0, a_uuid=str(a_uuid))
t1 = self.UUIDTest.get(test_id=0)
assert a_uuid == t1.a_uuid
def test_uuid_str_no_dashes(self):
a_uuid = uuid4()
t0 = self.UUIDTest.create(test_id=1, a_uuid=a_uuid.hex)
t1 = self.UUIDTest.get(test_id=1)
assert a_uuid == t1.a_uuid
def test_uuid_with_upcase(self):
a_uuid = uuid4()
val = str(a_uuid).upper()
t0 = self.UUIDTest.create(test_id=0, a_uuid=val)
t1 = self.UUIDTest.get(test_id=0)
assert a_uuid == t1.a_uuid
class TestTimeUUID(BaseCassEngTestCase):
class TimeUUIDTest(Model):
test_id = Integer(primary_key=True)
timeuuid = TimeUUID(default=uuid1())
@classmethod
def setUpClass(cls):
sync_table(cls.TimeUUIDTest)
@classmethod
def tearDownClass(cls):
drop_table(cls.TimeUUIDTest)
def test_timeuuid_io(self):
"""
ensures that
:return:
"""
t0 = self.TimeUUIDTest.create(test_id=0)
t1 = self.TimeUUIDTest.get(test_id=0)
        assert t0.timeuuid.time == t1.timeuuid.time
class TestInteger(BaseCassEngTestCase):
class IntegerTest(Model):
test_id = UUID(primary_key=True, default=lambda:uuid4())
value = Integer(default=0, required=True)
def test_default_zero_fields_validate(self):
""" Tests that integer columns with a default value of 0 validate """
it = self.IntegerTest()
it.validate()
class TestBigInt(BaseCassEngTestCase):
class BigIntTest(Model):
test_id = UUID(primary_key=True, default=lambda:uuid4())
value = BigInt(default=0, required=True)
def test_default_zero_fields_validate(self):
""" Tests that bigint columns with a default value of 0 validate """
it = self.BigIntTest()
it.validate()
class TestAscii(BaseCassEngTestCase):
def test_min_length(self):
""" Test arbitrary minimal lengths requirements. """
Ascii(min_length=0).validate('')
Ascii(min_length=0).validate(None)
Ascii(min_length=0).validate('kevin')
Ascii(min_length=1).validate('k')
Ascii(min_length=5).validate('kevin')
Ascii(min_length=5).validate('kevintastic')
with self.assertRaises(ValidationError):
Ascii(min_length=1).validate('')
with self.assertRaises(ValidationError):
Ascii(min_length=1).validate(None)
with self.assertRaises(ValidationError):
Ascii(min_length=6).validate('')
with self.assertRaises(ValidationError):
Ascii(min_length=6).validate(None)
with self.assertRaises(ValidationError):
Ascii(min_length=6).validate('kevin')
with self.assertRaises(ValueError):
Ascii(min_length=-1)
def test_max_length(self):
""" Test arbitrary maximal lengths requirements. """
Ascii(max_length=0).validate('')
Ascii(max_length=0).validate(None)
Ascii(max_length=1).validate('')
Ascii(max_length=1).validate(None)
Ascii(max_length=1).validate('b')
Ascii(max_length=5).validate('')
Ascii(max_length=5).validate(None)
Ascii(max_length=5).validate('b')
Ascii(max_length=5).validate('blake')
with self.assertRaises(ValidationError):
Ascii(max_length=0).validate('b')
with self.assertRaises(ValidationError):
Ascii(max_length=5).validate('blaketastic')
with self.assertRaises(ValueError):
Ascii(max_length=-1)
def test_length_range(self):
Ascii(min_length=0, max_length=0)
Ascii(min_length=0, max_length=1)
Ascii(min_length=10, max_length=10)
Ascii(min_length=10, max_length=11)
with self.assertRaises(ValueError):
Ascii(min_length=10, max_length=9)
with self.assertRaises(ValueError):
Ascii(min_length=1, max_length=0)
def test_type_checking(self):
Ascii().validate('string')
Ascii().validate(u'unicode')
Ascii().validate(bytearray('bytearray', encoding='ascii'))
with self.assertRaises(ValidationError):
Ascii().validate(5)
with self.assertRaises(ValidationError):
Ascii().validate(True)
Ascii().validate("!#$%&\'()*+,-./")
with self.assertRaises(ValidationError):
Ascii().validate('Beyonc' + chr(233))
if sys.version_info < (3, 1):
with self.assertRaises(ValidationError):
Ascii().validate(u'Beyonc' + unichr(233))
def test_unaltering_validation(self):
""" Test the validation step doesn't re-interpret values. """
self.assertEqual(Ascii().validate(''), '')
self.assertEqual(Ascii().validate(None), None)
self.assertEqual(Ascii().validate('yo'), 'yo')
def test_non_required_validation(self):
""" Tests that validation is ok on none and blank values if required is False. """
Ascii().validate('')
Ascii().validate(None)
def test_required_validation(self):
""" Tests that validation raise on none and blank values if value required. """
Ascii(required=True).validate('k')
with self.assertRaises(ValidationError):
Ascii(required=True).validate('')
with self.assertRaises(ValidationError):
Ascii(required=True).validate(None)
# With min_length set.
Ascii(required=True, min_length=0).validate('k')
Ascii(required=True, min_length=1).validate('k')
with self.assertRaises(ValidationError):
Ascii(required=True, min_length=2).validate('k')
# With max_length set.
Ascii(required=True, max_length=1).validate('k')
with self.assertRaises(ValidationError):
Ascii(required=True, max_length=2).validate('kevin')
with self.assertRaises(ValueError):
Ascii(required=True, max_length=0)
class TestText(BaseCassEngTestCase):
def test_min_length(self):
""" Test arbitrary minimal lengths requirements. """
Text(min_length=0).validate('')
Text(min_length=0).validate(None)
Text(min_length=0).validate('blake')
Text(min_length=1).validate('b')
Text(min_length=5).validate('blake')
Text(min_length=5).validate('blaketastic')
with self.assertRaises(ValidationError):
Text(min_length=1).validate('')
with self.assertRaises(ValidationError):
Text(min_length=1).validate(None)
with self.assertRaises(ValidationError):
Text(min_length=6).validate('')
with self.assertRaises(ValidationError):
Text(min_length=6).validate(None)
with self.assertRaises(ValidationError):
Text(min_length=6).validate('blake')
with self.assertRaises(ValueError):
Text(min_length=-1)
def test_max_length(self):
""" Test arbitrary maximal lengths requirements. """
Text(max_length=0).validate('')
Text(max_length=0).validate(None)
Text(max_length=1).validate('')
Text(max_length=1).validate(None)
Text(max_length=1).validate('b')
Text(max_length=5).validate('')
Text(max_length=5).validate(None)
Text(max_length=5).validate('b')
Text(max_length=5).validate('blake')
with self.assertRaises(ValidationError):
Text(max_length=0).validate('b')
with self.assertRaises(ValidationError):
Text(max_length=5).validate('blaketastic')
with self.assertRaises(ValueError):
Text(max_length=-1)
def test_length_range(self):
Text(min_length=0, max_length=0)
Text(min_length=0, max_length=1)
Text(min_length=10, max_length=10)
Text(min_length=10, max_length=11)
with self.assertRaises(ValueError):
Text(min_length=10, max_length=9)
with self.assertRaises(ValueError):
Text(min_length=1, max_length=0)
def test_type_checking(self):
Text().validate('string')
Text().validate(u'unicode')
Text().validate(bytearray('bytearray', encoding='ascii'))
with self.assertRaises(ValidationError):
Text().validate(5)
with self.assertRaises(ValidationError):
Text().validate(True)
Text().validate("!#$%&\'()*+,-./")
Text().validate('Beyonc' + chr(233))
if sys.version_info < (3, 1):
Text().validate(u'Beyonc' + unichr(233))
def test_unaltering_validation(self):
""" Test the validation step doesn't re-interpret values. """
self.assertEqual(Text().validate(''), '')
self.assertEqual(Text().validate(None), None)
self.assertEqual(Text().validate('yo'), 'yo')
def test_non_required_validation(self):
""" Tests that validation is ok on none and blank values if required is False """
Text().validate('')
Text().validate(None)
def test_required_validation(self):
""" Tests that validation raise on none and blank values if value required. """
Text(required=True).validate('b')
with self.assertRaises(ValidationError):
Text(required=True).validate('')
with self.assertRaises(ValidationError):
Text(required=True).validate(None)
# With min_length set.
Text(required=True, min_length=0).validate('b')
Text(required=True, min_length=1).validate('b')
with self.assertRaises(ValidationError):
Text(required=True, min_length=2).validate('b')
# With max_length set.
Text(required=True, max_length=1).validate('b')
with self.assertRaises(ValidationError):
Text(required=True, max_length=2).validate('blake')
with self.assertRaises(ValueError):
Text(required=True, max_length=0)
class TestExtraFieldsRaiseException(BaseCassEngTestCase):
class TestModel(Model):
id = UUID(primary_key=True, default=uuid4)
def test_extra_field(self):
with self.assertRaises(ValidationError):
self.TestModel.create(bacon=5000)
class TestPythonDoesntDieWhenExtraFieldIsInCassandra(BaseCassEngTestCase):
class TestModel(Model):
__table_name__ = 'alter_doesnt_break_running_app'
id = UUID(primary_key=True, default=uuid4)
def test_extra_field(self):
drop_table(self.TestModel)
sync_table(self.TestModel)
self.TestModel.create()
execute("ALTER TABLE {0} add blah int".format(self.TestModel.column_family_name(include_keyspace=True)))
self.TestModel.objects().all()
class TestTimeUUIDFromDatetime(BaseCassEngTestCase):
def test_conversion_specific_date(self):
dt = datetime(1981, 7, 11, microsecond=555000)
uuid = util.uuid_from_time(dt)
from uuid import UUID
assert isinstance(uuid, UUID)
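        # 0x01b21dd213814000 is the number of 100-ns intervals between the UUID
        # epoch (1582-10-15) and the Unix epoch (1970-01-01); dividing by 1e7
        # converts those 100-ns ticks to seconds.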
ts = (uuid.time - 0x01b21dd213814000) / 1e7 # back to a timestamp
new_dt = datetime.utcfromtimestamp(ts)
# checks that we created a UUID1 with the proper timestamp
assert new_dt == dt
class TestInet(BaseCassEngTestCase):
class InetTestModel(Model):
id = UUID(primary_key=True, default=uuid4)
address = Inet()
def setUp(self):
drop_table(self.InetTestModel)
sync_table(self.InetTestModel)
def test_inet_saves(self):
tmp = self.InetTestModel.create(address="192.168.1.1")
m = self.InetTestModel.get(id=tmp.id)
assert m.address == "192.168.1.1"
def test_non_address_fails(self):
# TODO: presently this only tests that the server blows it up. Is there supposed to be local validation?
with self.assertRaises(InvalidRequest):
self.InetTestModel.create(address="what is going on here?")
| Richard-Mathie/cassandra_benchmark | vendor/github.com/datastax/python-driver/tests/integration/cqlengine/columns/test_validation.py | Python | apache-2.0 | 21,274 |
#!/usr/bin/env python
import os
from mi.logging import log
from mi.dataset.parser.zplsc_c import ZplscCParser
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.driver.zplsc_c.resource import RESOURCE_PATH
__author__ = 'Rene Gelinas'
MODULE_NAME = 'mi.dataset.parser.zplsc_c'
CLASS_NAME = 'ZplscCRecoveredDataParticle'
config = {
DataSetDriverConfigKeys.PARTICLE_MODULE: MODULE_NAME,
DataSetDriverConfigKeys.PARTICLE_CLASS: CLASS_NAME
}
def create_zplsc_c_parser(file_handle):
"""
This function creates a zplsc-c parser for recovered data.
@param file_handle - File handle of the ZPLSC_C raw data.
"""
return ZplscCParser(config, file_handle, rec_exception_callback)
def file_path(filename):
log.debug('resource path = %s, file name = %s', RESOURCE_PATH, filename)
return os.path.join(RESOURCE_PATH, filename)
def rec_exception_callback(exception):
"""
    Callback method for exceptions.
@param exception - Exception that occurred
"""
log.info("Exception occurred: %s", exception.message)
def zplsc_c_echogram_test():
with open(file_path('160501.01A')) as in_file:
parser = create_zplsc_c_parser(in_file)
parser.create_echogram()
if __name__ == '__main__':
zplsc_c_echogram_test()
| renegelinas/mi-instrument | mi/dataset/parser/test/test_zplsc_c_echogram.py | Python | bsd-2-clause | 1,302 |
import asyncio
import inspect
import warnings
from asgiref.sync import sync_to_async
class RemovedInDjango41Warning(DeprecationWarning):
pass
class RemovedInDjango50Warning(PendingDeprecationWarning):
pass
RemovedInNextVersionWarning = RemovedInDjango41Warning
class warn_about_renamed_method:
def __init__(self, class_name, old_method_name, new_method_name, deprecation_warning):
self.class_name = class_name
self.old_method_name = old_method_name
self.new_method_name = new_method_name
self.deprecation_warning = deprecation_warning
def __call__(self, f):
def wrapped(*args, **kwargs):
warnings.warn(
"`%s.%s` is deprecated, use `%s` instead." %
(self.class_name, self.old_method_name, self.new_method_name),
self.deprecation_warning, 2)
return f(*args, **kwargs)
return wrapped
class RenameMethodsBase(type):
"""
Handles the deprecation paths when renaming a method.
It does the following:
1) Define the new method if missing and complain about it.
2) Define the old method if missing.
3) Complain whenever an old method is called.
See #15363 for more details.
"""
renamed_methods = ()
def __new__(cls, name, bases, attrs):
new_class = super().__new__(cls, name, bases, attrs)
for base in inspect.getmro(new_class):
class_name = base.__name__
for renamed_method in cls.renamed_methods:
old_method_name = renamed_method[0]
old_method = base.__dict__.get(old_method_name)
new_method_name = renamed_method[1]
new_method = base.__dict__.get(new_method_name)
deprecation_warning = renamed_method[2]
wrapper = warn_about_renamed_method(class_name, *renamed_method)
# Define the new method if missing and complain about it
if not new_method and old_method:
warnings.warn(
"`%s.%s` method should be renamed `%s`." %
(class_name, old_method_name, new_method_name),
deprecation_warning, 2)
setattr(base, new_method_name, old_method)
setattr(base, old_method_name, wrapper(old_method))
# Define the old method as a wrapped call to the new method.
if not old_method and new_method:
setattr(base, old_method_name, wrapper(new_method))
return new_class
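# Illustrative sketch (hypothetical, not part of Django's public API): a
# concrete metaclass built on RenameMethodsBase that renames `old_ping` to
# `ping` on any class created with it.
class ExampleRenameMethods(RenameMethodsBase):
    renamed_methods = (
        ('old_ping', 'ping', RemovedInNextVersionWarning),
    )
# A consumer would then declare, e.g.:
#     class Pinger(metaclass=ExampleRenameMethods):
#         def old_ping(self): ...
# which defines `ping` automatically, warns that `old_ping` should be renamed,
# and warns again whenever the old name is called.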
class DeprecationInstanceCheck(type):
def __instancecheck__(self, instance):
warnings.warn(
"`%s` is deprecated, use `%s` instead." % (self.__name__, self.alternative),
self.deprecation_warning, 2
)
return super().__instancecheck__(instance)
class MiddlewareMixin:
sync_capable = True
async_capable = True
def __init__(self, get_response):
if get_response is None:
raise ValueError('get_response must be provided.')
self.get_response = get_response
self._async_check()
super().__init__()
def _async_check(self):
"""
If get_response is a coroutine function, turns us into async mode so
a thread is not consumed during a whole request.
"""
if asyncio.iscoroutinefunction(self.get_response):
# Mark the class as async-capable, but do the actual switch
# inside __call__ to avoid swapping out dunder methods
self._is_coroutine = asyncio.coroutines._is_coroutine
def __call__(self, request):
# Exit out to async mode, if needed
if asyncio.iscoroutinefunction(self.get_response):
return self.__acall__(request)
response = None
if hasattr(self, 'process_request'):
response = self.process_request(request)
response = response or self.get_response(request)
if hasattr(self, 'process_response'):
response = self.process_response(request, response)
return response
async def __acall__(self, request):
"""
Async version of __call__ that is swapped in when an async request
is running.
"""
response = None
if hasattr(self, 'process_request'):
response = await sync_to_async(
self.process_request,
thread_sensitive=True,
)(request)
response = response or await self.get_response(request)
if hasattr(self, 'process_response'):
response = await sync_to_async(
self.process_response,
thread_sensitive=True,
)(request, response)
return response
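# Illustrative sketch (hypothetical, not part of Django): a middleware written
# in the process_request/process_response style that MiddlewareMixin adapts to
# both the sync and async request paths implemented above.
class ExampleHeaderMiddleware(MiddlewareMixin):
    def process_response(self, request, response):
        response['X-Example'] = 'middleware-mixin'  # hypothetical header name
        return response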
| elena/django | django/utils/deprecation.py | Python | bsd-3-clause | 4,818 |
from OpenGL import GL
import numpy
from depths import DepthOffset
from pymclevel import BoundingBox
from config import config
from albow.translate import _
class EditorTool(object):
surfaceBuild = False
panel = None
optionsPanel = None
toolIconName = None
worldTooltipText = None
previewRenderer = None
tooltipText = "???"
def levelChanged(self):
""" called after a level change """
pass
@property
def statusText(self):
return ""
@property
def cameraDistance(self):
return self.editor.cameraToolDistance
def toolEnabled(self):
return True
def __init__(self, editor):
self.editor = editor
self.__hotkey = None
@property
def hotkey(self):
return _(self.__hotkey)
@hotkey.setter
def hotkey(self, k):
self.__hotkey = k
def toolReselected(self):
pass
def toolSelected(self):
pass
def drawTerrainReticle(self):
pass
def drawTerrainMarkers(self):
pass
def drawTerrainPreview(self, origin):
if self.previewRenderer is None:
return
self.previewRenderer.origin = map(lambda a, b: a - b, origin, self.level.bounds.origin)
GL.glPolygonOffset(DepthOffset.ClonePreview, DepthOffset.ClonePreview)
GL.glEnable(GL.GL_POLYGON_OFFSET_FILL)
self.previewRenderer.draw()
GL.glDisable(GL.GL_POLYGON_OFFSET_FILL)
def rotate(self, amount=1, blocksOnly=False):
pass
def roll(self, amount=1, blocksOnly=False):
pass
def flip(self, amount=1, blocksOnly=False):
pass
def mirror(self, amount=1, blocksOnly=False):
pass
def swap(self, amount=1):
pass
def mouseDown(self, evt, pos, direction):
'''pos is the coordinates of the block under the cursor,
direction indicates which face is under it. the tool performs
its action on the specified block'''
pass
def mouseUp(self, evt, pos, direction):
pass
def mouseDrag(self, evt, pos, direction):
pass
def keyDown(self, evt):
pass
def keyUp(self, evt):
pass
def increaseToolReach(self):
"Return True if the tool handles its own reach"
return False
def decreaseToolReach(self):
"Return True if the tool handles its own reach"
return False
def resetToolReach(self):
"Return True if the tool handles its own reach"
return False
def confirm(self):
''' called when user presses enter '''
pass
def cancel(self):
        '''cancel the current operation. called when a different tool
        is picked, escape is pressed, or the action is otherwise aborted'''
self.hidePanel()
# pass
def findBestTrackingPlane(self, face):
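        # Zero the axis of the clicked face, then return whichever remaining
        # axis the camera vector is most strongly aligned with.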
cv = list(self.editor.mainViewport.cameraVector)
cv[face >> 1] = 0
cv = map(abs, cv)
return cv.index(max(cv))
def drawToolReticle(self):
'''get self.editor.blockFaceUnderCursor for pos and direction.
pos is the coordinates of the block under the cursor,
direction indicates which face is under it. draw something to
let the user know where the tool is going to act. e.g. a
transparent block for the block placing tool.'''
pass
def drawToolMarkers(self):
''' draw any markers the tool wants to leave in the field
while another tool is out. e.g. the current selection for
SelectionTool'''
pass
def selectionChanged(self):
""" called when the selection changes due to nudge. other tools can be active. """
pass
edge_factor = 0.1
def boxFaceUnderCursor(self, box):
if self.editor.mainViewport.mouseMovesCamera:
return None, None
p0 = self.editor.mainViewport.cameraPosition
normal = self.editor.mainViewport.mouseVector
if normal is None:
return None, None
points = {}
# glPointSize(5.0)
# glColor(1.0, 1.0, 0.0, 1.0)
# glBegin(GL_POINTS)
for dim in range(3):
dim1 = dim + 1
dim2 = dim + 2
dim1 %= 3
dim2 %= 3
def pointInBounds(point, x):
return box.origin[x] <= point[x] <= box.maximum[x]
neg = normal[dim] < 0
for side in 0, 1:
d = (box.maximum, box.origin)[side][dim] - p0[dim]
if d >= 0 or (neg and d <= 0):
if normal[dim]:
scale = d / normal[dim]
point = map(lambda a, p: (a * scale + p), normal, p0)
# glVertex3f(*point)
if pointInBounds(point, dim1) and pointInBounds(point, dim2):
points[dim * 2 + side] = point
# glEnd()
if not len(points):
return None, None
cp = self.editor.mainViewport.cameraPosition
distances = dict(
(numpy.sum(map(lambda a, b: (b - a) ** 2, cp, point)), (face, point)) for face, point in points.iteritems())
if not len(distances):
return None, None
# When holding alt, pick the face opposite the camera
# if key.get_mods() & KMOD_ALT:
# minmax = max
# else:
face, point = distances[min(distances.iterkeys())]
# if the point is near the edge of the face, and the edge is facing away,
# return the away-facing face
dim = face // 2
dim1, dim2 = dim + 1, dim + 2
dim1, dim2 = dim1 % 3, dim2 % 3
cv = self.editor.mainViewport.cameraVector
# determine if a click was within self.edge_factor of the edge of a selection box side. if so, click through
# to the opposite side
for d in dim1, dim2:
edge_width = box.size[d] * self.edge_factor
facenormal = [0, 0, 0]
cameraBehind = False
if point[d] - box.origin[d] < edge_width:
facenormal[d] = -1
cameraBehind = cp[d] - box.origin[d] > 0
if point[d] - box.maximum[d] > -edge_width:
facenormal[d] = 1
cameraBehind = cp[d] - box.maximum[d] < 0
if numpy.dot(facenormal, cv) > 0 or cameraBehind:
# the face adjacent to the clicked edge faces away from the cam
return distances[max(distances.iterkeys())]
return face, point
def selectionCorners(self):
""" returns the positions of the two selection corners as a pair of 3-tuples, each ordered x,y,z """
        if (self.editor.selectionTool.bottomLeftPoint is not None and
            self.editor.selectionTool.topRightPoint is not None):
return (self.editor.selectionTool.bottomLeftPoint,
self.editor.selectionTool.topRightPoint)
return None
def selectionBoxForCorners(self, p1, p2):
''' considers p1,p2 as the marked corners of a selection.
returns a BoundingBox containing all the blocks within.'''
if self.editor.level is None:
return None
p1, p2 = list(p1), list(p2)
# d = [(a-b) for a,b in zip(p1,p2)]
for i in range(3):
if p1[i] > p2[i]:
t = p2[i]
p2[i] = p1[i]
p1[i] = t
p2[i] += 1
size = map(lambda a, b: a - b, p2, p1)
if p1[1] < 0:
size[1] += p1[1]
p1[1] = 0
h = self.editor.level.Height
if p1[1] >= h:
p1[1] = h - 1
size[1] = 1
if p1[1] + size[1] >= h:
size[1] = h - p1[1]
return BoundingBox(p1, size)
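    # Illustrative example: for corners (3, 5, 2) and (1, 9, 4), in any order,
    # the box returned has origin (1, 5, 2) and size (3, 5, 3); the greater
    # corner is bumped by one block on each axis so it acts as an exclusive bound.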
def selectionBox(self):
''' selection corners, ordered, with the greater point moved up one block for use as the ending value of an array slice '''
c = self.selectionCorners()
if c:
return self.selectionBoxForCorners(*c)
return None
def selectionSize(self):
''' returns a tuple containing the size of the selection (x,y,z)'''
c = self.selectionBox()
if c is None:
return None
return c.size
@property
def maxBlocks(self):
return config.settings.blockBuffer.get() / 2 # assume block buffer in bytes
def showPanel(self):
pass
def hidePanel(self):
if self.panel and self.panel.parent:
self.panel.parent.remove(self.panel)
self.panel = None
| gpmidi/MCEdit-Unified | editortools/editortool.py | Python | isc | 8,663 |
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import ast
import os.path
import platform
import re
import sys
class Config(object):
  '''A Config contains a dictionary that specifies a build configuration.'''
# Valid values for target_os:
OS_ANDROID = 'android'
OS_CHROMEOS = 'chromeos'
OS_LINUX = 'linux'
OS_MAC = 'mac'
OS_WINDOWS = 'windows'
# Valid values for target_cpu:
ARCH_X86 = 'x86'
ARCH_X64 = 'x64'
ARCH_ARM = 'arm'
def __init__(self, build_dir=None, target_os=None, target_cpu=None,
is_debug=None, is_verbose=None, apk_name='MojoRunner.apk'):
'''Function arguments take precedence over GN args and default values.'''
assert target_os in (None, Config.OS_ANDROID, Config.OS_CHROMEOS,
Config.OS_LINUX, Config.OS_MAC, Config.OS_WINDOWS)
assert target_cpu in (None, Config.ARCH_X86, Config.ARCH_X64,
Config.ARCH_ARM)
assert is_debug in (None, True, False)
assert is_verbose in (None, True, False)
self.values = {
'build_dir': build_dir,
'target_os': self.GetHostOS(),
'target_cpu': self.GetHostCPU(),
'is_debug': True,
'is_verbose': True,
'dcheck_always_on': False,
'is_asan': False,
'apk_name': apk_name,
}
self._ParseGNArgs()
if target_os is not None:
self.values['target_os'] = target_os
if target_cpu is not None:
self.values['target_cpu'] = target_cpu
if is_debug is not None:
self.values['is_debug'] = is_debug
if is_verbose is not None:
self.values['is_verbose'] = is_verbose
@staticmethod
def GetHostOS():
if sys.platform == 'linux2':
return Config.OS_LINUX
if sys.platform == 'darwin':
return Config.OS_MAC
if sys.platform == 'win32':
return Config.OS_WINDOWS
raise NotImplementedError('Unsupported host OS')
@staticmethod
def GetHostCPU():
# Derived from //native_client/pynacl/platform.py
machine = platform.machine()
if machine in ('x86', 'x86-32', 'x86_32', 'x8632', 'i386', 'i686', 'ia32',
'32'):
return Config.ARCH_X86
if machine in ('x86-64', 'amd64', 'AMD64', 'x86_64', 'x8664', '64'):
return Config.ARCH_X64
if machine.startswith('arm'):
return Config.ARCH_ARM
raise Exception('Cannot identify CPU arch: %s' % machine)
def _ParseGNArgs(self):
'''Parse the gn config file from the build directory, if it exists.'''
TRANSLATIONS = { 'true': 'True', 'false': 'False', }
if self.values['build_dir'] is None:
return
gn_file = os.path.join(self.values['build_dir'], 'args.gn')
if not os.path.isfile(gn_file):
return
with open(gn_file, 'r') as f:
for line in f:
line = re.sub('\s*#.*', '', line)
result = re.match('^\s*(\w+)\s*=\s*(.*)\s*$', line)
if result:
key = result.group(1)
value = result.group(2)
self.values[key] = ast.literal_eval(TRANSLATIONS.get(value, value))
# Getters for standard fields ------------------------------------------------
@property
def build_dir(self):
'''Build directory path.'''
return self.values['build_dir']
@property
def target_os(self):
'''OS of the build/test target.'''
return self.values['target_os']
@property
def target_cpu(self):
'''CPU arch of the build/test target.'''
return self.values['target_cpu']
@property
def is_debug(self):
'''Is Debug build?'''
return self.values['is_debug']
@property
def is_verbose(self):
'''Should print additional logging information?'''
return self.values['is_verbose']
@property
def dcheck_always_on(self):
'''DCHECK is fatal even in release builds'''
return self.values['dcheck_always_on']
@property
def is_asan(self):
'''Is ASAN build?'''
return self.values['is_asan']
@property
def apk_name(self):
'''Name of the APK file to run'''
return self.values['apk_name']
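# --- Illustrative usage sketch, not part of the original module ---
# A minimal example of how Config resolves values: explicit constructor
# arguments override anything parsed from <build_dir>/args.gn, which in
# turn overrides the host-derived defaults. The build directory below is
# a hypothetical path used purely for illustration.
if __name__ == '__main__':
    example = Config(build_dir='out/Debug', target_os=Config.OS_ANDROID)
    print('target_os: %s' % example.target_os)    # 'android' (explicit argument wins)
    print('target_cpu: %s' % example.target_cpu)  # host CPU unless args.gn overrides it
    print('is_debug: %s' % example.is_debug)      # True unless args.gn sets is_debug = false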
| junhuac/MQUIC | src/mojo/tools/mopy/config.py | Python | mit | 4,103 |
"""
For a detailed gene table and a summary gene table
"""
#!/usr/bin/env python
from collections import defaultdict
filename = 'detailed_gene_table_v75'
detailed_out = open(filename, 'w')
file = 'summary_gene_table_v75'
summary_out = open(file, 'w')
# write out files for detailed and summary gene table
detailed_out.write("\t".join(["Chromosome","Gene_name","Is_hgnc","Ensembl_gene_id","Ensembl_transcript_id","Biotype",
"Transcript_status","CCDS_id","HGNC_id","CDS_length","Protein_length",
"Transcript_start","Transcript_end","strand","Synonyms",
"Rvis_pct","entrez_gene_id","mammalian_phenotype_id"]))
detailed_out.write("\n")
summary_out.write("\t".join(["Chromosome","Gene_name","Is_hgnc","Ensembl_gene_id",
"HGNC_id","Synonyms", "Rvis_pct","Strand","Transcript_min_start","Transcript_max_end","Mammalian_phenotype_id"]))
summary_out.write("\n")
mouse_phenotype = defaultdict(list)
genic_intolerance = defaultdict(list)
keygene = []
list_hgnc = []
#initializing values for the summary gene table
transcript_min = defaultdict(list)
transcript_max = defaultdict(list)
lines_seen = set()
for line in open("genic_intolerance_dataset2", 'r'):
if line.startswith("#") is False:
field = line.strip().split("\t")
name = str(field[0])
score = str(field[1])
percentile = str(field[2])
(key,value) = (name, percentile)
genic_intolerance[name].append(percentile)
#Phenotype data from MGI - Jax
for row in open("HMD_HumanPhenotype", 'r'):
col = row.strip().split("\t")
#Remove leading white spaces in the column
entrez_id = str(col[1]).lstrip()
#Remove leading white spaces in the column & join MP terms with a comma
mph = str(col[5]).lstrip().replace(' ',',') if str(col[5]) != '' else None
(key,value) = (entrez_id, mph)
mouse_phenotype[entrez_id].append(mph)
# Dictionary for summary gene table to handle transcript min, max co-ordinates
for each in open("raw_gene_table", 'r'):
if each.startswith("Chromosome") is False:
k = each.strip().split("\t")
chr = "chr"+str((k[0]))
ens = str(k[2])
start = str(k[10])
end = str(k[11])
transcript_min[(chr,ens)].append(start)
transcript_max[(chr,ens)].append(end)
for each in open("raw_gene_table", 'r'):
if each.startswith("Chromosome") is False:
k = each.strip().split("\t")
chrom = "chr"+str((k[0]))
hgnc = str(k[1])
ens_geneid = str(k[2])
ens_transid = str(k[3])
trans_biotype = str(k[4])
status = str(k[5])
ccds_id = str(k[6]) #these id's are unique to transcripts
hgnc_id = str(k[7])
cds_len = str(k[8])
protein_len = str(k[9])
transcript_start = str(k[10])
transcript_end = str(k[11])
strand = str(k[12])
#remove space between names
previous = str(k[13]).replace(" ","")
synonyms = str(k[14]).replace(" ","")
entrez = str(k[15])
# sort all transcript start and end positions for a gene (use ens_geneid, since HGNC is not always true)
# Capture the first and the last position from the sorted list to give min, max
if (chrom,ens_geneid) in transcript_min:
minmum = sorted(transcript_min[(chrom,ens_geneid)])[0]
if (chrom,ens_geneid) in transcript_max:
maxmum = sorted(transcript_max[(chrom,ens_geneid)])[-1]
rvis = genic_intolerance[hgnc][0] if hgnc in genic_intolerance else None
pheno = mouse_phenotype[entrez] if entrez in mouse_phenotype else None
if pheno is not None and len(pheno) == 1:
phenotype = pheno[0]
elif pheno is None:
phenotype = "None"
else:
if len(pheno) > 1:
#convert the list to a string
string = ",".join(pheno)
# store a None for multiple Nones
if "None" in string and "MP:" not in string:
phenotype = None
#remove redundancy in MP terms
if "None" not in string and "MP:" in string:
phenotype = ",".join(set(string.split(",")))
#remove nones when MP terms are available
if "None" in string and "MP:" in string:
phen = string.split(",")
phenotype = ",".join([x for x in phen if x != "None"])
if hgnc != "None":
list_hgnc.append(hgnc)
#we don't want string of Nones
if "None" in previous and "None" in synonyms and "None" in hgnc:
string = None
else:
# We would like all genes names to be put together
gene_string = hgnc+","+previous+","+synonyms
#get rid of Nones in gene strings
if gene_string.startswith("None"):
string = gene_string.replace("None,","")
else:
string = gene_string.replace(",None","")
#Nonetype object has no attribute split
if string is not None:
genes = set(string.split(","))
if len(genes) > 1:
# We would like to represent each member of the gene list as a key and the remainder as synonyms each time
for each in genes:
keygene = set([each])
synonym = genes.difference(keygene)
gene_name = ','.join(keygene)
other_names = ','.join(synonym)
hgnc_flag = "1" if gene_name in list_hgnc else "0"
# only when the gene is a HGNC name, it would have an hgnc id
is_hgnc_id = hgnc_id if gene_name in list_hgnc else "None"
# handling duplicate lines (due to transcripts) in summary table (which we don't care for in this table)
# writing to outfile for the summary gene table
line = "\t".join([chrom,gene_name,hgnc_flag,ens_geneid,is_hgnc_id,
other_names,str(rvis),strand,minmum,maxmum,str(phenotype)])
if line not in lines_seen:
summary_out.write(line)
summary_out.write("\n")
lines_seen.add(line)
# Writing to out for detailed gene table
detailed_out.write("\t".join([chrom,gene_name,hgnc_flag,ens_geneid,ens_transid,trans_biotype,
status,ccds_id,is_hgnc_id,cds_len,protein_len,transcript_start,
transcript_end,strand,other_names,str(rvis),entrez,str(phenotype)]))
detailed_out.write("\n")
# if there is one gene name in the list, we just want it to be the key
elif len(genes) == 1:
gene_name = ','.join(genes)
other_names = "None"
hgnc_flag = "1" if gene_name in list_hgnc else "0"
is_hgnc_id = hgnc_id if gene_name in list_hgnc else "None"
# handling duplicate lines (due to transcripts) in summary table (which we don't care for in this table)
# writing to outfile for the summary gene table
line = "\t".join([chrom,str(gene_name),hgnc_flag,ens_geneid,is_hgnc_id,
other_names,str(rvis),strand,minmum,maxmum,str(phenotype)])
if line not in lines_seen:
summary_out.write(line)
summary_out.write("\n")
lines_seen.add(line)
# write to out for detailed gene table
detailed_out.write("\t".join([chrom,str(gene_name),hgnc_flag,ens_geneid,ens_transid,trans_biotype,
status,ccds_id,is_hgnc_id,cds_len,protein_len,transcript_start,
transcript_end,strand,other_names,str(rvis),entrez,str(phenotype)]))
detailed_out.write("\n")
# if there are no HGNC, previous or synonyms names for an ensembl entry, just return None
elif string is None:
gene_name = "None"
other_names = "None"
hgnc_flag = "0"
is_hgnc_id = "None"
#handling duplicate lines (due to transcripts) in summary table (which we don't care for in this table)
#writing to outfile for the summary gene table
line = "\t".join([chrom,gene_name,hgnc_flag,ens_geneid,is_hgnc_id,
other_names,str(rvis),strand,minmum,maxmum,str(phenotype)])
if line not in lines_seen:
summary_out.write(line)
summary_out.write("\n")
lines_seen.add(line)
# probably we still want to print these lines where gene is none since ensembl gene id has value
detailed_out.write("\t".join([chrom,gene_name,hgnc_flag,ens_geneid,ens_transid,trans_biotype,status,
ccds_id,is_hgnc_id,cds_len,protein_len,transcript_start,transcript_end,
strand,other_names,str(rvis),entrez,str(phenotype)]))
detailed_out.write("\n")
detailed_out.close()
summary_out.close()
| brentp/gemini | gemini/annotation_provenance/gene_table/combined_gene_table.py | Python | mit | 9,693 |
import re
import chardet
import sys
RE_CHARSET = re.compile(br'<meta.*?charset=["\']*(.+?)["\'>]', flags=re.I)
RE_PRAGMA = re.compile(br'<meta.*?content=["\']*;?charset=(.+?)["\'>]', flags=re.I)
RE_XML = re.compile(br'^<\?xml.*?encoding=["\']*(.+?)["\'>]')
CHARSETS = {
'big5': 'big5hkscs',
'gb2312': 'gb18030',
'ascii': 'utf-8',
'maccyrillic': 'cp1251',
'win1251': 'cp1251',
'win-1251': 'cp1251',
'windows-1251': 'cp1251',
}
def fix_charset(encoding):
"""Overrides encoding when charset declaration
or charset determination is a subset of a larger
charset. Created because of issues with Chinese websites"""
encoding = encoding.lower()
return CHARSETS.get(encoding, encoding)
def get_encoding(page):
# Regex for XML and HTML Meta charset declaration
declared_encodings = (RE_CHARSET.findall(page) +
RE_PRAGMA.findall(page) +
RE_XML.findall(page))
# Try any declared encodings
for declared_encoding in declared_encodings:
try:
if sys.version_info[0] == 3:
# declared_encoding will actually be bytes but .decode() only
# accepts `str` type. Decode blindly with ascii because no one should
# ever use non-ascii characters in the name of an encoding.
declared_encoding = declared_encoding.decode('ascii', 'replace')
encoding = fix_charset(declared_encoding)
# Now let's decode the page
page.decode()
# It worked!
return encoding
except UnicodeDecodeError:
pass
# Fallback to chardet if declared encodings fail
# Remove all HTML tags, and leave only text for chardet
    text = re.sub(br'(\s*</?[^>]*>)+\s*', b' ', page).strip()
enc = 'utf-8'
if len(text) < 10:
return enc # can't guess
res = chardet.detect(text)
enc = res['encoding'] or 'utf-8'
#print '->', enc, "%.2f" % res['confidence']
enc = fix_charset(enc)
return enc
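# --- Illustrative usage sketch, not part of the original module ---
# get_encoding() expects raw bytes, tries any charset declared in the
# markup first, and only falls back to chardet when no declaration
# decodes cleanly. The HTML snippet below is made up for the example.
if __name__ == '__main__':
    sample = b'<html><head><meta charset="utf-8"></head><body>hello</body></html>'
    print(get_encoding(sample))  # expected: 'utf-8'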
| AlphaCluster/NewsBlur | vendor/readability/encoding.py | Python | mit | 2,034 |
#
# Copyright (C) 2010 Stanislav Bohm
#
# This file is part of Kaira.
#
# Kaira is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3 of the License, or
# (at your option) any later version.
#
# Kaira is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Kaira. If not, see <http://www.gnu.org/licenses/>.
#
import argparse
import sys
import paths
sys.path.append(paths.PTP_DIR)
import loader
import os
import tracelog
def export(filename, directory, trace, lib):
p = loader.load_project(filename)
if trace and lib:
target = "libtraced"
elif trace:
target = "traced"
elif lib:
target = "lib"
else:
target = "release"
build_config = p.get_build_config(target)
if directory is not None:
build_config.directory = directory
else:
build_config.directory = os.path.dirname(filename)
p.export(build_config)
def check_tracelog(filename):
t = tracelog.TraceLog(filename)
print t.get_runinstances_count()
def main():
parser = argparse.ArgumentParser(description='Kaira gui command line controller')
parser.add_argument('--export', metavar='filename', type=str)
parser.add_argument('--output', metavar='directory', type=str)
parser.add_argument("--trace", action='store_true')
parser.add_argument('--tracelog', metavar='filename', type=str)
parser.add_argument("--lib", action='store_true')
args = parser.parse_args()
if args.export:
export(os.path.abspath(args.export), args.output, args.trace, args.lib)
return
if args.tracelog:
check_tracelog(args.tracelog)
if __name__ == "__main__":
main()
| MrPablozOne/kaira | gui/cmdutils.py | Python | gpl-3.0 | 2,053 |
# (c) 2014 Michael DeHaan, <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import ansible.constants as C
from ansible.errors import AnsibleParserError, AnsibleError, AnsibleAssertionError
from ansible.module_utils.six import iteritems, string_types
from ansible.module_utils._text import to_text
from ansible.parsing.splitter import parse_kv, split_args
from ansible.plugins.loader import module_loader, action_loader
from ansible.template import Templar
from ansible.utils.sentinel import Sentinel
# For filtering out modules correctly below
FREEFORM_ACTIONS = frozenset(C.MODULE_REQUIRE_ARGS)
RAW_PARAM_MODULES = FREEFORM_ACTIONS.union((
'include',
'include_vars',
'include_tasks',
'include_role',
'import_tasks',
'import_role',
'add_host',
'group_by',
'set_fact',
'meta',
))
BUILTIN_TASKS = frozenset((
'meta',
'include',
'include_tasks',
'include_role',
'import_tasks',
'import_role'
))
class ModuleArgsParser:
"""
There are several ways a module and argument set can be expressed:
# legacy form (for a shell command)
- action: shell echo hi
# common shorthand for local actions vs delegate_to
- local_action: shell echo hi
# most commonly:
- copy: src=a dest=b
# legacy form
- action: copy src=a dest=b
# complex args form, for passing structured data
- copy:
src: a
dest: b
# gross, but technically legal
- action:
module: copy
args:
src: a
dest: b
# Standard YAML form for command-type modules. In this case, the args specified
# will act as 'defaults' and will be overridden by any args specified
# in one of the other formats (complex args under the action, or
# parsed from the k=v string
- command: 'pwd'
args:
chdir: '/tmp'
This class has some of the logic to canonicalize these into the form
- module: <module_name>
delegate_to: <optional>
args: <args>
Args may also be munged for certain shell command parameters.
"""
def __init__(self, task_ds=None, collection_list=None):
task_ds = {} if task_ds is None else task_ds
if not isinstance(task_ds, dict):
raise AnsibleAssertionError("the type of 'task_ds' should be a dict, but is a %s" % type(task_ds))
self._task_ds = task_ds
self._collection_list = collection_list
# delayed local imports to prevent circular import
from ansible.playbook.task import Task
from ansible.playbook.handler import Handler
# store the valid Task/Handler attrs for quick access
self._task_attrs = set(Task._valid_attrs.keys())
self._task_attrs.update(set(Handler._valid_attrs.keys()))
# HACK: why are these not FieldAttributes on task with a post-validate to check usage?
self._task_attrs.update(['local_action', 'static'])
self._task_attrs = frozenset(self._task_attrs)
self.internal_redirect_list = []
def _split_module_string(self, module_string):
'''
when module names are expressed like:
action: copy src=a dest=b
the first part of the string is the name of the module
and the rest are strings pertaining to the arguments.
'''
tokens = split_args(module_string)
if len(tokens) > 1:
return (tokens[0].strip(), " ".join(tokens[1:]))
else:
return (tokens[0].strip(), "")
def _normalize_parameters(self, thing, action=None, additional_args=None):
'''
arguments can be fuzzy. Deal with all the forms.
'''
additional_args = {} if additional_args is None else additional_args
# final args are the ones we'll eventually return, so first update
# them with any additional args specified, which have lower priority
# than those which may be parsed/normalized next
final_args = dict()
if additional_args:
if isinstance(additional_args, string_types):
templar = Templar(loader=None)
if templar.is_template(additional_args):
final_args['_variable_params'] = additional_args
else:
raise AnsibleParserError("Complex args containing variables cannot use bare variables (without Jinja2 delimiters), "
"and must use the full variable style ('{{var_name}}')")
elif isinstance(additional_args, dict):
final_args.update(additional_args)
else:
raise AnsibleParserError('Complex args must be a dictionary or variable string ("{{var}}").')
# how we normalize depends if we figured out what the module name is
# yet. If we have already figured it out, it's a 'new style' invocation.
# otherwise, it's not
if action is not None:
args = self._normalize_new_style_args(thing, action)
else:
(action, args) = self._normalize_old_style_args(thing)
# this can occasionally happen, simplify
if args and 'args' in args:
tmp_args = args.pop('args')
if isinstance(tmp_args, string_types):
tmp_args = parse_kv(tmp_args)
args.update(tmp_args)
# only internal variables can start with an underscore, so
# we don't allow users to set them directly in arguments
if args and action not in FREEFORM_ACTIONS:
for arg in args:
arg = to_text(arg)
if arg.startswith('_ansible_'):
raise AnsibleError("invalid parameter specified for action '%s': '%s'" % (action, arg))
# finally, update the args we're going to return with the ones
# which were normalized above
if args:
final_args.update(args)
return (action, final_args)
def _normalize_new_style_args(self, thing, action):
'''
deals with fuzziness in new style module invocations
accepting key=value pairs and dictionaries, and returns
a dictionary of arguments
possible example inputs:
'echo hi', 'shell'
{'region': 'xyz'}, 'ec2'
standardized outputs like:
{ _raw_params: 'echo hi', _uses_shell: True }
'''
if isinstance(thing, dict):
# form is like: { xyz: { x: 2, y: 3 } }
args = thing
elif isinstance(thing, string_types):
# form is like: copy: src=a dest=b
check_raw = action in FREEFORM_ACTIONS
args = parse_kv(thing, check_raw=check_raw)
elif thing is None:
# this can happen with modules which take no params, like ping:
args = None
else:
raise AnsibleParserError("unexpected parameter type in action: %s" % type(thing), obj=self._task_ds)
return args
def _normalize_old_style_args(self, thing):
'''
deals with fuzziness in old-style (action/local_action) module invocations
returns tuple of (module_name, dictionary_args)
possible example inputs:
{ 'shell' : 'echo hi' }
'shell echo hi'
{'module': 'ec2', 'x': 1 }
standardized outputs like:
('ec2', { 'x': 1} )
'''
action = None
args = None
if isinstance(thing, dict):
# form is like: action: { module: 'copy', src: 'a', dest: 'b' }
thing = thing.copy()
if 'module' in thing:
action, module_args = self._split_module_string(thing['module'])
args = thing.copy()
check_raw = action in FREEFORM_ACTIONS
args.update(parse_kv(module_args, check_raw=check_raw))
del args['module']
elif isinstance(thing, string_types):
# form is like: action: copy src=a dest=b
(action, args) = self._split_module_string(thing)
check_raw = action in FREEFORM_ACTIONS
args = parse_kv(args, check_raw=check_raw)
else:
# need a dict or a string, so giving up
raise AnsibleParserError("unexpected parameter type in action: %s" % type(thing), obj=self._task_ds)
return (action, args)
def parse(self, skip_action_validation=False):
'''
Given a task in one of the supported forms, parses and returns
returns the action, arguments, and delegate_to values for the
task, dealing with all sorts of levels of fuzziness.
'''
thing = None
action = None
delegate_to = self._task_ds.get('delegate_to', Sentinel)
args = dict()
self.internal_redirect_list = []
# This is the standard YAML form for command-type modules. We grab
# the args and pass them in as additional arguments, which can/will
# be overwritten via dict updates from the other arg sources below
additional_args = self._task_ds.get('args', dict())
# We can have one of action, local_action, or module specified
# action
if 'action' in self._task_ds:
# an old school 'action' statement
thing = self._task_ds['action']
action, args = self._normalize_parameters(thing, action=action, additional_args=additional_args)
# local_action
if 'local_action' in self._task_ds:
# local_action is similar but also implies a delegate_to
if action is not None:
raise AnsibleParserError("action and local_action are mutually exclusive", obj=self._task_ds)
thing = self._task_ds.get('local_action', '')
delegate_to = 'localhost'
action, args = self._normalize_parameters(thing, action=action, additional_args=additional_args)
# module: <stuff> is the more new-style invocation
# filter out task attributes so we're only querying unrecognized keys as actions/modules
non_task_ds = dict((k, v) for k, v in iteritems(self._task_ds) if (k not in self._task_attrs) and (not k.startswith('with_')))
# walk the filtered input dictionary to see if we recognize a module name
for item, value in iteritems(non_task_ds):
is_action_candidate = False
if item in BUILTIN_TASKS:
is_action_candidate = True
elif skip_action_validation:
is_action_candidate = True
else:
# If the plugin is resolved and redirected smuggle the list of candidate names via the task attribute 'internal_redirect_list'
context = action_loader.find_plugin_with_context(item, collection_list=self._collection_list)
if not context.resolved:
context = module_loader.find_plugin_with_context(item, collection_list=self._collection_list)
if context.resolved and context.redirect_list:
self.internal_redirect_list = context.redirect_list
elif context.redirect_list:
self.internal_redirect_list = context.redirect_list
is_action_candidate = bool(self.internal_redirect_list)
if is_action_candidate:
# finding more than one module name is a problem
if action is not None:
raise AnsibleParserError("conflicting action statements: %s, %s" % (action, item), obj=self._task_ds)
action = item
thing = value
action, args = self._normalize_parameters(thing, action=action, additional_args=additional_args)
# if we didn't see any module in the task at all, it's not a task really
if action is None:
if non_task_ds: # there was one non-task action, but we couldn't find it
bad_action = list(non_task_ds.keys())[0]
raise AnsibleParserError("couldn't resolve module/action '{0}'. This often indicates a "
"misspelling, missing collection, or incorrect module path.".format(bad_action),
obj=self._task_ds)
else:
raise AnsibleParserError("no module/action detected in task.",
obj=self._task_ds)
elif args.get('_raw_params', '') != '' and action not in RAW_PARAM_MODULES:
templar = Templar(loader=None)
raw_params = args.pop('_raw_params')
if templar.is_template(raw_params):
args['_variable_params'] = raw_params
else:
raise AnsibleParserError("this task '%s' has extra params, which is only allowed in the following modules: %s" % (action,
", ".join(RAW_PARAM_MODULES)),
obj=self._task_ds)
return (action, args, delegate_to)
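# --- Illustrative usage sketch, not part of the original module ---
# A minimal demonstration of how the parser canonicalizes the legacy
# "action: <module> k=v" form into (action, args, delegate_to). It only
# runs where the ansible package is importable; the task dictionary
# below is made up for the example.
if __name__ == '__main__':
    demo_parser = ModuleArgsParser(task_ds={'action': 'copy src=a dest=b'})
    demo_action, demo_args, demo_delegate_to = demo_parser.parse()
    print(demo_action)       # copy
    print(demo_args)         # {'src': 'a', 'dest': 'b'}
    print(demo_delegate_to)  # Sentinel (no delegate_to was given)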
| j-carl/ansible | lib/ansible/parsing/mod_args.py | Python | gpl-3.0 | 13,925 |
# -*- coding: utf-8 -*-
from module.plugins.internal.XFSAccount import XFSAccount
class OpenloadCo(XFSAccount):
__name__ = "OpenloadCo"
__type__ = "account"
__version__ = "0.02"
__status__ = "testing"
__description__ = """Openload.co account plugin"""
__license__ = "GPLv3"
__authors__ = [("Walter Purcaro", "[email protected]")]
PLUGIN_DOMAIN = "openload.co"
| Guidobelix/pyload | module/plugins/accounts/OpenloadCo.py | Python | gpl-3.0 | 413 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
import logging
import re
import subprocess
from unittest import skipIf
from odoo import tools
from . import lint_case
RULES = ('{'
'"no-undef": "error",'
'"no-restricted-globals": ["error", "event", "self"],'
'"no-const-assign": ["error"],'
'"no-debugger": ["error"],'
'"no-dupe-class-members": ["error"]'
'}'
)
PARSER_OPTIONS = '{ecmaVersion: 2019, sourceType: module}'
GLOBAL = ','.join([
'owl',
'odoo',
'$',
'jQuery',
'_',
'Chart',
'fuzzy',
'QWeb2',
'Popover',
'StackTrace',
'QUnit',
'luxon',
'moment',
'py',
'ClipboardJS',
'globalThis',
])
_logger = logging.getLogger(__name__)
try:
eslint = tools.misc.find_in_path('eslint')
except IOError:
eslint = None
@skipIf(eslint is None, "eslint tool not found on this system")
class TestESLint(lint_case.LintCase):
longMessage = True
def test_eslint_version(self):
""" Test that there are no eslint errors in javascript files """
files_to_check = [
p for p in self.iter_module_files('**/static/**/*.js')
if not re.match('.*/libs?/.*', p) # don't check libraries
]
_logger.info('Testing %s js files', len(files_to_check))
# https://eslint.org/docs/user-guide/command-line-interface
cmd = [eslint, '--no-eslintrc', '--env', 'browser', '--env', 'es2017', '--parser-options', PARSER_OPTIONS, '--rule', RULES, '--global', GLOBAL] + files_to_check
process = subprocess.run(cmd, stdout=subprocess.PIPE, stderr=subprocess.DEVNULL, check=False)
self.assertEqual(process.returncode, 0, msg=process.stdout.decode())
| jeremiahyan/odoo | odoo/addons/test_lint/tests/test_eslint.py | Python | gpl-3.0 | 1,842 |
"""
This config file runs the simplest dev environment using sqlite, and db-based
sessions. Assumes structure:
/envroot/
/db # This is where it'll write the database file
/edx-platform # The location of this repo
/log # Where we're going to write log files
"""
# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=W0401, W0614
from .common import *
import os
from path import path
from warnings import filterwarnings, simplefilter
from uuid import uuid4
# mongo connection settings
MONGO_PORT_NUM = int(os.environ.get('EDXAPP_TEST_MONGO_PORT', '27017'))
MONGO_HOST = os.environ.get('EDXAPP_TEST_MONGO_HOST', 'localhost')
os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = 'localhost:8000-9000'
THIS_UUID = uuid4().hex[:5]
# can't test start dates with this True, but on the other hand,
# can test everything else :)
FEATURES['DISABLE_START_DATES'] = True
# Most tests don't use the discussion service, so we turn it off to speed them up.
# Tests that do can enable this flag, but must use the UrlResetMixin class to force urls.py
# to reload. For consistency in user-experience, keep the value of this setting in sync with
# the one in cms/envs/test.py
FEATURES['ENABLE_DISCUSSION_SERVICE'] = False
FEATURES['ENABLE_SERVICE_STATUS'] = True
FEATURES['ENABLE_HINTER_INSTRUCTOR_VIEW'] = True
FEATURES['ENABLE_INSTRUCTOR_LEGACY_DASHBOARD'] = True
FEATURES['ENABLE_SHOPPING_CART'] = True
FEATURES['ENABLE_VERIFIED_CERTIFICATES'] = True
# Enable this feature for course staff grade downloads, to enable acceptance tests
FEATURES['ENABLE_S3_GRADE_DOWNLOADS'] = True
FEATURES['ALLOW_COURSE_STAFF_GRADE_DOWNLOADS'] = True
# Toggles embargo on for testing
FEATURES['EMBARGO'] = True
# Need wiki for courseware views to work. TODO (vshnayder): shouldn't need it.
WIKI_ENABLED = True
# Makes the tests run much faster...
SOUTH_TESTS_MIGRATE = False # To disable migrations and use syncdb instead
# Nose Test Runner
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'
_system = 'lms'
_report_dir = REPO_ROOT / 'reports' / _system
_report_dir.makedirs_p()
NOSE_ARGS = [
'--id-file', REPO_ROOT / '.testids' / _system / 'noseids',
'--xunit-file', _report_dir / 'nosetests.xml',
]
# Local Directories
TEST_ROOT = path("test_root")
# Want static files in the same dir for running on jenkins.
STATIC_ROOT = TEST_ROOT / "staticfiles"
STATUS_MESSAGE_PATH = TEST_ROOT / "status_message.json"
COURSES_ROOT = TEST_ROOT / "data"
DATA_DIR = COURSES_ROOT
COMMON_TEST_DATA_ROOT = COMMON_ROOT / "test" / "data"
# Where the content data is checked out. This may not exist on jenkins.
GITHUB_REPO_ROOT = ENV_ROOT / "data"
USE_I18N = True
LANGUAGE_CODE = 'en' # tests assume they will get English.
XQUEUE_INTERFACE = {
"url": "http://sandbox-xqueue.edx.org",
"django_auth": {
"username": "lms",
"password": "***REMOVED***"
},
"basic_auth": ('anant', 'agarwal'),
}
XQUEUE_WAITTIME_BETWEEN_REQUESTS = 5 # seconds
# Don't rely on a real staff grading backend
MOCK_STAFF_GRADING = True
MOCK_PEER_GRADING = True
# TODO (cpennington): We need to figure out how envs/test.py can inject things
# into common.py so that we don't have to repeat this sort of thing
STATICFILES_DIRS = [
COMMON_ROOT / "static",
PROJECT_ROOT / "static",
]
STATICFILES_DIRS += [
(course_dir, COMMON_TEST_DATA_ROOT / course_dir)
for course_dir in os.listdir(COMMON_TEST_DATA_ROOT)
if os.path.isdir(COMMON_TEST_DATA_ROOT / course_dir)
]
# Avoid having to run collectstatic before the unit test suite
# If we don't add these settings, then Django templates that can't
# find pipelined assets will raise a ValueError.
# http://stackoverflow.com/questions/12816941/unit-testing-with-django-pipeline
STATICFILES_STORAGE='pipeline.storage.NonPackagingPipelineStorage'
PIPELINE_ENABLED=False
update_module_store_settings(
MODULESTORE,
module_store_options={
'fs_root': TEST_ROOT / "data",
},
xml_store_options={
'data_dir': COMMON_TEST_DATA_ROOT,
},
doc_store_settings={
'host': MONGO_HOST,
'port': MONGO_PORT_NUM,
'db': 'test_xmodule',
'collection': 'test_modulestore{0}'.format(THIS_UUID),
},
)
CONTENTSTORE = {
'ENGINE': 'xmodule.contentstore.mongo.MongoContentStore',
'DOC_STORE_CONFIG': {
'host': MONGO_HOST,
'db': 'xcontent',
'port': MONGO_PORT_NUM,
}
}
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': TEST_ROOT / 'db' / 'edx.db'
},
}
CACHES = {
# This is the cache used for most things.
# In staging/prod envs, the sessions also live here.
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'edx_loc_mem_cache',
'KEY_FUNCTION': 'util.memcache.safe_key',
},
# The general cache is what you get if you use our util.cache. It's used for
# things like caching the course.xml file for different A/B test groups.
# We set it to be a DummyCache to force reloading of course.xml in dev.
# In staging environments, we would grab VERSION from data uploaded by the
# push process.
'general': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
'KEY_PREFIX': 'general',
'VERSION': 4,
'KEY_FUNCTION': 'util.memcache.safe_key',
},
'mongo_metadata_inheritance': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': os.path.join(tempfile.gettempdir(), 'mongo_metadata_inheritance'),
'TIMEOUT': 300,
'KEY_FUNCTION': 'util.memcache.safe_key',
},
'loc_cache': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'edx_location_mem_cache',
},
}
# Dummy secret key for dev
SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd'
# hide ratelimit warnings while running tests
filterwarnings('ignore', message='No request passed to the backend, unable to rate-limit')
# Ignore deprecation warnings (so we don't clutter Jenkins builds/production)
# https://docs.python.org/2/library/warnings.html#the-warnings-filter
simplefilter('ignore') # Change to "default" to see the first instance of each hit
# or "error" to convert all into errors
######### Third-party auth ##########
FEATURES['ENABLE_THIRD_PARTY_AUTH'] = True
################################## OPENID #####################################
FEATURES['AUTH_USE_OPENID'] = True
FEATURES['AUTH_USE_OPENID_PROVIDER'] = True
################################## SHIB #######################################
FEATURES['AUTH_USE_SHIB'] = True
FEATURES['SHIB_DISABLE_TOS'] = True
FEATURES['RESTRICT_ENROLL_BY_REG_METHOD'] = True
OPENID_CREATE_USERS = False
OPENID_UPDATE_DETAILS_FROM_SREG = True
OPENID_USE_AS_ADMIN_LOGIN = False
OPENID_PROVIDER_TRUSTED_ROOTS = ['*']
############################## OAUTH2 Provider ################################
FEATURES['ENABLE_OAUTH2_PROVIDER'] = True
########################### External REST APIs #################################
FEATURES['ENABLE_MOBILE_REST_API'] = True
FEATURES['ENABLE_VIDEO_ABSTRACTION_LAYER_API'] = True
###################### Payment ##############################3
# Enable fake payment processing page
FEATURES['ENABLE_PAYMENT_FAKE'] = True
# Configure the payment processor to use the fake processing page
# Since both the fake payment page and the shoppingcart app are using
# the same settings, we can generate this randomly and guarantee
# that they are using the same secret.
from random import choice
import string
RANDOM_SHARED_SECRET = ''.join(
choice(string.letters + string.digits + string.punctuation)
for x in range(250)
)
CC_PROCESSOR_NAME = 'CyberSource2'
CC_PROCESSOR['CyberSource2']['SECRET_KEY'] = RANDOM_SHARED_SECRET
CC_PROCESSOR['CyberSource2']['ACCESS_KEY'] = "0123456789012345678901"
CC_PROCESSOR['CyberSource2']['PROFILE_ID'] = "edx"
CC_PROCESSOR['CyberSource2']['PURCHASE_ENDPOINT'] = "/shoppingcart/payment_fake"
FEATURES['STORE_BILLING_INFO'] = True
########################### SYSADMIN DASHBOARD ################################
FEATURES['ENABLE_SYSADMIN_DASHBOARD'] = True
GIT_REPO_DIR = TEST_ROOT / "course_repos"
################################# CELERY ######################################
CELERY_ALWAYS_EAGER = True
CELERY_RESULT_BACKEND = 'cache'
BROKER_TRANSPORT = 'memory'
############################ STATIC FILES #############################
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
MEDIA_ROOT = TEST_ROOT / "uploads"
MEDIA_URL = "/static/uploads/"
STATICFILES_DIRS.append(("uploads", MEDIA_ROOT))
new_staticfiles_dirs = []
# Strip out any static files that aren't in the repository root
# so that the tests can run with only the edx-platform directory checked out
for static_dir in STATICFILES_DIRS:
# Handle both tuples and non-tuple directory definitions
try:
_, data_dir = static_dir
except ValueError:
data_dir = static_dir
if data_dir.startswith(REPO_ROOT):
new_staticfiles_dirs.append(static_dir)
STATICFILES_DIRS = new_staticfiles_dirs
FILE_UPLOAD_TEMP_DIR = TEST_ROOT / "uploads"
FILE_UPLOAD_HANDLERS = (
'django.core.files.uploadhandler.MemoryFileUploadHandler',
'django.core.files.uploadhandler.TemporaryFileUploadHandler',
)
########################### Server Ports ###################################
# These ports are carefully chosen so that if the browser needs to
# access them, they will be available through the SauceLabs SSH tunnel
LETTUCE_SERVER_PORT = 8003
XQUEUE_PORT = 8040
YOUTUBE_PORT = 8031
LTI_PORT = 8765
VIDEO_SOURCE_PORT = 8777
################### Make tests faster
#http://slacy.com/blog/2012/04/make-your-tests-faster-in-django-1-4/
PASSWORD_HASHERS = (
# 'django.contrib.auth.hashers.PBKDF2PasswordHasher',
# 'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
# 'django.contrib.auth.hashers.BCryptPasswordHasher',
'django.contrib.auth.hashers.SHA1PasswordHasher',
'django.contrib.auth.hashers.MD5PasswordHasher',
# 'django.contrib.auth.hashers.CryptPasswordHasher',
)
### This enables the Metrics tab for the Instructor dashboard ###########
FEATURES['CLASS_DASHBOARD'] = True
################### Make tests quieter
# OpenID spews messages like this to stderr, we don't need to see them:
# Generated checkid_setup request to http://testserver/openid/provider/login/ with association {HMAC-SHA1}{51d49995}{s/kRmA==}
import openid.oidutil
openid.oidutil.log = lambda message, level = 0: None
PLATFORM_NAME = "edX"
SITE_NAME = "edx.org"
# set up some testing for microsites
MICROSITE_CONFIGURATION = {
"test_microsite": {
"domain_prefix": "testmicrosite",
"university": "test_microsite",
"platform_name": "Test Microsite",
"logo_image_url": "test_microsite/images/header-logo.png",
"email_from_address": "[email protected]",
"payment_support_email": "[email protected]",
"ENABLE_MKTG_SITE": False,
"SITE_NAME": "test_microsite.localhost",
"course_org_filter": "TestMicrositeX",
"course_about_show_social_links": False,
"css_overrides_file": "test_microsite/css/test_microsite.css",
"show_partners": False,
"show_homepage_promo_video": False,
"course_index_overlay_text": "This is a Test Microsite Overlay Text.",
"course_index_overlay_logo_file": "test_microsite/images/header-logo.png",
"homepage_overlay_html": "<h1>This is a Test Microsite Overlay HTML</h1>",
"ALWAYS_REDIRECT_HOMEPAGE_TO_DASHBOARD_FOR_AUTHENTICATED_USER": False,
},
"default": {
"university": "default_university",
"domain_prefix": "www",
}
}
MICROSITE_ROOT_DIR = COMMON_ROOT / 'test' / 'test_microsites'
MICROSITE_TEST_HOSTNAME = 'testmicrosite.testserver'
FEATURES['USE_MICROSITES'] = True
# add extra template directory for test-only templates
MAKO_TEMPLATES['main'].extend([
COMMON_ROOT / 'test' / 'templates'
])
######### LinkedIn ########
LINKEDIN_API['COMPANY_ID'] = '0000000'
# Setting for the testing of Software Secure Result Callback
VERIFY_STUDENT["SOFTWARE_SECURE"] = {
"API_ACCESS_KEY": "BBBBBBBBBBBBBBBBBBBB",
"API_SECRET_KEY": "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC",
}
VIDEO_CDN_URL = {
'CN': 'http://api.xuetangx.com/edx/video?s3_url='
}
######### dashboard git log settings #########
MONGODB_LOG = {
'host': MONGO_HOST,
'port': MONGO_PORT_NUM,
'user': '',
'password': '',
'db': 'xlog',
}
| c0710204/edx-platform | lms/envs/test.py | Python | agpl-3.0 | 12,700 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""setup.py: setuptools control."""
import ez_setup
ez_setup.use_setuptools()
from os.path import dirname, abspath, join
from setuptools import setup
BASE_DIR = join(dirname(abspath(__file__)), 'orka/orka.py')
import orka
requires = ['kamaki','paramiko','requests','PyYAML']
# setup
setup(
name = "orka",
packages = ["orka"],
# starts from this main
entry_points = {
"console_scripts": ['orka = orka.orka:main']
},
version = orka.__version__,
description = "Python command line application for creating and deleting Hadoop clusters in ~okeanos.",
install_requires = requires
)
| grnet/e-science | orka/setup.py | Python | agpl-3.0 | 676 |
from sys import version_info
from functools import reduce
from operator import mul
from flask_babel import gettext
if version_info[0] == 3:
unicode = str
keywords = ('min',
'max',
'avg',
'sum',
'prod')
# required answerer function
# can return a list of results (any result type) for a given query
def answer(query):
parts = query.query.split()
if len(parts) < 2:
return []
try:
args = list(map(float, parts[1:]))
    except (TypeError, ValueError):
return []
func = parts[0]
answer = None
if func == b'min':
answer = min(args)
elif func == b'max':
answer = max(args)
elif func == b'avg':
answer = sum(args) / len(args)
elif func == b'sum':
answer = sum(args)
elif func == b'prod':
answer = reduce(mul, args, 1)
if answer is None:
return []
return [{'answer': unicode(answer)}]
# required answerer function
# returns information about the answerer
def self_info():
return {'name': gettext('Statistics functions'),
'description': gettext('Compute {functions} of the arguments').format(functions='/'.join(keywords)),
'examples': ['avg 123 548 2.04 24.2']}
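# --- Illustrative usage sketch, not part of the original module ---
# answer() only looks at a `query` attribute, so a tiny stand-in object
# is enough to exercise it. The keyword comparison above uses byte
# strings, so the raw query is passed as bytes here. The RawQuery class
# below is made up for the example.
if __name__ == '__main__':

    class RawQuery:
        def __init__(self, query):
            self.query = query

    print(answer(RawQuery(b'avg 10 20 30')))  # [{'answer': '20.0'}]
    print(answer(RawQuery(b'prod 2 3 4')))    # [{'answer': '24.0'}]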
| asciimoo/searx | searx/answerers/statistics/answerer.py | Python | agpl-3.0 | 1,243 |
"""
Unit tests for the stem.version.Version parsing and class.
"""
import unittest
import stem.util.system
import stem.version
from stem.version import Version
from test import mocking
TOR_VERSION_OUTPUT = """Mar 22 23:09:37.088 [notice] Tor v0.2.2.35 \
(git-73ff13ab3cc9570d). This is experimental software. Do not rely on it for \
strong anonymity. (Running on Linux i686)
Tor version 0.2.2.35 (git-73ff13ab3cc9570d)."""
class TestVersion(unittest.TestCase):
def tearDown(self):
mocking.revert_mocking()
def test_get_system_tor_version(self):
# Clear the version cache both before and after the test. Without this
# prior results short circuit the system call, and future calls will
# provide this mocked value.
stem.version.VERSION_CACHE = {}
def _mock_call(command):
if command == "tor --version":
return TOR_VERSION_OUTPUT.splitlines()
else:
raise ValueError("stem.util.system.call received an unexpected command: %s" % command)
mocking.mock(stem.util.system.call, _mock_call)
version = stem.version.get_system_tor_version()
self.assert_versions_match(version, 0, 2, 2, 35, None, "git-73ff13ab3cc9570d")
self.assertEqual("73ff13ab3cc9570d", version.git_commit)
stem.version.VERSION_CACHE = {}
def test_parsing(self):
"""
Tests parsing by the Version class constructor.
"""
    # valid versions with various numbers of components in the version
version = Version("0.1.2.3-tag")
self.assert_versions_match(version, 0, 1, 2, 3, "tag", None)
version = Version("0.1.2.3")
self.assert_versions_match(version, 0, 1, 2, 3, None, None)
version = Version("0.1.2-tag")
self.assert_versions_match(version, 0, 1, 2, None, "tag", None)
version = Version("0.1.2")
self.assert_versions_match(version, 0, 1, 2, None, None, None)
# checks an empty tag
version = Version("0.1.2.3-")
self.assert_versions_match(version, 0, 1, 2, 3, "", None)
version = Version("0.1.2-")
self.assert_versions_match(version, 0, 1, 2, None, "", None)
    # check with extra information
version = Version("0.1.2.3-tag (git-73ff13ab3cc9570d)")
self.assert_versions_match(version, 0, 1, 2, 3, "tag", "git-73ff13ab3cc9570d")
self.assertEqual("73ff13ab3cc9570d", version.git_commit)
version = Version("0.1.2.3-tag ()")
self.assert_versions_match(version, 0, 1, 2, 3, "tag", "")
version = Version("0.1.2 (git-73ff13ab3cc9570d)")
self.assert_versions_match(version, 0, 1, 2, None, None, "git-73ff13ab3cc9570d")
# checks invalid version strings
self.assertRaises(ValueError, stem.version.Version, "")
self.assertRaises(ValueError, stem.version.Version, "1.2.3.4nodash")
self.assertRaises(ValueError, stem.version.Version, "1.2.3.a")
self.assertRaises(ValueError, stem.version.Version, "1.2.a.4")
self.assertRaises(ValueError, stem.version.Version, "1x2x3x4")
self.assertRaises(ValueError, stem.version.Version, "12.3")
self.assertRaises(ValueError, stem.version.Version, "1.-2.3")
def test_comparison(self):
"""
    Tests comparison between Version instances.
"""
# check for basic incrementing in each portion
self.assert_version_is_greater("1.1.2.3-tag", "0.1.2.3-tag")
self.assert_version_is_greater("0.2.2.3-tag", "0.1.2.3-tag")
self.assert_version_is_greater("0.1.3.3-tag", "0.1.2.3-tag")
self.assert_version_is_greater("0.1.2.4-tag", "0.1.2.3-tag")
self.assert_version_is_greater("0.1.2.3-ugg", "0.1.2.3-tag")
self.assert_version_is_equal("0.1.2.3-tag", "0.1.2.3-tag")
# check with common tags
self.assert_version_is_greater("0.1.2.3-beta", "0.1.2.3-alpha")
self.assert_version_is_greater("0.1.2.3-rc", "0.1.2.3-beta")
# checks that a missing patch level equals zero
self.assert_version_is_equal("0.1.2", "0.1.2.0")
self.assert_version_is_equal("0.1.2-tag", "0.1.2.0-tag")
# checks for missing patch or status
self.assert_version_is_greater("0.1.2.3-tag", "0.1.2.3")
self.assert_version_is_greater("0.1.2.3-tag", "0.1.2-tag")
self.assert_version_is_greater("0.1.2.3-tag", "0.1.2")
self.assert_version_is_equal("0.1.2.3", "0.1.2.3")
self.assert_version_is_equal("0.1.2", "0.1.2")
def test_nonversion_comparison(self):
"""
Checks that we can be compared with other types.
    In python 3 only equality comparisons work; greater than and less than
    comparisons result in a TypeError.
"""
test_version = Version("0.1.2.3")
self.assertNotEqual(test_version, None)
self.assertNotEqual(test_version, 5)
def test_string(self):
"""
Tests the Version -> string conversion.
"""
# checks conversion with various numbers of arguments
self.assert_string_matches("0.1.2.3-tag")
self.assert_string_matches("0.1.2.3")
self.assert_string_matches("0.1.2")
def test_requirements_greater_than(self):
"""
Checks a VersionRequirements with a single greater_than rule.
"""
requirements = stem.version._VersionRequirements()
requirements.greater_than(Version("0.2.2.36"))
self.assertTrue(Version("0.2.2.36") >= requirements)
self.assertTrue(Version("0.2.2.37") >= requirements)
self.assertTrue(Version("0.2.3.36") >= requirements)
self.assertFalse(Version("0.2.2.35") >= requirements)
self.assertFalse(Version("0.2.1.38") >= requirements)
requirements = stem.version._VersionRequirements()
requirements.greater_than(Version("0.2.2.36"), False)
self.assertFalse(Version("0.2.2.35") >= requirements)
self.assertFalse(Version("0.2.2.36") >= requirements)
self.assertTrue(Version("0.2.2.37") >= requirements)
def test_requirements_less_than(self):
"""
Checks a VersionRequirements with a single less_than rule.
"""
requirements = stem.version._VersionRequirements()
requirements.less_than(Version("0.2.2.36"))
self.assertTrue(Version("0.2.2.36") >= requirements)
self.assertTrue(Version("0.2.2.35") >= requirements)
self.assertTrue(Version("0.2.1.38") >= requirements)
self.assertFalse(Version("0.2.2.37") >= requirements)
self.assertFalse(Version("0.2.3.36") >= requirements)
requirements = stem.version._VersionRequirements()
requirements.less_than(Version("0.2.2.36"), False)
self.assertFalse(Version("0.2.2.37") >= requirements)
self.assertFalse(Version("0.2.2.36") >= requirements)
self.assertTrue(Version("0.2.2.35") >= requirements)
def test_requirements_in_range(self):
"""
Checks a VersionRequirements with a single in_range rule.
"""
requirements = stem.version._VersionRequirements()
requirements.in_range(Version("0.2.2.36"), Version("0.2.2.38"))
self.assertFalse(Version("0.2.2.35") >= requirements)
self.assertTrue(Version("0.2.2.36") >= requirements)
self.assertTrue(Version("0.2.2.37") >= requirements)
self.assertFalse(Version("0.2.2.38") >= requirements)
# rule for 'anything in the 0.2.2.x series'
requirements = stem.version._VersionRequirements()
requirements.in_range(Version("0.2.2.0"), Version("0.2.3.0"))
for index in xrange(0, 100):
self.assertTrue(Version("0.2.2.%i" % index) >= requirements)
def test_requirements_multiple_rules(self):
"""
    Checks that a VersionRequirements acts as the logical 'or' of its rules when it has multiple rules.
"""
# rule to say 'anything but the 0.2.2.x series'
requirements = stem.version._VersionRequirements()
requirements.greater_than(Version("0.2.3.0"))
requirements.less_than(Version("0.2.2.0"), False)
self.assertTrue(Version("0.2.3.0") >= requirements)
self.assertFalse(Version("0.2.2.0") >= requirements)
for index in xrange(0, 100):
self.assertFalse(Version("0.2.2.%i" % index) >= requirements)
def assert_versions_match(self, version, major, minor, micro, patch, status, extra):
"""
    Asserts that the values for a stem.version.Version instance match the given
values.
"""
self.assertEqual(major, version.major)
self.assertEqual(minor, version.minor)
self.assertEqual(micro, version.micro)
self.assertEqual(patch, version.patch)
self.assertEqual(status, version.status)
self.assertEqual(extra, version.extra)
if extra is None:
self.assertEqual(None, version.git_commit)
def assert_version_is_greater(self, first_version, second_version):
"""
    Asserts that the parsed version of the first version is greater than the
second (also checking the inverse).
"""
version1 = Version(first_version)
version2 = Version(second_version)
self.assertEqual(version1 > version2, True)
self.assertEqual(version1 < version2, False)
def assert_version_is_equal(self, first_version, second_version):
"""
Asserts that the parsed version of the first version equals the second.
"""
version1 = Version(first_version)
version2 = Version(second_version)
self.assertEqual(version1, version2)
def assert_string_matches(self, version):
"""
Parses the given version string then checks that its string representation
matches the input.
"""
self.assertEqual(version, str(Version(version)))
| wfn/stem | test/unit/version.py | Python | lgpl-3.0 | 9,206 |
# coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
ExtractorError,
int_or_none,
)
class EaglePlatformIE(InfoExtractor):
_VALID_URL = r'''(?x)
(?:
eagleplatform:(?P<custom_host>[^/]+):|
https?://(?P<host>.+?\.media\.eagleplatform\.com)/index/player\?.*\brecord_id=
)
(?P<id>\d+)
'''
_TESTS = [{
# http://lenta.ru/news/2015/03/06/navalny/
'url': 'http://lentaru.media.eagleplatform.com/index/player?player=new&record_id=227304&player_template_id=5201',
'md5': '0b7994faa2bd5c0f69a3db6db28d078d',
'info_dict': {
'id': '227304',
'ext': 'mp4',
'title': 'Навальный вышел на свободу',
'description': 'md5:d97861ac9ae77377f3f20eaf9d04b4f5',
'thumbnail': 're:^https?://.*\.jpg$',
'duration': 87,
'view_count': int,
'age_limit': 0,
},
}, {
# http://muz-tv.ru/play/7129/
# http://media.clipyou.ru/index/player?record_id=12820&width=730&height=415&autoplay=true
'url': 'eagleplatform:media.clipyou.ru:12820',
'md5': '6c2ebeab03b739597ce8d86339d5a905',
'info_dict': {
'id': '12820',
'ext': 'mp4',
'title': "'O Sole Mio",
'thumbnail': 're:^https?://.*\.jpg$',
'duration': 216,
'view_count': int,
},
}]
def _handle_error(self, response):
status = int_or_none(response.get('status', 200))
if status != 200:
raise ExtractorError(' '.join(response['errors']), expected=True)
def _download_json(self, url_or_request, video_id, note='Downloading JSON metadata'):
response = super(EaglePlatformIE, self)._download_json(url_or_request, video_id, note)
self._handle_error(response)
return response
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
host, video_id = mobj.group('custom_host') or mobj.group('host'), mobj.group('id')
player_data = self._download_json(
'http://%s/api/player_data?id=%s' % (host, video_id), video_id)
media = player_data['data']['playlist']['viewports'][0]['medialist'][0]
title = media['title']
description = media.get('description')
thumbnail = media.get('snapshot')
duration = int_or_none(media.get('duration'))
view_count = int_or_none(media.get('views'))
age_restriction = media.get('age_restriction')
age_limit = None
if age_restriction:
age_limit = 0 if age_restriction == 'allow_all' else 18
m3u8_data = self._download_json(
media['sources']['secure_m3u8']['auto'],
video_id, 'Downloading m3u8 JSON')
formats = self._extract_m3u8_formats(
m3u8_data['data'][0], video_id,
'mp4', entry_protocol='m3u8_native')
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'description': description,
'thumbnail': thumbnail,
'duration': duration,
'view_count': view_count,
'age_limit': age_limit,
'formats': formats,
}
| amishb/youtube-dl | youtube_dl/extractor/eagleplatform.py | Python | unlicense | 3,435 |
import datetime
class Store:
def parse(self,line):
fields=line.split('\t')
self.id = fields[0]
self.name = fields[1]
return self
def __repr__(self):
return "Store: id=%s \t name=%s"%(self.id,self.name)
class Product:
def parse(self,line):
fields=line.split('\t')
self.id = fields[0]
self.name = fields[1]
self.category=fields[2]
return self
def __repr__(self):
return "Product: id=%s \t name=%s"%(self.id,self.name)
class SaleRow:
def parse(self,line):
fields=line.split('\t')
self.day=fields[0] # maybe parse as date? see below:)
# self.day=datetime.datetime.strptime(fields[0],"%Y-%m-%d")
self.store_id=fields[1]
self.product_id=fields[2]
self.quantity=int(fields[3]) # let's parse this
return self
def __repr__(self):
return "SaleRow: day=%s \t store_id=%s \t product_id=%s quantity=%d"%(self.day,self.store_id,self.product_id, self.quantity)
| sk-rai/Intro-to-SPARK-with-Python | code/sales/sales_schema.py | Python | unlicense | 899 |
"""Utilities for defining models
"""
import operator
from typing import Any, Callable, Type
class KeyBasedCompareMixin:
"""Provides comparison capabilities that is based on a key"""
__slots__ = ["_compare_key", "_defining_class"]
def __init__(self, key, defining_class):
# type: (Any, Type[KeyBasedCompareMixin]) -> None
self._compare_key = key
self._defining_class = defining_class
def __hash__(self):
# type: () -> int
return hash(self._compare_key)
def __lt__(self, other):
# type: (Any) -> bool
return self._compare(other, operator.__lt__)
def __le__(self, other):
# type: (Any) -> bool
return self._compare(other, operator.__le__)
def __gt__(self, other):
# type: (Any) -> bool
return self._compare(other, operator.__gt__)
def __ge__(self, other):
# type: (Any) -> bool
return self._compare(other, operator.__ge__)
def __eq__(self, other):
# type: (Any) -> bool
return self._compare(other, operator.__eq__)
def _compare(self, other, method):
# type: (Any, Callable[[Any, Any], bool]) -> bool
if not isinstance(other, self._defining_class):
return NotImplemented
return method(self._compare_key, other._compare_key)
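# --- Illustrative usage sketch, not part of the original module ---
# A hypothetical subclass showing how the mixin delegates all rich
# comparisons to the key passed to __init__. The Candidate class below
# is made up for the example and is not part of pip.
if __name__ == '__main__':

    class Candidate(KeyBasedCompareMixin):
        def __init__(self, version):
            # Compare candidates purely by their version tuple.
            super().__init__(key=version, defining_class=Candidate)

    assert Candidate((1, 0)) < Candidate((2, 0))
    assert Candidate((2, 0)) == Candidate((2, 0))
    # Comparing against an unrelated type returns NotImplemented, so the
    # objects simply compare unequal.
    assert Candidate((1, 0)) != "1.0"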
| google/material-design-icons | update/venv/lib/python3.9/site-packages/pip/_internal/utils/models.py | Python | apache-2.0 | 1,329 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import time
import pytest
from kubernetes_tests.test_base import EXECUTOR, TestBase
@pytest.mark.skipif(EXECUTOR != 'KubernetesExecutor', reason="Only runs on KubernetesExecutor")
class TestKubernetesExecutor(TestBase):
def test_integration_run_dag(self):
dag_id = 'example_kubernetes_executor'
dag_run_id, execution_date = self.start_job_in_kubernetes(dag_id, self.host)
print(f"Found the job with execution_date {execution_date}")
# Wait some time for the operator to complete
self.monitor_task(
host=self.host,
dag_run_id=dag_run_id,
dag_id=dag_id,
task_id='start_task',
expected_final_state='success',
timeout=300,
)
self.ensure_dag_expected_state(
host=self.host,
execution_date=execution_date,
dag_id=dag_id,
expected_final_state='success',
timeout=300,
)
def test_integration_run_dag_with_scheduler_failure(self):
dag_id = 'example_kubernetes_executor'
dag_run_id, execution_date = self.start_job_in_kubernetes(dag_id, self.host)
self._delete_airflow_pod("scheduler")
time.sleep(10) # give time for pod to restart
# Wait some time for the operator to complete
self.monitor_task(
host=self.host,
dag_run_id=dag_run_id,
dag_id=dag_id,
task_id='start_task',
expected_final_state='success',
timeout=300,
)
self.monitor_task(
host=self.host,
dag_run_id=dag_run_id,
dag_id=dag_id,
task_id='other_namespace_task',
expected_final_state='success',
timeout=300,
)
self.ensure_dag_expected_state(
host=self.host,
execution_date=execution_date,
dag_id=dag_id,
expected_final_state='success',
timeout=300,
)
assert self._num_pods_in_namespace('test-namespace') == 0, "failed to delete pods in other namespace"
| Acehaidrey/incubator-airflow | kubernetes_tests/test_kubernetes_executor.py | Python | apache-2.0 | 2,909 |
#
# Copyright (c) 2015 nexB Inc. and others. All rights reserved.
# http://nexb.com and https://github.com/nexB/scancode-toolkit/
# The ScanCode software is licensed under the Apache License version 2.0.
# Data generated with ScanCode require an acknowledgment.
# ScanCode is a trademark of nexB Inc.
#
# You may not use this software except in compliance with the License.
# You may obtain a copy of the License at: http://apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# When you publish or redistribute any data created with ScanCode or any ScanCode
# derivative work, you must accompany this data with the following acknowledgment:
#
# Generated with ScanCode and provided on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, either express or implied. No content created from
# ScanCode should be considered or used as legal advice. Consult an Attorney
# for any legal advice.
# ScanCode is a free software code scanning tool from nexB Inc. and others.
# Visit https://github.com/nexB/scancode-toolkit/ for support and download.
from __future__ import absolute_import
from __future__ import print_function
import codecs
import logging
import json
from collections import OrderedDict
from functools import partial
from commoncode import filetype
from commoncode import fileutils
from packagedcode import models
from packagedcode.utils import parse_repo_url
"""
Handle PHP composer packages, refer to https://getcomposer.org/
"""
logger = logging.getLogger(__name__)
# import sys
# logging.basicConfig(level=logging.DEBUG, stream=sys.stdout)
# logger.setLevel(logging.DEBUG)
class PHPComposerPackage(models.Package):
metafiles = ('composer.json')
filetypes = ('.json',)
mimetypes = ('application/json')
repo_types = (models.repo_phpcomposer,)
type = models.StringType(default='phpcomposer')
primary_language = models.StringType(default='PHP')
@classmethod
def recognize(cls, location):
return parse(location)
def is_phpcomposer_json(location):
return (filetype.is_file(location)
and fileutils.file_name(location).lower() == 'composer.json')
def parse(location):
"""
Return a Package object from a composer.json file or None.
"""
if not is_phpcomposer_json(location):
return
# mapping of top level composer.json items to the Package object field name
plain_fields = OrderedDict([
('name', 'name'),
('description', 'summary'),
('keywords', 'keywords'),
('version', 'version'),
('homepage', 'homepage_url'),
])
    # mapping of top level composer.json items to a function that accepts the
    # composer.json element value and the Package object and updates that Package
field_mappers = OrderedDict([
('authors', author_mapper),
('license', licensing_mapper),
('require', dependencies_mapper),
('require-dev', dev_dependencies_mapper),
('repositories', repository_mapper),
('support', support_mapper),
])
with codecs.open(location, encoding='utf-8') as loc:
data = json.load(loc, object_pairs_hook=OrderedDict)
if not data.get('name') or not data.get('description'):
# a composer.json without name and description is not a usable PHP composer package
# name and description fields are required: https://getcomposer.org/doc/04-schema.md#name
return
package = PHPComposerPackage()
# a composer.json is at the root of a PHP composer package
base_dir = fileutils.parent_directory(location)
package.location = base_dir
package.metafile_locations = [location]
for source, target in plain_fields.items():
value = data.get(source)
if value:
if isinstance(value, basestring):
value = value.strip()
if value:
setattr(package, target, value)
for source, func in field_mappers.items():
logger.debug('parse: %(source)r, %(func)r' % locals())
value = data.get(source)
if value:
if isinstance(value, basestring):
value = value.strip()
if value:
func(value, package)
vendor_mapper(package) # Parse vendor from name value
return package
def licensing_mapper(licenses, package):
"""
Update package licensing and return package.
Licensing data structure has evolved over time and is a tad messy.
https://getcomposer.org/doc/04-schema.md#license
licenses is either:
- a string with:
- an SPDX id or expression { "license": "(LGPL-2.1 or GPL-3.0+)" }
- array:
"license": [
"LGPL-2.1",
"GPL-3.0+"
]
"""
if not licenses:
return package
if isinstance(licenses, basestring):
package.asserted_licenses.append(models.AssertedLicense(license=licenses))
elif isinstance(licenses, list):
"""
"license": [
"LGPL-2.1",
"GPL-3.0+"
]
"""
for lic in licenses:
if isinstance(lic, basestring):
package.asserted_licenses.append(models.AssertedLicense(license=lic))
else:
# use the bare repr
if lic:
package.asserted_licenses.append(models.AssertedLicense(license=repr(lic)))
else:
# use the bare repr
package.asserted_licenses.append(models.AssertedLicense(license=repr(licenses)))
return package
def author_mapper(authors_content, package):
"""
Update package authors and return package.
https://getcomposer.org/doc/04-schema.md#authors
"""
authors = []
for name, email, url in parse_person(authors_content):
authors.append(models.Party(type=models.party_person, name=name, email=email, url=url))
package.authors = authors
return package
def support_mapper(support, package):
"""
Update support and bug tracking url.
https://getcomposer.org/doc/04-schema.md#support
"""
package.support_contacts = [support.get('email')]
package.bug_tracking_url = support.get('issues')
package.code_view_url = support.get('source')
return package
def vendor_mapper(package):
"""
    Vendor is part of the name element.
https://getcomposer.org/doc/04-schema.md#name
"""
name = package.name
if name and '/' in name:
vendors = name.split('/')
if vendors[0]:
package.vendors = [models.Party(name=vendors[0])]
return package
def repository_mapper(repos, package):
"""
https://getcomposer.org/doc/04-schema.md#repositories
"repositories": [
{
"type": "composer",
"url": "http://packages.example.com"
},
{
"type": "composer",
"url": "https://packages.example.com",
"options": {
"ssl": {
"verify_peer": "true"
}
}
},
{
"type": "vcs",
"url": "https://github.com/Seldaek/monolog"
},
{
"type": "pear",
"url": "https://pear2.php.net"
},
{
"type": "package",
"package": {
"name": "smarty/smarty",
"version": "3.1.7",
"dist": {
"url": "http://www.smarty.net/files/Smarty-3.1.7.zip",
"type": "zip"
},
"source": {
"url": "https://smarty-php.googlecode.com/svn/",
"type": "svn",
"reference": "tags/Smarty_3_1_7/distribution/"
}
}
}
]
"""
if not repos:
return package
if isinstance(repos, basestring):
package.vcs_repository = parse_repo_url(repos)
elif isinstance(repos, list):
for repo in repos:
if repo.get('type') == 'vcs':
# vcs type includes git, svn, fossil or hg.
# refer to https://getcomposer.org/doc/05-repositories.md#vcs
repo_url = repo.get('url')
if repo_url.startswith('svn') or 'subversion.apache.org' in repo_url:
package.vcs_tool = 'svn'
elif repo_url.startswith('hg') or 'mercurial.selenic.com' in repo_url:
package.vcs_tool = 'hg'
elif repo_url.startswith('fossil') or 'fossil-scm.org' in repo_url:
package.vcs_tool = 'fossil'
else:
package.vcs_tool = 'git'
package.vcs_repository = parse_repo_url(repo.get('url'))
return package
def deps_mapper(deps, package, field_name):
"""
    Handle deps such as dependencies (composer 'require') and devDependencies
    ('require-dev'): update the matching dependency type on the package and return the package.
https://getcomposer.org/doc/04-schema.md#package-links
"""
dep_types = {
'dependencies': models.dep_runtime,
'devDependencies': models.dep_dev,
}
resolved_type = dep_types[field_name]
dependencies = []
for name, version_constraint in deps.items():
dep = models.Dependency(name=name, version_constraint=version_constraint)
dependencies.append(dep)
if resolved_type in package.dependencies:
package.dependencies[resolved_type].extend(dependencies)
else:
package.dependencies[resolved_type] = dependencies
return package
dependencies_mapper = partial(deps_mapper, field_name='dependencies')
dev_dependencies_mapper = partial(deps_mapper, field_name='devDependencies')
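# Illustration (not in the original module): functools.partial merely pre-binds
# the dependency type, so for a hypothetical `package` object
#     dev_dependencies_mapper({'phpunit/phpunit': '3.7.*'}, package)
# is the same call as
#     deps_mapper({'phpunit/phpunit': '3.7.*'}, package, field_name='devDependencies')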
def parse_person(persons):
"""
https://getcomposer.org/doc/04-schema.md#authors
A "person" is an object with a "name" field and optionally "url" and "email".
Yield a name, email, url tuple for a person object
A person can be in the form:
"authors": [
{
"name": "Nils Adermann",
"email": "[email protected]",
"homepage": "http://www.naderman.de",
"role": "Developer"
},
{
"name": "Jordi Boggiano",
"email": "[email protected]",
"homepage": "http://seld.be",
"role": "Developer"
}
]
    Only the name, email and homepage fields of each person are used here; other
    fields such as "role" are ignored.
"""
if isinstance(persons, list):
for person in persons:
# ensure we have our three values
name = person.get('name')
email = person.get('email')
url = person.get('homepage')
yield name and name.strip(), email and email.strip('<> '), url and url.strip('() ')
else:
        raise Exception('Incorrect PHP composer composer.json person: %(persons)r' % locals())
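# Illustration (not in the original module): for the docstring example above,
# parse_person() yields plain (name, email, url) tuples, e.g.
#     >>> list(parse_person([{'name': 'Nils Adermann',
#     ...                     'email': '[email protected]',
#     ...                     'homepage': 'http://www.naderman.de'}]))
#     [('Nils Adermann', '[email protected]', 'http://www.naderman.de')]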
| yasharmaster/scancode-toolkit | src/packagedcode/phpcomposer.py | Python | apache-2.0 | 11,144 |
# Copyright 2012 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# @author: Ryota MIBU
import contextlib
import random
from neutron.common import constants as q_const
from neutron.openstack.common import uuidutils
from neutron.plugins.nec.common import exceptions as nexc
from neutron.plugins.nec.db import api as ndb
from neutron.plugins.nec.db import models as nmodels # noqa
from neutron.tests.unit.nec import test_nec_plugin
class NECPluginV2DBTestBase(test_nec_plugin.NecPluginV2TestCase):
"""Class conisting of NECPluginV2 DB unit tests."""
def setUp(self):
"""Setup for tests."""
super(NECPluginV2DBTestBase, self).setUp()
self.session = self.context.session
def get_ofc_item_random_params(self):
"""create random parameters for ofc_item test."""
ofc_id = uuidutils.generate_uuid()
neutron_id = uuidutils.generate_uuid()
none = uuidutils.generate_uuid()
return ofc_id, neutron_id, none
@contextlib.contextmanager
def portinfo_random_params(self):
with self.port() as port:
params = {'port_id': port['port']['id'],
'datapath_id': hex(random.randint(0, 0xffffffff)),
'port_no': random.randint(1, 100),
'vlan_id': random.randint(q_const.MIN_VLAN_TAG,
q_const.MAX_VLAN_TAG),
'mac': ':'.join(["%02x" % random.randint(0, 0xff)
for x in range(6)])
}
yield params
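    # Illustration (not part of the original tests): portinfo_random_params()
    # yields a dict shaped roughly like
    #     {'port_id': '<uuid of the created port>',
    #      'datapath_id': '0x3ad1f6e2',   # hex string
    #      'port_no': 42,
    #      'vlan_id': 1024,
    #      'mac': 'a1:b2:c3:d4:e5:f6'}
    # which the PortInfo tests below pass straight to ndb.add_portinfo().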
class NECPluginV2DBOfcMappingTest(NECPluginV2DBTestBase):
def test_add_ofc_item(self):
"""test add OFC item."""
o, q, n = self.get_ofc_item_random_params()
tenant = ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
self.assertEqual(tenant.ofc_id, o)
self.assertEqual(tenant.neutron_id, q)
def test_add_ofc_item_duplicate_entry(self):
o, q, n = self.get_ofc_item_random_params()
ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
self.assertRaises(nexc.NECDBException,
ndb.add_ofc_item,
self.session, 'ofc_tenant', q, o)
def test_get_ofc_item(self):
o, q, n = self.get_ofc_item_random_params()
ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
tenant = ndb.get_ofc_item(self.session, 'ofc_tenant', q)
self.assertEqual(tenant.ofc_id, o)
self.assertEqual(tenant.neutron_id, q)
def test_get_ofc_item_for_nonexisting_entry(self):
self.assertIsNone(
ndb.get_ofc_item(self.session, 'ofc_tenant', 'non-exist-id'))
def test_get_ofc_id(self):
o, q, n = self.get_ofc_item_random_params()
ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
tenant_id = ndb.get_ofc_id(self.session, 'ofc_tenant', q)
self.assertEqual(tenant_id, o)
def test_get_ofc_id_for_nonexisting_entry(self):
self.assertRaises(nexc.OFCMappingNotFound,
ndb.get_ofc_id,
self.session, 'ofc_tenant', 'non-exist-id')
def test_exists_ofc_item(self):
o, q, n = self.get_ofc_item_random_params()
self.assertFalse(ndb.exists_ofc_item(self.session, 'ofc_tenant', q))
ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
self.assertTrue(ndb.exists_ofc_item(self.session, 'ofc_tenant', q))
ndb.del_ofc_item(self.session, 'ofc_tenant', q)
self.assertFalse(ndb.exists_ofc_item(self.session, 'ofc_tenant', q))
def test_find_ofc_item(self):
o, q, n = self.get_ofc_item_random_params()
ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
tenant = ndb.find_ofc_item(self.session, 'ofc_tenant', o)
self.assertEqual(tenant.ofc_id, o)
self.assertEqual(tenant.neutron_id, q)
def test_find_ofc_item_for_nonexisting_entry(self):
self.assertIsNone(
ndb.find_ofc_item(self.session, 'ofc_tenant', 'non-existi-id'))
def test_del_ofc_item(self):
o, q, n = self.get_ofc_item_random_params()
ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
self.assertTrue(ndb.del_ofc_item(self.session, 'ofc_tenant', q))
self.assertIsNone(ndb.get_ofc_item(self.session, 'ofc_tenant', q))
self.assertIsNone(ndb.find_ofc_item(self.session, 'ofc_tenant', o))
def test_del_ofc_item_for_nonexisting_entry(self):
self.assertFalse(
ndb.del_ofc_item(self.session, 'ofc_tenant', 'non-existi-id'))
class NECPluginV2DBPortInfoTest(NECPluginV2DBTestBase):
def _compare_portinfo(self, portinfo, expected):
self.assertEqual(portinfo.id, expected['port_id'])
self.assertEqual(portinfo.datapath_id, expected['datapath_id'])
self.assertEqual(portinfo.port_no, expected['port_no'])
self.assertEqual(portinfo.vlan_id, expected['vlan_id'])
self.assertEqual(portinfo.mac, expected['mac'])
def _add_portinfo(self, session, params):
return ndb.add_portinfo(session, params['port_id'],
params['datapath_id'], params['port_no'],
params['vlan_id'], params['mac'])
def testd_add_portinfo(self):
"""test add portinfo."""
with self.portinfo_random_params() as params:
portinfo = self._add_portinfo(self.session, params)
self._compare_portinfo(portinfo, params)
exception_raised = False
try:
self._add_portinfo(self.session, params)
except nexc.NECDBException:
exception_raised = True
self.assertTrue(exception_raised)
def teste_get_portinfo(self):
"""test get portinfo."""
with self.portinfo_random_params() as params:
self._add_portinfo(self.session, params)
portinfo = ndb.get_portinfo(self.session, params['port_id'])
self._compare_portinfo(portinfo, params)
nonexist_id = uuidutils.generate_uuid()
portinfo_none = ndb.get_portinfo(self.session, nonexist_id)
self.assertIsNone(portinfo_none)
def testf_del_portinfo(self):
"""test delete portinfo."""
with self.portinfo_random_params() as params:
self._add_portinfo(self.session, params)
portinfo = ndb.get_portinfo(self.session, params['port_id'])
self.assertEqual(portinfo.id, params['port_id'])
ndb.del_portinfo(self.session, params['port_id'])
portinfo_none = ndb.get_portinfo(self.session, params['port_id'])
self.assertIsNone(portinfo_none)
| virtualopensystems/neutron | neutron/tests/unit/nec/test_db.py | Python | apache-2.0 | 7,264 |
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Extensions supporting OAuth1."""
from oslo_config import cfg
from oslo_serialization import jsonutils
from oslo_utils import timeutils
from keystone.common import controller
from keystone.common import dependency
from keystone.common import wsgi
from keystone.contrib.oauth1 import core as oauth1
from keystone.contrib.oauth1 import validator
from keystone import exception
from keystone.i18n import _
from keystone.models import token_model
from keystone import notifications
CONF = cfg.CONF
@notifications.internal(notifications.INVALIDATE_USER_OAUTH_CONSUMER_TOKENS,
resource_id_arg_index=0)
def _emit_user_oauth_consumer_token_invalidate(payload):
    # This is a special case notification that expects the payload to be a dict
    # containing the user_id and the consumer_id. This is so that the token
    # provider can invalidate any tokens in the token persistence if
    # token persistence is enabled.
pass
@dependency.requires('oauth_api', 'token_provider_api')
class ConsumerCrudV3(controller.V3Controller):
collection_name = 'consumers'
member_name = 'consumer'
@classmethod
def base_url(cls, context, path=None):
"""Construct a path and pass it to V3Controller.base_url method."""
# NOTE(stevemar): Overriding path to /OS-OAUTH1/consumers so that
# V3Controller.base_url handles setting the self link correctly.
path = '/OS-OAUTH1/' + cls.collection_name
return controller.V3Controller.base_url(context, path=path)
@controller.protected()
def create_consumer(self, context, consumer):
ref = self._assign_unique_id(self._normalize_dict(consumer))
initiator = notifications._get_request_audit_info(context)
consumer_ref = self.oauth_api.create_consumer(ref, initiator)
return ConsumerCrudV3.wrap_member(context, consumer_ref)
@controller.protected()
def update_consumer(self, context, consumer_id, consumer):
self._require_matching_id(consumer_id, consumer)
ref = self._normalize_dict(consumer)
self._validate_consumer_ref(ref)
initiator = notifications._get_request_audit_info(context)
ref = self.oauth_api.update_consumer(consumer_id, ref, initiator)
return ConsumerCrudV3.wrap_member(context, ref)
@controller.protected()
def list_consumers(self, context):
ref = self.oauth_api.list_consumers()
return ConsumerCrudV3.wrap_collection(context, ref)
@controller.protected()
def get_consumer(self, context, consumer_id):
ref = self.oauth_api.get_consumer(consumer_id)
return ConsumerCrudV3.wrap_member(context, ref)
@controller.protected()
def delete_consumer(self, context, consumer_id):
user_token_ref = token_model.KeystoneToken(
token_id=context['token_id'],
token_data=self.token_provider_api.validate_token(
context['token_id']))
payload = {'user_id': user_token_ref.user_id,
'consumer_id': consumer_id}
_emit_user_oauth_consumer_token_invalidate(payload)
initiator = notifications._get_request_audit_info(context)
self.oauth_api.delete_consumer(consumer_id, initiator)
def _validate_consumer_ref(self, consumer):
if 'secret' in consumer:
msg = _('Cannot change consumer secret')
raise exception.ValidationError(message=msg)
@dependency.requires('oauth_api')
class AccessTokenCrudV3(controller.V3Controller):
collection_name = 'access_tokens'
member_name = 'access_token'
@classmethod
def _add_self_referential_link(cls, context, ref):
# NOTE(lwolf): overriding method to add proper path to self link
ref.setdefault('links', {})
path = '/users/%(user_id)s/OS-OAUTH1/access_tokens' % {
'user_id': cls._get_user_id(ref)
}
ref['links']['self'] = cls.base_url(context, path) + '/' + ref['id']
@controller.protected()
def get_access_token(self, context, user_id, access_token_id):
access_token = self.oauth_api.get_access_token(access_token_id)
if access_token['authorizing_user_id'] != user_id:
raise exception.NotFound()
access_token = self._format_token_entity(context, access_token)
return AccessTokenCrudV3.wrap_member(context, access_token)
@controller.protected()
def list_access_tokens(self, context, user_id):
auth_context = context.get('environment',
{}).get('KEYSTONE_AUTH_CONTEXT', {})
if auth_context.get('is_delegated_auth'):
raise exception.Forbidden(
_('Cannot list request tokens'
' with a token issued via delegation.'))
refs = self.oauth_api.list_access_tokens(user_id)
formatted_refs = ([self._format_token_entity(context, x)
for x in refs])
return AccessTokenCrudV3.wrap_collection(context, formatted_refs)
@controller.protected()
def delete_access_token(self, context, user_id, access_token_id):
access_token = self.oauth_api.get_access_token(access_token_id)
consumer_id = access_token['consumer_id']
payload = {'user_id': user_id, 'consumer_id': consumer_id}
_emit_user_oauth_consumer_token_invalidate(payload)
initiator = notifications._get_request_audit_info(context)
return self.oauth_api.delete_access_token(
user_id, access_token_id, initiator)
@staticmethod
def _get_user_id(entity):
return entity.get('authorizing_user_id', '')
def _format_token_entity(self, context, entity):
formatted_entity = entity.copy()
access_token_id = formatted_entity['id']
user_id = self._get_user_id(formatted_entity)
if 'role_ids' in entity:
formatted_entity.pop('role_ids')
if 'access_secret' in entity:
formatted_entity.pop('access_secret')
url = ('/users/%(user_id)s/OS-OAUTH1/access_tokens/%(access_token_id)s'
'/roles' % {'user_id': user_id,
'access_token_id': access_token_id})
formatted_entity.setdefault('links', {})
formatted_entity['links']['roles'] = (self.base_url(context, url))
return formatted_entity
@dependency.requires('oauth_api', 'role_api')
class AccessTokenRolesV3(controller.V3Controller):
collection_name = 'roles'
member_name = 'role'
@controller.protected()
def list_access_token_roles(self, context, user_id, access_token_id):
access_token = self.oauth_api.get_access_token(access_token_id)
if access_token['authorizing_user_id'] != user_id:
raise exception.NotFound()
authed_role_ids = access_token['role_ids']
authed_role_ids = jsonutils.loads(authed_role_ids)
refs = ([self._format_role_entity(x) for x in authed_role_ids])
return AccessTokenRolesV3.wrap_collection(context, refs)
@controller.protected()
def get_access_token_role(self, context, user_id,
access_token_id, role_id):
access_token = self.oauth_api.get_access_token(access_token_id)
if access_token['authorizing_user_id'] != user_id:
raise exception.Unauthorized(_('User IDs do not match'))
authed_role_ids = access_token['role_ids']
authed_role_ids = jsonutils.loads(authed_role_ids)
for authed_role_id in authed_role_ids:
if authed_role_id == role_id:
role = self._format_role_entity(role_id)
return AccessTokenRolesV3.wrap_member(context, role)
raise exception.RoleNotFound(_('Could not find role'))
def _format_role_entity(self, role_id):
role = self.role_api.get_role(role_id)
formatted_entity = role.copy()
if 'description' in role:
formatted_entity.pop('description')
if 'enabled' in role:
formatted_entity.pop('enabled')
return formatted_entity
@dependency.requires('assignment_api', 'oauth_api',
'resource_api', 'token_provider_api')
class OAuthControllerV3(controller.V3Controller):
collection_name = 'not_used'
member_name = 'not_used'
def create_request_token(self, context):
headers = context['headers']
oauth_headers = oauth1.get_oauth_headers(headers)
consumer_id = oauth_headers.get('oauth_consumer_key')
requested_project_id = headers.get('Requested-Project-Id')
if not consumer_id:
raise exception.ValidationError(
attribute='oauth_consumer_key', target='request')
if not requested_project_id:
raise exception.ValidationError(
attribute='requested_project_id', target='request')
# NOTE(stevemar): Ensure consumer and requested project exist
self.resource_api.get_project(requested_project_id)
self.oauth_api.get_consumer(consumer_id)
url = self.base_url(context, context['path'])
req_headers = {'Requested-Project-Id': requested_project_id}
req_headers.update(headers)
request_verifier = oauth1.RequestTokenEndpoint(
request_validator=validator.OAuthValidator(),
token_generator=oauth1.token_generator)
h, b, s = request_verifier.create_request_token_response(
url,
http_method='POST',
body=context['query_string'],
headers=req_headers)
if (not b) or int(s) > 399:
msg = _('Invalid signature')
raise exception.Unauthorized(message=msg)
request_token_duration = CONF.oauth1.request_token_duration
initiator = notifications._get_request_audit_info(context)
token_ref = self.oauth_api.create_request_token(consumer_id,
requested_project_id,
request_token_duration,
initiator)
result = ('oauth_token=%(key)s&oauth_token_secret=%(secret)s'
% {'key': token_ref['id'],
'secret': token_ref['request_secret']})
if CONF.oauth1.request_token_duration:
expiry_bit = '&oauth_expires_at=%s' % token_ref['expires_at']
result += expiry_bit
        headers = [('Content-Type', 'application/x-www-form-urlencoded')]
response = wsgi.render_response(result,
status=(201, 'Created'),
headers=headers)
return response
def create_access_token(self, context):
headers = context['headers']
oauth_headers = oauth1.get_oauth_headers(headers)
consumer_id = oauth_headers.get('oauth_consumer_key')
request_token_id = oauth_headers.get('oauth_token')
oauth_verifier = oauth_headers.get('oauth_verifier')
if not consumer_id:
raise exception.ValidationError(
attribute='oauth_consumer_key', target='request')
if not request_token_id:
raise exception.ValidationError(
attribute='oauth_token', target='request')
if not oauth_verifier:
raise exception.ValidationError(
attribute='oauth_verifier', target='request')
req_token = self.oauth_api.get_request_token(
request_token_id)
expires_at = req_token['expires_at']
if expires_at:
now = timeutils.utcnow()
expires = timeutils.normalize_time(
timeutils.parse_isotime(expires_at))
if now > expires:
raise exception.Unauthorized(_('Request token is expired'))
url = self.base_url(context, context['path'])
access_verifier = oauth1.AccessTokenEndpoint(
request_validator=validator.OAuthValidator(),
token_generator=oauth1.token_generator)
h, b, s = access_verifier.create_access_token_response(
url,
http_method='POST',
body=context['query_string'],
headers=headers)
params = oauth1.extract_non_oauth_params(b)
if len(params) != 0:
msg = _('There should not be any non-oauth parameters')
raise exception.Unauthorized(message=msg)
if req_token['consumer_id'] != consumer_id:
msg = _('provided consumer key does not match stored consumer key')
raise exception.Unauthorized(message=msg)
if req_token['verifier'] != oauth_verifier:
msg = _('provided verifier does not match stored verifier')
raise exception.Unauthorized(message=msg)
if req_token['id'] != request_token_id:
msg = _('provided request key does not match stored request key')
raise exception.Unauthorized(message=msg)
if not req_token.get('authorizing_user_id'):
msg = _('Request Token does not have an authorizing user id')
raise exception.Unauthorized(message=msg)
access_token_duration = CONF.oauth1.access_token_duration
initiator = notifications._get_request_audit_info(context)
token_ref = self.oauth_api.create_access_token(request_token_id,
access_token_duration,
initiator)
result = ('oauth_token=%(key)s&oauth_token_secret=%(secret)s'
% {'key': token_ref['id'],
'secret': token_ref['access_secret']})
if CONF.oauth1.access_token_duration:
expiry_bit = '&oauth_expires_at=%s' % (token_ref['expires_at'])
result += expiry_bit
        headers = [('Content-Type', 'application/x-www-form-urlencoded')]
response = wsgi.render_response(result,
status=(201, 'Created'),
headers=headers)
return response
@controller.protected()
def authorize_request_token(self, context, request_token_id, roles):
"""An authenticated user is going to authorize a request token.
As a security precaution, the requested roles must match those in
the request token. Because this is in a CLI-only world at the moment,
there is not another easy way to make sure the user knows which roles
are being requested before authorizing.
"""
auth_context = context.get('environment',
{}).get('KEYSTONE_AUTH_CONTEXT', {})
if auth_context.get('is_delegated_auth'):
raise exception.Forbidden(
_('Cannot authorize a request token'
' with a token issued via delegation.'))
req_token = self.oauth_api.get_request_token(request_token_id)
expires_at = req_token['expires_at']
if expires_at:
now = timeutils.utcnow()
expires = timeutils.normalize_time(
timeutils.parse_isotime(expires_at))
if now > expires:
raise exception.Unauthorized(_('Request token is expired'))
# put the roles in a set for easy comparison
authed_roles = set()
for role in roles:
authed_roles.add(role['id'])
# verify the authorizing user has the roles
user_token = token_model.KeystoneToken(
token_id=context['token_id'],
token_data=self.token_provider_api.validate_token(
context['token_id']))
user_id = user_token.user_id
project_id = req_token['requested_project_id']
user_roles = self.assignment_api.get_roles_for_user_and_project(
user_id, project_id)
cred_set = set(user_roles)
if not cred_set.issuperset(authed_roles):
msg = _('authorizing user does not have role required')
raise exception.Unauthorized(message=msg)
# create list of just the id's for the backend
role_list = list(authed_roles)
# verify the user has the project too
req_project_id = req_token['requested_project_id']
user_projects = self.assignment_api.list_projects_for_user(user_id)
for user_project in user_projects:
if user_project['id'] == req_project_id:
break
else:
msg = _("User is not a member of the requested project")
raise exception.Unauthorized(message=msg)
# finally authorize the token
authed_token = self.oauth_api.authorize_request_token(
request_token_id, user_id, role_list)
to_return = {'token': {'oauth_verifier': authed_token['verifier']}}
return to_return
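# Illustration (not part of the original module): the request-token and
# access-token endpoints above return a form-encoded body of the shape
#     oauth_token=<id>&oauth_token_secret=<secret>[&oauth_expires_at=<iso8601>]
# which an OAuth1 client can turn back into a mapping, e.g.
#     >>> from six.moves.urllib import parse as urlparse
#     >>> urlparse.parse_qs('oauth_token=abc&oauth_token_secret=def')
#     {'oauth_token': ['abc'], 'oauth_token_secret': ['def']}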
| jumpstarter-io/keystone | keystone/contrib/oauth1/controllers.py | Python | apache-2.0 | 17,478 |
# -*- coding: utf-8 -*-
###############################################################################
#
# DeleteOrganization
# Deletes an existing organization.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class DeleteOrganization(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the DeleteOrganization Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(DeleteOrganization, self).__init__(temboo_session, '/Library/Zendesk/Organizations/DeleteOrganization')
def new_input_set(self):
return DeleteOrganizationInputSet()
def _make_result_set(self, result, path):
return DeleteOrganizationResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return DeleteOrganizationChoreographyExecution(session, exec_id, path)
class DeleteOrganizationInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the DeleteOrganization
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_Email(self, value):
"""
Set the value of the Email input for this Choreo. ((required, string) The email address you use to login to your Zendesk account.)
"""
super(DeleteOrganizationInputSet, self)._set_input('Email', value)
def set_ID(self, value):
"""
Set the value of the ID input for this Choreo. ((required, string) ID of the organization to delete.)
"""
super(DeleteOrganizationInputSet, self)._set_input('ID', value)
def set_Password(self, value):
"""
Set the value of the Password input for this Choreo. ((required, password) Your Zendesk password.)
"""
super(DeleteOrganizationInputSet, self)._set_input('Password', value)
def set_Server(self, value):
"""
Set the value of the Server input for this Choreo. ((required, string) Your Zendesk domain and subdomain (e.g., temboocare.zendesk.com).)
"""
super(DeleteOrganizationInputSet, self)._set_input('Server', value)
class DeleteOrganizationResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the DeleteOrganization Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Zendesk.)
"""
return self._output.get('Response', None)
def get_ResponseStatusCode(self):
"""
Retrieve the value for the "ResponseStatusCode" output from this Choreo execution. ((integer) The response status code returned from Zendesk.)
"""
return self._output.get('ResponseStatusCode', None)
class DeleteOrganizationChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return DeleteOrganizationResultSet(response, path)
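# Illustrative usage (not part of the Temboo-generated module).  The session
# import path, the credential placeholders and execute_with_results() follow
# common Temboo SDK conventions and are assumptions here:
#
#     from temboo.core.session import TembooSession
#
#     session = TembooSession('ACCOUNT_NAME', 'APP_KEY_NAME', 'APP_KEY_VALUE')
#     choreo = DeleteOrganization(session)
#     inputs = choreo.new_input_set()
#     inputs.set_Email('[email protected]')
#     inputs.set_Password('zendesk-password')
#     inputs.set_Server('example.zendesk.com')
#     inputs.set_ID('12345')
#     results = choreo.execute_with_results(inputs)
#     print(results.get_ResponseStatusCode())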
| jordanemedlock/psychtruths | temboo/core/Library/Zendesk/Organizations/DeleteOrganization.py | Python | apache-2.0 | 4,028 |
from feedly.feed_managers.base import Feedly
from feedly.feeds.base import UserBaseFeed
from feedly.feeds.redis import RedisFeed
from feedly.tests.managers.base import BaseFeedlyTest
import pytest
class RedisUserBaseFeed(UserBaseFeed, RedisFeed):
pass
class RedisFeedly(Feedly):
feed_classes = {
'feed': RedisFeed
}
user_feed_class = RedisUserBaseFeed
@pytest.mark.usefixtures("redis_reset")
class RedisFeedlyTest(BaseFeedlyTest):
manager_class = RedisFeedly
| Anislav/Stream-Framework | feedly/tests/managers/redis.py | Python | bsd-3-clause | 493 |
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This module contains classes that help to emulate xcodebuild behavior on top of
other build systems, such as make and ninja.
"""
import gyp.common
import os.path
import re
import shlex
import subprocess
import sys
from gyp.common import GypError
class XcodeSettings(object):
"""A class that understands the gyp 'xcode_settings' object."""
# Populated lazily by _SdkPath(). Shared by all XcodeSettings, so cached
# at class-level for efficiency.
_sdk_path_cache = {}
def __init__(self, spec):
self.spec = spec
self.isIOS = False
# Per-target 'xcode_settings' are pushed down into configs earlier by gyp.
# This means self.xcode_settings[config] always contains all settings
# for that config -- the per-target settings as well. Settings that are
# the same for all configs are implicitly per-target settings.
self.xcode_settings = {}
configs = spec['configurations']
for configname, config in configs.iteritems():
self.xcode_settings[configname] = config.get('xcode_settings', {})
if self.xcode_settings[configname].get('IPHONEOS_DEPLOYMENT_TARGET',
None):
self.isIOS = True
# If you need this, speak up at http://crbug.com/122592
conditional_keys = [key for key in self.xcode_settings[configname]
if key.endswith(']')]
if conditional_keys:
print 'Warning: Conditional keys not implemented, ignoring:', \
' '.join(conditional_keys)
for key in conditional_keys:
del self.xcode_settings[configname][key]
# This is only non-None temporarily during the execution of some methods.
self.configname = None
# Used by _AdjustLibrary to match .a and .dylib entries in libraries.
self.library_re = re.compile(r'^lib([^/]+)\.(a|dylib)$')
def _Settings(self):
assert self.configname
return self.xcode_settings[self.configname]
def _Test(self, test_key, cond_key, default):
return self._Settings().get(test_key, default) == cond_key
def _Appendf(self, lst, test_key, format_str, default=None):
if test_key in self._Settings():
lst.append(format_str % str(self._Settings()[test_key]))
elif default:
lst.append(format_str % str(default))
def _WarnUnimplemented(self, test_key):
if test_key in self._Settings():
print 'Warning: Ignoring not yet implemented key "%s".' % test_key
def _IsBundle(self):
return int(self.spec.get('mac_bundle', 0)) != 0
def GetFrameworkVersion(self):
"""Returns the framework version of the current target. Only valid for
bundles."""
assert self._IsBundle()
return self.GetPerTargetSetting('FRAMEWORK_VERSION', default='A')
def GetWrapperExtension(self):
"""Returns the bundle extension (.app, .framework, .plugin, etc). Only
valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('loadable_module', 'shared_library'):
default_wrapper_extension = {
'loadable_module': 'bundle',
'shared_library': 'framework',
}[self.spec['type']]
wrapper_extension = self.GetPerTargetSetting(
'WRAPPER_EXTENSION', default=default_wrapper_extension)
return '.' + self.spec.get('product_extension', wrapper_extension)
elif self.spec['type'] == 'executable':
return '.' + self.spec.get('product_extension', 'app')
else:
assert False, "Don't know extension for '%s', target '%s'" % (
self.spec['type'], self.spec['target_name'])
def GetProductName(self):
"""Returns PRODUCT_NAME."""
return self.spec.get('product_name', self.spec['target_name'])
def GetFullProductName(self):
"""Returns FULL_PRODUCT_NAME."""
if self._IsBundle():
return self.GetWrapperName()
else:
return self._GetStandaloneBinaryPath()
def GetWrapperName(self):
"""Returns the directory name of the bundle represented by this target.
Only valid for bundles."""
assert self._IsBundle()
return self.GetProductName() + self.GetWrapperExtension()
def GetBundleContentsFolderPath(self):
"""Returns the qualified path to the bundle's contents folder. E.g.
Chromium.app/Contents or Foo.bundle/Versions/A. Only valid for bundles."""
if self.isIOS:
return self.GetWrapperName()
assert self._IsBundle()
if self.spec['type'] == 'shared_library':
return os.path.join(
self.GetWrapperName(), 'Versions', self.GetFrameworkVersion())
else:
# loadable_modules have a 'Contents' folder like executables.
return os.path.join(self.GetWrapperName(), 'Contents')
def GetBundleResourceFolder(self):
"""Returns the qualified path to the bundle's resource folder. E.g.
Chromium.app/Contents/Resources. Only valid for bundles."""
assert self._IsBundle()
if self.isIOS:
return self.GetBundleContentsFolderPath()
return os.path.join(self.GetBundleContentsFolderPath(), 'Resources')
def GetBundlePlistPath(self):
"""Returns the qualified path to the bundle's plist file. E.g.
Chromium.app/Contents/Info.plist. Only valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('executable', 'loadable_module'):
return os.path.join(self.GetBundleContentsFolderPath(), 'Info.plist')
else:
return os.path.join(self.GetBundleContentsFolderPath(),
'Resources', 'Info.plist')
def GetProductType(self):
"""Returns the PRODUCT_TYPE of this target."""
if self._IsBundle():
return {
'executable': 'com.apple.product-type.application',
'loadable_module': 'com.apple.product-type.bundle',
'shared_library': 'com.apple.product-type.framework',
}[self.spec['type']]
else:
return {
'executable': 'com.apple.product-type.tool',
'loadable_module': 'com.apple.product-type.library.dynamic',
'shared_library': 'com.apple.product-type.library.dynamic',
'static_library': 'com.apple.product-type.library.static',
}[self.spec['type']]
def GetMachOType(self):
"""Returns the MACH_O_TYPE of this target."""
# Weird, but matches Xcode.
if not self._IsBundle() and self.spec['type'] == 'executable':
return ''
return {
'executable': 'mh_execute',
'static_library': 'staticlib',
'shared_library': 'mh_dylib',
'loadable_module': 'mh_bundle',
}[self.spec['type']]
def _GetBundleBinaryPath(self):
"""Returns the name of the bundle binary of by this target.
E.g. Chromium.app/Contents/MacOS/Chromium. Only valid for bundles."""
assert self._IsBundle()
if self.spec['type'] in ('shared_library') or self.isIOS:
path = self.GetBundleContentsFolderPath()
elif self.spec['type'] in ('executable', 'loadable_module'):
path = os.path.join(self.GetBundleContentsFolderPath(), 'MacOS')
return os.path.join(path, self.GetExecutableName())
def _GetStandaloneExecutableSuffix(self):
if 'product_extension' in self.spec:
return '.' + self.spec['product_extension']
return {
'executable': '',
'static_library': '.a',
'shared_library': '.dylib',
'loadable_module': '.so',
}[self.spec['type']]
def _GetStandaloneExecutablePrefix(self):
return self.spec.get('product_prefix', {
'executable': '',
'static_library': 'lib',
'shared_library': 'lib',
# Non-bundled loadable_modules are called foo.so for some reason
# (that is, .so and no prefix) with the xcode build -- match that.
'loadable_module': '',
}[self.spec['type']])
def _GetStandaloneBinaryPath(self):
"""Returns the name of the non-bundle binary represented by this target.
E.g. hello_world. Only valid for non-bundles."""
assert not self._IsBundle()
assert self.spec['type'] in (
'executable', 'shared_library', 'static_library', 'loadable_module'), (
'Unexpected type %s' % self.spec['type'])
target = self.spec['target_name']
if self.spec['type'] == 'static_library':
if target[:3] == 'lib':
target = target[3:]
elif self.spec['type'] in ('loadable_module', 'shared_library'):
if target[:3] == 'lib':
target = target[3:]
target_prefix = self._GetStandaloneExecutablePrefix()
target = self.spec.get('product_name', target)
target_ext = self._GetStandaloneExecutableSuffix()
return target_prefix + target + target_ext
def GetExecutableName(self):
"""Returns the executable name of the bundle represented by this target.
E.g. Chromium."""
if self._IsBundle():
return self.spec.get('product_name', self.spec['target_name'])
else:
return self._GetStandaloneBinaryPath()
def GetExecutablePath(self):
"""Returns the directory name of the bundle represented by this target. E.g.
Chromium.app/Contents/MacOS/Chromium."""
if self._IsBundle():
return self._GetBundleBinaryPath()
else:
return self._GetStandaloneBinaryPath()
def GetActiveArchs(self, configname):
"""Returns the architectures this target should be built for."""
# TODO: Look at VALID_ARCHS, ONLY_ACTIVE_ARCH; possibly set
# CURRENT_ARCH / NATIVE_ARCH env vars?
return self.xcode_settings[configname].get('ARCHS', ['i386'])
def _GetSdkVersionInfoItem(self, sdk, infoitem):
job = subprocess.Popen(['xcodebuild', '-version', '-sdk', sdk, infoitem],
stdout=subprocess.PIPE)
out = job.communicate()[0]
if job.returncode != 0:
sys.stderr.write(out + '\n')
raise GypError('Error %d running xcodebuild' % job.returncode)
return out.rstrip('\n')
def _SdkPath(self):
sdk_root = self.GetPerTargetSetting('SDKROOT', default='macosx')
if sdk_root.startswith('/'):
return sdk_root
if sdk_root not in XcodeSettings._sdk_path_cache:
XcodeSettings._sdk_path_cache[sdk_root] = self._GetSdkVersionInfoItem(
sdk_root, 'Path')
return XcodeSettings._sdk_path_cache[sdk_root]
def _AppendPlatformVersionMinFlags(self, lst):
self._Appendf(lst, 'MACOSX_DEPLOYMENT_TARGET', '-mmacosx-version-min=%s')
if 'IPHONEOS_DEPLOYMENT_TARGET' in self._Settings():
# TODO: Implement this better?
sdk_path_basename = os.path.basename(self._SdkPath())
if sdk_path_basename.lower().startswith('iphonesimulator'):
self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
'-mios-simulator-version-min=%s')
else:
self._Appendf(lst, 'IPHONEOS_DEPLOYMENT_TARGET',
'-miphoneos-version-min=%s')
def GetCflags(self, configname, arch=None):
"""Returns flags that need to be added to .c, .cc, .m, and .mm
compilations."""
    # These functions (and the similar ones below) do not offer complete
# emulation of all xcode_settings keys. They're implemented on demand.
self.configname = configname
cflags = []
sdk_root = self._SdkPath()
if 'SDKROOT' in self._Settings():
cflags.append('-isysroot %s' % sdk_root)
if self._Test('CLANG_WARN_CONSTANT_CONVERSION', 'YES', default='NO'):
cflags.append('-Wconstant-conversion')
if self._Test('GCC_CHAR_IS_UNSIGNED_CHAR', 'YES', default='NO'):
cflags.append('-funsigned-char')
if self._Test('GCC_CW_ASM_SYNTAX', 'YES', default='YES'):
cflags.append('-fasm-blocks')
if 'GCC_DYNAMIC_NO_PIC' in self._Settings():
if self._Settings()['GCC_DYNAMIC_NO_PIC'] == 'YES':
cflags.append('-mdynamic-no-pic')
else:
pass
# TODO: In this case, it depends on the target. xcode passes
# mdynamic-no-pic by default for executable and possibly static lib
# according to mento
if self._Test('GCC_ENABLE_PASCAL_STRINGS', 'YES', default='YES'):
cflags.append('-mpascal-strings')
self._Appendf(cflags, 'GCC_OPTIMIZATION_LEVEL', '-O%s', default='s')
if self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES'):
dbg_format = self._Settings().get('DEBUG_INFORMATION_FORMAT', 'dwarf')
if dbg_format == 'dwarf':
cflags.append('-gdwarf-2')
elif dbg_format == 'stabs':
raise NotImplementedError('stabs debug format is not supported yet.')
elif dbg_format == 'dwarf-with-dsym':
cflags.append('-gdwarf-2')
else:
raise NotImplementedError('Unknown debug format %s' % dbg_format)
if self._Settings().get('GCC_STRICT_ALIASING') == 'YES':
cflags.append('-fstrict-aliasing')
elif self._Settings().get('GCC_STRICT_ALIASING') == 'NO':
cflags.append('-fno-strict-aliasing')
if self._Test('GCC_SYMBOLS_PRIVATE_EXTERN', 'YES', default='NO'):
cflags.append('-fvisibility=hidden')
if self._Test('GCC_TREAT_WARNINGS_AS_ERRORS', 'YES', default='NO'):
cflags.append('-Werror')
if self._Test('GCC_WARN_ABOUT_MISSING_NEWLINE', 'YES', default='NO'):
cflags.append('-Wnewline-eof')
self._AppendPlatformVersionMinFlags(cflags)
# TODO:
if self._Test('COPY_PHASE_STRIP', 'YES', default='NO'):
self._WarnUnimplemented('COPY_PHASE_STRIP')
self._WarnUnimplemented('GCC_DEBUGGING_SYMBOLS')
self._WarnUnimplemented('GCC_ENABLE_OBJC_EXCEPTIONS')
# TODO: This is exported correctly, but assigning to it is not supported.
self._WarnUnimplemented('MACH_O_TYPE')
self._WarnUnimplemented('PRODUCT_TYPE')
if arch is not None:
archs = [arch]
else:
archs = self._Settings().get('ARCHS', ['i386'])
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
archs = ['i386']
cflags.append('-arch ' + archs[0])
if archs[0] in ('i386', 'x86_64'):
if self._Test('GCC_ENABLE_SSE3_EXTENSIONS', 'YES', default='NO'):
cflags.append('-msse3')
if self._Test('GCC_ENABLE_SUPPLEMENTAL_SSE3_INSTRUCTIONS', 'YES',
default='NO'):
cflags.append('-mssse3') # Note 3rd 's'.
if self._Test('GCC_ENABLE_SSE41_EXTENSIONS', 'YES', default='NO'):
cflags.append('-msse4.1')
if self._Test('GCC_ENABLE_SSE42_EXTENSIONS', 'YES', default='NO'):
cflags.append('-msse4.2')
cflags += self._Settings().get('WARNING_CFLAGS', [])
config = self.spec['configurations'][self.configname]
framework_dirs = config.get('mac_framework_dirs', [])
for directory in framework_dirs:
cflags.append('-F' + directory.replace('$(SDKROOT)', sdk_root))
self.configname = None
return cflags
def GetCflagsC(self, configname):
"""Returns flags that need to be added to .c, and .m compilations."""
self.configname = configname
cflags_c = []
if self._Settings().get('GCC_C_LANGUAGE_STANDARD', '') == 'ansi':
cflags_c.append('-ansi')
else:
self._Appendf(cflags_c, 'GCC_C_LANGUAGE_STANDARD', '-std=%s')
cflags_c += self._Settings().get('OTHER_CFLAGS', [])
self.configname = None
return cflags_c
def GetCflagsCC(self, configname):
"""Returns flags that need to be added to .cc, and .mm compilations."""
self.configname = configname
cflags_cc = []
clang_cxx_language_standard = self._Settings().get(
'CLANG_CXX_LANGUAGE_STANDARD')
# Note: Don't make c++0x to c++11 so that c++0x can be used with older
# clangs that don't understand c++11 yet (like Xcode 4.2's).
if clang_cxx_language_standard:
cflags_cc.append('-std=%s' % clang_cxx_language_standard)
self._Appendf(cflags_cc, 'CLANG_CXX_LIBRARY', '-stdlib=%s')
if self._Test('GCC_ENABLE_CPP_RTTI', 'NO', default='YES'):
cflags_cc.append('-fno-rtti')
if self._Test('GCC_ENABLE_CPP_EXCEPTIONS', 'NO', default='YES'):
cflags_cc.append('-fno-exceptions')
if self._Test('GCC_INLINES_ARE_PRIVATE_EXTERN', 'YES', default='NO'):
cflags_cc.append('-fvisibility-inlines-hidden')
if self._Test('GCC_THREADSAFE_STATICS', 'NO', default='YES'):
cflags_cc.append('-fno-threadsafe-statics')
# Note: This flag is a no-op for clang, it only has an effect for gcc.
if self._Test('GCC_WARN_ABOUT_INVALID_OFFSETOF_MACRO', 'NO', default='YES'):
cflags_cc.append('-Wno-invalid-offsetof')
other_ccflags = []
for flag in self._Settings().get('OTHER_CPLUSPLUSFLAGS', ['$(inherited)']):
# TODO: More general variable expansion. Missing in many other places too.
if flag in ('$inherited', '$(inherited)', '${inherited}'):
flag = '$OTHER_CFLAGS'
if flag in ('$OTHER_CFLAGS', '$(OTHER_CFLAGS)', '${OTHER_CFLAGS}'):
other_ccflags += self._Settings().get('OTHER_CFLAGS', [])
else:
other_ccflags.append(flag)
cflags_cc += other_ccflags
self.configname = None
return cflags_cc
def _AddObjectiveCGarbageCollectionFlags(self, flags):
gc_policy = self._Settings().get('GCC_ENABLE_OBJC_GC', 'unsupported')
if gc_policy == 'supported':
flags.append('-fobjc-gc')
elif gc_policy == 'required':
flags.append('-fobjc-gc-only')
def _AddObjectiveCARCFlags(self, flags):
if self._Test('CLANG_ENABLE_OBJC_ARC', 'YES', default='NO'):
flags.append('-fobjc-arc')
def GetCflagsObjC(self, configname):
"""Returns flags that need to be added to .m compilations."""
self.configname = configname
cflags_objc = []
self._AddObjectiveCGarbageCollectionFlags(cflags_objc)
self._AddObjectiveCARCFlags(cflags_objc)
self.configname = None
return cflags_objc
def GetCflagsObjCC(self, configname):
"""Returns flags that need to be added to .mm compilations."""
self.configname = configname
cflags_objcc = []
self._AddObjectiveCGarbageCollectionFlags(cflags_objcc)
self._AddObjectiveCARCFlags(cflags_objcc)
if self._Test('GCC_OBJC_CALL_CXX_CDTORS', 'YES', default='NO'):
cflags_objcc.append('-fobjc-call-cxx-cdtors')
self.configname = None
return cflags_objcc
def GetInstallNameBase(self):
"""Return DYLIB_INSTALL_NAME_BASE for this target."""
# Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
if (self.spec['type'] != 'shared_library' and
(self.spec['type'] != 'loadable_module' or self._IsBundle())):
return None
install_base = self.GetPerTargetSetting(
'DYLIB_INSTALL_NAME_BASE',
default='/Library/Frameworks' if self._IsBundle() else '/usr/local/lib')
return install_base
def _StandardizePath(self, path):
"""Do :standardizepath processing for path."""
# I'm not quite sure what :standardizepath does. Just call normpath(),
# but don't let @executable_path/../foo collapse to foo.
if '/' in path:
prefix, rest = '', path
if path.startswith('@'):
prefix, rest = path.split('/', 1)
rest = os.path.normpath(rest) # :standardizepath
path = os.path.join(prefix, rest)
return path
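  # Illustration (not in the original file): _StandardizePath() normalizes the
  # part after a leading @-prefix without collapsing the prefix itself, e.g.
  #     self._StandardizePath('@loader_path/../lib/./libfoo.dylib')
  # returns '@loader_path/../lib/libfoo.dylib'.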
def GetInstallName(self):
"""Return LD_DYLIB_INSTALL_NAME for this target."""
# Xcode sets this for shared_libraries, and for nonbundled loadable_modules.
if (self.spec['type'] != 'shared_library' and
(self.spec['type'] != 'loadable_module' or self._IsBundle())):
return None
default_install_name = \
'$(DYLIB_INSTALL_NAME_BASE:standardizepath)/$(EXECUTABLE_PATH)'
install_name = self.GetPerTargetSetting(
'LD_DYLIB_INSTALL_NAME', default=default_install_name)
# Hardcode support for the variables used in chromium for now, to
# unblock people using the make build.
if '$' in install_name:
assert install_name in ('$(DYLIB_INSTALL_NAME_BASE:standardizepath)/'
'$(WRAPPER_NAME)/$(PRODUCT_NAME)', default_install_name), (
'Variables in LD_DYLIB_INSTALL_NAME are not generally supported '
'yet in target \'%s\' (got \'%s\')' %
(self.spec['target_name'], install_name))
install_name = install_name.replace(
'$(DYLIB_INSTALL_NAME_BASE:standardizepath)',
self._StandardizePath(self.GetInstallNameBase()))
if self._IsBundle():
# These are only valid for bundles, hence the |if|.
install_name = install_name.replace(
'$(WRAPPER_NAME)', self.GetWrapperName())
install_name = install_name.replace(
'$(PRODUCT_NAME)', self.GetProductName())
else:
assert '$(WRAPPER_NAME)' not in install_name
assert '$(PRODUCT_NAME)' not in install_name
install_name = install_name.replace(
'$(EXECUTABLE_PATH)', self.GetExecutablePath())
return install_name
def _MapLinkerFlagFilename(self, ldflag, gyp_to_build_path):
"""Checks if ldflag contains a filename and if so remaps it from
gyp-directory-relative to build-directory-relative."""
# This list is expanded on demand.
# They get matched as:
# -exported_symbols_list file
# -Wl,exported_symbols_list file
# -Wl,exported_symbols_list,file
    LINKER_FILE = r'(\S+)'
    WORD = r'\S+'
linker_flags = [
['-exported_symbols_list', LINKER_FILE], # Needed for NaCl.
['-unexported_symbols_list', LINKER_FILE],
['-reexported_symbols_list', LINKER_FILE],
['-sectcreate', WORD, WORD, LINKER_FILE], # Needed for remoting.
]
for flag_pattern in linker_flags:
regex = re.compile('(?:-Wl,)?' + '[ ,]'.join(flag_pattern))
m = regex.match(ldflag)
if m:
ldflag = ldflag[:m.start(1)] + gyp_to_build_path(m.group(1)) + \
ldflag[m.end(1):]
# Required for ffmpeg (no idea why they don't use LIBRARY_SEARCH_PATHS,
# TODO(thakis): Update ffmpeg.gyp):
if ldflag.startswith('-L'):
ldflag = '-L' + gyp_to_build_path(ldflag[len('-L'):])
return ldflag
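  # Illustration (not in the original file): with a gyp_to_build_path that
  # prefixes '../../', the remapping above turns a gyp-relative ldflag such as
  #     '-Wl,-exported_symbols_list,foo.exp'
  # into
  #     '-Wl,-exported_symbols_list,../../foo.exp'
  # while flags that carry no filename pass through unchanged.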
def GetLdflags(self, configname, product_dir, gyp_to_build_path, arch=None):
"""Returns flags that need to be passed to the linker.
Args:
configname: The name of the configuration to get ld flags for.
product_dir: The directory where products such static and dynamic
libraries are placed. This is added to the library search path.
gyp_to_build_path: A function that converts paths relative to the
            current gyp file to paths relative to the build directory.
"""
self.configname = configname
ldflags = []
# The xcode build is relative to a gyp file's directory, and OTHER_LDFLAGS
# can contain entries that depend on this. Explicitly absolutify these.
for ldflag in self._Settings().get('OTHER_LDFLAGS', []):
ldflags.append(self._MapLinkerFlagFilename(ldflag, gyp_to_build_path))
if self._Test('DEAD_CODE_STRIPPING', 'YES', default='NO'):
ldflags.append('-Wl,-dead_strip')
if self._Test('PREBINDING', 'YES', default='NO'):
ldflags.append('-Wl,-prebind')
self._Appendf(
ldflags, 'DYLIB_COMPATIBILITY_VERSION', '-compatibility_version %s')
self._Appendf(
ldflags, 'DYLIB_CURRENT_VERSION', '-current_version %s')
self._AppendPlatformVersionMinFlags(ldflags)
if 'SDKROOT' in self._Settings():
ldflags.append('-isysroot ' + self._SdkPath())
for library_path in self._Settings().get('LIBRARY_SEARCH_PATHS', []):
ldflags.append('-L' + gyp_to_build_path(library_path))
if 'ORDER_FILE' in self._Settings():
ldflags.append('-Wl,-order_file ' +
'-Wl,' + gyp_to_build_path(
self._Settings()['ORDER_FILE']))
if arch is not None:
archs = [arch]
else:
archs = self._Settings().get('ARCHS', ['i386'])
if len(archs) != 1:
# TODO: Supporting fat binaries will be annoying.
self._WarnUnimplemented('ARCHS')
archs = ['i386']
ldflags.append('-arch ' + archs[0])
# Xcode adds the product directory by default.
ldflags.append('-L' + product_dir)
install_name = self.GetInstallName()
if install_name:
ldflags.append('-install_name ' + install_name.replace(' ', r'\ '))
for rpath in self._Settings().get('LD_RUNPATH_SEARCH_PATHS', []):
ldflags.append('-Wl,-rpath,' + rpath)
config = self.spec['configurations'][self.configname]
framework_dirs = config.get('mac_framework_dirs', [])
for directory in framework_dirs:
ldflags.append('-F' + directory.replace('$(SDKROOT)', self._SdkPath()))
self.configname = None
return ldflags
def GetLibtoolflags(self, configname):
"""Returns flags that need to be passed to the static linker.
Args:
configname: The name of the configuration to get ld flags for.
"""
self.configname = configname
libtoolflags = []
for libtoolflag in self._Settings().get('OTHER_LDFLAGS', []):
libtoolflags.append(libtoolflag)
# TODO(thakis): ARCHS?
self.configname = None
return libtoolflags
def GetPerTargetSettings(self):
"""Gets a list of all the per-target settings. This will only fetch keys
whose values are the same across all configurations."""
first_pass = True
result = {}
for configname in sorted(self.xcode_settings.keys()):
if first_pass:
result = dict(self.xcode_settings[configname])
first_pass = False
else:
for key, value in self.xcode_settings[configname].iteritems():
if key not in result:
continue
elif result[key] != value:
del result[key]
return result
def GetPerTargetSetting(self, setting, default=None):
"""Tries to get xcode_settings.setting from spec. Assumes that the setting
has the same value in all configurations and throws otherwise."""
first_pass = True
result = None
for configname in sorted(self.xcode_settings.keys()):
if first_pass:
result = self.xcode_settings[configname].get(setting, None)
first_pass = False
else:
assert result == self.xcode_settings[configname].get(setting, None), (
"Expected per-target setting for '%s', got per-config setting "
"(target %s)" % (setting, spec['target_name']))
if result is None:
return default
return result
def _GetStripPostbuilds(self, configname, output_binary, quiet):
"""Returns a list of shell commands that contain the shell commands
    necessary to strip this target's binary. These should be run as postbuilds
before the actual postbuilds run."""
self.configname = configname
result = []
if (self._Test('DEPLOYMENT_POSTPROCESSING', 'YES', default='NO') and
self._Test('STRIP_INSTALLED_PRODUCT', 'YES', default='NO')):
default_strip_style = 'debugging'
if self._IsBundle():
default_strip_style = 'non-global'
elif self.spec['type'] == 'executable':
default_strip_style = 'all'
strip_style = self._Settings().get('STRIP_STYLE', default_strip_style)
strip_flags = {
'all': '',
'non-global': '-x',
'debugging': '-S',
}[strip_style]
explicit_strip_flags = self._Settings().get('STRIPFLAGS', '')
if explicit_strip_flags:
strip_flags += ' ' + _NormalizeEnvVarReferences(explicit_strip_flags)
if not quiet:
result.append('echo STRIP\\(%s\\)' % self.spec['target_name'])
result.append('strip %s %s' % (strip_flags, output_binary))
self.configname = None
return result
def _GetDebugInfoPostbuilds(self, configname, output, output_binary, quiet):
"""Returns a list of shell commands that contain the shell commands
    necessary to massage this target's debug information. These should be run
as postbuilds before the actual postbuilds run."""
self.configname = configname
# For static libraries, no dSYMs are created.
result = []
if (self._Test('GCC_GENERATE_DEBUGGING_SYMBOLS', 'YES', default='YES') and
self._Test(
'DEBUG_INFORMATION_FORMAT', 'dwarf-with-dsym', default='dwarf') and
self.spec['type'] != 'static_library'):
if not quiet:
result.append('echo DSYMUTIL\\(%s\\)' % self.spec['target_name'])
result.append('dsymutil %s -o %s' % (output_binary, output + '.dSYM'))
self.configname = None
return result
def GetTargetPostbuilds(self, configname, output, output_binary, quiet=False):
"""Returns a list of shell commands that contain the shell commands
to run as postbuilds for this target, before the actual postbuilds."""
# dSYMs need to build before stripping happens.
return (
self._GetDebugInfoPostbuilds(configname, output, output_binary, quiet) +
self._GetStripPostbuilds(configname, output_binary, quiet))
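  # Illustrative sketch (paths and setting values below are assumed examples,
  # not part of the original gyp source): for a bundle target 'App' whose
  # configuration sets DEPLOYMENT_POSTPROCESSING=YES,
  # STRIP_INSTALLED_PRODUCT=YES and DEBUG_INFORMATION_FORMAT=dwarf-with-dsym,
  # a call like
  #   GetTargetPostbuilds('Release', 'out/App.app',
  #                       'out/App.app/Contents/MacOS/App')
  # would return shell commands in dSYM-then-strip order, roughly:
  #   echo DSYMUTIL\(App\)
  #   dsymutil out/App.app/Contents/MacOS/App -o out/App.app.dSYM
  #   echo STRIP\(App\)
  #   strip -x out/App.app/Contents/MacOS/App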
def _AdjustLibrary(self, library):
if library.endswith('.framework'):
l = '-framework ' + os.path.splitext(os.path.basename(library))[0]
else:
m = self.library_re.match(library)
if m:
l = '-l' + m.group(1)
else:
l = library
return l.replace('$(SDKROOT)', self._SdkPath())
def AdjustLibraries(self, libraries):
"""Transforms entries like 'Cocoa.framework' in libraries into entries like
'-framework Cocoa', 'libcrypto.dylib' into '-lcrypto', etc.
"""
libraries = [ self._AdjustLibrary(library) for library in libraries]
return libraries
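  # Illustrative example (library names assumed):
  #   AdjustLibraries(['Cocoa.framework', 'libcrypto.dylib'])
  # would return ['-framework Cocoa', '-lcrypto']; entries matching neither
  # pattern pass through unchanged, and any '$(SDKROOT)' reference is replaced
  # with the resolved SDK path.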
class MacPrefixHeader(object):
"""A class that helps with emulating Xcode's GCC_PREFIX_HEADER feature.
This feature consists of several pieces:
* If GCC_PREFIX_HEADER is present, all compilations in that project get an
additional |-include path_to_prefix_header| cflag.
* If GCC_PRECOMPILE_PREFIX_HEADER is present too, then the prefix header is
instead compiled, and all other compilations in the project get an
additional |-include path_to_compiled_header| instead.
+ Compiled prefix headers have the extension gch. There is one gch file for
every language used in the project (c, cc, m, mm), since gch files for
different languages aren't compatible.
+ gch files themselves are built with the target's normal cflags, but they
obviously don't get the |-include| flag. Instead, they need a -x flag that
describes their language.
+ All o files in the target need to depend on the gch file, to make sure
it's built before any o file is built.
This class helps with some of these tasks, but it needs help from the build
system for writing dependencies to the gch files, for writing build commands
for the gch files, and for figuring out the location of the gch files.
"""
def __init__(self, xcode_settings,
gyp_path_to_build_path, gyp_path_to_build_output):
"""If xcode_settings is None, all methods on this class are no-ops.
Args:
gyp_path_to_build_path: A function that takes a gyp-relative path,
and returns a path relative to the build directory.
gyp_path_to_build_output: A function that takes a gyp-relative path and
a language code ('c', 'cc', 'm', or 'mm'), and that returns a path
to where the output of precompiling that path for that language
should be placed (without the trailing '.gch').
"""
# This doesn't support per-configuration prefix headers. Good enough
# for now.
self.header = None
self.compile_headers = False
if xcode_settings:
self.header = xcode_settings.GetPerTargetSetting('GCC_PREFIX_HEADER')
self.compile_headers = xcode_settings.GetPerTargetSetting(
'GCC_PRECOMPILE_PREFIX_HEADER', default='NO') != 'NO'
self.compiled_headers = {}
if self.header:
if self.compile_headers:
for lang in ['c', 'cc', 'm', 'mm']:
self.compiled_headers[lang] = gyp_path_to_build_output(
self.header, lang)
self.header = gyp_path_to_build_path(self.header)
def _CompiledHeader(self, lang, arch):
assert self.compile_headers
h = self.compiled_headers[lang]
if arch:
h += '.' + arch
return h
def GetInclude(self, lang, arch=None):
"""Gets the cflags to include the prefix header for language |lang|."""
if self.compile_headers and lang in self.compiled_headers:
return '-include %s' % self._CompiledHeader(lang, arch)
elif self.header:
return '-include %s' % self.header
else:
return ''
def _Gch(self, lang, arch):
"""Returns the actual file name of the prefix header for language |lang|."""
assert self.compile_headers
return self._CompiledHeader(lang, arch) + '.gch'
def GetObjDependencies(self, sources, objs, arch=None):
"""Given a list of source files and the corresponding object files, returns
a list of (source, object, gch) tuples, where |gch| is the build-directory
    relative path to the gch file each object file depends on. |sources[i]|
has to be the source file belonging to |objs[i]|."""
if not self.header or not self.compile_headers:
return []
result = []
for source, obj in zip(sources, objs):
ext = os.path.splitext(source)[1]
lang = {
'.c': 'c',
'.cpp': 'cc', '.cc': 'cc', '.cxx': 'cc',
'.m': 'm',
'.mm': 'mm',
}.get(ext, None)
if lang:
result.append((source, obj, self._Gch(lang, arch)))
return result
def GetPchBuildCommands(self, arch=None):
"""Returns [(path_to_gch, language_flag, language, header)].
|path_to_gch| and |header| are relative to the build directory.
"""
if not self.header or not self.compile_headers:
return []
return [
(self._Gch('c', arch), '-x c-header', 'c', self.header),
(self._Gch('cc', arch), '-x c++-header', 'cc', self.header),
(self._Gch('m', arch), '-x objective-c-header', 'm', self.header),
(self._Gch('mm', arch), '-x objective-c++-header', 'mm', self.header),
]
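# Illustrative sketch of how a generator typically drives MacPrefixHeader (the
# lambdas and paths below are assumed examples, not gyp API):
#
#   pch = MacPrefixHeader(
#       xcode_settings,
#       lambda path: os.path.join('out', path),
#       lambda path, lang: os.path.join('out', 'pch', lang + '_' + path))
#   # With GCC_PREFIX_HEADER='prefix.h' and GCC_PRECOMPILE_PREFIX_HEADER='YES':
#   pch.GetInclude('cc')       # -> '-include out/pch/cc_prefix.h'
#   pch.GetPchBuildCommands()  # -> [('out/pch/c_prefix.h.gch', '-x c-header',
#                              #      'c', 'out/prefix.h'), ...]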
def MergeGlobalXcodeSettingsToSpec(global_dict, spec):
"""Merges the global xcode_settings dictionary into each configuration of the
target represented by spec. For keys that are both in the global and the local
  xcode_settings dict, the local key gets precedence.
"""
# The xcode generator special-cases global xcode_settings and does something
# that amounts to merging in the global xcode_settings into each local
# xcode_settings dict.
global_xcode_settings = global_dict.get('xcode_settings', {})
for config in spec['configurations'].values():
if 'xcode_settings' in config:
new_settings = global_xcode_settings.copy()
new_settings.update(config['xcode_settings'])
config['xcode_settings'] = new_settings
def IsMacBundle(flavor, spec):
"""Returns if |spec| should be treated as a bundle.
Bundles are directories with a certain subdirectory structure, instead of
  just a single file. Bundle rules not only produce a binary but also package
resources into that directory."""
is_mac_bundle = (int(spec.get('mac_bundle', 0)) != 0 and flavor == 'mac')
if is_mac_bundle:
assert spec['type'] != 'none', (
'mac_bundle targets cannot have type none (target "%s")' %
spec['target_name'])
return is_mac_bundle
def GetMacBundleResources(product_dir, xcode_settings, resources):
"""Yields (output, resource) pairs for every resource in |resources|.
Only call this for mac bundle targets.
Args:
product_dir: Path to the directory containing the output bundle,
relative to the build directory.
xcode_settings: The XcodeSettings of the current target.
resources: A list of bundle resources, relative to the build directory.
"""
dest = os.path.join(product_dir,
xcode_settings.GetBundleResourceFolder())
for res in resources:
output = dest
# The make generator doesn't support it, so forbid it everywhere
    # to keep the generators more interchangeable.
assert ' ' not in res, (
"Spaces in resource filenames not supported (%s)" % res)
# Split into (path,file).
res_parts = os.path.split(res)
# Now split the path into (prefix,maybe.lproj).
lproj_parts = os.path.split(res_parts[0])
# If the resource lives in a .lproj bundle, add that to the destination.
if lproj_parts[1].endswith('.lproj'):
output = os.path.join(output, lproj_parts[1])
output = os.path.join(output, res_parts[1])
# Compiled XIB files are referred to by .nib.
if output.endswith('.xib'):
output = output[0:-3] + 'nib'
yield output, res
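# Illustrative example (resource names assumed): with a product_dir of 'out'
# and a bundle resource folder of 'App.app/Contents/Resources', the generator
# above would yield pairs such as
#   ('out/App.app/Contents/Resources/icon.png', 'images/icon.png')
#   ('out/App.app/Contents/Resources/en.lproj/Localizable.strings',
#    'en.lproj/Localizable.strings')
#   ('out/App.app/Contents/Resources/MainMenu.nib', 'res/MainMenu.xib')
# i.e. only a trailing '.lproj' directory survives in the destination and
# compiled XIB files are renamed to '.nib'.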
def GetMacInfoPlist(product_dir, xcode_settings, gyp_path_to_build_path):
"""Returns (info_plist, dest_plist, defines, extra_env), where:
* |info_plist| is the source plist path, relative to the
build directory,
* |dest_plist| is the destination plist path, relative to the
build directory,
* |defines| is a list of preprocessor defines (empty if the plist
    shouldn't be preprocessed),
* |extra_env| is a dict of env variables that should be exported when
invoking |mac_tool copy-info-plist|.
Only call this for mac bundle targets.
Args:
product_dir: Path to the directory containing the output bundle,
relative to the build directory.
xcode_settings: The XcodeSettings of the current target.
    gyp_path_to_build_path: A function that converts paths relative to the
        current gyp file to paths relative to the build directory.
"""
info_plist = xcode_settings.GetPerTargetSetting('INFOPLIST_FILE')
if not info_plist:
return None, None, [], {}
# The make generator doesn't support it, so forbid it everywhere
  # to keep the generators more interchangeable.
assert ' ' not in info_plist, (
"Spaces in Info.plist filenames not supported (%s)" % info_plist)
info_plist = gyp_path_to_build_path(info_plist)
# If explicitly set to preprocess the plist, invoke the C preprocessor and
# specify any defines as -D flags.
if xcode_settings.GetPerTargetSetting(
'INFOPLIST_PREPROCESS', default='NO') == 'YES':
# Create an intermediate file based on the path.
defines = shlex.split(xcode_settings.GetPerTargetSetting(
'INFOPLIST_PREPROCESSOR_DEFINITIONS', default=''))
else:
defines = []
dest_plist = os.path.join(product_dir, xcode_settings.GetBundlePlistPath())
extra_env = xcode_settings.GetPerTargetSettings()
return info_plist, dest_plist, defines, extra_env
def _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
additional_settings=None):
"""Return the environment variables that Xcode would set. See
http://developer.apple.com/library/mac/#documentation/DeveloperTools/Reference/XcodeBuildSettingRef/1-Build_Setting_Reference/build_setting_ref.html#//apple_ref/doc/uid/TP40003931-CH3-SW153
for a full list.
Args:
xcode_settings: An XcodeSettings object. If this is None, this function
returns an empty dict.
built_products_dir: Absolute path to the built products dir.
srcroot: Absolute path to the source root.
configuration: The build configuration name.
additional_settings: An optional dict with more values to add to the
result.
"""
if not xcode_settings: return {}
# This function is considered a friend of XcodeSettings, so let it reach into
# its implementation details.
spec = xcode_settings.spec
  # These are filled in on an as-needed basis.
env = {
'BUILT_PRODUCTS_DIR' : built_products_dir,
'CONFIGURATION' : configuration,
'PRODUCT_NAME' : xcode_settings.GetProductName(),
# See /Developer/Platforms/MacOSX.platform/Developer/Library/Xcode/Specifications/MacOSX\ Product\ Types.xcspec for FULL_PRODUCT_NAME
'SRCROOT' : srcroot,
'SOURCE_ROOT': '${SRCROOT}',
# This is not true for static libraries, but currently the env is only
# written for bundles:
'TARGET_BUILD_DIR' : built_products_dir,
'TEMP_DIR' : '${TMPDIR}',
}
if xcode_settings.GetPerTargetSetting('SDKROOT'):
env['SDKROOT'] = xcode_settings._SdkPath()
else:
env['SDKROOT'] = ''
if spec['type'] in (
'executable', 'static_library', 'shared_library', 'loadable_module'):
env['EXECUTABLE_NAME'] = xcode_settings.GetExecutableName()
env['EXECUTABLE_PATH'] = xcode_settings.GetExecutablePath()
env['FULL_PRODUCT_NAME'] = xcode_settings.GetFullProductName()
mach_o_type = xcode_settings.GetMachOType()
if mach_o_type:
env['MACH_O_TYPE'] = mach_o_type
env['PRODUCT_TYPE'] = xcode_settings.GetProductType()
if xcode_settings._IsBundle():
env['CONTENTS_FOLDER_PATH'] = \
xcode_settings.GetBundleContentsFolderPath()
env['UNLOCALIZED_RESOURCES_FOLDER_PATH'] = \
xcode_settings.GetBundleResourceFolder()
env['INFOPLIST_PATH'] = xcode_settings.GetBundlePlistPath()
env['WRAPPER_NAME'] = xcode_settings.GetWrapperName()
install_name = xcode_settings.GetInstallName()
if install_name:
env['LD_DYLIB_INSTALL_NAME'] = install_name
install_name_base = xcode_settings.GetInstallNameBase()
if install_name_base:
env['DYLIB_INSTALL_NAME_BASE'] = install_name_base
if not additional_settings:
additional_settings = {}
else:
# Flatten lists to strings.
for k in additional_settings:
if not isinstance(additional_settings[k], str):
additional_settings[k] = ' '.join(additional_settings[k])
additional_settings.update(env)
for k in additional_settings:
additional_settings[k] = _NormalizeEnvVarReferences(additional_settings[k])
return additional_settings
def _NormalizeEnvVarReferences(str):
"""Takes a string containing variable references in the form ${FOO}, $(FOO),
or $FOO, and returns a string with all variable references in the form ${FOO}.
"""
# $FOO -> ${FOO}
str = re.sub(r'\$([a-zA-Z_][a-zA-Z0-9_]*)', r'${\1}', str)
# $(FOO) -> ${FOO}
matches = re.findall(r'(\$\(([a-zA-Z0-9\-_]+)\))', str)
for match in matches:
to_replace, variable = match
assert '$(' not in match, '$($(FOO)) variables not supported: ' + match
str = str.replace(to_replace, '${' + variable + '}')
return str
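# Doctest-style illustration (comments only, added for clarity):
#   >>> _NormalizeEnvVarReferences('$FOO and $(BAR) and ${BAZ}')
#   '${FOO} and ${BAR} and ${BAZ}'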
def ExpandEnvVars(string, expansions):
"""Expands ${VARIABLES}, $(VARIABLES), and $VARIABLES in string per the
expansions list. If the variable expands to something that references
another variable, this variable is expanded as well if it's in env --
until no variables present in env are left."""
for k, v in reversed(expansions):
string = string.replace('${' + k + '}', v)
string = string.replace('$(' + k + ')', v)
string = string.replace('$' + k, v)
return string
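# Doctest-style illustration (comments only). Because the expansion list is
# walked in reverse, entries that reference earlier keys resolve completely:
#   >>> ExpandEnvVars('$B/baz', [('A', 'foo'), ('B', '${A}/bar')])
#   'foo/bar/baz'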
def _TopologicallySortedEnvVarKeys(env):
"""Takes a dict |env| whose values are strings that can refer to other keys,
for example env['foo'] = '$(bar) and $(baz)'. Returns a list L of all keys of
env such that key2 is after key1 in L if env[key2] refers to env[key1].
Throws an Exception in case of dependency cycles.
"""
# Since environment variables can refer to other variables, the evaluation
# order is important. Below is the logic to compute the dependency graph
# and sort it.
regex = re.compile(r'\$\{([a-zA-Z0-9\-_]+)\}')
def GetEdges(node):
    # Use a definition of edges such that user_of_variable -> used_variable.
# This happens to be easier in this case, since a variable's
# definition contains all variables it references in a single string.
# We can then reverse the result of the topological sort at the end.
# Since: reverse(topsort(DAG)) = topsort(reverse_edges(DAG))
matches = set([v for v in regex.findall(env[node]) if v in env])
for dependee in matches:
assert '${' not in dependee, 'Nested variables not supported: ' + dependee
return matches
try:
# Topologically sort, and then reverse, because we used an edge definition
# that's inverted from the expected result of this function (see comment
# above).
order = gyp.common.TopologicallySorted(env.keys(), GetEdges)
order.reverse()
return order
except gyp.common.CycleError, e:
raise GypError(
'Xcode environment variables are cyclically dependent: ' + str(e.nodes))
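# Illustrative example (assumed input): for
#   env = {'FOO': 'a', 'BAR': '${FOO}/b', 'BAZ': '${BAR}/c'}
# _TopologicallySortedEnvVarKeys(env) returns ['FOO', 'BAR', 'BAZ'], i.e.
# every variable appears after the variables its value references.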
def GetSortedXcodeEnv(xcode_settings, built_products_dir, srcroot,
configuration, additional_settings=None):
env = _GetXcodeEnv(xcode_settings, built_products_dir, srcroot, configuration,
additional_settings)
return [(key, env[key]) for key in _TopologicallySortedEnvVarKeys(env)]
def GetSpecPostbuildCommands(spec, quiet=False):
"""Returns the list of postbuilds explicitly defined on |spec|, in a form
executable by a shell."""
postbuilds = []
for postbuild in spec.get('postbuilds', []):
if not quiet:
postbuilds.append('echo POSTBUILD\\(%s\\) %s' % (
spec['target_name'], postbuild['postbuild_name']))
postbuilds.append(gyp.common.EncodePOSIXShellList(postbuild['action']))
return postbuilds
| MIPS/external-chromium_org-tools-gyp | pylib/gyp/xcode_emulation.py | Python | bsd-3-clause | 44,910 |
import os
import re
from django.conf import global_settings, settings
from django.contrib.sites.models import Site, RequestSite
from django.contrib.auth.models import User
from django.core import mail
from django.core.exceptions import SuspiciousOperation
from django.core.urlresolvers import reverse, NoReverseMatch
from django.http import QueryDict
from django.utils.encoding import force_text
from django.utils.html import escape
from django.utils.http import urlquote
from django.utils._os import upath
from django.test import TestCase
from django.test.utils import override_settings
from django.contrib.auth import SESSION_KEY, REDIRECT_FIELD_NAME
from django.contrib.auth.forms import (AuthenticationForm, PasswordChangeForm,
SetPasswordForm, PasswordResetForm)
from django.contrib.auth.tests.utils import skipIfCustomUser
@override_settings(
LANGUAGES=(
('en', 'English'),
),
LANGUAGE_CODE='en',
TEMPLATE_LOADERS=global_settings.TEMPLATE_LOADERS,
TEMPLATE_DIRS=(
os.path.join(os.path.dirname(upath(__file__)), 'templates'),
),
USE_TZ=False,
PASSWORD_HASHERS=('django.contrib.auth.hashers.SHA1PasswordHasher',),
)
class AuthViewsTestCase(TestCase):
"""
    Helper base class for all the following test cases.
"""
fixtures = ['authtestdata.json']
urls = 'django.contrib.auth.tests.urls'
def login(self, password='password'):
response = self.client.post('/login/', {
'username': 'testclient',
'password': password,
})
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith(settings.LOGIN_REDIRECT_URL))
self.assertTrue(SESSION_KEY in self.client.session)
def assertContainsEscaped(self, response, text, **kwargs):
return self.assertContains(response, escape(force_text(text)), **kwargs)
@skipIfCustomUser
class AuthViewNamedURLTests(AuthViewsTestCase):
urls = 'django.contrib.auth.urls'
def test_named_urls(self):
"Named URLs should be reversible"
expected_named_urls = [
('login', [], {}),
('logout', [], {}),
('password_change', [], {}),
('password_change_done', [], {}),
('password_reset', [], {}),
('password_reset_done', [], {}),
('password_reset_confirm', [], {
'uidb36': 'aaaaaaa',
'token': '1111-aaaaa',
}),
('password_reset_complete', [], {}),
]
for name, args, kwargs in expected_named_urls:
try:
reverse(name, args=args, kwargs=kwargs)
except NoReverseMatch:
self.fail("Reversal of url named '%s' failed with NoReverseMatch" % name)
@skipIfCustomUser
class PasswordResetTest(AuthViewsTestCase):
def test_email_not_found(self):
"Error is raised if the provided email address isn't currently registered"
response = self.client.get('/password_reset/')
self.assertEqual(response.status_code, 200)
response = self.client.post('/password_reset/', {'email': '[email protected]'})
self.assertContainsEscaped(response, PasswordResetForm.error_messages['unknown'])
self.assertEqual(len(mail.outbox), 0)
def test_email_found(self):
"Email is sent if a valid email address is provided for password reset"
response = self.client.post('/password_reset/', {'email': '[email protected]'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertTrue("http://" in mail.outbox[0].body)
self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
def test_email_found_custom_from(self):
"Email is sent if a valid email address is provided for password reset when a custom from_email is provided."
response = self.client.post('/password_reset_from_email/', {'email': '[email protected]'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual("[email protected]", mail.outbox[0].from_email)
@override_settings(ALLOWED_HOSTS=['adminsite.com'])
def test_admin_reset(self):
"If the reset view is marked as being for admin, the HTTP_HOST header is used for a domain override."
response = self.client.post('/admin_password_reset/',
{'email': '[email protected]'},
HTTP_HOST='adminsite.com'
)
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
self.assertTrue("http://adminsite.com" in mail.outbox[0].body)
self.assertEqual(settings.DEFAULT_FROM_EMAIL, mail.outbox[0].from_email)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host(self):
"Poisoned HTTP_HOST headers can't be used for reset emails"
# This attack is based on the way browsers handle URLs. The colon
# should be used to separate the port, but if the URL contains an @,
# the colon is interpreted as part of a username for login purposes,
# making 'evil.com' the request domain. Since HTTP_HOST is used to
# produce a meaningful reset URL, we need to be certain that the
# HTTP_HOST header isn't poisoned. This is done as a check when get_host()
# is invoked, but we check here as a practical consequence.
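        # Illustrative sketch of that parsing behaviour, using the same host
        # header as the request below (Python 2 stdlib):
        #   >>> from urlparse import urlparse
        #   >>> urlparse('http://www.example:[email protected]/').hostname
        #   'evil.com'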
with self.assertRaises(SuspiciousOperation):
self.client.post('/password_reset/',
{'email': '[email protected]'},
HTTP_HOST='www.example:[email protected]'
)
self.assertEqual(len(mail.outbox), 0)
# Skip any 500 handler action (like sending more mail...)
@override_settings(DEBUG_PROPAGATE_EXCEPTIONS=True)
def test_poisoned_http_host_admin_site(self):
"Poisoned HTTP_HOST headers can't be used for reset emails on admin views"
with self.assertRaises(SuspiciousOperation):
self.client.post('/admin_password_reset/',
{'email': '[email protected]'},
HTTP_HOST='www.example:[email protected]'
)
self.assertEqual(len(mail.outbox), 0)
def _test_confirm_start(self):
# Start by creating the email
response = self.client.post('/password_reset/', {'email': '[email protected]'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertTrue(urlmatch is not None, "No URL found in sent email")
return urlmatch.group(), urlmatch.groups()[0]
def test_confirm_valid(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
def test_confirm_invalid(self):
url, path = self._test_confirm_start()
# Let's munge the token in the path, but keep the same length,
# in case the URLconf will reject a different length.
path = path[:-5] + ("0" * 4) + path[-1]
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_user(self):
        # Ensure that we get a 200 response for a non-existent user, not a 404
response = self.client.get('/reset/123456-1-1/')
self.assertContains(response, "The password reset link was invalid")
def test_confirm_overflow_user(self):
# Ensure that we get a 200 response for a base36 user id that overflows int
response = self.client.get('/reset/zzzzzzzzzzzzz-1-1/')
self.assertContains(response, "The password reset link was invalid")
def test_confirm_invalid_post(self):
# Same as test_confirm_invalid, but trying
# to do a POST instead.
url, path = self._test_confirm_start()
path = path[:-5] + ("0" * 4) + path[-1]
self.client.post(path, {
'new_password1': 'anewpassword',
'new_password2': ' anewpassword',
})
# Check the password has not been changed
u = User.objects.get(email='[email protected]')
self.assertTrue(not u.check_password("anewpassword"))
def test_confirm_complete(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'anewpassword'})
# It redirects us to a 'complete' page:
self.assertEqual(response.status_code, 302)
# Check the password has been changed
u = User.objects.get(email='[email protected]')
self.assertTrue(u.check_password("anewpassword"))
# Check we can't use the link again
response = self.client.get(path)
self.assertContains(response, "The password reset link was invalid")
def test_confirm_different_passwords(self):
url, path = self._test_confirm_start()
response = self.client.post(path, {'new_password1': 'anewpassword',
'new_password2': 'x'})
self.assertContainsEscaped(response, SetPasswordForm.error_messages['password_mismatch'])
@override_settings(AUTH_USER_MODEL='auth.CustomUser')
class CustomUserPasswordResetTest(AuthViewsTestCase):
fixtures = ['custom_user.json']
def _test_confirm_start(self):
# Start by creating the email
response = self.client.post('/password_reset/', {'email': '[email protected]'})
self.assertEqual(response.status_code, 302)
self.assertEqual(len(mail.outbox), 1)
return self._read_signup_email(mail.outbox[0])
def _read_signup_email(self, email):
urlmatch = re.search(r"https?://[^/]*(/.*reset/\S*)", email.body)
self.assertTrue(urlmatch is not None, "No URL found in sent email")
return urlmatch.group(), urlmatch.groups()[0]
def test_confirm_valid_custom_user(self):
url, path = self._test_confirm_start()
response = self.client.get(path)
# redirect to a 'complete' page:
self.assertContains(response, "Please enter your new password")
@skipIfCustomUser
class ChangePasswordTest(AuthViewsTestCase):
def fail_login(self, password='password'):
response = self.client.post('/login/', {
'username': 'testclient',
'password': password,
})
self.assertContainsEscaped(response, AuthenticationForm.error_messages['invalid_login'] % {
'username': User._meta.get_field('username').verbose_name
})
def logout(self):
response = self.client.get('/logout/')
def test_password_change_fails_with_invalid_old_password(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'donuts',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertContainsEscaped(response, PasswordChangeForm.error_messages['password_incorrect'])
def test_password_change_fails_with_mismatched_passwords(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'donuts',
})
self.assertContainsEscaped(response, SetPasswordForm.error_messages['password_mismatch'])
def test_password_change_succeeds(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith('/password_change/done/'))
self.fail_login()
self.login(password='password1')
def test_password_change_done_succeeds(self):
self.login()
response = self.client.post('/password_change/', {
'old_password': 'password',
'new_password1': 'password1',
'new_password2': 'password1',
})
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith('/password_change/done/'))
def test_password_change_done_fails(self):
with self.settings(LOGIN_URL='/login/'):
response = self.client.get('/password_change/done/')
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith('/login/?next=/password_change/done/'))
@skipIfCustomUser
class LoginTest(AuthViewsTestCase):
def test_current_site_in_context_after_login(self):
response = self.client.get(reverse('django.contrib.auth.views.login'))
self.assertEqual(response.status_code, 200)
if Site._meta.installed:
site = Site.objects.get_current()
self.assertEqual(response.context['site'], site)
self.assertEqual(response.context['site_name'], site.name)
else:
self.assertIsInstance(response.context['site'], RequestSite)
self.assertTrue(isinstance(response.context['form'], AuthenticationForm),
'Login form is not an AuthenticationForm')
def test_security_check(self, password='password'):
login_url = reverse('django.contrib.auth.views.login')
# Those URLs should not pass the security check
for bad_url in ('http://example.com',
'https://example.com',
'ftp://exampel.com',
'//example.com'):
nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'bad_url': urlquote(bad_url),
}
response = self.client.post(nasty_url, {
'username': 'testclient',
'password': password,
})
self.assertEqual(response.status_code, 302)
self.assertFalse(bad_url in response['Location'],
"%s should be blocked" % bad_url)
# These URLs *should* still pass the security check
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://exampel.com',
'view/?param=//example.com',
'https:///',
'//testserver/',
'/url%20with%20spaces/'): # see ticket #12534
safe_url = '%(url)s?%(next)s=%(good_url)s' % {
'url': login_url,
'next': REDIRECT_FIELD_NAME,
'good_url': urlquote(good_url),
}
response = self.client.post(safe_url, {
'username': 'testclient',
'password': password,
})
self.assertEqual(response.status_code, 302)
self.assertTrue(good_url in response['Location'],
"%s should be allowed" % good_url)
@skipIfCustomUser
class LoginURLSettings(AuthViewsTestCase):
def setUp(self):
super(LoginURLSettings, self).setUp()
self.old_LOGIN_URL = settings.LOGIN_URL
def tearDown(self):
super(LoginURLSettings, self).tearDown()
settings.LOGIN_URL = self.old_LOGIN_URL
def get_login_required_url(self, login_url):
settings.LOGIN_URL = login_url
response = self.client.get('/login_required/')
self.assertEqual(response.status_code, 302)
return response['Location']
def test_standard_login_url(self):
login_url = '/login/'
login_required_url = self.get_login_required_url(login_url)
querystring = QueryDict('', mutable=True)
querystring['next'] = '/login_required/'
self.assertEqual(login_required_url, 'http://testserver%s?%s' %
(login_url, querystring.urlencode('/')))
def test_remote_login_url(self):
login_url = 'http://remote.example.com/login'
login_required_url = self.get_login_required_url(login_url)
querystring = QueryDict('', mutable=True)
querystring['next'] = 'http://testserver/login_required/'
self.assertEqual(login_required_url,
'%s?%s' % (login_url, querystring.urlencode('/')))
def test_https_login_url(self):
login_url = 'https:///login/'
login_required_url = self.get_login_required_url(login_url)
querystring = QueryDict('', mutable=True)
querystring['next'] = 'http://testserver/login_required/'
self.assertEqual(login_required_url,
'%s?%s' % (login_url, querystring.urlencode('/')))
def test_login_url_with_querystring(self):
login_url = '/login/?pretty=1'
login_required_url = self.get_login_required_url(login_url)
querystring = QueryDict('pretty=1', mutable=True)
querystring['next'] = '/login_required/'
self.assertEqual(login_required_url, 'http://testserver/login/?%s' %
querystring.urlencode('/'))
def test_remote_login_url_with_next_querystring(self):
login_url = 'http://remote.example.com/login/'
login_required_url = self.get_login_required_url('%s?next=/default/' %
login_url)
querystring = QueryDict('', mutable=True)
querystring['next'] = 'http://testserver/login_required/'
self.assertEqual(login_required_url, '%s?%s' % (login_url,
querystring.urlencode('/')))
@skipIfCustomUser
class LogoutTest(AuthViewsTestCase):
def confirm_logged_out(self):
self.assertTrue(SESSION_KEY not in self.client.session)
def test_logout_default(self):
"Logout without next_page option renders the default template"
self.login()
response = self.client.get('/logout/')
self.assertContains(response, 'Logged out')
self.confirm_logged_out()
def test_14377(self):
# Bug 14377
self.login()
response = self.client.get('/logout/')
self.assertTrue('site' in response.context)
def test_logout_with_overridden_redirect_url(self):
# Bug 11223
self.login()
response = self.client.get('/logout/next_page/')
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith('/somewhere/'))
response = self.client.get('/logout/next_page/?next=/login/')
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith('/login/'))
self.confirm_logged_out()
def test_logout_with_next_page_specified(self):
"Logout with next_page option given redirects to specified resource"
self.login()
response = self.client.get('/logout/next_page/')
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith('/somewhere/'))
self.confirm_logged_out()
def test_logout_with_redirect_argument(self):
"Logout with query string redirects to specified resource"
self.login()
response = self.client.get('/logout/?next=/login/')
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith('/login/'))
self.confirm_logged_out()
def test_logout_with_custom_redirect_argument(self):
"Logout with custom query string redirects to specified resource"
self.login()
response = self.client.get('/logout/custom_query/?follow=/somewhere/')
self.assertEqual(response.status_code, 302)
self.assertTrue(response['Location'].endswith('/somewhere/'))
self.confirm_logged_out()
def test_security_check(self, password='password'):
logout_url = reverse('django.contrib.auth.views.logout')
# Those URLs should not pass the security check
for bad_url in ('http://example.com',
'https://example.com',
'ftp://exampel.com',
'//example.com'):
nasty_url = '%(url)s?%(next)s=%(bad_url)s' % {
'url': logout_url,
'next': REDIRECT_FIELD_NAME,
'bad_url': urlquote(bad_url),
}
self.login()
response = self.client.get(nasty_url)
self.assertEqual(response.status_code, 302)
self.assertFalse(bad_url in response['Location'],
"%s should be blocked" % bad_url)
self.confirm_logged_out()
# These URLs *should* still pass the security check
for good_url in ('/view/?param=http://example.com',
'/view/?param=https://example.com',
'/view?param=ftp://exampel.com',
'view/?param=//example.com',
'https:///',
'//testserver/',
'/url%20with%20spaces/'): # see ticket #12534
safe_url = '%(url)s?%(next)s=%(good_url)s' % {
'url': logout_url,
'next': REDIRECT_FIELD_NAME,
'good_url': urlquote(good_url),
}
self.login()
response = self.client.get(safe_url)
self.assertEqual(response.status_code, 302)
self.assertTrue(good_url in response['Location'],
"%s should be allowed" % good_url)
self.confirm_logged_out()
| nuodb/nuodb-django | test/auth/tests/views.py | Python | bsd-3-clause | 22,169 |
# -*- coding: utf-8 -*-
"""
**********
Exceptions
**********
Base exceptions and errors for NetworkX.
"""
__author__ = """Aric Hagberg ([email protected])\nPieter Swart ([email protected])\nDan Schult([email protected])\nLoïc Séguin-C. <[email protected]>"""
# Copyright (C) 2004-2016 by
# Aric Hagberg <[email protected]>
# Dan Schult <[email protected]>
# Pieter Swart <[email protected]>
# All rights reserved.
# BSD license.
#
# Exception handling
# the root of all Exceptions
class NetworkXException(Exception):
"""Base class for exceptions in NetworkX."""
class NetworkXError(NetworkXException):
"""Exception for a serious error in NetworkX"""
class NetworkXPointlessConcept(NetworkXException):
"""Harary, F. and Read, R. "Is the Null Graph a Pointless Concept?"
In Graphs and Combinatorics Conference, George Washington University.
New York: Springer-Verlag, 1973.
"""
class NetworkXAlgorithmError(NetworkXException):
"""Exception for unexpected termination of algorithms."""
class NetworkXUnfeasible(NetworkXAlgorithmError):
"""Exception raised by algorithms trying to solve a problem
instance that has no feasible solution."""
class NetworkXNoPath(NetworkXUnfeasible):
"""Exception for algorithms that should return a path when running
on graphs where such a path does not exist."""
class NetworkXNoCycle(NetworkXUnfeasible):
"""Exception for algorithms that should return a cycle when running
on graphs where such a cycle does not exist."""
class NetworkXUnbounded(NetworkXAlgorithmError):
"""Exception raised by algorithms trying to solve a maximization
or a minimization problem instance that is unbounded."""
class NetworkXNotImplemented(NetworkXException):
"""Exception raised by algorithms not implemented for a type of graph."""
| SanketDG/networkx | networkx/exception.py | Python | bsd-3-clause | 1,828 |
##############################################################################
# Copyright 2009, Gerhard Weis
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the authors nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT
##############################################################################
'''
Test cases for the isotime module.
'''
import unittest
from datetime import time
from isodate import parse_time, UTC, FixedOffset, ISO8601Error, time_isoformat
from isodate import TIME_BAS_COMPLETE, TIME_BAS_MINUTE
from isodate import TIME_EXT_COMPLETE, TIME_EXT_MINUTE
from isodate import TIME_HOUR
from isodate import TZ_BAS, TZ_EXT, TZ_HOUR
# the following list contains tuples of ISO time strings and the expected
# result from the parse_time method. A result of None means an ISO8601Error
# is expected.
TEST_CASES = [('232050', time(23, 20, 50), TIME_BAS_COMPLETE + TZ_BAS),
('23:20:50', time(23, 20, 50), TIME_EXT_COMPLETE + TZ_EXT),
('2320', time(23, 20), TIME_BAS_MINUTE),
('23:20', time(23, 20), TIME_EXT_MINUTE),
('23', time(23), TIME_HOUR),
('232050,5', time(23, 20, 50, 500000), None),
('23:20:50.5', time(23, 20, 50, 500000), None),
# test precision
('15:33:42.123456', time(15, 33, 42, 123456), None),
('15:33:42.1234564', time(15, 33, 42, 123456), None),
('15:33:42.1234557', time(15, 33, 42, 123456), None),
('2320,8', time(23, 20, 48), None),
('23:20,8', time(23, 20, 48), None),
('23,3', time(23, 18), None),
('232030Z', time(23, 20, 30, tzinfo=UTC),
TIME_BAS_COMPLETE + TZ_BAS),
('2320Z', time(23, 20, tzinfo=UTC), TIME_BAS_MINUTE + TZ_BAS),
('23Z', time(23, tzinfo=UTC), TIME_HOUR + TZ_BAS),
('23:20:30Z', time(23, 20, 30, tzinfo=UTC),
TIME_EXT_COMPLETE + TZ_EXT),
('23:20Z', time(23, 20, tzinfo=UTC), TIME_EXT_MINUTE + TZ_EXT),
('152746+0100', time(15, 27, 46,
tzinfo=FixedOffset(1, 0, '+0100')),
TIME_BAS_COMPLETE + TZ_BAS),
('152746-0500', time(15, 27, 46,
tzinfo=FixedOffset(-5, 0, '-0500')),
TIME_BAS_COMPLETE + TZ_BAS),
('152746+01', time(15, 27, 46,
tzinfo=FixedOffset(1, 0, '+01:00')),
TIME_BAS_COMPLETE + TZ_HOUR),
('152746-05', time(15, 27, 46,
tzinfo=FixedOffset(-5, -0, '-05:00')),
TIME_BAS_COMPLETE + TZ_HOUR),
('15:27:46+01:00', time(15, 27, 46,
tzinfo=FixedOffset(1, 0, '+01:00')),
TIME_EXT_COMPLETE + TZ_EXT),
('15:27:46-05:00', time(15, 27, 46,
tzinfo=FixedOffset(-5, -0, '-05:00')),
TIME_EXT_COMPLETE + TZ_EXT),
('15:27:46+01', time(15, 27, 46,
tzinfo=FixedOffset(1, 0, '+01:00')),
TIME_EXT_COMPLETE + TZ_HOUR),
('15:27:46-05', time(15, 27, 46,
tzinfo=FixedOffset(-5, -0, '-05:00')),
TIME_EXT_COMPLETE + TZ_HOUR),
('1:17:30', None, TIME_EXT_COMPLETE)]
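# Illustrative round trip based on one of the tuples above (comments only, so
# importing this module stays free of side effects):
#   parse_time('15:27:46+01:00') == time(15, 27, 46,
#                                        tzinfo=FixedOffset(1, 0, '+01:00'))
#   time_isoformat(time(15, 27, 46, tzinfo=FixedOffset(1, 0, '+01:00')),
#                  TIME_EXT_COMPLETE + TZ_EXT) == '15:27:46+01:00'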
def create_testcase(timestring, expectation, format):
"""
Create a TestCase class for a specific test.
This allows having a separate TestCase for each test tuple from the
TEST_CASES list, so that a failed test won't stop other tests.
"""
class TestTime(unittest.TestCase):
'''
A test case template to parse an ISO time string into a time
object.
'''
def test_parse(self):
'''
Parse an ISO time string and compare it to the expected value.
'''
if expectation is None:
self.assertRaises(ISO8601Error, parse_time, timestring)
else:
result = parse_time(timestring)
self.assertEqual(result, expectation)
def test_format(self):
'''
Take time object and create ISO string from it.
This is the reverse test to test_parse.
'''
if expectation is None:
self.assertRaises(AttributeError,
time_isoformat, expectation, format)
elif format is not None:
self.assertEqual(time_isoformat(expectation, format),
timestring)
return unittest.TestLoader().loadTestsFromTestCase(TestTime)
def test_suite():
'''
Construct a TestSuite instance for all test cases.
'''
suite = unittest.TestSuite()
for timestring, expectation, format in TEST_CASES:
suite.addTest(create_testcase(timestring, expectation, format))
return suite
# load_tests Protocol
def load_tests(loader, tests, pattern):
return test_suite()
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')
| Nexenta/s3-tests | virtualenv/lib/python2.7/site-packages/isodate/tests/test_time.py | Python | mit | 6,554 |
"package1.subpackage.module"
| metachris/py2app | py2app_tests/basic_app_with_encoding/package1/subpackage/module.py | Python | mit | 29 |
#!/usr/bin/python
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Fix sys.path so it can find our libraries.
This file is named google3.py because gpylint specifically ignores it when
complaining about the order of import statements - google3 should always
come before other non-python-standard imports.
"""
__author__ = '[email protected] (Avery Pennarun)'
import tr.google3 #pylint: disable-msg=C6204,W0611
| omererdem/honeything | src/cwmp/google3.py | Python | gpl-3.0 | 964 |
# Copyright (C) 2010-2015 Cuckoo Foundation.
# This file is part of Cuckoo Sandbox - http://www.cuckoosandbox.org
# See the file 'docs/LICENSE' for copying permission.
import os
import time
import shutil
import logging
import Queue
from threading import Thread, Lock
from lib.cuckoo.common.config import Config
from lib.cuckoo.common.constants import CUCKOO_ROOT
from lib.cuckoo.common.exceptions import CuckooMachineError, CuckooGuestError
from lib.cuckoo.common.exceptions import CuckooOperationalError
from lib.cuckoo.common.exceptions import CuckooCriticalError
from lib.cuckoo.common.objects import File
from lib.cuckoo.common.utils import create_folder
from lib.cuckoo.core.database import Database, TASK_COMPLETED, TASK_REPORTED
from lib.cuckoo.core.guest import GuestManager
from lib.cuckoo.core.plugins import list_plugins, RunAuxiliary, RunProcessing
from lib.cuckoo.core.plugins import RunSignatures, RunReporting
from lib.cuckoo.core.resultserver import ResultServer
log = logging.getLogger(__name__)
machinery = None
machine_lock = Lock()
latest_symlink_lock = Lock()
active_analysis_count = 0
class CuckooDeadMachine(Exception):
"""Exception thrown when a machine turns dead.
When this exception has been thrown, the analysis task will start again,
and will try to use another machine, when available.
"""
pass
class AnalysisManager(Thread):
"""Analysis Manager.
This class handles the full analysis process for a given task. It takes
care of selecting the analysis machine, preparing the configuration and
interacting with the guest agent and analyzer components to launch and
complete the analysis and store, process and report its results.
"""
def __init__(self, task, error_queue):
"""@param task: task object containing the details for the analysis."""
Thread.__init__(self)
Thread.daemon = True
self.task = task
self.errors = error_queue
self.cfg = Config()
self.storage = ""
self.binary = ""
self.machine = None
def init_storage(self):
"""Initialize analysis storage folder."""
self.storage = os.path.join(CUCKOO_ROOT,
"storage",
"analyses",
str(self.task.id))
# If the analysis storage folder already exists, we need to abort the
# analysis or previous results will be overwritten and lost.
if os.path.exists(self.storage):
log.error("Analysis results folder already exists at path \"%s\","
" analysis aborted", self.storage)
return False
# If we're not able to create the analysis storage folder, we have to
# abort the analysis.
try:
create_folder(folder=self.storage)
except CuckooOperationalError:
log.error("Unable to create analysis folder %s", self.storage)
return False
return True
def check_file(self):
"""Checks the integrity of the file to be analyzed."""
sample = Database().view_sample(self.task.sample_id)
sha256 = File(self.task.target).get_sha256()
if sha256 != sample.sha256:
log.error("Target file has been modified after submission: \"%s\"", self.task.target)
return False
return True
def store_file(self):
"""Store a copy of the file being analyzed."""
if not os.path.exists(self.task.target):
log.error("The file to analyze does not exist at path \"%s\", "
"analysis aborted", self.task.target)
return False
sha256 = File(self.task.target).get_sha256()
self.binary = os.path.join(CUCKOO_ROOT, "storage", "binaries", sha256)
if os.path.exists(self.binary):
log.info("File already exists at \"%s\"", self.binary)
else:
# TODO: do we really need to abort the analysis in case we are not
# able to store a copy of the file?
try:
shutil.copy(self.task.target, self.binary)
except (IOError, shutil.Error) as e:
log.error("Unable to store file from \"%s\" to \"%s\", "
"analysis aborted", self.task.target, self.binary)
return False
try:
new_binary_path = os.path.join(self.storage, "binary")
if hasattr(os, "symlink"):
os.symlink(self.binary, new_binary_path)
else:
shutil.copy(self.binary, new_binary_path)
except (AttributeError, OSError) as e:
log.error("Unable to create symlink/copy from \"%s\" to "
"\"%s\": %s", self.binary, self.storage, e)
return True
def acquire_machine(self):
"""Acquire an analysis machine from the pool of available ones."""
machine = None
        # Start a loop to acquire a machine to run the analysis on.
while True:
machine_lock.acquire()
# In some cases it's possible that we enter this loop without
# having any available machines. We should make sure this is not
            # such a case, or the analysis task will fail completely.
if not machinery.availables():
machine_lock.release()
time.sleep(1)
continue
# If the user specified a specific machine ID, a platform to be
# used or machine tags acquire the machine accordingly.
try:
machine = machinery.acquire(machine_id=self.task.machine,
platform=self.task.platform,
tags=self.task.tags)
finally:
machine_lock.release()
# If no machine is available at this moment, wait for one second
# and try again.
if not machine:
log.debug("Task #%d: no machine available yet", self.task.id)
time.sleep(1)
else:
log.info("Task #%d: acquired machine %s (label=%s)",
self.task.id, machine.name, machine.label)
break
self.machine = machine
def build_options(self):
"""Generate analysis options.
@return: options dict.
"""
options = {}
options["id"] = self.task.id
options["ip"] = self.machine.resultserver_ip
options["port"] = self.machine.resultserver_port
options["category"] = self.task.category
options["target"] = self.task.target
options["package"] = self.task.package
options["options"] = self.task.options
options["enforce_timeout"] = self.task.enforce_timeout
options["clock"] = self.task.clock
options["terminate_processes"] = self.cfg.cuckoo.terminate_processes
if not self.task.timeout or self.task.timeout == 0:
options["timeout"] = self.cfg.timeouts.default
else:
options["timeout"] = self.task.timeout
if self.task.category == "file":
options["file_name"] = File(self.task.target).get_name()
options["file_type"] = File(self.task.target).get_type()
return options
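    # Illustrative shape of the dict returned above (all values are assumed
    # example data, not produced by this class):
    #   {'id': 42, 'ip': '192.168.56.1', 'port': 2042, 'category': 'file',
    #    'target': '/tmp/sample.exe', 'package': 'exe', 'options': '',
    #    'enforce_timeout': False, 'clock': None,
    #    'terminate_processes': False, 'timeout': 120,
    #    'file_name': 'sample.exe', 'file_type': 'PE32 executable ...'}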
def launch_analysis(self):
"""Start analysis."""
succeeded = False
dead_machine = False
log.info("Starting analysis of %s \"%s\" (task=%d)",
self.task.category.upper(), self.task.target, self.task.id)
# Initialize the analysis folders.
if not self.init_storage():
return False
if self.task.category == "file":
# Check whether the file has been changed for some unknown reason.
# And fail this analysis if it has been modified.
if not self.check_file():
return False
# Store a copy of the original file.
if not self.store_file():
return False
# Acquire analysis machine.
try:
self.acquire_machine()
except CuckooOperationalError as e:
log.error("Cannot acquire machine: {0}".format(e))
return False
# Generate the analysis configuration file.
options = self.build_options()
# At this point we can tell the ResultServer about it.
try:
ResultServer().add_task(self.task, self.machine)
except Exception as e:
machinery.release(self.machine.label)
self.errors.put(e)
aux = RunAuxiliary(task=self.task, machine=self.machine)
aux.start()
try:
# Mark the selected analysis machine in the database as started.
guest_log = Database().guest_start(self.task.id,
self.machine.name,
self.machine.label,
machinery.__class__.__name__)
# Start the machine.
machinery.start(self.machine.label)
# Initialize the guest manager.
guest = GuestManager(self.machine.name, self.machine.ip,
self.machine.platform)
# Start the analysis.
guest.start_analysis(options)
guest.wait_for_completion()
succeeded = True
except CuckooMachineError as e:
log.error(str(e), extra={"task_id": self.task.id})
dead_machine = True
except CuckooGuestError as e:
log.error(str(e), extra={"task_id": self.task.id})
finally:
# Stop Auxiliary modules.
aux.stop()
# Take a memory dump of the machine before shutting it off.
if self.cfg.cuckoo.memory_dump or self.task.memory:
try:
dump_path = os.path.join(self.storage, "memory.dmp")
machinery.dump_memory(self.machine.label, dump_path)
except NotImplementedError:
log.error("The memory dump functionality is not available "
"for the current machine manager.")
except CuckooMachineError as e:
log.error(e)
try:
# Stop the analysis machine.
machinery.stop(self.machine.label)
except CuckooMachineError as e:
log.warning("Unable to stop machine %s: %s",
self.machine.label, e)
# Mark the machine in the database as stopped. Unless this machine
# has been marked as dead, we just keep it as "started" in the
# database so it'll not be used later on in this session.
Database().guest_stop(guest_log)
# After all this, we can make the ResultServer forget about the
# internal state for this analysis task.
ResultServer().del_task(self.task, self.machine)
if dead_machine:
# Remove the guest from the database, so that we can assign a
# new guest when the task is being analyzed with another
# machine.
Database().guest_remove(guest_log)
# Remove the analysis directory that has been created so
# far, as launch_analysis() is going to be doing that again.
shutil.rmtree(self.storage)
# This machine has turned dead, so we throw an exception here
# which informs the AnalysisManager that it should analyze
# this task again with another available machine.
raise CuckooDeadMachine()
try:
# Release the analysis machine. But only if the machine has
# not turned dead yet.
machinery.release(self.machine.label)
except CuckooMachineError as e:
log.error("Unable to release machine %s, reason %s. "
"You might need to restore it manually.",
self.machine.label, e)
return succeeded
def process_results(self):
"""Process the analysis results and generate the enabled reports."""
results = RunProcessing(task_id=self.task.id).run()
RunSignatures(results=results).run()
RunReporting(task_id=self.task.id, results=results).run()
# If the target is a file and the user enabled the option,
# delete the original copy.
if self.task.category == "file" and self.cfg.cuckoo.delete_original:
if not os.path.exists(self.task.target):
log.warning("Original file does not exist anymore: \"%s\": "
"File not found.", self.task.target)
else:
try:
os.remove(self.task.target)
except OSError as e:
log.error("Unable to delete original file at path "
"\"%s\": %s", self.task.target, e)
# If the target is a file and the user enabled the delete copy of
# the binary option, then delete the copy.
if self.task.category == "file" and self.cfg.cuckoo.delete_bin_copy:
if not os.path.exists(self.binary):
log.warning("Copy of the original file does not exist anymore: \"%s\": File not found", self.binary)
else:
try:
os.remove(self.binary)
except OSError as e:
log.error("Unable to delete the copy of the original file at path \"%s\": %s", self.binary, e)
log.info("Task #%d: reports generation completed (path=%s)",
self.task.id, self.storage)
return True
def run(self):
"""Run manager thread."""
global active_analysis_count
active_analysis_count += 1
try:
while True:
try:
success = self.launch_analysis()
except CuckooDeadMachine:
continue
break
Database().set_status(self.task.id, TASK_COMPLETED)
log.debug("Released database task #%d with status %s",
self.task.id, success)
if self.cfg.cuckoo.process_results:
self.process_results()
Database().set_status(self.task.id, TASK_REPORTED)
# We make a symbolic link ("latest") which links to the latest
# analysis - this is useful for debugging purposes. This is only
# supported under systems that support symbolic links.
if hasattr(os, "symlink"):
latest = os.path.join(CUCKOO_ROOT, "storage",
"analyses", "latest")
# First we have to remove the existing symbolic link, then we
# have to create the new one.
# Deal with race conditions using a lock.
latest_symlink_lock.acquire()
try:
if os.path.exists(latest):
os.remove(latest)
os.symlink(self.storage, latest)
except OSError as e:
log.warning("Error pointing latest analysis symlink: %s" % e)
finally:
latest_symlink_lock.release()
log.info("Task #%d: analysis procedure completed", self.task.id)
except:
log.exception("Failure in AnalysisManager.run")
active_analysis_count -= 1
class Scheduler:
"""Tasks Scheduler.
This class is responsible for the main execution loop of the tool. It
prepares the analysis machines and keep waiting and loading for new
analysis tasks.
Whenever a new task is available, it launches AnalysisManager which will
take care of running the full analysis process and operating with the
assigned analysis machine.
"""
def __init__(self, maxcount=None):
self.running = True
self.cfg = Config()
self.db = Database()
self.maxcount = maxcount
self.total_analysis_count = 0
def initialize(self):
"""Initialize the machine manager."""
global machinery
machinery_name = self.cfg.cuckoo.machinery
log.info("Using \"%s\" machine manager", machinery_name)
# Get registered class name. Only one machine manager is imported,
# therefore there should be only one class in the list.
plugin = list_plugins("machinery")[0]
# Initialize the machine manager.
machinery = plugin()
# Find its configuration file.
conf = os.path.join(CUCKOO_ROOT, "conf", "%s.conf" % machinery_name)
if not os.path.exists(conf):
raise CuckooCriticalError("The configuration file for machine "
"manager \"{0}\" does not exist at path:"
" {1}".format(machinery_name, conf))
# Provide a dictionary with the configuration options to the
# machine manager instance.
machinery.set_options(Config(machinery_name))
# Initialize the machine manager.
try:
machinery.initialize(machinery_name)
except CuckooMachineError as e:
raise CuckooCriticalError("Error initializing machines: %s" % e)
# At this point all the available machines should have been identified
# and added to the list. If none were found, Cuckoo needs to abort the
# execution.
if not len(machinery.machines()):
raise CuckooCriticalError("No machines available.")
else:
log.info("Loaded %s machine/s", len(machinery.machines()))
if len(machinery.machines()) > 1 and self.db.engine.name == "sqlite":
log.warning("As you've configured Cuckoo to execute parallel "
"analyses, we recommend you to switch to a MySQL "
"a PostgreSQL database as SQLite might cause some "
"issues.")
if len(machinery.machines()) > 4 and self.cfg.cuckoo.process_results:
log.warning("When running many virtual machines it is recommended "
"to process the results in a separate process.py to "
"increase throughput and stability. Please read the "
"documentation about the `Processing Utility`.")
def stop(self):
"""Stop scheduler."""
self.running = False
        # Shutdown machine manager (used to kill machines that are still alive).
machinery.shutdown()
def start(self):
"""Start scheduler."""
self.initialize()
log.info("Waiting for analysis tasks.")
# Message queue with threads to transmit exceptions (used as IPC).
errors = Queue.Queue()
# Command-line overrides the configuration file.
if self.maxcount is None:
self.maxcount = self.cfg.cuckoo.max_analysis_count
# This loop runs forever.
while self.running:
time.sleep(1)
# If not enough free disk space is available, then we print an
# error message and wait another round (this check is ignored
# when the freespace configuration variable is set to zero).
if self.cfg.cuckoo.freespace:
# Resolve the full base path to the analysis folder, just in
# case somebody decides to make a symbolic link out of it.
dir_path = os.path.join(CUCKOO_ROOT, "storage", "analyses")
# TODO: Windows support
if hasattr(os, "statvfs"):
dir_stats = os.statvfs(dir_path)
# Calculate the free disk space in megabytes.
space_available = dir_stats.f_bavail * dir_stats.f_frsize
space_available /= 1024 * 1024
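                    # Worked example (numbers assumed): 1,000,000 free blocks
                    # (f_bavail) of 4096 bytes (f_frsize) give 4,096,000,000
                    # bytes, i.e. roughly 3906 MB after the division above.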
if space_available < self.cfg.cuckoo.freespace:
log.error("Not enough free disk space! (Only %d MB!)",
space_available)
continue
# Have we limited the number of concurrently executing machines?
if self.cfg.cuckoo.max_machines_count > 0:
# Are too many running?
if len(machinery.running()) >= self.cfg.cuckoo.max_machines_count:
continue
# If no machines are available, it's pointless to fetch for
# pending tasks. Loop over.
if not machinery.availables():
continue
# Exits if max_analysis_count is defined in the configuration
# file and has been reached.
if self.maxcount and self.total_analysis_count >= self.maxcount:
if active_analysis_count <= 0:
self.stop()
else:
# Fetch a pending analysis task.
                # TODO: this only covers submissions made with --machine; other attributes (tags, etc.) still need to be handled.
for machine in self.db.get_available_machines():
task = self.db.fetch(machine=machine.name)
if task:
log.debug("Processing task #%s", task.id)
self.total_analysis_count += 1
# Initialize and start the analysis manager.
analysis = AnalysisManager(task, errors)
analysis.start()
# Deal with errors.
try:
raise errors.get(block=False)
except Queue.Empty:
pass
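# Illustrative sketch (not part of the original module): how a startup script
# might drive this scheduler, assuming the enclosing class is named Scheduler
# as in Cuckoo's scheduler.py and that the usual Cuckoo bootstrap (config,
# logging, database) has already run.
#
#     sched = Scheduler(maxcount=10)   # stop after 10 analyses; None = unlimited
#     try:
#         sched.start()                # blocks in the polling loop above
#     except KeyboardInterrupt:
#         sched.stop()                 # flips self.running and shuts machinery down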
| liorvh/CuckooSploit | lib/cuckoo/core/scheduler.py | Python | gpl-3.0 | 21,948 |
"""
This file is part of py-sonic.
py-sonic is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
py-sonic is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with py-sonic. If not, see <http://www.gnu.org/licenses/>
"""
class SonicError(Exception):
pass
class ParameterError(SonicError):
pass
class VersionError(SonicError):
pass
class CredentialError(SonicError):
pass
class AuthError(SonicError):
pass
class LicenseError(SonicError):
pass
class DataNotFoundError(SonicError):
pass
class ArgumentError(SonicError):
pass
# This maps the error code numbers from the Subsonic server to their
# appropriate Exceptions
ERR_CODE_MAP = {
0: SonicError ,
10: ParameterError ,
20: VersionError ,
30: VersionError ,
40: CredentialError ,
50: AuthError ,
60: LicenseError ,
70: DataNotFoundError ,
}
def getExcByCode(code):
code = int(code)
if code in ERR_CODE_MAP:
return ERR_CODE_MAP[code]
return SonicError
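# Example (illustrative, not part of the original module): translating a
# Subsonic error payload into the matching exception class via the map above.
#
#     err = {'code': 40, 'message': 'Wrong username or password.'}
#     raise getExcByCode(err['code'])(err['message'])   # raises CredentialError
#
# Codes that are not in ERR_CODE_MAP fall back to the generic SonicError.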
| ties/py-sonic | libsonic/errors.py | Python | gpl-3.0 | 1,413 |
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2014 Thomas Voegtlin
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import socket
import time
import sys
import os
import threading
import traceback
import json
import Queue
import util
from network import Network
from util import print_error, print_stderr, parse_json
from simple_config import SimpleConfig
from daemon import NetworkServer, DAEMON_PORT
class NetworkProxy(threading.Thread):
def __init__(self, socket, config=None):
if config is None:
config = {} # Do not use mutables as default arguments!
threading.Thread.__init__(self)
self.config = SimpleConfig(config) if type(config) == type({}) else config
self.message_id = 0
self.unanswered_requests = {}
self.subscriptions = {}
self.debug = False
self.lock = threading.Lock()
self.pending_transactions_for_notifications = []
self.callbacks = {}
self.running = True
self.daemon = True
if socket:
self.pipe = util.SocketPipe(socket)
self.network = None
else:
self.network = Network(config)
self.pipe = util.QueuePipe(send_queue=self.network.requests_queue)
self.network.start(self.pipe.get_queue)
for key in ['status','banner','updated','servers','interfaces']:
value = self.network.get_status_value(key)
self.pipe.get_queue.put({'method':'network.status', 'params':[key, value]})
# status variables
self.status = 'connecting'
self.servers = {}
self.banner = ''
self.blockchain_height = 0
self.server_height = 0
self.interfaces = []
def is_running(self):
return self.running
def run(self):
while self.is_running():
try:
response = self.pipe.get()
except util.timeout:
continue
if response is None:
break
self.process(response)
self.trigger_callback('stop')
if self.network:
self.network.stop()
print_error("NetworkProxy: terminating")
def process(self, response):
if self.debug:
print_error("<--", response)
if response.get('method') == 'network.status':
key, value = response.get('params')
if key == 'status':
self.status = value
elif key == 'banner':
self.banner = value
elif key == 'updated':
self.blockchain_height, self.server_height = value
elif key == 'servers':
self.servers = value
elif key == 'interfaces':
self.interfaces = value
self.trigger_callback(key)
return
msg_id = response.get('id')
result = response.get('result')
error = response.get('error')
if msg_id is not None:
with self.lock:
method, params, callback = self.unanswered_requests.pop(msg_id)
else:
method = response.get('method')
params = response.get('params')
with self.lock:
for k,v in self.subscriptions.items():
if (method, params) in v:
callback = k
break
else:
print_error( "received unexpected notification", method, params)
return
r = {'method':method, 'params':params, 'result':result, 'id':msg_id, 'error':error}
callback(r)
def send(self, messages, callback):
"""return the ids of the requests that we sent"""
# detect subscriptions
sub = []
for message in messages:
m, v = message
if m[-10:] == '.subscribe':
sub.append(message)
if sub:
with self.lock:
if self.subscriptions.get(callback) is None:
self.subscriptions[callback] = []
for message in sub:
if message not in self.subscriptions[callback]:
self.subscriptions[callback].append(message)
with self.lock:
requests = []
ids = []
for m in messages:
method, params = m
request = { 'id':self.message_id, 'method':method, 'params':params }
self.unanswered_requests[self.message_id] = method, params, callback
ids.append(self.message_id)
requests.append(request)
if self.debug:
print_error("-->", request)
self.message_id += 1
self.pipe.send_all(requests)
return ids
def synchronous_get(self, requests, timeout=100000000):
queue = Queue.Queue()
ids = self.send(requests, queue.put)
id2 = ids[:]
res = {}
while ids:
r = queue.get(True, timeout)
_id = r.get('id')
ids.remove(_id)
if r.get('error'):
return BaseException(r.get('error'))
            res[_id] = r.get('result')
out = []
for _id in id2:
out.append(res[_id])
return out
def get_servers(self):
return self.servers
def get_interfaces(self):
return self.interfaces
def get_header(self, height):
return self.synchronous_get([('network.get_header',[height])])[0]
def get_local_height(self):
return self.blockchain_height
def get_server_height(self):
return self.server_height
def is_connected(self):
return self.status == 'connected'
def is_connecting(self):
return self.status == 'connecting'
def is_up_to_date(self):
return self.unanswered_requests == {}
def get_parameters(self):
return self.synchronous_get([('network.get_parameters',[])])[0]
def set_parameters(self, *args):
return self.synchronous_get([('network.set_parameters',args)])[0]
def stop(self):
self.running = False
def stop_daemon(self):
return self.send([('daemon.stop',[])], None)
def register_callback(self, event, callback):
with self.lock:
if not self.callbacks.get(event):
self.callbacks[event] = []
self.callbacks[event].append(callback)
def trigger_callback(self, event):
with self.lock:
callbacks = self.callbacks.get(event,[])[:]
if callbacks:
[callback() for callback in callbacks]
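# Illustrative sketch (not part of the original module): a typical client-side
# call pattern for the proxy above. The daemon socket ("sock") and config are
# assumed to have been set up elsewhere, e.g. by the Electrum daemon code.
#
#     proxy = NetworkProxy(sock, config)
#     proxy.start()
#     # synchronous_get() sends the batch and blocks until every id is answered.
#     header = proxy.synchronous_get([('network.get_header', [0])])[0]
#     proxy.stop()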
| Kefkius/electrum-frc | lib/network_proxy.py | Python | gpl-3.0 | 7,346 |
#!/usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2005-2010 (ita)
"""
ConfigSet: a special dict
The values put in :py:class:`ConfigSet` must be lists
"""
import copy, re, os
from waflib import Logs, Utils
re_imp = re.compile('^(#)*?([^#=]*?)\ =\ (.*?)$', re.M)
class ConfigSet(object):
"""
	A dict that honors serialization and parent relationships. The serialization format
is human-readable (python-like) and performed by using eval() and repr().
For high performance prefer pickle. Do not store functions as they are not serializable.
The values can be accessed by attributes or by keys::
from waflib.ConfigSet import ConfigSet
env = ConfigSet()
env.FOO = 'test'
env['FOO'] = 'test'
"""
__slots__ = ('table', 'parent')
def __init__(self, filename=None):
self.table = {}
"""
Internal dict holding the object values
"""
#self.parent = None
if filename:
self.load(filename)
def __contains__(self, key):
"""
Enable the *in* syntax::
if 'foo' in env:
print(env['foo'])
"""
if key in self.table: return True
try: return self.parent.__contains__(key)
except AttributeError: return False # parent may not exist
def keys(self):
"""Dict interface (unknown purpose)"""
keys = set()
cur = self
while cur:
keys.update(cur.table.keys())
cur = getattr(cur, 'parent', None)
keys = list(keys)
keys.sort()
return keys
def __str__(self):
"""Text representation of the ConfigSet (for debugging purposes)"""
return "\n".join(["%r %r" % (x, self.__getitem__(x)) for x in self.keys()])
def __getitem__(self, key):
"""
Dictionary interface: get value from key::
def configure(conf):
conf.env['foo'] = {}
print(env['foo'])
"""
try:
while 1:
x = self.table.get(key, None)
if not x is None:
return x
self = self.parent
except AttributeError:
return []
def __setitem__(self, key, value):
"""
		Dictionary interface: set the value for a key
"""
self.table[key] = value
def __delitem__(self, key):
"""
		Dictionary interface: delete a key (its value is reset to an empty list)
"""
self[key] = []
def __getattr__(self, name):
"""
Attribute access provided for convenience. The following forms are equivalent::
def configure(conf):
conf.env.value
conf.env['value']
"""
if name in self.__slots__:
return object.__getattr__(self, name)
else:
return self[name]
def __setattr__(self, name, value):
"""
Attribute access provided for convenience. The following forms are equivalent::
def configure(conf):
conf.env.value = x
env['value'] = x
"""
if name in self.__slots__:
object.__setattr__(self, name, value)
else:
self[name] = value
def __delattr__(self, name):
"""
Attribute access provided for convenience. The following forms are equivalent::
def configure(conf):
del env.value
del env['value']
"""
if name in self.__slots__:
object.__delattr__(self, name)
else:
del self[name]
def derive(self):
"""
Returns a new ConfigSet deriving from self. The copy returned
will be a shallow copy::
from waflib.ConfigSet import ConfigSet
env = ConfigSet()
env.append_value('CFLAGS', ['-O2'])
child = env.derive()
child.CFLAGS.append('test') # warning! this will modify 'env'
child.CFLAGS = ['-O3'] # new list, ok
child.append_value('CFLAGS', ['-O3']) # ok
Use :py:func:`ConfigSet.detach` to detach the child from the parent.
"""
newenv = ConfigSet()
newenv.parent = self
return newenv
def detach(self):
"""
Detach self from its parent (if existing)
Modifying the parent :py:class:`ConfigSet` will not change the current object
Modifying this :py:class:`ConfigSet` will not modify the parent one.
"""
tbl = self.get_merged_dict()
try:
delattr(self, 'parent')
except AttributeError:
pass
else:
keys = tbl.keys()
for x in keys:
tbl[x] = copy.deepcopy(tbl[x])
self.table = tbl
return self
def get_flat(self, key):
"""
Return a value as a string. If the input is a list, the value returned is space-separated.
:param key: key to use
:type key: string
"""
s = self[key]
if isinstance(s, str): return s
return ' '.join(s)
def _get_list_value_for_modification(self, key):
"""
Return a list value for further modification.
The list may be modified inplace and there is no need to do this afterwards::
self.table[var] = value
"""
try:
value = self.table[key]
except KeyError:
try: value = self.parent[key]
except AttributeError: value = []
if isinstance(value, list):
value = value[:]
else:
value = [value]
else:
if not isinstance(value, list):
value = [value]
self.table[key] = value
return value
def append_value(self, var, val):
"""
Appends a value to the specified config key::
def build(bld):
bld.env.append_value('CFLAGS', ['-O2'])
The value must be a list or a tuple
"""
if isinstance(val, str): # if there were string everywhere we could optimize this
val = [val]
current_value = self._get_list_value_for_modification(var)
current_value.extend(val)
def prepend_value(self, var, val):
"""
Prepends a value to the specified item::
def configure(conf):
conf.env.prepend_value('CFLAGS', ['-O2'])
The value must be a list or a tuple
"""
if isinstance(val, str):
val = [val]
self.table[var] = val + self._get_list_value_for_modification(var)
def append_unique(self, var, val):
"""
Append a value to the specified item only if it's not already present::
def build(bld):
bld.env.append_unique('CFLAGS', ['-O2', '-g'])
The value must be a list or a tuple
"""
if isinstance(val, str):
val = [val]
current_value = self._get_list_value_for_modification(var)
for x in val:
if x not in current_value:
current_value.append(x)
def get_merged_dict(self):
"""
Compute the merged dictionary from the fusion of self and all its parent
:rtype: a ConfigSet object
"""
table_list = []
env = self
while 1:
table_list.insert(0, env.table)
try: env = env.parent
except AttributeError: break
merged_table = {}
for table in table_list:
merged_table.update(table)
return merged_table
def store(self, filename):
"""
Write the :py:class:`ConfigSet` data into a file. See :py:meth:`ConfigSet.load` for reading such files.
:param filename: file to use
:type filename: string
"""
try:
os.makedirs(os.path.split(filename)[0])
except OSError:
pass
buf = []
merged_table = self.get_merged_dict()
keys = list(merged_table.keys())
keys.sort()
try:
fun = ascii
except NameError:
fun = repr
for k in keys:
if k != 'undo_stack':
buf.append('%s = %s\n' % (k, fun(merged_table[k])))
Utils.writef(filename, ''.join(buf))
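	# Illustrative note (not part of the original waf source): store() writes one
	# "KEY = repr(value)" line per key, which is exactly the format that re_imp
	# and load() parse back, e.g.:
	#
	#     CFLAGS = ['-O2', '-g']
	#     PREFIX = '/usr/local'
	#
	# load() then rebuilds the table by applying eval() to each right-hand side.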
def load(self, filename):
"""
Retrieve the :py:class:`ConfigSet` data from a file. See :py:meth:`ConfigSet.store` for writing such files
:param filename: file to use
:type filename: string
"""
tbl = self.table
code = Utils.readf(filename, m='rU')
for m in re_imp.finditer(code):
g = m.group
tbl[g(2)] = eval(g(3))
Logs.debug('env: %s' % str(self.table))
def update(self, d):
"""
Dictionary interface: replace values from another dict
:param d: object to use the value from
:type d: dict-like object
"""
for k, v in d.items():
self[k] = v
def stash(self):
"""
Store the object state, to provide a kind of transaction support::
env = ConfigSet()
env.stash()
try:
env.append_value('CFLAGS', '-O3')
call_some_method(env)
finally:
env.revert()
The history is kept in a stack, and is lost during the serialization by :py:meth:`ConfigSet.store`
"""
orig = self.table
tbl = self.table = self.table.copy()
for x in tbl.keys():
tbl[x] = copy.deepcopy(tbl[x])
self.undo_stack = self.undo_stack + [orig]
def revert(self):
"""
Reverts the object to a previous state. See :py:meth:`ConfigSet.stash`
"""
self.table = self.undo_stack.pop(-1)
| evancich/apm_motor | modules/waf/waflib/ConfigSet.py | Python | gpl-3.0 | 8,007 |
"""timezone at metavj level
Revision ID: 224621d9edde
Revises: 14346346596e
Create Date: 2015-12-21 16:52:30.275508
"""
# revision identifiers, used by Alembic.
revision = '224621d9edde'
down_revision = '5a590ae95255'
from alembic import op
import sqlalchemy as sa
import geoalchemy2 as ga
def upgrade():
op.create_table('timezone',
sa.Column('id', sa.BIGINT(), nullable=False),
sa.Column('name', sa.TEXT(), nullable=False),
sa.PrimaryKeyConstraint('id'),
schema='navitia'
)
op.create_table('tz_dst',
sa.Column('id', sa.BIGINT(), nullable=False),
sa.Column('tz_id', sa.BIGINT(), nullable=False),
sa.Column('beginning_date', sa.DATE(), nullable=False),
sa.Column('end_date', sa.DATE(), nullable=False),
sa.Column('utc_offset', sa.INTEGER(), nullable=False),
sa.ForeignKeyConstraint(['tz_id'], [u'navitia.timezone.id'], name=u'associated_tz_dst_fkey'),
sa.PrimaryKeyConstraint('id'),
schema='navitia'
)
op.add_column(u'meta_vj', sa.Column('timezone', sa.BIGINT(), nullable=True), schema=u'navitia')
op.drop_column(u'vehicle_journey', 'utc_to_local_offset', schema=u'navitia')
def downgrade():
op.drop_column(u'meta_vj', 'timezone', schema=u'navitia')
op.drop_table('tz_dst', schema='navitia')
op.drop_table('timezone', schema='navitia')
op.add_column(u'vehicle_journey', sa.Column('utc_to_local_offset', sa.BIGINT(), nullable=True), schema=u'navitia')
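# Illustrative usage note (not part of the original migration): with the Alembic
# CLI configured for this project, this revision is applied or reverted with:
#
#     alembic upgrade 224621d9edde
#     alembic downgrade 5a590ae95255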
| TeXitoi/navitia | source/sql/alembic/versions/224621d9edde_timezone_at_metavj_level.py | Python | agpl-3.0 | 1,446 |
#
# bloom.py
#
# Distributed under the MIT/X11 software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
"""Bloom filter support"""
from __future__ import absolute_import, division, print_function, unicode_literals
import struct
import sys
import math
import bitcoin.core
import bitcoin.core.serialize
def ROTL32(x, r):
assert x <= 0xFFFFFFFF
return ((x << r) & 0xFFFFFFFF) | (x >> (32 - r))
def MurmurHash3(nHashSeed, vDataToHash):
"""MurmurHash3 (x86_32)
Used for bloom filters. See http://code.google.com/p/smhasher/source/browse/trunk/MurmurHash3.cpp
"""
assert nHashSeed <= 0xFFFFFFFF
h1 = nHashSeed
c1 = 0xcc9e2d51
c2 = 0x1b873593
# body
i = 0
while i < len(vDataToHash) - len(vDataToHash) % 4 \
and len(vDataToHash) - i >= 4:
k1 = struct.unpack(b"<L", vDataToHash[i:i+4])[0]
k1 = (k1 * c1) & 0xFFFFFFFF
k1 = ROTL32(k1, 15)
k1 = (k1 * c2) & 0xFFFFFFFF
h1 ^= k1
h1 = ROTL32(h1, 13)
h1 = (((h1*5) & 0xFFFFFFFF) + 0xe6546b64) & 0xFFFFFFFF
i += 4
# tail
k1 = 0
j = (len(vDataToHash) // 4) * 4
import sys
bord = ord
if sys.version > '3':
# In Py3 indexing bytes returns numbers, not characters
bord = lambda x: x
if len(vDataToHash) & 3 >= 3:
k1 ^= bord(vDataToHash[j+2]) << 16
if len(vDataToHash) & 3 >= 2:
k1 ^= bord(vDataToHash[j+1]) << 8
if len(vDataToHash) & 3 >= 1:
k1 ^= bord(vDataToHash[j])
k1 &= 0xFFFFFFFF
k1 = (k1 * c1) & 0xFFFFFFFF
k1 = ROTL32(k1, 15)
k1 = (k1 * c2) & 0xFFFFFFFF
h1 ^= k1
# finalization
h1 ^= len(vDataToHash) & 0xFFFFFFFF
h1 ^= (h1 & 0xFFFFFFFF) >> 16
h1 *= 0x85ebca6b
h1 ^= (h1 & 0xFFFFFFFF) >> 13
h1 *= 0xc2b2ae35
h1 ^= (h1 & 0xFFFFFFFF) >> 16
return h1 & 0xFFFFFFFF
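# Quick sanity checks (illustrative, not part of the original module). They
# follow directly from the algorithm above: with empty input and seed 0 the
# body and tail loops are skipped and the finalizer mixes 0 to 0, and every
# result is masked to an unsigned 32-bit value.
#
#     assert MurmurHash3(0, b'') == 0
#     assert 0 <= MurmurHash3(0xFBA4C795, b'hello') <= 0xFFFFFFFF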
class CBloomFilter(bitcoin.core.serialize.Serializable):
# 20,000 items with fp rate < 0.1% or 10,000 items and <0.0001%
MAX_BLOOM_FILTER_SIZE = 36000
MAX_HASH_FUNCS = 50
UPDATE_NONE = 0
UPDATE_ALL = 1
UPDATE_P2PUBKEY_ONLY = 2
UPDATE_MASK = 3
def __init__(self, nElements, nFPRate, nTweak, nFlags):
"""Create a new bloom filter
The filter will have a given false-positive rate when filled with the
given number of elements.
Note that if the given parameters will result in a filter outside the
bounds of the protocol limits, the filter created will be as close to
the given parameters as possible within the protocol limits. This will
apply if nFPRate is very low or nElements is unreasonably high.
nTweak is a constant which is added to the seed value passed to the
hash function It should generally always be a random value (and is
largely only exposed for unit testing)
nFlags should be one of the UPDATE_* enums (but not _MASK)
"""
LN2SQUARED = 0.4804530139182014246671025263266649717305529515945455
LN2 = 0.6931471805599453094172321214581765680755001343602552
self.vData = bytearray(int(min(-1 / LN2SQUARED * nElements * math.log(nFPRate), self.MAX_BLOOM_FILTER_SIZE * 8) / 8))
self.nHashFuncs = int(min(len(self.vData) * 8 / nElements * LN2, self.MAX_HASH_FUNCS))
self.nTweak = nTweak
self.nFlags = nFlags
def bloom_hash(self, nHashNum, vDataToHash):
return MurmurHash3(((nHashNum * 0xFBA4C795) + self.nTweak) & 0xFFFFFFFF, vDataToHash) % (len(self.vData) * 8)
__bit_mask = bytearray([0x01, 0x02, 0x04, 0x08, 0x10, 0x20, 0x40, 0x80])
def insert(self, elem):
"""Insert an element in the filter.
elem may be a COutPoint or bytes
"""
if isinstance(elem, bitcoin.core.COutPoint):
elem = elem.serialize()
if len(self.vData) == 1 and self.vData[0] == 0xff:
return
for i in range(0, self.nHashFuncs):
nIndex = self.bloom_hash(i, elem)
# Sets bit nIndex of vData
self.vData[nIndex >> 3] |= self.__bit_mask[7 & nIndex]
def contains(self, elem):
"""Test if the filter contains an element
elem may be a COutPoint or bytes
"""
if isinstance(elem, bitcoin.core.COutPoint):
elem = elem.serialize()
if len(self.vData) == 1 and self.vData[0] == 0xff:
return True
for i in range(0, self.nHashFuncs):
nIndex = self.bloom_hash(i, elem)
if not (self.vData[nIndex >> 3] & self.__bit_mask[7 & nIndex]):
return False
return True
def IsWithinSizeConstraints(self):
return len(self.vData) <= self.MAX_BLOOM_FILTER_SIZE and self.nHashFuncs <= self.MAX_HASH_FUNCS
    def IsRelevantAndUpdate(self, tx, tx_hash):
# Not useful for a client, so not implemented yet.
raise NotImplementedError
__struct = struct.Struct(b'<IIB')
@classmethod
def stream_deserialize(cls, f):
        vData = bitcoin.core.serialize.BytesSerializer.stream_deserialize(f)
        (nHashFuncs,
         nTweak,
         nFlags) = cls.__struct.unpack(
            bitcoin.core.serialize.ser_read(f, cls.__struct.size))
        # Bypass __init__(), which expects filter parameters (nElements, nFPRate,
        # ...) rather than the raw field values just read from the stream.
        self = cls.__new__(cls)
        self.vData = bytearray(vData)
        self.nHashFuncs = nHashFuncs
        self.nTweak = nTweak
        self.nFlags = nFlags
        return self
def stream_serialize(self, f):
if sys.version > '3':
bitcoin.core.serialize.BytesSerializer.stream_serialize(self.vData, f)
else:
# 2.7 has problems with f.write(bytearray())
bitcoin.core.serialize.BytesSerializer.stream_serialize(bytes(self.vData), f)
f.write(self.__struct.pack(self.nHashFuncs, self.nTweak, self.nFlags))
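# Illustrative usage sketch (not part of the original module): build a small
# filter, insert an element, and test membership. False positives depend on the
# chosen parameters, but an inserted element always tests positive.
#
#     flt = CBloomFilter(nElements=100, nFPRate=0.01, nTweak=0,
#                        nFlags=CBloomFilter.UPDATE_NONE)
#     flt.insert(b'some txid or script data')
#     assert flt.contains(b'some txid or script data')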
| Roasbeef/seneca | python-bitcoinlib/bitcoin/bloom.py | Python | unlicense | 5,793 |
"""The test for the threshold sensor platform."""
import unittest
from homeassistant.setup import setup_component
from homeassistant.const import (
ATTR_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, TEMP_CELSIUS)
from tests.common import get_test_home_assistant
class TestThresholdSensor(unittest.TestCase):
"""Test the threshold sensor."""
def setup_method(self, method):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def teardown_method(self, method):
"""Stop everything that was started."""
self.hass.stop()
def test_sensor_upper(self):
"""Test if source is above threshold."""
config = {
'binary_sensor': {
'platform': 'threshold',
'upper': '15',
'entity_id': 'sensor.test_monitored',
}
}
assert setup_component(self.hass, 'binary_sensor', config)
self.hass.states.set('sensor.test_monitored', 16,
{ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS})
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('sensor.test_monitored',
state.attributes.get('entity_id'))
self.assertEqual(16, state.attributes.get('sensor_value'))
self.assertEqual('above', state.attributes.get('position'))
self.assertEqual(float(config['binary_sensor']['upper']),
state.attributes.get('upper'))
self.assertEqual(0.0, state.attributes.get('hysteresis'))
self.assertEqual('upper', state.attributes.get('type'))
assert state.state == 'on'
self.hass.states.set('sensor.test_monitored', 14)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
assert state.state == 'off'
self.hass.states.set('sensor.test_monitored', 15)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
assert state.state == 'off'
def test_sensor_lower(self):
"""Test if source is below threshold."""
config = {
'binary_sensor': {
'platform': 'threshold',
'lower': '15',
'entity_id': 'sensor.test_monitored',
}
}
assert setup_component(self.hass, 'binary_sensor', config)
self.hass.states.set('sensor.test_monitored', 16)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('above', state.attributes.get('position'))
self.assertEqual(float(config['binary_sensor']['lower']),
state.attributes.get('lower'))
self.assertEqual(0.0, state.attributes.get('hysteresis'))
self.assertEqual('lower', state.attributes.get('type'))
assert state.state == 'off'
self.hass.states.set('sensor.test_monitored', 14)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
assert state.state == 'on'
def test_sensor_hysteresis(self):
"""Test if source is above threshold using hysteresis."""
config = {
'binary_sensor': {
'platform': 'threshold',
'upper': '15',
'hysteresis': '2.5',
'entity_id': 'sensor.test_monitored',
}
}
assert setup_component(self.hass, 'binary_sensor', config)
self.hass.states.set('sensor.test_monitored', 20)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('above', state.attributes.get('position'))
self.assertEqual(float(config['binary_sensor']['upper']),
state.attributes.get('upper'))
self.assertEqual(2.5, state.attributes.get('hysteresis'))
self.assertEqual('upper', state.attributes.get('type'))
assert state.state == 'on'
self.hass.states.set('sensor.test_monitored', 13)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
assert state.state == 'on'
self.hass.states.set('sensor.test_monitored', 12)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
assert state.state == 'off'
self.hass.states.set('sensor.test_monitored', 17)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
assert state.state == 'off'
self.hass.states.set('sensor.test_monitored', 18)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
assert state.state == 'on'
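    # Note (added for clarity, not part of the original test): with upper=15 and
    # hysteresis=2.5 the sensor only turns on once the source rises above
    # 15 + 2.5 = 17.5 and only turns off once it drops below 15 - 2.5 = 12.5,
    # which is exactly the sequence asserted above
    # (20 -> on, 13 -> still on, 12 -> off, 17 -> still off, 18 -> on).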
def test_sensor_in_range_no_hysteresis(self):
"""Test if source is within the range."""
config = {
'binary_sensor': {
'platform': 'threshold',
'lower': '10',
'upper': '20',
'entity_id': 'sensor.test_monitored',
}
}
assert setup_component(self.hass, 'binary_sensor', config)
self.hass.states.set('sensor.test_monitored', 16,
{ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS})
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('sensor.test_monitored',
state.attributes.get('entity_id'))
self.assertEqual(16, state.attributes.get('sensor_value'))
self.assertEqual('in_range', state.attributes.get('position'))
self.assertEqual(float(config['binary_sensor']['lower']),
state.attributes.get('lower'))
self.assertEqual(float(config['binary_sensor']['upper']),
state.attributes.get('upper'))
self.assertEqual(0.0, state.attributes.get('hysteresis'))
self.assertEqual('range', state.attributes.get('type'))
assert state.state == 'on'
self.hass.states.set('sensor.test_monitored', 9)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('below', state.attributes.get('position'))
assert state.state == 'off'
self.hass.states.set('sensor.test_monitored', 21)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('above', state.attributes.get('position'))
assert state.state == 'off'
def test_sensor_in_range_with_hysteresis(self):
"""Test if source is within the range."""
config = {
'binary_sensor': {
'platform': 'threshold',
'lower': '10',
'upper': '20',
'hysteresis': '2',
'entity_id': 'sensor.test_monitored',
}
}
assert setup_component(self.hass, 'binary_sensor', config)
self.hass.states.set('sensor.test_monitored', 16,
{ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS})
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('sensor.test_monitored',
state.attributes.get('entity_id'))
self.assertEqual(16, state.attributes.get('sensor_value'))
self.assertEqual('in_range', state.attributes.get('position'))
self.assertEqual(float(config['binary_sensor']['lower']),
state.attributes.get('lower'))
self.assertEqual(float(config['binary_sensor']['upper']),
state.attributes.get('upper'))
self.assertEqual(float(config['binary_sensor']['hysteresis']),
state.attributes.get('hysteresis'))
self.assertEqual('range', state.attributes.get('type'))
assert state.state == 'on'
self.hass.states.set('sensor.test_monitored', 8)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('in_range', state.attributes.get('position'))
assert state.state == 'on'
self.hass.states.set('sensor.test_monitored', 7)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('below', state.attributes.get('position'))
assert state.state == 'off'
self.hass.states.set('sensor.test_monitored', 12)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('below', state.attributes.get('position'))
assert state.state == 'off'
self.hass.states.set('sensor.test_monitored', 13)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('in_range', state.attributes.get('position'))
assert state.state == 'on'
self.hass.states.set('sensor.test_monitored', 22)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('in_range', state.attributes.get('position'))
assert state.state == 'on'
self.hass.states.set('sensor.test_monitored', 23)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('above', state.attributes.get('position'))
assert state.state == 'off'
self.hass.states.set('sensor.test_monitored', 18)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('above', state.attributes.get('position'))
assert state.state == 'off'
self.hass.states.set('sensor.test_monitored', 17)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('in_range', state.attributes.get('position'))
assert state.state == 'on'
def test_sensor_in_range_unknown_state(self):
"""Test if source is within the range."""
config = {
'binary_sensor': {
'platform': 'threshold',
'lower': '10',
'upper': '20',
'entity_id': 'sensor.test_monitored',
}
}
assert setup_component(self.hass, 'binary_sensor', config)
self.hass.states.set('sensor.test_monitored', 16,
{ATTR_UNIT_OF_MEASUREMENT: TEMP_CELSIUS})
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('sensor.test_monitored',
state.attributes.get('entity_id'))
self.assertEqual(16, state.attributes.get('sensor_value'))
self.assertEqual('in_range', state.attributes.get('position'))
self.assertEqual(float(config['binary_sensor']['lower']),
state.attributes.get('lower'))
self.assertEqual(float(config['binary_sensor']['upper']),
state.attributes.get('upper'))
self.assertEqual(0.0, state.attributes.get('hysteresis'))
self.assertEqual('range', state.attributes.get('type'))
assert state.state == 'on'
self.hass.states.set('sensor.test_monitored', STATE_UNKNOWN)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('unknown', state.attributes.get('position'))
assert state.state == 'off'
def test_sensor_lower_zero_threshold(self):
"""Test if a lower threshold of zero is set."""
config = {
'binary_sensor': {
'platform': 'threshold',
'lower': '0',
'entity_id': 'sensor.test_monitored',
}
}
assert setup_component(self.hass, 'binary_sensor', config)
self.hass.states.set('sensor.test_monitored', 16)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('lower', state.attributes.get('type'))
self.assertEqual(float(config['binary_sensor']['lower']),
state.attributes.get('lower'))
assert state.state == 'off'
self.hass.states.set('sensor.test_monitored', -3)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
assert state.state == 'on'
def test_sensor_upper_zero_threshold(self):
"""Test if an upper threshold of zero is set."""
config = {
'binary_sensor': {
'platform': 'threshold',
'upper': '0',
'entity_id': 'sensor.test_monitored',
}
}
assert setup_component(self.hass, 'binary_sensor', config)
self.hass.states.set('sensor.test_monitored', -10)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
self.assertEqual('upper', state.attributes.get('type'))
self.assertEqual(float(config['binary_sensor']['upper']),
state.attributes.get('upper'))
assert state.state == 'off'
self.hass.states.set('sensor.test_monitored', 2)
self.hass.block_till_done()
state = self.hass.states.get('binary_sensor.threshold')
assert state.state == 'on'
| persandstrom/home-assistant | tests/components/binary_sensor/test_threshold.py | Python | apache-2.0 | 13,704 |
from __future__ import print_function
import csv
import os
import random
import re
import urllib3
from urllib.parse import parse_qs, urlparse, unquote, urlencode, quote
from locust import HttpLocust, TaskSet, task, TaskSequence, seq_task, between
# SAML IdP Host
HOST = "https://axman000.local:8443"
# Run weighting for each protocol
CAS_WEIGHT = 3
SAML_WEIGHT = 1
# Testing using locally generated cert, so turning off error messaging
IGNORE_SSL = True
# Search Patterns
EXECUTION_PAT = re.compile(r'<input type="hidden" name="execution" value="([^"]+)"')
EVENTID_PAT = re.compile(r'<input type="hidden" name="_eventId" value="([^"]+)"')
RELAY_STATE_PAT = re.compile(r'<input type="hidden" name="RelayState" value="([^"]+)"')
SAML_RESPONSE_PAT = re.compile(r'<input type="hidden" name="SAMLResponse" value="([^"]+)"')
SAML_SP_PAGE_PAT = re.compile(".*PHP Variables.*")
# Service Provider settings
CAS_SP = "https://castest.edu/"
# SAML Service Provider settings
SP = 'https://idptestbed'
SP_LOGIN = '/Shibboleth.sso/SAML2/POST'
SP_ENTITY_ID = 'https://sp.idptestbed/shibboleth'
SP_PROTECTED = '/php-shib-protected/'
sp = 'https://idptestbed'
sp_login = '/Shibboleth.sso/SAML2/POST'
sp_entity_id = 'https://sp.idptestbed/shibboleth'
sp_protected = '/php-shib-protected/'
class BasicTaskSet(TaskSet):
def on_start(self):
"""
LOCUST startup process
"""
if IGNORE_SSL:
urllib3.disable_warnings()
print("Start Locust Run!")
def on_stop(self):
"""
LOCUST shutdown process
"""
print("End of Locust Run")
@task(CAS_WEIGHT)
class CASTaskSet(TaskSequence):
@seq_task(1)
def login(self):
"""
Main script used to log in via CAS protocol
"""
print("CAS Login Process Starting ...")
client = self.client
cas_response = client.get("/cas/login",
params={'service': CAS_SP},
name="CAS 1. /cas/login - GET",
verify=False)
content = cas_response.text
found_exec = EXECUTION_PAT.search(content)
if found_exec is None:
print("CAS No Execution field found on login form!")
self.interrupt()
execution = found_exec.groups()[0]
found_eventid = EVENTID_PAT.search(content)
if found_eventid is None:
print("CAS No Event Id field found on login form!")
self.interrupt()
event_id = found_eventid.groups()[0]
creds = random.choice(self.locust.creds)
cas_user = creds[0]
cas_passwd = creds[1]
data = {
"username": cas_user,
"password": cas_passwd,
"execution": execution,
"_eventId": event_id,
"geolocation": "",
}
print("CAS Logging in User")
cas_login_response = client.post("/cas/login?service={}".format(CAS_SP),
data=data,
name="CAS 2. /cas/login - POST",
verify=False,
allow_redirects=False)
cas_response_url = cas_login_response.next.url
url_query = unquote(urlparse(cas_response_url).query)
cas_parsed_url = parse_qs(url_query)
if 'ticket' in cas_parsed_url:
cas_ticket = cas_parsed_url['ticket'][0]
else:
print("CAS No Ticket found in returned form!")
self.interrupt()
print("Validating service ticket ...")
ticket_response = client.get("/cas/serviceValidate",
params={'service': CAS_SP, 'ticket': cas_ticket},
name="CAS 3. /cas/serviceValidate - GET",
verify=False)
user_data = ticket_response.text
if "<cas:authenticationSuccess>" in user_data:
print("Succesful Run!")
else:
print("CAS No Event Id field found on login form!")
self.interrupt()
print("Validating service ticket ...")
@seq_task(2)
def logout(self):
"""
CAS User logout
"""
print("CAS Logged out of SSO.")
self.client.get("/cas/logout",
verify=False,
name="CAS 4. /cas/logout - GET")
self.interrupt()
@task(SAML_WEIGHT)
class SAMLTaskSet(TaskSequence):
@seq_task(1)
def login(self):
"""
Main script used to log in via SAML protocol
"""
client = self.client
print("SAML Go to SP and redirect to CAS")
sp_client = SP + SP_PROTECTED
client_response = client.get(sp_client,
verify=False,
name="SAML 1. {} - GET".format(SP_PROTECTED))
print("SAML Now at CAS Login page")
response_url = client_response.url
url_query = unquote(urlparse(response_url).query)
parsed_url = parse_qs(url_query)
print("SAML Grab data passed to CAS")
if 'RelayState' in parsed_url:
sp_relay_state = parsed_url['RelayState'][0]
else:
print("SAML No RelayState field found on login form!")
self.interrupt()
if 'SAMLRequest' in parsed_url:
sp_saml_request = parsed_url['SAMLRequest'][0]
else:
print("SAML No SAMLRequest field found on login form!")
self.interrupt()
content = client_response.text
found_exec = EXECUTION_PAT.search(content)
if found_exec is None:
print("SAML No Execution field found on login form!")
self.interrupt()
execution = found_exec.groups()[0]
found_eventid = EVENTID_PAT.search(content)
if found_eventid is None:
print("SAML No Event Id field found on login form!")
self.interrupt()
event_id = found_eventid.groups()[0]
print("SAML Get user login info")
creds = random.choice(self.locust.creds)
user = creds[0]
passwd = creds[1]
print("SAML Build Login parameters")
params = {
'SAMLRequest': sp_saml_request,
'RelayState': sp_relay_state
}
data = {
"username": user,
"password": passwd,
"execution": execution,
"_eventId": event_id,
"geolocation": '',
}
encoded_params = urlencode(params, quote_via=quote)
encoded_entityid = quote(SP_ENTITY_ID, safe='')
encoded_service = quote(
'{}/cas/idp/profile/SAML2/Callback?entityId={}&{}'.format(HOST,
encoded_entityid,
encoded_params), safe='')
print("SAML Submit User login credentials ...")
login_response = client.post("/cas/login?service=" + encoded_service,
data=data,
verify=False,
allow_redirects=True,
name="SAML 2. /cas/login?service= - POST")
login_content = login_response.text
found_relay = RELAY_STATE_PAT.search(login_content)
if found_relay is None:
print("SAML No Relay State field found!")
self.interrupt()
            # The RelayState sometimes comes back with ':' HTML-encoded as the
            # hex entity '&#x3a;'; convert it back to a literal colon before posting
            # the value to the SP.
            idp_relay_state = found_relay.groups()[0].replace('&#x3a;', ':')
saml_response = SAML_RESPONSE_PAT.search(login_content)
if saml_response is None:
print("SAML No SAML Response field found!")
self.interrupt()
idp_saml_response = unquote(saml_response.groups()[0])
sp_url = SP + SP_LOGIN
data = {
"RelayState": idp_relay_state,
"SAMLResponse": idp_saml_response,
}
print("SAML Return call to SP with SAML info ...")
sp_response = client.post(sp_url,
data=data,
verify=False,
name="SAML 3. {} - POST".format(SP_LOGIN))
assert SAML_SP_PAGE_PAT.search(sp_response.text) is not None, "Expected title has not been found!"
print("SAML Successful Run!")
@seq_task(2)
def logout(self):
"""
SAML User logout
"""
print("SAML Logged out of SSO.")
self.client.get("/cas/logout",
verify=False,
name="SAML 4. /cas/logout - GET")
self.interrupt()
def load_creds():
"""
Load test user credentials.
"""
credpath = os.path.join(
os.path.dirname(__file__),
"credentials.csv")
creds = []
with open(credpath, "r") as f:
reader = csv.reader(f)
for row in reader:
creds.append((row[0], row[1]))
return creds
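# Illustrative note (not part of the original script): credentials.csv is read
# row by row as (username, password) pairs, so a minimal file looks like:
#
#     testuser1,password1
#     testuser2,password2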
class BothLocust(HttpLocust):
task_set = BasicTaskSet
host = HOST
wait_time = between(2, 15)
creds = load_creds()
| leleuj/cas | etc/loadtests/locust/cas5/bothLocust.py | Python | apache-2.0 | 9,997 |
import io
import zipfile
from tornado import testing
from waterbutler.core import streams
from waterbutler.core.utils import AsyncIterator
from tests import utils
class TestZipHandler(utils.HandlerTestCase):
HOOK_PATH = 'waterbutler.server.api.v0.zip.ZipHandler._send_hook'
@testing.gen_test
def test_download_stream(self):
data = b'freddie brian john roger'
stream = streams.StringStream(data)
stream.content_type = 'application/octet-stream'
zipstream = streams.ZipStreamReader(AsyncIterator([('file.txt', stream)]))
self.mock_provider.zip = utils.MockCoroutine(return_value=zipstream)
resp = yield self.http_client.fetch(
self.get_url('/zip?provider=queenhub&path=/freddie.png'),
)
zip = zipfile.ZipFile(io.BytesIO(resp.body))
assert zip.testzip() is None
assert zip.open('file.txt').read() == data
| TomBaxter/waterbutler | tests/server/api/v0/test_zip.py | Python | apache-2.0 | 919 |
# Copyright 2013-2016 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import unittest2 as unittest
except ImportError:
import unittest
from cassandra import Unavailable, Timeout, ConsistencyLevel
import re
class ConsistencyExceptionTest(unittest.TestCase):
"""
Verify Cassandra Exception string representation
"""
def extract_consistency(self, msg):
"""
Given message that has 'consistency': 'value', extract consistency value as a string
:param msg: message with consistency value
:return: String representing consistency value
"""
match = re.search("'consistency':\s+'([\w\s]+)'", msg)
return match and match.group(1)
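    # Example (added for clarity, not part of the original test): given an
    # exception repr containing "'consistency': 'LOCAL_QUORUM'", the regex above
    # returns the string 'LOCAL_QUORUM'; if no consistency key is present the
    # method returns None.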
def test_timeout_consistency(self):
"""
Verify that Timeout exception object translates consistency from input value to correct output string
"""
consistency_str = self.extract_consistency(repr(Timeout("Timeout Message", consistency=None)))
self.assertEqual(consistency_str, 'Not Set')
for c in ConsistencyLevel.value_to_name.keys():
consistency_str = self.extract_consistency(repr(Timeout("Timeout Message", consistency=c)))
self.assertEqual(consistency_str, ConsistencyLevel.value_to_name[c])
def test_unavailable_consistency(self):
"""
Verify that Unavailable exception object translates consistency from input value to correct output string
"""
consistency_str = self.extract_consistency(repr(Unavailable("Unavailable Message", consistency=None)))
self.assertEqual(consistency_str, 'Not Set')
for c in ConsistencyLevel.value_to_name.keys():
consistency_str = self.extract_consistency(repr(Unavailable("Timeout Message", consistency=c)))
self.assertEqual(consistency_str, ConsistencyLevel.value_to_name[c])
| Richard-Mathie/cassandra_benchmark | vendor/github.com/datastax/python-driver/tests/unit/test_exception.py | Python | apache-2.0 | 2,375 |
import orjson
from django.core.exceptions import ValidationError
from django.db.utils import IntegrityError
from zerver.lib.actions import (
do_change_realm_domain,
do_change_user_role,
do_create_realm,
do_remove_realm_domain,
do_set_realm_property,
)
from zerver.lib.domains import validate_domain
from zerver.lib.email_validation import email_allowed_for_realm
from zerver.lib.test_classes import ZulipTestCase
from zerver.models import DomainNotAllowedForRealmError, RealmDomain, UserProfile, get_realm
class RealmDomainTest(ZulipTestCase):
def setUp(self) -> None:
realm = get_realm("zulip")
do_set_realm_property(realm, "emails_restricted_to_domains", True, acting_user=None)
def test_list_realm_domains(self) -> None:
self.login("iago")
realm = get_realm("zulip")
RealmDomain.objects.create(realm=realm, domain="acme.com", allow_subdomains=True)
result = self.client_get("/json/realm/domains")
self.assert_json_success(result)
received = result.json()["domains"]
expected = [
{"domain": "zulip.com", "allow_subdomains": False},
{"domain": "acme.com", "allow_subdomains": True},
]
self.assertEqual(received, expected)
def test_not_realm_admin(self) -> None:
self.login("hamlet")
result = self.client_post("/json/realm/domains")
self.assert_json_error(result, "Must be an organization administrator")
result = self.client_patch("/json/realm/domains/15")
self.assert_json_error(result, "Must be an organization administrator")
result = self.client_delete("/json/realm/domains/15")
self.assert_json_error(result, "Must be an organization administrator")
def test_create_realm_domain(self) -> None:
self.login("iago")
data = {
"domain": "",
"allow_subdomains": orjson.dumps(True).decode(),
}
result = self.client_post("/json/realm/domains", info=data)
self.assert_json_error(result, "Invalid domain: Domain can't be empty.")
data["domain"] = "acme.com"
result = self.client_post("/json/realm/domains", info=data)
self.assert_json_success(result)
realm = get_realm("zulip")
self.assertTrue(
RealmDomain.objects.filter(
realm=realm, domain="acme.com", allow_subdomains=True
).exists()
)
result = self.client_post("/json/realm/domains", info=data)
self.assert_json_error(
result, "The domain acme.com is already a part of your organization."
)
mit_user_profile = self.mit_user("sipbtest")
self.login_user(mit_user_profile)
do_change_user_role(
mit_user_profile, UserProfile.ROLE_REALM_ADMINISTRATOR, acting_user=None
)
result = self.client_post(
"/json/realm/domains", info=data, HTTP_HOST=mit_user_profile.realm.host
)
self.assert_json_success(result)
def test_patch_realm_domain(self) -> None:
self.login("iago")
realm = get_realm("zulip")
RealmDomain.objects.create(realm=realm, domain="acme.com", allow_subdomains=False)
data = {
"allow_subdomains": orjson.dumps(True).decode(),
}
url = "/json/realm/domains/acme.com"
result = self.client_patch(url, data)
self.assert_json_success(result)
self.assertTrue(
RealmDomain.objects.filter(
realm=realm, domain="acme.com", allow_subdomains=True
).exists()
)
url = "/json/realm/domains/non-existent.com"
result = self.client_patch(url, data)
self.assertEqual(result.status_code, 400)
self.assert_json_error(result, "No entry found for domain non-existent.com.")
def test_delete_realm_domain(self) -> None:
self.login("iago")
realm = get_realm("zulip")
RealmDomain.objects.create(realm=realm, domain="acme.com")
result = self.client_delete("/json/realm/domains/non-existent.com")
self.assertEqual(result.status_code, 400)
self.assert_json_error(result, "No entry found for domain non-existent.com.")
result = self.client_delete("/json/realm/domains/acme.com")
self.assert_json_success(result)
self.assertFalse(RealmDomain.objects.filter(domain="acme.com").exists())
self.assertTrue(realm.emails_restricted_to_domains)
def test_delete_all_realm_domains(self) -> None:
self.login("iago")
realm = get_realm("zulip")
query = RealmDomain.objects.filter(realm=realm)
self.assertTrue(realm.emails_restricted_to_domains)
for realm_domain in query.all():
do_remove_realm_domain(realm_domain, acting_user=None)
self.assertEqual(query.count(), 0)
# Deleting last realm_domain should set `emails_restricted_to_domains` to False.
# This should be tested on a fresh instance, since the cached objects
# would not be updated.
self.assertFalse(get_realm("zulip").emails_restricted_to_domains)
def test_email_allowed_for_realm(self) -> None:
realm1 = do_create_realm("testrealm1", "Test Realm 1", emails_restricted_to_domains=True)
realm2 = do_create_realm("testrealm2", "Test Realm 2", emails_restricted_to_domains=True)
realm_domain = RealmDomain.objects.create(
realm=realm1, domain="test1.com", allow_subdomains=False
)
RealmDomain.objects.create(realm=realm2, domain="test2.test1.com", allow_subdomains=True)
email_allowed_for_realm("[email protected]", realm1)
with self.assertRaises(DomainNotAllowedForRealmError):
email_allowed_for_realm("[email protected]", realm1)
email_allowed_for_realm("[email protected]", realm2)
email_allowed_for_realm("[email protected]", realm2)
with self.assertRaises(DomainNotAllowedForRealmError):
email_allowed_for_realm("[email protected]", realm2)
do_change_realm_domain(realm_domain, True)
email_allowed_for_realm("[email protected]", realm1)
email_allowed_for_realm("[email protected]", realm1)
with self.assertRaises(DomainNotAllowedForRealmError):
email_allowed_for_realm("[email protected]", realm1)
def test_realm_realm_domains_uniqueness(self) -> None:
realm = get_realm("zulip")
with self.assertRaises(IntegrityError):
RealmDomain.objects.create(realm=realm, domain="zulip.com", allow_subdomains=True)
def test_validate_domain(self) -> None:
invalid_domains = [
"",
"test",
"t.",
"test.",
".com",
"-test",
"test...com",
"test-",
"test_domain.com",
"test.-domain.com",
"a" * 255 + ".com",
]
for domain in invalid_domains:
with self.assertRaises(ValidationError):
validate_domain(domain)
valid_domains = ["acme.com", "x-x.y.3.z"]
for domain in valid_domains:
validate_domain(domain)
| andersk/zulip | zerver/tests/test_realm_domains.py | Python | apache-2.0 | 7,224 |
# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import absolute_import
import os
import sys
import tempfile
import warnings
import numpy
from numpy import testing as npt
import tables
from tables import Atom, ClosedNodeError, NoSuchNodeError
from tables.utils import byteorders
from tables.tests import common
from tables.tests.common import allequal
from tables.tests.common import unittest, test_filename
from tables.tests.common import PyTablesTestCase as TestCase
from six.moves import range
warnings.resetwarnings()
class BasicTestCase(TestCase):
"""Basic test for all the supported typecodes present in numpy.
All of them are included on pytables.
"""
endiancheck = False
def write_read(self, testarray):
a = testarray
if common.verbose:
print('\n', '-=' * 30)
print("Running test for array with type '%s'" % a.dtype.type,
end=' ')
print("for class check:", self.title)
# Create an instance of HDF5 file
filename = tempfile.mktemp(".h5")
try:
with tables.open_file(filename, mode="w") as fileh:
root = fileh.root
# Create the array under root and name 'somearray'
if self.endiancheck and a.dtype.kind != "S":
b = a.byteswap()
b.dtype = a.dtype.newbyteorder()
a = b
fileh.create_array(root, 'somearray', a, "Some array")
# Re-open the file in read-only mode
with tables.open_file(filename, mode="r") as fileh:
root = fileh.root
# Read the saved array
b = root.somearray.read()
# Compare them. They should be equal.
if common.verbose and not allequal(a, b):
print("Write and read arrays differ!")
# print("Array written:", a)
print("Array written shape:", a.shape)
print("Array written itemsize:", a.itemsize)
print("Array written type:", a.dtype.type)
# print("Array read:", b)
print("Array read shape:", b.shape)
print("Array read itemsize:", b.itemsize)
print("Array read type:", b.dtype.type)
if a.dtype.kind != "S":
print("Array written byteorder:", a.dtype.byteorder)
print("Array read byteorder:", b.dtype.byteorder)
# Check strictly the array equality
self.assertEqual(a.shape, b.shape)
self.assertEqual(a.shape, root.somearray.shape)
if a.dtype.kind == "S":
self.assertEqual(root.somearray.atom.type, "string")
else:
self.assertEqual(a.dtype.type, b.dtype.type)
self.assertEqual(a.dtype.type,
root.somearray.atom.dtype.type)
abo = byteorders[a.dtype.byteorder]
bbo = byteorders[b.dtype.byteorder]
if abo != "irrelevant":
self.assertEqual(abo, root.somearray.byteorder)
self.assertEqual(bbo, sys.byteorder)
if self.endiancheck:
self.assertNotEqual(bbo, abo)
obj = root.somearray
self.assertEqual(obj.flavor, 'numpy')
self.assertEqual(obj.shape, a.shape)
self.assertEqual(obj.ndim, a.ndim)
self.assertEqual(obj.chunkshape, None)
if a.shape:
nrows = a.shape[0]
else:
# scalar
nrows = 1
self.assertEqual(obj.nrows, nrows)
self.assertTrue(allequal(a, b))
finally:
# Then, delete the file
os.remove(filename)
def write_read_out_arg(self, testarray):
a = testarray
if common.verbose:
print('\n', '-=' * 30)
print("Running test for array with type '%s'" % a.dtype.type,
end=' ')
print("for class check:", self.title)
# Create an instance of HDF5 file
filename = tempfile.mktemp(".h5")
try:
with tables.open_file(filename, mode="w") as fileh:
root = fileh.root
# Create the array under root and name 'somearray'
if self.endiancheck and a.dtype.kind != "S":
b = a.byteswap()
b.dtype = a.dtype.newbyteorder()
a = b
fileh.create_array(root, 'somearray', a, "Some array")
# Re-open the file in read-only mode
with tables.open_file(filename, mode="r") as fileh:
root = fileh.root
# Read the saved array
b = numpy.empty_like(a, dtype=a.dtype)
root.somearray.read(out=b)
# Check strictly the array equality
self.assertEqual(a.shape, b.shape)
self.assertEqual(a.shape, root.somearray.shape)
if a.dtype.kind == "S":
self.assertEqual(root.somearray.atom.type, "string")
else:
self.assertEqual(a.dtype.type, b.dtype.type)
self.assertEqual(a.dtype.type,
root.somearray.atom.dtype.type)
abo = byteorders[a.dtype.byteorder]
bbo = byteorders[b.dtype.byteorder]
if abo != "irrelevant":
self.assertEqual(abo, root.somearray.byteorder)
self.assertEqual(abo, bbo)
if self.endiancheck:
self.assertNotEqual(bbo, sys.byteorder)
self.assertTrue(allequal(a, b))
finally:
# Then, delete the file
os.remove(filename)
def write_read_atom_shape_args(self, testarray):
a = testarray
atom = Atom.from_dtype(a.dtype)
shape = a.shape
byteorder = None
if common.verbose:
print('\n', '-=' * 30)
print("Running test for array with type '%s'" % a.dtype.type,
end=' ')
print("for class check:", self.title)
# Create an instance of HDF5 file
filename = tempfile.mktemp(".h5")
try:
with tables.open_file(filename, mode="w") as fileh:
root = fileh.root
# Create the array under root and name 'somearray'
if self.endiancheck and a.dtype.kind != "S":
b = a.byteswap()
b.dtype = a.dtype.newbyteorder()
if b.dtype.byteorder in ('>', '<'):
byteorder = byteorders[b.dtype.byteorder]
a = b
ptarr = fileh.create_array(root, 'somearray',
atom=atom, shape=shape,
title="Some array",
# specify the byteorder explicitly
# since there is no way to deduce
# it in this case
byteorder=byteorder)
self.assertEqual(shape, ptarr.shape)
self.assertEqual(atom, ptarr.atom)
ptarr[...] = a
# Re-open the file in read-only mode
with tables.open_file(filename, mode="r") as fileh:
root = fileh.root
# Read the saved array
b = root.somearray.read()
# Compare them. They should be equal.
if common.verbose and not allequal(a, b):
print("Write and read arrays differ!")
# print("Array written:", a)
print("Array written shape:", a.shape)
print("Array written itemsize:", a.itemsize)
print("Array written type:", a.dtype.type)
# print("Array read:", b)
print("Array read shape:", b.shape)
print("Array read itemsize:", b.itemsize)
print("Array read type:", b.dtype.type)
if a.dtype.kind != "S":
print("Array written byteorder:", a.dtype.byteorder)
print("Array read byteorder:", b.dtype.byteorder)
# Check strictly the array equality
self.assertEqual(a.shape, b.shape)
self.assertEqual(a.shape, root.somearray.shape)
if a.dtype.kind == "S":
self.assertEqual(root.somearray.atom.type, "string")
else:
self.assertEqual(a.dtype.type, b.dtype.type)
self.assertEqual(a.dtype.type,
root.somearray.atom.dtype.type)
abo = byteorders[a.dtype.byteorder]
bbo = byteorders[b.dtype.byteorder]
if abo != "irrelevant":
self.assertEqual(abo, root.somearray.byteorder)
self.assertEqual(bbo, sys.byteorder)
if self.endiancheck:
self.assertNotEqual(bbo, abo)
obj = root.somearray
self.assertEqual(obj.flavor, 'numpy')
self.assertEqual(obj.shape, a.shape)
self.assertEqual(obj.ndim, a.ndim)
self.assertEqual(obj.chunkshape, None)
if a.shape:
nrows = a.shape[0]
else:
# scalar
nrows = 1
self.assertEqual(obj.nrows, nrows)
self.assertTrue(allequal(a, b))
finally:
# Then, delete the file
os.remove(filename)
def setup00_char(self):
"""Data integrity during recovery (character objects)"""
if not isinstance(self.tupleChar, numpy.ndarray):
a = numpy.array(self.tupleChar, dtype="S")
else:
a = self.tupleChar
return a
def test00_char(self):
a = self.setup00_char()
self.write_read(a)
def test00_char_out_arg(self):
a = self.setup00_char()
self.write_read_out_arg(a)
def test00_char_atom_shape_args(self):
a = self.setup00_char()
self.write_read_atom_shape_args(a)
def test00b_char(self):
"""Data integrity during recovery (string objects)"""
a = self.tupleChar
filename = tempfile.mktemp(".h5")
try:
# Create an instance of HDF5 file
with tables.open_file(filename, mode="w") as fileh:
fileh.create_array(fileh.root, 'somearray', a, "Some array")
# Re-open the file in read-only mode
with tables.open_file(filename, mode="r") as fileh:
# Read the saved array
b = fileh.root.somearray.read()
if isinstance(a, bytes):
self.assertEqual(type(b), bytes)
self.assertEqual(a, b)
else:
# If a is not a python string, then it should be a list
# or ndarray
self.assertTrue(type(b) in [list, numpy.ndarray])
finally:
# Then, delete the file
os.remove(filename)
def test00b_char_out_arg(self):
"""Data integrity during recovery (string objects)"""
a = self.tupleChar
filename = tempfile.mktemp(".h5")
try:
# Create an instance of HDF5 file
with tables.open_file(filename, mode="w") as fileh:
fileh.create_array(fileh.root, 'somearray', a, "Some array")
# Re-open the file in read-only mode
with tables.open_file(filename, mode="r") as fileh:
# Read the saved array
b = numpy.empty_like(a)
if fileh.root.somearray.flavor != 'numpy':
self.assertRaises(TypeError,
lambda: fileh.root.somearray.read(out=b))
else:
fileh.root.somearray.read(out=b)
                    self.assertIsInstance(b, numpy.ndarray)
finally:
# Then, delete the file
os.remove(filename)
def test00b_char_atom_shape_args(self):
"""Data integrity during recovery (string objects)"""
a = self.tupleChar
filename = tempfile.mktemp(".h5")
try:
# Create an instance of HDF5 file
with tables.open_file(filename, mode="w") as fileh:
nparr = numpy.asarray(a)
atom = Atom.from_dtype(nparr.dtype)
shape = nparr.shape
if nparr.dtype.byteorder in ('>', '<'):
byteorder = byteorders[nparr.dtype.byteorder]
else:
byteorder = None
ptarr = fileh.create_array(fileh.root, 'somearray',
atom=atom, shape=shape,
byteorder=byteorder,
title="Some array")
self.assertEqual(shape, ptarr.shape)
self.assertEqual(atom, ptarr.atom)
ptarr[...] = a
# Re-open the file in read-only mode
with tables.open_file(filename, mode="r") as fileh:
# Read the saved array
b = numpy.empty_like(a)
if fileh.root.somearray.flavor != 'numpy':
self.assertRaises(TypeError,
lambda: fileh.root.somearray.read(out=b))
else:
fileh.root.somearray.read(out=b)
                    self.assertIsInstance(b, numpy.ndarray)
finally:
# Then, delete the file
os.remove(filename)
def setup01_char_nc(self):
"""Data integrity during recovery (non-contiguous character objects)"""
if not isinstance(self.tupleChar, numpy.ndarray):
a = numpy.array(self.tupleChar, dtype="S")
else:
a = self.tupleChar
if a.ndim == 0:
b = a.copy()
else:
b = a[::2]
# Ensure that this numpy string is non-contiguous
if len(b) > 1:
self.assertEqual(b.flags.contiguous, False)
return b
def test01_char_nc(self):
b = self.setup01_char_nc()
self.write_read(b)
def test01_char_nc_out_arg(self):
b = self.setup01_char_nc()
self.write_read_out_arg(b)
def test01_char_nc_atom_shape_args(self):
b = self.setup01_char_nc()
self.write_read_atom_shape_args(b)
def test02_types(self):
"""Data integrity during recovery (numerical types)"""
typecodes = ['int8', 'int16', 'int32', 'int64',
'uint8', 'uint16', 'uint32', 'uint64',
'float32', 'float64',
'complex64', 'complex128']
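        # Half- and extended-precision types are platform/build dependent,
        # so they are only tested when PyTables exposes the matching Atom.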
for name in ('float16', 'float96', 'float128',
'complex192', 'complex256'):
atomname = name.capitalize() + 'Atom'
if hasattr(tables, atomname):
typecodes.append(name)
for typecode in typecodes:
a = numpy.array(self.tupleInt, typecode)
self.write_read(a)
b = numpy.array(self.tupleInt, typecode)
self.write_read_out_arg(b)
c = numpy.array(self.tupleInt, typecode)
self.write_read_atom_shape_args(c)
def test03_types_nc(self):
"""Data integrity during recovery (non-contiguous numerical types)"""
typecodes = ['int8', 'int16', 'int32', 'int64',
'uint8', 'uint16', 'uint32', 'uint64',
'float32', 'float64',
'complex64', 'complex128', ]
for name in ('float16', 'float96', 'float128',
'complex192', 'complex256'):
atomname = name.capitalize() + 'Atom'
if hasattr(tables, atomname):
typecodes.append(name)
for typecode in typecodes:
a = numpy.array(self.tupleInt, typecode)
if a.ndim == 0:
b1 = a.copy()
b2 = a.copy()
b3 = a.copy()
else:
b1 = a[::2]
b2 = a[::2]
b3 = a[::2]
# Ensure that this array is non-contiguous
if len(b1) > 1:
self.assertEqual(b1.flags.contiguous, False)
if len(b2) > 1:
self.assertEqual(b2.flags.contiguous, False)
if len(b3) > 1:
self.assertEqual(b3.flags.contiguous, False)
self.write_read(b1)
self.write_read_out_arg(b2)
self.write_read_atom_shape_args(b3)
class Basic0DOneTestCase(BasicTestCase):
# Scalar case
title = "Rank-0 case 1"
tupleInt = 3
tupleChar = b"3"
endiancheck = True
class Basic0DTwoTestCase(BasicTestCase):
# Scalar case
title = "Rank-0 case 2"
tupleInt = 33
tupleChar = b"33"
endiancheck = True
class Basic1DZeroTestCase(BasicTestCase):
# This test case is not supported by PyTables (HDF5 limitations)
# 1D case
title = "Rank-1 case 0"
tupleInt = ()
tupleChar = ()
endiancheck = False
class Basic1DOneTestCase(BasicTestCase):
# 1D case
title = "Rank-1 case 1"
tupleInt = (3,)
tupleChar = (b"a",)
endiancheck = True
class Basic1DTwoTestCase(BasicTestCase):
# 1D case
title = "Rank-1 case 2"
tupleInt = (3, 4)
tupleChar = (b"aaa",)
endiancheck = True
class Basic1DThreeTestCase(BasicTestCase):
# 1D case
title = "Rank-1 case 3"
tupleInt = (3, 4, 5)
tupleChar = (b"aaa", b"bbb",)
endiancheck = True
class Basic2DOneTestCase(BasicTestCase):
# 2D case
title = "Rank-2 case 1"
tupleInt = numpy.array(numpy.arange((4)**2))
tupleInt.shape = (4,)*2
tupleChar = numpy.array(["abc"]*3**2, dtype="S3")
tupleChar.shape = (3,)*2
endiancheck = True
class Basic2DTwoTestCase(BasicTestCase):
# 2D case, with a multidimensional dtype
title = "Rank-2 case 2"
tupleInt = numpy.array(numpy.arange((4)), dtype=(numpy.int_, (4,)))
tupleChar = numpy.array(["abc"]*3, dtype=("S3", (3,)))
endiancheck = True
class Basic10DTestCase(BasicTestCase):
# 10D case
title = "Rank-10 test"
tupleInt = numpy.array(numpy.arange((2)**10))
tupleInt.shape = (2,)*10
tupleChar = numpy.array(
["abc"]*2**10, dtype="S3")
tupleChar.shape = (2,)*10
endiancheck = True
class Basic32DTestCase(BasicTestCase):
# 32D case (maximum)
title = "Rank-32 test"
tupleInt = numpy.array((32,))
tupleInt.shape = (1,)*32
tupleChar = numpy.array(["121"], dtype="S3")
tupleChar.shape = (1,)*32
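# The tests below exercise the optional ``out`` argument of Array.read(),
# which fills a preallocated, C-contiguous NumPy buffer instead of
# allocating a new array.  A minimal usage sketch (illustrative only):
#
#     buf = numpy.empty((1000,), dtype='f8')
#     h5file.root.array.read(out=buf)   # buf now holds the data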
class ReadOutArgumentTests(common.TempFileMixin, TestCase):
def setUp(self):
super(ReadOutArgumentTests, self).setUp()
self.size = 1000
def create_array(self):
array = numpy.arange(self.size, dtype='f8')
disk_array = self.h5file.create_array('/', 'array', array)
return array, disk_array
def test_read_entire_array(self):
array, disk_array = self.create_array()
out_buffer = numpy.empty((self.size, ), 'f8')
disk_array.read(out=out_buffer)
numpy.testing.assert_equal(out_buffer, array)
def test_read_contiguous_slice1(self):
array, disk_array = self.create_array()
out_buffer = numpy.arange(self.size, dtype='f8')
out_buffer = numpy.random.permutation(out_buffer)
out_buffer_orig = out_buffer.copy()
start = self.size // 2
disk_array.read(start=start, stop=self.size, out=out_buffer[start:])
numpy.testing.assert_equal(out_buffer[start:], array[start:])
numpy.testing.assert_equal(out_buffer[:start], out_buffer_orig[:start])
def test_read_contiguous_slice2(self):
array, disk_array = self.create_array()
out_buffer = numpy.arange(self.size, dtype='f8')
out_buffer = numpy.random.permutation(out_buffer)
out_buffer_orig = out_buffer.copy()
start = self.size // 4
stop = self.size - start
disk_array.read(start=start, stop=stop, out=out_buffer[start:stop])
numpy.testing.assert_equal(out_buffer[start:stop], array[start:stop])
numpy.testing.assert_equal(out_buffer[:start], out_buffer_orig[:start])
numpy.testing.assert_equal(out_buffer[stop:], out_buffer_orig[stop:])
def test_read_non_contiguous_slice_contiguous_buffer(self):
array, disk_array = self.create_array()
out_buffer = numpy.empty((self.size // 2, ), dtype='f8')
disk_array.read(start=0, stop=self.size, step=2, out=out_buffer)
numpy.testing.assert_equal(out_buffer, array[0:self.size:2])
def test_read_non_contiguous_buffer(self):
array, disk_array = self.create_array()
out_buffer = numpy.empty((self.size, ), 'f8')
out_buffer_slice = out_buffer[0:self.size:2]
# once Python 2.6 support is dropped, this could change
# to assertRaisesRegexp to check exception type and message at once
self.assertRaises(ValueError, disk_array.read, 0, self.size, 2,
out_buffer_slice)
try:
disk_array.read(0, self.size, 2, out_buffer_slice)
except ValueError as exc:
self.assertEqual('output array not C contiguous', str(exc))
def test_buffer_too_small(self):
array, disk_array = self.create_array()
out_buffer = numpy.empty((self.size // 2, ), 'f8')
self.assertRaises(ValueError, disk_array.read, 0, self.size, 1,
out_buffer)
try:
disk_array.read(0, self.size, 1, out_buffer)
except ValueError as exc:
self.assertTrue('output array size invalid, got' in str(exc))
def test_buffer_too_large(self):
array, disk_array = self.create_array()
out_buffer = numpy.empty((self.size + 1, ), 'f8')
self.assertRaises(ValueError, disk_array.read, 0, self.size, 1,
out_buffer)
try:
disk_array.read(0, self.size, 1, out_buffer)
except ValueError as exc:
self.assertTrue('output array size invalid, got' in str(exc))
class SizeOnDiskInMemoryPropertyTestCase(common.TempFileMixin, TestCase):
def setUp(self):
super(SizeOnDiskInMemoryPropertyTestCase, self).setUp()
self.array_size = (10, 10)
self.array = self.h5file.create_array(
'/', 'somearray', numpy.zeros(self.array_size, 'i4'))
def test_all_zeros(self):
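        # For a plain, uncompressed Array both properties should simply be
        # the number of elements times the itemsize (10 * 10 * 4 bytes).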
self.assertEqual(self.array.size_on_disk, 10 * 10 * 4)
self.assertEqual(self.array.size_in_memory, 10 * 10 * 4)
class UnalignedAndComplexTestCase(common.TempFileMixin, TestCase):
"""Basic test for all the supported typecodes present in numpy.
    Most of them are supported by PyTables.
"""
def setUp(self):
super(UnalignedAndComplexTestCase, self).setUp()
self.root = self.h5file.root
def write_read(self, testArray):
if common.verbose:
print('\n', '-=' * 30)
print("\nRunning test for array with type '%s'" %
testArray.dtype.type)
# Create the array under root and name 'somearray'
a = testArray
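        # If endianness checking is requested, write the array using the
        # byte order opposite to the machine's so the read path must swap.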
if self.endiancheck:
byteorder = {"little": "big", "big": "little"}[sys.byteorder]
else:
byteorder = sys.byteorder
self.h5file.create_array(self.root, 'somearray', a, "Some array",
byteorder=byteorder)
if self.reopen:
self._reopen()
self.root = self.h5file.root
# Read the saved array
b = self.root.somearray.read()
# Get an array to be compared in the correct byteorder
c = a.newbyteorder(byteorder)
# Compare them. They should be equal.
if not allequal(c, b) and common.verbose:
print("Write and read arrays differ!")
print("Array written:", a)
print("Array written shape:", a.shape)
print("Array written itemsize:", a.itemsize)
print("Array written type:", a.dtype.type)
print("Array read:", b)
print("Array read shape:", b.shape)
print("Array read itemsize:", b.itemsize)
print("Array read type:", b.dtype.type)
# Check strictly the array equality
self.assertEqual(a.shape, b.shape)
self.assertEqual(a.shape, self.root.somearray.shape)
if a.dtype.byteorder != "|":
self.assertEqual(a.dtype, b.dtype)
self.assertEqual(a.dtype, self.root.somearray.atom.dtype)
self.assertEqual(byteorders[b.dtype.byteorder], sys.byteorder)
self.assertEqual(self.root.somearray.byteorder, byteorder)
self.assertTrue(allequal(c, b))
def test01_signedShort_unaligned(self):
"""Checking an unaligned signed short integer array"""
r = numpy.rec.array(b'a'*200, formats='i1,f4,i2', shape=10)
a = r["f2"]
# Ensure that this array is non-aligned
self.assertEqual(a.flags.aligned, False)
self.assertEqual(a.dtype.type, numpy.int16)
self.write_read(a)
def test02_float_unaligned(self):
"""Checking an unaligned single precision array"""
r = numpy.rec.array(b'a'*200, formats='i1,f4,i2', shape=10)
a = r["f1"]
# Ensure that this array is non-aligned
        self.assertEqual(a.flags.aligned, False)
self.assertEqual(a.dtype.type, numpy.float32)
self.write_read(a)
def test03_byte_offset(self):
"""Checking an offsetted byte array"""
r = numpy.arange(100, dtype=numpy.int8)
r.shape = (10, 10)
a = r[2]
self.write_read(a)
def test04_short_offset(self):
"""Checking an offsetted unsigned short int precision array"""
r = numpy.arange(100, dtype=numpy.uint32)
r.shape = (10, 10)
a = r[2]
self.write_read(a)
def test05_int_offset(self):
"""Checking an offsetted integer array"""
r = numpy.arange(100, dtype=numpy.int32)
r.shape = (10, 10)
a = r[2]
self.write_read(a)
def test06_longlongint_offset(self):
"""Checking an offsetted long long integer array"""
r = numpy.arange(100, dtype=numpy.int64)
r.shape = (10, 10)
a = r[2]
self.write_read(a)
def test07_float_offset(self):
"""Checking an offsetted single precision array"""
r = numpy.arange(100, dtype=numpy.float32)
r.shape = (10, 10)
a = r[2]
self.write_read(a)
def test08_double_offset(self):
"""Checking an offsetted double precision array"""
r = numpy.arange(100, dtype=numpy.float64)
r.shape = (10, 10)
a = r[2]
self.write_read(a)
def test09_float_offset_unaligned(self):
"""Checking an unaligned and offsetted single precision array"""
r = numpy.rec.array(b'a'*200, formats='i1,3f4,i2', shape=10)
a = r["f1"][3]
# Ensure that this array is non-aligned
self.assertEqual(a.flags.aligned, False)
self.assertEqual(a.dtype.type, numpy.float32)
self.write_read(a)
def test10_double_offset_unaligned(self):
"""Checking an unaligned and offsetted double precision array"""
r = numpy.rec.array(b'a'*400, formats='i1,3f8,i2', shape=10)
a = r["f1"][3]
# Ensure that this array is non-aligned
self.assertEqual(a.flags.aligned, False)
self.assertEqual(a.dtype.type, numpy.float64)
self.write_read(a)
def test11_int_byteorder(self):
"""Checking setting data with different byteorder in a range
(integer)"""
# Save an array with the reversed byteorder on it
a = numpy.arange(25, dtype=numpy.int32).reshape(5, 5)
a = a.byteswap()
a = a.newbyteorder()
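        # byteswap() plus newbyteorder() leaves the logical values intact
        # but tags the array with the non-native byte order.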
array = self.h5file.create_array(
self.h5file.root, 'array', a, "byteorder (int)")
# Read a subarray (got an array with the machine byteorder)
b = array[2:4, 3:5]
b = b.byteswap()
b = b.newbyteorder()
# Set this subarray back to the array
array[2:4, 3:5] = b
b = b.byteswap()
b = b.newbyteorder()
# Set this subarray back to the array
array[2:4, 3:5] = b
# Check that the array is back in the correct byteorder
c = array[...]
if common.verbose:
print("byteorder of array on disk-->", array.byteorder)
print("byteorder of subarray-->", b.dtype.byteorder)
print("subarray-->", b)
print("retrieved array-->", c)
self.assertTrue(allequal(a, c))
def test12_float_byteorder(self):
"""Checking setting data with different byteorder in a range (float)"""
# Save an array with the reversed byteorder on it
a = numpy.arange(25, dtype=numpy.float64).reshape(5, 5)
a = a.byteswap()
a = a.newbyteorder()
array = self.h5file.create_array(
self.h5file.root, 'array', a, "byteorder (float)")
# Read a subarray (got an array with the machine byteorder)
b = array[2:4, 3:5]
b = b.byteswap()
b = b.newbyteorder()
# Set this subarray back to the array
array[2:4, 3:5] = b
b = b.byteswap()
b = b.newbyteorder()
# Set this subarray back to the array
array[2:4, 3:5] = b
# Check that the array is back in the correct byteorder
c = array[...]
if common.verbose:
print("byteorder of array on disk-->", array.byteorder)
print("byteorder of subarray-->", b.dtype.byteorder)
print("subarray-->", b)
print("retrieved array-->", c)
self.assertTrue(allequal(a, c))
class ComplexNotReopenNotEndianTestCase(UnalignedAndComplexTestCase):
endiancheck = False
reopen = False
class ComplexReopenNotEndianTestCase(UnalignedAndComplexTestCase):
endiancheck = False
reopen = True
class ComplexNotReopenEndianTestCase(UnalignedAndComplexTestCase):
endiancheck = True
reopen = False
class ComplexReopenEndianTestCase(UnalignedAndComplexTestCase):
endiancheck = True
reopen = True
class GroupsArrayTestCase(common.TempFileMixin, TestCase):
"""This test class checks combinations of arrays with groups."""
def test00_iterativeGroups(self):
"""Checking combinations of arrays with groups."""
if common.verbose:
print('\n', '-=' * 30)
print("Running %s.test00_iterativeGroups..." %
self.__class__.__name__)
# Get the root group
group = self.h5file.root
# Set the type codes to test
        # The typecodes below do expose an ambiguity that is reported in:
# http://projects.scipy.org/scipy/numpy/ticket/283 and
# http://projects.scipy.org/scipy/numpy/ticket/290
typecodes = ['b', 'B', 'h', 'H', 'i', 'I', 'l', 'L', 'q', 'f', 'd',
'F', 'D']
if hasattr(tables, 'Float16Atom'):
typecodes.append('e')
if hasattr(tables, 'Float96Atom') or hasattr(tables, 'Float128Atom'):
typecodes.append('g')
if (hasattr(tables, 'Complex192Atom') or
hasattr(tables, 'Complex256Atom')):
typecodes.append('G')
for i, typecode in enumerate(typecodes):
a = numpy.ones((3,), typecode)
dsetname = 'array_' + typecode
if common.verbose:
print("Creating dataset:", group._g_join(dsetname))
self.h5file.create_array(group, dsetname, a, "Large array")
group = self.h5file.create_group(group, 'group' + str(i))
# Reopen the file
self._reopen()
# Get the root group
group = self.h5file.root
        # Get the metadata on the previously saved arrays
for i in range(len(typecodes)):
# Create an array for later comparison
a = numpy.ones((3,), typecodes[i])
# Get the dset object hanging from group
dset = getattr(group, 'array_' + typecodes[i])
# Get the actual array
b = dset.read()
if common.verbose:
print("Info from dataset:", dset._v_pathname)
print(" shape ==>", dset.shape, end=' ')
print(" type ==> %s" % dset.atom.dtype)
print("Array b read from file. Shape: ==>", b.shape, end=' ')
print(". Type ==> %s" % b.dtype)
self.assertEqual(a.shape, b.shape)
self.assertEqual(a.dtype, b.dtype)
self.assertTrue(allequal(a, b))
# Iterate over the next group
group = getattr(group, 'group' + str(i))
def test01_largeRankArrays(self):
"""Checking creation of large rank arrays (0 < rank <= 32)
        It also uses array ranks ranging up to maxrank.
"""
        # maximum recursion level (deepest group level) achieved:
        # maxrank = 32 (for an effective maximum rank of 32)
# This limit is due to HDF5 library limitations.
minrank = 1
maxrank = 32
if common.verbose:
print('\n', '-=' * 30)
print("Running %s.test01_largeRankArrays..." %
self.__class__.__name__)
print("Maximum rank for tested arrays:", maxrank)
group = self.h5file.root
if common.verbose:
print("Rank array writing progress: ", end=' ')
for rank in range(minrank, maxrank + 1):
            # Create an array of integers with incrementally bigger rank
a = numpy.ones((1,) * rank, numpy.int32)
if common.verbose:
print("%3d," % (rank), end=' ')
self.h5file.create_array(group, "array", a, "Rank: %s" % rank)
group = self.h5file.create_group(group, 'group' + str(rank))
# Reopen the file
self._reopen()
group = self.h5file.root
if common.verbose:
print()
print("Rank array reading progress: ")
        # Get the metadata on the previously saved arrays
for rank in range(minrank, maxrank + 1):
# Create an array for later comparison
a = numpy.ones((1,) * rank, numpy.int32)
# Get the actual array
b = group.array.read()
if common.verbose:
print("%3d," % (rank), end=' ')
if common.verbose and not allequal(a, b):
print("Info from dataset:", group.array._v_pathname)
print(" Shape: ==>", group.array.shape, end=' ')
print(" typecode ==> %c" % group.array.typecode)
print("Array b read from file. Shape: ==>", b.shape, end=' ')
print(". Type ==> %c" % b.dtype)
self.assertEqual(a.shape, b.shape)
self.assertEqual(a.dtype, b.dtype)
self.assertTrue(allequal(a, b))
# print(self.h5file)
# Iterate over the next group
group = self.h5file.get_node(group, 'group' + str(rank))
if common.verbose:
            print()  # This flushes the stdout buffer
class CopyTestCase(common.TempFileMixin, TestCase):
def test01_copy(self):
"""Checking Array.copy() method."""
if common.verbose:
print('\n', '-=' * 30)
print("Running %s.test01_copy..." % self.__class__.__name__)
# Create an Array
arr = numpy.array([[456, 2], [3, 457]], dtype='int16')
array1 = self.h5file.create_array(
self.h5file.root, 'array1', arr, "title array1")
# Copy to another Array
array2 = array1.copy('/', 'array2')
if self.close:
if common.verbose:
print("(closing file version)")
self._reopen()
array1 = self.h5file.root.array1
array2 = self.h5file.root.array2
if common.verbose:
print("array1-->", array1.read())
print("array2-->", array2.read())
# print("dirs-->", dir(array1), dir(array2))
print("attrs array1-->", repr(array1.attrs))
print("attrs array2-->", repr(array2.attrs))
# Check that all the elements are equal
self.assertTrue(allequal(array1.read(), array2.read()))
# Assert other properties in array
self.assertEqual(array1.nrows, array2.nrows)
self.assertEqual(array1.flavor, array2.flavor)
self.assertEqual(array1.atom.dtype, array2.atom.dtype)
self.assertEqual(array1.title, array2.title)
def test02_copy(self):
"""Checking Array.copy() method (where specified)"""
if common.verbose:
print('\n', '-=' * 30)
print("Running %s.test02_copy..." % self.__class__.__name__)
# Create an Array
arr = numpy.array([[456, 2], [3, 457]], dtype='int16')
array1 = self.h5file.create_array(
self.h5file.root, 'array1', arr, "title array1")
# Copy to another Array
group1 = self.h5file.create_group("/", "group1")
array2 = array1.copy(group1, 'array2')
if self.close:
if common.verbose:
print("(closing file version)")
self._reopen()
array1 = self.h5file.root.array1
array2 = self.h5file.root.group1.array2
if common.verbose:
print("array1-->", array1.read())
print("array2-->", array2.read())
# print("dirs-->", dir(array1), dir(array2))
print("attrs array1-->", repr(array1.attrs))
print("attrs array2-->", repr(array2.attrs))
# Check that all the elements are equal
self.assertTrue(allequal(array1.read(), array2.read()))
# Assert other properties in array
self.assertEqual(array1.nrows, array2.nrows)
self.assertEqual(array1.flavor, array2.flavor)
self.assertEqual(array1.atom.dtype, array2.atom.dtype)
self.assertEqual(array1.title, array2.title)
def test03_copy(self):
"""Checking Array.copy() method (checking title copying)"""
if common.verbose:
print('\n', '-=' * 30)
print("Running %s.test04_copy..." % self.__class__.__name__)
# Create an Array
arr = numpy.array([[456, 2], [3, 457]], dtype='int16')
array1 = self.h5file.create_array(
self.h5file.root, 'array1', arr, "title array1")
# Append some user attrs
array1.attrs.attr1 = "attr1"
array1.attrs.attr2 = 2
# Copy it to another Array
array2 = array1.copy('/', 'array2', title="title array2")
if self.close:
if common.verbose:
print("(closing file version)")
self._reopen()
array1 = self.h5file.root.array1
array2 = self.h5file.root.array2
# Assert user attributes
if common.verbose:
print("title of destination array-->", array2.title)
self.assertEqual(array2.title, "title array2")
def test04_copy(self):
"""Checking Array.copy() method (user attributes copied)"""
if common.verbose:
print('\n', '-=' * 30)
print("Running %s.test05_copy..." % self.__class__.__name__)
# Create an Array
arr = numpy.array([[456, 2], [3, 457]], dtype='int16')
array1 = self.h5file.create_array(
self.h5file.root, 'array1', arr, "title array1")
# Append some user attrs
array1.attrs.attr1 = "attr1"
array1.attrs.attr2 = 2
# Copy it to another Array
array2 = array1.copy('/', 'array2', copyuserattrs=1)
if self.close:
if common.verbose:
print("(closing file version)")
self._reopen()
array1 = self.h5file.root.array1
array2 = self.h5file.root.array2
if common.verbose:
print("attrs array1-->", repr(array1.attrs))
print("attrs array2-->", repr(array2.attrs))
# Assert user attributes
self.assertEqual(array2.attrs.attr1, "attr1")
self.assertEqual(array2.attrs.attr2, 2)
def test04b_copy(self):
"""Checking Array.copy() method (user attributes not copied)"""
if common.verbose:
print('\n', '-=' * 30)
print("Running %s.test05b_copy..." % self.__class__.__name__)
# Create an Array
arr = numpy.array([[456, 2], [3, 457]], dtype='int16')
array1 = self.h5file.create_array(
self.h5file.root, 'array1', arr, "title array1")
# Append some user attrs
array1.attrs.attr1 = "attr1"
array1.attrs.attr2 = 2
# Copy it to another Array
array2 = array1.copy('/', 'array2', copyuserattrs=0)
if self.close:
if common.verbose:
print("(closing file version)")
self._reopen()
array1 = self.h5file.root.array1
array2 = self.h5file.root.array2
if common.verbose:
print("attrs array1-->", repr(array1.attrs))
print("attrs array2-->", repr(array2.attrs))
# Assert user attributes
self.assertEqual(hasattr(array2.attrs, "attr1"), 0)
self.assertEqual(hasattr(array2.attrs, "attr2"), 0)
class CloseCopyTestCase(CopyTestCase):
close = 1
class OpenCopyTestCase(CopyTestCase):
close = 0
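# The index-based copy tests below check that Array.copy() honours the
# ``start``, ``stop`` and ``step`` arguments exactly like the equivalent
# NumPy slice ``r[start:stop:step]`` along the first axis.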
class CopyIndexTestCase(common.TempFileMixin, TestCase):
def test01_index(self):
"""Checking Array.copy() method with indexes."""
if common.verbose:
print('\n', '-=' * 30)
print("Running %s.test01_index..." % self.__class__.__name__)
        # Create a numpy array
r = numpy.arange(200, dtype='int32')
r.shape = (100, 2)
        # Save it in an array:
array1 = self.h5file.create_array(
self.h5file.root, 'array1', r, "title array1")
# Copy to another array
array2 = array1.copy("/", 'array2',
start=self.start,
stop=self.stop,
step=self.step)
if common.verbose:
print("array1-->", array1.read())
print("array2-->", array2.read())
print("attrs array1-->", repr(array1.attrs))
print("attrs array2-->", repr(array2.attrs))
# Check that all the elements are equal
r2 = r[self.start:self.stop:self.step]
self.assertTrue(allequal(r2, array2.read()))
# Assert the number of rows in array
if common.verbose:
print("nrows in array2-->", array2.nrows)
print("and it should be-->", r2.shape[0])
self.assertEqual(r2.shape[0], array2.nrows)
def test02_indexclosef(self):
"""Checking Array.copy() method with indexes (close file version)"""
if common.verbose:
print('\n', '-=' * 30)
print("Running %s.test02_indexclosef..." % self.__class__.__name__)
        # Create a numpy array
r = numpy.arange(200, dtype='int32')
r.shape = (100, 2)
        # Save it in an array:
array1 = self.h5file.create_array(
self.h5file.root, 'array1', r, "title array1")
# Copy to another array
array2 = array1.copy("/", 'array2',
start=self.start,
stop=self.stop,
step=self.step)
# Close and reopen the file
self._reopen()
array1 = self.h5file.root.array1
array2 = self.h5file.root.array2
if common.verbose:
print("array1-->", array1.read())
print("array2-->", array2.read())
print("attrs array1-->", repr(array1.attrs))
print("attrs array2-->", repr(array2.attrs))
# Check that all the elements are equal
r2 = r[self.start:self.stop:self.step]
self.assertTrue(allequal(r2, array2.read()))
# Assert the number of rows in array
if common.verbose:
print("nrows in array2-->", array2.nrows)
print("and it should be-->", r2.shape[0])
self.assertEqual(r2.shape[0], array2.nrows)
class CopyIndex1TestCase(CopyIndexTestCase):
start = 0
stop = 7
step = 1
class CopyIndex2TestCase(CopyIndexTestCase):
start = 0
stop = -1
step = 1
class CopyIndex3TestCase(CopyIndexTestCase):
start = 1
stop = 7
step = 1
class CopyIndex4TestCase(CopyIndexTestCase):
start = 0
stop = 6
step = 1
class CopyIndex5TestCase(CopyIndexTestCase):
start = 3
stop = 7
step = 1
class CopyIndex6TestCase(CopyIndexTestCase):
start = 3
stop = 6
step = 2
class CopyIndex7TestCase(CopyIndexTestCase):
start = 0
stop = 7
step = 10
class CopyIndex8TestCase(CopyIndexTestCase):
start = 6
    stop = -1  # Negative values mean starting from the end
step = 1
class CopyIndex9TestCase(CopyIndexTestCase):
start = 3
stop = 4
step = 1
class CopyIndex10TestCase(CopyIndexTestCase):
start = 3
stop = 4
step = 2
class CopyIndex11TestCase(CopyIndexTestCase):
start = -3
stop = -1
step = 2
class CopyIndex12TestCase(CopyIndexTestCase):
start = -1 # Should point to the last element
stop = None # None should mean the last element (including it)
step = 1
class GetItemTestCase(common.TempFileMixin, TestCase):
def test00_single(self):
"""Single element access (character types)"""
# Create the array under root and name 'somearray'
a = self.charList
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen()
arr = self.h5file.root.somearray
# Get and compare an element
if common.verbose:
print("Original first element:", a[0], type(a[0]))
print("Read first element:", arr[0], type(arr[0]))
self.assertTrue(allequal(a[0], arr[0]))
self.assertEqual(type(a[0]), type(arr[0]))
def test01_single(self):
"""Single element access (numerical types)"""
# Create the array under root and name 'somearray'
a = self.numericalList
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen()
arr = self.h5file.root.somearray
# Get and compare an element
if common.verbose:
print("Original first element:", a[0], type(a[0]))
print("Read first element:", arr[0], type(arr[0]))
self.assertEqual(a[0], arr[0])
self.assertEqual(type(a[0]), type(arr[0]))
def test02_range(self):
"""Range element access (character types)"""
# Create the array under root and name 'somearray'
a = self.charListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen()
arr = self.h5file.root.somearray
# Get and compare an element
if common.verbose:
print("Original elements:", a[1:4])
print("Read elements:", arr[1:4])
self.assertTrue(allequal(a[1:4], arr[1:4]))
def test03_range(self):
"""Range element access (numerical types)"""
# Create the array under root and name 'somearray'
a = self.numericalListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen()
arr = self.h5file.root.somearray
# Get and compare an element
if common.verbose:
print("Original elements:", a[1:4])
print("Read elements:", arr[1:4])
self.assertTrue(allequal(a[1:4], arr[1:4]))
def test04_range(self):
"""Range element access, strided (character types)"""
# Create the array under root and name 'somearray'
a = self.charListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen()
arr = self.h5file.root.somearray
# Get and compare an element
if common.verbose:
print("Original elements:", a[1:4:2])
print("Read elements:", arr[1:4:2])
self.assertTrue(allequal(a[1:4:2], arr[1:4:2]))
def test05_range(self):
"""Range element access, strided (numerical types)"""
# Create the array under root and name 'somearray'
a = self.numericalListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen()
arr = self.h5file.root.somearray
# Get and compare an element
if common.verbose:
print("Original elements:", a[1:4:2])
print("Read elements:", arr[1:4:2])
self.assertTrue(allequal(a[1:4:2], arr[1:4:2]))
def test06_negativeIndex(self):
"""Negative Index element access (character types)"""
# Create the array under root and name 'somearray'
a = self.charListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen()
arr = self.h5file.root.somearray
# Get and compare an element
if common.verbose:
print("Original last element:", a[-1])
print("Read last element:", arr[-1])
self.assertTrue(allequal(a[-1], arr[-1]))
def test07_negativeIndex(self):
"""Negative Index element access (numerical types)"""
# Create the array under root and name 'somearray'
a = self.numericalListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen()
arr = self.h5file.root.somearray
# Get and compare an element
if common.verbose:
print("Original before last element:", a[-2])
print("Read before last element:", arr[-2])
if isinstance(a[-2], numpy.ndarray):
self.assertTrue(allequal(a[-2], arr[-2]))
else:
self.assertEqual(a[-2], arr[-2])
def test08_negativeRange(self):
"""Negative range element access (character types)"""
# Create the array under root and name 'somearray'
a = self.charListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen()
arr = self.h5file.root.somearray
# Get and compare an element
if common.verbose:
print("Original last elements:", a[-4:-1])
print("Read last elements:", arr[-4:-1])
self.assertTrue(allequal(a[-4:-1], arr[-4:-1]))
def test09_negativeRange(self):
"""Negative range element access (numerical types)"""
# Create the array under root and name 'somearray'
a = self.numericalListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen()
arr = self.h5file.root.somearray
# Get and compare an element
if common.verbose:
print("Original last elements:", a[-4:-1])
print("Read last elements:", arr[-4:-1])
self.assertTrue(allequal(a[-4:-1], arr[-4:-1]))
class GI1NATestCase(GetItemTestCase, TestCase):
title = "Rank-1 case 1"
numericalList = numpy.array([3])
numericalListME = numpy.array([3, 2, 1, 0, 4, 5, 6])
charList = numpy.array(["3"], 'S')
charListME = numpy.array(
["321", "221", "121", "021", "421", "521", "621"], 'S')
class GI1NAOpenTestCase(GI1NATestCase):
close = 0
class GI1NACloseTestCase(GI1NATestCase):
close = 1
class GI2NATestCase(GetItemTestCase):
# A more complex example
title = "Rank-1,2 case 2"
numericalList = numpy.array([3, 4])
numericalListME = numpy.array([[3, 2, 1, 0, 4, 5, 6],
[2, 1, 0, 4, 5, 6, 7],
[4, 3, 2, 1, 0, 4, 5],
[3, 2, 1, 0, 4, 5, 6],
[3, 2, 1, 0, 4, 5, 6]])
charList = numpy.array(["a", "b"], 'S')
charListME = numpy.array(
[["321", "221", "121", "021", "421", "521", "621"],
["21", "21", "11", "02", "42", "21", "61"],
["31", "21", "12", "21", "41", "51", "621"],
["321", "221", "121", "021",
"421", "521", "621"],
["3241", "2321", "13216",
"0621", "4421", "5421", "a621"],
["a321", "s221", "d121", "g021", "b421", "5vvv21", "6zxzxs21"]], 'S')
class GI2NAOpenTestCase(GI2NATestCase):
close = 0
class GI2NACloseTestCase(GI2NATestCase):
close = 1
class SetItemTestCase(common.TempFileMixin, TestCase):
def test00_single(self):
"""Single element update (character types)"""
# Create the array under root and name 'somearray'
a = self.charList
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen('a')
arr = self.h5file.root.somearray
# Modify a single element of a and arr:
a[0] = b"b"
arr[0] = b"b"
# Get and compare an element
if common.verbose:
print("Original first element:", a[0])
print("Read first element:", arr[0])
self.assertTrue(allequal(a[0], arr[0]))
def test01_single(self):
"""Single element update (numerical types)"""
# Create the array under root and name 'somearray'
a = self.numericalList
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen('a')
arr = self.h5file.root.somearray
# Modify elements of a and arr:
a[0] = 333
arr[0] = 333
# Get and compare an element
if common.verbose:
print("Original first element:", a[0])
print("Read first element:", arr[0])
self.assertEqual(a[0], arr[0])
def test02_range(self):
"""Range element update (character types)"""
# Create the array under root and name 'somearray'
a = self.charListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen('a')
arr = self.h5file.root.somearray
# Modify elements of a and arr:
a[1:3] = b"xXx"
arr[1:3] = b"xXx"
# Get and compare an element
if common.verbose:
print("Original elements:", a[1:4])
print("Read elements:", arr[1:4])
self.assertTrue(allequal(a[1:4], arr[1:4]))
def test03_range(self):
"""Range element update (numerical types)"""
# Create the array under root and name 'somearray'
a = self.numericalListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen('a')
arr = self.h5file.root.somearray
# Modify elements of a and arr:
s = slice(1, 3, None)
rng = numpy.arange(a[s].size)*2 + 3
rng.shape = a[s].shape
a[s] = rng
arr[s] = rng
# Get and compare an element
if common.verbose:
print("Original elements:", a[1:4])
print("Read elements:", arr[1:4])
self.assertTrue(allequal(a[1:4], arr[1:4]))
def test04_range(self):
"""Range element update, strided (character types)"""
# Create the array under root and name 'somearray'
a = self.charListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen('a')
arr = self.h5file.root.somearray
# Modify elements of a and arr:
s = slice(1, 4, 2)
a[s] = b"xXx"
arr[s] = b"xXx"
# Get and compare an element
if common.verbose:
print("Original elements:", a[1:4:2])
print("Read elements:", arr[1:4:2])
self.assertTrue(allequal(a[1:4:2], arr[1:4:2]))
def test05_range(self):
"""Range element update, strided (numerical types)"""
# Create the array under root and name 'somearray'
a = self.numericalListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen('a')
arr = self.h5file.root.somearray
# Modify elements of a and arr:
s = slice(1, 4, 2)
rng = numpy.arange(a[s].size)*2 + 3
rng.shape = a[s].shape
a[s] = rng
arr[s] = rng
# Get and compare an element
if common.verbose:
print("Original elements:", a[1:4:2])
print("Read elements:", arr[1:4:2])
self.assertTrue(allequal(a[1:4:2], arr[1:4:2]))
def test06_negativeIndex(self):
"""Negative Index element update (character types)"""
# Create the array under root and name 'somearray'
a = self.charListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen('a')
arr = self.h5file.root.somearray
# Modify elements of a and arr:
s = -1
a[s] = b"xXx"
arr[s] = b"xXx"
# Get and compare an element
if common.verbose:
print("Original last element:", a[-1])
print("Read last element:", arr[-1])
self.assertTrue(allequal(a[-1], arr[-1]))
def test07_negativeIndex(self):
"""Negative Index element update (numerical types)"""
# Create the array under root and name 'somearray'
a = self.numericalListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen('a')
arr = self.h5file.root.somearray
# Modify elements of a and arr:
s = -2
a[s] = a[s]*2 + 3
arr[s] = arr[s]*2 + 3
# Get and compare an element
if common.verbose:
print("Original before last element:", a[-2])
print("Read before last element:", arr[-2])
if isinstance(a[-2], numpy.ndarray):
self.assertTrue(allequal(a[-2], arr[-2]))
else:
self.assertEqual(a[-2], arr[-2])
def test08_negativeRange(self):
"""Negative range element update (character types)"""
# Create the array under root and name 'somearray'
a = self.charListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen('a')
arr = self.h5file.root.somearray
# Modify elements of a and arr:
s = slice(-4, -1, None)
a[s] = b"xXx"
arr[s] = b"xXx"
# Get and compare an element
if common.verbose:
print("Original last elements:", a[-4:-1])
print("Read last elements:", arr[-4:-1])
self.assertTrue(allequal(a[-4:-1], arr[-4:-1]))
def test09_negativeRange(self):
"""Negative range element update (numerical types)"""
# Create the array under root and name 'somearray'
a = self.numericalListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen('a')
arr = self.h5file.root.somearray
# Modify elements of a and arr:
s = slice(-3, -1, None)
rng = numpy.arange(a[s].size)*2 + 3
rng.shape = a[s].shape
a[s] = rng
arr[s] = rng
# Get and compare an element
if common.verbose:
print("Original last elements:", a[-4:-1])
print("Read last elements:", arr[-4:-1])
self.assertTrue(allequal(a[-4:-1], arr[-4:-1]))
def test10_outOfRange(self):
"""Out of range update (numerical types)"""
# Create the array under root and name 'somearray'
a = self.numericalListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen('a')
arr = self.h5file.root.somearray
# Modify elements of arr that are out of range:
s = slice(1, a.shape[0]+1, None)
s2 = slice(1, 1000, None)
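        # Both slices reach past the array end; like NumPy, PyTables clips
        # them to the actual length, so the two assignments below modify
        # the same elements.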
rng = numpy.arange(a[s].size)*2 + 3
rng.shape = a[s].shape
a[s] = rng
rng2 = numpy.arange(a[s2].size)*2 + 3
rng2.shape = a[s2].shape
arr[s2] = rng2
# Get and compare an element
if common.verbose:
print("Original last elements:", a[-4:-1])
print("Read last elements:", arr[-4:-1])
self.assertTrue(allequal(a[-4:-1], arr[-4:-1]))
class SI1NATestCase(SetItemTestCase, TestCase):
title = "Rank-1 case 1"
numericalList = numpy.array([3])
numericalListME = numpy.array([3, 2, 1, 0, 4, 5, 6])
charList = numpy.array(["3"], 'S')
charListME = numpy.array(
["321", "221", "121", "021", "421", "521", "621"], 'S')
class SI1NAOpenTestCase(SI1NATestCase):
close = 0
class SI1NACloseTestCase(SI1NATestCase):
close = 1
class SI2NATestCase(SetItemTestCase):
# A more complex example
title = "Rank-1,2 case 2"
numericalList = numpy.array([3, 4])
numericalListME = numpy.array([[3, 2, 1, 0, 4, 5, 6],
[2, 1, 0, 4, 5, 6, 7],
[4, 3, 2, 1, 0, 4, 5],
[3, 2, 1, 0, 4, 5, 6],
[3, 2, 1, 0, 4, 5, 6]])
charList = numpy.array(["a", "b"], 'S')
charListME = numpy.array(
[["321", "221", "121", "021", "421", "521", "621"],
["21", "21", "11", "02", "42", "21", "61"],
["31", "21", "12", "21", "41", "51", "621"],
["321", "221", "121", "021",
"421", "521", "621"],
["3241", "2321", "13216",
"0621", "4421", "5421", "a621"],
["a321", "s221", "d121", "g021", "b421", "5vvv21", "6zxzxs21"]], 'S')
class SI2NAOpenTestCase(SI2NATestCase):
close = 0
class SI2NACloseTestCase(SI2NATestCase):
close = 1
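# The generator tests below iterate directly over Array nodes; like a NumPy
# array, an Array yields one element of its first axis per iteration, so
# the results can be compared row by row.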
class GeneratorTestCase(common.TempFileMixin, TestCase):
def test00a_single(self):
"""Testing generator access to Arrays, single elements (char)"""
# Create the array under root and name 'somearray'
a = self.charList
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen()
arr = self.h5file.root.somearray
# Get and compare an element
ga = [i for i in a]
garr = [i for i in arr]
if common.verbose:
print("Result of original iterator:", ga)
print("Result of read generator:", garr)
self.assertEqual(ga, garr)
def test00b_me(self):
"""Testing generator access to Arrays, multiple elements (char)"""
# Create the array under root and name 'somearray'
a = self.charListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen()
arr = self.h5file.root.somearray
# Get and compare an element
ga = [i for i in a]
garr = [i for i in arr]
if common.verbose:
print("Result of original iterator:", ga)
print("Result of read generator:", garr)
for i in range(len(ga)):
self.assertTrue(allequal(ga[i], garr[i]))
def test01a_single(self):
"""Testing generator access to Arrays, single elements (numeric)"""
# Create the array under root and name 'somearray'
a = self.numericalList
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen()
arr = self.h5file.root.somearray
# Get and compare an element
ga = [i for i in a]
garr = [i for i in arr]
if common.verbose:
print("Result of original iterator:", ga)
print("Result of read generator:", garr)
self.assertEqual(ga, garr)
def test01b_me(self):
"""Testing generator access to Arrays, multiple elements (numeric)"""
# Create the array under root and name 'somearray'
a = self.numericalListME
arr = self.h5file.create_array(
self.h5file.root, 'somearray', a, "Some array")
if self.close:
self._reopen()
arr = self.h5file.root.somearray
# Get and compare an element
ga = [i for i in a]
garr = [i for i in arr]
if common.verbose:
print("Result of original iterator:", ga)
print("Result of read generator:", garr)
for i in range(len(ga)):
self.assertTrue(allequal(ga[i], garr[i]))
class GE1NATestCase(GeneratorTestCase):
title = "Rank-1 case 1"
numericalList = numpy.array([3])
numericalListME = numpy.array([3, 2, 1, 0, 4, 5, 6])
charList = numpy.array(["3"], 'S')
charListME = numpy.array(
["321", "221", "121", "021", "421", "521", "621"], 'S')
class GE1NAOpenTestCase(GE1NATestCase):
close = 0
class GE1NACloseTestCase(GE1NATestCase):
close = 1
class GE2NATestCase(GeneratorTestCase):
# A more complex example
title = "Rank-1,2 case 2"
numericalList = numpy.array([3, 4])
numericalListME = numpy.array([[3, 2, 1, 0, 4, 5, 6],
[2, 1, 0, 4, 5, 6, 7],
[4, 3, 2, 1, 0, 4, 5],
[3, 2, 1, 0, 4, 5, 6],
[3, 2, 1, 0, 4, 5, 6]])
charList = numpy.array(["a", "b"], 'S')
charListME = numpy.array(
[["321", "221", "121", "021", "421", "521", "621"],
["21", "21", "11", "02", "42", "21", "61"],
["31", "21", "12", "21", "41", "51", "621"],
["321", "221", "121", "021",
"421", "521", "621"],
["3241", "2321", "13216",
"0621", "4421", "5421", "a621"],
["a321", "s221", "d121", "g021", "b421", "5vvv21", "6zxzxs21"]], 'S')
class GE2NAOpenTestCase(GE2NATestCase):
close = 0
class GE2NACloseTestCase(GE2NATestCase):
close = 1
class NonHomogeneousTestCase(common.TempFileMixin, TestCase):
def test(self):
"""Test for creation of non-homogeneous arrays."""
# This checks ticket #12.
self.assertRaises(ValueError,
self.h5file.create_array, '/', 'test', [1, [2, 3]])
self.assertRaises(NoSuchNodeError, self.h5file.remove_node, '/test')
class TruncateTestCase(common.TempFileMixin, TestCase):
def test(self):
"""Test for unability to truncate Array objects."""
array1 = self.h5file.create_array('/', 'array1', [0, 2])
self.assertRaises(TypeError, array1.truncate, 0)
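# The point-selection tests below index Array nodes with boolean masks or
# with the coordinate tuples returned by numpy.where(), mirroring NumPy's
# fancy-indexing behaviour.  A minimal sketch (illustrative only):
#
#     mask = nparr > 5
#     assert (tbarr[mask] == nparr[mask]).all()
#     coords = numpy.where(nparr > 5)
#     assert (tbarr[coords] == nparr[coords]).all()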
class PointSelectionTestCase(common.TempFileMixin, TestCase):
def setUp(self):
super(PointSelectionTestCase, self).setUp()
# Limits for selections
self.limits = [
(0, 1), # just one element
(20, -10), # no elements
(-10, 4), # several elements
(0, 10), # several elements (again)
]
# Create a sample array
size = numpy.prod(self.shape)
nparr = numpy.arange(size, dtype=numpy.int32).reshape(self.shape)
self.nparr = nparr
self.tbarr = self.h5file.create_array(self.h5file.root, 'array', nparr)
def test01a_read(self):
"""Test for point-selections (read, boolean keys)."""
nparr = self.nparr
tbarr = self.tbarr
for value1, value2 in self.limits:
key = (nparr >= value1) & (nparr < value2)
if common.verbose:
print("Selection to test:", key)
a = nparr[key]
b = tbarr[key]
self.assertTrue(
numpy.alltrue(a == b),
"NumPy array and PyTables selections does not match.")
def test01b_read(self):
"""Test for point-selections (read, integer keys)."""
nparr = self.nparr
tbarr = self.tbarr
for value1, value2 in self.limits:
key = numpy.where((nparr >= value1) & (nparr < value2))
if common.verbose:
print("Selection to test:", key)
a = nparr[key]
b = tbarr[key]
self.assertTrue(
numpy.alltrue(a == b),
"NumPy array and PyTables selections does not match.")
def test01c_read(self):
"""Test for point-selections (read, float keys)."""
nparr = self.nparr
tbarr = self.tbarr
for value1, value2 in self.limits:
key = numpy.where((nparr >= value1) & (nparr < value2))
if common.verbose:
print("Selection to test:", key)
# a = nparr[key]
fkey = numpy.array(key, "f4")
self.assertRaises((IndexError, TypeError), tbarr.__getitem__, fkey)
def test01d_read(self):
nparr = self.nparr
tbarr = self.tbarr
for key in self.working_keyset:
if common.verbose:
print("Selection to test:", key)
a = nparr[key]
b = tbarr[key]
npt.assert_array_equal(
a, b, "NumPy array and PyTables selections does not match.")
def test01e_read(self):
tbarr = self.tbarr
for key in self.not_working_keyset:
if common.verbose:
print("Selection to test:", key)
self.assertRaises(IndexError, tbarr.__getitem__, key)
def test02a_write(self):
"""Test for point-selections (write, boolean keys)."""
nparr = self.nparr
tbarr = self.tbarr
for value1, value2 in self.limits:
key = (nparr >= value1) & (nparr < value2)
if common.verbose:
print("Selection to test:", key)
s = nparr[key]
nparr[key] = s * 2
tbarr[key] = s * 2
a = nparr[:]
b = tbarr[:]
self.assertTrue(
numpy.alltrue(a == b),
"NumPy array and PyTables modifications does not match.")
def test02b_write(self):
"""Test for point-selections (write, integer keys)."""
nparr = self.nparr
tbarr = self.tbarr
for value1, value2 in self.limits:
key = numpy.where((nparr >= value1) & (nparr < value2))
if common.verbose:
print("Selection to test:", key)
s = nparr[key]
nparr[key] = s * 2
tbarr[key] = s * 2
a = nparr[:]
b = tbarr[:]
self.assertTrue(
numpy.alltrue(a == b),
"NumPy array and PyTables modifications does not match.")
def test02c_write(self):
"""Test for point-selections (write, integer values, broadcast)."""
nparr = self.nparr
tbarr = self.tbarr
for value1, value2 in self.limits:
key = numpy.where((nparr >= value1) & (nparr < value2))
if common.verbose:
print("Selection to test:", key)
# s = nparr[key]
nparr[key] = 2 # force a broadcast
tbarr[key] = 2 # force a broadcast
a = nparr[:]
b = tbarr[:]
self.assertTrue(
numpy.alltrue(a == b),
"NumPy array and PyTables modifications does not match.")
class PointSelection0(PointSelectionTestCase):
shape = (3,)
working_keyset = [
[0, 1],
[0, -1],
]
not_working_keyset = [
[0, 3],
[0, 4],
[0, -4],
]
class PointSelection1(PointSelectionTestCase):
shape = (5, 3, 3)
working_keyset = [
[(0, 0), (0, 1), (0, 0)],
[(0, 0), (0, -1), (0, 0)],
]
not_working_keyset = [
[(0, 0), (0, 3), (0, 0)],
[(0, 0), (0, 4), (0, 0)],
[(0, 0), (0, -4), (0, 0)],
[(0, 0), (0, -5), (0, 0)]
]
class PointSelection2(PointSelectionTestCase):
shape = (7, 3)
working_keyset = [
[(0, 0), (0, 1)],
[(0, 0), (0, -1)],
[(0, 0), (0, -2)],
]
not_working_keyset = [
[(0, 0), (0, 3)],
[(0, 0), (0, 4)],
[(0, 0), (0, -4)],
[(0, 0), (0, -5)],
]
class PointSelection3(PointSelectionTestCase):
shape = (4, 3, 2, 1)
working_keyset = [
[(0, 0), (0, 1), (0, 0), (0, 0)],
[(0, 0), (0, -1), (0, 0), (0, 0)],
]
not_working_keyset = [
[(0, 0), (0, 3), (0, 0), (0, 0)],
[(0, 0), (0, 4), (0, 0), (0, 0)],
[(0, 0), (0, -4), (0, 0), (0, 0)],
]
class PointSelection4(PointSelectionTestCase):
shape = (1, 3, 2, 5, 6)
working_keyset = [
[(0, 0), (0, 1), (0, 0), (0, 0), (0, 0)],
[(0, 0), (0, -1), (0, 0), (0, 0), (0, 0)],
]
not_working_keyset = [
[(0, 0), (0, 3), (0, 0), (0, 0), (0, 0)],
[(0, 0), (0, 4), (0, 0), (0, 0), (0, 0)],
[(0, 0), (0, -4), (0, 0), (0, 0), (0, 0)],
]
class FancySelectionTestCase(common.TempFileMixin, TestCase):
def setUp(self):
super(FancySelectionTestCase, self).setUp()
M, N, O = self.shape
# The next are valid selections for both NumPy and PyTables
self.working_keyset = [
([1, 3], slice(1, N-1), 2),
([M-1, 1, 3, 2], slice(None), 2), # unordered lists supported
(slice(M), [N-1, 1, 0], slice(None)),
(slice(1, M, 3), slice(1, N), [O-1, 1, 0]),
(M-1, [2, 1], 1),
(1, 2, 1), # regular selection
([1, 2], -2, -1), # negative indices
([1, -2], 2, -1), # more negative indices
([1, -2], 2, Ellipsis), # one ellipsis
(Ellipsis, [1, 2]), # one ellipsis
(numpy.array(
[1, -2], 'i4'), 2, -1), # array 32-bit instead of list
(numpy.array(
[-1, 2], 'i8'), 2, -1), # array 64-bit instead of list
]
# Using booleans instead of ints is deprecated since numpy 1.8
# Tests for keys that have to support the __index__ attribute
#if (sys.version_info[0] >= 2 and sys.version_info[1] >= 5):
# self.working_keyset.append(
# (False, True), # equivalent to (0,1) ;-)
# )
# Valid selections for NumPy, but not for PyTables (yet)
# The next should raise an IndexError
self.not_working_keyset = [
numpy.array([False, True], dtype="b1"), # boolean arrays
([1, 2, 1], 2, 1), # repeated values
([1, 2], 2, [1, 2]), # several lists
([], 2, 1), # empty selections
(Ellipsis, [1, 2], Ellipsis), # several ellipsis
# Using booleans instead of ints is deprecated since numpy 1.8
([False, True]), # boolean values with incompatible shape
]
# The next should raise an IndexError in both NumPy and PyTables
self.not_working_oob = [
([1, 2], 2, 1000), # out-of-bounds selections
([1, 2], 2000, 1), # out-of-bounds selections
]
        # The next should raise an IndexError in both NumPy and PyTables
self.not_working_too_many = [
([1, 2], 2, 1, 1),
]
# Create a sample array
nparr = numpy.empty(self.shape, dtype=numpy.int32)
data = numpy.arange(N * O, dtype=numpy.int32).reshape(N, O)
for i in range(M):
nparr[i] = data * i
self.nparr = nparr
self.tbarr = self.h5file.create_array(self.h5file.root, 'array', nparr)
def test01a_read(self):
"""Test for fancy-selections (working selections, read)."""
nparr = self.nparr
tbarr = self.tbarr
for key in self.working_keyset:
if common.verbose:
print("Selection to test:", key)
a = nparr[key]
b = tbarr[key]
self.assertTrue(
numpy.alltrue(a == b),
"NumPy array and PyTables selections does not match.")
def test01b_read(self):
"""Test for fancy-selections (not working selections, read)."""
# nparr = self.nparr
tbarr = self.tbarr
for key in self.not_working_keyset:
if common.verbose:
print("Selection to test:", key)
# a = nparr[key]
self.assertRaises(IndexError, tbarr.__getitem__, key)
def test01c_read(self):
"""Test for fancy-selections (out-of-bound indexes, read)."""
nparr = self.nparr
tbarr = self.tbarr
for key in self.not_working_oob:
if common.verbose:
print("Selection to test:", key)
self.assertRaises(IndexError, nparr.__getitem__, key)
self.assertRaises(IndexError, tbarr.__getitem__, key)
def test01d_read(self):
"""Test for fancy-selections (too many indexes, read)."""
nparr = self.nparr
tbarr = self.tbarr
for key in self.not_working_too_many:
if common.verbose:
print("Selection to test:", key)
# ValueError for numpy 1.6.x and earlier
# IndexError in numpy > 1.8.0
self.assertRaises((ValueError, IndexError), nparr.__getitem__, key)
self.assertRaises(IndexError, tbarr.__getitem__, key)
def test02a_write(self):
"""Test for fancy-selections (working selections, write)."""
nparr = self.nparr
tbarr = self.tbarr
for key in self.working_keyset:
if common.verbose:
print("Selection to test:", key)
s = nparr[key]
nparr[key] = s * 2
tbarr[key] = s * 2
a = nparr[:]
b = tbarr[:]
self.assertTrue(
numpy.alltrue(a == b),
"NumPy array and PyTables modifications does not match.")
def test02b_write(self):
"""Test for fancy-selections (working selections, write, broadcast)."""
nparr = self.nparr
tbarr = self.tbarr
for key in self.working_keyset:
if common.verbose:
print("Selection to test:", key)
# s = nparr[key]
nparr[key] = 2 # broadcast value
tbarr[key] = 2 # broadcast value
a = nparr[:]
b = tbarr[:]
# if common.verbose:
# print("NumPy modified array:", a)
            #     print("PyTables modified array:", b)
self.assertTrue(
numpy.alltrue(a == b),
"NumPy array and PyTables modifications does not match.")
class FancySelection1(FancySelectionTestCase):
shape = (5, 3, 3) # Minimum values
class FancySelection2(FancySelectionTestCase):
# shape = (5, 3, 3) # Minimum values
shape = (7, 3, 3)
class FancySelection3(FancySelectionTestCase):
# shape = (5, 3, 3) # Minimum values
shape = (7, 4, 5)
class FancySelection4(FancySelectionTestCase):
# shape = (5, 3, 3) # Minimum values
shape = (5, 3, 10)
class CopyNativeHDF5MDAtom(TestCase):
def setUp(self):
super(CopyNativeHDF5MDAtom, self).setUp()
filename = test_filename("array_mdatom.h5")
self.h5file = tables.open_file(filename, "r")
self.arr = self.h5file.root.arr
self.copy = tempfile.mktemp(".h5")
self.copyh = tables.open_file(self.copy, mode="w")
self.arr2 = self.arr.copy(self.copyh.root, newname="arr2")
def tearDown(self):
self.h5file.close()
self.copyh.close()
os.remove(self.copy)
super(CopyNativeHDF5MDAtom, self).tearDown()
def test01_copy(self):
"""Checking that native MD atoms are copied as-is"""
self.assertEqual(self.arr.atom, self.arr2.atom)
self.assertEqual(self.arr.shape, self.arr2.shape)
def test02_reopen(self):
"""Checking that native MD atoms are copied as-is (re-open)"""
self.copyh.close()
self.copyh = tables.open_file(self.copy, mode="r")
self.arr2 = self.copyh.root.arr2
self.assertEqual(self.arr.atom, self.arr2.atom)
self.assertEqual(self.arr.shape, self.arr2.shape)
class AccessClosedTestCase(common.TempFileMixin, TestCase):
def setUp(self):
super(AccessClosedTestCase, self).setUp()
a = numpy.zeros((10, 10))
self.array = self.h5file.create_array(self.h5file.root, 'array', a)
def test_read(self):
self.h5file.close()
self.assertRaises(ClosedNodeError, self.array.read)
def test_getitem(self):
self.h5file.close()
self.assertRaises(ClosedNodeError, self.array.__getitem__, 0)
def test_setitem(self):
self.h5file.close()
self.assertRaises(ClosedNodeError, self.array.__setitem__, 0, 0)
class BroadcastTest(common.TempFileMixin, TestCase):
def test(self):
"""Test correct broadcasting when the array atom is not scalar."""
array_shape = (2, 3)
element_shape = (3,)
        dtype = numpy.dtype((numpy.int_, element_shape))
atom = Atom.from_dtype(dtype)
h5arr = self.h5file.create_carray(self.h5file.root, 'array',
atom, array_shape)
size = numpy.prod(element_shape)
nparr = numpy.arange(size).reshape(element_shape)
h5arr[0] = nparr
self.assertTrue(numpy.all(h5arr[0] == nparr))
class TestCreateArrayArgs(common.TempFileMixin, TestCase):
where = '/'
name = 'array'
obj = numpy.array([[1, 2], [3, 4]])
title = 'title'
byteorder = None
createparents = False
atom = Atom.from_dtype(obj.dtype)
shape = obj.shape
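    # The tests below cover the supported ways of passing data to
    # File.create_array(): a concrete ``obj``, an ``atom``/``shape`` pair,
    # or consistent combinations of both; inconsistent combinations are
    # expected to raise TypeError.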
def test_positional_args(self):
self.h5file.create_array(self.where, self.name, self.obj, self.title)
self.h5file.close()
self.h5file = tables.open_file(self.h5fname)
ptarr = self.h5file.get_node(self.where, self.name)
nparr = ptarr.read()
self.assertEqual(ptarr.title, self.title)
self.assertEqual(ptarr.shape, self.shape)
self.assertEqual(ptarr.atom, self.atom)
self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
self.assertTrue(allequal(self.obj, nparr))
def test_positional_args_atom_shape(self):
self.h5file.create_array(self.where, self.name, None, self.title,
self.byteorder, self.createparents,
self.atom, self.shape)
self.h5file.close()
self.h5file = tables.open_file(self.h5fname)
ptarr = self.h5file.get_node(self.where, self.name)
nparr = ptarr.read()
self.assertEqual(ptarr.title, self.title)
self.assertEqual(ptarr.shape, self.shape)
self.assertEqual(ptarr.atom, self.atom)
self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
self.assertTrue(allequal(numpy.zeros_like(self.obj), nparr))
def test_kwargs_obj(self):
self.h5file.create_array(self.where, self.name, title=self.title,
obj=self.obj)
self.h5file.close()
self.h5file = tables.open_file(self.h5fname)
ptarr = self.h5file.get_node(self.where, self.name)
nparr = ptarr.read()
self.assertEqual(ptarr.title, self.title)
self.assertEqual(ptarr.shape, self.shape)
self.assertEqual(ptarr.atom, self.atom)
self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
self.assertTrue(allequal(self.obj, nparr))
def test_kwargs_atom_shape_01(self):
ptarr = self.h5file.create_array(self.where, self.name,
title=self.title,
atom=self.atom, shape=self.shape)
ptarr[...] = self.obj
self.h5file.close()
self.h5file = tables.open_file(self.h5fname)
ptarr = self.h5file.get_node(self.where, self.name)
nparr = ptarr.read()
self.assertEqual(ptarr.title, self.title)
self.assertEqual(ptarr.shape, self.shape)
self.assertEqual(ptarr.atom, self.atom)
self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
self.assertTrue(allequal(self.obj, nparr))
def test_kwargs_atom_shape_02(self):
ptarr = self.h5file.create_array(self.where, self.name,
title=self.title,
atom=self.atom, shape=self.shape)
#ptarr[...] = self.obj
self.h5file.close()
self.h5file = tables.open_file(self.h5fname)
ptarr = self.h5file.get_node(self.where, self.name)
nparr = ptarr.read()
self.assertEqual(ptarr.title, self.title)
self.assertEqual(ptarr.shape, self.shape)
self.assertEqual(ptarr.atom, self.atom)
self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
self.assertTrue(allequal(numpy.zeros_like(self.obj), nparr))
def test_kwargs_obj_atom(self):
ptarr = self.h5file.create_array(self.where, self.name,
title=self.title,
obj=self.obj,
atom=self.atom)
self.h5file.close()
self.h5file = tables.open_file(self.h5fname)
ptarr = self.h5file.get_node(self.where, self.name)
nparr = ptarr.read()
self.assertEqual(ptarr.title, self.title)
self.assertEqual(ptarr.shape, self.shape)
self.assertEqual(ptarr.atom, self.atom)
self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
self.assertTrue(allequal(self.obj, nparr))
def test_kwargs_obj_shape(self):
ptarr = self.h5file.create_array(self.where, self.name,
title=self.title,
obj=self.obj,
shape=self.shape)
self.h5file.close()
self.h5file = tables.open_file(self.h5fname)
ptarr = self.h5file.get_node(self.where, self.name)
nparr = ptarr.read()
self.assertEqual(ptarr.title, self.title)
self.assertEqual(ptarr.shape, self.shape)
self.assertEqual(ptarr.atom, self.atom)
self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
self.assertTrue(allequal(self.obj, nparr))
def test_kwargs_obj_atom_shape(self):
ptarr = self.h5file.create_array(self.where, self.name,
title=self.title,
obj=self.obj,
atom=self.atom,
shape=self.shape)
self.h5file.close()
self.h5file = tables.open_file(self.h5fname)
ptarr = self.h5file.get_node(self.where, self.name)
nparr = ptarr.read()
self.assertEqual(ptarr.title, self.title)
self.assertEqual(ptarr.shape, self.shape)
self.assertEqual(ptarr.atom, self.atom)
self.assertEqual(ptarr.atom.dtype, self.atom.dtype)
self.assertTrue(allequal(self.obj, nparr))
def test_kwargs_obj_atom_error(self):
atom = Atom.from_dtype(numpy.dtype('complex'))
#shape = self.shape + self.shape
self.assertRaises(TypeError,
self.h5file.create_array,
self.where,
self.name,
title=self.title,
obj=self.obj,
atom=atom)
def test_kwargs_obj_shape_error(self):
#atom = Atom.from_dtype(numpy.dtype('complex'))
shape = self.shape + self.shape
self.assertRaises(TypeError,
self.h5file.create_array,
self.where,
self.name,
title=self.title,
obj=self.obj,
shape=shape)
def test_kwargs_obj_atom_shape_error_01(self):
atom = Atom.from_dtype(numpy.dtype('complex'))
#shape = self.shape + self.shape
self.assertRaises(TypeError,
self.h5file.create_array,
self.where,
self.name,
title=self.title,
obj=self.obj,
atom=atom,
shape=self.shape)
def test_kwargs_obj_atom_shape_error_02(self):
#atom = Atom.from_dtype(numpy.dtype('complex'))
shape = self.shape + self.shape
self.assertRaises(TypeError,
self.h5file.create_array,
self.where,
self.name,
title=self.title,
obj=self.obj,
atom=self.atom,
shape=shape)
def test_kwargs_obj_atom_shape_error_03(self):
atom = Atom.from_dtype(numpy.dtype('complex'))
shape = self.shape + self.shape
self.assertRaises(TypeError,
self.h5file.create_array,
self.where,
self.name,
title=self.title,
obj=self.obj,
atom=atom,
shape=shape)
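# Assemble the unittest suite below; ``niter`` can be raised above 1 to run
# the whole collection of test cases repeatedly.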
def suite():
theSuite = unittest.TestSuite()
niter = 1
for i in range(niter):
# The scalar case test should be refined in order to work
theSuite.addTest(unittest.makeSuite(Basic0DOneTestCase))
theSuite.addTest(unittest.makeSuite(Basic0DTwoTestCase))
# theSuite.addTest(unittest.makeSuite(Basic1DZeroTestCase))
theSuite.addTest(unittest.makeSuite(Basic1DOneTestCase))
theSuite.addTest(unittest.makeSuite(Basic1DTwoTestCase))
theSuite.addTest(unittest.makeSuite(Basic1DThreeTestCase))
theSuite.addTest(unittest.makeSuite(Basic2DOneTestCase))
theSuite.addTest(unittest.makeSuite(Basic2DTwoTestCase))
theSuite.addTest(unittest.makeSuite(Basic10DTestCase))
# The 32 dimensions case is tested on GroupsArray
# theSuite.addTest(unittest.makeSuite(Basic32DTestCase))
theSuite.addTest(unittest.makeSuite(ReadOutArgumentTests))
theSuite.addTest(unittest.makeSuite(
SizeOnDiskInMemoryPropertyTestCase))
theSuite.addTest(unittest.makeSuite(GroupsArrayTestCase))
theSuite.addTest(unittest.makeSuite(ComplexNotReopenNotEndianTestCase))
theSuite.addTest(unittest.makeSuite(ComplexReopenNotEndianTestCase))
theSuite.addTest(unittest.makeSuite(ComplexNotReopenEndianTestCase))
theSuite.addTest(unittest.makeSuite(ComplexReopenEndianTestCase))
theSuite.addTest(unittest.makeSuite(CloseCopyTestCase))
theSuite.addTest(unittest.makeSuite(OpenCopyTestCase))
theSuite.addTest(unittest.makeSuite(CopyIndex1TestCase))
theSuite.addTest(unittest.makeSuite(CopyIndex2TestCase))
theSuite.addTest(unittest.makeSuite(CopyIndex3TestCase))
theSuite.addTest(unittest.makeSuite(CopyIndex4TestCase))
theSuite.addTest(unittest.makeSuite(CopyIndex5TestCase))
theSuite.addTest(unittest.makeSuite(CopyIndex6TestCase))
theSuite.addTest(unittest.makeSuite(CopyIndex7TestCase))
theSuite.addTest(unittest.makeSuite(CopyIndex8TestCase))
theSuite.addTest(unittest.makeSuite(CopyIndex9TestCase))
theSuite.addTest(unittest.makeSuite(CopyIndex10TestCase))
theSuite.addTest(unittest.makeSuite(CopyIndex11TestCase))
theSuite.addTest(unittest.makeSuite(CopyIndex12TestCase))
theSuite.addTest(unittest.makeSuite(GI1NAOpenTestCase))
theSuite.addTest(unittest.makeSuite(GI1NACloseTestCase))
theSuite.addTest(unittest.makeSuite(GI2NAOpenTestCase))
theSuite.addTest(unittest.makeSuite(GI2NACloseTestCase))
theSuite.addTest(unittest.makeSuite(SI1NAOpenTestCase))
theSuite.addTest(unittest.makeSuite(SI1NACloseTestCase))
theSuite.addTest(unittest.makeSuite(SI2NAOpenTestCase))
theSuite.addTest(unittest.makeSuite(SI2NACloseTestCase))
theSuite.addTest(unittest.makeSuite(GE1NAOpenTestCase))
theSuite.addTest(unittest.makeSuite(GE1NACloseTestCase))
theSuite.addTest(unittest.makeSuite(GE2NAOpenTestCase))
theSuite.addTest(unittest.makeSuite(GE2NACloseTestCase))
theSuite.addTest(unittest.makeSuite(NonHomogeneousTestCase))
theSuite.addTest(unittest.makeSuite(TruncateTestCase))
theSuite.addTest(unittest.makeSuite(FancySelection1))
theSuite.addTest(unittest.makeSuite(FancySelection2))
theSuite.addTest(unittest.makeSuite(FancySelection3))
theSuite.addTest(unittest.makeSuite(FancySelection4))
theSuite.addTest(unittest.makeSuite(PointSelection0))
theSuite.addTest(unittest.makeSuite(PointSelection1))
theSuite.addTest(unittest.makeSuite(PointSelection2))
theSuite.addTest(unittest.makeSuite(PointSelection3))
theSuite.addTest(unittest.makeSuite(PointSelection4))
theSuite.addTest(unittest.makeSuite(CopyNativeHDF5MDAtom))
theSuite.addTest(unittest.makeSuite(AccessClosedTestCase))
theSuite.addTest(unittest.makeSuite(TestCreateArrayArgs))
theSuite.addTest(unittest.makeSuite(BroadcastTest))
return theSuite
if __name__ == '__main__':
common.parse_argv(sys.argv)
common.print_versions()
unittest.main(defaultTest='suite')
| gdementen/PyTables | tables/tests/test_array.py | Python | bsd-3-clause | 95,636 |
from __future__ import unicode_literals
import re
from functools import partial
from importlib import import_module
from inspect import getargspec, getcallargs
import warnings
from django.apps import apps
from django.conf import settings
from django.template.context import (BaseContext, Context, RequestContext, # NOQA: imported for backwards compatibility
ContextPopException)
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.itercompat import is_iterable
from django.utils.text import (smart_split, unescape_string_literal,
get_text_list)
from django.utils.encoding import force_str, force_text
from django.utils.translation import ugettext_lazy, pgettext_lazy
from django.utils.safestring import (SafeData, EscapeData, mark_safe,
mark_for_escaping)
from django.utils.formats import localize
from django.utils.html import escape
from django.utils.module_loading import module_has_submodule
from django.utils import six
from django.utils.timezone import template_localtime
from django.utils.encoding import python_2_unicode_compatible
TOKEN_TEXT = 0
TOKEN_VAR = 1
TOKEN_BLOCK = 2
TOKEN_COMMENT = 3
TOKEN_MAPPING = {
TOKEN_TEXT: 'Text',
TOKEN_VAR: 'Var',
TOKEN_BLOCK: 'Block',
TOKEN_COMMENT: 'Comment',
}
# template syntax constants
FILTER_SEPARATOR = '|'
FILTER_ARGUMENT_SEPARATOR = ':'
VARIABLE_ATTRIBUTE_SEPARATOR = '.'
BLOCK_TAG_START = '{%'
BLOCK_TAG_END = '%}'
VARIABLE_TAG_START = '{{'
VARIABLE_TAG_END = '}}'
COMMENT_TAG_START = '{#'
COMMENT_TAG_END = '#}'
TRANSLATOR_COMMENT_MARK = 'Translators'
SINGLE_BRACE_START = '{'
SINGLE_BRACE_END = '}'
ALLOWED_VARIABLE_CHARS = ('abcdefghijklmnopqrstuvwxyz'
'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.')
# what to report as the origin for templates that come from non-loader sources
# (e.g. strings)
UNKNOWN_SOURCE = '<unknown source>'
# match a variable or block tag and capture the entire tag, including start/end
# delimiters
tag_re = (re.compile('(%s.*?%s|%s.*?%s|%s.*?%s)' %
(re.escape(BLOCK_TAG_START), re.escape(BLOCK_TAG_END),
re.escape(VARIABLE_TAG_START), re.escape(VARIABLE_TAG_END),
re.escape(COMMENT_TAG_START), re.escape(COMMENT_TAG_END))))
# global dictionary of libraries that have been loaded using get_library
libraries = {}
# global list of libraries to load by default for a new parser
builtins = []
# True if TEMPLATE_STRING_IF_INVALID contains a format string (%s). None means
# uninitialized.
invalid_var_format_string = None
class TemplateSyntaxError(Exception):
pass
class TemplateDoesNotExist(Exception):
pass
class TemplateEncodingError(Exception):
pass
@python_2_unicode_compatible
class VariableDoesNotExist(Exception):
def __init__(self, msg, params=()):
self.msg = msg
self.params = params
def __str__(self):
return self.msg % tuple(force_text(p, errors='replace') for p in self.params)
class InvalidTemplateLibrary(Exception):
pass
class Origin(object):
def __init__(self, name):
self.name = name
def reload(self):
raise NotImplementedError('subclasses of Origin must provide a reload() method')
def __str__(self):
return self.name
class StringOrigin(Origin):
def __init__(self, source):
super(StringOrigin, self).__init__(UNKNOWN_SOURCE)
self.source = source
def reload(self):
return self.source
class Template(object):
def __init__(self, template_string, origin=None, name=None):
try:
template_string = force_text(template_string)
except UnicodeDecodeError:
raise TemplateEncodingError("Templates can only be constructed "
"from unicode or UTF-8 strings.")
if settings.TEMPLATE_DEBUG and origin is None:
origin = StringOrigin(template_string)
self.nodelist = compile_string(template_string, origin)
self.name = name
self.origin = origin
def __iter__(self):
for node in self.nodelist:
for subnode in node:
yield subnode
def _render(self, context):
return self.nodelist.render(context)
def render(self, context):
"Display stage -- can be called many times"
context.render_context.push()
try:
return self._render(context)
finally:
context.render_context.pop()
def compile_string(template_string, origin):
"Compiles template_string into NodeList ready for rendering"
if settings.TEMPLATE_DEBUG:
from django.template.debug import DebugLexer, DebugParser
lexer_class, parser_class = DebugLexer, DebugParser
else:
lexer_class, parser_class = Lexer, Parser
lexer = lexer_class(template_string, origin)
parser = parser_class(lexer.tokenize())
return parser.parse()
class Token(object):
def __init__(self, token_type, contents):
# token_type must be TOKEN_TEXT, TOKEN_VAR, TOKEN_BLOCK or
# TOKEN_COMMENT.
self.token_type, self.contents = token_type, contents
self.lineno = None
def __str__(self):
token_name = TOKEN_MAPPING[self.token_type]
return ('<%s token: "%s...">' %
(token_name, self.contents[:20].replace('\n', '')))
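    # split_contents() honours quoting via smart_split() and keeps
    # translation-marked literals such as _("some text") together as a single
    # bit, so a block token like 'url "a b" c' splits into ['url', '"a b"', 'c'].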
def split_contents(self):
split = []
bits = iter(smart_split(self.contents))
for bit in bits:
# Handle translation-marked template pieces
if bit.startswith('_("') or bit.startswith("_('"):
                sentinel = bit[2] + ')'
                trans_bit = [bit]
                while not bit.endswith(sentinel):
bit = next(bits)
trans_bit.append(bit)
bit = ' '.join(trans_bit)
split.append(bit)
return split
class Lexer(object):
def __init__(self, template_string, origin):
self.template_string = template_string
self.origin = origin
self.lineno = 1
self.verbatim = False
def tokenize(self):
"""
Return a list of tokens from a given template_string.
"""
in_tag = False
result = []
for bit in tag_re.split(self.template_string):
if bit:
result.append(self.create_token(bit, in_tag))
in_tag = not in_tag
return result
def create_token(self, token_string, in_tag):
"""
Convert the given token string into a new Token object and return it.
If in_tag is True, we are processing something that matched a tag,
otherwise it should be treated as a literal string.
"""
if in_tag and token_string.startswith(BLOCK_TAG_START):
# The [2:-2] ranges below strip off *_TAG_START and *_TAG_END.
# We could do len(BLOCK_TAG_START) to be more "correct", but we've
# hard-coded the 2s here for performance. And it's not like
# the TAG_START values are going to change anytime, anyway.
block_content = token_string[2:-2].strip()
if self.verbatim and block_content == self.verbatim:
self.verbatim = False
if in_tag and not self.verbatim:
if token_string.startswith(VARIABLE_TAG_START):
token = Token(TOKEN_VAR, token_string[2:-2].strip())
elif token_string.startswith(BLOCK_TAG_START):
if block_content[:9] in ('verbatim', 'verbatim '):
self.verbatim = 'end%s' % block_content
token = Token(TOKEN_BLOCK, block_content)
elif token_string.startswith(COMMENT_TAG_START):
content = ''
if token_string.find(TRANSLATOR_COMMENT_MARK):
content = token_string[2:-2].strip()
token = Token(TOKEN_COMMENT, content)
else:
token = Token(TOKEN_TEXT, token_string)
token.lineno = self.lineno
self.lineno += token_string.count('\n')
return token
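# The Parser consumes the token stream produced by the Lexer and builds a
# NodeList, dispatching each block tag to the compile function registered for
# it in self.tags and raising TemplateSyntaxError for malformed input.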
class Parser(object):
def __init__(self, tokens):
self.tokens = tokens
self.tags = {}
self.filters = {}
for lib in builtins:
self.add_library(lib)
def parse(self, parse_until=None):
if parse_until is None:
parse_until = []
nodelist = self.create_nodelist()
while self.tokens:
token = self.next_token()
# Use the raw values here for TOKEN_* for a tiny performance boost.
if token.token_type == 0: # TOKEN_TEXT
self.extend_nodelist(nodelist, TextNode(token.contents), token)
elif token.token_type == 1: # TOKEN_VAR
if not token.contents:
self.empty_variable(token)
try:
filter_expression = self.compile_filter(token.contents)
except TemplateSyntaxError as e:
if not self.compile_filter_error(token, e):
raise
var_node = self.create_variable_node(filter_expression)
self.extend_nodelist(nodelist, var_node, token)
elif token.token_type == 2: # TOKEN_BLOCK
try:
command = token.contents.split()[0]
except IndexError:
self.empty_block_tag(token)
if command in parse_until:
# put token back on token list so calling
# code knows why it terminated
self.prepend_token(token)
return nodelist
# execute callback function for this tag and append
# resulting node
self.enter_command(command, token)
try:
compile_func = self.tags[command]
except KeyError:
self.invalid_block_tag(token, command, parse_until)
try:
compiled_result = compile_func(self, token)
except TemplateSyntaxError as e:
if not self.compile_function_error(token, e):
raise
self.extend_nodelist(nodelist, compiled_result, token)
self.exit_command()
if parse_until:
self.unclosed_block_tag(parse_until)
return nodelist
def skip_past(self, endtag):
while self.tokens:
token = self.next_token()
if token.token_type == TOKEN_BLOCK and token.contents == endtag:
return
self.unclosed_block_tag([endtag])
def create_variable_node(self, filter_expression):
return VariableNode(filter_expression)
def create_nodelist(self):
return NodeList()
def extend_nodelist(self, nodelist, node, token):
if node.must_be_first and nodelist:
try:
if nodelist.contains_nontext:
raise AttributeError
except AttributeError:
raise TemplateSyntaxError("%r must be the first tag "
"in the template." % node)
if isinstance(nodelist, NodeList) and not isinstance(node, TextNode):
nodelist.contains_nontext = True
nodelist.append(node)
def enter_command(self, command, token):
pass
def exit_command(self):
pass
def error(self, token, msg):
return TemplateSyntaxError(msg)
def empty_variable(self, token):
raise self.error(token, "Empty variable tag")
def empty_block_tag(self, token):
raise self.error(token, "Empty block tag")
def invalid_block_tag(self, token, command, parse_until=None):
if parse_until:
raise self.error(token, "Invalid block tag: '%s', expected %s" %
(command, get_text_list(["'%s'" % p for p in parse_until])))
raise self.error(token, "Invalid block tag: '%s'" % command)
def unclosed_block_tag(self, parse_until):
raise self.error(None, "Unclosed tags: %s " % ', '.join(parse_until))
def compile_filter_error(self, token, e):
pass
def compile_function_error(self, token, e):
pass
def next_token(self):
return self.tokens.pop(0)
def prepend_token(self, token):
self.tokens.insert(0, token)
def delete_first_token(self):
del self.tokens[0]
def add_library(self, lib):
self.tags.update(lib.tags)
self.filters.update(lib.filters)
def compile_filter(self, token):
"""
Convenient wrapper for FilterExpression
"""
return FilterExpression(token, self)
def find_filter(self, filter_name):
if filter_name in self.filters:
return self.filters[filter_name]
else:
raise TemplateSyntaxError("Invalid filter: '%s'" % filter_name)
class TokenParser(object):
"""
Subclass this and implement the top() method to parse a template line.
When instantiating the parser, pass in the line from the Django template
parser.
The parser's "tagname" instance-variable stores the name of the tag that
the filter was called with.
"""
def __init__(self, subject):
self.subject = subject
self.pointer = 0
self.backout = []
self.tagname = self.tag()
def top(self):
"""
Overload this method to do the actual parsing and return the result.
"""
        raise NotImplementedError('subclasses of TokenParser must provide a top() method')
def more(self):
"""
Returns True if there is more stuff in the tag.
"""
return self.pointer < len(self.subject)
def back(self):
"""
Undoes the last microparser. Use this for lookahead and backtracking.
"""
if not len(self.backout):
raise TemplateSyntaxError("back called without some previous "
"parsing")
self.pointer = self.backout.pop()
def tag(self):
"""
A microparser that just returns the next tag from the line.
"""
subject = self.subject
i = self.pointer
if i >= len(subject):
raise TemplateSyntaxError("expected another tag, found "
"end of string: %s" % subject)
p = i
while i < len(subject) and subject[i] not in (' ', '\t'):
i += 1
s = subject[p:i]
while i < len(subject) and subject[i] in (' ', '\t'):
i += 1
self.backout.append(self.pointer)
self.pointer = i
return s
def value(self):
"""
A microparser that parses for a value: some string constant or
variable name.
"""
subject = self.subject
i = self.pointer
def next_space_index(subject, i):
"""
Increment pointer until a real space (i.e. a space not within
quotes) is encountered
"""
while i < len(subject) and subject[i] not in (' ', '\t'):
if subject[i] in ('"', "'"):
c = subject[i]
i += 1
while i < len(subject) and subject[i] != c:
i += 1
if i >= len(subject):
raise TemplateSyntaxError("Searching for value. "
"Unexpected end of string in column %d: %s" %
(i, subject))
i += 1
return i
if i >= len(subject):
raise TemplateSyntaxError("Searching for value. Expected another "
"value but found end of string: %s" %
subject)
if subject[i] in ('"', "'"):
p = i
i += 1
while i < len(subject) and subject[i] != subject[p]:
i += 1
if i >= len(subject):
raise TemplateSyntaxError("Searching for value. Unexpected "
"end of string in column %d: %s" %
(i, subject))
i += 1
# Continue parsing until next "real" space,
# so that filters are also included
i = next_space_index(subject, i)
res = subject[p:i]
while i < len(subject) and subject[i] in (' ', '\t'):
i += 1
self.backout.append(self.pointer)
self.pointer = i
return res
else:
p = i
i = next_space_index(subject, i)
s = subject[p:i]
while i < len(subject) and subject[i] in (' ', '\t'):
i += 1
self.backout.append(self.pointer)
self.pointer = i
return s
# This only matches constant *strings* (things in quotes or marked for
# translation). Numbers are treated as variables for implementation reasons
# (so that they retain their type when passed to filters).
constant_string = r"""
(?:%(i18n_open)s%(strdq)s%(i18n_close)s|
%(i18n_open)s%(strsq)s%(i18n_close)s|
%(strdq)s|
%(strsq)s)
""" % {
'strdq': r'"[^"\\]*(?:\\.[^"\\]*)*"', # double-quoted string
'strsq': r"'[^'\\]*(?:\\.[^'\\]*)*'", # single-quoted string
'i18n_open': re.escape("_("),
'i18n_close': re.escape(")"),
}
constant_string = constant_string.replace("\n", "")
filter_raw_string = r"""
^(?P<constant>%(constant)s)|
^(?P<var>[%(var_chars)s]+|%(num)s)|
(?:\s*%(filter_sep)s\s*
(?P<filter_name>\w+)
(?:%(arg_sep)s
(?:
(?P<constant_arg>%(constant)s)|
(?P<var_arg>[%(var_chars)s]+|%(num)s)
)
)?
)""" % {
'constant': constant_string,
'num': r'[-+\.]?\d[\d\.e]*',
    'var_chars': r"\w\.",
'filter_sep': re.escape(FILTER_SEPARATOR),
'arg_sep': re.escape(FILTER_ARGUMENT_SEPARATOR),
}
filter_re = re.compile(filter_raw_string, re.UNICODE | re.VERBOSE)
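# filter_re tokenises expressions such as variable|default:"nothing"|upper:
# the first match captures the leading variable or constant, and every
# subsequent match captures one filter name plus its optional argument.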
class FilterExpression(object):
"""
Parses a variable token and its optional filters (all as a single string),
    and returns a list of tuples of the filter name and arguments.
Sample::
>>> token = 'variable|default:"Default value"|date:"Y-m-d"'
>>> p = Parser('')
>>> fe = FilterExpression(token, p)
>>> len(fe.filters)
2
>>> fe.var
<Variable: 'variable'>
"""
def __init__(self, token, parser):
self.token = token
matches = filter_re.finditer(token)
var_obj = None
filters = []
upto = 0
for match in matches:
start = match.start()
if upto != start:
raise TemplateSyntaxError("Could not parse some characters: "
"%s|%s|%s" %
(token[:upto], token[upto:start],
token[start:]))
if var_obj is None:
var, constant = match.group("var", "constant")
if constant:
try:
var_obj = Variable(constant).resolve({})
except VariableDoesNotExist:
var_obj = None
elif var is None:
raise TemplateSyntaxError("Could not find variable at "
"start of %s." % token)
else:
var_obj = Variable(var)
else:
filter_name = match.group("filter_name")
args = []
constant_arg, var_arg = match.group("constant_arg", "var_arg")
if constant_arg:
args.append((False, Variable(constant_arg).resolve({})))
elif var_arg:
args.append((True, Variable(var_arg)))
filter_func = parser.find_filter(filter_name)
self.args_check(filter_name, filter_func, args)
filters.append((filter_func, args))
upto = match.end()
if upto != len(token):
raise TemplateSyntaxError("Could not parse the remainder: '%s' "
"from '%s'" % (token[upto:], token))
self.filters = filters
self.var = var_obj
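    # resolve() first evaluates the underlying variable against the context
    # (honouring ignore_failures and TEMPLATE_STRING_IF_INVALID when it is
    # missing) and then applies each parsed filter in turn, propagating
    # safe/escape markers on the results.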
def resolve(self, context, ignore_failures=False):
if isinstance(self.var, Variable):
try:
obj = self.var.resolve(context)
except VariableDoesNotExist:
if ignore_failures:
obj = None
else:
if settings.TEMPLATE_STRING_IF_INVALID:
global invalid_var_format_string
if invalid_var_format_string is None:
invalid_var_format_string = '%s' in settings.TEMPLATE_STRING_IF_INVALID
if invalid_var_format_string:
return settings.TEMPLATE_STRING_IF_INVALID % self.var
return settings.TEMPLATE_STRING_IF_INVALID
else:
obj = settings.TEMPLATE_STRING_IF_INVALID
else:
obj = self.var
for func, args in self.filters:
arg_vals = []
for lookup, arg in args:
if not lookup:
arg_vals.append(mark_safe(arg))
else:
arg_vals.append(arg.resolve(context))
if getattr(func, 'expects_localtime', False):
obj = template_localtime(obj, context.use_tz)
if getattr(func, 'needs_autoescape', False):
new_obj = func(obj, autoescape=context.autoescape, *arg_vals)
else:
new_obj = func(obj, *arg_vals)
if getattr(func, 'is_safe', False) and isinstance(obj, SafeData):
obj = mark_safe(new_obj)
elif isinstance(obj, EscapeData):
obj = mark_for_escaping(new_obj)
else:
obj = new_obj
return obj
def args_check(name, func, provided):
provided = list(provided)
# First argument, filter input, is implied.
plen = len(provided) + 1
# Check to see if a decorator is providing the real function.
func = getattr(func, '_decorated_function', func)
args, varargs, varkw, defaults = getargspec(func)
alen = len(args)
dlen = len(defaults or [])
# Not enough OR Too many
if plen < (alen - dlen) or plen > alen:
raise TemplateSyntaxError("%s requires %d arguments, %d provided" %
(name, alen - dlen, plen))
return True
args_check = staticmethod(args_check)
def __str__(self):
return self.token
def resolve_variable(path, context):
"""
Returns the resolved variable, which may contain attribute syntax, within
the given context.
Deprecated; use the Variable class instead.
"""
warnings.warn("resolve_variable() is deprecated. Use django.template."
"Variable(path).resolve(context) instead",
RemovedInDjango20Warning, stacklevel=2)
return Variable(path).resolve(context)
class Variable(object):
"""
A template variable, resolvable against a given context. The variable may
be a hard-coded string (if it begins and ends with single or double quote
marks)::
>>> c = {'article': {'section':u'News'}}
>>> Variable('article.section').resolve(c)
u'News'
>>> Variable('article').resolve(c)
{'section': u'News'}
>>> class AClass: pass
>>> c = AClass()
>>> c.article = AClass()
>>> c.article.section = u'News'
(The example assumes VARIABLE_ATTRIBUTE_SEPARATOR is '.')
"""
def __init__(self, var):
self.var = var
self.literal = None
self.lookups = None
self.translate = False
self.message_context = None
if not isinstance(var, six.string_types):
raise TypeError(
"Variable must be a string or number, got %s" % type(var))
try:
# First try to treat this variable as a number.
#
# Note that this could cause an OverflowError here that we're not
# catching. Since this should only happen at compile time, that's
# probably OK.
self.literal = float(var)
# So it's a float... is it an int? If the original value contained a
# dot or an "e" then it was a float, not an int.
if '.' not in var and 'e' not in var.lower():
self.literal = int(self.literal)
# "2." is invalid
if var.endswith('.'):
raise ValueError
except ValueError:
# A ValueError means that the variable isn't a number.
if var.startswith('_(') and var.endswith(')'):
# The result of the lookup should be translated at rendering
# time.
self.translate = True
var = var[2:-1]
# If it's wrapped with quotes (single or double), then
# we're also dealing with a literal.
try:
self.literal = mark_safe(unescape_string_literal(var))
except ValueError:
# Otherwise we'll set self.lookups so that resolve() knows we're
# dealing with a bonafide variable
if var.find(VARIABLE_ATTRIBUTE_SEPARATOR + '_') > -1 or var[0] == '_':
raise TemplateSyntaxError("Variables and attributes may "
"not begin with underscores: '%s'" %
var)
self.lookups = tuple(var.split(VARIABLE_ATTRIBUTE_SEPARATOR))
def resolve(self, context):
"""Resolve this variable against a given context."""
if self.lookups is not None:
# We're dealing with a variable that needs to be resolved
value = self._resolve_lookup(context)
else:
# We're dealing with a literal, so it's already been "resolved"
value = self.literal
if self.translate:
if self.message_context:
return pgettext_lazy(self.message_context, value)
else:
return ugettext_lazy(value)
return value
def __repr__(self):
return "<%s: %r>" % (self.__class__.__name__, self.var)
def __str__(self):
return self.var
def _resolve_lookup(self, context):
"""
Performs resolution of a real variable (i.e. not a literal) against the
given context.
As indicated by the method's name, this method is an implementation
detail and shouldn't be called by external code. Use Variable.resolve()
instead.
"""
current = context
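        # For each dotted bit the lookup order is: dictionary key, then
        # attribute, then list/tuple index.  Callables found along the way are
        # called unless they set do_not_call_in_templates or alters_data.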
try: # catch-all for silent variable failures
for bit in self.lookups:
try: # dictionary lookup
current = current[bit]
except (TypeError, AttributeError, KeyError, ValueError):
try: # attribute lookup
# Don't return class attributes if the class is the context:
if isinstance(current, BaseContext) and getattr(type(current), bit):
raise AttributeError
current = getattr(current, bit)
except (TypeError, AttributeError) as e:
# Reraise an AttributeError raised by a @property
if (isinstance(e, AttributeError) and
not isinstance(current, BaseContext) and bit in dir(current)):
raise
try: # list-index lookup
current = current[int(bit)]
except (IndexError, # list index out of range
ValueError, # invalid literal for int()
KeyError, # current is a dict without `int(bit)` key
TypeError): # unsubscriptable object
raise VariableDoesNotExist("Failed lookup for key "
"[%s] in %r",
(bit, current)) # missing attribute
if callable(current):
if getattr(current, 'do_not_call_in_templates', False):
pass
elif getattr(current, 'alters_data', False):
current = settings.TEMPLATE_STRING_IF_INVALID
else:
try: # method call (assuming no args required)
current = current()
except TypeError:
try:
getcallargs(current)
except TypeError: # arguments *were* required
current = settings.TEMPLATE_STRING_IF_INVALID # invalid method call
else:
raise
except Exception as e:
if getattr(e, 'silent_variable_failure', False):
current = settings.TEMPLATE_STRING_IF_INVALID
else:
raise
return current
class Node(object):
# Set this to True for nodes that must be first in the template (although
    # they can be preceded by text nodes).
must_be_first = False
child_nodelists = ('nodelist',)
def render(self, context):
"""
Return the node rendered as a string.
"""
pass
def __iter__(self):
yield self
def get_nodes_by_type(self, nodetype):
"""
Return a list of all nodes (within this node and its nodelist)
of the given type
"""
nodes = []
if isinstance(self, nodetype):
nodes.append(self)
for attr in self.child_nodelists:
nodelist = getattr(self, attr, None)
if nodelist:
nodes.extend(nodelist.get_nodes_by_type(nodetype))
return nodes
class NodeList(list):
# Set to True the first time a non-TextNode is inserted by
# extend_nodelist().
contains_nontext = False
def render(self, context):
bits = []
for node in self:
if isinstance(node, Node):
bit = self.render_node(node, context)
else:
bit = node
bits.append(force_text(bit))
return mark_safe(''.join(bits))
def get_nodes_by_type(self, nodetype):
"Return a list of all nodes of the given type"
nodes = []
for node in self:
nodes.extend(node.get_nodes_by_type(nodetype))
return nodes
def render_node(self, node, context):
return node.render(context)
class TextNode(Node):
def __init__(self, s):
self.s = s
def __repr__(self):
return force_str("<Text Node: '%s'>" % self.s[:25], 'ascii',
errors='replace')
def render(self, context):
return self.s
def render_value_in_context(value, context):
"""
Converts any value to a string to become part of a rendered template. This
means escaping, if required, and conversion to a unicode object. If value
is a string, it is expected to have already been translated.
"""
value = template_localtime(value, use_tz=context.use_tz)
value = localize(value, use_l10n=context.use_l10n)
value = force_text(value)
if ((context.autoescape and not isinstance(value, SafeData)) or
isinstance(value, EscapeData)):
return escape(value)
else:
return value
class VariableNode(Node):
def __init__(self, filter_expression):
self.filter_expression = filter_expression
def __repr__(self):
return "<Variable Node: %s>" % self.filter_expression
def render(self, context):
try:
output = self.filter_expression.resolve(context)
except UnicodeDecodeError:
# Unicode conversion can fail sometimes for reasons out of our
# control (e.g. exception rendering). In that case, we fail
# quietly.
return ''
return render_value_in_context(output, context)
# Regex for token keyword arguments
kwarg_re = re.compile(r"(?:(\w+)=)?(.+)")
def token_kwargs(bits, parser, support_legacy=False):
"""
A utility method for parsing token keyword arguments.
:param bits: A list containing remainder of the token (split by spaces)
that is to be checked for arguments. Valid arguments will be removed
from this list.
    :param support_legacy: If set to ``True``, the legacy format
``1 as foo`` will be accepted. Otherwise, only the standard ``foo=1``
format is allowed.
:returns: A dictionary of the arguments retrieved from the ``bits`` token
list.
There is no requirement for all remaining token ``bits`` to be keyword
arguments, so the dictionary will be returned as soon as an invalid
argument format is reached.
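    For example (an illustrative call, not taken from real template code),
    ``token_kwargs(['greeting="hi"', 'rest'], parser)`` consumes the first bit
    and returns a one-entry dictionary for ``greeting``, leaving ``['rest']``
    in place because it is not in keyword form.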
"""
if not bits:
return {}
match = kwarg_re.match(bits[0])
kwarg_format = match and match.group(1)
if not kwarg_format:
if not support_legacy:
return {}
if len(bits) < 3 or bits[1] != 'as':
return {}
kwargs = {}
while bits:
if kwarg_format:
match = kwarg_re.match(bits[0])
if not match or not match.group(1):
return kwargs
key, value = match.groups()
del bits[:1]
else:
if len(bits) < 3 or bits[1] != 'as':
return kwargs
key, value = bits[2], bits[0]
del bits[:3]
kwargs[key] = parser.compile_filter(value)
if bits and not kwarg_format:
if bits[0] != 'and':
return kwargs
del bits[:1]
return kwargs
def parse_bits(parser, bits, params, varargs, varkw, defaults,
takes_context, name):
"""
    Parses bits for template tag helpers (simple_tag, inclusion_tag and
assignment_tag), in particular by detecting syntax errors and by
extracting positional and keyword arguments.
"""
if takes_context:
if params[0] == 'context':
params = params[1:]
else:
raise TemplateSyntaxError(
"'%s' is decorated with takes_context=True so it must "
"have a first argument of 'context'" % name)
args = []
kwargs = {}
unhandled_params = list(params)
for bit in bits:
# First we try to extract a potential kwarg from the bit
kwarg = token_kwargs([bit], parser)
if kwarg:
# The kwarg was successfully extracted
param, value = list(six.iteritems(kwarg))[0]
if param not in params and varkw is None:
# An unexpected keyword argument was supplied
raise TemplateSyntaxError(
"'%s' received unexpected keyword argument '%s'" %
(name, param))
elif param in kwargs:
# The keyword argument has already been supplied once
raise TemplateSyntaxError(
"'%s' received multiple values for keyword argument '%s'" %
(name, param))
else:
# All good, record the keyword argument
kwargs[str(param)] = value
if param in unhandled_params:
# If using the keyword syntax for a positional arg, then
# consume it.
unhandled_params.remove(param)
else:
if kwargs:
raise TemplateSyntaxError(
"'%s' received some positional argument(s) after some "
"keyword argument(s)" % name)
else:
# Record the positional argument
args.append(parser.compile_filter(bit))
try:
# Consume from the list of expected positional arguments
unhandled_params.pop(0)
except IndexError:
if varargs is None:
raise TemplateSyntaxError(
"'%s' received too many positional arguments" %
name)
if defaults is not None:
# Consider the last n params handled, where n is the
# number of defaults.
unhandled_params = unhandled_params[:-len(defaults)]
if unhandled_params:
# Some positional arguments were not supplied
raise TemplateSyntaxError(
"'%s' did not receive value(s) for the argument(s): %s" %
(name, ", ".join("'%s'" % p for p in unhandled_params)))
return args, kwargs
def generic_tag_compiler(parser, token, params, varargs, varkw, defaults,
name, takes_context, node_class):
"""
    Returns an instance of a template.Node subclass.
"""
bits = token.split_contents()[1:]
args, kwargs = parse_bits(parser, bits, params, varargs, varkw,
defaults, takes_context, name)
return node_class(takes_context, args, kwargs)
class TagHelperNode(Node):
"""
Base class for tag helper nodes such as SimpleNode, InclusionNode and
AssignmentNode. Manages the positional and keyword arguments to be passed
to the decorated function.
"""
def __init__(self, takes_context, args, kwargs):
self.takes_context = takes_context
self.args = args
self.kwargs = kwargs
def get_resolved_arguments(self, context):
resolved_args = [var.resolve(context) for var in self.args]
if self.takes_context:
resolved_args = [context] + resolved_args
resolved_kwargs = dict((k, v.resolve(context)) for k, v in self.kwargs.items())
return resolved_args, resolved_kwargs
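# Library instances are what custom template tag modules expose as
# ``register``.  A typical templatetags module looks roughly like this
# (illustrative example, not part of this file):
#
#     from django.template import Library
#
#     register = Library()
#
#     @register.filter
#     def shout(value):
#         return value.upper()
#
#     @register.simple_tag
#     def repeat(value, times=2):
#         return value * times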
class Library(object):
def __init__(self):
self.filters = {}
self.tags = {}
def tag(self, name=None, compile_function=None):
if name is None and compile_function is None:
# @register.tag()
return self.tag_function
elif name is not None and compile_function is None:
if callable(name):
# @register.tag
return self.tag_function(name)
else:
# @register.tag('somename') or @register.tag(name='somename')
def dec(func):
return self.tag(name, func)
return dec
elif name is not None and compile_function is not None:
# register.tag('somename', somefunc)
self.tags[name] = compile_function
return compile_function
else:
            raise InvalidTemplateLibrary("Unsupported arguments to "
                                         "Library.tag: (%r, %r)" %
                                         (name, compile_function))
def tag_function(self, func):
self.tags[getattr(func, "_decorated_function", func).__name__] = func
return func
def filter(self, name=None, filter_func=None, **flags):
if name is None and filter_func is None:
# @register.filter()
def dec(func):
return self.filter_function(func, **flags)
return dec
elif name is not None and filter_func is None:
if callable(name):
# @register.filter
return self.filter_function(name, **flags)
else:
# @register.filter('somename') or @register.filter(name='somename')
def dec(func):
return self.filter(name, func, **flags)
return dec
elif name is not None and filter_func is not None:
# register.filter('somename', somefunc)
self.filters[name] = filter_func
for attr in ('expects_localtime', 'is_safe', 'needs_autoescape'):
if attr in flags:
value = flags[attr]
# set the flag on the filter for FilterExpression.resolve
setattr(filter_func, attr, value)
# set the flag on the innermost decorated function
# for decorators that need it e.g. stringfilter
if hasattr(filter_func, "_decorated_function"):
setattr(filter_func._decorated_function, attr, value)
filter_func._filter_name = name
return filter_func
else:
            raise InvalidTemplateLibrary("Unsupported arguments to "
                                         "Library.filter: (%r, %r)" %
                                         (name, filter_func))
def filter_function(self, func, **flags):
name = getattr(func, "_decorated_function", func).__name__
return self.filter(name, func, **flags)
def simple_tag(self, func=None, takes_context=None, name=None):
def dec(func):
params, varargs, varkw, defaults = getargspec(func)
class SimpleNode(TagHelperNode):
def render(self, context):
resolved_args, resolved_kwargs = self.get_resolved_arguments(context)
return func(*resolved_args, **resolved_kwargs)
function_name = (name or
getattr(func, '_decorated_function', func).__name__)
compile_func = partial(generic_tag_compiler,
params=params, varargs=varargs, varkw=varkw,
defaults=defaults, name=function_name,
takes_context=takes_context, node_class=SimpleNode)
compile_func.__doc__ = func.__doc__
self.tag(function_name, compile_func)
return func
if func is None:
# @register.simple_tag(...)
return dec
elif callable(func):
# @register.simple_tag
return dec(func)
else:
raise TemplateSyntaxError("Invalid arguments provided to simple_tag")
def assignment_tag(self, func=None, takes_context=None, name=None):
def dec(func):
params, varargs, varkw, defaults = getargspec(func)
class AssignmentNode(TagHelperNode):
def __init__(self, takes_context, args, kwargs, target_var):
super(AssignmentNode, self).__init__(takes_context, args, kwargs)
self.target_var = target_var
def render(self, context):
resolved_args, resolved_kwargs = self.get_resolved_arguments(context)
context[self.target_var] = func(*resolved_args, **resolved_kwargs)
return ''
function_name = (name or
getattr(func, '_decorated_function', func).__name__)
def compile_func(parser, token):
bits = token.split_contents()[1:]
if len(bits) < 2 or bits[-2] != 'as':
raise TemplateSyntaxError(
"'%s' tag takes at least 2 arguments and the "
"second last argument must be 'as'" % function_name)
target_var = bits[-1]
bits = bits[:-2]
args, kwargs = parse_bits(parser, bits, params,
varargs, varkw, defaults, takes_context, function_name)
return AssignmentNode(takes_context, args, kwargs, target_var)
compile_func.__doc__ = func.__doc__
self.tag(function_name, compile_func)
return func
if func is None:
# @register.assignment_tag(...)
return dec
elif callable(func):
# @register.assignment_tag
return dec(func)
else:
raise TemplateSyntaxError("Invalid arguments provided to assignment_tag")
def inclusion_tag(self, file_name, context_class=Context, takes_context=False, name=None):
def dec(func):
params, varargs, varkw, defaults = getargspec(func)
class InclusionNode(TagHelperNode):
def render(self, context):
resolved_args, resolved_kwargs = self.get_resolved_arguments(context)
_dict = func(*resolved_args, **resolved_kwargs)
if not getattr(self, 'nodelist', False):
from django.template.loader import get_template, select_template
if isinstance(file_name, Template):
t = file_name
elif not isinstance(file_name, six.string_types) and is_iterable(file_name):
t = select_template(file_name)
else:
t = get_template(file_name)
self.nodelist = t.nodelist
new_context = context_class(_dict, **{
'autoescape': context.autoescape,
'current_app': context.current_app,
'use_l10n': context.use_l10n,
'use_tz': context.use_tz,
})
# Copy across the CSRF token, if present, because
# inclusion tags are often used for forms, and we need
# instructions for using CSRF protection to be as simple
# as possible.
csrf_token = context.get('csrf_token', None)
if csrf_token is not None:
new_context['csrf_token'] = csrf_token
return self.nodelist.render(new_context)
function_name = (name or
getattr(func, '_decorated_function', func).__name__)
compile_func = partial(generic_tag_compiler,
params=params, varargs=varargs, varkw=varkw,
defaults=defaults, name=function_name,
takes_context=takes_context, node_class=InclusionNode)
compile_func.__doc__ = func.__doc__
self.tag(function_name, compile_func)
return func
return dec
def is_library_missing(name):
"""Check if library that failed to load cannot be found under any
    templatetags directory, as opposed to existing but failing to import.
    The non-existence condition is checked recursively for each parent package,
    to cover cases like <appdir>/templatetags/subpackage/package/module.py.
"""
# Don't bother to check if '.' is in name since any name will be prefixed
# with some template root.
path, module = name.rsplit('.', 1)
try:
package = import_module(path)
return not module_has_submodule(package, module)
except ImportError:
return is_library_missing(path)
def import_library(taglib_module):
"""
Load a template tag library module.
Verifies that the library contains a 'register' attribute, and
returns that attribute as the representation of the library
"""
try:
mod = import_module(taglib_module)
except ImportError as e:
# If the ImportError is because the taglib submodule does not exist,
# that's not an error that should be raised. If the submodule exists
        # and raised an ImportError when we tried to load it, then that is an
        # error we do want to raise.
if is_library_missing(taglib_module):
return None
else:
raise InvalidTemplateLibrary("ImportError raised loading %s: %s" %
(taglib_module, e))
try:
return mod.register
except AttributeError:
raise InvalidTemplateLibrary("Template library %s does not have "
"a variable named 'register'" %
taglib_module)
templatetags_modules = []
def get_templatetags_modules():
"""
Return the list of all available template tag modules.
Caches the result for faster access.
"""
global templatetags_modules
if not templatetags_modules:
_templatetags_modules = []
# Populate list once per process. Mutate the local list first, and
# then assign it to the global name to ensure there are no cases where
# two threads try to populate it simultaneously.
templatetags_modules_candidates = ['django.templatetags']
templatetags_modules_candidates += ['%s.templatetags' % app_config.name
for app_config in apps.get_app_configs()]
for templatetag_module in templatetags_modules_candidates:
try:
import_module(templatetag_module)
_templatetags_modules.append(templatetag_module)
except ImportError:
continue
templatetags_modules = _templatetags_modules
return templatetags_modules
def get_library(library_name):
"""
Load the template library module with the given name.
    If the library is not already loaded, loop over all templatetags modules
    to locate it.
    {% load somelib %} and {% load someotherlib %} each trigger such a loop.
    Subsequent loads, e.g. {% load somelib %} again in the same process, will
    grab the cached module from ``libraries``.
"""
lib = libraries.get(library_name, None)
if not lib:
templatetags_modules = get_templatetags_modules()
tried_modules = []
for module in templatetags_modules:
taglib_module = '%s.%s' % (module, library_name)
tried_modules.append(taglib_module)
lib = import_library(taglib_module)
if lib:
libraries[library_name] = lib
break
if not lib:
raise InvalidTemplateLibrary("Template library %s not found, "
"tried %s" %
(library_name,
','.join(tried_modules)))
return lib
def add_to_builtins(module):
builtins.append(import_library(module))
add_to_builtins('django.template.defaulttags')
add_to_builtins('django.template.defaultfilters')
add_to_builtins('django.template.loader_tags')
| olasitarska/django | django/template/base.py | Python | bsd-3-clause | 50,676 |
"""Test interact and interactive."""
#-----------------------------------------------------------------------------
# Copyright (C) 2014 The IPython Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import print_function
from collections import OrderedDict
import nose.tools as nt
import IPython.testing.tools as tt
# from IPython.core.getipython import get_ipython
from IPython.html import widgets
from IPython.html.widgets import interact, interactive, Widget, interaction
from IPython.utils.py3compat import annotate
#-----------------------------------------------------------------------------
# Utility stuff
#-----------------------------------------------------------------------------
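# DummyComm stands in for the real IPython Comm object so that widgets can be
# instantiated and exercised in this test process without a running kernel or
# browser frontend.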
class DummyComm(object):
comm_id = 'a-b-c-d'
def send(self, *args, **kwargs):
pass
def close(self, *args, **kwargs):
pass
_widget_attrs = {}
displayed = []
def setup():
_widget_attrs['comm'] = Widget.comm
Widget.comm = DummyComm()
_widget_attrs['_ipython_display_'] = Widget._ipython_display_
def raise_not_implemented(*args, **kwargs):
raise NotImplementedError()
Widget._ipython_display_ = raise_not_implemented
def teardown():
for attr, value in _widget_attrs.items():
setattr(Widget, attr, value)
def f(**kwargs):
pass
def clear_display():
global displayed
displayed = []
def record_display(*args):
displayed.extend(args)
#-----------------------------------------------------------------------------
# Actual tests
#-----------------------------------------------------------------------------
def check_widget(w, **d):
"""Check a single widget against a dict"""
for attr, expected in d.items():
if attr == 'cls':
nt.assert_is(w.__class__, expected)
else:
value = getattr(w, attr)
nt.assert_equal(value, expected,
"%s.%s = %r != %r" % (w.__class__.__name__, attr, value, expected)
)
def check_widgets(container, **to_check):
"""Check that widgets are created as expected"""
    # build a dictionary of widgets keyed by description, so they can be
    # matched against the expectations passed in via to_check
widgets = {}
for w in container.children:
widgets[w.description] = w
for key, d in to_check.items():
nt.assert_in(key, widgets)
check_widget(widgets[key], **d)
def test_single_value_string():
a = u'hello'
c = interactive(f, a=a)
w = c.children[0]
check_widget(w,
cls=widgets.TextWidget,
description='a',
value=a,
)
def test_single_value_bool():
for a in (True, False):
c = interactive(f, a=a)
w = c.children[0]
check_widget(w,
cls=widgets.CheckboxWidget,
description='a',
value=a,
)
def test_single_value_dict():
for d in [
dict(a=5),
dict(a=5, b='b', c=dict),
]:
c = interactive(f, d=d)
w = c.children[0]
check_widget(w,
cls=widgets.DropdownWidget,
description='d',
values=d,
value=next(iter(d.values())),
)
def test_single_value_float():
for a in (2.25, 1.0, -3.5):
c = interactive(f, a=a)
w = c.children[0]
check_widget(w,
cls=widgets.FloatSliderWidget,
description='a',
value=a,
min= -a if a > 0 else 3*a,
max= 3*a if a > 0 else -a,
step=0.1,
readout=True,
)
def test_single_value_int():
for a in (1, 5, -3):
c = interactive(f, a=a)
nt.assert_equal(len(c.children), 1)
w = c.children[0]
check_widget(w,
cls=widgets.IntSliderWidget,
description='a',
value=a,
min= -a if a > 0 else 3*a,
max= 3*a if a > 0 else -a,
step=1,
readout=True,
)
def test_list_tuple_2_int():
with nt.assert_raises(ValueError):
c = interactive(f, tup=(1,1))
with nt.assert_raises(ValueError):
c = interactive(f, tup=(1,-1))
for min, max in [ (0,1), (1,10), (1,2), (-5,5), (-20,-19) ]:
c = interactive(f, tup=(min, max), lis=[min, max])
nt.assert_equal(len(c.children), 2)
d = dict(
cls=widgets.IntSliderWidget,
min=min,
max=max,
step=1,
readout=True,
)
check_widgets(c, tup=d, lis=d)
def test_list_tuple_3_int():
with nt.assert_raises(ValueError):
c = interactive(f, tup=(1,2,0))
with nt.assert_raises(ValueError):
c = interactive(f, tup=(1,2,-1))
for min, max, step in [ (0,2,1), (1,10,2), (1,100,2), (-5,5,4), (-100,-20,4) ]:
c = interactive(f, tup=(min, max, step), lis=[min, max, step])
nt.assert_equal(len(c.children), 2)
d = dict(
cls=widgets.IntSliderWidget,
min=min,
max=max,
step=step,
readout=True,
)
check_widgets(c, tup=d, lis=d)
def test_list_tuple_2_float():
with nt.assert_raises(ValueError):
c = interactive(f, tup=(1.0,1.0))
with nt.assert_raises(ValueError):
c = interactive(f, tup=(0.5,-0.5))
for min, max in [ (0.5, 1.5), (1.1,10.2), (1,2.2), (-5.,5), (-20,-19.) ]:
c = interactive(f, tup=(min, max), lis=[min, max])
nt.assert_equal(len(c.children), 2)
d = dict(
cls=widgets.FloatSliderWidget,
min=min,
max=max,
step=.1,
readout=True,
)
check_widgets(c, tup=d, lis=d)
def test_list_tuple_3_float():
with nt.assert_raises(ValueError):
c = interactive(f, tup=(1,2,0.0))
with nt.assert_raises(ValueError):
c = interactive(f, tup=(-1,-2,1.))
with nt.assert_raises(ValueError):
c = interactive(f, tup=(1,2.,-1.))
for min, max, step in [ (0.,2,1), (1,10.,2), (1,100,2.), (-5.,5.,4), (-100,-20.,4.) ]:
c = interactive(f, tup=(min, max, step), lis=[min, max, step])
nt.assert_equal(len(c.children), 2)
d = dict(
cls=widgets.FloatSliderWidget,
min=min,
max=max,
step=step,
readout=True,
)
check_widgets(c, tup=d, lis=d)
def test_list_tuple_str():
values = ['hello', 'there', 'guy']
first = values[0]
dvalues = OrderedDict((v,v) for v in values)
c = interactive(f, tup=tuple(values), lis=list(values))
nt.assert_equal(len(c.children), 2)
d = dict(
cls=widgets.DropdownWidget,
value=first,
values=dvalues
)
check_widgets(c, tup=d, lis=d)
def test_list_tuple_invalid():
for bad in [
(),
(5, 'hi'),
('hi', 5),
({},),
(None,),
]:
with nt.assert_raises(ValueError):
print(bad) # because there is no custom message in assert_raises
c = interactive(f, tup=bad)
def test_defaults():
@annotate(n=10)
def f(n, f=4.5, g=1):
pass
c = interactive(f)
check_widgets(c,
n=dict(
cls=widgets.IntSliderWidget,
value=10,
),
f=dict(
cls=widgets.FloatSliderWidget,
value=4.5,
),
g=dict(
cls=widgets.IntSliderWidget,
value=1,
),
)
def test_default_values():
@annotate(n=10, f=(0, 10.), g=5, h={'a': 1, 'b': 2}, j=['hi', 'there'])
def f(n, f=4.5, g=1, h=2, j='there'):
pass
c = interactive(f)
check_widgets(c,
n=dict(
cls=widgets.IntSliderWidget,
value=10,
),
f=dict(
cls=widgets.FloatSliderWidget,
value=4.5,
),
g=dict(
cls=widgets.IntSliderWidget,
value=5,
),
h=dict(
cls=widgets.DropdownWidget,
values={'a': 1, 'b': 2},
value=2
),
j=dict(
cls=widgets.DropdownWidget,
values={'hi':'hi', 'there':'there'},
value='there'
),
)
def test_default_out_of_bounds():
@annotate(f=(0, 10.), h={'a': 1}, j=['hi', 'there'])
def f(f='hi', h=5, j='other'):
pass
c = interactive(f)
check_widgets(c,
f=dict(
cls=widgets.FloatSliderWidget,
value=5.,
),
h=dict(
cls=widgets.DropdownWidget,
values={'a': 1},
value=1,
),
j=dict(
cls=widgets.DropdownWidget,
values={'hi':'hi', 'there':'there'},
value='hi',
),
)
def test_annotations():
@annotate(n=10, f=widgets.FloatTextWidget())
def f(n, f):
pass
c = interactive(f)
check_widgets(c,
n=dict(
cls=widgets.IntSliderWidget,
value=10,
),
f=dict(
cls=widgets.FloatTextWidget,
),
)
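# When the same parameter is described in several places, interact gives
# precedence to explicit keyword arguments over annotations, and to
# annotations over function defaults; test_priority checks exactly that.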
def test_priority():
@annotate(annotate='annotate', kwarg='annotate')
def f(kwarg='default', annotate='default', default='default'):
pass
c = interactive(f, kwarg='kwarg')
check_widgets(c,
kwarg=dict(
cls=widgets.TextWidget,
value='kwarg',
),
annotate=dict(
cls=widgets.TextWidget,
value='annotate',
),
)
@nt.with_setup(clear_display)
def test_decorator_kwarg():
with tt.monkeypatch(interaction, 'display', record_display):
@interact(a=5)
def foo(a):
pass
nt.assert_equal(len(displayed), 1)
w = displayed[0].children[0]
check_widget(w,
cls=widgets.IntSliderWidget,
value=5,
)
@nt.with_setup(clear_display)
def test_decorator_no_call():
with tt.monkeypatch(interaction, 'display', record_display):
@interact
def foo(a='default'):
pass
nt.assert_equal(len(displayed), 1)
w = displayed[0].children[0]
check_widget(w,
cls=widgets.TextWidget,
value='default',
)
@nt.with_setup(clear_display)
def test_call_interact():
def foo(a='default'):
pass
with tt.monkeypatch(interaction, 'display', record_display):
ifoo = interact(foo)
nt.assert_equal(len(displayed), 1)
w = displayed[0].children[0]
check_widget(w,
cls=widgets.TextWidget,
value='default',
)
@nt.with_setup(clear_display)
def test_call_interact_kwargs():
def foo(a='default'):
pass
with tt.monkeypatch(interaction, 'display', record_display):
ifoo = interact(foo, a=10)
nt.assert_equal(len(displayed), 1)
w = displayed[0].children[0]
check_widget(w,
cls=widgets.IntSliderWidget,
value=10,
)
@nt.with_setup(clear_display)
def test_call_decorated_on_trait_change():
"""test calling @interact decorated functions"""
d = {}
with tt.monkeypatch(interaction, 'display', record_display):
@interact
def foo(a='default'):
d['a'] = a
return a
nt.assert_equal(len(displayed), 1)
w = displayed[0].children[0]
check_widget(w,
cls=widgets.TextWidget,
value='default',
)
# test calling the function directly
a = foo('hello')
nt.assert_equal(a, 'hello')
nt.assert_equal(d['a'], 'hello')
# test that setting trait values calls the function
w.value = 'called'
nt.assert_equal(d['a'], 'called')
@nt.with_setup(clear_display)
def test_call_decorated_kwargs_on_trait_change():
"""test calling @interact(foo=bar) decorated functions"""
d = {}
with tt.monkeypatch(interaction, 'display', record_display):
@interact(a='kwarg')
def foo(a='default'):
d['a'] = a
return a
nt.assert_equal(len(displayed), 1)
w = displayed[0].children[0]
check_widget(w,
cls=widgets.TextWidget,
value='kwarg',
)
# test calling the function directly
a = foo('hello')
nt.assert_equal(a, 'hello')
nt.assert_equal(d['a'], 'hello')
# test that setting trait values calls the function
w.value = 'called'
nt.assert_equal(d['a'], 'called')
def test_fixed():
c = interactive(f, a=widgets.fixed(5), b='text')
nt.assert_equal(len(c.children), 1)
w = c.children[0]
check_widget(w,
cls=widgets.TextWidget,
value='text',
description='b',
)
def test_default_description():
c = interactive(f, b='text')
w = c.children[0]
check_widget(w,
cls=widgets.TextWidget,
value='text',
description='b',
)
def test_custom_description():
c = interactive(f, b=widgets.TextWidget(value='text', description='foo'))
w = c.children[0]
check_widget(w,
cls=widgets.TextWidget,
value='text',
description='foo',
)
| WillisXChen/django-oscar | oscar/lib/python2.7/site-packages/IPython/html/widgets/tests/test_interaction.py | Python | bsd-3-clause | 13,235 |
import sys
import pytest
from netaddr import INET_PTON, AddrFormatError
from netaddr.strategy import ipv4
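# These tests exercise netaddr's low-level IPv4 strategy module directly,
# round-tripping the address 192.0.2.1 through its bit-string, integer,
# octet-tuple, packed and Python binary-literal representations.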
def test_strategy_ipv4():
b = '11000000.00000000.00000010.00000001'
i = 3221225985
t = (192, 0, 2, 1)
s = '192.0.2.1'
bin_val = '0b11000000000000000000001000000001'
assert ipv4.bits_to_int(b) == i
assert ipv4.int_to_bits(i) == b
assert ipv4.int_to_str(i) == s
assert ipv4.int_to_words(i) == t
assert ipv4.int_to_bin(i) == bin_val
assert ipv4.int_to_bin(i) == bin_val
assert ipv4.bin_to_int(bin_val) == i
assert ipv4.words_to_int(t) == i
assert ipv4.words_to_int(list(t)) == i
assert ipv4.valid_bin(bin_val)
@pytest.mark.skipif(sys.version_info > (3,), reason="requires python 2.x")
def test_strategy_ipv4_py2():
i = 3221225985
p = '\xc0\x00\x02\x01'
assert ipv4.int_to_packed(i) == p
assert ipv4.packed_to_int(p) == i
@pytest.mark.skipif(sys.version_info < (3,), reason="requires python 3.x")
def test_strategy_ipv4_py3():
i = 3221225985
p = b'\xc0\x00\x02\x01'
assert ipv4.int_to_packed(i) == p
assert ipv4.packed_to_int(p) == i
def test_strategy_inet_aton_behaviour():
# inet_aton() is a very old system call and is very permissive with
    # regard to what it assumes is a valid IPv4 address. Unfortunately, it
    # is also the most widely used by system software today,
# so netaddr supports this behaviour by default.
assert ipv4.str_to_int('127') == 127
assert ipv4.str_to_int('0x7f') == 127
assert ipv4.str_to_int('0177') == 127
assert ipv4.str_to_int('127.1') == 2130706433
assert ipv4.str_to_int('0x7f.1') == 2130706433
assert ipv4.str_to_int('0177.1') == 2130706433
assert ipv4.str_to_int('127.0.0.1') == 2130706433
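# Note: '127.1' maps to 2130706433 because inet_aton() lets the final part fill
# the remaining bytes: (127 << 24) | 1 == 2130706433, i.e. 127.0.0.1.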
def test_strategy_inet_pton_behaviour():
# inet_pton() is a newer system call that supports both IPv4 and IPv6.
# It is a lot more strict about what it deems to be a valid IPv4 address
# and doesn't support many of the features found in inet_aton() such as
    # support for non-decimal octets, partial numbers of octets, etc.
with pytest.raises(AddrFormatError):
ipv4.str_to_int('127', flags=INET_PTON)
with pytest.raises(AddrFormatError):
ipv4.str_to_int('0x7f', flags=INET_PTON)
with pytest.raises(AddrFormatError):
ipv4.str_to_int('0177', flags=INET_PTON)
with pytest.raises(AddrFormatError):
ipv4.str_to_int('127.1', flags=INET_PTON)
with pytest.raises(AddrFormatError):
ipv4.str_to_int('0x7f.1', flags=INET_PTON)
with pytest.raises(AddrFormatError):
ipv4.str_to_int('0177.1', flags=INET_PTON)
assert ipv4.str_to_int('127.0.0.1', flags=INET_PTON) == 2130706433
| jonathanwcrane/netaddr | test/strategy/test_ipv4_strategy.py | Python | bsd-3-clause | 2,753 |
# -*- coding: utf-8 -*-
#
# jQuery File Upload Plugin GAE Python Example 2.1.1
# https://github.com/blueimp/jQuery-File-Upload
#
# Copyright 2011, Sebastian Tschan
# https://blueimp.net
#
# Licensed under the MIT license:
# http://www.opensource.org/licenses/MIT
#
from __future__ import with_statement
from google.appengine.api import files, images
from google.appengine.ext import blobstore, deferred
from google.appengine.ext.webapp import blobstore_handlers
import json
import re
import urllib
import webapp2
WEBSITE = 'http://blueimp.github.io/jQuery-File-Upload/'
MIN_FILE_SIZE = 1 # bytes
MAX_FILE_SIZE = 5000000 # bytes
IMAGE_TYPES = re.compile('image/(gif|p?jpeg|(x-)?png)')
ACCEPT_FILE_TYPES = IMAGE_TYPES
THUMBNAIL_MODIFICATOR = '=s80' # max width / height
EXPIRATION_TIME = 300 # seconds
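# Appending THUMBNAIL_MODIFICATOR to a get_serving_url() result (e.g. <url>=s80)
# is expected to yield a thumbnail whose longer side is at most 80 pixels.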
def cleanup(blob_keys):
blobstore.delete(blob_keys)
class UploadHandler(webapp2.RequestHandler):
def initialize(self, request, response):
super(UploadHandler, self).initialize(request, response)
self.response.headers['Access-Control-Allow-Origin'] = '*'
self.response.headers[
'Access-Control-Allow-Methods'
] = 'OPTIONS, HEAD, GET, POST, PUT, DELETE'
self.response.headers[
'Access-Control-Allow-Headers'
] = 'Content-Type, Content-Range, Content-Disposition'
def validate(self, file):
if file['size'] < MIN_FILE_SIZE:
file['error'] = 'File is too small'
elif file['size'] > MAX_FILE_SIZE:
file['error'] = 'File is too big'
elif not ACCEPT_FILE_TYPES.match(file['type']):
file['error'] = 'Filetype not allowed'
else:
return True
return False
def get_file_size(self, file):
file.seek(0, 2) # Seek to the end of the file
size = file.tell() # Get the position of EOF
file.seek(0) # Reset the file position to the beginning
return size
def write_blob(self, data, info):
blob = files.blobstore.create(
mime_type=info['type'],
_blobinfo_uploaded_filename=info['name']
)
with files.open(blob, 'a') as f:
f.write(data)
files.finalize(blob)
return files.blobstore.get_blob_key(blob)
def handle_upload(self):
results = []
blob_keys = []
for name, fieldStorage in self.request.POST.items():
if type(fieldStorage) is unicode:
continue
result = {}
result['name'] = re.sub(
r'^.*\\',
'',
fieldStorage.filename
)
result['type'] = fieldStorage.type
result['size'] = self.get_file_size(fieldStorage.file)
if self.validate(result):
blob_key = str(
self.write_blob(fieldStorage.value, result)
)
blob_keys.append(blob_key)
result['deleteType'] = 'DELETE'
result['deleteUrl'] = self.request.host_url +\
'/?key=' + urllib.quote(blob_key, '')
if (IMAGE_TYPES.match(result['type'])):
try:
result['url'] = images.get_serving_url(
blob_key,
secure_url=self.request.host_url.startswith(
'https'
)
)
result['thumbnailUrl'] = result['url'] +\
THUMBNAIL_MODIFICATOR
except: # Could not get an image serving url
pass
if not 'url' in result:
result['url'] = self.request.host_url +\
'/' + blob_key + '/' + urllib.quote(
result['name'].encode('utf-8'), '')
results.append(result)
deferred.defer(
cleanup,
blob_keys,
_countdown=EXPIRATION_TIME
)
return results
def options(self):
pass
def head(self):
pass
def get(self):
self.redirect(WEBSITE)
def post(self):
if (self.request.get('_method') == 'DELETE'):
return self.delete()
result = {'files': self.handle_upload()}
s = json.dumps(result, separators=(',', ':'))
redirect = self.request.get('redirect')
if redirect:
return self.redirect(str(
redirect.replace('%s', urllib.quote(s, ''), 1)
))
if 'application/json' in self.request.headers.get('Accept'):
self.response.headers['Content-Type'] = 'application/json'
self.response.write(s)
def delete(self):
key = self.request.get('key') or ''
blobstore.delete(key)
s = json.dumps({key: True}, separators=(',', ':'))
if 'application/json' in self.request.headers.get('Accept'):
self.response.headers['Content-Type'] = 'application/json'
self.response.write(s)
class DownloadHandler(blobstore_handlers.BlobstoreDownloadHandler):
def get(self, key, filename):
if not blobstore.get(key):
self.error(404)
else:
# Prevent browsers from MIME-sniffing the content-type:
self.response.headers['X-Content-Type-Options'] = 'nosniff'
# Cache for the expiration time:
self.response.headers['Cache-Control'] = 'public,max-age=%d' % EXPIRATION_TIME
# Send the file forcing a download dialog:
self.send_blob(key, save_as=filename, content_type='application/octet-stream')
app = webapp2.WSGIApplication(
[
('/', UploadHandler),
('/([^/]+)/([^/]+)', DownloadHandler)
],
debug=True
)
| guillaume-boutin/learning-builderengine | themes/user_dashboard/assets/plugins/jquery-file-upload/server/gae-python/main.py | Python | mit | 6,014 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 Jonathan Esterhazy <jonathan.esterhazy at gmail.com>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
#
# HDP inference code is adapted from the onlinehdp.py script by
# Chong Wang (chongw at cs.princeton.edu).
# http://www.cs.princeton.edu/~chongw/software/onlinehdp.tar.gz
#
# Some show/print topics code is adapted from Dr. Hoffman's online lda sample code,
# (C) 2010 Matthew D. Hoffman, GNU GPL 3.0
# http://www.cs.princeton.edu/~mdhoffma/code/onlineldavb.tar
"""
This module encapsulates functionality for the online Hierarchical Dirichlet Process algorithm.
It allows both model estimation from a training corpus and inference of topic
distribution on new, unseen documents.
The core estimation code is directly adapted from the `onlinehdp.py` script
by C. Wang; see
**Wang, Paisley, Blei: Online Variational Inference for the Hierarchical Dirichlet
Process, JMLR (2011).**
http://jmlr.csail.mit.edu/proceedings/papers/v15/wang11a/wang11a.pdf
The algorithm:
* is **streamed**: training documents come in sequentially, no random access,
* runs in **constant memory** w.r.t. the number of documents: size of the
training corpus does not affect memory footprint
"""
from __future__ import with_statement
import logging, time
import numpy as np
import scipy.special as sp
from gensim import interfaces, utils, matutils
from six.moves import xrange
logger = logging.getLogger(__name__)
meanchangethresh = 0.00001
rhot_bound = 0.0
def log_normalize(v):
log_max = 100.0
if len(v.shape) == 1:
max_val = np.max(v)
log_shift = log_max - np.log(len(v) + 1.0) - max_val
tot = np.sum(np.exp(v + log_shift))
log_norm = np.log(tot) - log_shift
v = v - log_norm
else:
max_val = np.max(v, 1)
log_shift = log_max - np.log(v.shape[1] + 1.0) - max_val
tot = np.sum(np.exp(v + log_shift[:, np.newaxis]), 1)
log_norm = np.log(tot) - log_shift
v = v - log_norm[:, np.newaxis]
return (v, log_norm)
def dirichlet_expectation(alpha):
"""
For a vector theta ~ Dir(alpha), compute E[log(theta)] given alpha.
"""
if (len(alpha.shape) == 1):
return(sp.psi(alpha) - sp.psi(np.sum(alpha)))
return(sp.psi(alpha) - sp.psi(np.sum(alpha, 1))[:, np.newaxis])
def expect_log_sticks(sticks):
"""
For stick-breaking hdp, return the E[log(sticks)]
"""
dig_sum = sp.psi(np.sum(sticks, 0))
ElogW = sp.psi(sticks[0]) - dig_sum
Elog1_W = sp.psi(sticks[1]) - dig_sum
n = len(sticks[0]) + 1
Elogsticks = np.zeros(n)
Elogsticks[0: n - 1] = ElogW
Elogsticks[1:] = Elogsticks[1:] + np.cumsum(Elog1_W)
return Elogsticks
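# Note on expect_log_sticks(): for stick proportions W_i ~ Beta(a_i, b_i),
# E[log W_i] = psi(a_i) - psi(a_i + b_i) and E[log(1 - W_i)] = psi(b_i) - psi(a_i + b_i);
# the length of the i-th full stick is E[log W_i] + sum_{j<i} E[log(1 - W_j)],
# which is what the cumulative sum above assembles.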
def lda_e_step(doc_word_ids, doc_word_counts, alpha, beta, max_iter=100):
gamma = np.ones(len(alpha))
expElogtheta = np.exp(dirichlet_expectation(gamma))
betad = beta[:, doc_word_ids]
phinorm = np.dot(expElogtheta, betad) + 1e-100
counts = np.array(doc_word_counts)
for _ in xrange(max_iter):
lastgamma = gamma
gamma = alpha + expElogtheta * np.dot(counts / phinorm, betad.T)
Elogtheta = dirichlet_expectation(gamma)
expElogtheta = np.exp(Elogtheta)
phinorm = np.dot(expElogtheta, betad) + 1e-100
meanchange = np.mean(abs(gamma - lastgamma))
if (meanchange < meanchangethresh):
break
likelihood = np.sum(counts * np.log(phinorm))
likelihood += np.sum((alpha - gamma) * Elogtheta)
likelihood += np.sum(sp.gammaln(gamma) - sp.gammaln(alpha))
likelihood += sp.gammaln(np.sum(alpha)) - sp.gammaln(np.sum(gamma))
return (likelihood, gamma)
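# Illustrative use: for a bag-of-words document [(3, 2), (7, 1)] (word id, count),
#   likelihood, gamma = lda_e_step([3, 7], [2, 1], alpha, beta)
# returns the document's variational likelihood bound and the Dirichlet posterior
# parameters (gamma) over topics.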
class SuffStats(object):
def __init__(self, T, Wt, Dt):
self.m_chunksize = Dt
self.m_var_sticks_ss = np.zeros(T)
self.m_var_beta_ss = np.zeros((T, Wt))
def set_zero(self):
self.m_var_sticks_ss.fill(0.0)
self.m_var_beta_ss.fill(0.0)
class HdpModel(interfaces.TransformationABC):
"""
    The constructor estimates Hierarchical Dirichlet Process model parameters based
on a training corpus:
>>> hdp = HdpModel(corpus, id2word)
>>> hdp.print_topics(topics=20, topn=10)
Inference on new documents is based on the approximately LDA-equivalent topics.
Model persistency is achieved through its `load`/`save` methods.
"""
def __init__(self, corpus, id2word, max_chunks=None, max_time=None,
chunksize=256, kappa=1.0, tau=64.0, K=15, T=150, alpha=1,
gamma=1, eta=0.01, scale=1.0, var_converge=0.0001,
outputdir=None):
"""
`gamma`: first level concentration
`alpha`: second level concentration
`eta`: the topic Dirichlet
`T`: top level truncation level
`K`: second level truncation level
`kappa`: learning rate
`tau`: slow down parameter
`max_time`: stop training after this many seconds
`max_chunks`: stop after having processed this many chunks (wrap around
corpus beginning in another corpus pass, if there are not enough chunks
in the corpus)
"""
self.corpus = corpus
self.id2word = id2word
self.chunksize = chunksize
self.max_chunks = max_chunks
self.max_time = max_time
self.outputdir = outputdir
self.lda_alpha = None
self.lda_beta = None
self.m_W = len(id2word)
self.m_D = 0
if corpus:
self.m_D = len(corpus)
self.m_T = T
self.m_K = K
self.m_alpha = alpha
self.m_gamma = gamma
self.m_var_sticks = np.zeros((2, T - 1))
self.m_var_sticks[0] = 1.0
self.m_var_sticks[1] = range(T - 1, 0, -1)
self.m_varphi_ss = np.zeros(T)
self.m_lambda = np.random.gamma(1.0, 1.0, (T, self.m_W)) * self.m_D * 100 / (T * self.m_W) - eta
self.m_eta = eta
self.m_Elogbeta = dirichlet_expectation(self.m_eta + self.m_lambda)
self.m_tau = tau + 1
self.m_kappa = kappa
self.m_scale = scale
self.m_updatect = 0
self.m_status_up_to_date = True
self.m_num_docs_processed = 0
self.m_timestamp = np.zeros(self.m_W, dtype=int)
self.m_r = [0]
self.m_lambda_sum = np.sum(self.m_lambda, axis=1)
self.m_var_converge = var_converge
if self.outputdir:
self.save_options()
# if a training corpus was provided, start estimating the model right away
if corpus is not None:
self.update(corpus)
def inference(self, chunk):
if self.lda_alpha is None or self.lda_beta is None:
raise RuntimeError("model must be trained to perform inference")
chunk = list(chunk)
if len(chunk) > 1:
logger.debug("performing inference on a chunk of %i documents" % len(chunk))
gamma = np.zeros((len(chunk), self.lda_beta.shape[0]))
for d, doc in enumerate(chunk):
if not doc: # leave gamma at zero for empty documents
continue
ids, counts = zip(*doc)
_, gammad = lda_e_step(ids, counts, self.lda_alpha, self.lda_beta)
gamma[d, :] = gammad
return gamma
def __getitem__(self, bow, eps=0.01):
is_corpus, corpus = utils.is_corpus(bow)
if is_corpus:
return self._apply(corpus)
gamma = self.inference([bow])[0]
topic_dist = gamma / sum(gamma) if sum(gamma) != 0 else []
return [(topicid, topicvalue) for topicid, topicvalue in enumerate(topic_dist)
if topicvalue >= eps]
def update(self, corpus):
save_freq = max(1, int(10000 / self.chunksize)) # save every 10k docs, roughly
chunks_processed = 0
start_time = time.clock()
while True:
for chunk in utils.grouper(corpus, self.chunksize):
self.update_chunk(chunk)
self.m_num_docs_processed += len(chunk)
chunks_processed += 1
if self.update_finished(start_time, chunks_processed, self.m_num_docs_processed):
self.update_expectations()
alpha, beta = self.hdp_to_lda()
self.lda_alpha = alpha
self.lda_beta = beta
self.print_topics(20)
if self.outputdir:
self.save_topics()
return
elif chunks_processed % save_freq == 0:
self.update_expectations()
# self.save_topics(self.m_num_docs_processed)
self.print_topics(20)
logger.info('PROGRESS: finished document %i of %i' %
(self.m_num_docs_processed, self.m_D))
def update_finished(self, start_time, chunks_processed, docs_processed):
return (
# chunk limit reached
(self.max_chunks and chunks_processed == self.max_chunks) or
# time limit reached
(self.max_time and time.clock() - start_time > self.max_time) or
# no limits and whole corpus has been processed once
(not self.max_chunks and not self.max_time and docs_processed >= self.m_D))
def update_chunk(self, chunk, update=True, opt_o=True):
# Find the unique words in this chunk...
unique_words = dict()
word_list = []
for doc in chunk:
for word_id, _ in doc:
if word_id not in unique_words:
unique_words[word_id] = len(unique_words)
word_list.append(word_id)
Wt = len(word_list) # length of words in these documents
# ...and do the lazy updates on the necessary columns of lambda
rw = np.array([self.m_r[t] for t in self.m_timestamp[word_list]])
self.m_lambda[:, word_list] *= np.exp(self.m_r[-1] - rw)
self.m_Elogbeta[:, word_list] = \
sp.psi(self.m_eta + self.m_lambda[:, word_list]) - \
sp.psi(self.m_W * self.m_eta + self.m_lambda_sum[:, np.newaxis])
ss = SuffStats(self.m_T, Wt, len(chunk))
Elogsticks_1st = expect_log_sticks(self.m_var_sticks) # global sticks
# run variational inference on some new docs
score = 0.0
count = 0
for doc in chunk:
if len(doc) > 0:
doc_word_ids, doc_word_counts = zip(*doc)
doc_score = self.doc_e_step(doc, ss, Elogsticks_1st,
word_list, unique_words, doc_word_ids,
doc_word_counts, self.m_var_converge)
count += sum(doc_word_counts)
score += doc_score
if update:
self.update_lambda(ss, word_list, opt_o)
return (score, count)
def doc_e_step(self, doc, ss, Elogsticks_1st, word_list,
unique_words, doc_word_ids, doc_word_counts, var_converge) :
"""
e step for a single doc
"""
chunkids = [unique_words[id] for id in doc_word_ids]
Elogbeta_doc = self.m_Elogbeta[:, doc_word_ids]
## very similar to the hdp equations
v = np.zeros((2, self.m_K - 1))
v[0] = 1.0
v[1] = self.m_alpha
# back to the uniform
phi = np.ones((len(doc_word_ids), self.m_K)) * 1.0 / self.m_K
likelihood = 0.0
old_likelihood = -1e200
converge = 1.0
eps = 1e-100
iter = 0
max_iter = 100
        # second-level optimization is not yet supported; to be done in the future
while iter < max_iter and (converge < 0.0 or converge > var_converge):
### update variational parameters
# var_phi
if iter < 3:
var_phi = np.dot(phi.T, (Elogbeta_doc * doc_word_counts).T)
(log_var_phi, log_norm) = log_normalize(var_phi)
var_phi = np.exp(log_var_phi)
else:
var_phi = np.dot(phi.T, (Elogbeta_doc * doc_word_counts).T) + Elogsticks_1st
(log_var_phi, log_norm) = log_normalize(var_phi)
var_phi = np.exp(log_var_phi)
# phi
if iter < 3:
phi = np.dot(var_phi, Elogbeta_doc).T
(log_phi, log_norm) = log_normalize(phi)
phi = np.exp(log_phi)
else:
phi = np.dot(var_phi, Elogbeta_doc).T + Elogsticks_2nd
(log_phi, log_norm) = log_normalize(phi)
phi = np.exp(log_phi)
# v
phi_all = phi * np.array(doc_word_counts)[:, np.newaxis]
v[0] = 1.0 + np.sum(phi_all[:, :self.m_K - 1], 0)
phi_cum = np.flipud(np.sum(phi_all[:, 1:], 0))
v[1] = self.m_alpha + np.flipud(np.cumsum(phi_cum))
Elogsticks_2nd = expect_log_sticks(v)
likelihood = 0.0
# compute likelihood
# var_phi part/ C in john's notation
likelihood += np.sum((Elogsticks_1st - log_var_phi) * var_phi)
# v part/ v in john's notation, john's beta is alpha here
log_alpha = np.log(self.m_alpha)
likelihood += (self.m_K - 1) * log_alpha
dig_sum = sp.psi(np.sum(v, 0))
likelihood += np.sum((np.array([1.0, self.m_alpha])[:, np.newaxis] - v) * (sp.psi(v) - dig_sum))
likelihood -= np.sum(sp.gammaln(np.sum(v, 0))) - np.sum(sp.gammaln(v))
# Z part
likelihood += np.sum((Elogsticks_2nd - log_phi) * phi)
# X part, the data part
likelihood += np.sum(phi.T * np.dot(var_phi, Elogbeta_doc * doc_word_counts))
converge = (likelihood - old_likelihood) / abs(old_likelihood)
old_likelihood = likelihood
if converge < -0.000001:
logger.warning('likelihood is decreasing!')
iter += 1
# update the suff_stat ss
# this time it only contains information from one doc
ss.m_var_sticks_ss += np.sum(var_phi, 0)
ss.m_var_beta_ss[:, chunkids] += np.dot(var_phi.T, phi.T * doc_word_counts)
return likelihood
def update_lambda(self, sstats, word_list, opt_o):
self.m_status_up_to_date = False
# rhot will be between 0 and 1, and says how much to weight
# the information we got from this mini-chunk.
rhot = self.m_scale * pow(self.m_tau + self.m_updatect, -self.m_kappa)
if rhot < rhot_bound:
rhot = rhot_bound
self.m_rhot = rhot
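        # For example, with the default scale=1.0, tau=64 (so m_tau=65) and kappa=1.0,
        # the first update uses rhot = 65**-1 ~= 0.015, and rhot keeps shrinking as
        # m_updatect grows.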
# Update appropriate columns of lambda based on documents.
self.m_lambda[:, word_list] = self.m_lambda[:, word_list] * (1 - rhot) + \
rhot * self.m_D * sstats.m_var_beta_ss / sstats.m_chunksize
self.m_lambda_sum = (1 - rhot) * self.m_lambda_sum + \
rhot * self.m_D * np.sum(sstats.m_var_beta_ss, axis=1) / sstats.m_chunksize
self.m_updatect += 1
self.m_timestamp[word_list] = self.m_updatect
self.m_r.append(self.m_r[-1] + np.log(1 - rhot))
self.m_varphi_ss = (1.0 - rhot) * self.m_varphi_ss + rhot * \
sstats.m_var_sticks_ss * self.m_D / sstats.m_chunksize
if opt_o:
self.optimal_ordering()
## update top level sticks
self.m_var_sticks[0] = self.m_varphi_ss[:self.m_T - 1] + 1.0
var_phi_sum = np.flipud(self.m_varphi_ss[1:])
self.m_var_sticks[1] = np.flipud(np.cumsum(var_phi_sum)) + self.m_gamma
def optimal_ordering(self):
"""
ordering the topics
"""
idx = matutils.argsort(self.m_lambda_sum, reverse=True)
self.m_varphi_ss = self.m_varphi_ss[idx]
self.m_lambda = self.m_lambda[idx, :]
self.m_lambda_sum = self.m_lambda_sum[idx]
self.m_Elogbeta = self.m_Elogbeta[idx, :]
def update_expectations(self):
"""
Since we're doing lazy updates on lambda, at any given moment
the current state of lambda may not be accurate. This function
updates all of the elements of lambda and Elogbeta
so that if (for example) we want to print out the
topics we've learned we'll get the correct behavior.
"""
for w in xrange(self.m_W):
self.m_lambda[:, w] *= np.exp(self.m_r[-1] -
self.m_r[self.m_timestamp[w]])
self.m_Elogbeta = sp.psi(self.m_eta + self.m_lambda) - \
sp.psi(self.m_W * self.m_eta + self.m_lambda_sum[:, np.newaxis])
self.m_timestamp[:] = self.m_updatect
self.m_status_up_to_date = True
def print_topics(self, topics=20, topn=20):
"""Alias for `show_topics()` that prints the `topn` most
probable words for `topics` number of topics to log.
Set `topics=-1` to print all topics."""
return self.show_topics(topics=topics, topn=topn, log=True)
def show_topics(self, topics=20, topn=20, log=False, formatted=True):
"""
        Print the `topn` most probable words for `topics` number of topics.
        Set `topics=-1` to print all topics.
        Set `formatted=True` to return the topics as a list of strings, or
        `False` to return them as lists of (word, weight) pairs.
"""
if not self.m_status_up_to_date:
self.update_expectations()
betas = self.m_lambda + self.m_eta
hdp_formatter = HdpTopicFormatter(self.id2word, betas)
return hdp_formatter.show_topics(topics, topn, log, formatted)
def save_topics(self, doc_count=None):
"""legacy method; use `self.save()` instead"""
if not self.outputdir:
logger.error("cannot store topics without having specified an output directory")
if doc_count is None:
fname = 'final'
else:
fname = 'doc-%i' % doc_count
fname = '%s/%s.topics' % (self.outputdir, fname)
logger.info("saving topics to %s" % fname)
betas = self.m_lambda + self.m_eta
np.savetxt(fname, betas)
def save_options(self):
"""legacy method; use `self.save()` instead"""
if not self.outputdir:
logger.error("cannot store options without having specified an output directory")
return
fname = '%s/options.dat' % self.outputdir
with utils.smart_open(fname, 'wb') as fout:
fout.write('tau: %s\n' % str(self.m_tau - 1))
fout.write('chunksize: %s\n' % str(self.chunksize))
fout.write('var_converge: %s\n' % str(self.m_var_converge))
fout.write('D: %s\n' % str(self.m_D))
fout.write('K: %s\n' % str(self.m_K))
fout.write('T: %s\n' % str(self.m_T))
fout.write('W: %s\n' % str(self.m_W))
fout.write('alpha: %s\n' % str(self.m_alpha))
fout.write('kappa: %s\n' % str(self.m_kappa))
fout.write('eta: %s\n' % str(self.m_eta))
fout.write('gamma: %s\n' % str(self.m_gamma))
def hdp_to_lda(self):
"""
        Compute the approximately LDA-equivalent parameters (alpha, beta) of this HDP.
"""
# alpha
sticks = self.m_var_sticks[0] / (self.m_var_sticks[0] + self.m_var_sticks[1])
alpha = np.zeros(self.m_T)
left = 1.0
for i in xrange(0, self.m_T - 1):
alpha[i] = sticks[i] * left
left = left - alpha[i]
alpha[self.m_T - 1] = left
alpha = alpha * self.m_alpha
# beta
beta = (self.m_lambda + self.m_eta) / (self.m_W * self.m_eta + \
self.m_lambda_sum[:, np.newaxis])
return (alpha, beta)
def evaluate_test_corpus(self, corpus):
logger.info('TEST: evaluating test corpus')
if self.lda_alpha is None or self.lda_beta is None:
self.lda_alpha, self.lda_beta = self.hdp_to_lda()
score = 0.0
total_words = 0
for i, doc in enumerate(corpus):
if len(doc) > 0:
doc_word_ids, doc_word_counts = zip(*doc)
likelihood, gamma = lda_e_step(doc_word_ids, doc_word_counts, self.lda_alpha, self.lda_beta)
theta = gamma / np.sum(gamma)
lda_betad = self.lda_beta[:, doc_word_ids]
log_predicts = np.log(np.dot(theta, lda_betad))
doc_score = sum(log_predicts) / len(doc)
logger.info('TEST: %6d %.5f' % (i, doc_score))
score += likelihood
total_words += sum(doc_word_counts)
logger.info('TEST: average score: %.5f, total score: %.5f, test docs: %d' % (score / total_words, score, len(corpus)))
return score
#endclass HdpModel
class HdpTopicFormatter(object):
(STYLE_GENSIM, STYLE_PRETTY) = (1, 2)
def __init__(self, dictionary=None, topic_data=None, topic_file=None, style=None):
if dictionary is None:
raise ValueError('no dictionary!')
if topic_data is not None:
topics = topic_data
elif topic_file is not None:
topics = np.loadtxt('%s' % topic_file)
else:
raise ValueError('no topic data!')
# sort topics
topics_sums = np.sum(topics, axis=1)
idx = matutils.argsort(topics_sums, reverse=True)
self.data = topics[idx]
self.dictionary = dictionary
if style is None:
style = self.STYLE_GENSIM
self.style = style
def print_topics(self, topics=10, topn=10):
return self.show_topics(topics, topn, True)
def show_topics(self, topics=10, topn=10, log=False, formatted=True):
shown = []
if topics < 0:
topics = len(self.data)
topics = min(topics, len(self.data))
for k in xrange(topics):
lambdak = list(self.data[k, :])
lambdak = lambdak / sum(lambdak)
temp = zip(lambdak, xrange(len(lambdak)))
temp = sorted(temp, key=lambda x: x[0], reverse=True)
topic_terms = self.show_topic_terms(temp, topn)
if formatted:
topic = self.format_topic(k, topic_terms)
# assuming we only output formatted topics
if log:
logger.info(topic)
else:
topic = (k, topic_terms)
shown.append(topic)
return shown
def show_topic_terms(self, topic_data, topn):
return [(self.dictionary[wid], weight) for (weight, wid) in topic_data[:topn]]
def format_topic(self, topic_id, topic_terms):
if self.STYLE_GENSIM == self.style:
fmt = ' + '.join(['%.3f*%s' % (weight, word) for (word, weight) in topic_terms])
fmt = 'topic %i: %s' % (topic_id, fmt)
else:
fmt = '\n'.join([' %20s %.8f' % (word, weight) for (word, weight) in topic_terms])
fmt = 'topic %i:\n%s' % (topic_id, fmt)
return fmt
#endclass HdpTopicFormatter
| krishna11888/ai | third_party/gensim/gensim/models/hdpmodel.py | Python | gpl-2.0 | 23,000 |
from Components.HTMLComponent import HTMLComponent
from Components.GUIComponent import GUIComponent
from Screen import Screen
from Components.ActionMap import ActionMap
from Components.Label import Label
from ServiceReference import ServiceReference
from enigma import eListboxPythonMultiContent, eListbox, gFont, iServiceInformation, eServiceCenter
from Tools.Transponder import ConvertToHumanReadable
from Components.Converter.ChannelNumbers import channelnumbers
import skin
RT_HALIGN_LEFT = 0
TYPE_TEXT = 0
TYPE_VALUE_HEX = 1
TYPE_VALUE_DEC = 2
TYPE_VALUE_HEX_DEC = 3
TYPE_SLIDER = 4
TYPE_VALUE_ORBIT_DEC = 5
def to_unsigned(x):
return x & 0xFFFFFFFF
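# For example, a PID of 511 rendered with TYPE_VALUE_HEX_DEC and param=4 becomes
# '0x01ff (511d)', and an orbital position of 1920 rendered with
# TYPE_VALUE_ORBIT_DEC becomes '168.0W'.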
def ServiceInfoListEntry(a, b, valueType=TYPE_TEXT, param=4):
print "b:", b
if not isinstance(b, str):
if valueType == TYPE_VALUE_HEX:
b = ("0x%0" + str(param) + "x") % to_unsigned(b)
elif valueType == TYPE_VALUE_DEC:
b = str(b)
elif valueType == TYPE_VALUE_HEX_DEC:
b = ("0x%0" + str(param) + "x (%dd)") % (to_unsigned(b), b)
elif valueType == TYPE_VALUE_ORBIT_DEC:
direction = 'E'
if b > 1800:
b = 3600 - b
direction = 'W'
b = ("%d.%d%s") % (b // 10, b % 10, direction)
else:
b = str(b)
x, y, w, h = skin.parameters.get("ServiceInfo",(0, 0, 300, 30))
xa, ya, wa, ha = skin.parameters.get("ServiceInfoLeft",(0, 0, 300, 25))
xb, yb, wb, hb = skin.parameters.get("ServiceInfoRight",(300, 0, 600, 25))
return [
#PyObject *type, *px, *py, *pwidth, *pheight, *pfnt, *pstring, *pflags;
(eListboxPythonMultiContent.TYPE_TEXT, x, y, w, h, 0, RT_HALIGN_LEFT, ""),
(eListboxPythonMultiContent.TYPE_TEXT, xa, ya, wa, ha, 0, RT_HALIGN_LEFT, a),
(eListboxPythonMultiContent.TYPE_TEXT, xb, yb, wb, hb, 0, RT_HALIGN_LEFT, b)
]
class ServiceInfoList(HTMLComponent, GUIComponent):
def __init__(self, source):
GUIComponent.__init__(self)
self.l = eListboxPythonMultiContent()
self.list = source
self.l.setList(self.list)
font = skin.fonts.get("ServiceInfo", ("Regular", 23, 25))
self.l.setFont(0, gFont(font[0], font[1]))
self.l.setItemHeight(font[2])
GUI_WIDGET = eListbox
def postWidgetCreate(self, instance):
self.instance.setContent(self.l)
TYPE_SERVICE_INFO = 1
TYPE_TRANSPONDER_INFO = 2
class ServiceInfo(Screen):
def __init__(self, session, serviceref=None):
Screen.__init__(self, session)
self["actions"] = ActionMap(["OkCancelActions", "ColorActions"],
{
"ok": self.close,
"cancel": self.close,
"red": self.information,
"green": self.pids,
"yellow": self.transponder,
"blue": self.tuner
}, -1)
if serviceref:
self.type = TYPE_TRANSPONDER_INFO
self.skinName="ServiceInfoSimple"
info = eServiceCenter.getInstance().info(serviceref)
self.transponder_info = info.getInfoObject(serviceref, iServiceInformation.sTransponderData)
# info is a iStaticServiceInformation, not a iServiceInformation
self.info = None
self.feinfo = None
else:
self.type = TYPE_SERVICE_INFO
self["key_red"] = self["red"] = Label(_("Service"))
self["key_green"] = self["green"] = Label(_("PIDs"))
self["key_yellow"] = self["yellow"] = Label(_("Multiplex"))
self["key_blue"] = self["blue"] = Label(_("Tuner status"))
service = session.nav.getCurrentService()
if service is not None:
self.info = service.info()
self.feinfo = service.frontendInfo()
else:
self.info = None
self.feinfo = None
tlist = [ ]
self["infolist"] = ServiceInfoList(tlist)
self.onShown.append(self.information)
def information(self):
if self.type == TYPE_SERVICE_INFO:
if self.session.nav.getCurrentlyPlayingServiceOrGroup():
name = ServiceReference(self.session.nav.getCurrentlyPlayingServiceReference()).getServiceName()
refstr = self.session.nav.getCurrentlyPlayingServiceReference().toString()
else:
name = _("N/A")
refstr = _("N/A")
aspect = "-"
videocodec = "-"
resolution = "-"
if self.info:
videocodec = ("MPEG2", "MPEG4", "MPEG1", "MPEG4-II", "VC1", "VC1-SM", "-" )[self.info and self.info.getInfo(iServiceInformation.sVideoType)]
width = self.info.getInfo(iServiceInformation.sVideoWidth)
height = self.info.getInfo(iServiceInformation.sVideoHeight)
if width > 0 and height > 0:
resolution = "%dx%d" % (width,height)
resolution += ("i", "p", "")[self.info.getInfo(iServiceInformation.sProgressive)]
resolution += str((self.info.getInfo(iServiceInformation.sFrameRate) + 500) / 1000)
aspect = self.getServiceInfoValue(iServiceInformation.sAspect)
if aspect in ( 1, 2, 5, 6, 9, 0xA, 0xD, 0xE ):
aspect = "4:3"
else:
aspect = "16:9"
Labels = ( (_("Name"), name, TYPE_TEXT),
(_("Provider"), self.getServiceInfoValue(iServiceInformation.sProvider), TYPE_TEXT),
(_("Videoformat"), aspect, TYPE_TEXT),
(_("Videosize"), resolution, TYPE_TEXT),
(_("Videocodec"), videocodec, TYPE_TEXT),
(_("Namespace"), self.getServiceInfoValue(iServiceInformation.sNamespace), TYPE_VALUE_HEX, 8),
(_("Service reference"), refstr, TYPE_TEXT))
self.fillList(Labels)
else:
if self.transponder_info:
tp_info = ConvertToHumanReadable(self.transponder_info)
conv = { "tuner_type" : _("Transponder type"),
"system" : _("System"),
"modulation" : _("Modulation"),
"orbital_position" : _("Orbital position"),
"frequency" : _("Frequency"),
"symbol_rate" : _("Symbol rate"),
"bandwidth" : _("Bandwidth"),
"polarization" : _("Polarization"),
"inversion" : _("Inversion"),
"pilot" : _("Pilot"),
"rolloff" : _("Roll-off"),
"fec_inner" : _("FEC"),
"code_rate_lp" : _("Coderate LP"),
"code_rate_hp" : _("Coderate HP"),
"constellation" : _("Constellation"),
"transmission_mode": _("Transmission mode"),
"guard_interval" : _("Guard interval"),
"hierarchy_information": _("Hierarchy information") }
Labels = [(conv[i], tp_info[i], i == "orbital_position" and TYPE_VALUE_ORBIT_DEC or TYPE_VALUE_DEC) for i in tp_info.keys() if i in conv]
self.fillList(Labels)
def pids(self):
if self.type == TYPE_SERVICE_INFO:
Labels = ( (_("Video PID"), self.getServiceInfoValue(iServiceInformation.sVideoPID), TYPE_VALUE_HEX_DEC, 4),
(_("Audio PID"), self.getServiceInfoValue(iServiceInformation.sAudioPID), TYPE_VALUE_HEX_DEC, 4),
(_("PCR PID"), self.getServiceInfoValue(iServiceInformation.sPCRPID), TYPE_VALUE_HEX_DEC, 4),
(_("PMT PID"), self.getServiceInfoValue(iServiceInformation.sPMTPID), TYPE_VALUE_HEX_DEC, 4),
(_("TXT PID"), self.getServiceInfoValue(iServiceInformation.sTXTPID), TYPE_VALUE_HEX_DEC, 4),
(_("TSID"), self.getServiceInfoValue(iServiceInformation.sTSID), TYPE_VALUE_HEX_DEC, 4),
(_("ONID"), self.getServiceInfoValue(iServiceInformation.sONID), TYPE_VALUE_HEX_DEC, 4),
(_("SID"), self.getServiceInfoValue(iServiceInformation.sSID), TYPE_VALUE_HEX_DEC, 4))
self.fillList(Labels)
def showFrontendData(self, real):
if self.type == TYPE_SERVICE_INFO:
frontendData = self.feinfo and self.feinfo.getAll(real)
Labels = self.getFEData(frontendData)
self.fillList(Labels)
def transponder(self):
if self.type == TYPE_SERVICE_INFO:
self.showFrontendData(True)
def tuner(self):
if self.type == TYPE_SERVICE_INFO:
self.showFrontendData(False)
def getFEData(self, frontendDataOrg):
if frontendDataOrg and len(frontendDataOrg):
frontendData = ConvertToHumanReadable(frontendDataOrg)
if frontendDataOrg["tuner_type"] == "DVB-S":
return ((_("NIM"), chr(ord('A') + frontendData["tuner_number"]), TYPE_TEXT),
(_("Type"), frontendData["tuner_type"], TYPE_TEXT),
(_("System"), frontendData["system"], TYPE_TEXT),
(_("Modulation"), frontendData["modulation"], TYPE_TEXT),
(_("Orbital position"), frontendData["orbital_position"], TYPE_VALUE_DEC),
(_("Frequency"), frontendData["frequency"], TYPE_VALUE_DEC),
(_("Symbol rate"), frontendData["symbol_rate"], TYPE_VALUE_DEC),
(_("Polarization"), frontendData["polarization"], TYPE_TEXT),
(_("Inversion"), frontendData["inversion"], TYPE_TEXT),
(_("FEC"), frontendData["fec_inner"], TYPE_TEXT),
(_("Pilot"), frontendData.get("pilot", None), TYPE_TEXT),
(_("Roll-off"), frontendData.get("rolloff", None), TYPE_TEXT))
elif frontendDataOrg["tuner_type"] == "DVB-C":
return ((_("NIM"), chr(ord('A') + frontendData["tuner_number"]), TYPE_TEXT),
(_("Type"), frontendData["tuner_type"], TYPE_TEXT),
(_("Modulation"), frontendData["modulation"], TYPE_TEXT),
(_("Frequency"), frontendData["frequency"], TYPE_VALUE_DEC),
(_("Symbol rate"), frontendData["symbol_rate"], TYPE_VALUE_DEC),
(_("Inversion"), frontendData["inversion"], TYPE_TEXT),
(_("FEC"), frontendData["fec_inner"], TYPE_TEXT))
elif frontendDataOrg["tuner_type"] == "DVB-T":
return ((_("NIM"), chr(ord('A') + frontendData["tuner_number"]), TYPE_TEXT),
(_("Type"), frontendData["tuner_type"], TYPE_TEXT),
(_("Frequency"), frontendData["frequency"], TYPE_VALUE_DEC),
(_("Channel"), channelnumbers.getChannelNumber(frontendData["frequency"], frontendData["tuner_number"]), TYPE_VALUE_DEC),
(_("Inversion"), frontendData["inversion"], TYPE_TEXT),
(_("Bandwidth"), frontendData["bandwidth"], TYPE_VALUE_DEC),
(_("Code rate LP"), frontendData["code_rate_lp"], TYPE_TEXT),
(_("Code rate HP"), frontendData["code_rate_hp"], TYPE_TEXT),
(_("Constellation"), frontendData["constellation"], TYPE_TEXT),
(_("Transmission mode"), frontendData["transmission_mode"], TYPE_TEXT),
(_("Guard interval"), frontendData["guard_interval"], TYPE_TEXT),
(_("Hierarchy info"), frontendData["hierarchy_information"], TYPE_TEXT))
return [ ]
def fillList(self, Labels):
tlist = [ ]
for item in Labels:
if item[1] is None:
continue;
value = item[1]
if len(item) < 4:
tlist.append(ServiceInfoListEntry(item[0]+":", value, item[2]))
else:
tlist.append(ServiceInfoListEntry(item[0]+":", value, item[2], item[3]))
self["infolist"].l.setList(tlist)
def getServiceInfoValue(self, what):
if self.info is None:
return ""
v = self.info.getInfo(what)
if v == -2:
v = self.info.getInfoString(what)
elif v == -1:
v = _("N/A")
return v
| popazerty/SDG-e2 | lib/python/Screens/ServiceInfo.py | Python | gpl-2.0 | 10,356 |
"""
The simple harness interface
"""
__author__ = """Copyright Andy Whitcroft, Martin J. Bligh 2006"""
import os, harness, time
class harness_simple(harness.harness):
"""
The simple server harness
Properties:
job
The job object for this job
"""
def __init__(self, job, harness_args):
"""
job
The job object for this job
"""
self.setup(job)
self.status = os.fdopen(3, 'w')
def test_status(self, status, tag):
"""A test within this job is completing"""
if self.status:
for line in status.split('\n'):
# prepend status messages with
# AUTOTEST_STATUS:tag: so that we can tell
# which lines were sent by the autotest client
pre = 'AUTOTEST_STATUS:%s:' % (tag,)
self.status.write(pre + line + '\n')
self.status.flush()
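# For example, with tag 'sleeptest' a client status line 'GOOD ...' is written to
# the status stream (fd 3) as 'AUTOTEST_STATUS:sleeptest:GOOD ...'.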
| libvirt/autotest | client/bin/harness_simple.py | Python | gpl-2.0 | 974 |
# Author: Marvin Pinto <[email protected]>
# Author: Dennis Lutter <[email protected]>
# Author: Shawn Conroyd <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
import urllib
import urllib2
import sickbeard
from sickbeard import logger
from sickbeard.common import notifyStrings, NOTIFY_SNATCH, NOTIFY_DOWNLOAD
from sickbeard.exceptions import ex
API_URL = "https://new.boxcar.io/api/notifications"
class Boxcar2Notifier:
def _sendBoxcar2(self, title, msg, accessToken, sound):
"""
Sends a boxcar2 notification to the address provided
msg: The message to send (unicode)
title: The title of the message
accessToken: The access token to send notification to
returns: True if the message succeeded, False otherwise
"""
# build up the URL and parameters
msg = msg.strip().encode('utf-8')
data = urllib.urlencode({
'user_credentials': accessToken,
'notification[title]': title + " - " + msg,
'notification[long_message]': msg,
'notification[sound]': sound,
'notification[source_name]': "SickBeard"
})
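        # urlencode() produces a form body roughly like (parameter order may vary):
        #   user_credentials=<token>&notification%5Btitle%5D=<title - msg>&notification%5Bsound%5D=<sound>&...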
# send the request to boxcar2
try:
req = urllib2.Request(API_URL)
handle = urllib2.urlopen(req, data)
handle.close()
except urllib2.URLError, e:
# FIXME: Python 2.5 hack, it wrongly reports 201 as an error
if hasattr(e, 'code') and e.code == 201:
logger.log(u"BOXCAR2: Notification successful.", logger.MESSAGE)
return True
# if we get an error back that doesn't have an error code then who knows what's really happening
if not hasattr(e, 'code'):
logger.log(u"BOXCAR2: Notification failed." + ex(e), logger.ERROR)
else:
logger.log(u"BOXCAR2: Notification failed. Error code: " + str(e.code), logger.ERROR)
if e.code == 404:
logger.log(u"BOXCAR2: Access token is wrong/not associated to a device.", logger.ERROR)
elif e.code == 401:
logger.log(u"BOXCAR2: Access token not recognized.", logger.ERROR)
elif e.code == 400:
logger.log(u"BOXCAR2: Wrong data sent to boxcar.", logger.ERROR)
elif e.code == 503:
                    logger.log(u"BOXCAR2: Boxcar server too busy to handle the request at this time.", logger.WARNING)
return False
logger.log(u"BOXCAR2: Notification successful.", logger.MESSAGE)
return True
def _notify(self, title, message, accessToken=None, sound=None, force=False):
"""
Sends a boxcar2 notification based on the provided info or SB config
title: The title of the notification to send
message: The message string to send
accessToken: The access token to send the notification to (optional, defaults to the access token in the config)
force: If True then the notification will be sent even if Boxcar is disabled in the config
"""
# suppress notifications if the notifier is disabled but the notify options are checked
if not sickbeard.USE_BOXCAR2 and not force:
return False
# fill in omitted parameters
if not accessToken:
accessToken = sickbeard.BOXCAR2_ACCESS_TOKEN
if not sound:
sound = sickbeard.BOXCAR2_SOUND
logger.log(u"BOXCAR2: Sending notification for " + message, logger.DEBUG)
return self._sendBoxcar2(title, message, accessToken, sound)
##############################################################################
# Public functions
##############################################################################
def notify_snatch(self, ep_name):
if sickbeard.BOXCAR2_NOTIFY_ONSNATCH:
self._notify(notifyStrings[NOTIFY_SNATCH], ep_name)
def notify_download(self, ep_name):
if sickbeard.BOXCAR2_NOTIFY_ONDOWNLOAD:
self._notify(notifyStrings[NOTIFY_DOWNLOAD], ep_name)
def test_notify(self, accessToken, sound):
return self._notify("Test", "This is a test notification from Sick Beard", accessToken, sound, force=True)
def update_library(self, ep_obj=None):
pass
notifier = Boxcar2Notifier
| imajes/Sick-Beard | sickbeard/notifiers/boxcar2.py | Python | gpl-3.0 | 5,011 |
#!/usr/bin/env python
#
# Copyright 2008, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Verifies that Google Test correctly parses environment variables."""
__author__ = '[email protected] (Zhanyong Wan)'
import os
import gtest_test_utils
IS_WINDOWS = os.name == 'nt'
IS_LINUX = os.name == 'posix' and os.uname()[0] == 'Linux'
COMMAND = gtest_test_utils.GetTestExecutablePath('gtest_env_var_test_')
def AssertEq(expected, actual):
if expected != actual:
print 'Expected: %s' % (expected,)
print ' Actual: %s' % (actual,)
raise AssertionError
def SetEnvVar(env_var, value):
"""Sets the env variable to 'value'; unsets it when 'value' is None."""
if value is not None:
os.environ[env_var] = value
elif env_var in os.environ:
del os.environ[env_var]
def GetFlag(flag):
"""Runs gtest_env_var_test_ and returns its output."""
args = [COMMAND]
if flag is not None:
args += [flag]
return gtest_test_utils.Subprocess(args).output
def TestFlag(flag, test_val, default_val):
"""Verifies that the given flag is affected by the corresponding env var."""
env_var = 'GTEST_' + flag.upper()
SetEnvVar(env_var, test_val)
AssertEq(test_val, GetFlag(flag))
SetEnvVar(env_var, None)
AssertEq(default_val, GetFlag(flag))
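# For example, TestFlag('color', 'yes', 'auto') exports GTEST_COLOR=yes, expects the
# helper test binary to report 'yes' for the color flag, then unsets the variable and
# expects the built-in default 'auto'.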
class GTestEnvVarTest(gtest_test_utils.TestCase):
def testEnvVarAffectsFlag(self):
"""Tests that environment variable should affect the corresponding flag."""
TestFlag('break_on_failure', '1', '0')
TestFlag('color', 'yes', 'auto')
TestFlag('filter', 'FooTest.Bar', '*')
TestFlag('output', 'tmp/foo.xml', '')
TestFlag('print_time', '0', '1')
TestFlag('repeat', '999', '1')
TestFlag('throw_on_failure', '1', '0')
TestFlag('death_test_style', 'threadsafe', 'fast')
if IS_WINDOWS:
TestFlag('catch_exceptions', '1', '0')
if IS_LINUX:
TestFlag('death_test_use_fork', '1', '0')
TestFlag('stack_trace_depth', '0', '100')
if __name__ == '__main__':
gtest_test_utils.Main()
| TextusData/Mover | thirdparty/protobuf-2.2.0/gtest/test/gtest_env_var_test.py | Python | gpl-3.0 | 3,472 |
# ----------------------------------------------------------------------
# Numenta Platform for Intelligent Computing (NuPIC)
# Copyright (C) 2014, Numenta, Inc. Unless you have an agreement
# with Numenta, Inc., for a separate license for this software code, the
# following terms and conditions apply:
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses.
#
# http://numenta.org/licenses/
# ----------------------------------------------------------------------
"""
MonitorMixinBase class used in monitor mixin framework.
"""
import abc
import numpy
from prettytable import PrettyTable
from nupic.research.monitor_mixin.plot import Plot
class MonitorMixinBase(object):
"""
Base class for MonitorMixin. Each subclass will be a mixin for a particular
algorithm.
All arguments, variables, and methods in monitor mixin classes should be
prefixed with "mm" (to avoid collision with the classes they mix in to).
"""
__metaclass__ = abc.ABCMeta
def __init__(self, *args, **kwargs):
"""
Note: If you set the kwarg "mmName", then pretty-printing of traces and
metrics will include the name you specify as a tag before every title.
"""
self.mmName = kwargs.get("mmName")
if "mmName" in kwargs:
del kwargs["mmName"]
super(MonitorMixinBase, self).__init__(*args, **kwargs)
# Mapping from key (string) => trace (Trace)
self._mmTraces = None
self._mmData = None
self.mmClearHistory()
def mmClearHistory(self):
"""
Clears the stored history.
"""
self._mmTraces = {}
self._mmData = {}
@staticmethod
def mmPrettyPrintTraces(traces, breakOnResets=None):
"""
Returns pretty-printed table of traces.
@param traces (list) Traces to print in table
@param breakOnResets (BoolsTrace) Trace of resets to break table on
@return (string) Pretty-printed table of traces.
"""
assert len(traces) > 0, "No traces found"
table = PrettyTable(["#"] + [trace.prettyPrintTitle() for trace in traces])
for i in xrange(len(traces[0].data)):
if breakOnResets and breakOnResets.data[i]:
table.add_row(["<reset>"] * (len(traces) + 1))
table.add_row([i] +
[trace.prettyPrintDatum(trace.data[i]) for trace in traces])
return table.get_string().encode("utf-8")
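  # A typical call site (illustrative): print a tabular view of an instance's
  # default traces with
  #   print instance.mmPrettyPrintTraces(instance.mmGetDefaultTraces())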
@staticmethod
def mmPrettyPrintMetrics(metrics, sigFigs=5):
"""
Returns pretty-printed table of metrics.
    @param metrics (list) Metrics to print in table
@param sigFigs (int) Number of significant figures to print
@return (string) Pretty-printed table of metrics.
"""
assert len(metrics) > 0, "No metrics found"
table = PrettyTable(["Metric", "mean", "standard deviation",
"min", "max", "sum", ])
for metric in metrics:
table.add_row([metric.prettyPrintTitle()] + metric.getStats())
return table.get_string().encode("utf-8")
def mmGetDefaultTraces(self, verbosity=1):
"""
Returns list of default traces. (To be overridden.)
@param verbosity (int) Verbosity level
@return (list) Default traces
"""
return []
def mmGetDefaultMetrics(self, verbosity=1):
"""
Returns list of default metrics. (To be overridden.)
@param verbosity (int) Verbosity level
@return (list) Default metrics
"""
return []
def mmGetCellTracePlot(self, cellTrace, cellCount, activityType, title="",
showReset=False, resetShading=0.25):
"""
Returns plot of the cell activity. Note that if many timesteps of
activities are input, matplotlib's image interpolation may omit activities
(columns in the image).
@param cellTrace (list) a temporally ordered list of sets of cell
activities
@param cellCount (int) number of cells in the space being rendered
@param activityType (string) type of cell activity being displayed
@param title (string) an optional title for the figure
@param showReset (bool) if true, the first set of cell activities
after a reset will have a grayscale background
@param resetShading (float) applicable if showReset is true, specifies the
intensity of the reset background with 0.0
being white and 1.0 being black
@return (Plot) plot
"""
plot = Plot(self, title)
resetTrace = self.mmGetTraceResets().data
data = numpy.zeros((cellCount, 1))
for i in xrange(len(cellTrace)):
# Set up a "background" vector that is shaded or blank
if showReset and resetTrace[i]:
activity = numpy.ones((cellCount, 1)) * resetShading
else:
activity = numpy.zeros((cellCount, 1))
activeIndices = cellTrace[i]
activity[list(activeIndices)] = 1
data = numpy.concatenate((data, activity), 1)
plot.add2DArray(data, xlabel="Time", ylabel=activityType)
return plot
| david-ragazzi/nupic | nupic/research/monitor_mixin/monitor_mixin_base.py | Python | gpl-3.0 | 5,503 |
# Amara, universalsubtitles.org
#
# Copyright (C) 2013 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
| ofer43211/unisubs | apps/comments/__init__.py | Python | agpl-3.0 | 763 |
"""
Kickass Torrent (Videos, Music, Files)
@website https://kickass.so
@provide-api no (nothing found)
@using-api no
@results HTML (using search portal)
@stable yes (HTML can change)
@parse url, title, content, seed, leech, magnetlink
"""
from urlparse import urljoin
from cgi import escape
from urllib import quote
from lxml import html
from operator import itemgetter
from searx.engines.xpath import extract_text
# engine dependent config
categories = ['videos', 'music', 'files']
paging = True
# search-url
url = 'https://kickass.to/'
search_url = url + 'search/{search_term}/{pageno}/'
# specific xpath variables
magnet_xpath = './/a[@title="Torrent magnet link"]'
torrent_xpath = './/a[@title="Download torrent file"]'
content_xpath = './/span[@class="font11px lightgrey block"]'
# do search-request
def request(query, params):
params['url'] = search_url.format(search_term=quote(query),
pageno=params['pageno'])
return params
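# For example, the query 'ubuntu server' on page 2 builds the request URL
# https://kickass.to/search/ubuntu%20server/2/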
# get response from search-request
def response(resp):
results = []
dom = html.fromstring(resp.text)
search_res = dom.xpath('//table[@class="data"]//tr')
# return empty array if nothing is found
if not search_res:
return []
# parse results
for result in search_res[1:]:
link = result.xpath('.//a[@class="cellMainLink"]')[0]
href = urljoin(url, link.attrib['href'])
title = extract_text(link)
content = escape(extract_text(result.xpath(content_xpath)))
seed = result.xpath('.//td[contains(@class, "green")]/text()')[0]
leech = result.xpath('.//td[contains(@class, "red")]/text()')[0]
filesize = result.xpath('.//td[contains(@class, "nobr")]/text()')[0]
filesize_multiplier = result.xpath('.//td[contains(@class, "nobr")]//span/text()')[0]
files = result.xpath('.//td[contains(@class, "center")][2]/text()')[0]
# convert seed to int if possible
if seed.isdigit():
seed = int(seed)
else:
seed = 0
# convert leech to int if possible
if leech.isdigit():
leech = int(leech)
else:
leech = 0
# convert filesize to byte if possible
try:
filesize = float(filesize)
# convert filesize to byte
if filesize_multiplier == 'TB':
filesize = int(filesize * 1024 * 1024 * 1024 * 1024)
elif filesize_multiplier == 'GB':
filesize = int(filesize * 1024 * 1024 * 1024)
elif filesize_multiplier == 'MB':
filesize = int(filesize * 1024 * 1024)
elif filesize_multiplier == 'KB':
filesize = int(filesize * 1024)
except:
filesize = None
# convert files to int if possible
if files.isdigit():
files = int(files)
else:
files = None
magnetlink = result.xpath(magnet_xpath)[0].attrib['href']
torrentfile = result.xpath(torrent_xpath)[0].attrib['href']
torrentfileurl = quote(torrentfile, safe="%/:=&?~#+!$,;'@()*")
# append result
results.append({'url': href,
'title': title,
'content': content,
'seed': seed,
'leech': leech,
'filesize': filesize,
'files': files,
'magnetlink': magnetlink,
'torrentfile': torrentfileurl,
'template': 'torrent.html'})
# return results sorted by seeder
return sorted(results, key=itemgetter('seed'), reverse=True)
| framasoft/searx | searx/engines/kickass.py | Python | agpl-3.0 | 3,732 |
# -*- coding: utf-8 -*-
# Copyright(C) 2014 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
| laurent-george/weboob | weboob/browser/filters/__init__.py | Python | agpl-3.0 | 729 |
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import time
import urllib2
import ambari_simplejson as json # simplejson is much faster comparing to Python 2.6 json module and has the same functions set.
import logging
import traceback
from resource_management.libraries.functions.namenode_ha_utils import get_all_namenode_addresses
from resource_management.libraries.functions.curl_krb_request import curl_krb_request
from resource_management.libraries.functions.curl_krb_request import DEFAULT_KERBEROS_KINIT_TIMER_MS
from resource_management.libraries.functions.curl_krb_request import KERBEROS_KINIT_TIMER_PARAMETER
from resource_management.core.environment import Environment
LABEL = 'Last Checkpoint: [{h} hours, {m} minutes, {tx} transactions]'
HDFS_SITE_KEY = '{{hdfs-site}}'
RESULT_STATE_UNKNOWN = 'UNKNOWN'
RESULT_STATE_SKIPPED = 'SKIPPED'
NN_HTTP_ADDRESS_KEY = '{{hdfs-site/dfs.namenode.http-address}}'
NN_HTTPS_ADDRESS_KEY = '{{hdfs-site/dfs.namenode.https-address}}'
NN_HTTP_POLICY_KEY = '{{hdfs-site/dfs.http.policy}}'
NN_CHECKPOINT_TX_KEY = '{{hdfs-site/dfs.namenode.checkpoint.txns}}'
NN_CHECKPOINT_PERIOD_KEY = '{{hdfs-site/dfs.namenode.checkpoint.period}}'
PERCENT_WARNING_KEY = 'checkpoint.time.warning.threshold'
PERCENT_WARNING_DEFAULT = 200
PERCENT_CRITICAL_KEY = 'checkpoint.time.critical.threshold'
PERCENT_CRITICAL_DEFAULT = 200
CHECKPOINT_TX_MULTIPLIER_WARNING_KEY = 'checkpoint.txns.multiplier.warning.threshold'
CHECKPOINT_TX_MULTIPLIER_WARNING_DEFAULT = 2
CHECKPOINT_TX_MULTIPLIER_CRITICAL_KEY = 'checkpoint.txns.multiplier.critical.threshold'
CHECKPOINT_TX_MULTIPLIER_CRITICAL_DEFAULT = 4
CHECKPOINT_TX_DEFAULT = 1000000
CHECKPOINT_PERIOD_DEFAULT = 21600
CONNECTION_TIMEOUT_KEY = 'connection.timeout'
CONNECTION_TIMEOUT_DEFAULT = 5.0
KERBEROS_KEYTAB = '{{hdfs-site/dfs.web.authentication.kerberos.keytab}}'
KERBEROS_PRINCIPAL = '{{hdfs-site/dfs.web.authentication.kerberos.principal}}'
SECURITY_ENABLED_KEY = '{{cluster-env/security_enabled}}'
SMOKEUSER_KEY = "{{cluster-env/smokeuser}}"
EXECUTABLE_SEARCH_PATHS = '{{kerberos-env/executable_search_paths}}'
logger = logging.getLogger('ambari_alerts')
def get_tokens():
"""
Returns a tuple of tokens in the format {{site/property}} that will be used
to build the dictionary passed into execute
"""
return (HDFS_SITE_KEY, NN_HTTP_ADDRESS_KEY, NN_HTTPS_ADDRESS_KEY, NN_HTTP_POLICY_KEY, EXECUTABLE_SEARCH_PATHS,
NN_CHECKPOINT_TX_KEY, NN_CHECKPOINT_PERIOD_KEY, KERBEROS_KEYTAB, KERBEROS_PRINCIPAL, SECURITY_ENABLED_KEY, SMOKEUSER_KEY)
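# Before execute() is called, the agent replaces each {{site/property}} token with the
# cluster's current value, so configurations looks roughly like (illustrative):
#   {'{{hdfs-site/dfs.http.policy}}': 'HTTP_ONLY', '{{cluster-env/smokeuser}}': 'ambari-qa', ...}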
def execute(configurations={}, parameters={}, host_name=None):
"""
Returns a tuple containing the result code and a pre-formatted result label
Keyword arguments:
configurations (dictionary): a mapping of configuration key to value
parameters (dictionary): a mapping of script parameter key to value
host_name (string): the name of this host where the alert is running
"""
if configurations is None:
return (('UNKNOWN', ['There were no configurations supplied to the script.']))
uri = None
scheme = 'http'
http_uri = None
https_uri = None
http_policy = 'HTTP_ONLY'
checkpoint_tx = CHECKPOINT_TX_DEFAULT
checkpoint_period = CHECKPOINT_PERIOD_DEFAULT
# hdfs-site is required
  if HDFS_SITE_KEY not in configurations:
return (RESULT_STATE_UNKNOWN, ['{0} is a required parameter for the script'.format(HDFS_SITE_KEY)])
if NN_HTTP_POLICY_KEY in configurations:
http_policy = configurations[NN_HTTP_POLICY_KEY]
if NN_CHECKPOINT_TX_KEY in configurations:
checkpoint_tx = configurations[NN_CHECKPOINT_TX_KEY]
if NN_CHECKPOINT_PERIOD_KEY in configurations:
checkpoint_period = configurations[NN_CHECKPOINT_PERIOD_KEY]
if SMOKEUSER_KEY in configurations:
smokeuser = configurations[SMOKEUSER_KEY]
executable_paths = None
if EXECUTABLE_SEARCH_PATHS in configurations:
executable_paths = configurations[EXECUTABLE_SEARCH_PATHS]
security_enabled = False
if SECURITY_ENABLED_KEY in configurations:
security_enabled = str(configurations[SECURITY_ENABLED_KEY]).upper() == 'TRUE'
kerberos_keytab = None
if KERBEROS_KEYTAB in configurations:
kerberos_keytab = configurations[KERBEROS_KEYTAB]
kerberos_principal = None
if KERBEROS_PRINCIPAL in configurations:
kerberos_principal = configurations[KERBEROS_PRINCIPAL]
kerberos_principal = kerberos_principal.replace('_HOST', host_name)
# parse script arguments
connection_timeout = CONNECTION_TIMEOUT_DEFAULT
if CONNECTION_TIMEOUT_KEY in parameters:
connection_timeout = float(parameters[CONNECTION_TIMEOUT_KEY])
percent_warning = PERCENT_WARNING_DEFAULT
if PERCENT_WARNING_KEY in parameters:
percent_warning = float(parameters[PERCENT_WARNING_KEY])
percent_critical = PERCENT_CRITICAL_DEFAULT
if PERCENT_CRITICAL_KEY in parameters:
percent_critical = float(parameters[PERCENT_CRITICAL_KEY])
checkpoint_txn_multiplier_warning = CHECKPOINT_TX_MULTIPLIER_WARNING_DEFAULT
if CHECKPOINT_TX_MULTIPLIER_WARNING_KEY in parameters:
checkpoint_txn_multiplier_warning = float(parameters[CHECKPOINT_TX_MULTIPLIER_WARNING_KEY])
checkpoint_txn_multiplier_critical = CHECKPOINT_TX_MULTIPLIER_CRITICAL_DEFAULT
if CHECKPOINT_TX_MULTIPLIER_CRITICAL_KEY in parameters:
checkpoint_txn_multiplier_critical = float(parameters[CHECKPOINT_TX_MULTIPLIER_CRITICAL_KEY])
kinit_timer_ms = parameters.get(KERBEROS_KINIT_TIMER_PARAMETER, DEFAULT_KERBEROS_KINIT_TIMER_MS)
# determine the right URI and whether to use SSL
hdfs_site = configurations[HDFS_SITE_KEY]
scheme = "https" if http_policy == "HTTPS_ONLY" else "http"
nn_addresses = get_all_namenode_addresses(hdfs_site)
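  # pick the NameNode address that belongs to the host this alert is running on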
for nn_address in nn_addresses:
if nn_address.startswith(host_name + ":"):
uri = nn_address
break
if not uri:
    return (RESULT_STATE_SKIPPED, ['NameNode on host {0} not found (namenode addresses = {1})'.format(host_name, ', '.join(nn_addresses))])
current_time = int(round(time.time() * 1000))
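  # JMX endpoints: FSNamesystem exposes the last checkpoint time, NameNodeInfo the journal transaction counters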
  last_checkpoint_time_qry = "{0}://{1}/jmx?qry=Hadoop:service=NameNode,name=FSNamesystem".format(scheme, uri)
  journal_transaction_info_qry = "{0}://{1}/jmx?qry=Hadoop:service=NameNode,name=NameNodeInfo".format(scheme, uri)
# start out assuming an OK status
label = None
result_code = "OK"
try:
if kerberos_principal is not None and kerberos_keytab is not None and security_enabled:
env = Environment.get_instance()
# curl requires an integer timeout
curl_connection_timeout = int(connection_timeout)
      last_checkpoint_time_response, error_msg, time_millis = curl_krb_request(env.tmp_dir, kerberos_keytab,
        kerberos_principal, last_checkpoint_time_qry, "checkpoint_time_alert", executable_paths, False,
        "NameNode Last Checkpoint", smokeuser, connection_timeout=curl_connection_timeout,
        kinit_timer_ms=kinit_timer_ms)
last_checkpoint_time_response_json = json.loads(last_checkpoint_time_response)
last_checkpoint_time = int(last_checkpoint_time_response_json["beans"][0]["LastCheckpointTime"])
      journal_transaction_info_response, error_msg, time_millis = curl_krb_request(env.tmp_dir, kerberos_keytab,
        kerberos_principal, journal_transaction_info_qry, "checkpoint_time_alert", executable_paths,
        False, "NameNode Last Checkpoint", smokeuser, connection_timeout=curl_connection_timeout,
        kinit_timer_ms=kinit_timer_ms)
journal_transaction_info_response_json = json.loads(journal_transaction_info_response)
journal_transaction_info = journal_transaction_info_response_json["beans"][0]["JournalTransactionInfo"]
else:
last_checkpoint_time = int(get_value_from_jmx(last_checkpoint_time_qry,
"LastCheckpointTime", connection_timeout))
journal_transaction_info = get_value_from_jmx(journal_transaction_info_qry,
"JournalTransactionInfo", connection_timeout)
journal_transaction_info_dict = json.loads(journal_transaction_info)
last_tx = int(journal_transaction_info_dict['LastAppliedOrWrittenTxId'])
most_recent_tx = int(journal_transaction_info_dict['MostRecentCheckpointTxId'])
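    # transactions accumulated since the last checkpoint, and seconds elapsed since it completed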
transaction_difference = last_tx - most_recent_tx
delta = (current_time - last_checkpoint_time)/1000
label = LABEL.format(h=get_time(delta)['h'], m=get_time(delta)['m'], tx=transaction_difference)
is_checkpoint_txn_warning = transaction_difference > checkpoint_txn_multiplier_warning * int(checkpoint_tx)
is_checkpoint_txn_critical = transaction_difference > checkpoint_txn_multiplier_critical * int(checkpoint_tx)
# Either too many uncommitted transactions or missed check-pointing for
# long time decided by the thresholds
if is_checkpoint_txn_critical or (float(delta) / int(checkpoint_period)*100 >= int(percent_critical)):
logger.debug('Raising critical alert: transaction_difference = {0}, checkpoint_tx = {1}'.format(transaction_difference, checkpoint_tx))
result_code = 'CRITICAL'
elif is_checkpoint_txn_warning or (float(delta) / int(checkpoint_period)*100 >= int(percent_warning)):
logger.debug('Raising warning alert: transaction_difference = {0}, checkpoint_tx = {1}'.format(transaction_difference, checkpoint_tx))
result_code = 'WARNING'
  except Exception:
    label = traceback.format_exc()
    result_code = RESULT_STATE_UNKNOWN
return ((result_code, [label]))
def get_time(delta):
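  """Convert a duration in seconds into whole hours and remaining minutes."""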
h = int(delta/3600)
m = int((delta % 3600)/60)
return {'h':h, 'm':m}
def get_value_from_jmx(query, jmx_property, connection_timeout):
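  """Fetch a single property from the first bean returned by a NameNode JMX query."""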
response = None
try:
response = urllib2.urlopen(query, timeout=connection_timeout)
data = response.read()
data_dict = json.loads(data)
return data_dict["beans"][0][jmx_property]
finally:
if response is not None:
try:
response.close()
      except Exception:
pass
| arenadata/ambari | ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HDFS/package/alerts/alert_checkpoint_time.py | Python | apache-2.0 | 10,596 |
# Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo.serialization import jsonutils
import webob
from nova import compute
from nova import exception
from nova import objects
from nova import test
from nova.tests.unit.api.openstack import fakes
UUID = '70f6db34-de8d-4fbd-aafb-4065bdfa6114'
last_add_fixed_ip = (None, None)
last_remove_fixed_ip = (None, None)
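# Module-level stubs that replace compute.api.API methods and record the last call so tests can assert on it.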
def compute_api_add_fixed_ip(self, context, instance, network_id):
global last_add_fixed_ip
last_add_fixed_ip = (instance['uuid'], network_id)
def compute_api_remove_fixed_ip(self, context, instance, address):
global last_remove_fixed_ip
last_remove_fixed_ip = (instance['uuid'], address)
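# Minimal Instance object returned by the stubbed compute API get() call.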
def compute_api_get(self, context, instance_id, want_objects=False,
expected_attrs=None):
instance = objects.Instance()
instance.uuid = instance_id
instance.id = 1
instance.vm_state = 'fake'
instance.task_state = 'fake'
instance.obj_reset_changes()
return instance
class FixedIpTestV21(test.NoDBTestCase):
def setUp(self):
super(FixedIpTestV21, self).setUp()
fakes.stub_out_networking(self.stubs)
fakes.stub_out_rate_limiting(self.stubs)
self.stubs.Set(compute.api.API, "add_fixed_ip",
compute_api_add_fixed_ip)
self.stubs.Set(compute.api.API, "remove_fixed_ip",
compute_api_remove_fixed_ip)
self.stubs.Set(compute.api.API, 'get', compute_api_get)
self.app = self._get_app()
def _get_app(self):
return fakes.wsgi_app_v21(init_only=('servers', 'os-multinic'))
def _get_url(self):
return '/v2/fake'
def test_add_fixed_ip(self):
global last_add_fixed_ip
last_add_fixed_ip = (None, None)
body = dict(addFixedIp=dict(networkId='test_net'))
req = webob.Request.blank(
self._get_url() + '/servers/%s/action' % UUID)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 202)
self.assertEqual(last_add_fixed_ip, (UUID, 'test_net'))
def _test_add_fixed_ip_bad_request(self, body):
req = webob.Request.blank(
self._get_url() + '/servers/%s/action' % UUID)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(400, resp.status_int)
def test_add_fixed_ip_empty_network_id(self):
body = {'addFixedIp': {'network_id': ''}}
self._test_add_fixed_ip_bad_request(body)
def test_add_fixed_ip_network_id_bigger_than_36(self):
body = {'addFixedIp': {'network_id': 'a' * 37}}
self._test_add_fixed_ip_bad_request(body)
def test_add_fixed_ip_no_network(self):
global last_add_fixed_ip
last_add_fixed_ip = (None, None)
body = dict(addFixedIp=dict())
req = webob.Request.blank(
self._get_url() + '/servers/%s/action' % UUID)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
self.assertEqual(last_add_fixed_ip, (None, None))
@mock.patch.object(compute.api.API, 'add_fixed_ip')
def test_add_fixed_ip_no_more_ips_available(self, mock_add_fixed_ip):
mock_add_fixed_ip.side_effect = exception.NoMoreFixedIps(net='netid')
body = dict(addFixedIp=dict(networkId='test_net'))
req = webob.Request.blank(
self._get_url() + '/servers/%s/action' % UUID)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
def test_remove_fixed_ip(self):
global last_remove_fixed_ip
last_remove_fixed_ip = (None, None)
body = dict(removeFixedIp=dict(address='10.10.10.1'))
req = webob.Request.blank(
self._get_url() + '/servers/%s/action' % UUID)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 202)
self.assertEqual(last_remove_fixed_ip, (UUID, '10.10.10.1'))
def test_remove_fixed_ip_no_address(self):
global last_remove_fixed_ip
last_remove_fixed_ip = (None, None)
body = dict(removeFixedIp=dict())
req = webob.Request.blank(
self._get_url() + '/servers/%s/action' % UUID)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 400)
self.assertEqual(last_remove_fixed_ip, (None, None))
def test_remove_fixed_ip_invalid_address(self):
body = {'remove_fixed_ip': {'address': ''}}
req = webob.Request.blank(
self._get_url() + '/servers/%s/action' % UUID)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(400, resp.status_int)
@mock.patch.object(compute.api.API, 'remove_fixed_ip',
side_effect=exception.FixedIpNotFoundForSpecificInstance(
instance_uuid=UUID, ip='10.10.10.1'))
def test_remove_fixed_ip_not_found(self, _remove_fixed_ip):
body = {'remove_fixed_ip': {'address': '10.10.10.1'}}
req = webob.Request.blank(
self._get_url() + '/servers/%s/action' % UUID)
req.method = 'POST'
req.body = jsonutils.dumps(body)
req.headers['content-type'] = 'application/json'
resp = req.get_response(self.app)
self.assertEqual(400, resp.status_int)
class FixedIpTestV2(FixedIpTestV21):
def setUp(self):
super(FixedIpTestV2, self).setUp()
self.flags(
osapi_compute_extension=[
'nova.api.openstack.compute.contrib.select_extensions'],
osapi_compute_ext_list=['Multinic'])
def _get_app(self):
return fakes.wsgi_app(init_only=('servers',))
def test_remove_fixed_ip_invalid_address(self):
        # NOTE(cyeoh): This test is disabled for the V2 API because it
        # has poorer input validation.
pass
| silenceli/nova | nova/tests/unit/api/openstack/compute/contrib/test_multinic.py | Python | apache-2.0 | 7,255 |
#!/usr/bin/env python
# ---
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# ---
import os
import re
import sys
import json
import optparse
#--------------------------------------------------------------------
def main():
#----------------------------------------------------------------
if len(sys.argv) < 3:
error("expecting parameters srcDir outputDir")
srcDirName = sys.argv[1]
oDirName = sys.argv[2]
if not os.path.exists(srcDirName): error("source directory not found: '" + srcDirName + "'")
if not os.path.isdir(srcDirName): error("source directory not a directory: '" + srcDirName + "'")
if not os.path.exists(oDirName): error("output directory not found: '" + oDirName + "'")
if not os.path.isdir(oDirName): error("output directory not a directory: '" + oDirName + "'")
#----------------------------------------------------------------
scripts = []
scriptNames = {}
scriptSrc = {}
scriptMin = {}
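    # build the list of source files to merge, in the order they must be loaded by the target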
includedFiles = []
includedFiles.append("modjewel.js")
entries = os.listdir(os.path.join(srcDirName, "weinre/common"))
for entry in entries:
includedFiles.append("weinre/common/%s" % entry)
entries = os.listdir(os.path.join(srcDirName, "weinre/target"))
for entry in entries:
includedFiles.append("weinre/target/%s" % entry)
includedFiles.append("interfaces/all-json-idls-min.js")
for includedFile in includedFiles:
baseScriptFile = includedFile
scriptFile = os.path.join(srcDirName, baseScriptFile)
if not os.path.exists(scriptFile):
error("script file not found: '" + scriptFile + "'")
scripts.append(scriptFile)
scriptNames[scriptFile] = baseScriptFile
with open(scriptFile, "r") as iFile:
scriptSrc[scriptFile] = iFile.read()
scriptMin[scriptFile] = min(scriptSrc[scriptFile])
# log("read: %s" % scriptFile)
#----------------------------------------------------------------
oFileName = os.path.join(oDirName, "target-script.js")
writeMergedFile(oFileName, scripts, scriptNames, scriptSrc, True)
#----------------------------------------------------------------
oFileName = os.path.join(oDirName, "target-script-min.js")
writeMergedFile(oFileName, scripts, scriptNames, scriptMin, False)
#--------------------------------------------------------------------
def writeMergedFile(oFileName, scripts, scriptNames, srcs, useEval):
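    # Concatenate the scripts into a single IIFE; with useEval each module is wrapped in eval()
    # plus a sourceURL comment so it remains debuggable in the browser.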
lines = []
licenseFile = os.path.join(os.path.dirname(__file__), "..", "LICENSE-header.js")
with open(licenseFile, "r") as iFile:
lines.append(iFile.read())
lines.append(";(function(){")
for script in scripts:
src = srcs[script]
srcName = scriptNames[script]
if not useEval:
lines.append("// %s" % srcName)
lines.append(src)
lines.append(";")
else:
src = "%s\n//@ sourceURL=%s" % (src, srcName)
lines.append(";eval(%s)" % json.dumps(src))
if srcName == "modjewel.js":
lines.append("modjewel.require('modjewel').warnOnRecursiveRequire(true);")
if not useEval:
lines.append("")
lines.append("// modjewel.require('weinre/common/Weinre').showNotImplemented();")
lines.append("modjewel.require('weinre/target/Target').main()")
lines.append("})();")
targetScript = "\n".join(lines)
with open(oFileName, "w") as oFile:
oFile.write(targetScript)
log("generated: %s" % oFileName)
#--------------------------------------------------------------------
def min(script):
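    # Regex-based JavaScript "minifier": strips C- and C++-style comments, leading indentation,
    # and blank lines (note: this function shadows the builtin min).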
patternCommentC = re.compile(r"/\*.*?\*/", re.MULTILINE + re.DOTALL)
patternCommentCPP = re.compile(r"(?<!\\)//.*?$", re.MULTILINE)
patternIndent = re.compile(r"^\s*", re.MULTILINE)
patternBlankLine = re.compile(r"^\s*\n", re.MULTILINE)
script = patternCommentC.sub( "", script)
script = patternCommentCPP.sub( "", script)
script = patternIndent.sub( "", script)
script = patternBlankLine.sub( "", script)
return script
#--------------------------------------------------------------------
def log(message):
message = "%s: %s" % (PROGRAM_NAME, message)
print >>sys.stderr, message
#--------------------------------------------------------------------
def error(message):
log(message)
sys.exit(-1)
#--------------------------------------------------------------------
PROGRAM_NAME = os.path.basename(sys.argv[0])
main()
| kyungmi/webida-server | src/ext/cordova-weinre/weinre.build/scripts/build-target-scripts.py | Python | apache-2.0 | 5,339 |
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from hcat import hcat
from ambari_commons import OSConst
from ambari_commons.os_family_impl import OsFamilyImpl
from resource_management.core.logger import Logger
from resource_management.core.exceptions import ClientComponentHasNoStatus
from resource_management.libraries.functions import stack_select
from resource_management.libraries.functions import StackFeature
from resource_management.libraries.functions.stack_features import check_stack_feature
from resource_management.libraries.script.script import Script
class HCatClient(Script):
def install(self, env):
import params
self.install_packages(env)
self.configure(env)
def configure(self, env):
import params
env.set_params(params)
hcat()
def status(self, env):
raise ClientComponentHasNoStatus()
@OsFamilyImpl(os_family=OSConst.WINSRV_FAMILY)
class HCatClientWindows(HCatClient):
pass
@OsFamilyImpl(os_family=OsFamilyImpl.DEFAULT)
class HCatClientDefault(HCatClient):
def get_component_name(self):
# HCat client doesn't have a first-class entry in <stack-selector-tool>. Since clients always
# update after daemons, this ensures that the hcat directories are correct on hosts
# which do not include the WebHCat daemon
return "hive-webhcat"
def pre_upgrade_restart(self, env, upgrade_type=None):
"""
Execute <stack-selector-tool> before reconfiguring this client to the new stack version.
:param env:
:param upgrade_type:
:return:
"""
Logger.info("Executing Hive HCat Client Stack Upgrade pre-restart")
import params
env.set_params(params)
# this function should not execute if the stack version does not support rolling upgrade
if not (params.version and check_stack_feature(StackFeature.ROLLING_UPGRADE, params.version)):
return
# HCat client doesn't have a first-class entry in <stack-selector-tool>. Since clients always
# update after daemons, this ensures that the hcat directories are correct on hosts
# which do not include the WebHCat daemon
stack_select.select("hive-webhcat", params.version)
if __name__ == "__main__":
HCatClient().execute()
| radicalbit/ambari | contrib/management-packs/odpi-ambari-mpack/src/main/resources/stacks/ODPi/2.0/services/HIVE/package/scripts/hcat_client.py | Python | apache-2.0 | 2,941 |