# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import sys
import numpy as np
import tensorflow as tf
import resnet_model
HEIGHT = 32
WIDTH = 32
DEPTH = 3
NUM_CLASSES = 10
NUM_DATA_BATCHES = 5
NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN = 10000 * NUM_DATA_BATCHES
NUM_EXAMPLES_PER_EPOCH_FOR_EVAL = 10000
parser = argparse.ArgumentParser()
# Basic model parameters.
parser.add_argument('--data_dir', type=str, default='/tmp/cifar10_data',
help='The path to the CIFAR-10 data directory.')
parser.add_argument('--model_dir', type=str, default='/tmp/cifar10_model',
help='The directory where the model will be stored.')
parser.add_argument('--resnet_size', type=int, default=32,
help='The size of the ResNet model to use.')
parser.add_argument('--train_steps', type=int, default=100000,
help='The number of batches to train.')
parser.add_argument('--steps_per_eval', type=int, default=4000,
help='The number of batches to run in between evaluations.')
parser.add_argument('--batch_size', type=int, default=128,
help='The number of images per batch.')
FLAGS = parser.parse_args()
# Scale the learning rate linearly with the batch size. When the batch size is
# 128, the learning rate should be 0.1.
_INITIAL_LEARNING_RATE = 0.1 * FLAGS.batch_size / 128
_MOMENTUM = 0.9
# We use a weight decay of 0.0002, which performs better than the 0.0001 that
# was originally suggested.
_WEIGHT_DECAY = 2e-4
_BATCHES_PER_EPOCH = NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN / FLAGS.batch_size
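# Worked example (illustrative): with --batch_size=256, the values above become
# _INITIAL_LEARNING_RATE = 0.1 * 256 / 128 = 0.2 and
# _BATCHES_PER_EPOCH = 50000 / 256 = 195.3125.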
def record_dataset(filenames):
"""Returns an input pipeline Dataset from `filenames`."""
record_bytes = HEIGHT * WIDTH * DEPTH + 1
return tf.contrib.data.FixedLengthRecordDataset(filenames, record_bytes)
def filenames(mode):
"""Returns a list of filenames based on 'mode'."""
data_dir = os.path.join(FLAGS.data_dir, 'cifar-10-batches-bin')
assert os.path.exists(data_dir), ('Run cifar10_download_and_extract.py first '
'to download and extract the CIFAR-10 data.')
if mode == tf.estimator.ModeKeys.TRAIN:
return [
os.path.join(data_dir, 'data_batch_%d.bin' % i)
for i in range(1, NUM_DATA_BATCHES + 1)
]
elif mode == tf.estimator.ModeKeys.EVAL:
return [os.path.join(data_dir, 'test_batch.bin')]
else:
raise ValueError('Invalid mode: %s' % mode)
def dataset_parser(value):
"""Parse a CIFAR-10 record from value."""
# Every record consists of a label followed by the image, with a fixed number
# of bytes for each.
label_bytes = 1
image_bytes = HEIGHT * WIDTH * DEPTH
record_bytes = label_bytes + image_bytes
# Convert from a string to a vector of uint8 that is record_bytes long.
raw_record = tf.decode_raw(value, tf.uint8)
# The first byte represents the label, which we convert from uint8 to int32.
label = tf.cast(raw_record[0], tf.int32)
# The remaining bytes after the label represent the image, which we reshape
# from [depth * height * width] to [depth, height, width].
depth_major = tf.reshape(raw_record[label_bytes:record_bytes],
[DEPTH, HEIGHT, WIDTH])
# Convert from [depth, height, width] to [height, width, depth], and cast as
# float32.
image = tf.cast(tf.transpose(depth_major, [1, 2, 0]), tf.float32)
return image, tf.one_hot(label, NUM_CLASSES)
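# For clarity, the same record layout expressed with plain NumPy. This helper is
# illustrative only and is not used by the tf.contrib.data pipeline; it assumes
# `record` is one raw 3073-byte string read from a CIFAR-10 .bin file.
def _parse_record_with_numpy(record):
  raw = np.frombuffer(record, dtype=np.uint8)
  label = int(raw[0])  # first byte is the class label
  # Remaining 3072 bytes are the image in depth-major [depth, height, width] order.
  image = raw[1:].reshape(DEPTH, HEIGHT, WIDTH)
  # Reorder to [height, width, depth] and cast to float32, mirroring dataset_parser().
  image = image.transpose(1, 2, 0).astype(np.float32)
  return image, label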
def train_preprocess_fn(image, label):
"""Preprocess a single training image of layout [height, width, depth]."""
# Resize the image to add four extra pixels on each side.
image = tf.image.resize_image_with_crop_or_pad(image, HEIGHT + 8, WIDTH + 8)
# Randomly crop a [HEIGHT, WIDTH] section of the image.
image = tf.random_crop(image, [HEIGHT, WIDTH, DEPTH])
# Randomly flip the image horizontally.
image = tf.image.random_flip_left_right(image)
return image, label
def input_fn(mode, batch_size):
"""Input_fn using the contrib.data input pipeline for CIFAR-10 dataset.
Args:
mode: Standard names for model modes (tf.estimators.ModeKeys).
batch_size: The number of samples per batch of input requested.
"""
dataset = record_dataset(filenames(mode))
# For training repeat forever.
if mode == tf.estimator.ModeKeys.TRAIN:
dataset = dataset.repeat()
dataset = dataset.map(dataset_parser, num_threads=1,
output_buffer_size=2 * batch_size)
# For training, preprocess the image and shuffle.
if mode == tf.estimator.ModeKeys.TRAIN:
dataset = dataset.map(train_preprocess_fn, num_threads=1,
output_buffer_size=2 * batch_size)
# Ensure that the capacity is sufficiently large to provide good random
# shuffling.
buffer_size = int(NUM_EXAMPLES_PER_EPOCH_FOR_TRAIN * 0.4) + 3 * batch_size
dataset = dataset.shuffle(buffer_size=buffer_size)
# Subtract off the mean and divide by the variance of the pixels.
dataset = dataset.map(
lambda image, label: (tf.image.per_image_standardization(image), label),
num_threads=1,
output_buffer_size=2 * batch_size)
# Batch results by up to batch_size, and then fetch the tuple from the
# iterator.
iterator = dataset.batch(batch_size).make_one_shot_iterator()
images, labels = iterator.get_next()
return images, labels
def cifar10_model_fn(features, labels, mode):
"""Model function for CIFAR-10."""
tf.summary.image('images', features, max_outputs=6)
network = resnet_model.cifar10_resnet_v2_generator(
FLAGS.resnet_size, NUM_CLASSES)
inputs = tf.reshape(features, [-1, HEIGHT, WIDTH, DEPTH])
logits = network(inputs, mode == tf.estimator.ModeKeys.TRAIN)
predictions = {
'classes': tf.argmax(logits, axis=1),
'probabilities': tf.nn.softmax(logits, name='softmax_tensor')
}
if mode == tf.estimator.ModeKeys.PREDICT:
return tf.estimator.EstimatorSpec(mode=mode, predictions=predictions)
# Calculate loss, which includes softmax cross entropy and L2 regularization.
cross_entropy = tf.losses.softmax_cross_entropy(
logits=logits, onehot_labels=labels)
# Create a tensor named cross_entropy for logging purposes.
tf.identity(cross_entropy, name='cross_entropy')
tf.summary.scalar('cross_entropy', cross_entropy)
# Add weight decay to the loss.
loss = cross_entropy + _WEIGHT_DECAY * tf.add_n(
[tf.nn.l2_loss(v) for v in tf.trainable_variables()])
if mode == tf.estimator.ModeKeys.TRAIN:
global_step = tf.train.get_or_create_global_step()
# Multiply the learning rate by 0.1 at 100, 150, and 200 epochs.
boundaries = [int(_BATCHES_PER_EPOCH * epoch) for epoch in [100, 150, 200]]
values = [_INITIAL_LEARNING_RATE * decay for decay in [1, 0.1, 0.01, 0.001]]
learning_rate = tf.train.piecewise_constant(
tf.cast(global_step, tf.int32), boundaries, values)
# Create a tensor named learning_rate for logging purposes
tf.identity(learning_rate, name='learning_rate')
tf.summary.scalar('learning_rate', learning_rate)
optimizer = tf.train.MomentumOptimizer(
learning_rate=learning_rate,
momentum=_MOMENTUM)
# Batch norm requires update ops to be added as a dependency to the train_op
update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
with tf.control_dependencies(update_ops):
train_op = optimizer.minimize(loss, global_step)
else:
train_op = None
accuracy = tf.metrics.accuracy(
tf.argmax(labels, axis=1), predictions['classes'])
metrics = {'accuracy': accuracy}
# Create a tensor named train_accuracy for logging purposes
tf.identity(accuracy[1], name='train_accuracy')
tf.summary.scalar('train_accuracy', accuracy[1])
return tf.estimator.EstimatorSpec(
mode=mode,
predictions=predictions,
loss=loss,
train_op=train_op,
eval_metric_ops=metrics)
def main(unused_argv):
# Using the Winograd non-fused algorithms provides a small performance boost.
os.environ['TF_ENABLE_WINOGRAD_NONFUSED'] = '1'
cifar_classifier = tf.estimator.Estimator(
model_fn=cifar10_model_fn, model_dir=FLAGS.model_dir)
for cycle in range(FLAGS.train_steps // FLAGS.steps_per_eval):
tensors_to_log = {
'learning_rate': 'learning_rate',
'cross_entropy': 'cross_entropy',
'train_accuracy': 'train_accuracy'
}
logging_hook = tf.train.LoggingTensorHook(
tensors=tensors_to_log, every_n_iter=100)
cifar_classifier.train(
input_fn=lambda: input_fn(tf.estimator.ModeKeys.TRAIN,
batch_size=FLAGS.batch_size),
steps=FLAGS.steps_per_eval,
hooks=[logging_hook])
# Evaluate the model and print results
eval_results = cifar_classifier.evaluate(
input_fn=lambda: input_fn(tf.estimator.ModeKeys.EVAL,
batch_size=FLAGS.batch_size))
print(eval_results)
if __name__ == '__main__':
tf.logging.set_verbosity(tf.logging.INFO)
tf.app.run()
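# Example invocation (illustrative; the script filename is arbitrary). Run
# cifar10_download_and_extract.py first, then:
#   python cifar10_main.py --data_dir=/tmp/cifar10_data --model_dir=/tmp/cifar10_model \
#       --resnet_size=32 --batch_size=128 --train_steps=100000 --steps_per_eval=4000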
##### Versions of key Torps functions to implement congestion-aware Tor #####
# Re-implemented functions:
# - create_circuit():
# "Builds" k=3 circuits, measures congestion with m=5 circuit pings, and
# chooses the best one. Also makes m additional pings and stores the average.
# - client_assign_stream():
# If the stored average latency is > l=500ms, don't reuse the circuit.
# Ping after use and store the new average.
import pathsim
from random import choice
import stem
import collections
from models import *
### Congestion-aware Tor parameters ###
client_ip = '74.125.131.105' # www.google.com
num_pings_create = 5 # number of measurements taken when a circuit is created
num_pings_use = 5 # number of measurements to record after use
min_ping = 500 # maximum acceptable average ping (in milliseconds) for reusing a circuit
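# Illustrative sketch of the best-of-k selection described in the header comment:
# build k candidate circuits, ping each m times, and keep the one with the lowest
# average. Note that create_circuit() below builds a single path and only records
# its average ping; `build_candidate` and `measure_ping` are hypothetical callables,
# not part of Torps.
def pick_best_circuit(k, m, build_candidate, measure_ping):
    best_circ = None
    best_avg = None
    for _ in xrange(k):
        circ = build_candidate()
        avg = sum(measure_ping(circ) for _ in xrange(m)) / float(m)
        if (best_avg is None) or (avg < best_avg):
            best_circ = circ
            best_avg = avg
    return (best_circ, best_avg)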
def ping_circuit(client_ip, guard_node, middle_node, exit_node,\
cons_rel_stats, descriptors, congmodel, pdelmodel):
ping_time = 0
for node, coef in ((guard_node, 2), (middle_node, 2), (exit_node, 1)):
rel_stat = cons_rel_stats[node]
is_exit = (stem.Flag.EXIT in rel_stat.flags)
is_guard = (stem.Flag.GUARD in rel_stat.flags)
ping_time += coef*(congmodel.get_congestion(node,\
rel_stat.bandwidth, is_exit, is_guard))
# ca-tor subtracts minrtt from its pings to isolate congestion,
# so we don't actually want to include propagation delay here.
'''
guard_ip = descriptors[guard_node]
middle_ip = descriptors[middle_node]
exit_ip = descriptors[exit_node]
for ip, next_ip in ((client_ip, guard_ip), (guard_ip, middle_ip),\
(middle_ip, exit_ip)):
ping_time += pdelmodel.get_prop_delay(ip, next_ip)
ping_time += pdelmodel.get_prop_delay(next_ip, ip)
'''
return ping_time
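# Worked example (illustrative, assuming the congestion model reports values in
# milliseconds): with per-node congestion of 40 (guard), 25 (middle) and 60 (exit),
# ping_circuit() returns 2*40 + 2*25 + 1*60 = 190.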
def create_circuit(cons_rel_stats, cons_valid_after, cons_fresh_until,\
cons_bw_weights, cons_bwweightscale, descriptors, hibernating_status,\
guards, circ_time, circ_fast, circ_stable, circ_internal, circ_ip,\
circ_port,\
congmodel, pdelmodel, weighted_exits=None,\
exits_exact=False, weighted_middles=None, weighted_guards=None):
"""Creates path for requested circuit based on the input consensus
statuses and descriptors. Uses congestion-aware path selection.
Inputs:
cons_rel_stats: (dict) relay fingerprint keys and relay status vals
cons_valid_after: (int) timestamp of valid_after for consensus
cons_fresh_until: (int) timestamp of fresh_until for consensus
cons_bw_weights: (dict) bw_weights of consensus
cons_bwweightscale: (should be float()able) bwweightscale of consensus
descriptors: (dict) relay fingerprint keys and descriptor vals
hibernating_status: (dict) indicates hibernating relays
guards: (dict) contains guards of requesting client
circ_time: (int) timestamp of circuit request
circ_fast: (bool) all relays should be fast
circ_stable: (bool) all relays should be stable
circ_internal: (bool) circuit is for name resolution or hidden service
circ_ip: (str) IP address of destination (None if not known)
circ_port: (int) desired TCP port (None if not known)
congmodel: congestion model
pdelmodel: propagation delay model
weighted_exits: (list) (exit, cum_weight) pairs for exit position
exits_exact: (bool) Is weighted_exits exact or does it need rechecking?
weighted_exits is special because exits are chosen first and thus
don't depend on the other circuit positions, and so potentially are
precomputed exactly.
weighted_middles: (list) (middle, cum_weight) pairs for middle position
weighted_guards: (list) (guard, cum_weight) pairs for guard position
Output:
circuit: (dict) a newly created circuit with keys
'time': (int) seconds from time zero
'fast': (bool) relays must have Fast flag
'stable': (bool) relays must have Stable flag
'internal': (bool) is internal (e.g. for hidden service)
'dirty_time': (int) timestamp of time dirtied, None if clean
'path': (tuple) list in-order fingerprints for path's nodes
'covering': (list) ports with needs covered by circuit
'initial_avg_ping': (float) average ping time measured at creation
'avg_ping': (float) average ping time during most-recent use
"""
# 'cons_rel_stats': (dict) relay stats for active consensus
if (circ_time < cons_valid_after) or\
(circ_time >= cons_fresh_until):
raise ValueError('consensus not fresh for circ_time in create_circuit')
# select exit node
i = 1
while (True):
exit_node = pathsim.select_exit_node(cons_bw_weights,
cons_bwweightscale, cons_rel_stats, descriptors, circ_fast,
circ_stable, circ_internal, circ_ip, circ_port, weighted_exits,
exits_exact)
# exit_node = pathsim.select_weighted_node(weighted_exits)
if (not hibernating_status[exit_node]):
break
if pathsim._testing:
print('Exit selection #{0} is hibernating - retrying.'.\
format(i))
i += 1
if pathsim._testing:
print('Exit node: {0} [{1}]'.format(
cons_rel_stats[exit_node].nickname,
cons_rel_stats[exit_node].fingerprint))
# select guard node
# Hibernation status again checked here to reflect how in Tor
# new guards would be chosen and added to the list prior to a circuit-
# creation attempt. If the circuit fails at a new guard, that guard
# gets removed from the list.
while True:
# get first <= num_guards guards suitable for circuit
circ_guards = pathsim.get_guards_for_circ(cons_bw_weights,\
cons_bwweightscale, cons_rel_stats, descriptors,\
circ_fast, circ_stable, guards,\
exit_node, circ_time, weighted_guards)
guard_node = choice(circ_guards)
if (hibernating_status[guard_node]):
if (not guards[guard_node]['made_contact']):
if pathsim._testing:
print(\
'[Time {0}]: Removing new hibernating guard: {1}.'.\
format(circ_time, cons_rel_stats[guard_node].nickname))
del guards[guard_node]
elif (guards[guard_node]['unreachable_since'] != None):
if pathsim._testing:
print(\
'[Time {0}]: Guard retried but hibernating: {1}'.\
format(circ_time, cons_rel_stats[guard_node].nickname))
guards[guard_node]['last_attempted'] = circ_time
else:
if pathsim._testing:
print('[Time {0}]: Guard newly hibernating: {1}'.\
format(circ_time, \
cons_rel_stats[guard_node].nickname))
guards[guard_node]['unreachable_since'] = circ_time
guards[guard_node]['last_attempted'] = circ_time
else:
guards[guard_node]['unreachable_since'] = None
guards[guard_node]['made_contact'] = True
break
if pathsim._testing:
print('Guard node: {0} [{1}]'.format(
cons_rel_stats[guard_node].nickname,
cons_rel_stats[guard_node].fingerprint))
# select middle node
# As with exit selection, hibernating status checked here to mirror Tor
# selecting middle, having the circuit fail, reselecting a path,
# and attempting circuit creation again.
i = 1
while (True):
middle_node = pathsim.select_middle_node(cons_bw_weights,\
cons_bwweightscale, cons_rel_stats, descriptors, circ_fast,\
circ_stable, exit_node, guard_node, weighted_middles)
if (not hibernating_status[middle_node]):
break
if pathsim._testing:
print(\
'Middle selection #{0} is hibernating - retrying.'.format(i))
i += 1
if pathsim._testing:
print('Middle node: {0} [{1}]'.format(
cons_rel_stats[middle_node].nickname,
cons_rel_stats[middle_node].fingerprint))
cum_ping_time = 0
if pathsim._testing: print 'Doing {0} circuit pings on creation... '.format(num_pings_create),
for i in xrange(num_pings_create):
cum_ping_time += ping_circuit(client_ip, guard_node, middle_node,\
exit_node, cons_rel_stats, descriptors, congmodel, pdelmodel)
avg_ping_time = float(cum_ping_time)/num_pings_create
if pathsim._testing: print "ave congestion is {0}".format(avg_ping_time)
return {'time':circ_time,\
'fast':circ_fast,\
'stable':circ_stable,\
'internal':circ_internal,\
'dirty_time':None,\
'path':(guard_node, middle_node, exit_node),\
# 'cons_rel_stats':cons_rel_stats,\
'covering':[],\
'initial_avg_ping':avg_ping_time,
'avg_ping':None}
def client_assign_stream(client_state, stream, cons_rel_stats,\
cons_valid_after, cons_fresh_until, cons_bw_weights, cons_bwweightscale,\
descriptors, hibernating_status, stream_weighted_exits,\
weighted_middles, weighted_guards, congmodel, pdelmodel):
"""Assigns a stream to a circuit for a given client.
Stores circuit measurements (pings) as would be measured during use."""
guards = client_state['guards']
stream_assigned = None
# find dirty circuit with fastest initial_avg_ping
for circuit in client_state['dirty_exit_circuits']:
if ((circuit['avg_ping'] == None) or\
(circuit['avg_ping'] <= min_ping)) and\
pathsim.circuit_supports_stream(circuit, stream, descriptors) and\
((stream_assigned == None) or\
(stream_assigned['initial_avg_ping'] > \
circuit['initial_avg_ping'])):
stream_assigned = circuit
# look for clean circuit with faster initial_avg_ping
for circuit in client_state['clean_exit_circuits']:
if ((circuit['avg_ping'] == None) or\
(circuit['avg_ping'] <= min_ping)) and\
pathsim.circuit_supports_stream(circuit, stream, descriptors) and\
((stream_assigned == None) or\
(stream_assigned['initial_avg_ping'] > \
circuit['initial_avg_ping'])):
stream_assigned = circuit
if (stream_assigned != None):
# if circuit is clean, move to dirty list
if (stream_assigned['dirty_time'] == None):
new_clean_exit_circuits = collections.deque()
while (len(client_state['clean_exit_circuits']) > 0):
circuit = client_state['clean_exit_circuits'].popleft()
if (circuit == stream_assigned):
circuit['dirty_time'] = stream['time']
client_state['dirty_exit_circuits'].appendleft(circuit)
new_clean_exit_circuits.extend(\
client_state['clean_exit_circuits'])
client_state['clean_exit_circuits'].clear()
if pathsim._testing:
if (stream['type'] == 'connect'):
print('Assigned CONNECT stream to port {0} to \
clean circuit at {1}'.format(stream['port'], stream['time']))
elif (stream['type'] == 'resolve'):
print('Assigned RESOLVE stream to clean circuit \
at {0}'.format(stream['time']))
else:
print('Assigned unrecognized stream to clean \
circuit at {0}'.format(stream['time']))
# reduce cover count for covered port needs
pathsim.uncover_circuit_ports(circuit,\
client_state['port_needs_covered'])
else:
new_clean_exit_circuits.append(circuit)
client_state['clean_exit_circuits'] = new_clean_exit_circuits
else:
if pathsim._testing:
if (stream['type'] == 'connect'):
print('Assigned CONNECT stream to port {0} to \
dirty circuit at {1}'.format(stream['port'], stream['time']))
elif (stream['type'] == 'resolve'):
print('Assigned RESOLVE stream to dirty circuit \
at {0}'.format(stream['time']))
else:
print('Assigned unrecognized stream to dirty circuit \
at {0}'.format(stream['time']))
# if stream still unassigned we must make new circuit
if (stream_assigned == None):
new_circ = None
if (stream['type'] == 'connect'):
stable = (stream['port'] in pathsim.TorOptions.long_lived_ports)
new_circ = create_circuit(cons_rel_stats,\
cons_valid_after, cons_fresh_until,\
cons_bw_weights, cons_bwweightscale,\
descriptors, hibernating_status, guards, stream['time'], True,\
stable, False, stream['ip'], stream['port'],\
congmodel, pdelmodel,\
stream_weighted_exits, False,\
weighted_middles, weighted_guards)
elif (stream['type'] == 'resolve'):
new_circ = create_circuit(cons_rel_stats,\
cons_valid_after, cons_fresh_until,\
cons_bw_weights, cons_bwweightscale,\
descriptors, hibernating_status, guards, stream['time'], True,\
False, False, None, None,\
congmodel, pdelmodel,\
stream_weighted_exits, True,\
weighted_middles, weighted_guards)
else:
raise ValueError('Unrecognized stream in client_assign_stream(): \
{0}'.format(stream['type']))
new_circ['dirty_time'] = stream['time']
stream_assigned = new_circ
client_state['dirty_exit_circuits'].appendleft(new_circ)
if pathsim._testing:
if (stream['type'] == 'connect'):
print('Created circuit at time {0} to cover CONNECT \
stream to ip {1} and port {2}.'.format(stream['time'], stream['ip'],\
stream['port']))
elif (stream['type'] == 'resolve'):
print('Created circuit at time {0} to cover RESOLVE \
stream.'.format(stream['time']))
else:
print('Created circuit at time {0} to cover unrecognized \
stream.'.format(stream['time']))
if pathsim._testing: print 'Doing {0} circuit pings on use... '.format(num_pings_use),
cum_ping_time = 0
guard_node = stream_assigned['path'][0]
middle_node = stream_assigned['path'][1]
exit_node = stream_assigned['path'][2]
for i in xrange(num_pings_use):
cum_ping_time += ping_circuit(client_ip, guard_node, middle_node,\
exit_node, cons_rel_stats, descriptors, congmodel, pdelmodel)
stream_assigned['avg_ping'] = float(cum_ping_time)/num_pings_use
if pathsim._testing: print "ave congestion is {0}".format(stream_assigned['avg_ping'])
return stream_assigned
#
# Copyright (c) 2015 Red Hat
# Licensed under The MIT License (MIT)
# http://opensource.org/licenses/MIT
#
from rest_framework import serializers
from django.core.exceptions import FieldError
from pdc.apps.common.fields import ChoiceSlugField
from pdc.apps.common import models as common_models
from pdc.apps.common.serializers import StrictSerializerMixin
from .models import (Product, ProductVersion, Release,
BaseProduct, ReleaseType, Variant,
VariantArch, VariantType, ReleaseGroup, ReleaseGroupType)
from . import signals
class ProductSerializer(StrictSerializerMixin, serializers.ModelSerializer):
product_versions = serializers.SlugRelatedField(
many=True,
read_only=True,
source='productversion_set',
slug_field='product_version_id'
)
active = serializers.BooleanField(read_only=True)
class Meta:
model = Product
fields = ('name', 'short', 'active', 'product_versions')
class ProductVersionSerializer(StrictSerializerMixin, serializers.ModelSerializer):
product_version_id = serializers.CharField(read_only=True)
active = serializers.BooleanField(read_only=True)
releases = serializers.SerializerMethodField()
product = serializers.SlugRelatedField(slug_field='short',
queryset=Product.objects.all())
class Meta:
model = ProductVersion
fields = ('name', 'short', 'version', 'active', 'product_version_id', 'product', 'releases')
def to_internal_value(self, data):
if not self.partial and 'short' not in data:
data['short'] = data.get('product')
return super(ProductVersionSerializer, self).to_internal_value(data)
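# Illustrative request body: {"name": "Fedora", "product": "fedora", "version": "21"};
# with 'short' omitted it defaults to the product slug, i.e. "fedora".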
def get_releases(self, obj):
"""[release_id]"""
return [x.release_id for x in sorted(obj.release_set.all(), key=Release.version_sort_key)]
class ReleaseSerializer(StrictSerializerMixin, serializers.ModelSerializer):
release_type = ChoiceSlugField(slug_field='short',
queryset=ReleaseType.objects.all())
release_id = serializers.CharField(read_only=True)
compose_set = serializers.SerializerMethodField()
base_product = serializers.SlugRelatedField(slug_field='base_product_id',
queryset=BaseProduct.objects.all(),
required=False,
default=None,
allow_null=True)
product_version = serializers.SlugRelatedField(slug_field='product_version_id',
queryset=ProductVersion.objects.all(),
required=False,
allow_null=True,
default=None)
active = serializers.BooleanField(default=True)
integrated_with = serializers.SlugRelatedField(slug_field='release_id',
queryset=Release.objects.all(),
required=False,
allow_null=True,
default=None)
class Meta:
model = Release
fields = ('release_id', 'short', 'version', 'name', 'base_product',
'active', 'product_version', 'release_type',
'compose_set', 'integrated_with')
def get_compose_set(self, obj):
"""[Compose.compose_id]"""
return [compose.compose_id for compose in sorted(obj.get_all_composes())]
def create(self, validated_data):
signals.release_serializer_extract_data.send(sender=self, validated_data=validated_data)
obj = super(ReleaseSerializer, self).create(validated_data)
signals.release_serializer_post_create.send(sender=self, release=obj)
return obj
def update(self, instance, validated_data):
signals.release_serializer_extract_data.send(sender=self, validated_data=validated_data)
obj = super(ReleaseSerializer, self).update(instance, validated_data)
signals.release_serializer_post_update.send(sender=self, release=obj)
if hasattr(instance, 'pk'):
# reload to make sure changes in mapping are reflected
obj = Release.objects.get(pk=obj.pk)
# By default, PUT does not erase optional fields if they are not specified. This
# loops over all optional fields and resets them manually.
if not self.partial:
for field_name, field in self.fields.iteritems():
if not field.read_only and field_name not in validated_data:
attr = field.source or field_name
try:
if hasattr(obj, attr):
setattr(obj, attr, None)
except ValueError:
pass
obj.save()
return obj
class BaseProductSerializer(StrictSerializerMixin, serializers.ModelSerializer):
base_product_id = serializers.CharField(read_only=True)
release_type = ChoiceSlugField(slug_field='short', queryset=ReleaseType.objects.all())
class Meta:
model = BaseProduct
fields = ('base_product_id', 'short', 'version', 'name', 'release_type')
class ReleaseTypeSerializer(StrictSerializerMixin, serializers.ModelSerializer):
short = serializers.CharField()
name = serializers.CharField()
suffix = serializers.CharField()
class Meta:
model = ReleaseType
fields = ("short", "name", "suffix",)
class VariantArchNestedSerializer(serializers.BaseSerializer):
doc_format = "string"
def to_representation(self, obj):
return obj.arch.name
def to_internal_value(self, data, files=None):
try:
arch = common_models.Arch.objects.get(name=data)
return VariantArch(arch=arch)
except common_models.Arch.DoesNotExist:
raise FieldError('No such arch: "%s".' % data)
class ReleaseVariantSerializer(StrictSerializerMixin, serializers.ModelSerializer):
type = ChoiceSlugField(source='variant_type', slug_field='name',
queryset=VariantType.objects.all())
release = serializers.SlugRelatedField(slug_field='release_id',
queryset=Release.objects.all())
id = serializers.CharField(source='variant_id')
uid = serializers.CharField(source='variant_uid')
name = serializers.CharField(source='variant_name')
arches = VariantArchNestedSerializer(source='variantarch_set',
many=True)
key_combination_error = 'add_arches/remove_arches can not be combined with arches.'
extra_fields = ['add_arches', 'remove_arches']
class Meta:
model = Variant
fields = ('release', 'id', 'uid', 'name', 'type', 'arches')
def to_internal_value(self, data):
# Save value of attributes not directly corresponding to serializer
# fields. We can't rely on data dict to be mutable, so the values can
# not be removed from it.
self.add_arches = data.get('add_arches', None)
self.remove_arches = data.get('remove_arches', None)
return super(ReleaseVariantSerializer, self).to_internal_value(data)
def update(self, instance, validated_data):
arches = validated_data.pop('variantarch_set', [])
instance = super(ReleaseVariantSerializer, self).update(instance, validated_data)
if arches:
if self.add_arches or self.remove_arches:
raise FieldError(self.key_combination_error)
# If arches were completely specified, try first to remove unwanted
# arches, then create new ones.
requested = dict([(x.arch.name, x) for x in arches])
for variant in instance.variantarch_set.all():
if variant.arch.name in requested:
del requested[variant.arch.name]
else:
variant.delete()
for arch in requested.values():
arch.variant = instance
arch.save()
# These loops can only do something on partial update: when doing PUT,
# "arches" is required and if any of the other arch modifications were
# specified, an exception would be raised above.
for arch_name in self.add_arches or []:
arch = common_models.Arch.objects.get(name=arch_name)
vararch = VariantArch(arch=arch, variant=instance)
vararch.save()
for arch_name in self.remove_arches or []:
instance.variantarch_set.filter(arch__name=arch_name).delete()
return instance
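# Illustrative payloads (arch names are examples): a PUT must send the full list,
# e.g. {"arches": ["x86_64", "s390x"]}, which deletes any existing arch not listed
# and creates the missing ones; a PATCH may instead send {"add_arches": ["ppc64"]}
# or {"remove_arches": ["s390x"]}, but combining either with "arches" raises the
# key_combination_error above.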
class VariantTypeSerializer(StrictSerializerMixin, serializers.ModelSerializer):
class Meta:
model = VariantType
fields = ('name',)
class ReleaseGroupSerializer(StrictSerializerMixin, serializers.ModelSerializer):
description = serializers.CharField(required=True)
type = ChoiceSlugField(slug_field='name',
queryset=ReleaseGroupType.objects.all())
releases = ChoiceSlugField(slug_field='release_id',
many=True, queryset=Release.objects.all(),
allow_null=True, required=False, default=[])
active = serializers.BooleanField(default=True)
class Meta:
model = ReleaseGroup
fields = ('name', 'description', 'type', 'releases', 'active')
def to_internal_value(self, data):
releases = data.get('releases', [])
for release in releases:
try:
Release.objects.get(release_id=release)
except Release.DoesNotExist:
raise serializers.ValidationError({'detail': 'release %s does not exist' % release})
return super(ReleaseGroupSerializer, self).to_internal_value(data)
"""Common functionality."""
import os.path
import tempfile
import shutil
import sys
import six
from nose2.compat import unittest
from nose2 import discover, util
HERE = os.path.abspath(os.path.dirname(__file__))
SUPPORT = os.path.join(HERE, 'functional', 'support')
class TestCase(unittest.TestCase):
"""TestCase extension.
If the class variable _RUN_IN_TEMP is True (default: False), tests will be
performed in a temporary directory, which is deleted afterwards.
"""
_RUN_IN_TEMP = False
def setUp(self):
super(TestCase, self).setUp()
if self._RUN_IN_TEMP:
self._orig_dir = os.getcwd()
work_dir = self._work_dir = tempfile.mkdtemp()
os.chdir(self._work_dir)
# Make sure it's possible to import modules from current directory
sys.path.insert(0, work_dir)
def tearDown(self):
super(TestCase, self).tearDown()
if self._RUN_IN_TEMP:
os.chdir(self._orig_dir)
shutil.rmtree(self._work_dir, ignore_errors=True)
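# Illustrative subclass (kept as a comment so it is not picked up by discovery):
#
#   class MyIsolatedTest(TestCase):
#       _RUN_IN_TEMP = True  # setUp() chdirs into a fresh temp dir,
#                            # tearDown() restores cwd and removes it
#
#       def test_writes_scratch_file(self):
#           with open('scratch.txt', 'w') as fh:
#               fh.write('data')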
class FunctionalTestCase(unittest.TestCase):
tags = ['functional']
def assertTestRunOutputMatches(self, proc, stdout=None, stderr=None):
cmd_stdout, cmd_stderr = None, None
try:
cmd_stdout, cmd_stderr = self._output[proc.pid]
except AttributeError:
self._output = {}
except KeyError:
pass
if cmd_stdout is None:
cmd_stdout, cmd_stderr = proc.communicate()
self._output[proc.pid] = cmd_stdout, cmd_stderr
testf = self.assertRegex if hasattr(self, 'assertRegex') \
else self.assertRegexpMatches
if stdout:
testf(util.safe_decode(cmd_stdout), stdout)
if stderr:
testf(util.safe_decode(cmd_stderr), stderr)
def runIn(self, testdir, *args, **kw):
return run_nose2(*args, cwd=testdir, **kw)
class _FakeEventBase(object):
"""Baseclass for fake Events."""
def __init__(self):
self.handled = False
self.version = '0.1'
self.metadata = {}
class FakeHandleFileEvent(_FakeEventBase):
"""Fake HandleFileEvent."""
def __init__(self, name):
super(FakeHandleFileEvent, self).__init__()
self.loader = Stub() # FIXME
self.name = name
self.path = os.path.split(name)[1]
self.extraTests = []
class FakeStartTestEvent(_FakeEventBase):
"""Fake StartTestEvent."""
def __init__(self, test):
super(FakeStartTestEvent, self).__init__()
self.test = test
self.result = test.defaultTestResult()
import time
self.startTime = time.time()
class FakeLoadFromNameEvent(_FakeEventBase):
"""Fake LoadFromNameEvent."""
def __init__(self, name):
super(FakeLoadFromNameEvent, self).__init__()
self.name = name
class FakeLoadFromNamesEvent(_FakeEventBase):
"""Fake LoadFromNamesEvent."""
def __init__(self, names):
super(FakeLoadFromNamesEvent, self).__init__()
self.names = names
class FakeStartTestRunEvent(_FakeEventBase):
"""Fake StartTestRunEvent"""
def __init__(self, runner=None, suite=None, result=None, startTime=None,
executeTests=None):
super(FakeStartTestRunEvent, self).__init__()
self.suite = suite
self.runner = runner
self.result = result
self.startTime = startTime
self.executeTests = executeTests
class Stub(object):
"""Stub object for use in tests"""
def __getattr__(self, attr):
return Stub()
def __call__(self, *arg, **kw):
return Stub()
def support_file(*path_parts):
return os.path.abspath(os.path.join(SUPPORT, *path_parts))
def run_nose2(*nose2_args, **nose2_kwargs):
if 'cwd' in nose2_kwargs:
cwd = nose2_kwargs.pop('cwd')
if not os.path.isabs(cwd):
nose2_kwargs['cwd'] = support_file(cwd)
if 'module' not in nose2_kwargs:
nose2_kwargs['module'] = None
return NotReallyAProc(nose2_args, **nose2_kwargs)
class NotReallyAProc(object):
def __init__(self, args, cwd=None, **kwargs):
self.args = args
self.chdir = cwd
self.kwargs = kwargs
def __enter__(self):
self._stdout = sys.__stdout__
self._stderr = sys.__stderr__
self.cwd = os.getcwd()
if self.chdir:
os.chdir(self.chdir)
self.stdout = sys.stdout = sys.__stdout__ = six.StringIO()
self.stderr = sys.stderr = sys.__stderr__ = six.StringIO()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
sys.stdout = sys.__stdout__ = self._stdout
sys.stderr = sys.__stderr__ = self._stderr
if self.chdir:
os.chdir(self.cwd)
return False
def communicate(self):
with self:
try:
self.result = discover(
argv=('nose2',) + self.args, exit=False,
**self.kwargs)
except SystemExit as e:
return "", "EXIT CODE %s" % str(e)
return self.stdout.getvalue(), self.stderr.getvalue()
@property
def pid(self):
return id(self)
def poll(self):
return not self.result.result.wasSuccessful()
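# Illustrative use from a FunctionalTestCase subclass (the scenario directory name
# is hypothetical):
#
#   proc = self.runIn('scenario/one_test', '-v')
#   self.assertTestRunOutputMatches(proc, stderr='Ran 1 test')
#   self.assertEqual(proc.poll(), 0)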
class RedirectStdStreams(object):
"""
Context manager that replaces the stdin/out streams with StringIO
buffers.
"""
def __init__(self):
self.stdout = six.StringIO()
self.stderr = six.StringIO()
def __enter__(self):
self.old_stdout, self.old_stderr = sys.stdout, sys.stderr
self.old_stdout.flush()
self.old_stderr.flush()
sys.stdout, sys.stderr = self.stdout, self.stderr
return self
def __exit__(self, exc_type, exc_value, traceback):
self.stdout.flush()
self.stderr.flush()
sys.stdout = self.old_stdout
sys.stderr = self.old_stderr
# Mock multiprocessing Connection
class Conn(object):
def __init__(self, items):
self.items = items
self.sent = []
self.closed = False
def recv(self):
if self.closed:
raise EOFError("closed")
try:
return self.items.pop(0)
except IndexError:
raise EOFError("EOF")
def send(self, item):
self.sent.append(item)
def close(self):
self.closed = True
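# Illustrative use of the fake Connection: preload the items to be "received" and
# inspect what the code under test sent back.
#
#   conn = Conn(['result-1'])
#   conn.recv()        # -> 'result-1'
#   conn.send('ack')   # conn.sent == ['ack']
#   conn.recv()        # raises EOFError once items are exhausted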
from __future__ import unicode_literals
from tests.utils import ConverterTestCase
class TypeReferencingCases:
#######################################################################
# This base class defines a spanning set of type referencing cases.
# By subclassing and providing a template, a wide range of referencing
# interactions can be tested.
#######################################################################
# Primitive type
def test_primitive(self):
self.assert_example(data='int', usage='int', result='int', value='42', arg='val', py_value='42')
def test_primitive_ref(self):
self.assert_example(data='int', usage='int&', result='int', value='42', arg='val', py_value='42')
def test_primitive_pointer(self):
self.assert_example(data='int', usage='int*', result='int*', value='42', arg='&val', py_value='42')
def test_primitive_multi_pointer(self):
self.assert_example(data='int*', usage='int**', result='int**', value='0', arg='&val', py_value='0')
#----------------------------------------------------------------------
# Explicitly referenced enum in the same namespace
def test_same_ns_explicit_enum(self):
self.assert_example(data='C::D::F', usage='C::D::F', result='C::D::F', value='C::D::N', arg='val', imports=('F',), py_value='F.N')
def test_same_ns_explicit_enum_ref(self):
self.assert_example(data='C::D::F', usage='C::D::F&', result='C::D::F', value='C::D::N', arg='val', imports=('F',), py_value='F.N')
def test_same_ns_explicit_enum_pointer(self):
self.assert_example(data='C::D::F', usage='C::D::F*', result='C::D::F*', value='C::D::N', arg='&val', imports=('F',), py_value='F.N')
def test_same_ns_explicit_enum_multi_pointer(self):
self.assert_example(data='C::D::F*', usage='C::D::F**', result='C::D::F**', value='0', arg='&val', py_value='0')
#----------------------------------------------------------------------
# Implicitly referenced enum in the same namespace
def test_same_ns_implicit_enum(self):
self.assert_example(data='C::D::F', usage='F', result='C::D::F', value='C::D::N', arg='val', imports=('F',), py_value='F.N')
def test_same_ns_implicit_enum_ref(self):
self.assert_example(data='C::D::F', usage='F&', result='C::D::F', value='C::D::N', arg='val', imports=('F',), py_value='F.N')
def test_same_ns_implicit_enum_pointer(self):
self.assert_example(data='C::D::F', usage='F*', result='C::D::F*', value='C::D::N', arg='&val', imports=('F',), py_value='F.N')
def test_same_ns_implicit_enum_multi_pointer(self):
self.assert_example(data='C::D::F*', usage='F**', result='C::D::F**', value='0', arg='&val', py_value='0')
#----------------------------------------------------------------------
# Enum from a different namespace
def test_other_ns_enum(self):
self.assert_example(data='A::B::E', usage='A::B::E', result='A::B::E', value='A::B::P', arg='val', imports=('E',), py_value='E.P')
def test_other_ns_enum_ref(self):
self.assert_example(data='A::B::E', usage='A::B::E&', result='A::B::E', value='A::B::P', arg='val', imports=('E',), py_value='E.P')
def test_other_ns_enum_pointer(self):
self.assert_example(data='A::B::E', usage='A::B::E*', result='A::B::E*', value='A::B::P', arg='&val', imports=('E',), py_value='E.P')
def test_other_ns_enum_multi_pointer(self):
self.assert_example(data='A::B::E*', usage='A::B::E**', result='A::B::E**', value='0', arg='&val', py_value='0')
#----------------------------------------------------------------------
# Explicitly referenced class in the same namespace
def test_same_ns_explicit_class(self):
self.assert_example(data='C::D::Y', usage='C::D::Y', result='C::D::Y', value='C::D::Y()', arg='val', imports=('Y',), py_value='Y()')
def test_same_ns_explicit_class_ref(self):
self.assert_example(data='C::D::Y', usage='C::D::Y&', result='C::D::Y', value='C::D::Y()', arg='val', imports=('Y',), py_value='Y()')
def test_same_ns_explicit_class_pointer(self):
self.assert_example(data='C::D::Y', usage='C::D::Y*', result='C::D::Y*', value='C::D::Y()', arg='&val', imports=('Y',), py_value='Y()')
def test_same_ns_explicit_class_multi_pointer(self):
self.assert_example(data='C::D::Y*', usage='C::D::Y**', result='C::D::Y**', value='0', arg='&val', py_value='0')
#----------------------------------------------------------------------
# Implicitly referenced class in the same namespace
def test_same_ns_implicit_class(self):
self.assert_example(data='C::D::Y', usage='Y', result='C::D::Y', value='C::D::Y()', arg='val', imports=('Y',), py_value='Y()')
def test_same_ns_implicit_class_ref(self):
self.assert_example(data='C::D::Y', usage='Y&', result='C::D::Y', value='C::D::Y()', arg='val', imports=('Y',), py_value='Y()')
def test_same_ns_implicit_class_pointer(self):
self.assert_example(data='C::D::Y', usage='Y*', result='C::D::Y*', value='C::D::Y()', arg='&val', imports=('Y',), py_value='Y()')
def test_same_ns_implicit_class_multi_pointer(self):
self.assert_example(data='C::D::Y*', usage='Y**', result='C::D::Y**', value='0', arg='&val', py_value='0')
#----------------------------------------------------------------------
# Class from a different namespace
def test_other_ns_class(self):
self.assert_example(data='A::B::X', usage='A::B::X', result='A::B::X', value='A::B::X()', arg='val', imports=('X',), py_value='X()')
def test_other_ns_class_ref(self):
self.assert_example(data='A::B::X', usage='A::B::X&', result='A::B::X', value='A::B::X()', arg='val', imports=('X',), py_value='X()')
def test_other_ns_class_pointer(self):
self.assert_example(data='A::B::X', usage='A::B::X*', result='A::B::X*', value='A::B::X()', arg='&val', imports=('X',), py_value='X()')
def test_other_ns_class_multi_pointer(self):
self.assert_example(data='A::B::X*', usage='A::B::X**', result='A::B::X**', value='0', arg='&val', py_value='0')
#----------------------------------------------------------------------
# Explicitly referenced inner class in the same namespace
def test_same_ns_explicit_inner_class(self):
self.assert_example(data='C::D::Y::YInner', usage='C::D::Y::YInner', result='C::D::Y::YInner', value='C::D::Y::YInner()', arg='val', imports=('Y',), py_value='Y.YInner()')
def test_same_ns_explicit_inner_class_ref(self):
self.assert_example(data='C::D::Y::YInner', usage='C::D::Y::YInner&', result='C::D::Y::YInner', value='C::D::Y::YInner()', arg='val', imports=('Y',), py_value='Y.YInner()')
def test_same_ns_explicit_inner_class_pointer(self):
self.assert_example(data='C::D::Y::YInner', usage='C::D::Y::YInner*', result='C::D::Y::YInner*', value='C::D::Y::YInner()', arg='&val', imports=('Y',), py_value='Y.YInner()')
def test_same_ns_explicit_inner_class_multi_pointer(self):
self.assert_example(data='C::D::Y::YInner*', usage='C::D::Y::YInner**', result='C::D::Y::YInner**', value='0', arg='&val', py_value='0')
#----------------------------------------------------------------------
# Implicitly referenced inner class in the same namespace
def test_same_ns_implicit_inner_class(self):
self.assert_example(data='C::D::Y::YInner', usage='Y::YInner', result='C::D::Y::YInner', value='C::D::Y::YInner()', arg='val', imports=('Y',), py_value='Y.YInner()')
def test_same_ns_implicit_inner_class_ref(self):
self.assert_example(data='C::D::Y::YInner', usage='Y::YInner&', result='C::D::Y::YInner', value='C::D::Y::YInner()', arg='val', imports=('Y',), py_value='Y.YInner()')
def test_same_ns_implicit_inner_class_pointer(self):
self.assert_example(data='C::D::Y::YInner', usage='Y::YInner*', result='C::D::Y::YInner*', value='C::D::Y::YInner()', arg='&val', imports=('Y',), py_value='Y.YInner()')
def test_same_ns_implicit_inner_class_multi_pointer(self):
self.assert_example(data='C::D::Y::YInner*', usage='Y::YInner**', result='C::D::Y::YInner**', value='0', arg='&val', py_value='0')
#----------------------------------------------------------------------
# Inner class from a different namespace
def test_other_ns_inner_class(self):
self.assert_example(data='A::B::X::XInner', usage='A::B::X::XInner', result='A::B::X::XInner', value='A::B::X::XInner()', arg='val', imports=('X',), py_value='X.XInner()')
def test_other_ns_inner_class_ref(self):
self.assert_example(data='A::B::X::XInner', usage='A::B::X::XInner&', result='A::B::X::XInner', value='A::B::X::XInner()', arg='val', imports=('X',), py_value='X.XInner()')
def test_other_ns_inner_class_pointer(self):
self.assert_example(data='A::B::X::XInner', usage='A::B::X::XInner*', result='A::B::X::XInner*', value='A::B::X::XInner()', arg='&val', imports=('X',), py_value='X.XInner()')
def test_other_ns_inner_class_multi_pointer(self):
self.assert_example(data='A::B::X::XInner*', usage='A::B::X::XInner**', result='A::B::X::XInner**', value='0', arg='&val', py_value='0')
class InlineClassTypeReferencingTestCase(ConverterTestCase, TypeReferencingCases):
def assert_example(self, **kwargs):
cpp_source = """
namespace A {
namespace B {
enum E {
P, Q
};
class X {
public:
class XInner {};
};
}
}
namespace C {
namespace D {
enum F {
M, N
};
class Y {
public:
class YInner {};
};
class Z {
public:
%(usage)s method(%(usage)s value) {
return value;
}
};
}
}
void test() {
C::D::Z *obj = new C::D::Z();
%(data)s val = %(value)s;
%(result)s result = obj->method(%(arg)s);
}
""" % kwargs
# DEBUG: Dump the sample cpp code to a file for external testing.
# import inspect
# from .utils import adjust
# curframe = inspect.currentframe()
# calframe = inspect.getouterframes(curframe, 2)
# with open('%s.cpp' % calframe[1][3], 'w') as out:
# print(adjust(cpp_source), file=out)
imports = kwargs.get('imports', tuple())
imports += ('Z',)
kwargs['imports'] = ''
if 'E' in imports or 'X' in imports:
kwargs['imports'] += 'from test.A.B import %s\n ' % ', '.join(
imp for imp in sorted(imports)
if imp in ('E', 'X')
)
if 'F' in imports or 'Y' in imports or 'Z' in imports:
kwargs['imports'] += 'from test.C.D import %s\n ' % ', '.join(
imp for imp in sorted(imports)
if imp in ('F', 'Y', 'Z')
)
if imports:
kwargs['imports'] += "\n\n "
self.assertMultifileGeneratedOutput(
cpp=[('test.cpp', cpp_source)],
py=[
(
'test',
"""
%(imports)sdef test():
obj = Z()
val = %(py_value)s
result = obj.method(val)
""" % kwargs
),
(
'test.A',
"""
"""
),
(
'test.A.B',
"""
from enum import Enum
class E(Enum):
P = 0
Q = 1
class X:
class XInner:
pass
"""
),
(
'test.C',
"""
"""
),
(
'test.C.D',
"""
from enum import Enum
class F(Enum):
M = 0
N = 1
class Y:
class YInner:
pass
class Z:
def method(self, value):
return value
"""
),
]
)
class ClassTypeReferencingTestCase(ConverterTestCase, TypeReferencingCases):
def assert_example(self, **kwargs):
cpp_source = """
namespace A {
namespace B {
enum E {
P, Q
};
class X {
public:
class XInner {};
};
}
}
namespace C {
namespace D {
enum F {
M, N
};
class Y {
public:
class YInner {};
};
class Z {
%(data)s m_data;
public:
%(usage)s method(%(usage)s value);
};
%(usage)s Z::method(%(usage)s value) {
return value;
}
}
}
void test() {
C::D::Z *obj = new C::D::Z();
%(data)s val = %(value)s;
%(result)s result = obj->method(%(arg)s);
}
""" % kwargs
# DEBUG: Dump the sample cpp code to a file for external testing.
# import inspect
# from .utils import adjust
# curframe = inspect.currentframe()
# calframe = inspect.getouterframes(curframe, 2)
# with open('%s.cpp' % calframe[1][3], 'w') as out:
# print(adjust(cpp_source), file=out)
imports = kwargs.get('imports', tuple())
imports += ('Z',)
kwargs['imports'] = ''
if 'E' in imports or 'X' in imports:
kwargs['imports'] += 'from test.A.B import %s\n ' % ', '.join(
imp for imp in sorted(imports)
if imp in ('E', 'X')
)
if 'F' in imports or 'Y' in imports or 'Z' in imports:
kwargs['imports'] += 'from test.C.D import %s\n ' % ', '.join(
imp for imp in sorted(imports)
if imp in ('F', 'Y', 'Z')
)
if imports:
kwargs['imports'] += "\n\n "
self.assertMultifileGeneratedOutput(
cpp=[('test.cpp', cpp_source)],
py=[
(
'test',
"""
%(imports)sdef test():
obj = Z()
val = %(py_value)s
result = obj.method(val)
""" % kwargs
),
(
'test.A',
"""
"""
),
(
'test.A.B',
"""
from enum import Enum
class E(Enum):
P = 0
Q = 1
class X:
class XInner:
pass
"""
),
(
'test.C',
"""
"""
),
(
'test.C.D',
"""
from enum import Enum
class F(Enum):
M = 0
N = 1
class Y:
class YInner:
pass
class Z:
def method(self, value):
return value
"""
),
]
)
class InlineFunctionTypeReferencingTestCase(ConverterTestCase, TypeReferencingCases):
def assert_example(self, **kwargs):
cpp_source = """
namespace A {
namespace B {
enum E {
P, Q
};
class X {
public:
class XInner {};
};
}
}
namespace C {
namespace D {
enum F {
M, N
};
class Y {
public:
class YInner {};
};
%(usage)s method(%(usage)s value) {
return value;
}
}
}
void test() {
%(data)s val = %(value)s;
%(result)s result = C::D::method(%(arg)s);
}
""" % kwargs
# DEBUG: Dump the sample cpp code to a file for external testing.
# import inspect
# from .utils import adjust
# curframe = inspect.currentframe()
# calframe = inspect.getouterframes(curframe, 2)
# with open('%s.cpp' % calframe[1][3], 'w') as out:
# print(adjust(cpp_source), file=out)
imports = kwargs.get('imports', tuple())
imports += ('method',)
kwargs['imports'] = ''
if 'E' in imports or 'X' in imports:
kwargs['imports'] += 'from test.A.B import %s\n ' % ', '.join(
imp for imp in sorted(imports)
if imp in ('E', 'X')
)
if 'F' in imports or 'method' in imports or 'Y' in imports or 'Z' in imports:
kwargs['imports'] += 'from test.C.D import %s\n ' % ', '.join(
imp for imp in sorted(imports)
if imp in ('F', 'method', 'Y', 'Z')
)
if imports:
kwargs['imports'] += "\n\n "
self.assertMultifileGeneratedOutput(
cpp=[('test.cpp', cpp_source)],
py=[
(
'test',
"""
%(imports)sdef test():
val = %(py_value)s
result = method(val)
""" % kwargs
),
(
'test.A',
"""
"""
),
(
'test.A.B',
"""
from enum import Enum
class E(Enum):
P = 0
Q = 1
class X:
class XInner:
pass
"""
),
(
'test.C',
"""
"""
),
(
'test.C.D',
"""
from enum import Enum
class F(Enum):
M = 0
N = 1
class Y:
class YInner:
pass
def method(value):
return value
"""
),
]
)
class FunctionTypeReferencingTestCase(ConverterTestCase, TypeReferencingCases):
def assert_example(self, **kwargs):
cpp_source = """
namespace A {
namespace B {
enum E {
P, Q
};
class X {
public:
class XInner {};
};
}
}
namespace C {
namespace D {
enum F {
M, N
};
class Y {
public:
class YInner {};
};
%(usage)s method(%(usage)s value);
%(usage)s method(%(usage)s value) {
return value;
}
}
}
void test() {
%(data)s val = %(value)s;
%(result)s result = C::D::method(%(arg)s);
}
""" % kwargs
# DEBUG: Dump the sample cpp code to a file for external testing.
# import inspect
# from .utils import adjust
# curframe = inspect.currentframe()
# calframe = inspect.getouterframes(curframe, 2)
# with open('%s.cpp' % calframe[1][3], 'w') as out:
# print(adjust(cpp_source), file=out)
imports = kwargs.get('imports', tuple())
imports += ('method',)
kwargs['imports'] = ''
if 'E' in imports or 'X' in imports:
kwargs['imports'] += 'from test.A.B import %s\n ' % ', '.join(
imp for imp in sorted(imports)
if imp in ('E', 'X')
)
if 'F' in imports or 'method' in imports or 'Y' in imports or 'Z' in imports:
kwargs['imports'] += 'from test.C.D import %s\n ' % ', '.join(
imp for imp in sorted(imports)
if imp in ('F', 'method', 'Y', 'Z')
)
if imports:
kwargs['imports'] += "\n\n "
self.assertMultifileGeneratedOutput(
cpp=[('test.cpp', cpp_source)],
py=[
(
'test',
"""
%(imports)sdef test():
val = %(py_value)s
result = method(val)
""" % kwargs
),
(
'test.A',
"""
"""
),
(
'test.A.B',
"""
from enum import Enum
class E(Enum):
P = 0
Q = 1
class X:
class XInner:
pass
"""
),
(
'test.C',
"""
"""
),
(
'test.C.D',
"""
from enum import Enum
class F(Enum):
M = 0
N = 1
class Y:
class YInner:
pass
def method(value):
return value
"""
),
]
)
# -*- coding: utf-8 -*-
# This file is part of beets.
# Copyright 2015, Adrian Sampson.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
"""Tests for the DBCore database abstraction.
"""
from __future__ import (division, absolute_import, print_function,
unicode_literals)
import os
import sqlite3
from test._common import unittest
from beets import dbcore
from tempfile import mkstemp
# Fixture: concrete database and model classes. For migration tests, we
# have multiple models with different numbers of fields.
class TestSort(dbcore.query.FieldSort):
pass
class TestModel1(dbcore.Model):
_table = 'test'
_flex_table = 'testflex'
_fields = {
'id': dbcore.types.PRIMARY_ID,
'field_one': dbcore.types.INTEGER,
}
_types = {
'some_float_field': dbcore.types.FLOAT,
}
_sorts = {
'some_sort': TestSort,
}
@classmethod
def _getters(cls):
return {}
def _template_funcs(self):
return {}
class TestDatabase1(dbcore.Database):
_models = (TestModel1,)
pass
class TestModel2(TestModel1):
_fields = {
'id': dbcore.types.PRIMARY_ID,
'field_one': dbcore.types.INTEGER,
'field_two': dbcore.types.INTEGER,
}
class TestDatabase2(dbcore.Database):
_models = (TestModel2,)
pass
class TestModel3(TestModel1):
_fields = {
'id': dbcore.types.PRIMARY_ID,
'field_one': dbcore.types.INTEGER,
'field_two': dbcore.types.INTEGER,
'field_three': dbcore.types.INTEGER,
}
class TestDatabase3(dbcore.Database):
_models = (TestModel3,)
pass
class TestModel4(TestModel1):
_fields = {
'id': dbcore.types.PRIMARY_ID,
'field_one': dbcore.types.INTEGER,
'field_two': dbcore.types.INTEGER,
'field_three': dbcore.types.INTEGER,
'field_four': dbcore.types.INTEGER,
}
class TestDatabase4(dbcore.Database):
_models = (TestModel4,)
pass
class AnotherTestModel(TestModel1):
_table = 'another'
_flex_table = 'anotherflex'
_fields = {
'id': dbcore.types.PRIMARY_ID,
'foo': dbcore.types.INTEGER,
}
class TestDatabaseTwoModels(dbcore.Database):
_models = (TestModel2, AnotherTestModel)
pass
class MigrationTest(unittest.TestCase):
"""Tests the ability to change the database schema between
versions.
"""
def setUp(self):
handle, self.libfile = mkstemp('db')
os.close(handle)
# Set up a database with the two-field schema.
old_lib = TestDatabase2(self.libfile)
# Add an item to the old library.
old_lib._connection().execute(
'insert into test (field_one, field_two) values (4, 2)'
)
old_lib._connection().commit()
del old_lib
def tearDown(self):
os.remove(self.libfile)
def test_open_with_same_fields_leaves_untouched(self):
new_lib = TestDatabase2(self.libfile)
c = new_lib._connection().cursor()
c.execute("select * from test")
row = c.fetchone()
self.assertEqual(len(row.keys()), len(TestModel2._fields))
def test_open_with_new_field_adds_column(self):
new_lib = TestDatabase3(self.libfile)
c = new_lib._connection().cursor()
c.execute("select * from test")
row = c.fetchone()
self.assertEqual(len(row.keys()), len(TestModel3._fields))
def test_open_with_fewer_fields_leaves_untouched(self):
new_lib = TestDatabase1(self.libfile)
c = new_lib._connection().cursor()
c.execute("select * from test")
row = c.fetchone()
self.assertEqual(len(row.keys()), len(TestModel2._fields))
def test_open_with_multiple_new_fields(self):
new_lib = TestDatabase4(self.libfile)
c = new_lib._connection().cursor()
c.execute("select * from test")
row = c.fetchone()
self.assertEqual(len(row.keys()), len(TestModel4._fields))
def test_extra_model_adds_table(self):
new_lib = TestDatabaseTwoModels(self.libfile)
try:
new_lib._connection().execute("select * from another")
except sqlite3.OperationalError:
self.fail("select failed")
class ModelTest(unittest.TestCase):
def setUp(self):
self.db = TestDatabase1(':memory:')
def tearDown(self):
self.db._connection().close()
def test_add_model(self):
model = TestModel1()
model.add(self.db)
rows = self.db._connection().execute('select * from test').fetchall()
self.assertEqual(len(rows), 1)
def test_store_fixed_field(self):
model = TestModel1()
model.add(self.db)
model.field_one = 123
model.store()
row = self.db._connection().execute('select * from test').fetchone()
self.assertEqual(row[b'field_one'], 123)
def test_retrieve_by_id(self):
model = TestModel1()
model.add(self.db)
other_model = self.db._get(TestModel1, model.id)
self.assertEqual(model.id, other_model.id)
def test_store_and_retrieve_flexattr(self):
model = TestModel1()
model.add(self.db)
model.foo = 'bar'
model.store()
other_model = self.db._get(TestModel1, model.id)
self.assertEqual(other_model.foo, 'bar')
def test_delete_flexattr(self):
model = TestModel1()
model['foo'] = 'bar'
self.assertTrue('foo' in model)
del model['foo']
self.assertFalse('foo' in model)
def test_delete_flexattr_via_dot(self):
model = TestModel1()
model['foo'] = 'bar'
self.assertTrue('foo' in model)
del model.foo
self.assertFalse('foo' in model)
def test_delete_flexattr_persists(self):
model = TestModel1()
model.add(self.db)
model.foo = 'bar'
model.store()
model = self.db._get(TestModel1, model.id)
del model['foo']
model.store()
model = self.db._get(TestModel1, model.id)
self.assertFalse('foo' in model)
def test_delete_non_existent_attribute(self):
model = TestModel1()
with self.assertRaises(KeyError):
del model['foo']
def test_delete_fixed_attribute(self):
model = TestModel1()
with self.assertRaises(KeyError):
del model['field_one']
def test_null_value_normalization_by_type(self):
model = TestModel1()
model.field_one = None
self.assertEqual(model.field_one, 0)
def test_null_value_stays_none_for_untyped_field(self):
model = TestModel1()
model.foo = None
self.assertEqual(model.foo, None)
def test_normalization_for_typed_flex_fields(self):
model = TestModel1()
model.some_float_field = None
self.assertEqual(model.some_float_field, 0.0)
def test_load_deleted_flex_field(self):
model1 = TestModel1()
model1['flex_field'] = True
model1.add(self.db)
model2 = self.db._get(TestModel1, model1.id)
self.assertIn('flex_field', model2)
del model1['flex_field']
model1.store()
model2.load()
self.assertNotIn('flex_field', model2)
class FormatTest(unittest.TestCase):
def test_format_fixed_field(self):
model = TestModel1()
model.field_one = u'caf\xe9'
value = model.formatted().get('field_one')
self.assertEqual(value, u'caf\xe9')
def test_format_flex_field(self):
model = TestModel1()
model.other_field = u'caf\xe9'
value = model.formatted().get('other_field')
self.assertEqual(value, u'caf\xe9')
def test_format_flex_field_bytes(self):
model = TestModel1()
model.other_field = u'caf\xe9'.encode('utf8')
value = model.formatted().get('other_field')
self.assertTrue(isinstance(value, unicode))
self.assertEqual(value, u'caf\xe9')
def test_format_unset_field(self):
model = TestModel1()
value = model.formatted().get('other_field')
self.assertEqual(value, u'')
def test_format_typed_flex_field(self):
model = TestModel1()
model.some_float_field = 3.14159265358979
value = model.formatted().get('some_float_field')
self.assertEqual(value, u'3.1')
class FormattedMappingTest(unittest.TestCase):
def test_keys_equal_model_keys(self):
model = TestModel1()
formatted = model.formatted()
self.assertEqual(set(model.keys(True)), set(formatted.keys()))
def test_get_unset_field(self):
model = TestModel1()
formatted = model.formatted()
with self.assertRaises(KeyError):
formatted['other_field']
def test_get_method_with_default(self):
model = TestModel1()
formatted = model.formatted()
self.assertEqual(formatted.get('other_field'), u'')
def test_get_method_with_specified_default(self):
model = TestModel1()
formatted = model.formatted()
self.assertEqual(formatted.get('other_field', 'default'), 'default')
class ParseTest(unittest.TestCase):
def test_parse_fixed_field(self):
value = TestModel1._parse('field_one', u'2')
self.assertIsInstance(value, int)
self.assertEqual(value, 2)
def test_parse_flex_field(self):
value = TestModel1._parse('some_float_field', u'2')
self.assertIsInstance(value, float)
self.assertEqual(value, 2.0)
def test_parse_untyped_field(self):
value = TestModel1._parse('field_nine', u'2')
self.assertEqual(value, u'2')
class QueryParseTest(unittest.TestCase):
def pqp(self, part):
return dbcore.queryparse.parse_query_part(
part,
{'year': dbcore.query.NumericQuery},
{':': dbcore.query.RegexpQuery},
)[:-1] # remove the negate flag
def test_one_basic_term(self):
q = 'test'
r = (None, 'test', dbcore.query.SubstringQuery)
self.assertEqual(self.pqp(q), r)
def test_one_keyed_term(self):
q = 'test:val'
r = ('test', 'val', dbcore.query.SubstringQuery)
self.assertEqual(self.pqp(q), r)
def test_colon_at_end(self):
q = 'test:'
r = ('test', '', dbcore.query.SubstringQuery)
self.assertEqual(self.pqp(q), r)
def test_one_basic_regexp(self):
q = r':regexp'
r = (None, 'regexp', dbcore.query.RegexpQuery)
self.assertEqual(self.pqp(q), r)
def test_keyed_regexp(self):
q = r'test::regexp'
r = ('test', 'regexp', dbcore.query.RegexpQuery)
self.assertEqual(self.pqp(q), r)
def test_escaped_colon(self):
q = r'test\:val'
r = (None, 'test:val', dbcore.query.SubstringQuery)
self.assertEqual(self.pqp(q), r)
def test_escaped_colon_in_regexp(self):
q = r':test\:regexp'
r = (None, 'test:regexp', dbcore.query.RegexpQuery)
self.assertEqual(self.pqp(q), r)
def test_single_year(self):
q = 'year:1999'
r = ('year', '1999', dbcore.query.NumericQuery)
self.assertEqual(self.pqp(q), r)
def test_multiple_years(self):
q = 'year:1999..2010'
r = ('year', '1999..2010', dbcore.query.NumericQuery)
self.assertEqual(self.pqp(q), r)
def test_empty_query_part(self):
q = ''
r = (None, '', dbcore.query.SubstringQuery)
self.assertEqual(self.pqp(q), r)
class QueryFromStringsTest(unittest.TestCase):
def qfs(self, strings):
return dbcore.queryparse.query_from_strings(
dbcore.query.AndQuery,
TestModel1,
{':': dbcore.query.RegexpQuery},
strings,
)
def test_zero_parts(self):
q = self.qfs([])
self.assertIsInstance(q, dbcore.query.AndQuery)
self.assertEqual(len(q.subqueries), 1)
self.assertIsInstance(q.subqueries[0], dbcore.query.TrueQuery)
def test_two_parts(self):
q = self.qfs(['foo', 'bar:baz'])
self.assertIsInstance(q, dbcore.query.AndQuery)
self.assertEqual(len(q.subqueries), 2)
self.assertIsInstance(q.subqueries[0], dbcore.query.AnyFieldQuery)
self.assertIsInstance(q.subqueries[1], dbcore.query.SubstringQuery)
def test_parse_fixed_type_query(self):
q = self.qfs(['field_one:2..3'])
self.assertIsInstance(q.subqueries[0], dbcore.query.NumericQuery)
def test_parse_flex_type_query(self):
q = self.qfs(['some_float_field:2..3'])
self.assertIsInstance(q.subqueries[0], dbcore.query.NumericQuery)
def test_empty_query_part(self):
q = self.qfs([''])
self.assertIsInstance(q.subqueries[0], dbcore.query.TrueQuery)
class SortFromStringsTest(unittest.TestCase):
def sfs(self, strings):
return dbcore.queryparse.sort_from_strings(
TestModel1,
strings,
)
def test_zero_parts(self):
s = self.sfs([])
self.assertIsInstance(s, dbcore.query.NullSort)
self.assertEqual(s, dbcore.query.NullSort())
def test_one_parts(self):
s = self.sfs(['field+'])
self.assertIsInstance(s, dbcore.query.Sort)
def test_two_parts(self):
s = self.sfs(['field+', 'another_field-'])
self.assertIsInstance(s, dbcore.query.MultipleSort)
self.assertEqual(len(s.sorts), 2)
def test_fixed_field_sort(self):
s = self.sfs(['field_one+'])
self.assertIsInstance(s, dbcore.query.FixedFieldSort)
self.assertEqual(s, dbcore.query.FixedFieldSort('field_one'))
def test_flex_field_sort(self):
s = self.sfs(['flex_field+'])
self.assertIsInstance(s, dbcore.query.SlowFieldSort)
self.assertEqual(s, dbcore.query.SlowFieldSort('flex_field'))
def test_special_sort(self):
s = self.sfs(['some_sort+'])
self.assertIsInstance(s, TestSort)
class ParseSortedQueryTest(unittest.TestCase):
def psq(self, parts):
return dbcore.parse_sorted_query(
TestModel1,
parts.split(),
)
def test_and_query(self):
q, s = self.psq('foo bar')
self.assertIsInstance(q, dbcore.query.AndQuery)
self.assertIsInstance(s, dbcore.query.NullSort)
self.assertEqual(len(q.subqueries), 2)
def test_or_query(self):
q, s = self.psq('foo , bar')
self.assertIsInstance(q, dbcore.query.OrQuery)
self.assertIsInstance(s, dbcore.query.NullSort)
self.assertEqual(len(q.subqueries), 2)
def test_no_space_before_comma_or_query(self):
q, s = self.psq('foo, bar')
self.assertIsInstance(q, dbcore.query.OrQuery)
self.assertIsInstance(s, dbcore.query.NullSort)
self.assertEqual(len(q.subqueries), 2)
def test_no_spaces_or_query(self):
q, s = self.psq('foo,bar')
self.assertIsInstance(q, dbcore.query.AndQuery)
self.assertIsInstance(s, dbcore.query.NullSort)
self.assertEqual(len(q.subqueries), 1)
def test_trailing_comma_or_query(self):
q, s = self.psq('foo , bar ,')
self.assertIsInstance(q, dbcore.query.OrQuery)
self.assertIsInstance(s, dbcore.query.NullSort)
self.assertEqual(len(q.subqueries), 3)
def test_leading_comma_or_query(self):
q, s = self.psq(', foo , bar')
self.assertIsInstance(q, dbcore.query.OrQuery)
self.assertIsInstance(s, dbcore.query.NullSort)
self.assertEqual(len(q.subqueries), 3)
def test_only_direction(self):
q, s = self.psq('-')
self.assertIsInstance(q, dbcore.query.AndQuery)
self.assertIsInstance(s, dbcore.query.NullSort)
self.assertEqual(len(q.subqueries), 1)
class ResultsIteratorTest(unittest.TestCase):
def setUp(self):
self.db = TestDatabase1(':memory:')
model = TestModel1()
model['foo'] = 'baz'
model.add(self.db)
model = TestModel1()
model['foo'] = 'bar'
model.add(self.db)
def tearDown(self):
self.db._connection().close()
def test_iterate_once(self):
objs = self.db._fetch(TestModel1)
self.assertEqual(len(list(objs)), 2)
def test_iterate_twice(self):
objs = self.db._fetch(TestModel1)
list(objs)
self.assertEqual(len(list(objs)), 2)
def test_concurrent_iterators(self):
results = self.db._fetch(TestModel1)
it1 = iter(results)
it2 = iter(results)
it1.next()
list(it2)
self.assertEqual(len(list(it1)), 1)
def test_slow_query(self):
q = dbcore.query.SubstringQuery('foo', 'ba', False)
objs = self.db._fetch(TestModel1, q)
self.assertEqual(len(list(objs)), 2)
def test_slow_query_negative(self):
q = dbcore.query.SubstringQuery('foo', 'qux', False)
objs = self.db._fetch(TestModel1, q)
self.assertEqual(len(list(objs)), 0)
def test_iterate_slow_sort(self):
s = dbcore.query.SlowFieldSort('foo')
res = self.db._fetch(TestModel1, sort=s)
objs = list(res)
self.assertEqual(objs[0].foo, 'bar')
self.assertEqual(objs[1].foo, 'baz')
def test_unsorted_subscript(self):
objs = self.db._fetch(TestModel1)
self.assertEqual(objs[0].foo, 'baz')
self.assertEqual(objs[1].foo, 'bar')
def test_slow_sort_subscript(self):
s = dbcore.query.SlowFieldSort('foo')
objs = self.db._fetch(TestModel1, sort=s)
self.assertEqual(objs[0].foo, 'bar')
self.assertEqual(objs[1].foo, 'baz')
def test_length(self):
objs = self.db._fetch(TestModel1)
self.assertEqual(len(objs), 2)
def suite():
return unittest.TestLoader().loadTestsFromName(__name__)
if __name__ == b'__main__':
unittest.main(defaultTest='suite')
|
|
# ##HPanda3D
# ##HGameObject
from direct.actor.Actor import Actor
from HUtils import *
from panda3d.bullet import BulletCharacterControllerNode, BulletRigidBodyNode, BulletGhostNode
from panda3d.bullet import BulletClosestHitSweepResult
from panda3d.core import TransformState, BitMask32, VBase3, Point3, Vec3, NodePath
physicsTypes = {"static": 0, "character": 1, "dynamic": 2, "None": 3, "ghost": 4}
class HBulletRigidBodyNode(BulletRigidBodyNode):
def __init__(self, name):
BulletRigidBodyNode.__init__(self, name)
self.customData = {"name": name}
class HNewGameObject(NodePath):
def __init__(self, name, scene, visualMesh, physicsMesh=None, physicsType="character", shapeMargin=0.02, stepHeight=0.5, x=0, y=0, z=0, mass=0, convex=True,
directRender=True, parent=None):
global physicsTypes
self.name = name
self.scene = scene
NodePath.__init__(self, self.scene.loadEgg(visualMesh))
if physicsMesh != None:
if physicsType == "rigid":
self.body = BulletRigidBodyNode(self.name + "_RigidBody")
self.attachNewNode(self.body)
m = self.scene.Base.loader.loadModel(physicsMesh)
if convex:
sTuple = modelToConvex(m)
else:
sTuple = modelToShape(m)
sTuple[0].setMargin(shapeMargin)
self.body.addShape(sTuple[0], sTuple[1])
self.body.setMass(mass)
self.body.setPythonTag("name", self.name + "_RigidBody")
self.scene.world.attachRigidBody(self.body)
elif physicsType=="character":
m = self.scene.Base.loader.loadModel(physicsMesh)
sTuple = modelToConvex(m)
sTuple[0].setMargin(shapeMargin)
self.body = BulletCharacterControllerNode(sTuple[0], stepHeight, self.name + "_Character")
self.attachNewNode(self.body)
self.body.setPythonTag("name", self.name + "_Character")
self.scene.world.attachCharacter(self.body)
self.setPos(x, y, z)
if directRender:
self.reparentTo(self.scene.Base.render)
elif parent != None:
self.reparentTo(parent)
class HGameObject():
def __init__(self, name, scene, visualMeshEgg, parent, physicsType, physicsShapeEgg=None, shapeMargin=0.04,
animable=False, animationsDict=None, stepHeight=0.5, x=0, y=0, z=0, mass=0, perpixelShading=False):
"""
:type name: str
:type scene: HLevel
:type visualMeshEgg: str
:type parent: panda3d.core.NodePath
:type physicsType: int
:type physicsShapeEgg: str
:type shapeMargin: float
:type animable: bool
:type animationsDict: dict
:type stepHeight: float
:type x: float
:type y: float
:type z: float
:type mass: float
:type perpixelShading: bool
"""
self.name = name
self.scene = scene
if visualMeshEgg is not None:
if animable:
if animationsDict is not None:
self.vMesh = Actor(visualMeshEgg, animationsDict)
self.vMesh.setBlend(frameBlend=True)
else:
self.vMesh = Actor(visualMeshEgg)
self.vMesh.setBlend(frameBlend=True)
else:
self.vMesh = scene.Base.loader.loadModel(visualMeshEgg)
else:
self.vMesh = None
if physicsType == physicsTypes["character"]:
print name + " is a character"
self.shapeModel = self.scene.loadEgg(physicsShapeEgg)
self.shape = modelToConvex(self.shapeModel)[0]
self.shape.setMargin(shapeMargin)
self.body = BulletCharacterControllerNode(self.shape, stepHeight, name)
self.bodyNP = parent.attachNewNode(self.body)
if visualMeshEgg is not None:
self.vMesh.reparentTo(self.bodyNP)
self.scene.world.attachCharacter(self.body)
self.bodyNP.setPos(x, y, z)
self.body.setPythonTag("name", name)
elif physicsType == physicsTypes["dynamic"]:
self.shapeModel = self.scene.loadEgg(physicsShapeEgg)
self.shape = modelToConvex(self.shapeModel)[0]
self.shape.setMargin(shapeMargin)
self.body = BulletRigidBodyNode(name)
self.body.setMass(mass)
self.body.addShape(self.shape)
self.bodyNP = parent.attachNewNode(self.body)
if visualMeshEgg is not None:
self.vMesh.reparentTo(self.bodyNP)
self.scene.world.attachRigidBody(self.body)
self.bodyNP.setPos(x, y, z)
self.body.setPythonTag("name", name)
elif physicsType == physicsTypes["ghost"]:
self.shapeModel = self.scene.loadEgg(physicsShapeEgg)
self.shape = modelToConvex(self.shapeModel)[0]
self.shape.setMargin(shapeMargin)
self.body = BulletGhostNode(name)
# self.body.setMass(mass)
self.body.addShape(self.shape)
self.bodyNP = parent.attachNewNode(self.body)
if visualMeshEgg is not None:
self.vMesh.reparentTo(self.bodyNP)
self.scene.world.attachGhost(self.body)
self.bodyNP.setPos(x, y, z)
self.body.setPythonTag("name", name)
else:
pass
        # ### Events
# self.scene.Base.taskMgr.add(self.onFrame,"onFrame")
self.shaders = perpixelShading
if self.vMesh is not None and not self.shaders:
self.scene.Base.taskMgr.add(self.clearShaderTask, name + "_clearShader")
# self.scene.Base.taskMgr.add(self._calcVel,self.name+"_calcVelTask")
self._lastPos = Point3()
self.velocity = Vec3()
def _calcVel(self, t):
if self.scene.pause is False:
try:
n = self.bodyNP.getPos()
self.velocity = (n - self._lastPos) / globalClock.getDt()
self._lastPos = n
except:
# print self.velocity
pass
return t.cont
def clearShaderTask(self, t):
self.vMesh.clearShader()
print "Shader clear_", self.name
def onFrame(self, task):
pass
def doRelativeSweepTest(self, relativePoint, BitMask=None, height=0.1):
globalPoint = self.scene.Base.render.getRelativePoint(self.bodyNP, relativePoint)
fromT = TransformState.makePos(self.bodyNP.getPos(self.scene.Base.render) + VBase3(0, 0, height))
toT = TransformState.makePos(globalPoint + VBase3(0, 0, height))
if BitMask != None:
r = self.scene.world.sweepTestClosest(self.shape, fromT, toT, BitMask)
else:
r = self.scene.world.sweepTestClosest(self.shape, fromT, toT)
if r.getNode() == self.body:
return BulletClosestHitSweepResult.empty()
else:
return r
def willCollide(self, relativePoint, bitMask=None, height=0.1):
r = self.doRelativeSweepTest(relativePoint, bitMask, height)
if r.getNode() == self.body:
return False
else:
return r.hasHit()
def doInverseRelativeSweepTest(self, relativePoint, bitMask=None, height=0.1):
globalPoint = self.scene.Base.render.getRelativePoint(self.bodyNP, relativePoint)
fromT = TransformState.makePos(self.bodyNP.getPos(self.scene.Base.render) + VBase3(0, 0, height))
toT = TransformState.makePos(globalPoint + VBase3(0, 0, height))
if bitMask != None:
r = self.scene.world.sweepTestClosest(self.shape, toT, fromT, bitMask)
else:
r = self.scene.world.sweepTestClosest(self.shape, toT, fromT)
if r.getNode() == self.body:
return BulletClosestHitSweepResult.empty()
else:
return r
    def inverseWillCollide(self, relativePoint, bitMask=None, height=0.1):
        r = self.doInverseRelativeSweepTest(relativePoint, bitMask, height)
        if r.getNode() == self.body:
return False
else:
return r.hasHit()
def destroy(self):
if "Character" in str(self.body):
self.scene.world.removeCharacter(self.body)
elif "Rigid" in str(self.body):
self.scene.world.removeRigidBody(self.body)
elif "Ghost" in str(self.body):
self.scene.world.removeGhost(self.body)
self.bodyNP.removeNode()
try:
self.vMesh.removeNode()
except:
pass
def isOnGround(self):
r = self.willCollide(Point3(0, 0, -0.1), self.body.getIntoCollideMask(), 0)
print r
return r
def setVelocity(self, v):
globalV = self.scene.Base.render.getRelativeVector(self.bodyNP, v)
self.body.setLinearVelocity(globalV)
def applyForce(self, v):
globalV = self.scene.Base.render.getRelativeVector(self.bodyNP, v)
self.body.applyCentralForce(globalV)
class HInteractiveObject(HGameObject):
def __init__(self, scene, name0, visualEgg, collisionEgg, mass, x0=0, y0=0, z0=0, margin=0.04,
sound=None, perpixelShading=True, CCD=False, CCDradius=0.05):
HGameObject.__init__(self, name0, scene, visualEgg, scene.Base.render, 2, collisionEgg, margin, False, None,
0, x0, y0, z0, mass, perpixelShading)
self.scene.Base.taskMgr.add(self.testCollision, self.name + "_testCollisionTask")
if CCD:
self.body.setCcdMotionThreshold(0.01)
self.body.setCcdSweptSphereRadius(CCDradius)
def onContact(self, bodyList):
"Play sound"
def testCollision(self, task):
result = self.scene.world.contactTest(self.body)
if result.getNumContacts() > 0:
self.onContact(result)
else:
pass
# if self.bodyNP.getZ(self.scene.Base.render)<50: self.destroy()
return task.cont
def destroy(self):
self.scene.world.removeRigidBody(self.body)
self.vMesh.removeNode()
self.bodyNP.removeNode()
self.scene.Base.taskMgr.remove(self.name + "_testCollisionTask")
print self.name, "_destroyed"
class HDynamicObject(NodePath):
def __init__(self, name, scene, visibleEgg, collisionEgg=None, x0=0, y0=0, z0=0, parent=None, margin=0.02, mass=0,
directRender=True, convex=True):
self.name = name
self.scene = scene
NodePath.__init__(self, self.scene.loadEgg(visibleEgg))
self.body = BulletRigidBodyNode(self.name + "_RigidBody")
self.attachNewNode(self.body)
if collisionEgg != None:
m = self.scene.Base.loader.loadModel(collisionEgg)
if convex:
sTuple = modelToConvex(m)
else:
sTuple = modelToShape(m)
sTuple[0].setMargin(margin)
self.body.addShape(sTuple[0], sTuple[1])
self.body.setMass(mass)
self.body.setPythonTag("name", self.name + "_RigidBody")
self.scene.world.attachRigidBody(self.body)
self.setPos(x0, y0, z0)
if directRender:
self.reparentTo(self.scene.Base.render)
elif parent != None:
self.reparentTo(parent)
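# --- Usage sketch (illustration only, not part of the original module) ---
# A minimal sketch of how the classes above might be wired into a scene. It
# assumes a scene object exposing `Base`, `world` and `loadEgg()` (exactly the
# attributes the classes rely on) and hypothetical .egg file paths; none of
# these names are defined in this module.
def _example_spawn_objects(scene):
    # A static crate: rigid body with zero mass, rendered directly under render.
    crate = HDynamicObject("crate", scene, "models/crate.egg",
                           collisionEgg="models/crate_col.egg",
                           x0=0, y0=5, z0=1, mass=0)
    # A playable character driven by the Bullet character controller.
    player = HGameObject("player", scene, "models/player.egg",
                         scene.Base.render, physicsTypes["character"],
                         physicsShapeEgg="models/player_col.egg",
                         x=0, y=0, z=2)
    # Sweep-test helper: check whether moving one unit forward would collide.
    blocked = player.willCollide(Point3(0, 1, 0))
    return crate, player, blocked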
|
|
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import os.path
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3 import Retry
from ducktape.services.service import Service
from ducktape.utils.util import wait_until
from kafkatest.directory_layout.kafka_path import KafkaPathResolverMixin
class TrogdorService(KafkaPathResolverMixin, Service):
"""
A ducktape service for running the trogdor fault injection daemons.
Attributes:
PERSISTENT_ROOT The root filesystem path to store service files under.
COORDINATOR_STDOUT_STDERR The path where we store the coordinator's stdout/stderr output.
    AGENT_STDOUT_STDERR The path where we store the agent's stdout/stderr output.
COORDINATOR_LOG The path where we store the coordinator's log4j output.
AGENT_LOG The path where we store the agent's log4j output.
AGENT_LOG4J_PROPERTIES The path to the agent log4j.properties file for log config.
COORDINATOR_LOG4J_PROPERTIES The path to the coordinator log4j.properties file for log config.
CONFIG_PATH The path to the trogdor configuration file.
DEFAULT_AGENT_PORT The default port to use for trogdor_agent daemons.
DEFAULT_COORDINATOR_PORT The default port to use for trogdor_coordinator daemons.
REQUEST_TIMEOUT The request timeout in seconds to use for REST requests.
REQUEST_HEADERS The request headers to use when communicating with trogdor.
"""
PERSISTENT_ROOT="/mnt/trogdor"
COORDINATOR_STDOUT_STDERR = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator-stdout-stderr.log")
AGENT_STDOUT_STDERR = os.path.join(PERSISTENT_ROOT, "trogdor-agent-stdout-stderr.log")
COORDINATOR_LOG = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator.log")
AGENT_LOG = os.path.join(PERSISTENT_ROOT, "trogdor-agent.log")
COORDINATOR_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-coordinator-log4j.properties")
AGENT_LOG4J_PROPERTIES = os.path.join(PERSISTENT_ROOT, "trogdor-agent-log4j.properties")
CONFIG_PATH = os.path.join(PERSISTENT_ROOT, "trogdor.conf")
DEFAULT_AGENT_PORT=8888
DEFAULT_COORDINATOR_PORT=8889
REQUEST_TIMEOUT=5
REQUEST_HEADERS = {"Content-type": "application/json"}
logs = {
"trogdor_coordinator_stdout_stderr": {
"path": COORDINATOR_STDOUT_STDERR,
"collect_default": True},
"trogdor_agent_stdout_stderr": {
"path": AGENT_STDOUT_STDERR,
"collect_default": True},
"trogdor_coordinator_log": {
"path": COORDINATOR_LOG,
"collect_default": True},
"trogdor_agent_log": {
"path": AGENT_LOG,
"collect_default": True},
}
def __init__(self, context, agent_nodes=None, client_services=None,
agent_port=DEFAULT_AGENT_PORT, coordinator_port=DEFAULT_COORDINATOR_PORT):
"""
Create a Trogdor service.
:param context: The test context.
:param agent_nodes: The nodes to run the agents on.
:param client_services: Services whose nodes we should run agents on.
:param agent_port: The port to use for the trogdor_agent daemons.
:param coordinator_port: The port to use for the trogdor_coordinator daemons.
"""
Service.__init__(self, context, num_nodes=1)
self.coordinator_node = self.nodes[0]
if client_services is not None:
for client_service in client_services:
for node in client_service.nodes:
self.nodes.append(node)
if agent_nodes is not None:
for agent_node in agent_nodes:
self.nodes.append(agent_node)
if (len(self.nodes) == 1):
raise RuntimeError("You must supply at least one agent node to run the service on.")
self.agent_port = agent_port
self.coordinator_port = coordinator_port
def free(self):
# We only want to deallocate the coordinator node, not the agent nodes. So we
# change self.nodes to include only the coordinator node, and then invoke
# the base class' free method.
if self.coordinator_node is not None:
self.nodes = [self.coordinator_node]
self.coordinator_node = None
Service.free(self)
def _create_config_dict(self):
"""
Create a dictionary with the Trogdor configuration.
:return: The configuration dictionary.
"""
dict_nodes = {}
for node in self.nodes:
dict_nodes[node.name] = {
"hostname": node.account.ssh_hostname,
}
if node.name == self.coordinator_node.name:
dict_nodes[node.name]["trogdor.coordinator.port"] = self.coordinator_port
else:
dict_nodes[node.name]["trogdor.agent.port"] = self.agent_port
return {
"platform": "org.apache.kafka.trogdor.basic.BasicPlatform",
"nodes": dict_nodes,
}
def start_node(self, node):
node.account.mkdirs(TrogdorService.PERSISTENT_ROOT)
# Create the configuration file on the node.
        config_str = json.dumps(self._create_config_dict(), indent=2)
        self.logger.info("Creating configuration file %s with %s" % (TrogdorService.CONFIG_PATH, config_str))
        node.account.create_file(TrogdorService.CONFIG_PATH, config_str)
if self.is_coordinator(node):
self._start_coordinator_node(node)
else:
self._start_agent_node(node)
def _start_coordinator_node(self, node):
node.account.create_file(TrogdorService.COORDINATOR_LOG4J_PROPERTIES,
self.render('log4j.properties',
log_path=TrogdorService.COORDINATOR_LOG))
self._start_trogdor_daemon("coordinator", TrogdorService.COORDINATOR_STDOUT_STDERR,
TrogdorService.COORDINATOR_LOG4J_PROPERTIES,
TrogdorService.COORDINATOR_LOG, node)
self.logger.info("Started trogdor coordinator on %s." % node.name)
def _start_agent_node(self, node):
node.account.create_file(TrogdorService.AGENT_LOG4J_PROPERTIES,
self.render('log4j.properties',
log_path=TrogdorService.AGENT_LOG))
self._start_trogdor_daemon("agent", TrogdorService.AGENT_STDOUT_STDERR,
TrogdorService.AGENT_LOG4J_PROPERTIES,
TrogdorService.AGENT_LOG, node)
self.logger.info("Started trogdor agent on %s." % node.name)
def _start_trogdor_daemon(self, daemon_name, stdout_stderr_capture_path,
log4j_properties_path, log_path, node):
cmd = "export KAFKA_LOG4J_OPTS='-Dlog4j.configuration=file:%s'; " % log4j_properties_path
cmd += "%s %s --%s.config %s --node-name %s 1>> %s 2>> %s &" % \
(self.path.script("trogdor.sh", node),
daemon_name,
daemon_name,
TrogdorService.CONFIG_PATH,
node.name,
stdout_stderr_capture_path,
stdout_stderr_capture_path)
node.account.ssh(cmd)
with node.account.monitor_log(log_path) as monitor:
monitor.wait_until("Starting %s process." % daemon_name, timeout_sec=60, backoff_sec=.10,
err_msg=("%s on %s didn't finish startup" % (daemon_name, node.name)))
def wait_node(self, node, timeout_sec=None):
if self.is_coordinator(node):
return len(node.account.java_pids(self.coordinator_class_name())) == 0
else:
return len(node.account.java_pids(self.agent_class_name())) == 0
def stop_node(self, node):
"""Halt trogdor processes on this node."""
if self.is_coordinator(node):
node.account.kill_java_processes(self.coordinator_class_name())
else:
node.account.kill_java_processes(self.agent_class_name())
def clean_node(self, node):
"""Clean up persistent state on this node - e.g. service logs, configuration files etc."""
self.stop_node(node)
node.account.ssh("rm -rf -- %s" % TrogdorService.PERSISTENT_ROOT)
def _coordinator_url(self, path):
return "http://%s:%d/coordinator/%s" % \
(self.coordinator_node.account.ssh_hostname, self.coordinator_port, path)
def request_session(self):
"""
Creates a new request session which will retry for a while.
"""
session = requests.Session()
session.mount('http://',
HTTPAdapter(max_retries=Retry(total=5, backoff_factor=0.3)))
return session
def _coordinator_post(self, path, message):
"""
Make a POST request to the Trogdor coordinator.
:param path: The URL path to use.
:param message: The message object to send.
:return: The response as an object.
"""
url = self._coordinator_url(path)
self.logger.info("POST %s %s" % (url, message))
response = self.request_session().post(url, json=message,
timeout=TrogdorService.REQUEST_TIMEOUT,
headers=TrogdorService.REQUEST_HEADERS)
response.raise_for_status()
return response.json()
def _coordinator_put(self, path, message):
"""
Make a PUT request to the Trogdor coordinator.
:param path: The URL path to use.
:param message: The message object to send.
:return: The response as an object.
"""
url = self._coordinator_url(path)
self.logger.info("PUT %s %s" % (url, message))
response = self.request_session().put(url, json=message,
timeout=TrogdorService.REQUEST_TIMEOUT,
headers=TrogdorService.REQUEST_HEADERS)
response.raise_for_status()
return response.json()
def _coordinator_get(self, path, message):
"""
Make a GET request to the Trogdor coordinator.
:param path: The URL path to use.
:param message: The message object to send.
:return: The response as an object.
"""
url = self._coordinator_url(path)
self.logger.info("GET %s %s" % (url, message))
response = self.request_session().get(url, json=message,
timeout=TrogdorService.REQUEST_TIMEOUT,
headers=TrogdorService.REQUEST_HEADERS)
response.raise_for_status()
return response.json()
def create_task(self, id, spec):
"""
Create a new task.
:param id: The task id.
:param spec: The task spec.
"""
self._coordinator_post("task/create", { "id": id, "spec": spec.message})
return TrogdorTask(id, self)
def stop_task(self, id):
"""
Stop a task.
:param id: The task id.
"""
self._coordinator_put("task/stop", { "id": id })
def tasks(self):
"""
Get the tasks which are on the coordinator.
:returns: A map of task id strings to task state objects.
Task state objects contain a 'spec' field with the spec
and a 'state' field with the state.
"""
return self._coordinator_get("tasks", {})
def is_coordinator(self, node):
return node == self.coordinator_node
def agent_class_name(self):
return "org.apache.kafka.trogdor.agent.Agent"
def coordinator_class_name(self):
return "org.apache.kafka.trogdor.coordinator.Coordinator"
class TrogdorTask(object):
PENDING_STATE = "PENDING"
RUNNING_STATE = "RUNNING"
STOPPING_STATE = "STOPPING"
DONE_STATE = "DONE"
def __init__(self, id, trogdor):
self.id = id
self.trogdor = trogdor
def task_state_or_error(self):
task_state = self.trogdor.tasks()["tasks"][self.id]
if task_state is None:
raise RuntimeError("Coordinator did not know about %s." % self.id)
error = task_state.get("error")
if error is None or error == "":
return task_state["state"], None
else:
return None, error
def done(self):
"""
Check if this task is done.
:raises RuntimeError: If the task encountered an error.
:returns: True if the task is in DONE_STATE;
False if it is in a different state.
"""
(task_state, error) = self.task_state_or_error()
if task_state is not None:
return task_state == TrogdorTask.DONE_STATE
else:
raise RuntimeError("Failed to gracefully stop %s: got task error: %s" % (self.id, error))
def running(self):
"""
Check if this task is running.
:raises RuntimeError: If the task encountered an error.
:returns: True if the task is in RUNNING_STATE;
False if it is in a different state.
"""
(task_state, error) = self.task_state_or_error()
if task_state is not None:
return task_state == TrogdorTask.RUNNING_STATE
else:
raise RuntimeError("Failed to start %s: got task error: %s" % (self.id, error))
def stop(self):
"""
Stop this task.
:raises RuntimeError: If the task encountered an error.
"""
if self.done():
return
self.trogdor.stop_task(self.id)
def wait_for_done(self, timeout_sec=360):
wait_until(lambda: self.done(),
timeout_sec=timeout_sec,
err_msg="%s failed to finish in the expected amount of time." % self.id)
|
|
import os
import time
from datetime import datetime
import threading
import sqlite3 as lite
from kervi.core.utility.thread import KerviThread
from kervi.plugin.storage.storage_plugin import StoragePlugin
_DB_CREATE_SQL = """
CREATE TABLE `log` (
`id` INTEGER PRIMARY KEY AUTOINCREMENT,
`sourceId` TEXT,
`sourceName` TEXT,
`area` TEXT,
`data` TEXT,
`level` INTEGER,
`topic` TEXT,
`body` TEXT,
    `timeStamp` REAL,
`logType` TEXT
);
CREATE TABLE "dynamicData" (
`id` INTEGER PRIMARY KEY AUTOINCREMENT,
`dynamicValue` TEXT,
`value` TEXT,
`timeStamp` TEXT
);
CREATE INDEX `sensorindex` ON `dynamicData` (`dynamicValue` ,`timeStamp` );
CREATE TABLE "settings" (
`id` INTEGER PRIMARY KEY AUTOINCREMENT,
`setting_group` TEXT,
`name` TEXT,
`value` TEXT
);
CREATE INDEX `settingsindex` ON `settings` (`setting_group` ,`name` );
CREATE TABLE "cron" (
`id` INTEGER PRIMARY KEY AUTOINCREMENT,
`job_id` TEXT,
    `component_id` TEXT,
`name` TEXT,
`job_parameters` TEXT
);
CREATE TABLE "cron_meta" (
`id` INTEGER PRIMARY KEY AUTOINCREMENT,
`cron_id` INTEGER,
`repeat_start` REAL NOT NULL,
`repeat_end` REAL NOT NULL,
`repeat_interval` varchar(255) NOT NULL,
`repeat_year` varchar(255) NOT NULL,
`repeat_month` varchar(255) NOT NULL,
`repeat_day` varchar(255) NOT NULL,
`repeat_week` varchar(255) NOT NULL,
`repeat_weekday` varchar(255) NOT NULL
);
"""
class SQLiteStoragePlugin(StoragePlugin):
def __init__(self, name, config, storage_type, manager):
StoragePlugin.__init__(self, name, config, manager)
self._storage_type = storage_type
if self._storage_type == "memory":
self._connection = lite.connect(self.manager.config.application.id + "_mem.db", check_same_thread=False)
else:
self._connection = lite.connect(self.manager.config.application.id + ".db", check_same_thread=False)
self._init_db()
self._db_lock = threading.Lock()
self._ts_start = datetime.utcnow()
def _init_db(self):
cursor = self._connection.cursor()
cursor.execute("SELECT name FROM sqlite_master WHERE type='table';")
tables_exists = False
tables = cursor.fetchall()
for table in tables:
tables_exists = True
break
if not tables_exists:
self._execute_sql(_DB_CREATE_SQL)
def _execute_sql(self, sql):
sql_commands = sql.split(';')
for command in sql_commands:
try:
self._connection.execute(command)
except lite.Error as msg:
self.log_error("create db, Command skipped: {0}, command{1}", msg, command)
@property
def storage_type(self):
return self._storage_type
def store_value(self, value_id, value, persist=False):
self._db_lock.acquire()
try:
cursor = self._connection.cursor()
cursor.execute(
"INSERT INTO dynamicData ('dynamicValue','value','timeStamp') VALUES (?, ?, ?)",
(value["id"], self.to_json(value["value"]) , value["timestamp"])
)
self._connection.commit()
except lite.Error as er:
            self.log_error('error storing dynamic data, persist={0} error: {1}', persist, er)
finally:
self._db_lock.release()
def get_value_data(self, value, date_from, date_to, limit):
result = []
if date_from is None:
date_from = self._ts_start
elif isinstance(date_from, str):
date_from = datetime.strptime(date_from,'%Y-%m-%dT%H:%M:%S.%fZ')
if date_to is None:
date_to = datetime.utcnow()
elif isinstance(date_to, str):
date_to = datetime.strptime(date_to,'%Y-%m-%dT%H:%M:%S.%fZ')
self._db_lock.acquire()
try:
cur = self._connection.cursor()
cur.execute(
"select * from dynamicData where dynamicValue=? and timestamp >= Datetime(?) and timestamp < Datetime(?)",
(value, date_from, date_to)
)
all_rows = cur.fetchall()
for row in all_rows:
result += [
{
"value":self.to_json(row[2]),
"ts": row[3]
}
]
except lite.Error as er:
self.log_error('error get dynamic data:{0}', er)
finally:
self._db_lock.release()
return result
def store_setting(self, group, name, value):
setting = self._retrieve_setting_db(group, name)
self._db_lock.acquire()
try:
json_value = self.to_json(value)
cur = self._connection.cursor()
if setting:
cur.execute(
"update settings set value=? where id=?",
(json_value, setting["id"])
)
else:
cur.execute(
"INSERT INTO settings ('setting_group','name','value') VALUES (?, ?, ?)",
(group, name, json_value)
)
self._connection.commit()
except lite.Error as er:
self.log_error('error store settings data:{0}', er)
print("error store setting", er)
except Exception as er:
print("error store setting", er)
finally:
self._db_lock.release()
def retrieve_setting(self, group, name):
setting = self._retrieve_setting_db(group, name)
if setting:
return setting["value"]
def _retrieve_setting_db(self, group, name):
self._db_lock.acquire()
try:
cur = self._connection.cursor()
cur.execute(
"select * from settings where setting_group=? and name=?",
(group, name)
)
all_rows = cur.fetchall()
if len(all_rows) > 0:
value = None
try:
value = self.from_json(all_rows[0][3])
except Exception as ex:
#print("d", ex)
pass
return {
"id": all_rows[0][0],
"group": all_rows[0][1],
"name": all_rows[0][2],
"value": value
}
finally:
self._db_lock.release()
return None
def store_message(self, source_id, message_item):
raise NotImplementedError
def get_messages(self):
raise NotImplementedError
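# --- Usage sketch (illustration only, not part of the original plugin) ---
# Demonstrates the intended round trip through store_setting()/retrieve_setting()
# and store_value()/get_value_data(). Constructing SQLiteStoragePlugin needs a
# kervi plugin `config` and `manager` from the runtime, so this function only
# shows the calls on an already-created instance passed in by the caller.
def _example_storage_round_trip(storage_plugin):
    storage_plugin.store_setting("camera", "exposure", 125)
    exposure = storage_plugin.retrieve_setting("camera", "exposure")
    storage_plugin.store_value(
        "value_1",
        {"id": "value_1", "value": 21.5, "timestamp": datetime.utcnow()},
    )
    readings = storage_plugin.get_value_data("value_1", None, None, 100)
    return exposure, readings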
|
|
"""Simple implementation of the RSA cryptosystem.
This module is meant to show a simple and clear implementation of the
RSA algorithm: http://en.wikipedia.org/wiki/RSA_(cryptosystem). It is meant
to be readable, not fast.
The usage is simple. First, create a random key pair:
>>> public_key, private_key = make_key_pair(8)
The number 8 is the _key length_. The higher this number, the stronger the
encryption. The public key can be used to encrypt a message (in this module,
a message is simply a positive integer number):
>>> message = 5
>>> encrypted_message = public_key.encrypt(message)
The encrypted information can be retrieved only with the private key:
>>> private_key.decrypt(encrypted_message)
5
Private and public keys are made of three numeric parameters: ``n``, ``d`` and
``e``. ``n`` has the bit length specified with ``make_key_pair`` and is shared
between the two keys; ``e`` is used by the public key to encrypt; ``d`` is used
by the private key to decrypt.
It's worth noting that ``n - 2`` is the highest number that can be safely
encrypted or decrypted. For example, encrypting (or decrypting) the number
``n - 1`` does nothing, and encrypting (or decrypting) the number ``n`` always
returns 0.
>>> key = PublicKey(n=143, e=113)
>>> key.encrypt(142) # n - 1
142
>>> key.encrypt(143) # n
0
Also encrypting (or decrypting) 0 or 1 always returns 0 or 1:
>>> key.encrypt(0)
0
>>> key.encrypt(1)
1
Note that sometimes the original and the encrypted messages are the same, as
shown in the following example:
>>> for x in range(143): # n
... if key.encrypt(x) == x:
... print(x)
0
1
12
21
34
44
65
66
77
78
99
109
122
131
142
"""
import random
from collections import namedtuple
def get_primes(start, stop):
"""Return a list of prime numbers in ``range(start, stop)``."""
if start >= stop:
return []
primes = [2]
for n in range(3, stop + 1, 2):
for p in primes:
if n % p == 0:
break
else:
primes.append(n)
while primes and primes[0] < start:
del primes[0]
return primes
def are_relatively_prime(a, b):
"""Return ``True`` if ``a`` and ``b`` are two relatively prime numbers.
Two numbers are relatively prime if they share no common factors,
i.e. there is no integer (except 1) that divides both.
"""
for n in range(2, min(a, b) + 1):
if a % n == b % n == 0:
return False
return True
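# For reference only (not used by the functions below): the same coprimality
# test is usually written with the greatest common divisor. This illustrative
# variant uses Euclid's algorithm and is equivalent to are_relatively_prime
# for positive integers.
def _are_relatively_prime_gcd(a, b):
    """Return ``True`` iff ``gcd(a, b) == 1``."""
    while b:
        a, b = b, a % b
    return a == 1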
def make_key_pair(length):
"""Create a public-private key pair.
The key pair is generated from two random prime numbers. The argument
``length`` specifies the bit length of the number ``n`` shared between
the two keys: the higher, the better.
"""
if length < 4:
raise ValueError('cannot generate a key of length less '
'than 4 (got {!r})'.format(length))
# First step: find a number ``n`` which is the product of two prime
# numbers (``p`` and ``q``). ``n`` must have the number of bits specified
# by ``length``, therefore it must be in ``range(n_min, n_max + 1)``.
n_min = 1 << (length - 1)
n_max = (1 << length) - 1
# The key is stronger if ``p`` and ``q`` have similar bit length. We
# choose two prime numbers in ``range(start, stop)`` so that the
# difference of bit lengths is at most 2.
start = 1 << (length // 2 - 1)
stop = 1 << (length // 2 + 1)
primes = get_primes(start, stop)
# Now that we have a list of prime number candidates, randomly select
# two so that their product is in ``range(n_min, n_max + 1)``.
while primes:
p = random.choice(primes)
primes.remove(p)
q_candidates = [q for q in primes
if n_min <= p * q <= n_max]
if q_candidates:
q = random.choice(q_candidates)
break
else:
raise AssertionError("cannot find 'p' and 'q' for a key of "
"length={!r}".format(length))
# Second step: choose a number ``e`` lower than ``(p - 1) * (q - 1)``
# which shares no factors with ``(p - 1) * (q - 1)``.
stop = (p - 1) * (q - 1)
for e in range(3, stop, 2):
if are_relatively_prime(e, stop):
break
else:
raise AssertionError("cannot find 'e' with p={!r} "
"and q={!r}".format(p, q))
# Third step: find ``d`` such that ``(d * e - 1)`` is divisible by
# ``(p - 1) * (q - 1)``.
for d in range(3, stop, 2):
if d * e % stop == 1:
break
else:
raise AssertionError("cannot find 'd' with p={!r}, q={!r} "
"and e={!r}".format(p, q, e))
# That's all. We can build and return the public and private keys.
return PublicKey(p * q, e), PrivateKey(p * q, d)
class PublicKey(namedtuple('PublicKey', 'n e')):
"""Public key which can be used to encrypt data."""
__slots__ = ()
def encrypt(self, x):
"""Encrypt the number ``x``.
The result is a number which can be decrypted only using the
private key.
"""
return pow(x, self.e, self.n)
class PrivateKey(namedtuple('PrivateKey', 'n d')):
"""Private key which can be used both to decrypt data."""
__slots__ = ()
def decrypt(self, x):
"""Decrypt the number ``x``.
The argument ``x`` must be the result of the ``encrypt`` method of
the public key.
"""
return pow(x, self.d, self.n)
if __name__ == '__main__':
# Test with known results.
public = PublicKey(n=2534665157, e=7)
private = PrivateKey(n=2534665157, d=1810402843)
assert public.encrypt(123) == 2463995467
assert public.encrypt(456) == 2022084991
assert public.encrypt(123456) == 1299565302
assert private.decrypt(2463995467) == 123
assert private.decrypt(2022084991) == 456
assert private.decrypt(1299565302) == 123456
# Test with random values.
for length in range(4, 17):
public, private = make_key_pair(length)
assert public.n == private.n
assert len(bin(public.n)) - 2 == length
x = random.randrange(public.n - 2)
y = public.encrypt(x)
assert private.decrypt(y) == x
assert public.encrypt(public.n - 1) == public.n - 1
assert public.encrypt(public.n) == 0
assert private.decrypt(public.n - 1) == public.n - 1
assert private.decrypt(public.n) == 0
import doctest
doctest.testfile(__file__, globs=globals())
|
|
import os
import dill
import matplotlib.ticker as ticker
import numpy as np
import pySDC.helpers.plot_helper as plt_helper
from pySDC.helpers.stats_helper import filter_stats, sort_stats
from pySDC.implementations.collocation_classes.gauss_radau_right import CollGaussRadau_Right
from pySDC.implementations.controller_classes.controller_nonMPI import controller_nonMPI
from pySDC.implementations.problem_classes.AllenCahn_2D_FD import allencahn_fullyimplicit, allencahn_semiimplicit, \
allencahn_semiimplicit_v2, allencahn_multiimplicit, allencahn_multiimplicit_v2
from pySDC.implementations.sweeper_classes.generic_implicit import generic_implicit
from pySDC.implementations.sweeper_classes.imex_1st_order import imex_1st_order
from pySDC.implementations.sweeper_classes.multi_implicit import multi_implicit
from pySDC.projects.TOMS.AllenCahn_monitor import monitor
# http://www.personal.psu.edu/qud2/Res/Pre/dz09sisc.pdf
def setup_parameters():
"""
Helper routine to fill in all relevant parameters
    Note that this routine is used for all SDC variants, so it contains more parameters than any single run needs.
Returns:
description (dict)
controller_params (dict)
"""
# initialize level parameters
level_params = dict()
level_params['restol'] = 1E-08
level_params['dt'] = 1E-03
level_params['nsweeps'] = [1]
# initialize sweeper parameters
sweeper_params = dict()
sweeper_params['collocation_class'] = CollGaussRadau_Right
sweeper_params['num_nodes'] = [3]
sweeper_params['Q1'] = ['LU']
sweeper_params['Q2'] = ['LU']
sweeper_params['QI'] = ['LU']
sweeper_params['QE'] = ['EE']
sweeper_params['initial_guess'] = 'zero'
# This comes as read-in for the problem class
problem_params = dict()
problem_params['nu'] = 2
problem_params['nvars'] = [(128, 128)]
problem_params['eps'] = [0.04]
problem_params['newton_maxiter'] = 100
problem_params['newton_tol'] = 1E-09
problem_params['lin_tol'] = 1E-10
problem_params['lin_maxiter'] = 100
problem_params['radius'] = 0.25
# initialize step parameters
step_params = dict()
step_params['maxiter'] = 50
# initialize controller parameters
controller_params = dict()
controller_params['logger_level'] = 30
controller_params['hook_class'] = monitor
# fill description dictionary for easy step instantiation
description = dict()
description['problem_class'] = None # pass problem class
description['problem_params'] = problem_params # pass problem parameters
description['sweeper_class'] = None # pass sweeper (see part B)
description['sweeper_params'] = sweeper_params # pass sweeper parameters
description['level_params'] = level_params # pass level parameters
description['step_params'] = step_params # pass step parameters
return description, controller_params
def run_SDC_variant(variant=None, inexact=False):
"""
Routine to run particular SDC variant
Args:
variant (str): string describing the variant
        inexact (bool): flag to use inexact nonlinear solve (or not)
    Returns:
        stats: statistics object from the controller run
"""
# load (incomplete) default parameters
description, controller_params = setup_parameters()
# add stuff based on variant
if variant == 'fully-implicit':
description['problem_class'] = allencahn_fullyimplicit
description['sweeper_class'] = generic_implicit
if inexact:
description['problem_params']['newton_maxiter'] = 1
elif variant == 'semi-implicit':
description['problem_class'] = allencahn_semiimplicit
description['sweeper_class'] = imex_1st_order
if inexact:
description['problem_params']['lin_maxiter'] = 10
elif variant == 'semi-implicit_v2':
description['problem_class'] = allencahn_semiimplicit_v2
description['sweeper_class'] = imex_1st_order
if inexact:
description['problem_params']['newton_maxiter'] = 1
elif variant == 'multi-implicit':
description['problem_class'] = allencahn_multiimplicit
description['sweeper_class'] = multi_implicit
if inexact:
description['problem_params']['newton_maxiter'] = 1
description['problem_params']['lin_maxiter'] = 10
elif variant == 'multi-implicit_v2':
description['problem_class'] = allencahn_multiimplicit_v2
description['sweeper_class'] = multi_implicit
if inexact:
description['problem_params']['newton_maxiter'] = 1
else:
raise NotImplementedError('Wrong variant specified, got %s' % variant)
if inexact:
out = 'Working on inexact %s variant...' % variant
else:
out = 'Working on exact %s variant...' % variant
print(out)
# setup parameters "in time"
t0 = 0
Tend = 0.032
# instantiate controller
controller = controller_nonMPI(num_procs=1, controller_params=controller_params, description=description)
# get initial values on finest level
P = controller.MS[0].levels[0].prob
uinit = P.u_exact(t0)
# call main function to get things done...
uend, stats = controller.run(u0=uinit, t0=t0, Tend=Tend)
# filter statistics by variant (number of iterations)
filtered_stats = filter_stats(stats, type='niter')
# convert filtered statistics to list of iterations count, sorted by process
iter_counts = sort_stats(filtered_stats, sortby='time')
# compute and print statistics
niters = np.array([item[1] for item in iter_counts])
out = ' Mean number of iterations: %4.2f' % np.mean(niters)
print(out)
out = ' Range of values for number of iterations: %2i ' % np.ptp(niters)
print(out)
out = ' Position of max/min number of iterations: %2i -- %2i' % \
(int(np.argmax(niters)), int(np.argmin(niters)))
print(out)
out = ' Std and var for number of iterations: %4.2f -- %4.2f' % (float(np.std(niters)), float(np.var(niters)))
print(out)
print(' Iteration count (nonlinear/linear): %i / %i' % (P.newton_itercount, P.lin_itercount))
print(' Mean Iteration count per call: %4.2f / %4.2f' % (P.newton_itercount / max(P.newton_ncalls, 1),
P.lin_itercount / max(P.lin_ncalls, 1)))
timing = sort_stats(filter_stats(stats, type='timing_run'), sortby='time')
print('Time to solution: %6.4f sec.' % timing[0][1])
print()
return stats
def show_results(fname, cwd=''):
"""
Plotting routine
Args:
fname (str): file name to read in and name plots
cwd (str): current working directory
"""
file = open(cwd + fname + '.pkl', 'rb')
results = dill.load(file)
file.close()
# plt_helper.mpl.style.use('classic')
plt_helper.setup_mpl()
# set up plot for timings
fig, ax1 = plt_helper.newfig(textwidth=238.96, scale=1.5, ratio=0.4)
timings = {}
niters = {}
for key, item in results.items():
timings[key] = sort_stats(filter_stats(item, type='timing_run'), sortby='time')[0][1]
iter_counts = sort_stats(filter_stats(item, type='niter'), sortby='time')
niters[key] = np.mean(np.array([item[1] for item in iter_counts]))
xcoords = list(range(len(timings)))
sorted_timings = sorted([(key, timings[key]) for key in timings], reverse=True, key=lambda tup: tup[1])
sorted_niters = [(k, niters[k]) for k in [key[0] for key in sorted_timings]]
heights_timings = [item[1] for item in sorted_timings]
heights_niters = [item[1] for item in sorted_niters]
keys = [(item[0][1] + ' ' + item[0][0]).replace('-', '\n').replace('_v2', ' mod.') for item in sorted_timings]
ax1.bar(xcoords, heights_timings, align='edge', width=-0.3, label='timings (left axis)')
ax1.set_ylabel('time (sec)')
ax2 = ax1.twinx()
ax2.bar(xcoords, heights_niters, color='lightcoral', align='edge', width=0.3, label='iterations (right axis)')
ax2.set_ylabel('mean number of iterations')
ax1.set_xticks(xcoords)
ax1.set_xticklabels(keys, rotation=90, ha='center')
# ask matplotlib for the plotted objects and their labels
lines, labels = ax1.get_legend_handles_labels()
lines2, labels2 = ax2.get_legend_handles_labels()
ax2.legend(lines + lines2, labels + labels2, loc=0)
# save plot, beautify
f = fname + '_timings'
plt_helper.savefig(f)
assert os.path.isfile(f + '.pdf'), 'ERROR: plotting did not create PDF file'
# assert os.path.isfile(f + '.pgf'), 'ERROR: plotting did not create PGF file'
assert os.path.isfile(f + '.png'), 'ERROR: plotting did not create PNG file'
# set up plot for radii
fig, ax = plt_helper.newfig(textwidth=238.96, scale=1.0)
exact_radii = []
for key, item in results.items():
computed_radii = sort_stats(filter_stats(item, type='computed_radius'), sortby='time')
xcoords = [item0[0] for item0 in computed_radii]
radii = [item0[1] for item0 in computed_radii]
if key[0] + ' ' + key[1] == 'fully-implicit exact':
ax.plot(xcoords, radii, label=(key[0] + ' ' + key[1]).replace('_v2', ' mod.'))
exact_radii = sort_stats(filter_stats(item, type='exact_radius'), sortby='time')
diff = np.array([abs(item0[1] - item1[1]) for item0, item1 in zip(exact_radii, computed_radii)])
max_pos = int(np.argmax(diff))
assert max(diff) < 0.07, 'ERROR: computed radius is too far away from exact radius, got %s' % max(diff)
assert 0.028 < computed_radii[max_pos][0] < 0.03, \
'ERROR: largest difference is at wrong time, got %s' % computed_radii[max_pos][0]
xcoords = [item[0] for item in exact_radii]
radii = [item[1] for item in exact_radii]
ax.plot(xcoords, radii, color='k', linestyle='--', linewidth=1, label='exact')
ax.yaxis.set_major_formatter(ticker.FormatStrFormatter('%1.2f'))
ax.set_ylabel('radius')
ax.set_xlabel('time')
ax.grid()
ax.legend(loc=3)
# save plot, beautify
f = fname + '_radii'
plt_helper.savefig(f)
assert os.path.isfile(f + '.pdf'), 'ERROR: plotting did not create PDF file'
# assert os.path.isfile(f + '.pgf'), 'ERROR: plotting did not create PGF file'
assert os.path.isfile(f + '.png'), 'ERROR: plotting did not create PNG file'
# set up plot for interface width
fig, ax = plt_helper.newfig(textwidth=238.96, scale=1.0)
interface_width = []
for key, item in results.items():
interface_width = sort_stats(filter_stats(item, type='interface_width'), sortby='time')
xcoords = [item[0] for item in interface_width]
width = [item[1] for item in interface_width]
if key[0] + ' ' + key[1] == 'fully-implicit exact':
ax.plot(xcoords, width, label=key[0] + ' ' + key[1])
xcoords = [item[0] for item in interface_width]
init_width = [interface_width[0][1]] * len(xcoords)
ax.plot(xcoords, init_width, color='k', linestyle='--', linewidth=1, label='exact')
ax.yaxis.set_major_formatter(ticker.FormatStrFormatter('%1.2f'))
ax.set_ylabel(r'interface width ($\epsilon$)')
ax.set_xlabel('time')
ax.grid()
ax.legend(loc=3)
# save plot, beautify
f = fname + '_interface'
plt_helper.savefig(f)
assert os.path.isfile(f + '.pdf'), 'ERROR: plotting did not create PDF file'
# assert os.path.isfile(f + '.pgf'), 'ERROR: plotting did not create PGF file'
assert os.path.isfile(f + '.png'), 'ERROR: plotting did not create PNG file'
return None
def main(cwd=''):
"""
Main driver
Args:
cwd (str): current working directory (need this for testing)
"""
# Loop over variants, exact and inexact solves
results = {}
for variant in ['multi-implicit', 'semi-implicit', 'fully-implicit', 'semi-implicit_v2', 'multi-implicit_v2']:
results[(variant, 'exact')] = run_SDC_variant(variant=variant, inexact=False)
results[(variant, 'inexact')] = run_SDC_variant(variant=variant, inexact=True)
# dump result
fname = 'data/results_SDC_variants_AllenCahn_1E-03'
file = open(cwd + fname + '.pkl', 'wb')
dill.dump(results, file)
file.close()
assert os.path.isfile(cwd + fname + '.pkl'), 'ERROR: dill did not create file'
# visualize
show_results(fname, cwd=cwd)
if __name__ == "__main__":
main()
|
|
#!/usr/bin/env python
# encoding: utf-8
'''
Created by Brian Cherinka on 2016-03-28 23:30:14
Licensed under a 3-clause BSD license.
Revision History:
Initial Version: 2016-03-28 23:30:14 by Brian Cherinka
Last Modified On: 2016-03-28 23:30:14 by Brian
'''
from __future__ import print_function, division
from brain.db.modelGraph import ModelGraph
import inspect
__author__ = 'Brian Cherinka'
class MarvinDB(object):
''' Class designed to handle database related things with Marvin '''
def __init__(self, dbtype=None, log=None, allowed_releases=None):
self.dbtype = dbtype
self.db = None
self.log = log
self.allowed_releases = allowed_releases
self.error = []
self.spaxelpropdict = None
self.datadb = None
self.dapdb = None
self.sampledb = None
self._init_the_db()
def _init_the_db(self):
''' Initialize the db '''
if self.dbtype:
self._setupDB()
if self.db:
self._importModels()
self._setSession()
self.testDbConnection()
self._setModelGraph()
self.cache_bits = []
if self.db:
self._addCache()
def _setupDB(self):
''' Try to import the database '''
# time - 14.8 ms
try:
from marvin.db.database import db
except RuntimeError as e:
self.log.debug('RuntimeError raised: Problem importing db: {0}'.format(e))
self.db = None
except ImportError as e:
self.log.debug('ImportError raised: Problem importing db: {0}'.format(e))
self.db = None
else:
self.db = db
def _importModels(self):
''' Try to import the sql alchemy model classes '''
# tested lazy imports - speeds init until they get called
# import lazy_import
# sampledb = lazy_import.lazy_module("marvin.db.models.SampleModelClasses")
# time 1.6 seconds
try:
import marvin.db.models.SampleModelClasses as sampledb
except Exception as e:
self.log.debug('Exception raised: Problem importing mangadb SampleModelClasses: {0}'.format(e))
else:
self.sampledb = sampledb
try:
import marvin.db.models.DataModelClasses as datadb
except Exception as e:
self.log.debug('Exception raised: Problem importing mangadb DataModelClasses: {0}'.format(e))
else:
self.datadb = datadb
try:
import marvin.db.models.DapModelClasses as dapdb
except Exception as e:
self.log.debug('Exception raised: Problem importing mangadb DapModelClasses: {0}'.format(e))
else:
self.dapdb = dapdb
self.spaxelpropdict = self._setSpaxelPropDict()
def has_models(self):
''' check if the marvin db has all the models properly loaded '''
isdata = self.datadb is not None
isdap = self.dapdb is not None
issample = self.sampledb is not None
self.log.info('datadb? {0}'.format(isdata))
self.log.info('dapdb? {0}'.format(isdap))
self.log.info('sampledb? {0}'.format(issample))
return all([isdata, isdap, issample])
def _setSpaxelPropDict(self):
''' Set the SpaxelProp lookup dictionary '''
# time - 38 us
from marvin.utils.datamodel.dap import datamodel
spdict = {}
for release in self.allowed_releases:
if release in datamodel:
dm = datamodel[release]
spdict.update({release: dm.property_table})
return spdict
def _getSpaxelProp(self):
''' Get the correct SpaxelProp class given an release '''
inspdict = self._release in self.spaxelpropdict
if inspdict:
specific_spaxelprop = {'full': self.spaxelpropdict[self._release], 'clean':
'Clean{0}'.format(self.spaxelpropdict[self._release])}
else:
specific_spaxelprop = {'full': None, 'clean': None}
return specific_spaxelprop
def _setSession(self):
''' Sets the database session '''
self.session = self.db.Session() if self.db else None
def testDbConnection(self):
''' Test the database connection to ensure it works. Sets a boolean variable isdbconnected '''
# time - 4.7 ms
if self.db and self.datadb:
try:
tmp = self.session.query(self.datadb.PipelineVersion).first()
except Exception as e:
self.isdbconnected = False
self.error.append('Error connecting to manga database: {0}'.format(str(e)))
else:
self.isdbconnected = True
else:
self.isdbconnected = False
def forceDbOff(self):
''' Force the database to turn off '''
self.db = None
self.session = None
self.isdbconnected = False
self.datadb = None
self.dapdb = None
self.sampledb = None
def forceDbOn(self, dbtype=None):
''' Force the database to turn on '''
self._init_the_db()
def generateClassDict(self, module=None, lower=None):
''' Generates a dictionary of the Model Classes, based on class name as key, to the object class.
Selects only those classes in the module with attribute __tablename__
lower = True makes class name key all lowercase
'''
if not module:
module = self.datadb
classdict = {}
for model in inspect.getmembers(module, inspect.isclass):
keyname = model[0].lower() if lower else model[0]
if hasattr(model[1], '__tablename__'):
# only include the spaxelprop table matching the MPL version
if 'SpaxelProp' in keyname:
if keyname in self._getSpaxelProp().values():
classdict[keyname] = model[1]
else:
classdict[keyname] = model[1]
return classdict
def buildUberClassDict(self, release=None):
''' Builds an uber class dictionary from all modelclasses '''
self._release = release
classdict = {}
models = [self.datadb, self.sampledb, self.dapdb]
for model in models:
if model:
modelclasses = self.generateClassDict(module=model)
classdict.update(modelclasses)
return classdict
def _setModelGraph(self):
''' Initiates the ModelGraph using all available ModelClasses '''
models = list(filter(None, [self.datadb, self.sampledb, self.dapdb]))
if models:
self.modelgraph = ModelGraph(models)
else:
self.modelgraph = None
def _addCache(self):
''' Initialize dogpile caching for relationships
Caching options. A set of three RelationshipCache options
which can be applied to Query(), causing the "lazy load"
of these attributes to be loaded from cache.
'''
if self.datadb:
self.cache_bits.append(self.datadb.data_cache)
if self.sampledb:
self.cache_bits.append(self.sampledb.sample_cache)
if self.dapdb:
self.cache_bits.append(self.dapdb.dap_cache)
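# --- Usage sketch (illustration only, not part of the original module) ---
# Shows how MarvinDB is typically instantiated and queried for model
# availability. The logger and the release list below are placeholders; in
# Marvin the real values come from the package configuration.
def _example_init_marvindb():
    import logging
    log = logging.getLogger('marvin.example')
    marvindb = MarvinDB(dbtype='local', log=log, allowed_releases=['MPL-7'])
    if marvindb.db and marvindb.has_models():
        # Build the combined class dictionary used for query construction.
        classes = marvindb.buildUberClassDict(release='MPL-7')
        return marvindb, classes
    return marvindb, {}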
|
|
# -*- coding: utf-8 -*-
import os
import shutil
import unittest
import bpy
from math import pi
from mathutils import Euler
from mathutils import Vector
from mmd_tools.core import pmx
from mmd_tools.core.model import Model
from mmd_tools.core.pmd.importer import import_pmd_to_pmx
from mmd_tools.core.pmx.importer import PMXImporter
TESTS_DIR = os.path.dirname(os.path.abspath(__file__))
SAMPLES_DIR = os.path.join(os.path.dirname(TESTS_DIR), 'samples')
class TestPmxExporter(unittest.TestCase):
@classmethod
def setUpClass(cls):
'''
Clean up output from previous tests
'''
output_dir = os.path.join(TESTS_DIR, 'output')
for item in os.listdir(output_dir):
if item.endswith('.OUTPUT'):
continue # Skip the placeholder
item_fp = os.path.join(output_dir, item)
if os.path.isfile(item_fp):
os.remove(item_fp)
elif os.path.isdir(item_fp):
shutil.rmtree(item_fp)
def setUp(self):
'''
Reduce logging noise while each test runs
'''
import logging
logger = logging.getLogger()
logger.setLevel('ERROR')
#********************************************
# Utils
#********************************************
def __vector_error(self, vec0, vec1):
return (Vector(vec0) - Vector(vec1)).length
def __quaternion_error(self, quat0, quat1):
angle = quat0.rotation_difference(quat1).angle % pi
assert(angle >= 0)
return min(angle, pi-angle)
#********************************************
# Header & Information
#********************************************
def __check_pmx_header_info(self, source_model, result_model, import_types):
'''
Test pmx model info, header
'''
# Information =================
self.assertEqual(source_model.name, result_model.name)
self.assertEqual(source_model.name_e, result_model.name_e)
self.assertEqual(source_model.comment.replace('\r', ''), result_model.comment.replace('\r', ''))
self.assertEqual(source_model.comment_e.replace('\r', ''), result_model.comment_e.replace('\r', ''))
# Header ======================
if source_model.header:
source_header = source_model.header
result_header = result_model.header
self.assertEqual(source_header.sign, result_header.sign)
self.assertEqual(source_header.version, result_header.version)
self.assertEqual(source_header.encoding.index, result_header.encoding.index)
self.assertEqual(source_header.encoding.charset, result_header.encoding.charset)
if 'MESH' in import_types:
self.assertEqual(source_header.additional_uvs, result_header.additional_uvs)
self.assertEqual(source_header.vertex_index_size, result_header.vertex_index_size)
self.assertEqual(source_header.texture_index_size, result_header.texture_index_size)
self.assertEqual(source_header.material_index_size, result_header.material_index_size)
if 'ARMATURE' in import_types:
self.assertEqual(source_header.bone_index_size, result_header.bone_index_size)
if 'MORPHS' in import_types:
self.assertEqual(source_header.morph_index_size, result_header.morph_index_size)
if 'PHYSICS' in import_types:
self.assertEqual(source_header.rigid_index_size, result_header.rigid_index_size)
#********************************************
# Mesh
#********************************************
def __get_pmx_textures(self, textures):
ret = []
for t in textures:
path = t.path
path = os.path.basename(path)
ret.append(path)
return ret
def __get_texture(self, tex_id, textures):
if 0 <= tex_id < len(textures):
return textures[tex_id]
return tex_id
def __get_toon_texture(self, tex_id, textures, is_shared):
return tex_id if is_shared else self.__get_texture(tex_id, textures)
def __check_pmx_mesh(self, source_model, result_model):
'''
Test pmx textures, materials, vertices, faces
'''
# textures ====================
# TODO
source_textures = self.__get_pmx_textures(source_model.textures)
result_textures = self.__get_pmx_textures(result_model.textures)
self.assertEqual(len(source_textures), len(result_textures))
for tex0, tex1 in zip(sorted(source_textures), sorted(result_textures)):
self.assertEqual(tex0, tex1)
# materials ===================
source_materials = source_model.materials
result_materials = result_model.materials
self.assertEqual(len(source_materials), len(result_materials))
source_table = sorted(source_materials, key=lambda x: x.name)
result_table = sorted(result_materials, key=lambda x: x.name)
for mat0, mat1 in zip(source_table, result_table):
msg = mat0.name
self.assertEqual(mat0.name, mat1.name, msg)
self.assertEqual(mat0.name_e or mat0.name, mat1.name_e, msg)
self.assertEqual(mat0.diffuse, mat1.diffuse, msg)
self.assertEqual(mat0.specular, mat1.specular, msg)
self.assertEqual(mat0.shininess, mat1.shininess, msg)
self.assertEqual(mat0.ambient, mat1.ambient, msg)
self.assertEqual(mat0.is_double_sided, mat1.is_double_sided, msg)
self.assertEqual(mat0.enabled_drop_shadow, mat1.enabled_drop_shadow, msg)
self.assertEqual(mat0.enabled_self_shadow_map, mat1.enabled_self_shadow_map, msg)
self.assertEqual(mat0.enabled_self_shadow, mat1.enabled_self_shadow, msg)
self.assertEqual(mat0.enabled_toon_edge, mat1.enabled_toon_edge, msg)
self.assertEqual(mat0.edge_color, mat1.edge_color, msg)
self.assertEqual(mat0.edge_size, mat1.edge_size, msg)
self.assertEqual(mat0.comment, mat1.comment, msg)
self.assertEqual(mat0.vertex_count, mat1.vertex_count, msg)
tex0 = self.__get_texture(mat0.texture, source_textures)
tex1 = self.__get_texture(mat1.texture, result_textures)
self.assertEqual(tex0, tex1, msg)
self.assertEqual(mat0.sphere_texture_mode, mat1.sphere_texture_mode, msg)
sph0 = self.__get_texture(mat0.sphere_texture, source_textures)
sph1 = self.__get_texture(mat1.sphere_texture, result_textures)
self.assertEqual(sph0, sph1, msg)
self.assertEqual(mat0.is_shared_toon_texture, mat1.is_shared_toon_texture, msg)
toon0 = self.__get_toon_texture(mat0.toon_texture, source_textures, mat0.is_shared_toon_texture)
toon1 = self.__get_toon_texture(mat1.toon_texture, result_textures, mat1.is_shared_toon_texture)
self.assertEqual(toon0, toon1, msg)
# vertices & faces ============
# TODO
source_vertices = source_model.vertices
result_vertices = result_model.vertices
#self.assertEqual(len(source_vertices), len(result_vertices))
source_faces = source_model.faces
result_faces = result_model.faces
self.assertEqual(len(source_faces), len(result_faces))
for f0, f1 in zip(source_faces, result_faces):
seq0 = [source_vertices[i] for i in f0]
seq1 = [result_vertices[i] for i in f1]
for v0, v1 in zip(seq0, seq1):
self.assertLess(self.__vector_error(v0.co, v1.co), 1e-6)
self.assertLess(self.__vector_error(v0.uv, v1.uv), 1e-6)
#self.assertLess(self.__vector_error(v0.normal, v1.normal), 1e-3)
self.assertEqual(v0.additional_uvs, v1.additional_uvs)
self.assertEqual(v0.edge_scale, v1.edge_scale)
#self.assertEqual(v0.weight.weights, v1.weight.weights)
#self.assertEqual(v0.weight.bones, v1.weight.bones)
#********************************************
# Armature
#********************************************
def __get_bone(self, bone_id, bones):
if bone_id is not None and 0 <= bone_id < len(bones):
return bones[bone_id]
return bone_id
def __get_bone_name(self, bone_id, bones):
if bone_id is not None and 0 <= bone_id < len(bones):
return bones[bone_id].name
return bone_id
def __get_bone_display_connection(self, bone, bones):
displayConnection = bone.displayConnection
if displayConnection == -1 or displayConnection == [0.0, 0.0, 0.0]:
return [0.0, 0.0, 0.0]
if isinstance(displayConnection, int):
tail_bone = self.__get_bone(displayConnection, bones)
if self.__get_bone_name(tail_bone.parent, bones) == bone.name and not tail_bone.isMovable:
return tail_bone.name
return list(Vector(tail_bone.location) - Vector(bone.location))
return displayConnection
def __check_pmx_bones(self, source_model, result_model):
'''
Test pmx bones
'''
source_bones = source_model.bones
result_bones = result_model.bones
self.assertEqual(len(source_bones), len(result_bones))
# check bone order
bone_order0 = [x.name for x in source_bones]
bone_order1 = [x.name for x in result_bones]
self.assertEqual(bone_order0, bone_order1)
for bone0, bone1 in zip(source_bones, result_bones):
msg = bone0.name
self.assertEqual(bone0.name, bone1.name, msg)
self.assertEqual(bone0.name_e or bone0.name, bone1.name_e, msg)
self.assertLess(self.__vector_error(bone0.location, bone1.location), 1e-6, msg)
parent0 = self.__get_bone_name(bone0.parent, source_bones)
parent1 = self.__get_bone_name(bone1.parent, result_bones)
self.assertEqual(parent0, parent1, msg)
self.assertEqual(bone0.transform_order, bone1.transform_order, msg)
self.assertEqual(bone0.isRotatable, bone1.isRotatable, msg)
self.assertEqual(bone0.isMovable, bone1.isMovable, msg)
self.assertEqual(bone0.visible, bone1.visible, msg)
self.assertEqual(bone0.isControllable, bone1.isControllable, msg)
self.assertEqual(bone0.isIK, bone1.isIK, msg)
self.assertEqual(bone0.transAfterPhis, bone1.transAfterPhis, msg)
self.assertEqual(bone0.externalTransKey, bone1.externalTransKey, msg)
self.assertEqual(bone0.axis, bone1.axis, msg)
if bone0.localCoordinate and bone1.localCoordinate:
self.assertEqual(bone0.localCoordinate.x_axis, bone1.localCoordinate.x_axis, msg)
self.assertEqual(bone0.localCoordinate.z_axis, bone1.localCoordinate.z_axis, msg)
else:
self.assertEqual(bone0.localCoordinate, bone1.localCoordinate, msg)
self.assertEqual(bone0.hasAdditionalRotate, bone1.hasAdditionalRotate, msg)
self.assertEqual(bone0.hasAdditionalLocation, bone1.hasAdditionalLocation, msg)
if bone0.additionalTransform and bone1.additionalTransform:
at_target0, at_infl0 = bone0.additionalTransform
at_target1, at_infl1 = bone1.additionalTransform
at_target0 = self.__get_bone_name(at_target0, source_bones)
at_target1 = self.__get_bone_name(at_target1, result_bones)
self.assertEqual(at_target0, at_target1, msg)
self.assertLess(abs(at_infl0 - at_infl1), 1e-4, msg)
else:
self.assertEqual(bone0.additionalTransform, bone1.additionalTransform, msg)
target0 = self.__get_bone_name(bone0.target, source_bones)
target1 = self.__get_bone_name(bone1.target, result_bones)
self.assertEqual(target0, target1, msg)
self.assertEqual(bone0.loopCount, bone1.loopCount, msg)
self.assertEqual(bone0.rotationConstraint, bone1.rotationConstraint, msg)
self.assertEqual(len(bone0.ik_links), len(bone1.ik_links), msg)
for link0, link1 in zip(bone0.ik_links, bone1.ik_links):
target0 = self.__get_bone_name(link0.target, source_bones)
target1 = self.__get_bone_name(link1.target, result_bones)
self.assertEqual(target0, target1, msg)
maximumAngle0 = link0.maximumAngle
maximumAngle1 = link1.maximumAngle
if maximumAngle0 and maximumAngle1:
self.assertLess(self.__vector_error(maximumAngle0, maximumAngle1), 1e-6, msg)
else:
self.assertEqual(maximumAngle0, maximumAngle1, msg)
minimumAngle0 = link0.minimumAngle
minimumAngle1 = link1.minimumAngle
if minimumAngle0 and minimumAngle1:
self.assertLess(self.__vector_error(minimumAngle0, minimumAngle1), 1e-6, msg)
else:
self.assertEqual(minimumAngle0, minimumAngle1, msg)
for bone0, bone1 in zip(source_bones, result_bones):
msg = bone0.name
displayConnection0 = self.__get_bone_display_connection(bone0, source_bones)
displayConnection1 = self.__get_bone_display_connection(bone1, result_bones)
if isinstance(displayConnection0, list) and isinstance(displayConnection1, list):
self.assertLess(self.__vector_error(displayConnection0, displayConnection1), 1e-4, msg)
else:
self.assertEqual(displayConnection0, displayConnection1, msg)
#********************************************
# Physics
#********************************************
def __get_rigid_name(self, rigid_id, rigids):
if rigid_id is not None and 0 <= rigid_id < len(rigids):
return rigids[rigid_id].name
return rigid_id
def __check_pmx_physics(self, source_model, result_model):
'''
Test pmx rigids, joints
'''
# rigids ======================
source_rigids = source_model.rigids
result_rigids = result_model.rigids
self.assertEqual(len(source_rigids), len(result_rigids))
source_bones = source_model.bones
result_bones = result_model.bones
source_table = sorted(source_rigids, key=lambda x: x.name)
result_table = sorted(result_rigids, key=lambda x: x.name)
for rigid0, rigid1 in zip(source_table, result_table):
msg = rigid0.name
self.assertEqual(rigid0.name, rigid1.name, msg)
self.assertEqual(rigid0.name_e, rigid1.name_e, msg)
bone0 = self.__get_bone_name(rigid0.bone, source_bones)
bone1 = self.__get_bone_name(rigid1.bone, result_bones)
self.assertEqual(bone0, bone1, msg)
self.assertEqual(rigid0.collision_group_number, rigid1.collision_group_number, msg)
self.assertEqual(rigid0.collision_group_mask, rigid1.collision_group_mask, msg)
self.assertEqual(rigid0.type, rigid1.type, msg)
if rigid0.type == 0: # SHAPE_SPHERE
self.assertEqual(rigid0.size[0], rigid1.size[0], msg)
elif rigid0.type == 1: # SHAPE_BOX
self.assertEqual(rigid0.size, rigid1.size, msg)
elif rigid0.type == 2: # SHAPE_CAPSULE
self.assertLess(self.__vector_error(rigid0.size[0:2], rigid1.size[0:2]), 1e-6, msg)
self.assertLess(self.__vector_error(rigid0.location, rigid1.location), 1e-6, msg)
rigid0_rotation = Euler(rigid0.rotation,'YXZ').to_quaternion()
rigid1_rotation = Euler(rigid1.rotation,'YXZ').to_quaternion()
self.assertLess(self.__quaternion_error(rigid0_rotation, rigid1_rotation), 1e-6, msg)
self.assertEqual(rigid0.mass, rigid1.mass, msg)
self.assertEqual(rigid0.velocity_attenuation, rigid1.velocity_attenuation, msg)
self.assertEqual(rigid0.rotation_attenuation, rigid1.rotation_attenuation, msg)
self.assertEqual(rigid0.bounce, rigid1.bounce, msg)
self.assertEqual(rigid0.friction, rigid1.friction, msg)
self.assertEqual(rigid0.mode, rigid1.mode, msg)
# joints ======================
source_joints = source_model.joints
result_joints = result_model.joints
self.assertEqual(len(source_joints), len(result_joints))
source_table = sorted(source_joints, key=lambda x: x.name)
result_table = sorted(result_joints, key=lambda x: x.name)
for joint0, joint1 in zip(source_table, result_table):
msg = joint0.name
self.assertEqual(joint0.name, joint1.name, msg)
self.assertEqual(joint0.name_e, joint1.name_e, msg)
self.assertEqual(joint0.mode, joint1.mode, msg)
src_rigid0 = self.__get_rigid_name(joint0.src_rigid, source_rigids)
src_rigid1 = self.__get_rigid_name(joint1.src_rigid, result_rigids)
self.assertEqual(src_rigid0, src_rigid1, msg)
dest_rigid0 = self.__get_rigid_name(joint0.dest_rigid, source_rigids)
dest_rigid1 = self.__get_rigid_name(joint1.dest_rigid, result_rigids)
self.assertEqual(dest_rigid0, dest_rigid1, msg)
self.assertEqual(joint0.location, joint1.location, msg)
joint0_rotation = Euler(joint0.rotation,'YXZ').to_quaternion()
joint1_rotation = Euler(joint1.rotation,'YXZ').to_quaternion()
self.assertLess(self.__quaternion_error(joint0_rotation, joint1_rotation), 1e-6, msg)
self.assertEqual(joint0.maximum_location, joint1.maximum_location, msg)
self.assertEqual(joint0.minimum_location, joint1.minimum_location, msg)
self.assertEqual(joint0.maximum_rotation, joint1.maximum_rotation, msg)
self.assertEqual(joint0.minimum_rotation, joint1.minimum_rotation, msg)
self.assertEqual(joint0.spring_constant, joint1.spring_constant, msg)
self.assertEqual(joint0.spring_rotation_constant, joint1.spring_rotation_constant, msg)
#********************************************
# Morphs
#********************************************
def __get_material(self, index, materials):
if 0 <= index < len(materials):
return materials[index]
class _dummy:
name = None
return _dummy
def __check_pmx_morphs(self, source_model, result_model):
'''
Test pmx morphs
'''
source_morphs = source_model.morphs
result_morphs = result_model.morphs
self.assertEqual(len(source_morphs), len(result_morphs))
source_table = {}
for m in source_morphs:
source_table.setdefault(type(m), []).append(m)
result_table = {}
for m in result_morphs:
result_table.setdefault(type(m), []).append(m)
self.assertEqual(source_table.keys(), result_table.keys(), 'types mismatch')
#source_vertices = source_model.vertices
#result_vertices = result_model.vertices
# VertexMorph =================
# TODO
source = source_table.get(pmx.VertexMorph, [])
result = result_table.get(pmx.VertexMorph, [])
self.assertEqual(len(source), len(result))
for m0, m1 in zip(source, result):
msg = 'VertexMorph %s'%m0.name
self.assertEqual(m0.name, m1.name, msg)
self.assertEqual(m0.name_e, m1.name_e, msg)
self.assertEqual(m0.category, m1.category, msg)
#self.assertEqual(len(m0.offsets), len(m1.offsets), msg)
# UVMorph =====================
# TODO
source = source_table.get(pmx.UVMorph, [])
result = result_table.get(pmx.UVMorph, [])
self.assertEqual(len(source), len(result))
for m0, m1 in zip(source, result):
msg = 'UVMorph %s'%m0.name
self.assertEqual(m0.name, m1.name, msg)
self.assertEqual(m0.name_e, m1.name_e, msg)
self.assertEqual(m0.category, m1.category, msg)
self.assertEqual(len(m0.offsets), len(m1.offsets), msg)
#for s0, s1 in zip(m0.offsets, m1.offsets):
# self.assertEqual(s0.index, s1.index, msg)
# self.assertEqual(s0.offset, s1.offset, msg)
# BoneMorph ===================
source_bones = source_model.bones
result_bones = result_model.bones
source = source_table.get(pmx.BoneMorph, [])
result = result_table.get(pmx.BoneMorph, [])
self.assertEqual(len(source), len(result))
for m0, m1 in zip(source, result):
msg = 'BoneMorph %s'%m0.name
self.assertEqual(m0.name, m1.name, msg)
self.assertEqual(m0.name_e, m1.name_e, msg)
self.assertEqual(m0.category, m1.category, msg)
# the source may contain invalid data
source_offsets = [m for m in m0.offsets if 0 <= m.index < len(source_bones)]
result_offsets = m1.offsets
self.assertEqual(len(source_offsets), len(result_offsets), msg)
for s0, s1 in zip(source_offsets, result_offsets):
bone0 = source_bones[s0.index]
bone1 = result_bones[s1.index]
self.assertEqual(bone0.name, bone1.name, msg)
self.assertLess(self.__vector_error(s0.location_offset, s1.location_offset), 1e-5, msg)
self.assertLess(self.__vector_error(s0.rotation_offset, s1.rotation_offset), 1e-5, msg)
# MaterialMorph ===============
source_materials = source_model.materials
result_materials = result_model.materials
source = source_table.get(pmx.MaterialMorph, [])
result = result_table.get(pmx.MaterialMorph, [])
self.assertEqual(len(source), len(result))
for m0, m1 in zip(source, result):
msg = 'MaterialMorph %s'%m0.name
self.assertEqual(m0.name, m1.name, msg)
self.assertEqual(m0.name_e, m1.name_e, msg)
self.assertEqual(m0.category, m1.category, msg)
source_offsets = m0.offsets
result_offsets = m1.offsets
self.assertEqual(len(source_offsets), len(result_offsets), msg)
for s0, s1 in zip(source_offsets, result_offsets):
mat0 = self.__get_material(s0.index, source_materials)
mat1 = self.__get_material(s1.index, result_materials)
self.assertEqual(mat0.name, mat1.name, msg)
self.assertEqual(s0.offset_type, s1.offset_type, msg)
self.assertEqual(s0.diffuse_offset, s1.diffuse_offset, msg)
self.assertEqual(s0.specular_offset, s1.specular_offset, msg)
self.assertEqual(s0.shininess_offset, s1.shininess_offset, msg)
self.assertEqual(s0.ambient_offset, s1.ambient_offset, msg)
self.assertEqual(s0.edge_color_offset, s1.edge_color_offset, msg)
self.assertEqual(s0.edge_size_offset, s1.edge_size_offset, msg)
self.assertEqual(s0.texture_factor, s1.texture_factor, msg)
self.assertEqual(s0.sphere_texture_factor, s1.sphere_texture_factor, msg)
self.assertEqual(s0.toon_texture_factor, s1.toon_texture_factor, msg)
# GroupMorph ==================
source = source_table.get(pmx.GroupMorph, [])
result = result_table.get(pmx.GroupMorph, [])
self.assertEqual(len(source), len(result))
for m0, m1 in zip(source, result):
msg = 'GroupMorph %s'%m0.name
self.assertEqual(m0.name, m1.name, msg)
self.assertEqual(m0.name_e, m1.name_e, msg)
self.assertEqual(m0.category, m1.category, msg)
# the source may contain invalid data
source_offsets = [m for m in m0.offsets if 0 <= m.morph < len(source_morphs)]
result_offsets = m1.offsets
self.assertEqual(len(source_offsets), len(result_offsets), msg)
for s0, s1 in zip(source_offsets, result_offsets):
morph0 = source_morphs[s0.morph]
morph1 = result_morphs[s1.morph]
self.assertEqual(morph0.name, morph1.name, msg)
self.assertEqual(morph0.category, morph1.category, msg)
self.assertEqual(s0.factor, s1.factor, msg)
#********************************************
# Display
#********************************************
def __check_pmx_display_data(self, source_model, result_model, check_morphs):
'''
Test pmx display
'''
source_display = source_model.display
result_display = result_model.display
self.assertEqual(len(source_display), len(result_display))
for source, result in zip(source_display, result_display):
self.assertEqual(source.name, result.name)
self.assertEqual(source.name_e, result.name_e)
self.assertEqual(source.isSpecial, result.isSpecial)
source_items = source.data
if not check_morphs:
source_items = [i for i in source_items if i[0] == 0]
result_items = result.data
self.assertEqual(len(source_items), len(result_items))
for item0, item1 in zip(source_items, result_items):
disp_type0, index0 = item0
disp_type1, index1 = item1
self.assertEqual(disp_type0, disp_type1)
if disp_type0 == 0:
bone_name0 = source_model.bones[index0].name
bone_name1 = result_model.bones[index1].name
self.assertEqual(bone_name0, bone_name1)
elif disp_type0 == 1:
morph0 = source_model.morphs[index0]
morph1 = result_model.morphs[index1]
self.assertEqual(morph0.name, morph1.name)
self.assertEqual(morph0.category, morph1.category)
#********************************************
# Test Function
#********************************************
def __get_import_types(self, types):
types = types.copy()
if 'PHYSICS' in types:
types.add('ARMATURE')
if 'DISPLAY' in types:
types.add('ARMATURE')
if 'MORPHS' in types:
types.add('ARMATURE')
types.add('MESH')
return types
def __list_sample_files(self, file_types):
ret = []
for file_type in file_types:
file_ext = '.' + file_type
for root, dirs, files in os.walk(os.path.join(SAMPLES_DIR, file_type)):
for name in files:
if name.lower().endswith(file_ext):
ret.append(os.path.join(root, name))
return ret
def test_pmx_exporter(self):
'''
Import each sample pmd/pmx file, export it, then re-load the exported pmx and compare it with the source model
'''
input_files = self.__list_sample_files(('pmd', 'pmx'))
if len(input_files) < 1:
self.fail('Required pmd/pmx sample file(s) not found!')
check_types = set()
check_types.add('MESH')
check_types.add('ARMATURE')
check_types.add('PHYSICS')
check_types.add('MORPHS')
check_types.add('DISPLAY')
import_types = self.__get_import_types(check_types)
print('\n Check: %s | Import: %s'%(str(check_types), str(import_types)))
for test_num, filepath in enumerate(input_files):
print('\n - %2d/%d | filepath: %s'%(test_num+1, len(input_files), filepath))
try:
bpy.ops.wm.read_homefile() # reload blender startup file
if not bpy.context.user_preferences.addons.get('mmd_tools', None):
bpy.ops.wm.addon_enable(module='mmd_tools') # make sure addon 'mmd_tools' is enabled
file_loader = pmx.load
if filepath.lower().endswith('.pmd'):
file_loader = import_pmd_to_pmx
source_model = file_loader(filepath)
PMXImporter().execute(
pmx=source_model,
types=import_types,
scale=1,
clean_model=False,
renameBones=False,
)
bpy.context.scene.update()
except Exception:
self.fail('Exception happened during import %s'%filepath)
else:
try:
output_pmx = os.path.join(TESTS_DIR, 'output', '%d.pmx'%test_num)
bpy.ops.mmd_tools.export_pmx(
filepath=output_pmx,
copy_textures=False,
sort_materials=False,
log_level='ERROR',
)
except Exception:
self.fail('Exception happened during export %s'%output_pmx)
else:
self.assertTrue(os.path.isfile(output_pmx), 'File was not created') # Is this a race condition?
try:
result_model = pmx.load(output_pmx)
except Exception:
self.fail('Failed to load output file %s'%output_pmx)
self.__check_pmx_header_info(source_model, result_model, import_types)
if 'MESH' in check_types:
self.__check_pmx_mesh(source_model, result_model)
if 'ARMATURE' in check_types:
self.__check_pmx_bones(source_model, result_model)
if 'PHYSICS' in check_types:
self.__check_pmx_physics(source_model, result_model)
if 'MORPHS' in check_types:
self.__check_pmx_morphs(source_model, result_model)
if 'DISPLAY' in check_types:
self.__check_pmx_display_data(source_model, result_model, 'MORPHS' in check_types)
if __name__ == '__main__':
import sys
sys.argv = [__file__] + (sys.argv[sys.argv.index('--') + 1:] if '--' in sys.argv else [])
unittest.main()
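# Hedged usage note (not part of the test suite): these tests are meant to run
# inside Blender with the mmd_tools addon available, for example something like
#
#     blender --background --python test_pmx_exporter.py -- [unittest options]
#
# Arguments after '--' are forwarded to unittest by the sys.argv handling above;
# the script filename here is an assumption about how this module is saved.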
|
|
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Quotas for instances, volumes, and floating ips."""
import sys
from oslo.config import cfg
import webob
from neutron.common import exceptions
from neutron.openstack.common import importutils
from neutron.openstack.common import log as logging
LOG = logging.getLogger(__name__)
QUOTA_DB_MODULE = 'neutron.db.quota_db'
QUOTA_DB_DRIVER = 'neutron.db.quota_db.DbQuotaDriver'
QUOTA_CONF_DRIVER = 'neutron.quota.ConfDriver'
quota_opts = [
cfg.ListOpt('quota_items',
default=['network', 'subnet', 'port'],
help=_('Resource name(s) that are supported in quota '
'features')),
cfg.IntOpt('default_quota',
default=-1,
help=_('Default number of resource allowed per tenant. '
'A negative value means unlimited.')),
cfg.IntOpt('quota_network',
default=10,
help=_('Number of networks allowed per tenant. '
'A negative value means unlimited.')),
cfg.IntOpt('quota_subnet',
default=10,
help=_('Number of subnets allowed per tenant. '
'A negative value means unlimited.')),
cfg.IntOpt('quota_port',
default=50,
help=_('Number of ports allowed per tenant. '
'A negative value means unlimited.')),
cfg.StrOpt('quota_driver',
default=QUOTA_DB_DRIVER,
help=_('Default driver to use for quota checks')),
]
# Register the configuration options
cfg.CONF.register_opts(quota_opts, 'QUOTAS')
class ConfDriver(object):
"""Configuration driver.
Driver to perform necessary checks to enforce quotas and obtain
quota information. The default driver utilizes the default values
in neutron.conf.
"""
def _get_quotas(self, context, resources, keys):
"""Get quotas.
A helper method which retrieves the quotas for the specific
resources identified by keys, and which apply to the current
context.
:param context: The request context, for access checks.
:param resources: A dictionary of the registered resources.
:param keys: A list of the desired quotas to retrieve.
"""
# Filter resources
desired = set(keys)
sub_resources = dict((k, v) for k, v in resources.items()
if k in desired)
# Make sure we accounted for all of them...
if len(keys) != len(sub_resources):
unknown = desired - set(sub_resources.keys())
raise exceptions.QuotaResourceUnknown(unknown=sorted(unknown))
quotas = {}
for resource in sub_resources.values():
quotas[resource.name] = resource.default
return quotas
def limit_check(self, context, tenant_id,
resources, values):
"""Check simple quota limits.
For limits--those quotas for which there is no usage
synchronization function--this method checks that a set of
proposed values are permitted by the limit restriction.
This method will raise a QuotaResourceUnknown exception if a
given resource is unknown or if it is not a simple limit
resource.
If any of the proposed values is over the defined quota, an
OverQuota exception will be raised with the sorted list of the
resources which are too high. Otherwise, the method returns
nothing.
:param context: The request context, for access checks.
:param tenant_id: The tenant_id to check quota.
:param resources: A dictionary of the registered resources.
:param values: A dictionary of the values to check against the
quota.
"""
# Ensure no value is less than zero
unders = [key for key, val in values.items() if val < 0]
if unders:
raise exceptions.InvalidQuotaValue(unders=sorted(unders))
# Get the applicable quotas
quotas = self._get_quotas(context, resources, values.keys())
# Check the quotas and construct a list of the resources that
# would be put over limit by the desired values
overs = [key for key, val in values.items()
if quotas[key] >= 0 and quotas[key] < val]
if overs:
raise exceptions.OverQuota(overs=sorted(overs), quotas=quotas,
usages={})
@staticmethod
def get_tenant_quotas(context, resources, tenant_id):
quotas = {}
sub_resources = dict((k, v) for k, v in resources.items())
for resource in sub_resources.values():
quotas[resource.name] = resource.default
return quotas
@staticmethod
def get_all_quotas(context, resources):
return []
@staticmethod
def delete_tenant_quota(context, tenant_id):
msg = _('Access to this resource was denied.')
raise webob.exc.HTTPForbidden(msg)
@staticmethod
def update_quota_limit(context, tenant_id, resource, limit):
msg = _('Access to this resource was denied.')
raise webob.exc.HTTPForbidden(msg)
class BaseResource(object):
"""Describe a single resource for quota checking."""
def __init__(self, name, flag):
"""Initializes a resource.
:param name: The name of the resource, i.e., "instances".
:param flag: The name of the flag or configuration option
which specifies the default value of the quota
for this resource.
"""
self.name = name
self.flag = flag
@property
def default(self):
"""Return the default value of the quota."""
return getattr(cfg.CONF.QUOTAS,
self.flag,
cfg.CONF.QUOTAS.default_quota)
class CountableResource(BaseResource):
"""Describe a resource where the counts are determined by a function."""
def __init__(self, name, count, flag=None):
"""Initializes a CountableResource.
Countable resources are those resources which directly
correspond to objects in the database, e.g., network, subnet,
etc. A CountableResource must be constructed with a counting
function, which will be called to determine the current counts
of the resource.
The counting function will be passed the context, along with
the extra positional and keyword arguments that are passed to
Quota.count(). It should return an integer specifying the
count.
:param name: The name of the resource, i.e., "instances".
:param count: A callable which returns the count of the
resource. The arguments passed are as described
above.
:param flag: The name of the flag or configuration option
which specifies the default value of the quota
for this resource.
"""
super(CountableResource, self).__init__(name, flag=flag)
self.count = count
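# A short, hedged sketch of the counting-function contract described in the
# docstring above. The plugin call inside is illustrative only; _count_resource,
# defined near the bottom of this module, is the actual default counter:
#
#     def _count_ports(context, plugin, resources, tenant_id):
#         return len(plugin.get_ports(context,
#                                     filters={'tenant_id': [tenant_id]}))
#
#     port_resource = CountableResource('port', _count_ports, flag='quota_port')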
class QuotaEngine(object):
"""Represent the set of recognized quotas."""
def __init__(self, quota_driver_class=None):
"""Initialize a Quota object."""
self._resources = {}
self._driver = None
self._driver_class = quota_driver_class
def get_driver(self):
if self._driver is None:
_driver_class = (self._driver_class or
cfg.CONF.QUOTAS.quota_driver)
if (_driver_class == QUOTA_DB_DRIVER and
QUOTA_DB_MODULE not in sys.modules):
# If quotas table is not loaded, force config quota driver.
_driver_class = QUOTA_CONF_DRIVER
LOG.info(_("ConfDriver is used as quota_driver because the "
"loaded plugin does not support 'quotas' table."))
if isinstance(_driver_class, basestring):
_driver_class = importutils.import_object(_driver_class)
self._driver = _driver_class
LOG.info(_('Loaded quota_driver: %s.'), _driver_class)
return self._driver
def __contains__(self, resource):
return resource in self._resources
def register_resource(self, resource):
"""Register a resource."""
if resource.name in self._resources:
LOG.warn(_('%s is already registered.'), resource.name)
return
self._resources[resource.name] = resource
def register_resource_by_name(self, resourcename):
"""Register a resource by name."""
resource = CountableResource(resourcename, _count_resource,
'quota_' + resourcename)
self.register_resource(resource)
def register_resources(self, resources):
"""Register a list of resources."""
for resource in resources:
self.register_resource(resource)
def count(self, context, resource, *args, **kwargs):
"""Count a resource.
For countable resources, invokes the count() function and
returns its result. Arguments following the context and
resource are passed directly to the count function declared by
the resource.
:param context: The request context, for access checks.
:param resource: The name of the resource, as a string.
"""
# Get the resource
res = self._resources.get(resource)
if not res or not hasattr(res, 'count'):
raise exceptions.QuotaResourceUnknown(unknown=[resource])
return res.count(context, *args, **kwargs)
def limit_check(self, context, tenant_id, **values):
"""Check simple quota limits.
For limits--those quotas for which there is no usage
synchronization function--this method checks that a set of
proposed values are permitted by the limit restriction. The
values to check are given as keyword arguments, where the key
identifies the specific quota limit to check, and the value is
the proposed value.
This method will raise a QuotaResourceUnknown exception if a
given resource is unknown or if it is not a simple limit
resource.
If any of the proposed values is over the defined quota, an
OverQuota exception will be raised with the sorted list of the
resources which are too high. Otherwise, the method returns
nothing.
:param context: The request context, for access checks.
"""
return self.get_driver().limit_check(context, tenant_id,
self._resources, values)
@property
def resources(self):
return self._resources
QUOTAS = QuotaEngine()
def _count_resource(context, plugin, resources, tenant_id):
count_getter_name = "get_%s_count" % resources
# Some plugins support a count method for particular resources,
# using a DB's optimized counting features. We try to use that one
# if present. Otherwise just use the regular getter to retrieve all objects
# and count them in Python, allowing older plugins to still be supported.
try:
obj_count_getter = getattr(plugin, count_getter_name)
return obj_count_getter(context, filters={'tenant_id': [tenant_id]})
except (NotImplementedError, AttributeError):
obj_getter = getattr(plugin, "get_%s" % resources)
obj_list = obj_getter(context, filters={'tenant_id': [tenant_id]})
return len(obj_list) if obj_list else 0
def register_resources_from_config():
resources = []
for resource_item in cfg.CONF.QUOTAS.quota_items:
resources.append(CountableResource(resource_item, _count_resource,
'quota_' + resource_item))
QUOTAS.register_resources(resources)
register_resources_from_config()
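# A minimal, hypothetical usage sketch for the engine defined above. The
# 'context' and 'tenant_id' values are placeholders supplied by the caller,
# not objects created in this module:
#
#     from neutron import quota
#
#     quota.QUOTAS.register_resource_by_name('router')
#     try:
#         quota.QUOTAS.limit_check(context, tenant_id, network=3, port=10)
#     except exceptions.OverQuota:
#         pass  # reject the request; the exception lists the offending resources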
|
|
# This file is part of Androguard.
#
# Copyright (C) 2012, Anthony Desnos <desnos at t0t0.fr>
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from androguard.core import bytecode
from androguard.core import androconf
from androguard.core.bytecodes.dvm_permissions import DVM_PERMISSIONS
import StringIO
from struct import pack, unpack
from xml.sax.saxutils import escape
from zlib import crc32
import re
from xml.dom import minidom
# 0: chilkat
# 1: default python zipfile module
# 2: patch zipfile module
ZIPMODULE = 1
import sys
if sys.hexversion < 0x2070000 :
try :
import chilkat
ZIPMODULE = 0
# UNLOCK: replace this with your valid key!
try :
CHILKAT_KEY = open("key.txt", "rb").read()
except Exception :
CHILKAT_KEY = "testme"
except ImportError :
ZIPMODULE = 1
else :
ZIPMODULE = 1
################################################### CHILKAT ZIP FORMAT #####################################################
class ChilkatZip :
def __init__(self, raw) :
self.files = []
self.zip = chilkat.CkZip()
self.zip.UnlockComponent( CHILKAT_KEY )
self.zip.OpenFromMemory( raw, len(raw) )
filename = chilkat.CkString()
e = self.zip.FirstEntry()
while e != None :
e.get_FileName(filename)
self.files.append( filename.getString() )
e = e.NextEntry()
def delete(self, patterns) :
el = []
filename = chilkat.CkString()
e = self.zip.FirstEntry()
while e != None :
e.get_FileName(filename)
if re.match(patterns, filename.getString()) != None :
el.append( e )
e = e.NextEntry()
for i in el :
self.zip.DeleteEntry( i )
def remplace_file(self, filename, buff) :
entry = self.zip.GetEntryByName(filename)
if entry != None :
obj = chilkat.CkByteData()
obj.append2( buff, len(buff) )
return entry.ReplaceData( obj )
return False
def write(self) :
obj = chilkat.CkByteData()
self.zip.WriteToMemory( obj )
return obj.getBytes()
def namelist(self) :
return self.files
def read(self, elem) :
e = self.zip.GetEntryByName( elem )
s = chilkat.CkByteData()
e.Inflate( s )
return s.getBytes()
def sign_apk(filename, keystore, storepass):
from subprocess import Popen, PIPE, STDOUT
compile = Popen([androconf.CONF["PATH_JARSIGNER"],
"-sigalg",
"MD5withRSA",
"-digestalg",
"SHA1",
"-storepass",
storepass,
"-keystore",
keystore,
filename,
"alias_name"],
stdout=PIPE, stderr=STDOUT)
stdout, stderr = compile.communicate()
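# Hedged example of the signing helper above: androconf.CONF["PATH_JARSIGNER"]
# must point at a working jarsigner binary and the keystore alias is hard-coded
# to "alias_name"; the paths below are purely illustrative:
#
#     sign_apk("/tmp/repacked.apk", "/path/to/release.keystore", "storepass")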
######################################################## APK FORMAT ########################################################
class APK:
"""
This class can access all elements in an APK file
:param filename: specify the path of the file, or raw data
:param raw: specify if the filename is a path or raw data (optional)
:param mode: specify the mode to open the file (optional)
:param magic_file: specify the magic file (optional)
:param zipmodule: specify the type of zip module to use (0:chilkat, 1:zipfile, 2:patch zipfile)
:type filename: string
:type raw: boolean
:type mode: string
:type magic_file: string
:type zipmodule: int
:Example:
APK("myfile.apk")
APK(open("myfile.apk", "rb").read(), raw=True)
"""
def __init__(self, filename, raw=False, mode="r", magic_file=None, zipmodule=ZIPMODULE):
self.filename = filename
self.xml = {}
self.axml = {}
self.arsc = {}
self.package = ""
self.androidversion = {}
self.permissions = []
self.valid_apk = False
self.files = {}
self.files_crc32 = {}
self.magic_file = magic_file
if raw == True:
self.__raw = filename
else:
fd = open(filename, "rb")
self.__raw = fd.read()
fd.close()
self.zipmodule = zipmodule
if zipmodule == 0:
self.zip = ChilkatZip(self.__raw)
elif zipmodule == 2:
from androguard.patch import zipfile
self.zip = zipfile.ZipFile(StringIO.StringIO(self.__raw), mode=mode)
else:
import zipfile
self.zip = zipfile.ZipFile(StringIO.StringIO(self.__raw), mode=mode)
for i in self.zip.namelist():
if i == "AndroidManifest.xml":
self.axml[i] = AXMLPrinter(self.zip.read(i))
try:
self.xml[i] = minidom.parseString(self.axml[i].get_buff())
except:
self.xml[i] = None
if self.xml[i] != None:
self.package = self.xml[i].documentElement.getAttribute("package")
self.androidversion["Code"] = self.xml[i].documentElement.getAttribute("android:versionCode")
self.androidversion["Name"] = self.xml[i].documentElement.getAttribute("android:versionName")
for item in self.xml[i].getElementsByTagName('uses-permission'):
self.permissions.append(str(item.getAttribute("android:name")))
self.valid_apk = True
self.get_files_types()
def get_AndroidManifest(self):
"""
Return the Android Manifest XML file
:rtype: xml object
"""
return self.xml["AndroidManifest.xml"]
def is_valid_APK(self):
"""
Return true if the APK is valid, false otherwise
:rtype: boolean
"""
return self.valid_apk
def get_filename(self):
"""
Return the filename of the APK
:rtype: string
"""
return self.filename
def get_package(self):
"""
Return the name of the package
:rtype: string
"""
return self.package
def get_androidversion_code(self):
"""
Return the android version code
:rtype: string
"""
return self.androidversion["Code"]
def get_androidversion_name(self):
"""
Return the android version name
:rtype: string
"""
return self.androidversion["Name"]
def get_files(self):
"""
Return the files inside the APK
:rtype: a list of strings
"""
return self.zip.namelist()
def get_files_types(self):
"""
Return the files inside the APK with their associated types (by using python-magic)
:rtype: a dictionary
"""
try:
import magic
except ImportError:
# python-magic is not available
for i in self.get_files():
buffer = self.zip.read(i)
self.files_crc32[i] = crc32(buffer)
self.files[i] = "Unknown"
return self.files
if self.files != {}:
return self.files
builtin_magic = 0
try:
getattr(magic, "MagicException")
except AttributeError:
builtin_magic = 1
if builtin_magic:
ms = magic.open(magic.MAGIC_NONE)
ms.load()
for i in self.get_files():
buffer = self.zip.read(i)
self.files[i] = ms.buffer(buffer)
self.files[i] = self._patch_magic(buffer, self.files[i])
self.files_crc32[i] = crc32(buffer)
else:
m = magic.Magic(magic_file=self.magic_file)
for i in self.get_files():
buffer = self.zip.read(i)
self.files[i] = m.from_buffer(buffer)
self.files[i] = self._patch_magic(buffer, self.files[i])
self.files_crc32[i] = crc32(buffer)
return self.files
def _patch_magic(self, buffer, orig):
if ("Zip" in orig) or ("DBase" in orig):
val = androconf.is_android_raw(buffer)
if val == "APK":
if androconf.is_valid_android_raw(buffer):
return "Android application package file"
elif val == "AXML":
return "Android's binary XML"
return orig
def get_files_crc32(self):
if self.files_crc32 == {}:
self.get_files_types()
return self.files_crc32
def get_files_information(self):
"""
Return the files inside the APK with their associated types and crc32
:rtype: string, string, int
"""
if self.files == {}:
self.get_files_types()
for i in self.get_files():
try:
yield i, self.files[i], self.files_crc32[i]
except KeyError:
yield i, "", ""
def get_raw(self):
"""
Return raw bytes of the APK
:rtype: string
"""
return self.__raw
def get_file(self, filename):
"""
Return the raw data of the specified filename
:rtype: string
"""
try:
return self.zip.read(filename)
except KeyError:
return ""
def get_dex(self):
"""
Return the raw data of the classes dex file
:rtype: string
"""
return self.get_file("classes.dex")
def get_elements(self, tag_name, attribute):
"""
Return elements in xml files which match the tag name and the specified attribute
:param tag_name: a string which specify the tag name
:param attribute: a string which specify the attribute
"""
l = []
for i in self.xml :
for item in self.xml[i].getElementsByTagName(tag_name) :
value = item.getAttribute(attribute)
value = self.format_value( value )
l.append( str( value ) )
return l
def format_value(self, value) :
if len(value) > 0 :
if value[0] == "." :
value = self.package + value
else :
v_dot = value.find(".")
if v_dot == 0 :
value = self.package + "." + value
elif v_dot == -1 :
value = self.package + "." + value
return value
def get_element(self, tag_name, attribute):
"""
Return the value of the specified attribute for the first matching element in the xml files
:param tag_name: specify the tag name
:type tag_name: string
:param attribute: specify the attribute
:type attribute: string
:rtype: string
"""
for i in self.xml :
for item in self.xml[i].getElementsByTagName(tag_name) :
value = item.getAttribute(attribute)
if len(value) > 0 :
return value
return None
def get_main_activity(self) :
"""
Return the name of the main activity
:rtype: string
"""
x = set()
y = set()
for i in self.xml:
for item in self.xml[i].getElementsByTagName("activity") :
for sitem in item.getElementsByTagName( "action" ) :
val = sitem.getAttribute( "android:name" )
if val == "android.intent.action.MAIN" :
x.add( item.getAttribute( "android:name" ) )
for sitem in item.getElementsByTagName( "category" ) :
val = sitem.getAttribute( "android:name" )
if val == "android.intent.category.LAUNCHER" :
y.add( item.getAttribute( "android:name" ) )
z = x.intersection(y)
if len(z) > 0 :
return self.format_value(z.pop())
return None
def get_activities(self):
"""
Return the android:name attribute of all activities
:rtype: a list of string
"""
return self.get_elements("activity", "android:name")
def get_services(self):
"""
Return the android:name attribute of all services
:rtype: a list of string
"""
return self.get_elements("service", "android:name")
def get_receivers(self) :
"""
Return the android:name attribute of all receivers
:rtype: a list of string
"""
return self.get_elements("receiver", "android:name")
def get_providers(self):
"""
Return the android:name attribute of all providers
:rtype: a list of string
"""
return self.get_elements("provider", "android:name")
def get_intent_filters(self, category, name):
d = {}
d["action"] = []
d["category"] = []
for i in self.xml:
for item in self.xml[i].getElementsByTagName(category):
if self.format_value(item.getAttribute("android:name")) == name:
for sitem in item.getElementsByTagName("intent-filter"):
for ssitem in sitem.getElementsByTagName("action"):
if ssitem.getAttribute("android:name") not in d["action"]:
d["action"].append(ssitem.getAttribute("android:name"))
for ssitem in sitem.getElementsByTagName("category"):
if ssitem.getAttribute("android:name") not in d["category"]:
d["category"].append(ssitem.getAttribute("android:name"))
if not d["action"]:
del d["action"]
if not d["category"]:
del d["category"]
return d
def get_permissions(self):
"""
Return permissions
:rtype: list of string
"""
return self.permissions
def get_details_permissions(self):
"""
Return permissions with details
:rtype: list of string
"""
l = {}
for i in self.permissions :
perm = i
pos = i.rfind(".")
if pos != -1 :
perm = i[pos+1:]
try :
l[ i ] = DVM_PERMISSIONS["MANIFEST_PERMISSION"][ perm ]
except KeyError :
l[ i ] = [ "normal", "Unknown permission from android reference", "Unknown permission from android reference" ]
return l
def get_max_sdk_version(self):
"""
Return the android:maxSdkVersion attribute
:rtype: string
"""
return self.get_element("uses-sdk", "android:maxSdkVersion")
def get_min_sdk_version(self):
"""
Return the android:minSdkVersion attribute
:rtype: string
"""
return self.get_element("uses-sdk", "android:minSdkVersion")
def get_target_sdk_version(self) :
"""
Return the android:targetSdkVersion attribute
:rtype: string
"""
return self.get_element( "uses-sdk", "android:targetSdkVersion" )
def get_libraries(self) :
"""
Return the android:name attributes for libraries
:rtype: list
"""
return self.get_elements( "uses-library", "android:name" )
def get_certificate(self, filename):
"""
Return a certificate object by giving the name in the apk file
"""
import chilkat
cert = chilkat.CkCert()
f = self.get_file(filename)
success = cert.LoadFromBinary2(f, len(f))
return success, cert
def new_zip(self, filename, deleted_files=None, new_files={}) :
"""
Create a new zip file
:param filename: the output filename of the zip
:param deleted_files: a regex pattern used to remove specific files
:param new_files: a dictionary of new files
:type filename: string
:type deleted_files: None or a string
:type new_files: a dictionary (key: filename, value: content of the file)
"""
if self.zipmodule == 2:
from androguard.patch import zipfile
zout = zipfile.ZipFile(filename, 'w')
else:
import zipfile
zout = zipfile.ZipFile(filename, 'w')
for item in self.zip.infolist():
if deleted_files != None:
if re.match(deleted_files, item.filename) == None:
if item.filename in new_files:
zout.writestr(item, new_files[item.filename])
else:
buffer = self.zip.read(item.filename)
zout.writestr(item, buffer)
zout.close()
def get_android_manifest_axml(self):
"""
Return the :class:`AXMLPrinter` object which corresponds to the AndroidManifest.xml file
:rtype: :class:`AXMLPrinter`
"""
try:
return self.axml["AndroidManifest.xml"]
except KeyError:
return None
def get_android_manifest_xml(self):
"""
Return the xml object which corresponds to the AndroidManifest.xml file
:rtype: object
"""
try:
return self.xml["AndroidManifest.xml"]
except KeyError:
return None
def get_android_resources(self):
"""
Return the :class:`ARSCParser` object which corresponds to the resources.arsc file
:rtype: :class:`ARSCParser`
"""
try:
return self.arsc["resources.arsc"]
except KeyError:
try:
self.arsc["resources.arsc"] = ARSCParser(self.zip.read("resources.arsc"))
return self.arsc["resources.arsc"]
except KeyError:
return None
def get_signature_name(self):
signature_expr = re.compile("^(META-INF/)(.*)(\.RSA)$")
for i in self.get_files():
if signature_expr.search(i):
return i
return None
def get_signature(self):
signature_expr = re.compile("^(META-INF/)(.*)(\.RSA)$")
for i in self.get_files():
if signature_expr.search(i):
return self.get_file(i)
return None
def show(self):
self.get_files_types()
print "FILES: "
for i in self.get_files():
try:
print "\t", i, self.files[i], "%x" % self.files_crc32[i]
except KeyError:
print "\t", i, "%x" % self.files_crc32[i]
print "PERMISSIONS: "
details_permissions = self.get_details_permissions()
for i in details_permissions:
print "\t", i, details_permissions[i]
print "MAIN ACTIVITY: ", self.get_main_activity()
print "ACTIVITIES: "
activities = self.get_activities()
for i in activities:
filters = self.get_intent_filters("activity", i)
print "\t", i, filters or ""
print "SERVICES: "
services = self.get_services()
for i in services:
filters = self.get_intent_filters("service", i)
print "\t", i, filters or ""
print "RECEIVERS: "
receivers = self.get_receivers()
for i in receivers:
filters = self.get_intent_filters("receiver", i)
print "\t", i, filters or ""
print "PROVIDERS: ", self.get_providers()
def show_Certificate(cert):
print "Issuer: C=%s, CN=%s, DN=%s, E=%s, L=%s, O=%s, OU=%s, S=%s" % (cert.issuerC(), cert.issuerCN(), cert.issuerDN(), cert.issuerE(), cert.issuerL(), cert.issuerO(), cert.issuerOU(), cert.issuerS())
print "Subject: C=%s, CN=%s, DN=%s, E=%s, L=%s, O=%s, OU=%s, S=%s" % (cert.subjectC(), cert.subjectCN(), cert.subjectDN(), cert.subjectE(), cert.subjectL(), cert.subjectO(), cert.subjectOU(), cert.subjectS())
######################################################## AXML FORMAT ########################################################
# Translated from http://code.google.com/p/android4me/source/browse/src/android/content/res/AXmlResourceParser.java
UTF8_FLAG = 0x00000100
CHUNK_STRINGPOOL_TYPE = 0x001C0001
CHUNK_NULL_TYPE = 0x00000000
class StringBlock:
def __init__(self, buff):
self.start = buff.get_idx()
self._cache = {}
self.header_size, self.header = self.skipNullPadding(buff)
self.chunkSize = unpack('<i', buff.read(4))[0]
self.stringCount = unpack('<i', buff.read(4))[0]
self.styleOffsetCount = unpack('<i', buff.read(4))[0]
self.flags = unpack('<i', buff.read(4))[0]
self.m_isUTF8 = ((self.flags & UTF8_FLAG) != 0)
self.stringsOffset = unpack('<i', buff.read(4))[0]
self.stylesOffset = unpack('<i', buff.read(4))[0]
self.m_stringOffsets = []
self.m_styleOffsets = []
self.m_strings = []
self.m_styles = []
for i in range(0, self.stringCount):
self.m_stringOffsets.append(unpack('<i', buff.read(4))[0])
for i in range(0, self.styleOffsetCount):
self.m_styleOffsets.append(unpack('<i', buff.read(4))[0])
size = self.chunkSize - self.stringsOffset
if self.stylesOffset != 0:
size = self.stylesOffset - self.stringsOffset
# FIXME
if (size % 4) != 0:
androconf.warning("ooo")
for i in range(0, size):
self.m_strings.append(unpack('=b', buff.read(1))[0])
if self.stylesOffset != 0:
size = self.chunkSize - self.stylesOffset
# FIXME
if (size % 4) != 0:
androconf.warning("ooo")
for i in range(0, size / 4):
self.m_styles.append(unpack('<i', buff.read(4))[0])
def skipNullPadding(self, buff):
def readNext(buff, first_run=True):
header = unpack('<i', buff.read(4))[0]
if header == CHUNK_NULL_TYPE and first_run:
androconf.info("Skipping null padding in StringBlock header")
header = readNext(buff, first_run=False)
elif header != CHUNK_STRINGPOOL_TYPE:
androconf.warning("Invalid StringBlock header")
return header
header = readNext(buff)
return header >> 8, header & 0xFF
def getString(self, idx):
if idx in self._cache:
return self._cache[idx]
if idx < 0 or not self.m_stringOffsets or idx >= len(self.m_stringOffsets):
return ""
offset = self.m_stringOffsets[idx]
if not self.m_isUTF8:
length = self.getShort2(self.m_strings, offset)
offset += 2
self._cache[idx] = self.decode(self.m_strings, offset, length)
else:
offset += self.getVarint(self.m_strings, offset)[1]
varint = self.getVarint(self.m_strings, offset)
offset += varint[1]
length = varint[0]
self._cache[idx] = self.decode2(self.m_strings, offset, length)
return self._cache[idx]
def getStyle(self, idx):
print idx
print idx in self.m_styleOffsets, self.m_styleOffsets[idx]
print self.m_styles[0]
def decode(self, array, offset, length):
length = length * 2
length = length + length % 2
data = ""
for i in range(0, length):
t_data = pack("=b", self.m_strings[offset + i])
data += unicode(t_data, errors='ignore')
if data[-2:] == "\x00\x00":
break
end_zero = data.find("\x00\x00")
if end_zero != -1:
data = data[:end_zero]
return data.decode("utf-16", 'replace')
def decode2(self, array, offset, length):
data = ""
for i in range(0, length):
t_data = pack("=b", self.m_strings[offset + i])
data += unicode(t_data, errors='ignore')
return data.decode("utf-8", 'replace')
def getVarint(self, array, offset):
val = array[offset]
more = (val & 0x80) != 0
val &= 0x7f
if not more:
return val, 1
return val << 8 | array[offset + 1] & 0xff, 2
def getShort(self, array, offset):
value = array[offset / 4]
if ((offset % 4) / 2) == 0:
return value & 0xFFFF
else:
return value >> 16
def getShort2(self, array, offset):
return (array[offset + 1] & 0xff) << 8 | array[offset] & 0xff
def show(self):
print "StringBlock", hex(self.start), hex(self.header), hex(self.header_size), hex(self.chunkSize), hex(self.stringsOffset), self.m_stringOffsets
for i in range(0, len(self.m_stringOffsets)):
print i, repr(self.getString(i))
ATTRIBUTE_IX_NAMESPACE_URI = 0
ATTRIBUTE_IX_NAME = 1
ATTRIBUTE_IX_VALUE_STRING = 2
ATTRIBUTE_IX_VALUE_TYPE = 3
ATTRIBUTE_IX_VALUE_DATA = 4
ATTRIBUTE_LENGHT = 5
CHUNK_AXML_FILE = 0x00080003
CHUNK_RESOURCEIDS = 0x00080180
CHUNK_XML_FIRST = 0x00100100
CHUNK_XML_START_NAMESPACE = 0x00100100
CHUNK_XML_END_NAMESPACE = 0x00100101
CHUNK_XML_START_TAG = 0x00100102
CHUNK_XML_END_TAG = 0x00100103
CHUNK_XML_TEXT = 0x00100104
CHUNK_XML_LAST = 0x00100104
START_DOCUMENT = 0
END_DOCUMENT = 1
START_TAG = 2
END_TAG = 3
TEXT = 4
class AXMLParser:
def __init__(self, raw_buff):
self.reset()
self.valid_axml = True
self.buff = bytecode.BuffHandle(raw_buff)
axml_file = unpack('<L', self.buff.read(4))[0]
if axml_file == CHUNK_AXML_FILE:
self.buff.read(4)
self.sb = StringBlock(self.buff)
self.m_resourceIDs = []
self.m_prefixuri = {}
self.m_uriprefix = {}
self.m_prefixuriL = []
self.visited_ns = []
else:
self.valid_axml = False
androconf.warning("Not a valid xml file")
def is_valid(self):
return self.valid_axml
def reset(self):
self.m_event = -1
self.m_lineNumber = -1
self.m_name = -1
self.m_namespaceUri = -1
self.m_attributes = []
self.m_idAttribute = -1
self.m_classAttribute = -1
self.m_styleAttribute = -1
def next(self):
self.doNext()
return self.m_event
def doNext(self):
if self.m_event == END_DOCUMENT:
return
event = self.m_event
self.reset()
while True:
chunkType = -1
# Fake END_DOCUMENT event.
if event == END_TAG:
pass
# START_DOCUMENT
if event == START_DOCUMENT:
chunkType = CHUNK_XML_START_TAG
else:
if self.buff.end():
self.m_event = END_DOCUMENT
break
chunkType = unpack('<L', self.buff.read(4))[0]
if chunkType == CHUNK_RESOURCEIDS:
chunkSize = unpack('<L', self.buff.read(4))[0]
# FIXME
if chunkSize < 8 or chunkSize % 4 != 0:
androconf.warning("Invalid chunk size")
for i in range(0, chunkSize / 4 - 2):
self.m_resourceIDs.append(unpack('<L', self.buff.read(4))[0])
continue
# FIXME
if chunkType < CHUNK_XML_FIRST or chunkType > CHUNK_XML_LAST:
androconf.warning("invalid chunk type")
# Fake START_DOCUMENT event.
if chunkType == CHUNK_XML_START_TAG and event == -1:
self.m_event = START_DOCUMENT
break
self.buff.read(4) # /*chunkSize*/
lineNumber = unpack('<L', self.buff.read(4))[0]
self.buff.read(4) # 0xFFFFFFFF
if chunkType == CHUNK_XML_START_NAMESPACE or chunkType == CHUNK_XML_END_NAMESPACE:
if chunkType == CHUNK_XML_START_NAMESPACE:
prefix = unpack('<L', self.buff.read(4))[0]
uri = unpack('<L', self.buff.read(4))[0]
self.m_prefixuri[prefix] = uri
self.m_uriprefix[uri] = prefix
self.m_prefixuriL.append((prefix, uri))
self.ns = uri
else:
self.ns = -1
self.buff.read(4)
self.buff.read(4)
(prefix, uri) = self.m_prefixuriL.pop()
#del self.m_prefixuri[ prefix ]
#del self.m_uriprefix[ uri ]
continue
self.m_lineNumber = lineNumber
if chunkType == CHUNK_XML_START_TAG:
self.m_namespaceUri = unpack('<L', self.buff.read(4))[0]
self.m_name = unpack('<L', self.buff.read(4))[0]
# FIXME
self.buff.read(4) # flags
attributeCount = unpack('<L', self.buff.read(4))[0]
self.m_idAttribute = (attributeCount >> 16) - 1
attributeCount = attributeCount & 0xFFFF
self.m_classAttribute = unpack('<L', self.buff.read(4))[0]
self.m_styleAttribute = (self.m_classAttribute >> 16) - 1
self.m_classAttribute = (self.m_classAttribute & 0xFFFF) - 1
for i in range(0, attributeCount * ATTRIBUTE_LENGHT):
self.m_attributes.append(unpack('<L', self.buff.read(4))[0])
for i in range(ATTRIBUTE_IX_VALUE_TYPE, len(self.m_attributes), ATTRIBUTE_LENGHT):
self.m_attributes[i] = self.m_attributes[i] >> 24
self.m_event = START_TAG
break
if chunkType == CHUNK_XML_END_TAG:
self.m_namespaceUri = unpack('<L', self.buff.read(4))[0]
self.m_name = unpack('<L', self.buff.read(4))[0]
self.m_event = END_TAG
break
if chunkType == CHUNK_XML_TEXT:
self.m_name = unpack('<L', self.buff.read(4))[0]
# FIXME
self.buff.read(4)
self.buff.read(4)
self.m_event = TEXT
break
def getPrefixByUri(self, uri):
try:
return self.m_uriprefix[uri]
except KeyError:
return -1
def getPrefix(self):
try:
return self.sb.getString(self.m_uriprefix[self.m_namespaceUri])
except KeyError:
return u''
def getName(self):
if self.m_name == -1 or (self.m_event != START_TAG and self.m_event != END_TAG) :
return u''
return self.sb.getString(self.m_name)
def getText(self) :
if self.m_name == -1 or self.m_event != TEXT :
return u''
return self.sb.getString(self.m_name)
def getNamespacePrefix(self, pos):
prefix = self.m_prefixuriL[pos][0]
return self.sb.getString(prefix)
def getNamespaceUri(self, pos):
uri = self.m_prefixuriL[pos][1]
return self.sb.getString(uri)
def getXMLNS(self):
buff = ""
for i in self.m_uriprefix:
if i not in self.visited_ns:
buff += "xmlns:%s=\"%s\"\n" % (self.sb.getString(self.m_uriprefix[i]), self.sb.getString(self.m_prefixuri[self.m_uriprefix[i]]))
self.visited_ns.append(i)
return buff
def getNamespaceCount(self, pos) :
pass
def getAttributeOffset(self, index):
# FIXME
if self.m_event != START_TAG:
androconf.warning("Current event is not START_TAG.")
offset = index * 5
# FIXME
if offset >= len(self.m_attributes):
androconf.warning("Invalid attribute index")
return offset
def getAttributeCount(self):
if self.m_event != START_TAG:
return -1
return len(self.m_attributes) / ATTRIBUTE_LENGHT
def getAttributePrefix(self, index):
offset = self.getAttributeOffset(index)
uri = self.m_attributes[offset + ATTRIBUTE_IX_NAMESPACE_URI]
prefix = self.getPrefixByUri(uri)
if prefix == -1:
return ""
return self.sb.getString(prefix)
def getAttributeName(self, index) :
offset = self.getAttributeOffset(index)
name = self.m_attributes[offset+ATTRIBUTE_IX_NAME]
if name == -1 :
return ""
return self.sb.getString( name )
def getAttributeValueType(self, index) :
offset = self.getAttributeOffset(index)
return self.m_attributes[offset+ATTRIBUTE_IX_VALUE_TYPE]
def getAttributeValueData(self, index) :
offset = self.getAttributeOffset(index)
return self.m_attributes[offset+ATTRIBUTE_IX_VALUE_DATA]
def getAttributeValue(self, index) :
offset = self.getAttributeOffset(index)
valueType = self.m_attributes[offset+ATTRIBUTE_IX_VALUE_TYPE]
if valueType == TYPE_STRING :
valueString = self.m_attributes[offset+ATTRIBUTE_IX_VALUE_STRING]
return self.sb.getString( valueString )
# WIP
return ""
#int valueData=m_attributes[offset+ATTRIBUTE_IX_VALUE_DATA];
#return TypedValue.coerceToString(valueType,valueData);
TYPE_ATTRIBUTE = 2
TYPE_DIMENSION = 5
TYPE_FIRST_COLOR_INT = 28
TYPE_FIRST_INT = 16
TYPE_FLOAT = 4
TYPE_FRACTION = 6
TYPE_INT_BOOLEAN = 18
TYPE_INT_COLOR_ARGB4 = 30
TYPE_INT_COLOR_ARGB8 = 28
TYPE_INT_COLOR_RGB4 = 31
TYPE_INT_COLOR_RGB8 = 29
TYPE_INT_DEC = 16
TYPE_INT_HEX = 17
TYPE_LAST_COLOR_INT = 31
TYPE_LAST_INT = 31
TYPE_NULL = 0
TYPE_REFERENCE = 1
TYPE_STRING = 3
RADIX_MULTS = [ 0.00390625, 3.051758E-005, 1.192093E-007, 4.656613E-010 ]
DIMENSION_UNITS = [ "px","dip","sp","pt","in","mm" ]
FRACTION_UNITS = [ "%", "%p" ]
COMPLEX_UNIT_MASK = 15
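# complexToFloat() decodes Android "complex" values (dimensions/fractions):
# bits 8-31 hold the mantissa, bits 4-5 select the radix (see RADIX_MULTS),
# and the low four bits (COMPLEX_UNIT_MASK) select the unit, which callers
# append from DIMENSION_UNITS or FRACTION_UNITS.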
def complexToFloat(xcomplex):
return float(xcomplex & 0xFFFFFF00) * RADIX_MULTS[(xcomplex >> 4) & 3]
class AXMLPrinter:
def __init__(self, raw_buff):
self.axml = AXMLParser(raw_buff)
self.xmlns = False
self.buff = u''
while self.axml.is_valid():
_type = self.axml.next()
# print "tagtype = ", _type
if _type == START_DOCUMENT:
self.buff += u'<?xml version="1.0" encoding="utf-8"?>\n'
elif _type == START_TAG:
self.buff += u'<' + self.getPrefix(self.axml.getPrefix()) + self.axml.getName() + u'\n'
self.buff += self.axml.getXMLNS()
for i in range(0, self.axml.getAttributeCount()):
self.buff += "%s%s=\"%s\"\n" % (self.getPrefix(
self.axml.getAttributePrefix(i)), self.axml.getAttributeName(i), self._escape(self.getAttributeValue(i)))
self.buff += u'>\n'
elif _type == END_TAG:
self.buff += "</%s%s>\n" % (self.getPrefix(self.axml.getPrefix()), self.axml.getName())
elif _type == TEXT:
self.buff += "%s\n" % self.axml.getText()
elif _type == END_DOCUMENT:
break
# pleed patch
def _escape(self, s):
s = s.replace("&", "&")
s = s.replace('"', """)
s = s.replace("'", "'")
s = s.replace("<", "<")
s = s.replace(">", ">")
return escape(s)
def get_buff(self):
return self.buff.encode('utf-8')
def get_xml(self):
return minidom.parseString(self.get_buff()).toprettyxml(encoding="utf-8")
def get_xml_obj(self):
return minidom.parseString(self.get_buff())
def getPrefix(self, prefix):
if prefix is None or len(prefix) == 0:
return u''
return prefix + u':'
def getAttributeValue(self, index):
_type = self.axml.getAttributeValueType(index)
_data = self.axml.getAttributeValueData(index)
if _type == TYPE_STRING:
return self.axml.getAttributeValue(index)
elif _type == TYPE_ATTRIBUTE:
return "?%s%08X" % (self.getPackage(_data), _data)
elif _type == TYPE_REFERENCE:
return "@%s%08X" % (self.getPackage(_data), _data)
elif _type == TYPE_FLOAT:
return "%f" % unpack("=f", pack("=L", _data))[0]
elif _type == TYPE_INT_HEX:
return "0x%08X" % _data
elif _type == TYPE_INT_BOOLEAN:
if _data == 0:
return "false"
return "true"
elif _type == TYPE_DIMENSION:
return "%f%s" % (complexToFloat(_data), DIMENSION_UNITS[_data & COMPLEX_UNIT_MASK])
elif _type == TYPE_FRACTION:
return "%f%s" % (complexToFloat(_data) * 100, FRACTION_UNITS[_data & COMPLEX_UNIT_MASK])
elif _type >= TYPE_FIRST_COLOR_INT and _type <= TYPE_LAST_COLOR_INT:
return "#%08X" % _data
elif _type >= TYPE_FIRST_INT and _type <= TYPE_LAST_INT:
return "%d" % androconf.long2int(_data)
return "<0x%X, type 0x%02X>" % (_data, _type)
def getPackage(self, id):
if id >> 24 == 1:
return "android:"
return ""
RES_NULL_TYPE = 0x0000
RES_STRING_POOL_TYPE = 0x0001
RES_TABLE_TYPE = 0x0002
RES_XML_TYPE = 0x0003
# Chunk types in RES_XML_TYPE
RES_XML_FIRST_CHUNK_TYPE = 0x0100
RES_XML_START_NAMESPACE_TYPE= 0x0100
RES_XML_END_NAMESPACE_TYPE = 0x0101
RES_XML_START_ELEMENT_TYPE = 0x0102
RES_XML_END_ELEMENT_TYPE = 0x0103
RES_XML_CDATA_TYPE = 0x0104
RES_XML_LAST_CHUNK_TYPE = 0x017f
# This contains a uint32_t array mapping strings in the string
# pool back to resource identifiers. It is optional.
RES_XML_RESOURCE_MAP_TYPE = 0x0180
# Chunk types in RES_TABLE_TYPE
RES_TABLE_PACKAGE_TYPE = 0x0200
RES_TABLE_TYPE_TYPE = 0x0201
RES_TABLE_TYPE_SPEC_TYPE = 0x0202
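# resources.arsc layout handled below: a table header and global string pool,
# then one package chunk per package, each carrying its own type-string and
# key-string pools followed by type-spec and type chunks whose entries map to
# concrete values. Resource IDs are composed as 0xPPTTEEEE (package, type,
# entry), which is what the mResId bookkeeping in the parsing loop rebuilds.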
class ARSCParser:
def __init__(self, raw_buff):
self.analyzed = False
self.buff = bytecode.BuffHandle(raw_buff)
#print "SIZE", hex(self.buff.size())
self.header = ARSCHeader(self.buff)
self.packageCount = unpack('<i', self.buff.read(4))[0]
#print hex(self.packageCount)
self.stringpool_main = StringBlock(self.buff)
self.next_header = ARSCHeader(self.buff)
self.packages = {}
self.values = {}
for i in range(0, self.packageCount):
current_package = ARSCResTablePackage(self.buff)
package_name = current_package.get_name()
self.packages[package_name] = []
mTableStrings = StringBlock(self.buff)
mKeyStrings = StringBlock(self.buff)
#self.stringpool_main.show()
#self.mTableStrings.show()
#self.mKeyStrings.show()
self.packages[package_name].append(current_package)
self.packages[package_name].append(mTableStrings)
self.packages[package_name].append(mKeyStrings)
pc = PackageContext(current_package, self.stringpool_main, mTableStrings, mKeyStrings)
current = self.buff.get_idx()
while not self.buff.end():
header = ARSCHeader(self.buff)
self.packages[package_name].append(header)
if header.type == RES_TABLE_TYPE_SPEC_TYPE:
self.packages[package_name].append(ARSCResTypeSpec(self.buff, pc))
elif header.type == RES_TABLE_TYPE_TYPE:
a_res_type = ARSCResType(self.buff, pc)
self.packages[package_name].append(a_res_type)
entries = []
for i in range(0, a_res_type.entryCount):
current_package.mResId = current_package.mResId & 0xffff0000 | i
entries.append((unpack('<i', self.buff.read(4))[0], current_package.mResId))
self.packages[package_name].append(entries)
for entry, res_id in entries:
if self.buff.end():
break
if entry != -1:
ate = ARSCResTableEntry(self.buff, res_id, pc)
self.packages[package_name].append(ate)
elif header.type == RES_TABLE_PACKAGE_TYPE:
break
else:
androconf.warning("unknown type")
break
current += header.size
self.buff.set_idx(current)
def _analyse(self):
if self.analyzed:
return
self.analyzed = True
for package_name in self.packages:
self.values[package_name] = {}
nb = 3
for header in self.packages[package_name][nb:]:
if isinstance(header, ARSCHeader):
if header.type == RES_TABLE_TYPE_TYPE:
a_res_type = self.packages[package_name][nb + 1]
if a_res_type.config.get_language() not in self.values[package_name]:
self.values[package_name][a_res_type.config.get_language()] = {}
self.values[package_name][a_res_type.config.get_language()]["public"] = []
c_value = self.values[package_name][a_res_type.config.get_language()]
entries = self.packages[package_name][nb + 2]
nb_i = 0
for entry, res_id in entries:
if entry != -1:
ate = self.packages[package_name][nb + 3 + nb_i]
#print ate.is_public(), a_res_type.get_type(), ate.get_value(), hex(ate.mResId)
if ate.get_index() != -1:
c_value["public"].append((a_res_type.get_type(), ate.get_value(), ate.mResId))
if a_res_type.get_type() not in c_value:
c_value[a_res_type.get_type()] = []
if a_res_type.get_type() == "string":
c_value["string"].append(self.get_resource_string(ate))
elif a_res_type.get_type() == "id":
if not ate.is_complex():
c_value["id"].append(self.get_resource_id(ate))
elif a_res_type.get_type() == "bool":
if not ate.is_complex():
c_value["bool"].append(self.get_resource_bool(ate))
elif a_res_type.get_type() == "integer":
c_value["integer"].append(self.get_resource_integer(ate))
elif a_res_type.get_type() == "color":
c_value["color"].append(self.get_resource_color(ate))
elif a_res_type.get_type() == "dimen":
c_value["dimen"].append(self.get_resource_dimen(ate))
#elif a_res_type.get_type() == "style":
# c_value["style"].append(self.get_resource_style(ate))
nb_i += 1
nb += 1
def get_resource_string(self, ate):
return [ate.get_value(), ate.get_key_data()]
def get_resource_id(self, ate):
x = [ate.get_value()]
if ate.key.get_data() == 0:
x.append("false")
elif ate.key.get_data() == 1:
x.append("true")
return x
def get_resource_bool(self, ate):
x = [ate.get_value()]
if ate.key.get_data() == 0:
x.append("false")
elif ate.key.get_data() == -1:
x.append("true")
return x
def get_resource_integer(self, ate):
return [ate.get_value(), ate.key.get_data()]
def get_resource_color(self, ate):
entry_data = ate.key.get_data()
return [ate.get_value(), "#%02x%02x%02x%02x" % (((entry_data >> 24) & 0xFF), ((entry_data >> 16) & 0xFF), ((entry_data >> 8) & 0xFF), (entry_data & 0xFF))]
def get_resource_dimen(self, ate):
try:
return [ate.get_value(), "%s%s" % (complexToFloat(ate.key.get_data()), DIMENSION_UNITS[ate.key.get_data() & COMPLEX_UNIT_MASK])]
except Exception, why:
androconf.warning(why.__str__())
return [ate.get_value(), ate.key.get_data()]
# FIXME
def get_resource_style(self, ate):
return ["", ""]
def get_packages_names(self):
return self.packages.keys()
def get_locales(self, package_name):
self._analyse()
return self.values[package_name].keys()
def get_types(self, package_name, locale):
self._analyse()
return self.values[package_name][locale].keys()
def get_public_resources(self, package_name, locale='\x00\x00'):
self._analyse()
buff = '<?xml version="1.0" encoding="utf-8"?>\n'
buff += '<resources>\n'
try:
for i in self.values[package_name][locale]["public"]:
buff += '<public type="%s" name="%s" id="0x%08x" />\n' % (i[0], i[1], i[2])
except KeyError:
pass
buff += '</resources>\n'
return buff.encode('utf-8')
def get_string_resources(self, package_name, locale='\x00\x00'):
self._analyse()
buff = '<?xml version="1.0" encoding="utf-8"?>\n'
buff += '<resources>\n'
try:
for i in self.values[package_name][locale]["string"]:
buff += '<string name="%s">%s</string>\n' % (i[0], i[1])
except KeyError:
pass
buff += '</resources>\n'
return buff.encode('utf-8')
def get_strings_resources(self):
self._analyse()
buff = '<?xml version="1.0" encoding="utf-8"?>\n'
buff += "<packages>\n"
for package_name in self.get_packages_names():
buff += "<package name=\"%s\">\n" % package_name
for locale in self.get_locales(package_name):
buff += "<locale value=%s>\n" % repr(locale)
buff += '<resources>\n'
try:
for i in self.values[package_name][locale]["string"]:
buff += '<string name="%s">%s</string>\n' % (i[0], i[1])
except KeyError:
pass
buff += '</resources>\n'
buff += '</locale>\n'
buff += "</package>\n"
buff += "</packages>\n"
return buff.encode('utf-8')
def get_id_resources(self, package_name, locale='\x00\x00'):
self._analyse()
buff = '<?xml version="1.0" encoding="utf-8"?>\n'
buff += '<resources>\n'
try:
for i in self.values[package_name][locale]["id"]:
if len(i) == 1:
buff += '<item type="id" name="%s"/>\n' % (i[0])
else:
buff += '<item type="id" name="%s">%s</item>\n' % (i[0], i[1])
except KeyError:
pass
buff += '</resources>\n'
return buff.encode('utf-8')
def get_bool_resources(self, package_name, locale='\x00\x00'):
self._analyse()
buff = '<?xml version="1.0" encoding="utf-8"?>\n'
buff += '<resources>\n'
try:
for i in self.values[package_name][locale]["bool"]:
buff += '<bool name="%s">%s</bool>\n' % (i[0], i[1])
except KeyError:
pass
buff += '</resources>\n'
return buff.encode('utf-8')
def get_integer_resources(self, package_name, locale='\x00\x00'):
self._analyse()
buff = '<?xml version="1.0" encoding="utf-8"?>\n'
buff += '<resources>\n'
try:
for i in self.values[package_name][locale]["integer"]:
buff += '<integer name="%s">%s</integer>\n' % (i[0], i[1])
except KeyError:
pass
buff += '</resources>\n'
return buff.encode('utf-8')
def get_color_resources(self, package_name, locale='\x00\x00'):
self._analyse()
buff = '<?xml version="1.0" encoding="utf-8"?>\n'
buff += '<resources>\n'
try:
for i in self.values[package_name][locale]["color"]:
buff += '<color name="%s">%s</color>\n' % (i[0], i[1])
except KeyError:
pass
buff += '</resources>\n'
return buff.encode('utf-8')
def get_dimen_resources(self, package_name, locale='\x00\x00'):
self._analyse()
buff = '<?xml version="1.0" encoding="utf-8"?>\n'
buff += '<resources>\n'
try:
for i in self.values[package_name][locale]["dimen"]:
buff += '<dimen name="%s">%s</dimen>\n' % (i[0], i[1])
except KeyError:
pass
buff += '</resources>\n'
return buff.encode('utf-8')
def get_id(self, package_name, rid, locale='\x00\x00'):
self._analyse()
try:
for i in self.values[package_name][locale]["public"]:
if i[2] == rid:
return i
except KeyError:
return None
def get_string(self, package_name, name, locale='\x00\x00'):
self._analyse()
try:
for i in self.values[package_name][locale]["string"]:
if i[0] == name:
return i
except KeyError:
return None
def get_items(self, package_name):
self._analyse()
return self.packages[package_name]
class PackageContext:
def __init__(self, current_package, stringpool_main, mTableStrings, mKeyStrings):
self.stringpool_main = stringpool_main
self.mTableStrings = mTableStrings
self.mKeyStrings = mKeyStrings
self.current_package = current_package
def get_mResId(self):
return self.current_package.mResId
def set_mResId(self, mResId):
self.current_package.mResId = mResId
class ARSCHeader:
def __init__(self, buff):
self.start = buff.get_idx()
self.type = unpack('<h', buff.read(2))[0]
self.header_size = unpack('<h', buff.read(2))[0]
self.size = unpack('<i', buff.read(4))[0]
#print "ARSCHeader", hex(self.start), hex(self.type), hex(self.header_size), hex(self.size)
class ARSCResTablePackage:
def __init__(self, buff):
self.start = buff.get_idx()
self.id = unpack('<i', buff.read(4))[0]
self.name = buff.readNullString(256)
self.typeStrings = unpack('<i', buff.read(4))[0]
self.lastPublicType = unpack('<i', buff.read(4))[0]
self.keyStrings = unpack('<i', buff.read(4))[0]
self.lastPublicKey = unpack('<i', buff.read(4))[0]
self.mResId = self.id << 24
#print "ARSCResTablePackage", hex(self.start), hex(self.id), hex(self.mResId), repr(self.name.decode("utf-16", errors='replace')), hex(self.typeStrings), hex(self.lastPublicType), hex(self.keyStrings), hex(self.lastPublicKey)
def get_name(self):
name = self.name.decode("utf-16", 'replace')
name = name[:name.find("\x00")]
return name
class ARSCResTypeSpec:
def __init__(self, buff, parent=None):
self.start = buff.get_idx()
self.parent = parent
self.id = unpack('<b', buff.read(1))[0]
self.res0 = unpack('<b', buff.read(1))[0]
self.res1 = unpack('<h', buff.read(2))[0]
self.entryCount = unpack('<i', buff.read(4))[0]
#print "ARSCResTypeSpec", hex(self.start), hex(self.id), hex(self.res0), hex(self.res1), hex(self.entryCount), "table:" + self.parent.mTableStrings.getString(self.id - 1)
self.typespec_entries = []
for i in range(0, self.entryCount):
self.typespec_entries.append(unpack('<i', buff.read(4))[0])
class ARSCResType:
def __init__(self, buff, parent=None):
self.start = buff.get_idx()
self.parent = parent
self.id = unpack('<b', buff.read(1))[0]
self.res0 = unpack('<b', buff.read(1))[0]
self.res1 = unpack('<h', buff.read(2))[0]
self.entryCount = unpack('<i', buff.read(4))[0]
self.entriesStart = unpack('<i', buff.read(4))[0]
self.mResId = (0xff000000 & self.parent.get_mResId()) | self.id << 16
self.parent.set_mResId(self.mResId)
#print "ARSCResType", hex(self.start), hex(self.id), hex(self.res0), hex(self.res1), hex(self.entryCount), hex(self.entriesStart), hex(self.mResId), "table:" + self.parent.mTableStrings.getString(self.id - 1)
self.config = ARSCResTableConfig(buff)
def get_type(self):
return self.parent.mTableStrings.getString(self.id - 1)
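# ARSCResTableConfig corresponds to the ResTable_config struct: the locale
# field packs a two-character language code in the low 16 bits and a
# two-character country code in the high 16 bits; '\x00\x00' is the default
# ("any") locale, which is why the get_*_resources helpers above use it as
# their default argument.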
class ARSCResTableConfig:
def __init__(self, buff):
self.start = buff.get_idx()
self.size = unpack('<i', buff.read(4))[0]
self.imsi = unpack('<i', buff.read(4))[0]
self.locale = unpack('<i', buff.read(4))[0]
self.screenType = unpack('<i', buff.read(4))[0]
self.input = unpack('<i', buff.read(4))[0]
self.screenSize = unpack('<i', buff.read(4))[0]
self.version = unpack('<i', buff.read(4))[0]
self.screenConfig = 0
self.screenSizeDp = 0
if self.size >= 32:
self.screenConfig = unpack('<i', buff.read(4))[0]
if self.size >= 36:
self.screenSizeDp = unpack('<i', buff.read(4))[0]
self.exceedingSize = self.size - 36
if self.exceedingSize > 0:
androconf.info("Skipping padding bytes.")
self.padding = buff.read(self.exceedingSize)
#print "ARSCResTableConfig", hex(self.start), hex(self.size), hex(self.imsi), hex(self.locale), repr(self.get_language()), repr(self.get_country()), hex(self.screenType), hex(self.input), hex(self.screenSize), hex(self.version), hex(self.screenConfig), hex(self.screenSizeDp)
def get_language(self):
x = self.locale & 0x0000ffff
return chr(x & 0x00ff) + chr((x & 0xff00) >> 8)
def get_country(self):
x = (self.locale & 0xffff0000) >> 16
return chr(x & 0x00ff) + chr((x & 0xff00) >> 8)
class ARSCResTableEntry:
def __init__(self, buff, mResId, parent=None):
self.start = buff.get_idx()
self.mResId = mResId
self.parent = parent
self.size = unpack('<h', buff.read(2))[0]
self.flags = unpack('<h', buff.read(2))[0]
self.index = unpack('<i', buff.read(4))[0]
#print "ARSCResTableEntry", hex(self.start), hex(self.mResId), hex(self.size), hex(self.flags), hex(self.index), self.is_complex()#, hex(self.mResId)
if self.flags & 1:
self.item = ARSCComplex(buff, parent)
else:
self.key = ARSCResStringPoolRef(buff, self.parent)
def get_index(self):
return self.index
def get_value(self):
return self.parent.mKeyStrings.getString(self.index)
def get_key_data(self):
return self.key.get_data_value()
def is_public(self):
return self.flags == 0 or self.flags == 2
def is_complex(self):
return (self.flags & 1) == 1
class ARSCComplex:
def __init__(self, buff, parent=None):
self.start = buff.get_idx()
self.parent = parent
self.id_parent = unpack('<i', buff.read(4))[0]
self.count = unpack('<i', buff.read(4))[0]
self.items = []
for i in range(0, self.count):
self.items.append((unpack('<i', buff.read(4))[0], ARSCResStringPoolRef(buff, self.parent)))
#print "ARSCComplex", hex(self.start), self.id_parent, self.count, repr(self.parent.mKeyStrings.getString(self.id_parent))
class ARSCResStringPoolRef:
def __init__(self, buff, parent=None):
self.start = buff.get_idx()
self.parent = parent
self.skip_bytes = buff.read(3)
self.data_type = unpack('<b', buff.read(1))[0]
self.data = unpack('<i', buff.read(4))[0]
#print "ARSCResStringPoolRef", hex(self.start), hex(self.data_type), hex(self.data)#, "key:" + self.parent.mKeyStrings.getString(self.index), self.parent.stringpool_main.getString(self.data)
def get_data_value(self):
return self.parent.stringpool_main.getString(self.data)
def get_data(self):
return self.data
def get_data_type(self):
return self.data_type
def get_arsc_info(arscobj):
buff = ""
for package in arscobj.get_packages_names():
buff += package + ":\n"
for locale in arscobj.get_locales(package):
buff += "\t" + repr(locale) + ":\n"
for ttype in arscobj.get_types(package, locale):
buff += "\t\t" + ttype + ":\n"
try:
tmp_buff = getattr(arscobj, "get_" + ttype + "_resources")(package, locale).decode("utf-8", 'replace').split("\n")
for i in tmp_buff:
buff += "\t\t\t" + i + "\n"
except AttributeError:
pass
return buff
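# --- Illustrative usage sketch (not part of the original module; assumes the
# usual androguard imports such as bytecode, androconf and StringBlock are
# available and that the APK is a standard zip archive):
#
#   import zipfile
#   apk = zipfile.ZipFile("example.apk")
#   manifest = AXMLPrinter(apk.read("AndroidManifest.xml"))
#   print manifest.get_xml()                  # pretty-printed manifest
#   arsc = ARSCParser(apk.read("resources.arsc"))
#   for package in arsc.get_packages_names():
#       print arsc.get_string_resources(package)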
|
|
'''User API unittest'''
import tests
import model.user
from unittest import TestCase
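# Note: `tests.async_test` and `tests.request` are the project's own test
# helpers and are not shown here. Purely as an assumption for readability, a
# decorator like async_test typically just drives the coroutine on an event
# loop, e.g.:
#
#   import asyncio, functools
#   def async_test(coro):
#       @functools.wraps(coro)
#       def wrapper(*args, **kwargs):
#           return asyncio.get_event_loop().run_until_complete(
#               coro(*args, **kwargs))
#       return wrapper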
class TestRegister(TestCase):
'''Register unittest.'''
@tests.async_test
async def test_register(self):
'''Test register.'''
response = await tests.request('/user/register', {
'mail': '[email protected]',
'password': '1234',
'name': 'Foo',
})
self.assertEqual(response, 'Success')
response = await tests.request('/user/register', {
'mail': '[email protected]',
'password': '1234',
'name': 'Foo',
})
self.assertEqual(response, 'Success')
@tests.async_test
async def test_exist(self):
'''Test duplicated register.'''
response = await tests.request('/user/register', {
'mail': '[email protected]',
'password': '1234',
'name': 'Foo',
})
self.assertEqual(response, 'Success')
response = await tests.request('/user/register', {
'mail': '[email protected]',
'password': '1234',
'name': 'Foo',
})
self.assertEqual(response, 'Eexist')
class TestLogin(TestCase):
'''Login unittest.'''
@tests.async_test
async def test_login(self):
'''Test login.'''
response = await tests.request('/user/register', {
'mail': '[email protected]',
'password': '1234',
'name': 'Foo',
})
self.assertEqual(response, 'Success')
response = await tests.request('/user/register', {
'mail': '[email protected]',
'password': '1234',
'name': 'Foo',
})
self.assertEqual(response, 'Success')
response = await tests.request('/user/login', {
'mail': '[email protected]',
'password': '1234',
})
self.assertEqual(response, 'Success')
response = await tests.request('/user/login', {
'mail': '[email protected]',
'password': '1234',
})
self.assertEqual(response, 'Success')
@tests.async_test
async def test_failed(self):
'''Test login failed.'''
response = await tests.request('/user/register', {
'mail': '[email protected]',
'password': '1234',
'name': 'Foo',
})
self.assertEqual(response, 'Success')
response = await tests.request('/user/login', {
'mail': '[email protected]',
'password': '12345'
})
self.assertEqual(response, 'Error')
response = await tests.request('/user/login', {
'mail': '[email protected]',
'password': '1234'
})
self.assertEqual(response, 'Error')
class TestGet(TestCase):
'''Get unittest.'''
@tests.async_test
async def test_get(self):
'''Test get information.'''
response = await tests.request('/user/get', {})
self.assertEqual(response, 'Error')
response = await tests.request('/user/register', {
'mail': '[email protected]',
'password': '1234',
'name': 'Foo',
})
self.assertEqual(response, 'Success')
response = await tests.request('/user/get', {})
self.assertEqual(response, 'Error')
response = await tests.request('/user/login', {
'mail': '[email protected]',
'password': '1234'
})
self.assertEqual(response, 'Success')
response = await tests.request('/user/get', {})
self.assertEqual(response, { 'uid': 1, 'name': 'Foo', 'level': 3 })
response = await tests.request('/user/1/get', {})
self.assertEqual(response, { 'uid': 1, 'name': 'Foo', 'level': 3 })
response = await tests.request('/user/100/get', {})
self.assertEqual(response, 'Error')
response = await tests.request('/user/register', {
'mail': '[email protected]',
'password': '1234',
'name': 'Foo',
})
self.assertEqual(response, 'Success')
response = await tests.request('/user/2/get', {})
self.assertEqual(response, 'Error')
class TestSet(TestCase):
'''Set unittest.'''
@tests.async_test
async def test_get(self):
'''Test set information.'''
response = await tests.request('/user/register', {
'mail': '[email protected]',
'password': '1234',
'name': 'Foo',
})
self.assertEqual(response, 'Success')
response = await tests.request('/user/login', {
'mail': '[email protected]',
'password': '1234',
})
self.assertEqual(response, 'Success')
response = await tests.request('/user/get', {})
self.assertNotEqual(response, 'Error')
uid = response['uid']
response = await tests.request('/user/{}/set'.format(uid), {
'name': 'Foo',
})
self.assertEqual(response, 'Success')
response = await tests.request('/user/{}/set'.format(uid), {
'name': 'Foo',
'password': '5678',
})
self.assertEqual(response, 'Success')
response = await tests.request('/user/get', {})
self.assertEqual(response, 'Error')
response = await tests.request('/user/login', {
'mail': '[email protected]',
'password': '1234',
})
self.assertEqual(response, 'Error')
response = await tests.request('/user/login', {
'mail': '[email protected]',
'password': '5678',
})
self.assertEqual(response, 'Success')
class TestList(TestCase):
'''List unittest.'''
@tests.async_test
async def test_list(self):
'''Test listing users.'''
response = await tests.request('/user/register', {
'mail': '[email protected]',
'password': '1234',
'name': 'Foo',
})
self.assertEqual(response, 'Success')
await model.user.create('admin', '1234', 'Foo',
level=model.user.UserLevel.kernel)
response = await tests.request('/user/login', {
'mail': 'admin',
'password': '1234'
})
self.assertEqual(response, 'Success')
response = await tests.request('/user/list', {})
self.assertNotEqual(response, 'Error')
self.assertEqual(len(response), 2)
|
|
# JoinRefseqMouseGeneSymbol/JoinRefseqMouseGeneSymbol.py - a self annotated version of rgToolFactory.py generated by running rgToolFactory.py
# to make a new Galaxy tool called JoinRefseqMouseGeneSymbol
# User [email protected] at 02/02/2015 15:15:09
# rgToolFactory.py
# see https://bitbucket.org/fubar/galaxytoolfactory/wiki/Home
#
# copyright ross lazarus (ross stop lazarus at gmail stop com) May 2012
#
# all rights reserved
# Licensed under the LGPL
# suggestions for improvement and bug fixes welcome at https://bitbucket.org/fubar/galaxytoolfactory/wiki/Home
#
# August 2014
# merged John Chilton's citation addition and ideas from Marius van den Beek to enable arbitrary
# data types for input and output - thanks!
#
# march 2014
# had to remove dependencies because cross toolshed dependencies are not possible - can't pre-specify a toolshed url for graphicsmagick and ghostscript
# grrrrr - night before a demo
# added dependencies to a tool_dependencies.xml if html page generated so generated tool is properly portable
#
# added ghostscript and graphicsmagick as dependencies
# fixed a weird problem where gs was trying to use the new_files_path from universe (database/tmp) as ./database/tmp
# errors ensued
#
# august 2013
# found a problem with GS if $TMP or $TEMP missing - now inject /tmp and warn
#
# july 2013
# added ability to combine images and individual log files into html output
# just make sure there's a log file foo.log and it will be output
# together with all images named like "foo_*.pdf"
# otherwise old format for html
#
# January 2013
# problem pointed out by Carlos Borroto
# added escaping for <>$ - thought I did that ages ago...
#
# August 11 2012
# changed to use shell=False and cl as a sequence
# This is a Galaxy tool factory for simple scripts in python, R or whatever ails ye.
# It also serves as the wrapper for the new tool.
#
# you paste and run your script
# Only works for simple scripts that read one input from the history.
# Optionally can write one new history dataset,
# and optionally collect any number of outputs into links on an autogenerated HTML page.
# DO NOT install on a public or important site - please.
# installed generated tools are fine if the script is safe.
# They just run normally and their user cannot do anything unusually insecure
# but please, practice safe toolshed.
# Read the fucking code before you install any tool
# especially this one
# After you get the script working on some test data, you can
# optionally generate a toolshed compatible gzip file
# containing your script safely wrapped as an ordinary Galaxy script in your local toolshed for
# safe and largely automated installation in a production Galaxy.
# If you opt for an HTML output, you get all the script outputs arranged
# as a single Html history item - all output files are linked, thumbnails for all the pdfs.
# Ugly but really inexpensive.
#
# Patches appreciated please.
#
#
# long route to June 2012 product
# Behold the awesome power of Galaxy and the toolshed with the tool factory to bind them
# derived from an integrated script model
# called rgBaseScriptWrapper.py
# Note to the unwary:
# This tool allows arbitrary scripting on your Galaxy as the Galaxy user
# There is nothing stopping a malicious user doing whatever they choose
# Extremely dangerous!!
# Totally insecure. So, trusted users only
#
# preferred model is a developer using their throw away workstation instance - ie a private site.
# no real risk. The universe_wsgi.ini admin_users string is checked - only admin users are permitted to run this tool.
#
import sys
import shutil
import subprocess
import os
import time
import tempfile
import optparse
import tarfile
import re
import shutil
import math
progname = os.path.split(sys.argv[0])[1]
myversion = 'V001.1 March 2014'
verbose = False
debug = False
toolFactoryURL = 'https://bitbucket.org/fubar/galaxytoolfactory'
# if we do html we need these dependencies specified in a tool_dependencies.xml file and referred to in the generated
# tool xml
toolhtmldepskel = """<?xml version="1.0"?>
<tool_dependency>
<package name="ghostscript" version="9.10">
<repository name="package_ghostscript_9_10" owner="devteam" prior_installation_required="True" />
</package>
<package name="graphicsmagick" version="1.3.18">
<repository name="package_graphicsmagick_1_3" owner="iuc" prior_installation_required="True" />
</package>
<readme>
%s
</readme>
</tool_dependency>
"""
protorequirements = """<requirements>
<requirement type="package" version="9.10">ghostscript</requirement>
<requirement type="package" version="1.3.18">graphicsmagick</requirement>
</requirements>"""
def timenow():
"""return current time as a string
"""
return time.strftime('%d/%m/%Y %H:%M:%S', time.localtime(time.time()))
html_escape_table = {
"&": "&",
">": ">",
"<": "<",
"$": "\$"
}
def html_escape(text):
"""Produce entities within text."""
return "".join(html_escape_table.get(c,c) for c in text)
def cmd_exists(cmd):
return subprocess.call("type " + cmd, shell=True,
stdout=subprocess.PIPE, stderr=subprocess.PIPE) == 0
def parse_citations(citations_text):
"""
"""
citations = [c for c in citations_text.split("**ENTRY**") if c.strip()]
citation_tuples = []
for citation in citations:
if citation.startswith("doi"):
citation_tuples.append( ("doi", citation[len("doi"):].strip() ) )
else:
citation_tuples.append( ("bibtex", citation[len("bibtex"):].strip() ) )
return citation_tuples
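# Assumed input format for parse_citations(): entries separated by the literal
# token **ENTRY**, each entry starting with a "doi" or "bibtex" marker, e.g.:
#
#   doi 10.1093/bioinformatics/bts573
#   **ENTRY**
#   bibtex @article{lazarus2012, ...}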
class ScriptRunner:
"""class is a wrapper for an arbitrary script
"""
def __init__(self,opts=None,treatbashSpecial=True):
"""
cleanup inputs, setup some outputs
"""
self.useGM = cmd_exists('gm')
self.useIM = cmd_exists('convert')
self.useGS = cmd_exists('gs')
self.temp_warned = False # we want only one warning if $TMP not set
self.treatbashSpecial = treatbashSpecial
if opts.output_dir: # simplify for the tool tarball
os.chdir(opts.output_dir)
self.thumbformat = 'png'
self.opts = opts
self.toolname = re.sub('[^a-zA-Z0-9_]+', '', opts.tool_name) # a sanitizer now does this but..
self.toolid = self.toolname
self.myname = sys.argv[0] # get our name because we write ourselves out as a tool later
self.pyfile = self.myname # crude but efficient - the cruft won't hurt much
self.xmlfile = '%s.xml' % self.toolname
s = open(self.opts.script_path,'r').readlines()
s = [x.rstrip() for x in s] # remove pesky dos line endings if needed
self.script = '\n'.join(s)
fhandle,self.sfile = tempfile.mkstemp(prefix=self.toolname,suffix=".%s" % (opts.interpreter))
tscript = open(self.sfile,'w') # use self.sfile as script source for Popen
tscript.write(self.script)
tscript.close()
self.indentedScript = '\n'.join([' %s' % html_escape(x) for x in s]) # for restructured text in help
self.escapedScript = '\n'.join([html_escape(x) for x in s])
self.elog = os.path.join(self.opts.output_dir,"%s_error.log" % self.toolname)
if opts.output_dir: # may not want these complexities
self.tlog = os.path.join(self.opts.output_dir,"%s_runner.log" % self.toolname)
art = '%s.%s' % (self.toolname,opts.interpreter)
artpath = os.path.join(self.opts.output_dir,art) # need full path
artifact = open(artpath,'w') # use self.sfile as script source for Popen
artifact.write(self.script)
artifact.close()
self.cl = []
self.html = []
a = self.cl.append
a(opts.interpreter)
if self.treatbashSpecial and opts.interpreter in ['bash','sh']:
a(self.sfile)
else:
a('-') # stdin
a(opts.input_tab)
a(opts.output_tab)
self.outputFormat = self.opts.output_format
self.inputFormats = self.opts.input_formats
self.test1Input = '%s_test1_input.xls' % self.toolname
self.test1Output = '%s_test1_output.xls' % self.toolname
self.test1HTML = '%s_test1_output.html' % self.toolname
def makeXML(self):
"""
Create a Galaxy xml tool wrapper for the new script as a string to write out
fixme - use templating or something less fugly than this example of what we produce
<tool id="reverse" name="reverse" version="0.01">
<description>a tabular file</description>
<command interpreter="python">
reverse.py --script_path "$runMe" --interpreter "python"
--tool_name "reverse" --input_tab "$input1" --output_tab "$tab_file"
</command>
<inputs>
<param name="input1" type="data" format="tabular" label="Select a suitable input file from your history"/><param name="job_name" type="text" label="Supply a name for the outputs to remind you what they contain" value="reverse"/>
</inputs>
<outputs>
<data format="tabular" name="tab_file" label="${job_name}"/>
</outputs>
<help>
**What it Does**
Reverse the columns in a tabular file
</help>
<configfiles>
<configfile name="runMe">
# reverse order of columns in a tabular file
import sys
inp = sys.argv[1]
outp = sys.argv[2]
i = open(inp,'r')
o = open(outp,'w')
for row in i:
rs = row.rstrip().split('\t')
rs.reverse()
o.write('\t'.join(rs))
o.write('\n')
i.close()
o.close()
</configfile>
</configfiles>
</tool>
"""
newXML="""<tool id="%(toolid)s" name="%(toolname)s" version="%(tool_version)s">
%(tooldesc)s
%(requirements)s
<command interpreter="python">
%(command)s
</command>
<inputs>
%(inputs)s
</inputs>
<outputs>
%(outputs)s
</outputs>
<configfiles>
<configfile name="runMe">
%(script)s
</configfile>
</configfiles>
%(tooltests)s
<help>
%(help)s
</help>
<citations>
%(citations)s
<citation type="doi">10.1093/bioinformatics/bts573</citation>
</citations>
</tool>""" # needs a dict with toolname, toolid, interpreter, scriptname, command, inputs as a multi line string ready to write, outputs ditto, help ditto
newCommand="""
%(toolname)s.py --script_path "$runMe" --interpreter "%(interpreter)s"
--tool_name "%(toolname)s" %(command_inputs)s %(command_outputs)s """
# may NOT be an input or htmlout - appended later
tooltestsTabOnly = """
<tests>
<test>
<param name="input1" value="%(test1Input)s" ftype="%(inputFormats)s"/>
<param name="job_name" value="test1"/>
<param name="runMe" value="$runMe"/>
<output name="tab_file" file="%(test1Output)s" ftype="%(outputFormat)s"/>
</test>
</tests>
"""
tooltestsHTMLOnly = """
<tests>
<test>
<param name="input1" value="%(test1Input)s" ftype="%(inputFormats)s"/>
<param name="job_name" value="test1"/>
<param name="runMe" value="$runMe"/>
<output name="html_file" file="%(test1HTML)s" ftype="html" lines_diff="5"/>
</test>
</tests>
"""
tooltestsBoth = """<tests>
<test>
<param name="input1" value="%(test1Input)s" ftype="%(inputFormats)s"/>
<param name="job_name" value="test1"/>
<param name="runMe" value="$runMe"/>
<output name="tab_file" file="%(test1Output)s" ftype="%(outputFormat)s" />
<output name="html_file" file="%(test1HTML)s" ftype="html" lines_diff="10"/>
</test>
</tests>
"""
xdict = {}
xdict['outputFormat'] = self.outputFormat
xdict['inputFormats'] = self.inputFormats
xdict['requirements'] = ''
if self.opts.make_HTML:
if self.opts.include_dependencies == "yes":
xdict['requirements'] = protorequirements
xdict['tool_version'] = self.opts.tool_version
xdict['test1Input'] = self.test1Input
xdict['test1HTML'] = self.test1HTML
xdict['test1Output'] = self.test1Output
if self.opts.make_HTML and self.opts.output_tab <> 'None':
xdict['tooltests'] = tooltestsBoth % xdict
elif self.opts.make_HTML:
xdict['tooltests'] = tooltestsHTMLOnly % xdict
else:
xdict['tooltests'] = tooltestsTabOnly % xdict
xdict['script'] = self.escapedScript
# configfile is least painful way to embed script to avoid external dependencies
# but requires escaping of <, > and $ to avoid Mako parsing
if self.opts.help_text:
helptext = open(self.opts.help_text,'r').readlines()
helptext = [html_escape(x) for x in helptext] # must html escape here too - thanks to Marius van den Beek
xdict['help'] = ''.join([x for x in helptext])
else:
xdict['help'] = 'Please ask the tool author (%s) for help as none was supplied at tool generation\n' % (self.opts.user_email)
if self.opts.citations:
citationstext = open(self.opts.citations,'r').read()
citation_tuples = parse_citations(citationstext)
citations_xml = ""
for citation_type, citation_content in citation_tuples:
citation_xml = """<citation type="%s">%s</citation>""" % (citation_type, html_escape(citation_content))
citations_xml += citation_xml
xdict['citations'] = citations_xml
else:
xdict['citations'] = ""
coda = ['**Script**','Pressing execute will run the following code over your input file and generate some outputs in your history::']
coda.append('\n')
coda.append(self.indentedScript)
coda.append('\n**Attribution**\nThis Galaxy tool was created by %s at %s\nusing the Galaxy Tool Factory.\n' % (self.opts.user_email,timenow()))
coda.append('See %s for details of that project' % (toolFactoryURL))
coda.append('Please cite: Creating re-usable tools from scripts: The Galaxy Tool Factory. Ross Lazarus; Antony Kaspi; Mark Ziemann; The Galaxy Team. ')
coda.append('Bioinformatics 2012; doi: 10.1093/bioinformatics/bts573\n')
xdict['help'] = '%s\n%s' % (xdict['help'],'\n'.join(coda))
if self.opts.tool_desc:
xdict['tooldesc'] = '<description>%s</description>' % self.opts.tool_desc
else:
xdict['tooldesc'] = ''
xdict['command_outputs'] = ''
xdict['outputs'] = ''
if self.opts.input_tab <> 'None':
xdict['command_inputs'] = '--input_tab "$input1" ' # the space may matter a lot if we append something
xdict['inputs'] = '<param name="input1" type="data" format="%s" label="Select a suitable input file from your history"/> \n' % self.inputFormats
else:
xdict['command_inputs'] = '' # assume no input - eg a random data generator
xdict['inputs'] = ''
xdict['inputs'] += '<param name="job_name" type="text" label="Supply a name for the outputs to remind you what they contain" value="%s"/> \n' % self.toolname
xdict['toolname'] = self.toolname
xdict['toolid'] = self.toolid
xdict['interpreter'] = self.opts.interpreter
xdict['scriptname'] = self.sfile
if self.opts.make_HTML:
xdict['command_outputs'] += ' --output_dir "$html_file.files_path" --output_html "$html_file" --make_HTML "yes"'
xdict['outputs'] += ' <data format="html" name="html_file" label="${job_name}.html"/>\n'
else:
xdict['command_outputs'] += ' --output_dir "./"'
if self.opts.output_tab <> 'None':
xdict['command_outputs'] += ' --output_tab "$tab_file"'
xdict['outputs'] += ' <data format="%s" name="tab_file" label="${job_name}"/>\n' % self.outputFormat
xdict['command'] = newCommand % xdict
xmls = newXML % xdict
xf = open(self.xmlfile,'w')
xf.write(xmls)
xf.write('\n')
xf.close()
# ready for the tarball
def makeTooltar(self):
"""
a tool is a gz tarball with eg
/toolname/tool.xml /toolname/tool.py /toolname/test-data/test1_in.foo ...
"""
retval = self.run()
if retval:
print >> sys.stderr,'## Run failed. Cannot build yet. Please fix and retry'
sys.exit(1)
tdir = self.toolname
os.mkdir(tdir)
self.makeXML()
if self.opts.make_HTML:
if self.opts.help_text:
hlp = open(self.opts.help_text,'r').read()
else:
hlp = 'Please ask the tool author for help as none was supplied at tool generation\n'
if self.opts.include_dependencies:
tooldepcontent = toolhtmldepskel % hlp
depf = open(os.path.join(tdir,'tool_dependencies.xml'),'w')
depf.write(tooldepcontent)
depf.write('\n')
depf.close()
if self.opts.input_tab <> 'None': # no reproducible test otherwise? TODO: maybe..
testdir = os.path.join(tdir,'test-data')
os.mkdir(testdir) # make tests directory
shutil.copyfile(self.opts.input_tab,os.path.join(testdir,self.test1Input))
if self.opts.output_tab <> 'None':
shutil.copyfile(self.opts.output_tab,os.path.join(testdir,self.test1Output))
if self.opts.make_HTML:
shutil.copyfile(self.opts.output_html,os.path.join(testdir,self.test1HTML))
if self.opts.output_dir:
shutil.copyfile(self.tlog,os.path.join(testdir,'test1_out.log'))
outpif = '%s.py' % self.toolname # new name
outpiname = os.path.join(tdir,outpif) # path for the tool tarball
pyin = os.path.basename(self.pyfile) # our name - we rewrite ourselves (TM)
notes = ['# %s - a self annotated version of %s generated by running %s\n' % (outpiname,pyin,pyin),]
notes.append('# to make a new Galaxy tool called %s\n' % self.toolname)
notes.append('# User %s at %s\n' % (self.opts.user_email,timenow()))
pi = open(self.pyfile,'r').readlines() # our code becomes new tool wrapper (!) - first Galaxy worm
notes += pi
outpi = open(outpiname,'w')
outpi.write(''.join(notes))
outpi.write('\n')
outpi.close()
stname = os.path.join(tdir,self.sfile)
if not os.path.exists(stname):
shutil.copyfile(self.sfile, stname)
xtname = os.path.join(tdir,self.xmlfile)
if not os.path.exists(xtname):
shutil.copyfile(self.xmlfile,xtname)
tarpath = "%s.gz" % self.toolname
tar = tarfile.open(tarpath, "w:gz")
tar.add(tdir,arcname=self.toolname)
tar.close()
shutil.copyfile(tarpath,self.opts.new_tool)
shutil.rmtree(tdir)
## TODO: replace with optional direct upload to local toolshed?
return retval
def compressPDF(self,inpdf=None,thumbformat='png'):
"""need absolute path to pdf
note that GS gets confoozled if no $TMP or $TEMP
so we set it
"""
assert os.path.isfile(inpdf), "## Input %s supplied to %s compressPDF not found" % (inpdf,self.myname)
hlog = os.path.join(self.opts.output_dir,"compress_%s.txt" % os.path.basename(inpdf))
sto = open(hlog,'a')
our_env = os.environ.copy()
our_tmp = our_env.get('TMP',None)
if not our_tmp:
our_tmp = our_env.get('TEMP',None)
if not (our_tmp and os.path.exists(our_tmp)):
newtmp = os.path.join(self.opts.output_dir,'tmp')
try:
os.mkdir(newtmp)
except:
sto.write('## WARNING - cannot make %s - it may exist or permissions need fixing\n' % newtmp)
our_env['TEMP'] = newtmp
if not self.temp_warned:
sto.write('## WARNING - no $TMP or $TEMP!!! Please fix - using %s temporarily\n' % newtmp)
self.temp_warned = True
outpdf = '%s_compressed' % inpdf
cl = ["gs", "-sDEVICE=pdfwrite", "-dNOPAUSE", "-dUseCIEColor", "-dBATCH","-dPDFSETTINGS=/printer", "-sOutputFile=%s" % outpdf,inpdf]
x = subprocess.Popen(cl,stdout=sto,stderr=sto,cwd=self.opts.output_dir,env=our_env)
retval1 = x.wait()
sto.close()
if retval1 == 0:
os.unlink(inpdf)
shutil.move(outpdf,inpdf)
os.unlink(hlog)
hlog = os.path.join(self.opts.output_dir,"thumbnail_%s.txt" % os.path.basename(inpdf))
sto = open(hlog,'w')
outpng = '%s.%s' % (os.path.splitext(inpdf)[0],thumbformat)
if self.useGM:
cl2 = ['gm', 'convert', inpdf, outpng]
else: # assume imagemagick
cl2 = ['convert', inpdf, outpng]
x = subprocess.Popen(cl2,stdout=sto,stderr=sto,cwd=self.opts.output_dir,env=our_env)
retval2 = x.wait()
sto.close()
if retval2 == 0:
os.unlink(hlog)
retval = retval1 or retval2
return retval
def getfSize(self,fpath,outpath):
"""
format a nice file size string
"""
size = ''
fp = os.path.join(outpath,fpath)
if os.path.isfile(fp):
size = '0 B'
n = float(os.path.getsize(fp))
if n > 2**20:
size = '%1.1f MB' % (n/2**20)
elif n > 2**10:
size = '%1.1f KB' % (n/2**10)
elif n > 0:
size = '%d B' % (int(n))
return size
def makeHtml(self):
""" Create an HTML file content to list all the artifacts found in the output_dir
"""
galhtmlprefix = """<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head> <meta http-equiv="Content-Type" content="text/html; charset=utf-8" />
<meta name="generator" content="Galaxy %s tool output - see http://g2.trac.bx.psu.edu/" />
<title></title>
<link rel="stylesheet" href="/static/style/base.css" type="text/css" />
</head>
<body>
<div class="toolFormBody">
"""
galhtmlattr = """<hr/><div class="infomessage">This tool (%s) was generated by the <a href="https://bitbucket.org/fubar/galaxytoolfactory/overview">Galaxy Tool Factory</a></div><br/>"""
galhtmlpostfix = """</div></body></html>\n"""
flist = os.listdir(self.opts.output_dir)
flist = [x for x in flist if x <> 'Rplots.pdf']
flist.sort()
html = []
html.append(galhtmlprefix % progname)
html.append('<div class="infomessage">Galaxy Tool "%s" run at %s</div><br/>' % (self.toolname,timenow()))
fhtml = []
if len(flist) > 0:
logfiles = [x for x in flist if x.lower().endswith('.log')] # log file names determine sections
logfiles.sort()
logfiles = [x for x in logfiles if os.path.abspath(x) <> os.path.abspath(self.tlog)]
logfiles.append(os.path.abspath(self.tlog)) # make it the last one
pdflist = []
npdf = len([x for x in flist if os.path.splitext(x)[-1].lower() == '.pdf'])
for rownum,fname in enumerate(flist):
dname,e = os.path.splitext(fname)
sfsize = self.getfSize(fname,self.opts.output_dir)
if e.lower() == '.pdf' : # compress and make a thumbnail
thumb = '%s.%s' % (dname,self.thumbformat)
pdff = os.path.join(self.opts.output_dir,fname)
retval = self.compressPDF(inpdf=pdff,thumbformat=self.thumbformat)
if retval == 0:
pdflist.append((fname,thumb))
else:
pdflist.append((fname,fname))
if (rownum+1) % 2 == 0:
fhtml.append('<tr class="odd_row"><td><a href="%s">%s</a></td><td>%s</td></tr>' % (fname,fname,sfsize))
else:
fhtml.append('<tr><td><a href="%s">%s</a></td><td>%s</td></tr>' % (fname,fname,sfsize))
for logfname in logfiles: # expect at least tlog - if more
if os.path.abspath(logfname) == os.path.abspath(self.tlog): # handled later
sectionname = 'All tool run'
if (len(logfiles) > 1):
sectionname = 'Other'
ourpdfs = pdflist
else:
realname = os.path.basename(logfname)
sectionname = os.path.splitext(realname)[0].split('_')[0] # break in case _ added to log
ourpdfs = [x for x in pdflist if os.path.basename(x[0]).split('_')[0] == sectionname]
pdflist = [x for x in pdflist if os.path.basename(x[0]).split('_')[0] <> sectionname] # remove
nacross = 1
npdf = len(ourpdfs)
if npdf > 0:
nacross = math.sqrt(npdf) ## int(round(math.log(npdf,2)))
if int(nacross)**2 != npdf:
nacross += 1
nacross = int(nacross)
width = min(400,int(1200/nacross))
html.append('<div class="toolFormTitle">%s images and outputs</div>' % sectionname)
html.append('(Click on a thumbnail image to download the corresponding original PDF image)<br/>')
ntogo = nacross # counter for table row padding with empty cells
html.append('<div><table class="simple" cellpadding="2" cellspacing="2">\n<tr>')
for i,paths in enumerate(ourpdfs):
fname,thumb = paths
s= """<td><a href="%s"><img src="%s" title="Click to download a PDF of %s" hspace="5" width="%d"
alt="Image called %s"/></a></td>\n""" % (fname,thumb,fname,width,fname)
if ((i+1) % nacross == 0):
s += '</tr>\n'
ntogo = 0
if i < (npdf - 1): # more to come
s += '<tr>'
ntogo = nacross
else:
ntogo -= 1
html.append(s)
if html[-1].strip().endswith('</tr>'):
html.append('</table></div>\n')
else:
if ntogo > 0: # pad
html.append('<td>&nbsp;</td>'*ntogo)
html.append('</tr></table></div>\n')
logt = open(logfname,'r').readlines()
logtext = [x for x in logt if x.strip() > '']
html.append('<div class="toolFormTitle">%s log output</div>' % sectionname)
if len(logtext) > 1:
html.append('\n<pre>\n')
html += logtext
html.append('\n</pre>\n')
else:
html.append('%s is empty<br/>' % logfname)
if len(fhtml) > 0:
fhtml.insert(0,'<div><table class="colored" cellpadding="3" cellspacing="3"><tr><th>Output File Name (click to view)</th><th>Size</th></tr>\n')
fhtml.append('</table></div><br/>')
html.append('<div class="toolFormTitle">All output files available for downloading</div>\n')
html += fhtml # add all non-pdf files to the end of the display
else:
html.append('<div class="warningmessagelarge">### Error - %s returned no files - please confirm that parameters are sane</div>' % self.opts.interpreter)
html.append(galhtmlpostfix)
htmlf = file(self.opts.output_html,'w')
htmlf.write('\n'.join(html))
htmlf.write('\n')
htmlf.close()
self.html = html
def run(self):
"""
scripts must be small enough not to fill the pipe!
"""
if self.treatbashSpecial and self.opts.interpreter in ['bash','sh']:
retval = self.runBash()
else:
if self.opts.output_dir:
ste = open(self.elog,'w')
sto = open(self.tlog,'w')
sto.write('## Toolfactory generated command line = %s\n' % ' '.join(self.cl))
sto.flush()
#p = subprocess.Popen(self.cl,shell=False,stdout=sto,stderr=ste,stdin=subprocess.PIPE,cwd=self.opts.output_dir)
p = subprocess.Popen(self.cl,shell=False,stdout=subprocess.PIPE,stderr=subprocess.PIPE,stdin=subprocess.PIPE,cwd=self.opts.output_dir)
else:
p = subprocess.Popen(self.cl,shell=False,stdin=subprocess.PIPE)
p.stdin.write(self.script)
stdout_data, stderr_data = p.communicate()
p.stdin.close()
retval = p.returncode
#retval = p.wait()
if self.opts.output_dir:
sto.close()
ste.close()
err = stderr_data
#err = open(self.elog,'r').readlines()
print >> sys.stdout,stdout_data
if retval <> 0 and err: # problem
print >> sys.stderr,err
if self.opts.make_HTML:
self.makeHtml()
return retval
def runBash(self):
"""
cannot use - for bash so use self.sfile
"""
if self.opts.output_dir:
s = '## Toolfactory generated command line = %s\n' % ' '.join(self.cl)
sto = open(self.tlog,'w')
sto.write(s)
sto.flush()
p = subprocess.Popen(self.cl,shell=False,stdout=sto,stderr=sto,cwd=self.opts.output_dir)
else:
p = subprocess.Popen(self.cl,shell=False)
retval = p.wait()
if self.opts.output_dir:
sto.close()
if self.opts.make_HTML:
self.makeHtml()
return retval
def main():
u = """
This is a Galaxy wrapper. It expects to be called by a special purpose tool.xml as:
<command interpreter="python">rgBaseScriptWrapper.py --script_path "$scriptPath" --tool_name "foo" --interpreter "Rscript"
</command>
"""
op = optparse.OptionParser()
a = op.add_option
a('--script_path',default=None)
a('--tool_name',default=None)
a('--interpreter',default=None)
a('--output_dir',default='./')
a('--output_html',default=None)
a('--input_tab',default="None")
a('--input_formats',default="tabular,text")
a('--output_tab',default="None")
a('--output_format',default="tabular")
a('--user_email',default='Unknown')
a('--bad_user',default=None)
a('--make_Tool',default=None)
a('--make_HTML',default=None)
a('--help_text',default=None)
a('--citations',default=None)
a('--tool_desc',default=None)
a('--new_tool',default=None)
a('--tool_version',default=None)
a('--include_dependencies',default=None)
opts, args = op.parse_args()
assert not opts.bad_user,'UNAUTHORISED: %s is NOT authorized to use this tool until Galaxy admin adds %s to admin_users in universe_wsgi.ini' % (opts.bad_user,opts.bad_user)
assert opts.tool_name,'## Tool Factory expects a tool name - eg --tool_name=DESeq'
assert opts.interpreter,'## Tool Factory wrapper expects an interpreter - eg --interpreter=Rscript'
assert os.path.isfile(opts.script_path),'## Tool Factory wrapper expects a script path - eg --script_path=foo.R'
if opts.output_dir:
try:
os.makedirs(opts.output_dir)
except:
pass
r = ScriptRunner(opts)
if opts.make_Tool:
retcode = r.makeTooltar()
else:
retcode = r.run()
os.unlink(r.sfile)
if retcode:
sys.exit(retcode) # indicate failure to job runner
if __name__ == "__main__":
main()
|
|
# Portions Copyright (c) 2008-2009 Nokia Corporation
r"""OS routines for Mac, NT, or Posix depending on what system we're on.
This exports:
- all functions from posix, nt, os2, mac, or ce, e.g. unlink, stat, etc.
- os.path is one of the modules posixpath, ntpath, or macpath
- os.name is 'posix', 'nt', 'os2', 'mac', 'ce' or 'riscos'
- os.curdir is a string representing the current directory ('.' or ':')
- os.pardir is a string representing the parent directory ('..' or '::')
- os.sep is the (or a most common) pathname separator ('/' or ':' or '\\')
- os.extsep is the extension separator ('.' or '/')
- os.altsep is the alternate pathname separator (None or '/')
- os.pathsep is the component separator used in $PATH etc
- os.linesep is the line separator in text files ('\r' or '\n' or '\r\n')
- os.defpath is the default search path for executables
- os.devnull is the file path of the null device ('/dev/null', etc.)
Programs that import and use 'os' stand a better chance of being
portable between different platforms. Of course, they must then
only use functions that are defined by all platforms (e.g., unlink
and opendir), and leave all pathname manipulation to os.path
(e.g., split and join).
"""
#'
import sys
_names = sys.builtin_module_names
# Note: more names are added to __all__ later.
__all__ = ["altsep", "curdir", "pardir", "sep", "pathsep", "linesep",
"defpath", "name", "path", "devnull",
"SEEK_SET", "SEEK_CUR", "SEEK_END"]
def _get_exports_list(module):
try:
return list(module.__all__)
except AttributeError:
return [n for n in dir(module) if n[0] != '_']
if 'posix' in _names:
name = 'posix'
from posix import *
try:
from posix import _exit
except ImportError:
pass
if sys.platform == 'symbian_s60':
linesep = '\r\n'
import ntpath as path
else:
linesep = '\n'
import posixpath as path
import posix
__all__.extend(_get_exports_list(posix))
del posix
elif 'nt' in _names:
name = 'nt'
linesep = '\r\n'
from nt import *
try:
from nt import _exit
except ImportError:
pass
import ntpath as path
import nt
__all__.extend(_get_exports_list(nt))
del nt
elif 'os2' in _names:
name = 'os2'
linesep = '\r\n'
from os2 import *
try:
from os2 import _exit
except ImportError:
pass
if sys.version.find('EMX GCC') == -1:
import ntpath as path
else:
import os2emxpath as path
from _emx_link import link
import os2
__all__.extend(_get_exports_list(os2))
del os2
elif 'mac' in _names:
name = 'mac'
linesep = '\r'
from mac import *
try:
from mac import _exit
except ImportError:
pass
import macpath as path
import mac
__all__.extend(_get_exports_list(mac))
del mac
elif 'ce' in _names:
name = 'ce'
linesep = '\r\n'
from ce import *
try:
from ce import _exit
except ImportError:
pass
# We can use the standard Windows path.
import ntpath as path
import ce
__all__.extend(_get_exports_list(ce))
del ce
elif 'riscos' in _names:
name = 'riscos'
linesep = '\n'
from riscos import *
try:
from riscos import _exit
except ImportError:
pass
import riscospath as path
import riscos
__all__.extend(_get_exports_list(riscos))
del riscos
else:
raise ImportError, 'no os specific module found'
sys.modules['os.path'] = path
from os.path import (curdir, pardir, sep, pathsep, defpath, extsep, altsep,
devnull)
del _names
# Python uses fixed values for the SEEK_ constants; they are mapped
# to native constants if necessary in posixmodule.c
SEEK_SET = 0
SEEK_CUR = 1
SEEK_END = 2
#'
# Super directory utilities.
# (Inspired by Eric Raymond; the doc strings are mostly his)
def makedirs(name, mode=0777):
"""makedirs(path [, mode=0777])
Super-mkdir; create a leaf directory and all intermediate ones.
Works like mkdir, except that any intermediate path segment (not
just the rightmost) will be created if it does not exist. This is
recursive.
"""
from errno import EEXIST
head, tail = path.split(name)
if not tail:
head, tail = path.split(head)
if head and tail and not path.exists(head):
try:
makedirs(head, mode)
except OSError, e:
# be happy if someone already created the path
if e.errno != EEXIST:
raise
if tail == curdir: # xxx/newdir/. exists if xxx/newdir exists
return
mkdir(name, mode)
def removedirs(name):
"""removedirs(path)
Super-rmdir; remove a leaf directory and all empty intermediate
ones. Works like rmdir except that, if the leaf directory is
successfully removed, directories corresponding to rightmost path
segments will be pruned away until either the whole path is
consumed or an error occurs. Errors during this latter phase are
ignored -- they generally mean that a directory was not empty.
"""
rmdir(name)
head, tail = path.split(name)
if not tail:
head, tail = path.split(head)
while head and tail:
try:
rmdir(head)
except error:
break
head, tail = path.split(head)
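# Illustrative sketch (not part of the original module): removedirs prunes
# empty parent directories after removing the leaf, e.g. assuming the
# directories below exist and are empty:
#
#   os.removedirs('/tmp/a/b/c')   # removes c, then b, then a; stops at /tmp
#                                 # because it is not empty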
def renames(old, new):
"""renames(old, new)
Super-rename; create directories as necessary and delete any left
empty. Works like rename, except creation of any intermediate
directories needed to make the new pathname good is attempted
first. After the rename, directories corresponding to rightmost
    path segments of the old name will be pruned away until either the
whole path is consumed or a nonempty directory is found.
Note: this function can fail with the new directory structure made
if you lack permissions needed to unlink the leaf directory or
file.
"""
head, tail = path.split(new)
if head and tail and not path.exists(head):
makedirs(head)
rename(old, new)
head, tail = path.split(old)
if head and tail:
try:
removedirs(head)
except error:
pass
__all__.extend(["makedirs", "removedirs", "renames"])
def walk(top, topdown=True, onerror=None):
"""Directory tree generator.
For each directory in the directory tree rooted at top (including top
itself, but excluding '.' and '..'), yields a 3-tuple
dirpath, dirnames, filenames
dirpath is a string, the path to the directory. dirnames is a list of
the names of the subdirectories in dirpath (excluding '.' and '..').
filenames is a list of the names of the non-directory files in dirpath.
Note that the names in the lists are just names, with no path components.
To get a full path (which begins with top) to a file or directory in
dirpath, do os.path.join(dirpath, name).
If optional arg 'topdown' is true or not specified, the triple for a
directory is generated before the triples for any of its subdirectories
(directories are generated top down). If topdown is false, the triple
for a directory is generated after the triples for all of its
subdirectories (directories are generated bottom up).
When topdown is true, the caller can modify the dirnames list in-place
(e.g., via del or slice assignment), and walk will only recurse into the
subdirectories whose names remain in dirnames; this can be used to prune
the search, or to impose a specific order of visiting. Modifying
dirnames when topdown is false is ineffective, since the directories in
dirnames have already been generated by the time dirnames itself is
generated.
By default errors from the os.listdir() call are ignored. If
optional arg 'onerror' is specified, it should be a function; it
will be called with one argument, an os.error instance. It can
report the error to continue with the walk, or raise the exception
to abort the walk. Note that the filename is available as the
filename attribute of the exception object.
Caution: if you pass a relative pathname for top, don't change the
current working directory between resumptions of walk. walk never
changes the current directory, and assumes that the client doesn't
either.
Example:
from os.path import join, getsize
for root, dirs, files in walk('python/Lib/email'):
print root, "consumes",
print sum([getsize(join(root, name)) for name in files]),
print "bytes in", len(files), "non-directory files"
if 'CVS' in dirs:
dirs.remove('CVS') # don't visit CVS directories
"""
from os.path import join, isdir, islink
# We may not have read permission for top, in which case we can't
# get a list of the files the directory contains. os.path.walk
# always suppressed the exception then, rather than blow up for a
# minor reason when (say) a thousand readable directories are still
# left to visit. That logic is copied here.
try:
# Note that listdir and error are globals in this module due
# to earlier import-*.
names = listdir(top)
except error, err:
if onerror is not None:
onerror(err)
return
dirs, nondirs = [], []
for name in names:
if isdir(join(top, name)):
dirs.append(name)
else:
nondirs.append(name)
if topdown:
yield top, dirs, nondirs
for name in dirs:
path = join(top, name)
if not islink(path):
for x in walk(path, topdown, onerror):
yield x
if not topdown:
yield top, dirs, nondirs
__all__.append("walk")
# Make sure os.environ exists, at least
try:
environ
except NameError:
environ = {}
def execl(file, *args):
"""execl(file, *args)
Execute the executable file with argument list args, replacing the
current process. """
execv(file, args)
def execle(file, *args):
"""execle(file, *args, env)
Execute the executable file with argument list args and
environment env, replacing the current process. """
env = args[-1]
execve(file, args[:-1], env)
def execlp(file, *args):
"""execlp(file, *args)
Execute the executable file (which is searched for along $PATH)
with argument list args, replacing the current process. """
execvp(file, args)
def execlpe(file, *args):
"""execlpe(file, *args, env)
Execute the executable file (which is searched for along $PATH)
with argument list args and environment env, replacing the current
process. """
env = args[-1]
execvpe(file, args[:-1], env)
def execvp(file, args):
"""execp(file, args)
Execute the executable file (which is searched for along $PATH)
with argument list args, replacing the current process.
args may be a list or tuple of strings. """
_execvpe(file, args)
def execvpe(file, args, env):
"""execvpe(file, args, env)
Execute the executable file (which is searched for along $PATH)
with argument list args and environment env , replacing the
current process.
args may be a list or tuple of strings. """
_execvpe(file, args, env)
__all__.extend(["execl","execle","execlp","execlpe","execvp","execvpe"])
def _execvpe(file, args, env=None):
from errno import ENOENT, ENOTDIR
if env is not None:
func = execve
argrest = (args, env)
else:
func = execv
argrest = (args,)
env = environ
head, tail = path.split(file)
if head:
func(file, *argrest)
return
if 'PATH' in env:
envpath = env['PATH']
else:
envpath = defpath
PATH = envpath.split(pathsep)
saved_exc = None
saved_tb = None
for dir in PATH:
fullname = path.join(dir, file)
try:
func(fullname, *argrest)
except error, e:
tb = sys.exc_info()[2]
if (e.errno != ENOENT and e.errno != ENOTDIR
and saved_exc is None):
saved_exc = e
saved_tb = tb
if saved_exc:
raise error, saved_exc, saved_tb
raise error, e, tb
# Change environ to automatically call putenv() if it exists
try:
# This will fail if there's no putenv
putenv
except NameError:
pass
else:
import UserDict
# Fake unsetenv() for Windows
# not sure about os2 here but
# I'm guessing they are the same.
if name in ('os2', 'nt'):
def unsetenv(key):
putenv(key, "")
if name == "riscos":
# On RISC OS, all env access goes through getenv and putenv
from riscosenviron import _Environ
elif name in ('os2', 'nt'): # Where Env Var Names Must Be UPPERCASE
# But we store them as upper case
class _Environ(UserDict.IterableUserDict):
def __init__(self, environ):
UserDict.UserDict.__init__(self)
data = self.data
for k, v in environ.items():
data[k.upper()] = v
def __setitem__(self, key, item):
putenv(key, item)
self.data[key.upper()] = item
def __getitem__(self, key):
return self.data[key.upper()]
try:
unsetenv
except NameError:
def __delitem__(self, key):
del self.data[key.upper()]
else:
def __delitem__(self, key):
unsetenv(key)
del self.data[key.upper()]
def has_key(self, key):
return key.upper() in self.data
def __contains__(self, key):
return key.upper() in self.data
def get(self, key, failobj=None):
return self.data.get(key.upper(), failobj)
def update(self, dict=None, **kwargs):
if dict:
try:
keys = dict.keys()
except AttributeError:
# List of (key, value)
for k, v in dict:
self[k] = v
else:
# got keys
# cannot use items(), since mappings
# may not have them.
for k in keys:
self[k] = dict[k]
if kwargs:
self.update(kwargs)
def copy(self):
return dict(self)
else: # Where Env Var Names Can Be Mixed Case
class _Environ(UserDict.IterableUserDict):
def __init__(self, environ):
UserDict.UserDict.__init__(self)
self.data = environ
def __setitem__(self, key, item):
putenv(key, item)
self.data[key] = item
def update(self, dict=None, **kwargs):
if dict:
try:
keys = dict.keys()
except AttributeError:
# List of (key, value)
for k, v in dict:
self[k] = v
else:
# got keys
# cannot use items(), since mappings
# may not have them.
for k in keys:
self[k] = dict[k]
if kwargs:
self.update(kwargs)
try:
unsetenv
except NameError:
pass
else:
def __delitem__(self, key):
unsetenv(key)
del self.data[key]
def copy(self):
return dict(self)
environ = _Environ(environ)
def getenv(key, default=None):
"""Get an environment variable, return None if it doesn't exist.
The optional second argument can specify an alternate default."""
return environ.get(key, default)
__all__.append("getenv")
def _exists(name):
try:
eval(name)
return True
except NameError:
return False
# Supply spawn*() (probably only for Unix)
if _exists("fork") and not _exists("spawnv") and _exists("execv"):
P_WAIT = 0
P_NOWAIT = P_NOWAITO = 1
# XXX Should we support P_DETACH? I suppose it could fork()**2
# and close the std I/O streams. Also, P_OVERLAY is the same
# as execv*()?
def _spawnvef(mode, file, args, env, func):
# Internal helper; func is the exec*() function to use
pid = fork()
if not pid:
# Child
try:
if env is None:
func(file, args)
else:
func(file, args, env)
except:
_exit(127)
else:
# Parent
if mode == P_NOWAIT:
return pid # Caller is responsible for waiting!
while 1:
wpid, sts = waitpid(pid, 0)
if WIFSTOPPED(sts):
continue
elif WIFSIGNALED(sts):
return -WTERMSIG(sts)
elif WIFEXITED(sts):
return WEXITSTATUS(sts)
else:
raise error, "Not stopped, signaled or exited???"
def spawnv(mode, file, args):
"""spawnv(mode, file, args) -> integer
Execute file with arguments from args in a subprocess.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
otherwise return -SIG, where SIG is the signal that killed it. """
return _spawnvef(mode, file, args, None, execv)
def spawnve(mode, file, args, env):
"""spawnve(mode, file, args, env) -> integer
Execute file with arguments from args in a subprocess with the
specified environment.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
otherwise return -SIG, where SIG is the signal that killed it. """
return _spawnvef(mode, file, args, env, execve)
    # Note: spawnvp[e] isn't currently supported on Windows
def spawnvp(mode, file, args):
"""spawnvp(mode, file, args) -> integer
Execute file (which is looked for along $PATH) with arguments from
args in a subprocess.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
otherwise return -SIG, where SIG is the signal that killed it. """
return _spawnvef(mode, file, args, None, execvp)
def spawnvpe(mode, file, args, env):
"""spawnvpe(mode, file, args, env) -> integer
Execute file (which is looked for along $PATH) with arguments from
args in a subprocess with the supplied environment.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
otherwise return -SIG, where SIG is the signal that killed it. """
return _spawnvef(mode, file, args, env, execvpe)
if _exists("spawnv"):
# These aren't supplied by the basic Windows code
# but can be easily implemented in Python
def spawnl(mode, file, *args):
"""spawnl(mode, file, *args) -> integer
Execute file with arguments from args in a subprocess.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
otherwise return -SIG, where SIG is the signal that killed it. """
return spawnv(mode, file, args)
def spawnle(mode, file, *args):
"""spawnle(mode, file, *args, env) -> integer
Execute file with arguments from args in a subprocess with the
supplied environment.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
otherwise return -SIG, where SIG is the signal that killed it. """
env = args[-1]
return spawnve(mode, file, args[:-1], env)
__all__.extend(["spawnv", "spawnve", "spawnl", "spawnle",])
if _exists("spawnvp"):
# At the moment, Windows doesn't implement spawnvp[e],
# so it won't have spawnlp[e] either.
def spawnlp(mode, file, *args):
"""spawnlp(mode, file, *args) -> integer
Execute file (which is looked for along $PATH) with arguments from
args in a subprocess with the supplied environment.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
otherwise return -SIG, where SIG is the signal that killed it. """
return spawnvp(mode, file, args)
def spawnlpe(mode, file, *args):
"""spawnlpe(mode, file, *args, env) -> integer
Execute file (which is looked for along $PATH) with arguments from
args in a subprocess with the supplied environment.
If mode == P_NOWAIT return the pid of the process.
If mode == P_WAIT return the process's exit code if it exits normally;
otherwise return -SIG, where SIG is the signal that killed it. """
env = args[-1]
return spawnvpe(mode, file, args[:-1], env)
__all__.extend(["spawnvp", "spawnvpe", "spawnlp", "spawnlpe",])
# Supply popen2 etc. (for Unix)
if _exists("fork"):
if not _exists("popen2"):
def popen2(cmd, mode="t", bufsize=-1):
"""Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd'
may be a sequence, in which case arguments will be passed directly to
the program without shell intervention (as with os.spawnv()). If 'cmd'
is a string it will be passed to the shell (as with os.system()). If
'bufsize' is specified, it sets the buffer size for the I/O pipes. The
file objects (child_stdin, child_stdout) are returned."""
import popen2
stdout, stdin = popen2.popen2(cmd, bufsize)
return stdin, stdout
__all__.append("popen2")
if not _exists("popen3"):
def popen3(cmd, mode="t", bufsize=-1):
"""Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd'
may be a sequence, in which case arguments will be passed directly to
the program without shell intervention (as with os.spawnv()). If 'cmd'
is a string it will be passed to the shell (as with os.system()). If
'bufsize' is specified, it sets the buffer size for the I/O pipes. The
file objects (child_stdin, child_stdout, child_stderr) are returned."""
import popen2
stdout, stdin, stderr = popen2.popen3(cmd, bufsize)
return stdin, stdout, stderr
__all__.append("popen3")
if not _exists("popen4"):
def popen4(cmd, mode="t", bufsize=-1):
"""Execute the shell command 'cmd' in a sub-process. On UNIX, 'cmd'
may be a sequence, in which case arguments will be passed directly to
the program without shell intervention (as with os.spawnv()). If 'cmd'
is a string it will be passed to the shell (as with os.system()). If
'bufsize' is specified, it sets the buffer size for the I/O pipes. The
file objects (child_stdin, child_stdout_stderr) are returned."""
import popen2
stdout, stdin = popen2.popen4(cmd, bufsize)
return stdin, stdout
__all__.append("popen4")
import copy_reg as _copy_reg
def _make_stat_result(tup, dict):
return stat_result(tup, dict)
def _pickle_stat_result(sr):
(type, args) = sr.__reduce__()
return (_make_stat_result, args)
try:
_copy_reg.pickle(stat_result, _pickle_stat_result, _make_stat_result)
except NameError: # stat_result may not exist
pass
def _make_statvfs_result(tup, dict):
return statvfs_result(tup, dict)
def _pickle_statvfs_result(sr):
(type, args) = sr.__reduce__()
return (_make_statvfs_result, args)
try:
_copy_reg.pickle(statvfs_result, _pickle_statvfs_result,
_make_statvfs_result)
except NameError: # statvfs_result may not exist
pass
if not _exists("urandom"):
def urandom(n):
"""urandom(n) -> str
Return a string of n random bytes suitable for cryptographic use.
"""
try:
_urandomfd = open("/dev/urandom", O_RDONLY)
except (OSError, IOError):
raise NotImplementedError("/dev/urandom (or equivalent) not found")
bytes = ""
while len(bytes) < n:
bytes += read(_urandomfd, n - len(bytes))
close(_urandomfd)
return bytes
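# Illustrative sketch (not part of the original module):
#
#   token = os.urandom(16)      # 16 random bytes, e.g. for a session key
#   token.encode('hex')         # hex string for display (Python 2 only)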
|
|
from dectate.app import App, directive
from dectate.config import commit, Action, Composite
from dectate.error import ConflictError, ConfigError
import pytest
def test_simple():
class MyDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class MyApp(App):
foo = directive(MyDirective)
@MyApp.foo("hello")
def f():
pass
commit(MyApp)
assert MyApp.config.my == [("hello", f)]
def test_decorator():
class MyApp(App):
@directive
class foo(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
@MyApp.foo("hello")
def f():
pass
commit(MyApp)
assert MyApp.config.my == [("hello", f)]
def test_commit_method():
class MyDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class MyApp(App):
foo = directive(MyDirective)
@MyApp.foo("hello")
def f():
pass
result = MyApp.commit()
assert MyApp.config.my == [("hello", f)]
assert list(result) == [MyApp]
def test_directive_name():
class MyDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append(self)
class MyApp(App):
foo = directive(MyDirective)
@MyApp.foo("hello")
def f():
pass
MyApp.commit()
    assert MyApp.config.my[0].directive.directive_name == "foo"
def test_conflict_same_directive():
class MyDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class MyApp(App):
foo = directive(MyDirective)
@MyApp.foo("hello")
def f():
pass
@MyApp.foo("hello")
def f2():
pass
with pytest.raises(ConflictError):
commit(MyApp)
def test_app_inherit():
class Registry:
pass
class MyDirective(Action):
config = {"my": Registry}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.message = self.message
my.obj = obj
class MyApp(App):
foo = directive(MyDirective)
class SubApp(MyApp):
pass
@MyApp.foo("hello")
def f():
pass
commit(MyApp, SubApp)
assert MyApp.config.my.message == "hello"
assert MyApp.config.my.obj is f
assert SubApp.config.my.message == "hello"
assert SubApp.config.my.obj is f
def test_app_override():
class Registry:
pass
class MyDirective(Action):
config = {"my": Registry}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.message = self.message
my.obj = obj
class MyApp(App):
foo = directive(MyDirective)
class SubApp(MyApp):
pass
@MyApp.foo("hello")
def f():
pass
@SubApp.foo("hello")
def f2():
pass
commit(MyApp, SubApp)
assert MyApp.config.my.message == "hello"
assert MyApp.config.my.obj is f
assert SubApp.config.my.message == "hello"
assert SubApp.config.my.obj is f2
def test_different_group_no_conflict():
class FooDirective(Action):
config = {"foo": list}
def __init__(self, message):
self.message = message
def identifier(self, foo):
return self.message
def perform(self, obj, foo):
foo.append((self.message, obj))
class BarDirective(Action):
config = {"bar": list}
def __init__(self, message):
self.message = message
def identifier(self, bar):
return self.message
def perform(self, obj, bar):
bar.append((self.message, obj))
class MyApp(App):
foo = directive(FooDirective)
bar = directive(BarDirective)
@MyApp.foo("hello")
def f():
pass
@MyApp.bar("hello")
def g():
pass
commit(MyApp)
assert MyApp.config.foo == [("hello", f)]
assert MyApp.config.bar == [("hello", g)]
def test_same_group_conflict():
class FooDirective(Action):
config = {"foo": list}
def __init__(self, message):
self.message = message
def identifier(self, foo):
return self.message
def perform(self, obj, foo):
foo.append((self.message, obj))
class BarDirective(Action):
# should now conflict
group_class = FooDirective
def __init__(self, message):
self.message = message
def identifier(self, foo):
return self.message
def perform(self, obj, foo):
foo.append((self.message, obj))
class MyApp(App):
foo = directive(FooDirective)
bar = directive(BarDirective)
@MyApp.foo("hello")
def f():
pass
@MyApp.bar("hello")
def g():
pass
with pytest.raises(ConflictError):
commit(MyApp)
def test_discriminator_conflict():
class FooDirective(Action):
config = {"my": list}
def __init__(self, message, others):
self.message = message
self.others = others
def identifier(self, my):
return self.message
def discriminators(self, my):
return self.others
def perform(self, obj, my):
my.append((self.message, obj))
class MyApp(App):
foo = directive(FooDirective)
@MyApp.foo("f", ["a"])
def f():
pass
@MyApp.foo("g", ["a", "b"])
def g():
pass
with pytest.raises(ConflictError):
commit(MyApp)
def test_discriminator_same_group_conflict():
class FooDirective(Action):
config = {"my": list}
def __init__(self, message, others):
self.message = message
self.others = others
def identifier(self, my):
return self.message
def discriminators(self, my):
return self.others
def perform(self, obj, my):
my.append((self.message, obj))
class BarDirective(FooDirective):
group_class = FooDirective
class MyApp(App):
foo = directive(FooDirective)
bar = directive(BarDirective)
@MyApp.foo("f", ["a"])
def f():
pass
@MyApp.bar("g", ["a", "b"])
def g():
pass
with pytest.raises(ConflictError):
commit(MyApp)
def test_discriminator_no_conflict():
class FooDirective(Action):
config = {"my": list}
def __init__(self, message, others):
self.message = message
self.others = others
def identifier(self, my):
return self.message
def discriminators(self, my):
return self.others
def perform(self, obj, my):
my.append((self.message, obj))
class MyApp(App):
foo = directive(FooDirective)
@MyApp.foo("f", ["a"])
def f():
pass
@MyApp.foo("g", ["b"])
def g():
pass
commit(MyApp)
assert MyApp.config.my == [("f", f), ("g", g)]
def test_discriminator_different_group_no_conflict():
class FooDirective(Action):
config = {"my": list}
def __init__(self, message, others):
self.message = message
self.others = others
def identifier(self, my):
return self.message
def discriminators(self, my):
return self.others
def perform(self, obj, my):
my.append((self.message, obj))
class BarDirective(FooDirective):
# will have its own group key so in a different group
depends = [FooDirective]
class MyApp(App):
foo = directive(FooDirective)
bar = directive(BarDirective)
@MyApp.foo("f", ["a"])
def f():
pass
@MyApp.bar("g", ["a", "b"])
def g():
pass
commit(MyApp)
assert MyApp.config.my == [("f", f), ("g", g)]
def test_depends():
class FooDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class BarDirective(Action):
depends = [FooDirective]
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class MyApp(App):
foo = directive(FooDirective)
bar = directive(BarDirective)
@MyApp.bar("a")
def g():
pass
@MyApp.foo("b")
def f():
pass
commit(MyApp)
# since bar depends on foo, it should be executed last
assert MyApp.config.my == [("b", f), ("a", g)]
def test_composite():
class SubDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class CompositeDirective(Composite):
def __init__(self, messages):
self.messages = messages
def actions(self, obj):
return [(SubDirective(message), obj) for message in self.messages]
class MyApp(App):
_sub = directive(SubDirective)
composite = directive(CompositeDirective)
@MyApp.composite(["a", "b", "c"])
def f():
pass
commit(MyApp)
assert MyApp.config.my == [("a", f), ("b", f), ("c", f)]
def test_composite_change_object():
class SubDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
def other():
pass
class CompositeDirective(Composite):
def __init__(self, messages):
self.messages = messages
def actions(self, obj):
return [(SubDirective(message), other) for message in self.messages]
class MyApp(App):
_sub = directive(SubDirective)
composite = directive(CompositeDirective)
@MyApp.composite(["a", "b", "c"])
def f():
pass
commit(MyApp)
assert MyApp.config.my == [("a", other), ("b", other), ("c", other)]
def test_composite_private_sub():
class SubDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class CompositeDirective(Composite):
def __init__(self, messages):
self.messages = messages
def actions(self, obj):
return [(SubDirective(message), obj) for message in self.messages]
class MyApp(App):
# mark sub as private by using the underscore
_sub = directive(SubDirective)
composite = directive(CompositeDirective)
@MyApp.composite(["a", "b", "c"])
def f():
pass
commit(MyApp)
assert MyApp.config.my == [("a", f), ("b", f), ("c", f)]
def test_composite_private_composite():
class SubDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class CompositeDirective(Composite):
def __init__(self, messages):
self.messages = messages
def actions(self, obj):
return [(SubDirective(message), obj) for message in self.messages]
class MyApp(App):
sub = directive(SubDirective)
_composite = directive(CompositeDirective)
@MyApp.sub("a")
def f():
pass
commit(MyApp)
assert MyApp.config.my == [("a", f)]
def test_nested_composite():
class SubDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class SubCompositeDirective(Composite):
def __init__(self, message):
self.message = message
def actions(self, obj):
yield SubDirective(self.message + "_0"), obj
yield SubDirective(self.message + "_1"), obj
class CompositeDirective(Composite):
def __init__(self, messages):
self.messages = messages
def actions(self, obj):
return [
(SubCompositeDirective(message), obj)
for message in self.messages
]
class MyApp(App):
sub = directive(SubDirective)
subcomposite = directive(SubCompositeDirective)
composite = directive(CompositeDirective)
@MyApp.composite(["a", "b", "c"])
def f():
pass
commit(MyApp)
    # the outer composite expands each message through the nested
    # sub-composite, producing a _0 and _1 entry per message
assert MyApp.config.my == [
("a_0", f),
("a_1", f),
("b_0", f),
("b_1", f),
("c_0", f),
("c_1", f),
]
def test_with_statement_kw():
class FooDirective(Action):
config = {"my": list}
def __init__(self, model, name):
self.model = model
self.name = name
def identifier(self, my):
return (self.model, self.name)
def perform(self, obj, my):
my.append((self.model, self.name, obj))
class Dummy:
pass
class MyApp(App):
foo = directive(FooDirective)
with MyApp.foo(model=Dummy) as foo:
@foo(name="a")
def f():
pass
@foo(name="b")
def g():
pass
commit(MyApp)
assert MyApp.config.my == [
(Dummy, "a", f),
(Dummy, "b", g),
]
def test_with_statement_args():
class FooDirective(Action):
config = {"my": list}
def __init__(self, model, name):
self.model = model
self.name = name
def identifier(self, my):
return (self.model, self.name)
def perform(self, obj, my):
my.append((self.model, self.name, obj))
class MyApp(App):
foo = directive(FooDirective)
class Dummy:
pass
with MyApp.foo(Dummy) as foo:
@foo("a")
def f():
pass
@foo("b")
def g():
pass
commit(MyApp)
assert MyApp.config.my == [
(Dummy, "a", f),
(Dummy, "b", g),
]
def test_before():
class Registry:
def __init__(self):
self.li = []
self.before = False
def add(self, name, obj):
assert self.before
self.li.append((name, obj))
class FooDirective(Action):
config = {"my": Registry}
def __init__(self, name):
self.name = name
def identifier(self, my):
return self.name
def perform(self, obj, my):
my.add(self.name, obj)
@staticmethod
def before(my):
my.before = True
class MyApp(App):
foo = directive(FooDirective)
@MyApp.foo(name="hello")
def f():
pass
commit(MyApp)
assert MyApp.config.my.before
assert MyApp.config.my.li == [
("hello", f),
]
def test_before_without_use():
class Registry:
def __init__(self):
self.li = []
self.before = False
def add(self, name, obj):
assert self.before
self.li.append((name, obj))
class FooDirective(Action):
config = {"my": Registry}
def __init__(self, name):
self.name = name
def identifier(self, my):
return self.name
def perform(self, obj, my):
my.add(self.name, obj)
@staticmethod
def before(my):
my.before = True
class MyApp(App):
foo = directive(FooDirective)
commit(MyApp)
assert MyApp.config.my.before
assert MyApp.config.my.li == []
def test_before_group():
class Registry:
def __init__(self):
self.li = []
self.before = False
def add(self, name, obj):
assert self.before
self.li.append((name, obj))
class FooDirective(Action):
config = {"my": Registry}
def __init__(self, name):
self.name = name
def identifier(self, my):
return self.name
def perform(self, obj, my):
my.add(self.name, obj)
@staticmethod
def before(my):
my.before = True
class BarDirective(Action):
group_class = FooDirective
def __init__(self, name):
self.name = name
def identifier(self, my):
return self.name
def perform(self, obj, my):
pass
class MyApp(App):
foo = directive(FooDirective)
bar = directive(BarDirective)
@MyApp.bar(name="bye")
def f():
pass
@MyApp.foo(name="hello")
def g():
pass
commit(MyApp)
assert MyApp.config.my.before
assert MyApp.config.my.li == [
("hello", g),
]
def test_config_group():
class FooDirective(Action):
config = {"my": list}
def __init__(self, name):
self.name = name
def identifier(self, my):
return self.name
def perform(self, obj, my):
my.append((self.name, obj))
class BarDirective(Action):
group_class = FooDirective
def __init__(self, name):
self.name = name
def identifier(self, my):
return self.name
def perform(self, obj, my):
my.append((self.name, obj))
class MyApp(App):
foo = directive(FooDirective)
bar = directive(BarDirective)
@MyApp.bar(name="bye")
def f():
pass
@MyApp.foo(name="hello")
def g():
pass
commit(MyApp)
assert MyApp.config.my == [
("bye", f),
("hello", g),
]
def test_before_group_without_use():
class Registry:
def __init__(self):
self.li = []
self.before = False
def add(self, name, obj):
assert self.before
self.li.append((name, obj))
class FooDirective(Action):
config = {"my": Registry}
def __init__(self, name):
self.name = name
def identifier(self, my):
return self.name
def perform(self, obj, my):
my.add(self.name, obj)
@staticmethod
def before(my):
my.before = True
class BarDirective(Action):
group_class = FooDirective
def __init__(self, name):
self.name = name
def identifier(self):
return self.name
def perform(self, obj):
pass
class MyApp(App):
foo = directive(FooDirective)
bar = directive(BarDirective)
commit(MyApp)
assert MyApp.config.my.before
assert MyApp.config.my.li == []
def test_after():
class Registry:
def __init__(self):
self.li = []
self.after = False
def add(self, name, obj):
assert not self.after
self.li.append((name, obj))
class FooDirective(Action):
config = {"my": Registry}
def __init__(self, name):
self.name = name
def identifier(self, my):
return self.name
def perform(self, obj, my):
my.add(self.name, obj)
@staticmethod
def after(my):
my.after = True
class MyApp(App):
foo = directive(FooDirective)
@MyApp.foo(name="hello")
def f():
pass
commit(MyApp)
assert MyApp.config.my.after
assert MyApp.config.my.li == [
("hello", f),
]
def test_after_without_use():
class Registry:
def __init__(self):
self.li = []
self.after = False
def add(self, name, obj):
assert not self.after
self.li.append((name, obj))
class FooDirective(Action):
config = {"my": Registry}
def __init__(self, name):
self.name = name
def identifier(self, my):
return self.name
def perform(self, obj, my):
my.add(self.name, obj)
@staticmethod
def after(my):
my.after = True
class MyApp(App):
foo = directive(FooDirective)
commit(MyApp)
assert MyApp.config.my.after
assert MyApp.config.my.li == []
def test_action_loop_should_conflict():
class MyDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class MyApp(App):
foo = directive(MyDirective)
for i in range(2):
@MyApp.foo("hello")
def f():
pass
with pytest.raises(ConflictError):
commit(MyApp)
def test_action_init_only_during_commit():
init_called = []
class MyDirective(Action):
config = {"my": list}
def __init__(self, message):
init_called.append("there")
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class MyApp(App):
foo = directive(MyDirective)
@MyApp.foo("hello")
def f():
pass
assert init_called == []
commit(MyApp)
assert init_called == ["there"]
def test_registry_should_exist_even_without_directive_use():
class MyDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class MyApp(App):
foo = directive(MyDirective)
commit(MyApp)
assert MyApp.config.my == []
def test_registry_should_exist_even_without_directive_use_subclass():
class MyDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class MyApp(App):
foo = directive(MyDirective)
class SubApp(MyApp):
pass
commit(MyApp, SubApp)
assert MyApp.config.my == []
assert SubApp.config.my == []
def test_rerun_commit():
class MyDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class MyApp(App):
foo = directive(MyDirective)
@MyApp.foo("hello")
def f():
pass
commit(MyApp)
# and again
commit(MyApp)
assert MyApp.config.my == [("hello", f)]
def test_rerun_commit_add_directive():
class MyDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class MyApp(App):
foo = directive(MyDirective)
@MyApp.foo("hello")
def f():
pass
commit(MyApp)
@MyApp.foo("bye")
def g():
pass
# and again
commit(MyApp)
assert MyApp.config.my == [("hello", f), ("bye", g)]
def test_order_subclass():
class MyDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class MyApp(App):
foo = directive(MyDirective)
class SubApp(MyApp):
pass
@SubApp.foo("c")
def h():
pass
@MyApp.foo("a")
def f():
pass
@MyApp.foo("b")
def g():
pass
commit(MyApp, SubApp)
assert SubApp.config.my == [("a", f), ("b", g), ("c", h)]
def test_registry_single_factory_argument():
class Other:
factory_arguments = {"my": list}
def __init__(self, my):
self.my = my
class MyDirective(Action):
config = {"my": list, "other": Other}
def __init__(self, message):
self.message = message
def identifier(self, my, other):
return self.message
def perform(self, obj, my, other):
my.append((self.message, obj))
class MyApp(App):
foo = directive(MyDirective)
@MyApp.foo("hello")
def f():
pass
commit(MyApp)
assert MyApp.config.other.my == [("hello", f)]
def test_registry_factory_argument_introduces_new_registry():
class Other:
factory_arguments = {"my": list}
def __init__(self, my):
self.my = my
class MyDirective(Action):
config = {"other": Other}
def __init__(self, message):
self.message = message
def identifier(self, other):
return self.message
def perform(self, obj, other):
other.my.append((self.message, obj))
class MyApp(App):
foo = directive(MyDirective)
@MyApp.foo("hello")
def f():
pass
commit(MyApp)
assert MyApp.config.other.my == [("hello", f)]
assert MyApp.config.my is MyApp.config.other.my
def test_registry_factory_argument_introduces_new_registry_subclass():
class IsUsedElsewhere:
poked = False
class Other:
factory_arguments = {"my": IsUsedElsewhere}
def __init__(self, my):
self.my = my
class MyDirective(Action):
config = {"other": Other}
def __init__(self, message):
self.message = message
def identifier(self, other):
return self.message
def perform(self, obj, other):
assert not other.my.poked
other.my.poked = True
class MyApp(App):
foo = directive(MyDirective)
class SubApp(MyApp):
pass
@MyApp.foo("hello")
def f():
pass
commit(MyApp)
assert MyApp.config.other.my.poked
assert MyApp.config.my is MyApp.config.other.my
commit(SubApp)
def test_registry_multiple_factory_arguments():
class Other:
factory_arguments = {"my": list, "my2": list}
def __init__(self, my, my2):
self.my = my
self.my2 = my2
class MyDirective(Action):
config = {"my": list, "my2": list, "other": Other}
def __init__(self, message):
self.message = message
def identifier(self, my, my2, other):
return self.message
def perform(self, obj, my, my2, other):
my.append((self.message, obj))
my2.append("blah")
class MyApp(App):
foo = directive(MyDirective)
@MyApp.foo("hello")
def f():
pass
commit(MyApp)
assert MyApp.config.other.my == [("hello", f)]
assert MyApp.config.other.my2 == ["blah"]
def test_registry_factory_arguments_depends():
class Other:
factory_arguments = {"my": list}
def __init__(self, my):
self.my = my
class FooDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class BarDirective(Action):
config = {"other": Other}
depends = [FooDirective]
def __init__(self, name):
self.name = name
def identifier(self, other):
return self.name
def perform(self, obj, other):
pass
class MyApp(App):
foo = directive(FooDirective)
bar = directive(BarDirective)
@MyApp.foo("hello")
def f():
pass
commit(MyApp)
assert MyApp.config.other.my == [("hello", f)]
def test_registry_factory_arguments_depends_complex():
class Registry:
pass
class PredicateRegistry:
factory_arguments = {"registry": Registry}
def __init__(self, registry):
self.registry = registry
class SettingAction(Action):
config = {"registry": Registry}
class PredicateAction(Action):
config = {"predicate_registry": PredicateRegistry}
depends = [SettingAction]
class ViewAction(Action):
config = {"registry": Registry}
depends = [PredicateAction]
class MyApp(App):
setting = directive(SettingAction)
predicate = directive(PredicateAction)
view = directive(ViewAction)
commit(MyApp)
assert MyApp.config.registry is MyApp.config.predicate_registry.registry
def test_is_committed():
class MyApp(App):
pass
assert not MyApp.is_committed()
commit(MyApp)
assert MyApp.is_committed()
def test_registry_config_inconsistent():
class FooDirective(Action):
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class BarDirective(Action):
config = {"my": dict}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my[self.message] = obj
class MyApp(App):
foo = directive(FooDirective)
bar = directive(BarDirective)
with pytest.raises(ConfigError):
commit(MyApp)
def test_registry_factory_argument_inconsistent():
class Other:
factory_arguments = {"my": list}
def __init__(self, my):
self.my = my
class YetAnother:
factory_arguments = {"my": dict}
def __init__(self, my):
self.my = my
class MyDirective(Action):
config = {"other": Other, "yetanother": YetAnother}
def __init__(self, message):
self.message = message
def identifier(self, other, yetanother):
return self.message
def perform(self, obj, other, yetanother):
pass
class MyApp(App):
foo = directive(MyDirective)
with pytest.raises(ConfigError):
commit(MyApp)
def test_registry_factory_argument_and_config_inconsistent():
class Other:
factory_arguments = {"my": dict}
def __init__(self, my):
self.my = my
class MyDirective(Action):
config = {"my": list, "other": Other}
def __init__(self, message):
self.message = message
def identifier(self, my, other):
return self.message
def perform(self, obj, my, other):
my.append((self.message, obj))
class MyApp(App):
foo = directive(MyDirective)
with pytest.raises(ConfigError):
commit(MyApp)
# making this global to ensure the repr is the same
# on Python 3.5 and earlier versions (see PEP 3155)
class ReprDirective(Action):
"""Doc"""
config = {"my": list}
def __init__(self, message):
self.message = message
def identifier(self, my):
return self.message
def perform(self, obj, my):
my.append((self.message, obj))
class MyAppForRepr(App):
foo = directive(ReprDirective)
def test_directive_repr():
MyAppForRepr.commit()
assert repr(MyAppForRepr.foo) == (
"<bound method AppMeta.foo of "
"<class 'dectate.tests.test_directive.MyAppForRepr'>>"
)
def test_app_class_passed_into_action():
class MyDirective(Action):
config = {"my": list}
app_class_arg = True
def __init__(self, message):
self.message = message
def identifier(self, app_class, my):
return self.message
def perform(self, obj, app_class, my):
app_class.touched.append(None)
my.append((self.message, obj))
class MyApp(App):
touched = []
foo = directive(MyDirective)
class SubApp(MyApp):
touched = []
@MyApp.foo("hello")
def f():
pass
assert not MyApp.touched
commit(MyApp)
assert MyApp.touched == [None]
# the subclass is not affected until we commit for it too
assert not SubApp.touched
commit(SubApp)
assert SubApp.touched == [None]
def test_app_class_passed_into_factory():
class Other:
factory_arguments = {"my": list}
app_class_arg = True
def __init__(self, my, app_class):
self.my = my
self.app_class = app_class
def touch(self):
self.app_class.touched = True
class MyDirective(Action):
config = {"other": Other}
def __init__(self):
pass
def identifier(self, other):
return ()
def perform(self, obj, other):
other.touch()
class MyApp(App):
touched = False
foo = directive(MyDirective)
@MyApp.foo()
def f():
pass
assert not MyApp.touched
commit(MyApp)
assert MyApp.touched
def test_app_class_passed_into_factory_no_factory_arguments():
class Other:
app_class_arg = True
def __init__(self, app_class):
self.app_class = app_class
def touch(self):
self.app_class.touched = True
class MyDirective(Action):
config = {"other": Other}
def __init__(self):
pass
def identifier(self, other):
return ()
def perform(self, obj, other):
other.touch()
class MyApp(App):
touched = False
foo = directive(MyDirective)
@MyApp.foo()
def f():
pass
assert not MyApp.touched
commit(MyApp)
assert MyApp.touched
def test_app_class_passed_into_factory_separation():
class Other:
factory_arguments = {"my": list}
app_class_arg = True
def __init__(self, my, app_class):
self.my = my
self.app_class = app_class
def touch(self):
self.app_class.touched = True
class MyDirective(Action):
config = {"other": Other}
def __init__(self):
pass
def identifier(self, other):
return ()
def perform(self, obj, other):
other.touch()
class MyApp(App):
touched = False
foo = directive(MyDirective)
class SubApp(MyApp):
touched = False
@MyApp.foo()
def f():
pass
assert not MyApp.touched
commit(MyApp)
assert MyApp.touched
assert not SubApp.touched
commit(SubApp)
assert SubApp.touched
def test_app_class_cleanup():
class MyDirective(Action):
config = {}
app_class_arg = True
def __init__(self):
pass
def identifier(self, app_class):
return ()
def perform(self, obj, app_class):
app_class.touched.append(None)
class MyApp(App):
touched = []
@classmethod
def clean(cls):
cls.touched = []
foo = directive(MyDirective)
@MyApp.foo()
def f():
pass
assert not MyApp.touched
commit(MyApp)
assert MyApp.touched == [None]
commit(MyApp)
assert MyApp.touched == [None]
|
|
#!/usr/bin/python
#----------------------------------------------------------------------
# Copyright (c) 2013-2016 Raytheon BBN Technologies
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and/or hardware specification (the "Work") to
# deal in the Work without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Work, and to permit persons to whom the Work
# is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Work.
#
# THE WORK IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE WORK OR THE USE OR OTHER DEALINGS
# IN THE WORK.
#----------------------------------------------------------------------
"""
Framework to run a GENI Aggregate Manager. See geni/am for the
Reference Aggregate Manager that this runs.
Run with "-h" flag to see usage and command line options.
"""
import importlib
import pdb
import sys
import subprocess
import time
# Check python version. Requires 2.6 or greater, but less than 3.
if sys.version_info < (2, 6):
raise Exception('Must use python 2.6 or greater.')
elif sys.version_info >= (3,):
raise Exception('Not python 3 ready')
import threading
import logging
import optparse
import os
import gcf.geni
import gram
import gram.am
import gram.am.am3
import gram.am.gram_am2
import gram.am.rpiv3
import gram.am.gram.config
from gcf.geni.config import read_config
# Return an instance of a class given by fully qualified name
# (module_path.classname) with variable constructor args
def getInstanceFromClassname(class_name, *argv):
class_module_name = ".".join(class_name.split('.')[:-1])
class_base_name = class_name.split('.')[-1]
class_module = importlib.import_module(class_module_name)
    class_instance = getattr(class_module, class_base_name)
object_instance = class_instance(*argv)
return object_instance
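# Illustrative sketch (hypothetical arguments, not part of the original
# script): any importable dotted class name works, e.g.
#
#   logger = getInstanceFromClassname('logging.Logger', 'gram-example')
#   # imports the 'logging' module and returns logging.Logger('gram-example')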
# Set up parser and return parsed arguments
def parse_args(argv):
parser = optparse.OptionParser()
parser.add_option("-k", "--keyfile",
help="AM key file name", metavar="FILE")
parser.add_option("-g", "--certfile",
help="AM certificate file name (PEM format)", metavar="FILE")
parser.add_option("-c", "--configfile", help="config file path", metavar="FILE")
# Note: The trusted CH certificates are _not_ enough here.
# It needs self signed certificates. EG CA certificates.
parser.add_option("-r", "--rootcadir",
help="Trusted Root certificates directory (files in PEM format)", metavar="FILE")
# Could try to determine the real IP Address instead of the loopback
# using socket.gethostbyname(socket.gethostname())
parser.add_option("-H", "--host",
help="server ip", metavar="HOST")
parser.add_option("-p", "--v3_port", type=int,
help="V3 server port", metavar="PORT")
parser.add_option("-q", "--v2_port", type=int,
help="V2 server port", metavar="PORT")
parser.add_option("-z", "--rpi_port", type=int,
help="RPI V3 server port", metavar="PORT")
parser.add_option("--debug", action="store_true", default=False,
help="enable debugging output")
parser.add_option("-V", "--api-version", type=int,
help="AM API Version", default=3)
parser.add_option("--snapshot_dir", \
help="name of directory to save snapshots", \
default=None)
parser.add_option("--recover_from_snapshot", \
help="name of snapshot to initialize gram state", \
default=None)
parser.add_option("--recover_from_most_recent_snapshot", \
help="whether to recover from most recent " + \
"snapshot in 'gram_snapshot_directory'", \
default=True)
parser.add_option("--snapshot_maintain_limit", type=int,
help="Retain only this limit of recent snapshots",
default=10)
parser.add_option("--config_file",
help="Location of GRAM installation-specific " +
"configuration",
default="/etc/gram/config.json")
return parser.parse_args()
def getAbsPath(path):
"""Return None or a normalized absolute path version of the argument string.
Does not check that the path exists."""
if path is None:
return None
if path.strip() == "":
return None
path = os.path.normcase(os.path.expanduser(path))
if os.path.isabs(path):
return path
else:
return os.path.abspath(path)
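# Illustrative sketch (not part of the original script; the expanded home
# directory shown is an assumption):
#
#   getAbsPath('~/ssl/am.pem')   # -> '/home/user/ssl/am.pem'
#   getAbsPath('  ')             # -> None (blank paths are rejected)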
def main(argv=None):
if argv is None:
argv = sys.argv
opts = parse_args(argv)[0]
gram.am.gram.config.initialize(opts.config_file)
# If the port isn't set explicitly, use defaults from config
if not opts.v3_port:
opts.v3_port = gram.am.gram.config.gram_am_port
if not opts.v2_port:
opts.v2_port = gram.am.gram.config.gram_am_v2_port
if not opts.rpi_port:
opts.rpi_port = gram.am.gram.config.gram_am_rpi_port
level = logging.INFO
if opts.debug:
level = logging.DEBUG
logging.basicConfig(level=level, format = '%(asctime)s %(message)s')
# Read in config file options, command line gets priority
optspath = None
    if opts.configfile is not None:
optspath = os.path.expanduser(opts.configfile)
config = read_config(optspath)
for (key,val) in config['aggregate_manager'].items():
if hasattr(opts,key) and getattr(opts,key) is None:
setattr(opts,key,val)
if not hasattr(opts,key):
setattr(opts,key,val)
if getattr(opts,'rootcadir') is None:
setattr(opts,'rootcadir',config['global']['rootcadir'])
if opts.rootcadir is None:
sys.exit('Missing path to trusted root certificate directory (-r argument)')
certfile = getAbsPath(opts.certfile)
keyfile = getAbsPath(opts.keyfile)
if not os.path.exists(certfile):
sys.exit("Aggregate certfile %s doesn't exist" % certfile)
if not os.path.exists(keyfile):
sys.exit("Aggregate keyfile %s doesn't exist" % keyfile)
# Removed because OpenStack is not required for pi usage
# # Check if quantum is running, if not, then take a nap
# command_str = '%s net-list' % gram.am.gram.config.network_type
# command = command_str.split()
# ready = 0
# while(not ready):
# try :
# subprocess.check_output(command)
# ready = 1
# logging.getLogger('gram-am').info(' Ready to start GRAM')
# except :
# logging.getLogger('gram-am').error('Error executing command %s' % command)
# time.sleep(15)
gram.am.gram.config.snapshot_dir = opts.snapshot_dir
gram.am.gram.config.recover_from_snapshot = opts.recover_from_snapshot
gram.am.gram.config.recover_from_most_recent_snapshot = \
opts.recover_from_most_recent_snapshot
gram.am.gram.config.snapshot_maintain_limit = opts.snapshot_maintain_limit
# Instantiate an argument guard that will reject or modify
# arguments and options provided to calls
argument_guard = None
if hasattr(opts, 'argument_guard'):
argument_guard = getInstanceFromClassname(opts.argument_guard)
# Instantiate authorizer from 'authorizer' config argument
# By default, use the SFA authorizer
if hasattr(opts, 'authorizer'):
authorizer_classname = opts.authorizer
else:
authorizer_classname = "gcf.geni.auth.sfa_authorizer.SFA_Authorizer"
authorizer = getInstanceFromClassname(authorizer_classname,
opts.rootcadir, opts, argument_guard)
# Use XMLRPC authorizer if opt.remote_authorizer is set
if hasattr(opts, 'remote_authorizer'):
import xmlrpclib
authorizer = xmlrpclib.Server(opts.remote_authorizer)
# Instantiate resource manager from 'authorizer_resource_manager'
# config argument. Default = None
resource_manager = None
if hasattr(opts, 'authorizer_resource_manager'):
resource_manager = \
getInstanceFromClassname(opts.authorizer_resource_manager)
# rootcadir is dir of multiple certificates
delegate = gcf.geni.ReferenceAggregateManager(getAbsPath(opts.rootcadir))
# here rootcadir is supposed to be a single file with multiple
# certs possibly concatenated together
comboCertsFile = gcf.geni.CredentialVerifier.getCAsFileFromDir(getAbsPath(opts.rootcadir))
server_url = "https://%s:%d/" % (opts.host, int(opts.v3_port))
GRAM=gram.am.am3.GramReferenceAggregateManager(getAbsPath(opts.rootcadir), config['global']['base_name'], certfile, server_url)
if opts.api_version == 1:
msg = "Version 1 of AM API unsopported in GRAM"
sys.exit(msg)
#elif opts.api_version == 2:
ams_v2 = gram.am.gram_am2.GramAggregateManagerServer((opts.host, int(opts.v2_port)),
keyfile=keyfile,
certfile=certfile,
trust_roots_dir=getAbsPath(opts.rootcadir),
ca_certs=comboCertsFile,
base_name=config['global']['base_name'],
authorizer=authorizer,
resource_manager = resource_manager,
GRAM=GRAM)
#elif opts.api_version == 3:
ams_v3 = gram.am.am3.GramAggregateManagerServer((opts.host, int(opts.v3_port)),
keyfile=keyfile,
certfile=certfile,
trust_roots_dir=getAbsPath(opts.rootcadir),
ca_certs=comboCertsFile,
base_name=config['global']['base_name'],
authorizer=authorizer,
resource_manager = resource_manager,
GRAM=GRAM)
ams_rpi_v3 = gram.am.rpiv3.GramAggregateManagerServer((opts.host, int(opts.rpi_port)),
keyfile=keyfile,
certfile=certfile,
trust_roots_dir=getAbsPath(opts.rootcadir),
ca_certs=comboCertsFile,
base_name=config['global']['base_name'],
authorizer=authorizer,
resource_manager = resource_manager,
GRAM=GRAM)
#else:
# msg = "Unknown API version: %d. Valid choices are \"1\", \"2\", or \"3\""
# sys.exit(msg % (opts.api_version))
logging.getLogger('gcf-am').info('GENI AM 3 Listening on port %s...' % (opts.v3_port))
logging.getLogger('gcf-am').info('GENI AM 2 Listening on port %s...' % (opts.v2_port))
logging.getLogger('gcf-am').info('GENI AM PI Listening on port %s...' % (opts.rpi_port))
thread = threading.Thread(target=ams_v2.serve_forever,args=())
thread.start()
thread = threading.Thread(target=ams_rpi_v3.serve_forever,args=())
thread.start()
ams_v3.serve_forever()
if __name__ == "__main__":
sys.exit(main())
|
|
# Copyright 2013-2016 DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This module holds classes for working with prepared statements and
specifying consistency levels and retry policies for individual
queries.
"""
from collections import namedtuple
from datetime import datetime, timedelta
import re
import struct
import time
import six
from six.moves import range, zip
from cassandra import ConsistencyLevel, OperationTimedOut
from cassandra.util import unix_time_from_uuid1
from cassandra.encoder import Encoder
import cassandra.encoder
from cassandra.protocol import _UNSET_VALUE
from cassandra.util import OrderedDict, _sanitize_identifiers
import logging
log = logging.getLogger(__name__)
UNSET_VALUE = _UNSET_VALUE
"""
Specifies an unset value when binding a prepared statement.
Unset values are ignored, allowing prepared statements to be used without specifying a value for every bound parameter.
See https://issues.apache.org/jira/browse/CASSANDRA-7304 for further details on semantics.
.. versionadded:: 2.6.0
Only valid when using native protocol v4+
"""
NON_ALPHA_REGEX = re.compile('[^a-zA-Z0-9]')
START_BADCHAR_REGEX = re.compile('^[^a-zA-Z0-9]*')
END_BADCHAR_REGEX = re.compile('[^a-zA-Z0-9_]*$')
_clean_name_cache = {}
def _clean_column_name(name):
try:
return _clean_name_cache[name]
except KeyError:
clean = NON_ALPHA_REGEX.sub("_", START_BADCHAR_REGEX.sub("", END_BADCHAR_REGEX.sub("", name)))
_clean_name_cache[name] = clean
return clean
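# Illustrative sketch (not part of the original module) of the cleaning rules:
#
#   _clean_column_name('user name!')   # -> 'user_name' (trailing junk is
#                                      #    stripped, inner non-alphanumerics
#                                      #    become '_')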
def tuple_factory(colnames, rows):
"""
Returns each row as a tuple
Example::
>>> from cassandra.query import tuple_factory
>>> session = cluster.connect('mykeyspace')
>>> session.row_factory = tuple_factory
>>> rows = session.execute("SELECT name, age FROM users LIMIT 1")
>>> print rows[0]
('Bob', 42)
.. versionchanged:: 2.0.0
moved from ``cassandra.decoder`` to ``cassandra.query``
"""
return rows
def named_tuple_factory(colnames, rows):
"""
Returns each row as a `namedtuple <https://docs.python.org/2/library/collections.html#collections.namedtuple>`_.
This is the default row factory.
Example::
>>> from cassandra.query import named_tuple_factory
>>> session = cluster.connect('mykeyspace')
>>> session.row_factory = named_tuple_factory
>>> rows = session.execute("SELECT name, age FROM users LIMIT 1")
>>> user = rows[0]
>>> # you can access field by their name:
>>> print "name: %s, age: %d" % (user.name, user.age)
name: Bob, age: 42
>>> # or you can access fields by their position (like a tuple)
>>> name, age = user
>>> print "name: %s, age: %d" % (name, age)
name: Bob, age: 42
>>> name = user[0]
>>> age = user[1]
>>> print "name: %s, age: %d" % (name, age)
name: Bob, age: 42
.. versionchanged:: 2.0.0
moved from ``cassandra.decoder`` to ``cassandra.query``
"""
clean_column_names = map(_clean_column_name, colnames)
try:
Row = namedtuple('Row', clean_column_names)
except Exception:
clean_column_names = list(map(_clean_column_name, colnames)) # create list because py3 map object will be consumed by first attempt
log.warning("Failed creating named tuple for results with column names %s (cleaned: %s) "
"(see Python 'namedtuple' documentation for details on name rules). "
"Results will be returned with positional names. "
"Avoid this by choosing different names, using SELECT \"<col name>\" AS aliases, "
"or specifying a different row_factory on your Session" %
(colnames, clean_column_names))
Row = namedtuple('Row', _sanitize_identifiers(clean_column_names))
return [Row(*row) for row in rows]
def dict_factory(colnames, rows):
"""
Returns each row as a dict.
Example::
>>> from cassandra.query import dict_factory
>>> session = cluster.connect('mykeyspace')
>>> session.row_factory = dict_factory
>>> rows = session.execute("SELECT name, age FROM users LIMIT 1")
>>> print rows[0]
{u'age': 42, u'name': u'Bob'}
.. versionchanged:: 2.0.0
moved from ``cassandra.decoder`` to ``cassandra.query``
"""
return [dict(zip(colnames, row)) for row in rows]
def ordered_dict_factory(colnames, rows):
"""
Like :meth:`~cassandra.query.dict_factory`, but returns each row as an OrderedDict,
so the order of the columns is preserved.
.. versionchanged:: 2.0.0
moved from ``cassandra.decoder`` to ``cassandra.query``
"""
return [OrderedDict(zip(colnames, row)) for row in rows]
FETCH_SIZE_UNSET = object()
class Statement(object):
"""
An abstract class representing a single query. There are three subclasses:
:class:`.SimpleStatement`, :class:`.BoundStatement`, and :class:`.BatchStatement`.
These can be passed to :meth:`.Session.execute()`.
"""
retry_policy = None
"""
An instance of a :class:`cassandra.policies.RetryPolicy` or one of its
subclasses. This controls when a query will be retried and how it
will be retried.
"""
consistency_level = None
"""
The :class:`.ConsistencyLevel` to be used for this operation. Defaults
to :const:`None`, which means that the default consistency level for
the Session this is executed in will be used.
"""
fetch_size = FETCH_SIZE_UNSET
"""
How many rows will be fetched at a time. This overrides the default
of :attr:`.Session.default_fetch_size`
This only takes effect when protocol version 2 or higher is used.
See :attr:`.Cluster.protocol_version` for details.
.. versionadded:: 2.0.0
"""
keyspace = None
"""
The string name of the keyspace this query acts on. This is used when
:class:`~.TokenAwarePolicy` is configured for
:attr:`.Cluster.load_balancing_policy`
It is set implicitly on :class:`.BoundStatement`, and :class:`.BatchStatement`,
but must be set explicitly on :class:`.SimpleStatement`.
.. versionadded:: 2.1.3
"""
custom_payload = None
"""
:ref:`custom_payload` to be passed to the server.
These are only allowed when using protocol version 4 or higher.
.. versionadded:: 2.6.0
"""
is_idempotent = False
"""
Flag indicating whether this statement is safe to run multiple times in speculative execution.
"""
_serial_consistency_level = None
_routing_key = None
def __init__(self, retry_policy=None, consistency_level=None, routing_key=None,
serial_consistency_level=None, fetch_size=FETCH_SIZE_UNSET, keyspace=None, custom_payload=None,
is_idempotent=False):
if retry_policy and not hasattr(retry_policy, 'on_read_timeout'): # just checking one method to detect positional parameter errors
raise ValueError('retry_policy should implement cassandra.policies.RetryPolicy')
self.retry_policy = retry_policy
if consistency_level is not None:
self.consistency_level = consistency_level
self._routing_key = routing_key
if serial_consistency_level is not None:
self.serial_consistency_level = serial_consistency_level
if fetch_size is not FETCH_SIZE_UNSET:
self.fetch_size = fetch_size
if keyspace is not None:
self.keyspace = keyspace
if custom_payload is not None:
self.custom_payload = custom_payload
self.is_idempotent = is_idempotent
def _key_parts_packed(self, parts):
for p in parts:
l = len(p)
yield struct.pack(">H%dsB" % l, l, p, 0)
def _get_routing_key(self):
return self._routing_key
def _set_routing_key(self, key):
if isinstance(key, (list, tuple)):
if len(key) == 1:
self._routing_key = key[0]
else:
self._routing_key = b"".join(self._key_parts_packed(key))
else:
self._routing_key = key
def _del_routing_key(self):
self._routing_key = None
routing_key = property(
_get_routing_key,
_set_routing_key,
_del_routing_key,
"""
The :attr:`~.TableMetadata.partition_key` portion of the primary key,
which can be used to determine which nodes are replicas for the query.
If the partition key is a composite, a list or tuple must be passed in.
Each key component should be in its packed (binary) format, so all
components should be strings.
""")
def _get_serial_consistency_level(self):
return self._serial_consistency_level
def _set_serial_consistency_level(self, serial_consistency_level):
acceptable = (None, ConsistencyLevel.SERIAL, ConsistencyLevel.LOCAL_SERIAL)
if serial_consistency_level not in acceptable:
raise ValueError(
"serial_consistency_level must be either ConsistencyLevel.SERIAL "
"or ConsistencyLevel.LOCAL_SERIAL")
self._serial_consistency_level = serial_consistency_level
def _del_serial_consistency_level(self):
self._serial_consistency_level = None
serial_consistency_level = property(
_get_serial_consistency_level,
_set_serial_consistency_level,
_del_serial_consistency_level,
"""
The serial consistency level is only used by conditional updates
(``INSERT``, ``UPDATE`` and ``DELETE`` with an ``IF`` condition). For
those, the ``serial_consistency_level`` defines the consistency level of
the serial phase (or "paxos" phase) while the normal
:attr:`~.consistency_level` defines the consistency for the "learn" phase,
i.e. what type of reads will be guaranteed to see the update right away.
For example, if a conditional write has a :attr:`~.consistency_level` of
:attr:`~.ConsistencyLevel.QUORUM` (and is successful), then a
:attr:`~.ConsistencyLevel.QUORUM` read is guaranteed to see that write.
But if the regular :attr:`~.consistency_level` of that write is
:attr:`~.ConsistencyLevel.ANY`, then only a read with a
:attr:`~.consistency_level` of :attr:`~.ConsistencyLevel.SERIAL` is
guaranteed to see it (even a read with consistency
:attr:`~.ConsistencyLevel.ALL` is not guaranteed to be enough).
The serial consistency can only be one of :attr:`~.ConsistencyLevel.SERIAL`
or :attr:`~.ConsistencyLevel.LOCAL_SERIAL`. While ``SERIAL`` guarantees full
linearizability (with other ``SERIAL`` updates), ``LOCAL_SERIAL`` only
guarantees it in the local data center.
The serial consistency level is ignored for any query that is not a
conditional update. Serial reads should use the regular
:attr:`consistency_level`.
Serial consistency levels may only be used against Cassandra 2.0+
and the :attr:`~.Cluster.protocol_version` must be set to 2 or higher.
See :doc:`/lwt` for a discussion on how to work with results returned from
conditional statements.
.. versionadded:: 2.0.0
""")
class SimpleStatement(Statement):
"""
A simple, un-prepared query.
"""
def __init__(self, query_string, retry_policy=None, consistency_level=None, routing_key=None,
serial_consistency_level=None, fetch_size=FETCH_SIZE_UNSET, keyspace=None,
custom_payload=None, is_idempotent=False):
"""
`query_string` should be a literal CQL statement with the exception
of parameter placeholders that will be filled through the
`parameters` argument of :meth:`.Session.execute()`.
See :class:`Statement` attributes for a description of the other parameters.
"""
Statement.__init__(self, retry_policy, consistency_level, routing_key,
serial_consistency_level, fetch_size, keyspace, custom_payload, is_idempotent)
self._query_string = query_string
@property
def query_string(self):
return self._query_string
def __str__(self):
consistency = ConsistencyLevel.value_to_name.get(self.consistency_level, 'Not Set')
return (u'<SimpleStatement query="%s", consistency=%s>' %
(self.query_string, consistency))
__repr__ = __str__
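# Sketch of a SimpleStatement carrying per-statement options from the
# Statement attributes above; `session` and the schema are assumed:
#
#     stmt = SimpleStatement(
#         "SELECT * FROM users WHERE user_id = %s",
#         keyspace='mykeyspace',   # helps TokenAwarePolicy route the request
#         fetch_size=100)
#     rows = session.execute(stmt, (user_id,))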
class PreparedStatement(object):
"""
A statement that has been prepared against at least one Cassandra node.
Instances of this class should not be created directly, but through
:meth:`.Session.prepare()`.
A :class:`.PreparedStatement` should be prepared only once. Re-preparing a statement
may affect performance (as the operation requires a network roundtrip).
"""
column_metadata = None #TODO: make this bind_metadata in next major
consistency_level = None
custom_payload = None
fetch_size = FETCH_SIZE_UNSET
keyspace = None # change to prepared_keyspace in major release
protocol_version = None
query_id = None
query_string = None
result_metadata = None
routing_key_indexes = None
_routing_key_index_set = None
serial_consistency_level = None
def __init__(self, column_metadata, query_id, routing_key_indexes, query,
keyspace, protocol_version, result_metadata):
self.column_metadata = column_metadata
self.query_id = query_id
self.routing_key_indexes = routing_key_indexes
self.query_string = query
self.keyspace = keyspace
self.protocol_version = protocol_version
self.result_metadata = result_metadata
self.is_idempotent = False
@classmethod
def from_message(cls, query_id, column_metadata, pk_indexes, cluster_metadata,
query, prepared_keyspace, protocol_version, result_metadata):
if not column_metadata:
return PreparedStatement(column_metadata, query_id, None, query, prepared_keyspace, protocol_version, result_metadata)
if pk_indexes:
routing_key_indexes = pk_indexes
else:
routing_key_indexes = None
first_col = column_metadata[0]
ks_meta = cluster_metadata.keyspaces.get(first_col.keyspace_name)
if ks_meta:
table_meta = ks_meta.tables.get(first_col.table_name)
if table_meta:
partition_key_columns = table_meta.partition_key
# make a map of {column_name: index} for each column in the statement
statement_indexes = dict((c.name, i) for i, c in enumerate(column_metadata))
# a list of which indexes in the statement correspond to partition key items
try:
routing_key_indexes = [statement_indexes[c.name]
for c in partition_key_columns]
except KeyError: # we're missing a partition key component in the prepared
pass # statement; just leave routing_key_indexes as None
return PreparedStatement(column_metadata, query_id, routing_key_indexes,
query, prepared_keyspace, protocol_version, result_metadata)
def bind(self, values):
"""
Creates and returns a :class:`BoundStatement` instance using `values`.
See :meth:`BoundStatement.bind` for rules on input ``values``.
"""
return BoundStatement(self).bind(values)
def is_routing_key_index(self, i):
if self._routing_key_index_set is None:
self._routing_key_index_set = set(self.routing_key_indexes) if self.routing_key_indexes else set()
return i in self._routing_key_index_set
def __str__(self):
consistency = ConsistencyLevel.value_to_name.get(self.consistency_level, 'Not Set')
return (u'<PreparedStatement query="%s", consistency=%s>' %
(self.query_string, consistency))
__repr__ = __str__
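# Typical prepare-once, bind-many flow per the class docstring above;
# `session` is assumed:
#
#     prepared = session.prepare("SELECT name, age FROM users WHERE user_id = ?")
#     bound = prepared.bind((user_id,))
#     rows = session.execute(bound)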
class BoundStatement(Statement):
"""
A prepared statement that has been bound to a particular set of values.
These may be created directly or through :meth:`.PreparedStatement.bind()`.
"""
prepared_statement = None
"""
The :class:`PreparedStatement` instance that this was created from.
"""
values = None
"""
The sequence of values that were bound to the prepared statement.
"""
def __init__(self, prepared_statement, retry_policy=None, consistency_level=None, routing_key=None,
serial_consistency_level=None, fetch_size=FETCH_SIZE_UNSET, keyspace=None,
custom_payload=None):
"""
`prepared_statement` should be an instance of :class:`PreparedStatement`.
See :class:`Statement` attributes for a description of the other parameters.
"""
self.prepared_statement = prepared_statement
self.consistency_level = prepared_statement.consistency_level
self.serial_consistency_level = prepared_statement.serial_consistency_level
self.fetch_size = prepared_statement.fetch_size
self.custom_payload = prepared_statement.custom_payload
self.is_idempotent = prepared_statement.is_idempotent
self.values = []
meta = prepared_statement.column_metadata
if meta:
self.keyspace = meta[0].keyspace_name
Statement.__init__(self, retry_policy, consistency_level, routing_key,
serial_consistency_level, fetch_size, keyspace, custom_payload)
def bind(self, values):
"""
Binds a sequence of values for the prepared statement parameters
and returns this instance. Note that `values` *must* be:
* a sequence, even if you are only binding one value, or
* a dict that relates 1-to-1 between dict keys and columns
.. versionchanged:: 2.6.0
:data:`~.UNSET_VALUE` was introduced. These can be bound as positional parameters
in a sequence, or by name in a dict. Additionally, when using protocol v4+:
* short sequences will be extended to match bind parameters with UNSET_VALUE
* names may be omitted from a dict with UNSET_VALUE implied.
.. versionchanged:: 3.0.0
method will not throw if extra keys are present in bound dict (PYTHON-178)
"""
if values is None:
values = ()
proto_version = self.prepared_statement.protocol_version
col_meta = self.prepared_statement.column_metadata
# special case for binding dicts
if isinstance(values, dict):
values_dict = values
values = []
# sort values accordingly
for col in col_meta:
try:
values.append(values_dict[col.name])
except KeyError:
if proto_version >= 4:
values.append(UNSET_VALUE)
else:
raise KeyError(
'Column name `%s` not found in bound dict.' %
(col.name))
value_len = len(values)
col_meta_len = len(col_meta)
if value_len > col_meta_len:
raise ValueError(
"Too many arguments provided to bind() (got %d, expected %d)" %
(len(values), len(col_meta)))
# this is fail-fast for clarity pre-v4. When v4 can be assumed,
# the error will be better reported when UNSET_VALUE is implicitly added.
if proto_version < 4 and self.prepared_statement.routing_key_indexes and \
value_len < len(self.prepared_statement.routing_key_indexes):
raise ValueError(
"Too few arguments provided to bind() (got %d, required %d for routing key)" %
(value_len, len(self.prepared_statement.routing_key_indexes)))
self.raw_values = values
self.values = []
for value, col_spec in zip(values, col_meta):
if value is None:
self.values.append(None)
elif value is UNSET_VALUE:
if proto_version >= 4:
self._append_unset_value()
else:
raise ValueError("Attempt to bind UNSET_VALUE while using unsuitable protocol version (%d < 4)" % proto_version)
else:
try:
self.values.append(col_spec.type.serialize(value, proto_version))
except (TypeError, struct.error) as exc:
actual_type = type(value)
message = ('Received an argument of invalid type for column "%s". '
'Expected: %s, Got: %s; (%s)' % (col_spec.name, col_spec.type, actual_type, exc))
raise TypeError(message)
if proto_version >= 4:
diff = col_meta_len - len(self.values)
if diff:
for _ in range(diff):
self._append_unset_value()
return self
def _append_unset_value(self):
next_index = len(self.values)
if self.prepared_statement.is_routing_key_index(next_index):
col_meta = self.prepared_statement.column_metadata[next_index]
raise ValueError("Cannot bind UNSET_VALUE as a part of the routing key '%s'" % col_meta.name)
self.values.append(UNSET_VALUE)
@property
def routing_key(self):
if not self.prepared_statement.routing_key_indexes:
return None
if self._routing_key is not None:
return self._routing_key
routing_indexes = self.prepared_statement.routing_key_indexes
if len(routing_indexes) == 1:
self._routing_key = self.values[routing_indexes[0]]
else:
self._routing_key = b"".join(self._key_parts_packed(self.values[i] for i in routing_indexes))
return self._routing_key
def __str__(self):
consistency = ConsistencyLevel.value_to_name.get(self.consistency_level, 'Not Set')
return (u'<BoundStatement query="%s", values=%s, consistency=%s>' %
(self.prepared_statement.query_string, self.raw_values, consistency))
__repr__ = __str__
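# bind() accepts a positional sequence or a dict keyed by column name, as
# documented in BoundStatement.bind above; a sketch assuming protocol v4+:
#
#     prepared = session.prepare("INSERT INTO users (id, name, age) VALUES (?, ?, ?)")
#     bound = prepared.bind({'id': user_id, 'name': 'Bob'})  # 'age' implied UNSET_VALUE
#     session.execute(bound)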
class BatchType(object):
"""
A BatchType is used with :class:`.BatchStatement` instances to control
the atomicity of the batch operation.
.. versionadded:: 2.0.0
"""
LOGGED = None
"""
Atomic batch operation.
"""
UNLOGGED = None
"""
Non-atomic batch operation.
"""
COUNTER = None
"""
Batches of counter operations.
"""
def __init__(self, name, value):
self.name = name
self.value = value
def __str__(self):
return self.name
def __repr__(self):
return "BatchType.%s" % (self.name, )
BatchType.LOGGED = BatchType("LOGGED", 0)
BatchType.UNLOGGED = BatchType("UNLOGGED", 1)
BatchType.COUNTER = BatchType("COUNTER", 2)
class BatchStatement(Statement):
"""
A protocol-level batch of operations which are applied atomically
by default.
.. versionadded:: 2.0.0
"""
batch_type = None
"""
The :class:`.BatchType` for the batch operation. Defaults to
:attr:`.BatchType.LOGGED`.
"""
serial_consistency_level = None
"""
The same as :attr:`.Statement.serial_consistency_level`, but is only
supported when using protocol version 3 or higher.
"""
_statements_and_parameters = None
_session = None
def __init__(self, batch_type=BatchType.LOGGED, retry_policy=None,
consistency_level=None, serial_consistency_level=None,
session=None, custom_payload=None):
"""
`batch_type` specifies the :class:`.BatchType` for the batch operation.
Defaults to :attr:`.BatchType.LOGGED`.
`retry_policy` should be a :class:`~.RetryPolicy` instance for
controlling retries on the operation.
`consistency_level` should be a :class:`~.ConsistencyLevel` value
to be used for all operations in the batch.
`custom_payload` is a :ref:`custom_payload` passed to the server.
Note: as Statement objects are added to the batch, this map is
updated with any values found in their custom payloads. These are
only allowed when using protocol version 4 or higher.
Example usage:
.. code-block:: python
insert_user = session.prepare("INSERT INTO users (name, age) VALUES (?, ?)")
batch = BatchStatement(consistency_level=ConsistencyLevel.QUORUM)
for (name, age) in users_to_insert:
batch.add(insert_user, (name, age))
session.execute(batch)
You can also mix different types of operations within a batch:
.. code-block:: python
batch = BatchStatement()
batch.add(SimpleStatement("INSERT INTO users (name, age) VALUES (%s, %s)"), (name, age))
batch.add(SimpleStatement("DELETE FROM pending_users WHERE name=%s"), (name,))
session.execute(batch)
.. versionadded:: 2.0.0
.. versionchanged:: 2.1.0
Added `serial_consistency_level` as a parameter
.. versionchanged:: 2.6.0
Added `custom_payload` as a parameter
"""
self.batch_type = batch_type
self._statements_and_parameters = []
self._session = session
Statement.__init__(self, retry_policy=retry_policy, consistency_level=consistency_level,
serial_consistency_level=serial_consistency_level, custom_payload=custom_payload)
def clear(self):
"""
This is a convenience method to clear a batch statement for reuse.
*Note:* it should not be used concurrently with uncompleted execution futures executing the same
``BatchStatement``.
"""
del self._statements_and_parameters[:]
self.keyspace = None
self.routing_key = None
if self.custom_payload:
self.custom_payload.clear()
def add(self, statement, parameters=None):
"""
Adds a :class:`.Statement` and optional sequence of parameters
to be used with the statement to the batch.
Like with other statements, parameters must be a sequence, even
if there is only one item.
"""
if isinstance(statement, six.string_types):
if parameters:
encoder = Encoder() if self._session is None else self._session.encoder
statement = bind_params(statement, parameters, encoder)
self._add_statement_and_params(False, statement, ())
elif isinstance(statement, PreparedStatement):
query_id = statement.query_id
bound_statement = statement.bind(() if parameters is None else parameters)
self._update_state(bound_statement)
self._add_statement_and_params(True, query_id, bound_statement.values)
elif isinstance(statement, BoundStatement):
if parameters:
raise ValueError(
"Parameters cannot be passed with a BoundStatement "
"to BatchStatement.add()")
self._update_state(statement)
self._add_statement_and_params(True, statement.prepared_statement.query_id, statement.values)
else:
# it must be a SimpleStatement
query_string = statement.query_string
if parameters:
encoder = Encoder() if self._session is None else self._session.encoder
query_string = bind_params(query_string, parameters, encoder)
self._update_state(statement)
self._add_statement_and_params(False, query_string, ())
return self
def add_all(self, statements, parameters):
"""
Adds a sequence of :class:`.Statement` objects and a matching sequence
of parameters to the batch. Statement and parameter sequences must be of equal length or
one will be truncated. :const:`None` can be used in the parameters position where no parameters are needed.
"""
for statement, value in zip(statements, parameters):
self.add(statement, value)
def _add_statement_and_params(self, is_prepared, statement, parameters):
if len(self._statements_and_parameters) >= 0xFFFF:
raise ValueError("Batch statement cannot contain more than %d statements." % 0xFFFF)
self._statements_and_parameters.append((is_prepared, statement, parameters))
def _maybe_set_routing_attributes(self, statement):
if self.routing_key is None:
if statement.keyspace and statement.routing_key:
self.routing_key = statement.routing_key
self.keyspace = statement.keyspace
def _update_custom_payload(self, statement):
if statement.custom_payload:
if self.custom_payload is None:
self.custom_payload = {}
self.custom_payload.update(statement.custom_payload)
def _update_state(self, statement):
self._maybe_set_routing_attributes(statement)
self._update_custom_payload(statement)
def __len__(self):
return len(self._statements_and_parameters)
def __str__(self):
consistency = ConsistencyLevel.value_to_name.get(self.consistency_level, 'Not Set')
return (u'<BatchStatement type=%s, statements=%d, consistency=%s>' %
(self.batch_type, len(self), consistency))
__repr__ = __str__
ValueSequence = cassandra.encoder.ValueSequence
"""
A wrapper class that is used to specify that a sequence of values should
be treated as a CQL list of values instead of a single column collection when used
as part of the `parameters` argument for :meth:`.Session.execute()`.
This is typically needed when supplying a list of keys to select.
For example::
>>> my_user_ids = ('alice', 'bob', 'charles')
>>> query = "SELECT * FROM users WHERE user_id IN %s"
>>> session.execute(query, parameters=[ValueSequence(my_user_ids)])
"""
def bind_params(query, params, encoder):
if six.PY2 and isinstance(query, six.text_type):
query = query.encode('utf-8')
if isinstance(params, dict):
return query % dict((k, encoder.cql_encode_all_types(v)) for k, v in six.iteritems(params))
else:
return query % tuple(encoder.cql_encode_all_types(v) for v in params)
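# Rough illustration of bind_params: each value is CQL-encoded and substituted
# with standard %-formatting, e.g.
#
#     bind_params("SELECT * FROM users WHERE name=%s AND age=%s", ("Bob", 42), Encoder())
#     # -> "SELECT * FROM users WHERE name='Bob' AND age=42"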
class TraceUnavailable(Exception):
"""
Raised when complete trace details cannot be fetched from Cassandra.
"""
pass
class QueryTrace(object):
"""
A trace of the duration and events that occurred when executing
an operation.
"""
trace_id = None
"""
:class:`uuid.UUID` unique identifier for this tracing session. Matches
the ``session_id`` column in ``system_traces.sessions`` and
``system_traces.events``.
"""
request_type = None
"""
A string that very generally describes the traced operation.
"""
duration = None
"""
A :class:`datetime.timedelta` measure of the duration of the query.
"""
client = None
"""
The IP address of the client that issued this request.
This is only available when using Cassandra 2.2+
"""
coordinator = None
"""
The IP address of the host that acted as coordinator for this request.
"""
parameters = None
"""
A :class:`dict` of parameters for the traced operation, such as the
specific query string.
"""
started_at = None
"""
A UTC :class:`datetime.datetime` object describing when the operation
was started.
"""
events = None
"""
A chronologically sorted list of :class:`.TraceEvent` instances
representing the steps the traced operation went through. This
corresponds to the rows in ``system_traces.events`` for this tracing
session.
"""
_session = None
_SELECT_SESSIONS_FORMAT = "SELECT * FROM system_traces.sessions WHERE session_id = %s"
_SELECT_EVENTS_FORMAT = "SELECT * FROM system_traces.events WHERE session_id = %s"
_BASE_RETRY_SLEEP = 0.003
def __init__(self, trace_id, session):
self.trace_id = trace_id
self._session = session
def populate(self, max_wait=2.0, wait_for_complete=True, query_cl=None):
"""
Retrieves the actual tracing details from Cassandra and populates the
attributes of this instance. Because tracing details are stored
asynchronously by Cassandra, this may need to retry the session
detail fetch. If the trace is still not available after `max_wait`
seconds, :exc:`.TraceUnavailable` will be raised; if `max_wait` is
:const:`None`, this will retry forever.
`wait_for_complete=False` bypasses the wait for duration to be populated.
This can be used to query events from partial sessions.
`query_cl` specifies a consistency level to use for polling the trace tables,
if it should be different than the session default.
"""
attempt = 0
start = time.time()
while True:
time_spent = time.time() - start
if max_wait is not None and time_spent >= max_wait:
raise TraceUnavailable(
"Trace information was not available within %f seconds. Consider raising Session.max_trace_wait." % (max_wait,))
log.debug("Attempting to fetch trace info for trace ID: %s", self.trace_id)
session_results = self._execute(
SimpleStatement(self._SELECT_SESSIONS_FORMAT, consistency_level=query_cl), (self.trace_id,), time_spent, max_wait)
is_complete = session_results and session_results[0].duration is not None
if not session_results or (wait_for_complete and not is_complete):
time.sleep(self._BASE_RETRY_SLEEP * (2 ** attempt))
attempt += 1
continue
if is_complete:
log.debug("Fetched trace info for trace ID: %s", self.trace_id)
else:
log.debug("Fetching parital trace info for trace ID: %s", self.trace_id)
session_row = session_results[0]
self.request_type = session_row.request
self.duration = timedelta(microseconds=session_row.duration) if is_complete else None
self.started_at = session_row.started_at
self.coordinator = session_row.coordinator
self.parameters = session_row.parameters
# since C* 2.2
self.client = getattr(session_row, 'client', None)
log.debug("Attempting to fetch trace events for trace ID: %s", self.trace_id)
time_spent = time.time() - start
event_results = self._execute(
SimpleStatement(self._SELECT_EVENTS_FORMAT, consistency_level=query_cl), (self.trace_id,), time_spent, max_wait)
log.debug("Fetched trace events for trace ID: %s", self.trace_id)
self.events = tuple(TraceEvent(r.activity, r.event_id, r.source, r.source_elapsed, r.thread)
for r in event_results)
break
def _execute(self, query, parameters, time_spent, max_wait):
timeout = (max_wait - time_spent) if max_wait is not None else None
future = self._session._create_response_future(query, parameters, trace=False, custom_payload=None, timeout=timeout)
# in case the user switched the row factory, set it to namedtuple for this query
future.row_factory = named_tuple_factory
future.send_request()
try:
return future.result()
except OperationTimedOut:
raise TraceUnavailable("Trace information was not available within %f seconds" % (max_wait,))
def __str__(self):
return "%s [%s] coordinator: %s, started at: %s, duration: %s, parameters: %s" \
% (self.request_type, self.trace_id, self.coordinator, self.started_at,
self.duration, self.parameters)
class TraceEvent(object):
"""
Representation of a single event within a query trace.
"""
description = None
"""
A brief description of the event.
"""
datetime = None
"""
A UTC :class:`datetime.datetime` marking when the event occurred.
"""
source = None
"""
The IP address of the node this event occurred on.
"""
source_elapsed = None
"""
A :class:`datetime.timedelta` measuring the amount of time until
this event occurred starting from when :attr:`.source` first
received the query.
"""
thread_name = None
"""
The name of the thread that this event occurred on.
"""
def __init__(self, description, timeuuid, source, source_elapsed, thread_name):
self.description = description
self.datetime = datetime.utcfromtimestamp(unix_time_from_uuid1(timeuuid))
self.source = source
if source_elapsed is not None:
self.source_elapsed = timedelta(microseconds=source_elapsed)
else:
self.source_elapsed = None
self.thread_name = thread_name
def __str__(self):
return "%s on %s[%s] at %s" % (self.description, self.source, self.thread_name, self.datetime)
|
|
#! /usr/bin/python2
#
# Copyright 2016 the V8 project authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
import argparse
import collections
import re
import subprocess
import sys
__DESCRIPTION = """
Processes a perf.data sample file and reports the hottest Ignition bytecodes,
or writes an input file for flamegraph.pl.
"""
__HELP_EPILOGUE = """
examples:
# Get a flamegraph for Ignition bytecode handlers on Octane benchmark,
# without considering the time spent compiling JS code, entry trampoline
# samples and other non-Ignition samples.
#
$ tools/run-perf.sh out/x64.release/d8 \\
--ignition --noturbo --nocrankshaft run.js
$ tools/ignition/linux_perf_report.py --flamegraph -o out.collapsed
$ flamegraph.pl --colors js out.collapsed > out.svg
# Same as above, but show all samples, including time spent compiling JS code,
# entry trampoline samples and other samples.
$ # ...
$ tools/ignition/linux_perf_report.py \\
--flamegraph --show-all -o out.collapsed
$ # ...
# Same as above, but show full function signatures in the flamegraph.
$ # ...
$ tools/ignition/linux_perf_report.py \\
--flamegraph --show-full-signatures -o out.collapsed
$ # ...
# See the hottest bytecodes on Octane benchmark, by number of samples.
#
$ tools/run-perf.sh out/x64.release/d8 \\
--ignition --noturbo --nocrankshaft octane/run.js
$ tools/ignition/linux_perf_report.py
"""
COMPILER_SYMBOLS_RE = re.compile(
r"v8::internal::(?:\(anonymous namespace\)::)?Compile|v8::internal::Parser")
JIT_CODE_SYMBOLS_RE = re.compile(
r"(LazyCompile|Compile|Eval|Script):(\*|~)")
GC_SYMBOLS_RE = re.compile(
r"v8::internal::Heap::CollectGarbage")
def strip_function_parameters(symbol):
if symbol[-1] != ')': return symbol
pos = 1
parenthesis_count = 0
for c in reversed(symbol):
if c == ')':
parenthesis_count += 1
elif c == '(':
parenthesis_count -= 1
if parenthesis_count == 0:
break
else:
pos += 1
return symbol[:-pos]
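# Intended behaviour of the helper above (illustrative): the trailing
# parameter list is dropped while the qualified name is kept, e.g.
#     "foo(bar)"                  -> "foo"
#     "ns::Foo(int, char const*)" -> "ns::Foo"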
def collapsed_callchains_generator(perf_stream, hide_other=False,
hide_compiler=False, hide_jit=False,
hide_gc=False, show_full_signatures=False):
current_chain = []
skip_until_end_of_chain = False
compiler_symbol_in_chain = False
for line in perf_stream:
# Lines starting with a "#" are comments, skip them.
if line[0] == "#":
continue
line = line.strip()
# Empty line signals the end of the callchain.
if not line:
if (not skip_until_end_of_chain and current_chain
and not hide_other):
current_chain.append("[other]")
yield current_chain
# Reset parser status.
current_chain = []
skip_until_end_of_chain = False
compiler_symbol_in_chain = False
continue
if skip_until_end_of_chain:
continue
# Trim the leading address and the trailing +offset, if present.
symbol = line.split(" ", 1)[1].split("+", 1)[0]
if not show_full_signatures:
symbol = strip_function_parameters(symbol)
# Avoid chains of [unknown]
if (symbol == "[unknown]" and current_chain and
current_chain[-1] == "[unknown]"):
continue
current_chain.append(symbol)
if symbol.startswith("BytecodeHandler:"):
current_chain.append("[interpreter]")
yield current_chain
skip_until_end_of_chain = True
elif JIT_CODE_SYMBOLS_RE.match(symbol):
if not hide_jit:
current_chain.append("[jit]")
yield current_chain
skip_until_end_of_chain = True
elif GC_SYMBOLS_RE.match(symbol):
if not hide_gc:
current_chain.append("[gc]")
yield current_chain
skip_until_end_of_chain = True
elif symbol == "Stub:CEntryStub" and compiler_symbol_in_chain:
if not hide_compiler:
current_chain.append("[compiler]")
yield current_chain
skip_until_end_of_chain = True
elif COMPILER_SYMBOLS_RE.match(symbol):
compiler_symbol_in_chain = True
elif symbol == "Builtin:InterpreterEntryTrampoline":
if len(current_chain) == 1:
yield ["[entry trampoline]"]
else:
# If we see an InterpreterEntryTrampoline which is not at the top of the
# chain and doesn't have a BytecodeHandler above it, then we have
# skipped the top BytecodeHandler due to the top-level stub not building
# a frame. File the chain in the [misattributed] bucket.
current_chain[-1] = "[misattributed]"
yield current_chain
skip_until_end_of_chain = True
def calculate_samples_count_per_callchain(callchains):
chain_counters = collections.defaultdict(int)
for callchain in callchains:
key = ";".join(reversed(callchain))
chain_counters[key] += 1
return chain_counters.items()
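# Illustrative shape of the collapsed data above: each unique callchain becomes
# a root-first, semicolon-separated key with a sample count, e.g.
#     [("[interpreter];BytecodeHandler:Add", 17), ("[entry trampoline]", 3)]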
def calculate_samples_count_per_handler(callchains):
def strip_handler_prefix_if_any(handler):
return handler if handler[0] == "[" else handler.split(":", 1)[1]
handler_counters = collections.defaultdict(int)
for callchain in callchains:
handler = strip_handler_prefix_if_any(callchain[-1])
handler_counters[handler] += 1
return handler_counters.items()
def write_flamegraph_input_file(output_stream, callchains):
for callchain, count in calculate_samples_count_per_callchain(callchains):
output_stream.write("{}; {}\n".format(callchain, count))
def write_handlers_report(output_stream, callchains):
handler_counters = calculate_samples_count_per_handler(callchains)
samples_num = sum(counter for _, counter in handler_counters)
# Sort by decreasing number of samples
handler_counters.sort(key=lambda entry: entry[1], reverse=True)
for bytecode_name, count in handler_counters:
output_stream.write(
"{}\t{}\t{:.3f}%\n".format(bytecode_name, count,
100. * count / samples_num))
def parse_command_line():
command_line_parser = argparse.ArgumentParser(
formatter_class=argparse.RawDescriptionHelpFormatter,
description=__DESCRIPTION,
epilog=__HELP_EPILOGUE)
command_line_parser.add_argument(
"perf_filename",
help="perf sample file to process (default: perf.data)",
nargs="?",
default="perf.data",
metavar="<perf filename>"
)
command_line_parser.add_argument(
"--flamegraph", "-f",
help="output an input file for flamegraph.pl, not a report",
action="store_true",
dest="output_flamegraph"
)
command_line_parser.add_argument(
"--hide-other",
help="Hide other samples",
action="store_true"
)
command_line_parser.add_argument(
"--hide-compiler",
help="Hide samples during compilation",
action="store_true"
)
command_line_parser.add_argument(
"--hide-jit",
help="Hide samples from JIT code execution",
action="store_true"
)
command_line_parser.add_argument(
"--hide-gc",
help="Hide samples from garbage collection",
action="store_true"
)
command_line_parser.add_argument(
"--show-full-signatures", "-s",
help="show full signatures instead of function names",
action="store_true"
)
command_line_parser.add_argument(
"--output", "-o",
help="output file name (stdout if omitted)",
type=argparse.FileType('wt'),
default=sys.stdout,
metavar="<output filename>",
dest="output_stream"
)
return command_line_parser.parse_args()
def main():
program_options = parse_command_line()
perf = subprocess.Popen(["perf", "script", "--fields", "ip,sym",
"-i", program_options.perf_filename],
stdout=subprocess.PIPE)
callchains = collapsed_callchains_generator(
perf.stdout, program_options.hide_other, program_options.hide_compiler,
program_options.hide_jit, program_options.hide_gc,
program_options.show_full_signatures)
if program_options.output_flamegraph:
write_flamegraph_input_file(program_options.output_stream, callchains)
else:
write_handlers_report(program_options.output_stream, callchains)
if __name__ == "__main__":
main()
|
|
# Copyright (C) 2013 eNovance SAS <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
import time
from oslo_config import cfg
from oslo_log import log as logging
import oslo_messaging
from oslo_service import loopingcall
from oslo_service import periodic_task
from oslo_service import service
from oslo_utils import importutils
from neutron._i18n import _, _LE, _LI, _LW
from neutron.agent.common import config
from neutron.agent import rpc as agent_rpc
from neutron.common import config as common_config
from neutron.common import constants as constants
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.common import utils
from neutron import context
from neutron import manager
from neutron import service as neutron_service
LOG = logging.getLogger(__name__)
class MeteringPluginRpc(object):
def __init__(self, host):
# NOTE(yamamoto): super.__init__() call here is not only for
# aesthetics. Because of multiple inheritances in MeteringAgent,
# it's actually necessary to initialize parent classes of
# manager.Manager correctly.
super(MeteringPluginRpc, self).__init__()
target = oslo_messaging.Target(topic=topics.METERING_PLUGIN,
version='1.0')
self.client = n_rpc.get_client(target)
def _get_sync_data_metering(self, context):
try:
cctxt = self.client.prepare()
return cctxt.call(context, 'get_sync_data_metering',
host=self.host)
except Exception:
LOG.exception(_LE("Failed synchronizing routers"))
class MeteringAgent(MeteringPluginRpc, manager.Manager):
Opts = [
cfg.StrOpt('driver',
default='neutron.services.metering.drivers.noop.'
'noop_driver.NoopMeteringDriver',
help=_("Metering driver")),
cfg.IntOpt('measure_interval', default=30,
help=_("Interval between two metering measures")),
cfg.IntOpt('report_interval', default=300,
help=_("Interval between two metering reports")),
]
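    # Illustrative metering agent configuration for the options registered
    # above; the iptables driver path is an assumption, not defined here:
    #
    #     [DEFAULT]
    #     driver = neutron.services.metering.drivers.iptables.iptables_driver.IptablesMeteringDriver
    #     measure_interval = 30
    #     report_interval = 300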
def __init__(self, host, conf=None):
self.conf = conf or cfg.CONF
self._load_drivers()
self.context = context.get_admin_context_without_session()
self.metering_loop = loopingcall.FixedIntervalLoopingCall(
self._metering_loop
)
measure_interval = self.conf.measure_interval
self.last_report = 0
self.metering_loop.start(interval=measure_interval)
self.host = host
self.label_tenant_id = {}
self.routers = {}
self.metering_infos = {}
super(MeteringAgent, self).__init__(host=host)
def _load_drivers(self):
"""Loads plugin-driver from configuration."""
LOG.info(_LI("Loading Metering driver %s"), self.conf.driver)
if not self.conf.driver:
raise SystemExit(_('A metering driver must be specified'))
self.metering_driver = importutils.import_object(
self.conf.driver, self, self.conf)
def _metering_notification(self):
for label_id, info in self.metering_infos.items():
data = {'label_id': label_id,
'tenant_id': self.label_tenant_id.get(label_id),
'pkts': info['pkts'],
'bytes': info['bytes'],
'time': info['time'],
'first_update': info['first_update'],
'last_update': info['last_update'],
'host': self.host}
LOG.debug("Send metering report: %s", data)
notifier = n_rpc.get_notifier('metering')
notifier.info(self.context, 'l3.meter', data)
info['pkts'] = 0
info['bytes'] = 0
info['time'] = 0
def _purge_metering_info(self):
deadline_timestamp = int(time.time()) - self.conf.report_interval
label_ids = [
label_id
for label_id, info in self.metering_infos.items()
if info['last_update'] < deadline_timestamp]
for label_id in label_ids:
del self.metering_infos[label_id]
def _add_metering_info(self, label_id, pkts, bytes):
ts = int(time.time())
info = self.metering_infos.get(label_id, {'bytes': 0,
'pkts': 0,
'time': 0,
'first_update': ts,
'last_update': ts})
info['bytes'] += bytes
info['pkts'] += pkts
info['time'] += ts - info['last_update']
info['last_update'] = ts
self.metering_infos[label_id] = info
return info
def _add_metering_infos(self):
self.label_tenant_id = {}
for router in self.routers.values():
tenant_id = router['tenant_id']
labels = router.get(constants.METERING_LABEL_KEY, [])
for label in labels:
label_id = label['id']
self.label_tenant_id[label_id] = tenant_id
tenant_id = self.label_tenant_id.get
accs = self._get_traffic_counters(self.context, self.routers.values())
if not accs:
return
for label_id, acc in accs.items():
self._add_metering_info(label_id, acc['pkts'], acc['bytes'])
def _metering_loop(self):
self._add_metering_infos()
ts = int(time.time())
delta = ts - self.last_report
report_interval = self.conf.report_interval
if delta > report_interval:
self._metering_notification()
self._purge_metering_info()
self.last_report = ts
@utils.synchronized('metering-agent')
def _invoke_driver(self, context, meterings, func_name):
try:
return getattr(self.metering_driver, func_name)(context, meterings)
except AttributeError:
LOG.exception(_LE("Driver %(driver)s does not implement %(func)s"),
{'driver': self.conf.driver,
'func': func_name})
except RuntimeError:
LOG.exception(_LE("Driver %(driver)s:%(func)s runtime error"),
{'driver': self.conf.driver,
'func': func_name})
@periodic_task.periodic_task(run_immediately=True)
def _sync_routers_task(self, context):
routers = self._get_sync_data_metering(self.context)
if not routers:
return
self._update_routers(context, routers)
def router_deleted(self, context, router_id):
self._add_metering_infos()
if router_id in self.routers:
del self.routers[router_id]
return self._invoke_driver(context, router_id,
'remove_router')
def routers_updated(self, context, routers=None):
if not routers:
routers = self._get_sync_data_metering(self.context)
if not routers:
return
self._update_routers(context, routers)
def _update_routers(self, context, routers):
for router in routers:
self.routers[router['id']] = router
return self._invoke_driver(context, routers,
'update_routers')
def _get_traffic_counters(self, context, routers):
LOG.debug("Get router traffic counters")
return self._invoke_driver(context, routers, 'get_traffic_counters')
def add_metering_label_rule(self, context, routers):
return self._invoke_driver(context, routers,
'add_metering_label_rule')
def remove_metering_label_rule(self, context, routers):
return self._invoke_driver(context, routers,
'remove_metering_label_rule')
def update_metering_label_rules(self, context, routers):
LOG.debug("Update metering rules from agent")
return self._invoke_driver(context, routers,
'update_metering_label_rules')
def add_metering_label(self, context, routers):
LOG.debug("Creating a metering label from agent")
return self._invoke_driver(context, routers,
'add_metering_label')
def remove_metering_label(self, context, routers):
self._add_metering_infos()
LOG.debug("Delete a metering label from agent")
return self._invoke_driver(context, routers,
'remove_metering_label')
class MeteringAgentWithStateReport(MeteringAgent):
def __init__(self, host, conf=None):
super(MeteringAgentWithStateReport, self).__init__(host=host,
conf=conf)
self.state_rpc = agent_rpc.PluginReportStateAPI(topics.REPORTS)
self.agent_state = {
'binary': 'neutron-metering-agent',
'host': host,
'topic': topics.METERING_AGENT,
'configurations': {
'metering_driver': self.conf.driver,
'measure_interval':
self.conf.measure_interval,
'report_interval': self.conf.report_interval
},
'start_flag': True,
'agent_type': constants.AGENT_TYPE_METERING}
report_interval = cfg.CONF.AGENT.report_interval
self.use_call = True
if report_interval:
self.heartbeat = loopingcall.FixedIntervalLoopingCall(
self._report_state)
self.heartbeat.start(interval=report_interval)
def _report_state(self):
try:
self.state_rpc.report_state(self.context, self.agent_state,
self.use_call)
self.agent_state.pop('start_flag', None)
self.use_call = False
except AttributeError:
# This means the server does not support report_state
LOG.warn(_LW("Neutron server does not support state report."
" State report for this agent will be disabled."))
self.heartbeat.stop()
return
except Exception:
LOG.exception(_LE("Failed reporting state!"))
def agent_updated(self, context, payload):
LOG.info(_LI("agent_updated by server side %s!"), payload)
def main():
conf = cfg.CONF
conf.register_opts(MeteringAgent.Opts)
config.register_agent_state_opts_helper(conf)
common_config.init(sys.argv[1:])
config.setup_logging()
server = neutron_service.Service.create(
binary='neutron-metering-agent',
topic=topics.METERING_AGENT,
report_interval=cfg.CONF.AGENT.report_interval,
manager='neutron.services.metering.agents.'
'metering_agent.MeteringAgentWithStateReport')
service.launch(cfg.CONF, server).wait()
|
|
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import os
import sys
import string
import re
import logging
import time
import MySQLdb
# m_extensions - This file was created to cater for a different kind of
# requirement in MIMOS Lab. Instead of PXE boot images, MIMOS Lab customizes
# Zoni to PXE boot the servers to install a functional OS and relevant packages
# into their respective local storage using preseed (Ubuntu/Debian) or
# kickstart (CentOS/Red Hat). It also serves as a file for testing additional
# code for the convertz script.
# Revised Version: 20111202
from zoni.data.infostore import InfoStore
class mimos(InfoStore):
def __init__(self, config):
self.config = config
self.host = config['dbHost']
self.user = config['dbUser']
self.passwd = config['dbPassword']
self.db = config['dbInst']
self.port = config['dbPort']
self._isDb = 1
if self.port == "":
self.port = 3306
self.log = logging.getLogger(__name__)
self.conn = self.createConn()
def createConn(self):
try:
return MySQLdb.connect(host = self.host, port = self.port, user = self.user, passwd = self.passwd, db = self.db)
except MySQLdb.OperationalError, e:
msg = "Error: %s" % str(e[1])
self.log.error(msg)
return
def keepAlive(self):
while True:
if not self.conn.open:
msg = "Reinitializing DB connection"
self.log.info(msg)
self.conn = self.createConn()
time.sleep(10)
return
def getConfig(self, configs, theconfig):
print configs[theconfig]
return
def getDestFile(self, configs, host):
therole = ("%s/01-%s" % (configs['tftpImageDir'], (host['mac_addr']).replace(":", "-").lower()))
return therole
def addRoletoNode(self, configs, host, thenode, roletemplate):
therole = ("%s/01-%s" % (configs['tftpImageDir'], (host['mac_addr']).replace(":", "-").lower()))
self.log.info("Roles: addRole for %s" % thenode)
srctpl = "%s/%s" % (configs['tftpTemplateDir'], roletemplate)
if os.path.isfile(therole):
mesg = "Roles: Role file exists! Exiting!"
self.log.error(mesg)
exit()
if not os.path.isfile(srctpl):
mesg = "Roles: Role template missing! Exiting!"
self.log.error(mesg)
exit()
#shutil.copy(srctpl,therole) #this is direct copy approach, template is not customized, retained here just in case we still need it
#read and parse srctpl and write to therole, trying to be a bit more flexible from here on
infile = open(srctpl,'r')
outfile = open(therole,'w')
# Use sys_vendor to determine HDD Type, HP servers use the /dev/cciss/c0d0 form for their storage device
if (host['sys_vendor'] == "HP"):
hddtype = "cciss"
else: # Most other vendors just use standard /dev/sdxy form for storage device
hddtype = "normal"
for line in infile.readlines():
line = line.replace("$IMAGEHOST",configs['imageServerIP'])
line = line.replace("$NTPSVRIP",configs['ntpsvrIP'])
line = line.replace("$ROLE",roletemplate)
line = line.replace("$USEHDDTYPE",hddtype)
outfile.write(line)
infile.close()
outfile.close()
self.log.info("Roles: %s created" % therole)
return 0
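# Illustrative (hypothetical) template fragment consumed by addRoletoNode
# above; each $-placeholder is replaced line by line from `configs`:
#
#     d-i mirror/http/hostname string $IMAGEHOST
#     d-i clock-setup/ntp-server string $NTPSVRIP
#     # role: $ROLE, disk layout: $USEHDDTYPE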
def removeRolefromNode(self, configs, host, thenode):
therole = ("%s/01-%s" % (configs['tftpImageDir'], (host['mac_addr']).replace(":", "-").lower()))
self.log.info("Roles: removeRole for %s" % thenode)
if not os.path.isfile(therole):
mesg = "No Role File for %s! Exiting!" % thenode
self.log.error(mesg)
exit()
os.remove(therole)
self.log.info("Roles: %s removed" % therole)
return 0
# This is a temp workaround instead of using assignImagetoHost
# A new temp table rolemap added to support this but should merge back to imagemap
def assignRoletoHost(self, host, image):
cur_image = host['pxe_image_name']
query = "select image_id from imageinfo where image_name = '%s'" % image
row = self.queryDb(query)
if len(row) < 1:
mesg = "assignRoletoHost: Image \"%s\" does not exist in db" % image
self.log.error(mesg)
return 1
new_image_id = str(row[0][0])
query = "select * from rolemap where sys_id = '%s'" % host['sys_id']
result = self.selectDb(query)
if result.rowcount > 0:
mesg = "assignRoletoHost: detected assigned role - removing from db first"
self.log.info(mesg)
query = "delete from rolemap where sys_id = '%s'" % host['sys_id']
self.delDb(query)
query = "insert into rolemap (sys_id, image_id) values ('%s', %s)" % (host['sys_id'], new_image_id)
self.insertDb(query)
return 0
def unassignRolefromHost(self, host):
query="delete from rolemap where sys_id = '%s'" % str(host['sys_id'])
self.delDb(query)
return 0
def showRoletoHost(self):
query="select s.location, s.mac_addr, i.image_name from sysinfo s, imageinfo i, rolemap r where r.image_id=i.image_id and r.sys_id=s.sys_id order by s.location"
rows = self.queryDb(query)
print "Node MAC Address Image Name"
for row in rows:
print "%-20s %-17s %-30s" % (row[0],row[1],row[2])
return 0
def showKernelInfo(self):
query="select k.kernel_id, k.kernel_name, k.kernel_release, k.kernel_arch from kernelinfo k"
rows = self.queryDb(query)
print "Available Kernels"
print "ID Name Release Arch"
for row in rows:
kid=row[0]
kname=row[1]
krelease=row[2]
karch=row[3]
print "%-3s %-30s %-17s %-6s" % (kid, kname, krelease, karch)
return 0
def showInitrdInfo(self):
query="select i.initrd_id, i.initrd_name, i.initrd_arch from initrdinfo i"
rows = self.queryDb(query)
print
print "Available Initial Ramdisks"
print "ID Name Arch"
for row in rows:
iid=row[0]
iname=row[1]
iarch=row[2]
print "%-3s %-30s %-6s" % (iid, iname, iarch)
print
return 0
def getKernelInitrdID(self, info):
kernel_name = info.split(":")[0]
initrd_name = info.split(":")[1]
kernel_arch = info.split(":")[2]
query = "select k.kernel_id, i.initrd_id from kernelinfo k, initrdinfo i where k.kernel_name='%s' and i.initrd_name='%s' and k.kernel_arch='%s' and i.initrd_arch='%s'" % (kernel_name, initrd_name, kernel_arch, kernel_arch)
rows=self.queryDb(query)
if len(rows) > 0:
for row in rows:
kid=str(row[0])
iid=str(row[1])
print "%s:%s" % (kid, iid)
return 0
def registerKernelInitrd(self, configs, info):
foo = info.split(":")
kernel_name = foo[0]
kernel_release = foo[1]
kernel_arch = foo[2]
initrd_name = foo[3]
initrd_arch = foo[4]
kernel_something = foo[5]
initrd_options = "boot=live toram nopersistent fetch=http://%s/%s/%s initrd=%s/%s" % (configs['imageServerIP'], configs['fsImagesBaseDir'], kernel_something, configs['initrdRoot'], initrd_name)
query = "insert into kernelinfo (kernel_name, kernel_release, kernel_arch) values ('%s', '%s', '%s)" % (kernel_name, kernel_release, kernel_arch)
k_id=self.insertDb(query)
query = "insert into initrdinfo (initrd_name, initrd_arch, initrd_options) values ('%s', '%s', '%s')" % (initrd_name, initrd_arch, initrd_options)
i_id=self.insertDb(query)
print "%s:%s" % (k_id, i_id)
return 0
def queryDb(self, thequery):
self.conn.ping(True)
cursor=self.conn.cursor()
try:
cursor.execute(thequery)
self.conn.commit()
row=cursor.fetchall()
except MySQLdb.OperationalError, e:
self.log.error("queryDb - %s", e)
return -1
return row
def selectDb(self, thequery):
self.conn.ping(True)
cursor=self.conn.cursor()
try:
cursor.execute(thequery)
self.conn.commit()
except MySQLdb.OperationalError, e:
self.log.error("selectDb - %s", e)
return -1
return cursor
def insertDb(self, thequery):
self.conn.ping(True)
cursor=self.conn.cursor()
try:
cursor.execute(thequery)
self.conn.commit()
except MySQLdb.OperationalError, e:
self.log.error("insertDb - %s", e)
return -1
return cursor.lastrowid
def delDb(self, thequery):
self.conn.ping(True)
cursor=self.conn.cursor()
try:
cursor.execute(thequery)
self.conn.commit()
except MySQLdb.OperationalError, e:
self.log.error("delDb - %s", e)
return -1
return cursor
|
|
import os
import shutil
import time
from loguru import logger
from flexget import plugin
from flexget.config_schema import one_or_more
from flexget.event import event
from flexget.utils.pathscrub import pathscrub
from flexget.utils.template import RenderError
def get_directory_size(directory):
"""
:param directory: Path
:return: Size in bytes (recursively)
"""
dir_size = 0
for (path, _, files) in os.walk(directory):
for file in files:
filename = os.path.join(path, file)
dir_size += os.path.getsize(filename)
return dir_size
def get_siblings(ext, main_file_no_ext, main_file_ext, abs_path):
siblings = {}
files = os.listdir(abs_path)
for filename in files:
# skip the main file
if filename == main_file_no_ext + main_file_ext:
continue
filename_lower = filename.lower()
if not filename_lower.startswith(main_file_no_ext.lower()) or not filename_lower.endswith(
ext.lower()
):
continue
# we have to use the length of the main file (no ext) to extract the rest of the filename
# for the future renaming
file_ext = filename[len(main_file_no_ext) :]
file_path = os.path.join(abs_path, filename)
if os.path.exists(file_path):
siblings[file_path] = file_ext
return siblings
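# Illustrative behaviour of get_siblings above: for a main file
# "Show.S01E01.mkv" and ext ".srt", files like "Show.S01E01.srt" or
# "Show.S01E01.en.srt" found in `abs_path` are returned as
#     {"<abs_path>/Show.S01E01.srt": ".srt", "<abs_path>/Show.S01E01.en.srt": ".en.srt"}
# (file names here are hypothetical).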
class BaseFileOps:
# Defined by subclasses
logger = None
along = {
'type': 'object',
'properties': {
'extensions': one_or_more({'type': 'string'}),
'subdirs': one_or_more({'type': 'string'}),
},
'additionalProperties': False,
'required': ['extensions'],
}
def prepare_config(self, config):
if config is True:
return {}
elif config is False:
return
if 'along' not in config:
return config
extensions = config['along'].get('extensions')
subdirs = config['along'].get('subdirs')
if extensions and not isinstance(extensions, list):
config['along']['extensions'] = [extensions]
if subdirs and not isinstance(subdirs, list):
config['along']['subdirs'] = [subdirs]
return config
def on_task_output(self, task, config):
config = self.prepare_config(config)
if config is None:
return
for entry in task.accepted:
if 'location' not in entry:
self.logger.verbose(
'Cannot handle {} because it does not have the field location.', entry['title']
)
continue
src = entry['location']
src_isdir = os.path.isdir(src)
try:
# check location
if not os.path.exists(src):
self.logger.warning('location `{}` does not exist (anymore).', src)
continue
if src_isdir:
if not config.get('allow_dir'):
self.logger.warning('location `{}` is a directory.', src)
continue
elif not os.path.isfile(src):
self.logger.warning('location `{}` is not a file.', src)
continue
# search for namesakes
siblings = {} # dict of (path=ext) pairs
if not src_isdir and 'along' in config:
parent = os.path.dirname(src)
filename_no_ext, filename_ext = os.path.splitext(os.path.basename(src))
for ext in config['along']['extensions']:
siblings.update(get_siblings(ext, filename_no_ext, filename_ext, parent))
files = os.listdir(parent)
files_lower = list(map(str.lower, files))
for subdir in config['along'].get('subdirs', []):
try:
idx = files_lower.index(subdir)
except ValueError:
continue
subdir_path = os.path.join(parent, files[idx])
if not os.path.isdir(subdir_path):
continue
for ext in config['along']['extensions']:
siblings.update(
get_siblings(ext, filename_no_ext, filename_ext, subdir_path)
)
# execute action in subclasses
self.handle_entry(task, config, entry, siblings)
except (OSError, IOError) as err:
entry.fail(str(err))
continue
def clean_source(self, task, config, entry):
min_size = entry.get('clean_source', config.get('clean_source', -1))
if min_size < 0:
return
base_path = os.path.split(entry.get('old_location', entry['location']))[0]
        # everything here happens after a successful execution of the main action: the entry has been moved to a
        # different location, or it no longer exists. So from here we can just log warnings and move on.
if not os.path.isdir(base_path):
self.logger.warning(
                'Cannot delete path `{}` because it does not exist (anymore).', base_path
)
return
dir_size = get_directory_size(base_path) / 1024 / 1024
if dir_size >= min_size:
self.logger.info(
                'Path `{}` left intact because it exceeds the safety value set in the clean_source option.',
base_path,
)
return
if task.options.test:
self.logger.info('Would delete `{}` and everything under it.', base_path)
return
try:
shutil.rmtree(base_path)
self.logger.info(
                'Path `{}` has been deleted because it was smaller than the clean_source safe value.',
base_path,
)
except Exception as err:
self.logger.warning('Unable to delete path `{}`: {}', base_path, err)
def handle_entry(self, task, config, entry, siblings):
raise NotImplementedError()
class DeleteFiles(BaseFileOps):
"""Delete all accepted files."""
schema = {
'oneOf': [
{'type': 'boolean'},
{
'type': 'object',
'properties': {
'allow_dir': {'type': 'boolean'},
'along': BaseFileOps.along,
'clean_source': {'type': 'number'},
},
'additionalProperties': False,
},
]
}
logger = logger.bind(name='delete')
def handle_entry(self, task, config, entry, siblings):
src = entry['location']
src_isdir = os.path.isdir(src)
if task.options.test:
if src_isdir:
self.logger.info('Would delete `{}` and all its content.', src)
else:
self.logger.info('Would delete `{}`', src)
for s, _ in siblings.items():
self.logger.info('Would also delete `{}`', s)
return
        # IO errors will have the entry marked as failed in the base class
if src_isdir:
shutil.rmtree(src)
self.logger.info('`{}` and all its content has been deleted.', src)
else:
os.remove(src)
self.logger.info('`{}` has been deleted.', src)
        # further errors will not have any effect (the entry does not exist anymore)
for s, _ in siblings.items():
try:
os.remove(s)
self.logger.info('`{}` has been deleted as well.', s)
except Exception as err:
self.logger.warning(str(err))
if not src_isdir:
self.clean_source(task, config, entry)
class TransformingOps(BaseFileOps):
# Defined by subclasses
move = None
destination_field = None
def handle_entry(self, task, config, entry, siblings):
src = entry['location']
src_isdir = os.path.isdir(src)
src_path, src_name = os.path.split(src)
# get the proper path and name in order of: entry, config, above split
dst_path = entry.get(self.destination_field, config.get('to', src_path))
if config.get('rename'):
dst_name = config['rename']
elif entry.get('filename') and entry['filename'] != src_name:
            # the entry specifies a different filename than what was split from the path;
            # since some inputs always fill in `filename`, it is only used when it differs
dst_name = entry['filename']
else:
dst_name = src_name
try:
dst_path = entry.render(dst_path)
except RenderError as err:
raise plugin.PluginError(
'Path value replacement `%s` failed: %s' % (dst_path, err.args[0])
)
try:
dst_name = entry.render(dst_name)
except RenderError as err:
raise plugin.PluginError(
'Filename value replacement `%s` failed: %s' % (dst_name, err.args[0])
)
# Clean invalid characters with pathscrub plugin
dst_path = pathscrub(os.path.expanduser(dst_path))
dst_name = pathscrub(dst_name, filename=True)
# Join path and filename
dst = os.path.join(dst_path, dst_name)
if dst == entry['location']:
raise plugin.PluginWarning('source and destination are the same.')
if not os.path.exists(dst_path):
if task.options.test:
self.logger.info('Would create `{}`', dst_path)
else:
self.logger.info('Creating destination directory `{}`', dst_path)
os.makedirs(dst_path)
if not os.path.isdir(dst_path) and not task.options.test:
raise plugin.PluginWarning('destination `%s` is not a directory.' % dst_path)
# unpack_safety
if config.get('unpack_safety', entry.get('unpack_safety', True)):
count = 0
while True:
if count > 60 * 30:
raise plugin.PluginWarning(
                        'The task has been waiting for unpacking for 30 minutes'
)
size = os.path.getsize(src)
time.sleep(1)
new_size = os.path.getsize(src)
if size != new_size:
if not count % 10:
self.logger.verbose(
'File `{}` is possibly being unpacked, waiting ...', src_name
)
else:
break
count += 1
src_file, src_ext = os.path.splitext(src)
dst_file, dst_ext = os.path.splitext(dst)
# Check dst contains src_ext
if config.get('keep_extension', entry.get('keep_extension', True)):
if not src_isdir and dst_ext != src_ext:
self.logger.verbose('Adding extension `{}` to dst `{}`', src_ext, dst)
dst += src_ext
dst_file += dst_ext # this is used for sibling files. dst_ext turns out not to be an extension!
funct_name = 'move' if self.move else 'copy'
funct_done = 'moved' if self.move else 'copied'
if task.options.test:
self.logger.info('Would {} `{}` to `{}`', funct_name, src, dst)
for s, ext in siblings.items():
# we cannot rely on splitext for extensions here (subtitles may have the language code)
d = dst_file + ext
self.logger.info('Would also {} `{}` to `{}`', funct_name, s, d)
else:
            # IO errors will have the entry marked as failed in the base class
if self.move:
shutil.move(src, dst)
elif src_isdir:
shutil.copytree(src, dst)
else:
shutil.copy(src, dst)
self.logger.info('`{}` has been {} to `{}`', src, funct_done, dst)
# further errors will not have any effect (the entry has been successfully moved or copied out)
for s, ext in siblings.items():
# we cannot rely on splitext for extensions here (subtitles may have the language code)
d = dst_file + ext
try:
if self.move:
shutil.move(s, d)
else:
shutil.copy(s, d)
self.logger.info('`{}` has been {} to `{}` as well.', s, funct_done, d)
except Exception as err:
self.logger.warning(str(err))
entry['old_location'] = entry['location']
entry['location'] = dst
if self.move and not src_isdir:
self.clean_source(task, config, entry)
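# A rough sketch of how handle_entry() builds the destination; the entry,
# template field and paths below are hypothetical examples, not plugin defaults:
#
#   entry['location'] = '/downloads/Show.S01E01.mkv'
#   config = {'to': '/library/{{ series_name }}', 'keep_extension': True}
#
#   dst_path = entry.render('/library/{{ series_name }}')   # e.g. '/library/Show'
#   dst_name = 'Show.S01E01.mkv'                             # no 'rename' given
#   dst      = '/library/Show/Show.S01E01.mkv'
#
# Siblings collected via 'along' are then moved or copied next to dst with
# their original trailing extension (e.g. '.en.srt') re-attached.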
class CopyFiles(TransformingOps):
"""Copy all accepted files."""
schema = {
'oneOf': [
{'type': 'boolean'},
{
'type': 'object',
'properties': {
'to': {'type': 'string', 'format': 'path'},
'rename': {'type': 'string'},
'allow_dir': {'type': 'boolean'},
'unpack_safety': {'type': 'boolean'},
'keep_extension': {'type': 'boolean'},
'along': TransformingOps.along,
},
'additionalProperties': False,
},
]
}
move = False
destination_field = 'copy_to'
logger = logger.bind(name='copy')
class MoveFiles(TransformingOps):
"""Move all accepted files."""
schema = {
'oneOf': [
{'type': 'boolean'},
{
'type': 'object',
'properties': {
'to': {'type': 'string', 'format': 'path'},
'rename': {'type': 'string'},
'allow_dir': {'type': 'boolean'},
'unpack_safety': {'type': 'boolean'},
'keep_extension': {'type': 'boolean'},
'along': TransformingOps.along,
'clean_source': {'type': 'number'},
},
'additionalProperties': False,
},
]
}
move = True
destination_field = 'move_to'
logger = logger.bind(name='move')
@event('plugin.register')
def register_plugin():
plugin.register(DeleteFiles, 'delete', api_ver=2)
plugin.register(CopyFiles, 'copy', api_ver=2)
plugin.register(MoveFiles, 'move', api_ver=2)
|
|
# -*- coding: utf-8 -*-
""" This module contains classes:
HostsEntry:
A representation of a hosts file entry, i.e. a line containing an IP address
and name(s), a comment, or a blank line/line separator.
Hosts:
A representation of a hosts file, e.g. /etc/hosts and
c:\\\\windows\\\\system32\\\\drivers\\\\etc\\\\hosts for a linux or MS windows
based machine respectively. Each entry being represented as an instance
of the HostsEntry class.
"""
import sys
try:
from urllib.request import urlopen
except ImportError: # pragma: no cover
from urllib2 import urlopen
from python_hosts.utils import (is_ipv4, is_ipv6, is_readable, valid_hostnames,
dedupe_list)
from python_hosts.exception import (InvalidIPv6Address, InvalidIPv4Address,
UnableToWriteHosts)
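# A minimal usage sketch (the path, address and host name are illustrative):
#
#   hosts = Hosts(path='/tmp/hosts')
#   entry = HostsEntry(entry_type='ipv4',
#                      address='192.0.2.10',
#                      names=['example.local'])
#   hosts.add(entries=[entry], force=True)
#   hosts.write()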
class HostsEntry(object):
""" An entry in a hosts file. """
__slots__ = ['entry_type', 'address', 'comment', 'names']
def __init__(self,
entry_type=None,
address=None,
comment=None,
names=None):
"""
Initialise an instance of a Hosts file entry
:param entry_type: ipv4 | ipv6 | comment | blank
:param address: The ipv4 or ipv6 address belonging to the instance
:param comment: The comment belonging to the instance
:param names: The names that resolve to the specified address
:return: None
"""
if not entry_type or entry_type not in ('ipv4',
'ipv6',
'comment',
'blank'):
raise Exception('entry_type invalid or not specified')
if entry_type == 'comment' and not comment:
raise Exception('entry_type comment supplied without value.')
if entry_type == 'ipv4':
if not all((address, names)):
raise Exception('Address and Name(s) must be specified.')
if not is_ipv4(address):
raise InvalidIPv4Address()
if entry_type == 'ipv6':
if not all((address, names)):
raise Exception('Address and Name(s) must be specified.')
if not is_ipv6(address):
raise InvalidIPv6Address()
self.entry_type = entry_type
self.address = address
self.comment = comment
self.names = names
def is_real_entry(self):
return self.entry_type in ('ipv4', 'ipv6')
def __repr__(self):
return "HostsEntry(entry_type=\'{0}\', address=\'{1}\', " \
"comment={2}, names={3})".format(self.entry_type,
self.address,
self.comment,
self.names)
def __str__(self):
if self.entry_type in ('ipv4', 'ipv6'):
return "TYPE={0}, ADDR={1}, NAMES={2}".format(self.entry_type,
self.address,
" ".join(self.names))
elif self.entry_type == 'comment':
return "TYPE = {0}, COMMENT = {1}".format(self.entry_type, self.comment)
elif self.entry_type == 'blank':
return "TYPE = {0}".format(self.entry_type)
@staticmethod
def get_entry_type(hosts_entry=None):
"""
Return the type of entry for the line of hosts file passed
:param hosts_entry: A line from the hosts file
:return: 'comment' | 'blank' | 'ipv4' | 'ipv6'
"""
if hosts_entry and isinstance(hosts_entry, str):
entry = hosts_entry.strip()
if not entry or not entry[0] or entry[0] == "\n":
return 'blank'
if entry[0] == "#":
return 'comment'
entry_chunks = entry.split()
if is_ipv6(entry_chunks[0]):
return 'ipv6'
if is_ipv4(entry_chunks[0]):
return 'ipv4'
@staticmethod
def str_to_hostentry(entry):
"""
Transform a line from a hosts file into an instance of HostsEntry
:param entry: A line from the hosts file
:return: An instance of HostsEntry
"""
line_parts = entry.strip().split()
if is_ipv4(line_parts[0]) and valid_hostnames(line_parts[1:]):
return HostsEntry(entry_type='ipv4',
address=line_parts[0],
names=line_parts[1:])
elif is_ipv6(line_parts[0]) and valid_hostnames(line_parts[1:]):
return HostsEntry(entry_type='ipv6',
address=line_parts[0],
names=line_parts[1:])
else:
return False
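# A short sketch of str_to_hostentry() behaviour (addresses and names are
# illustrative):
#
#   HostsEntry.str_to_hostentry('10.0.0.1 box box.local')
#   -> HostsEntry(entry_type='ipv4', address='10.0.0.1',
#                 comment=None, names=['box', 'box.local'])
#
#   HostsEntry.str_to_hostentry('not a valid entry')
#   -> False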
class Hosts(object):
""" A hosts file. """
__slots__ = ['entries', 'hosts_path']
def __init__(self, path=None):
"""
Initialise an instance of a hosts file
:param path: The filesystem path of the hosts file to manage
:return: None
"""
self.entries = []
if path:
self.hosts_path = path
else:
self.hosts_path = self.determine_hosts_path()
self.populate_entries()
def __repr__(self):
return 'Hosts(hosts_path=\'{0}\', entries={1})'.format(self.hosts_path, self.entries)
def __str__(self):
output = ('hosts_path={0}, '.format(self.hosts_path))
for entry in self.entries:
output += str(entry)
return output
def count(self):
""" Get a count of the number of host entries
:return: The number of host entries
"""
return len(self.entries)
@staticmethod
def determine_hosts_path(platform=None):
"""
Return the hosts file path based on the supplied
or detected platform.
:param platform: a string used to identify the platform
:return: detected filesystem path of the hosts file
"""
if not platform:
platform = sys.platform
if platform.startswith('win'):
result = r"c:\windows\system32\drivers\etc\hosts"
return result
else:
return '/etc/hosts'
def write(self, path=None, mode='w'):
"""
Write all of the HostsEntry instances back to the hosts file
        :param path: override the write path
        :param mode: the mode used to open the file ('w' by default, replacing existing content)
        :return: Dictionary containing counts of the entries written
"""
written_count = 0
comments_written = 0
blanks_written = 0
ipv4_entries_written = 0
ipv6_entries_written = 0
if path:
output_file_path = path
else:
output_file_path = self.hosts_path
try:
with open(output_file_path, mode) as hosts_file:
for written_count, line in enumerate(self.entries):
if line.entry_type == 'comment':
hosts_file.write(line.comment + "\n")
comments_written += 1
if line.entry_type == 'blank':
hosts_file.write("\n")
blanks_written += 1
if line.entry_type == 'ipv4':
hosts_file.write(
"{0}\t{1}\n".format(
line.address,
' '.join(line.names),
)
)
ipv4_entries_written += 1
if line.entry_type == 'ipv6':
hosts_file.write(
"{0}\t{1}\n".format(
line.address,
' '.join(line.names), ))
ipv6_entries_written += 1
        except Exception:
raise UnableToWriteHosts()
return {'total_written': written_count + 1,
'comments_written': comments_written,
'blanks_written': blanks_written,
'ipv4_entries_written': ipv4_entries_written,
'ipv6_entries_written': ipv6_entries_written}
@staticmethod
def get_hosts_by_url(url=None):
"""
Request the content of a URL and return the response
:param url: The URL of the hosts file to download
:return: The content of the passed URL
"""
response = urlopen(url)
return response.read()
def exists(self, address=None, names=None, comment=None):
"""
Determine if the supplied address and/or names, or comment, exists in a HostsEntry within Hosts
:param address: An ipv4 or ipv6 address to search for
:param names: A list of names to search for
:param comment: A comment to search for
:return: True if a supplied address, name, or comment is found. Otherwise, False.
"""
for entry in self.entries:
if entry.entry_type in ('ipv4', 'ipv6'):
if address and address == entry.address:
return True
if names:
for name in names:
if name in entry.names:
return True
elif entry.entry_type == 'comment' and entry.comment == comment:
return True
return False
def remove_all_matching(self, address=None, name=None):
"""
Remove all HostsEntry instances from the Hosts object
where the supplied ip address or name matches
:param address: An ipv4 or ipv6 address
:param name: A host name
:return: None
"""
if self.entries:
if address and name:
func = lambda entry: not entry.is_real_entry() or (entry.address != address and name not in entry.names)
elif address:
func = lambda entry: not entry.is_real_entry() or entry.address != address
elif name:
func = lambda entry: not entry.is_real_entry() or name not in entry.names
else:
raise ValueError('No address or name was specified for removal.')
self.entries = list(filter(func, self.entries))
def find_all_matching(self, address=None, name=None):
"""
Return all HostsEntry instances from the Hosts object
where the supplied ip address or name matches
:param address: An ipv4 or ipv6 address
:param name: A host name
        :return: A list of matching HostsEntry instances
"""
results = []
if self.entries:
for entry in self.entries:
if not entry.is_real_entry():
continue
if address and name:
if address == entry.address and name in entry.names:
results.append(entry)
elif address and address == entry.address:
results.append(entry)
elif name in entry.names:
results.append(entry)
return results
def import_url(self, url=None, force=None):
"""
Read a list of host entries from a URL, convert them into instances of HostsEntry and
then append to the list of entries in Hosts
        :param url: The URL of the hosts file to download
        :param force: Remove matching entries before adding the downloaded ones
:return: Counts reflecting the attempted additions
"""
file_contents = self.get_hosts_by_url(url=url).decode('utf-8')
file_contents = file_contents.rstrip().replace('^M', '\n')
file_contents = file_contents.rstrip().replace('\r\n', '\n')
lines = file_contents.split('\n')
skipped = 0
import_entries = []
for line in lines:
stripped_entry = line.strip()
if (not stripped_entry) or (stripped_entry.startswith('#')):
skipped += 1
else:
line = line.partition('#')[0]
line = line.rstrip()
import_entry = HostsEntry.str_to_hostentry(line)
if import_entry:
import_entries.append(import_entry)
add_result = self.add(entries=import_entries, force=force)
write_result = self.write()
return {'result': 'success',
'skipped': skipped,
'add_result': add_result,
'write_result': write_result}
def import_file(self, import_file_path=None):
"""
Read a list of host entries from a file, convert them into instances
of HostsEntry and then append to the list of entries in Hosts
:param import_file_path: The path to the file containing the host entries
:return: Counts reflecting the attempted additions
"""
skipped = 0
invalid_count = 0
if is_readable(import_file_path):
import_entries = []
with open(import_file_path, 'r') as infile:
for line in infile:
stripped_entry = line.strip()
if (not stripped_entry) or (stripped_entry.startswith('#')):
skipped += 1
else:
line = line.partition('#')[0]
line = line.rstrip()
import_entry = HostsEntry.str_to_hostentry(line)
if import_entry:
import_entries.append(import_entry)
else:
invalid_count += 1
add_result = self.add(entries=import_entries)
write_result = self.write()
return {'result': 'success',
'skipped': skipped,
'invalid_count': invalid_count,
'add_result': add_result,
'write_result': write_result}
else:
return {'result': 'failed',
'message': 'Cannot read: file {0}.'.format(import_file_path)}
def add(self, entries=None, force=False, allow_address_duplication=False, merge_names=False):
"""
Add instances of HostsEntry to the instance of Hosts.
:param entries: A list of instances of HostsEntry
:param force: Remove matching before adding
:param allow_address_duplication: Allow using multiple entries for same address
:param merge_names: Merge names where address already exists
:return: The counts of successes and failures
"""
ipv4_count = 0
ipv6_count = 0
comment_count = 0
invalid_count = 0
duplicate_count = 0
replaced_count = 0
import_entries = []
existing_addresses = [x.address for x in self.entries if x.address]
existing_names = []
for item in self.entries:
if item.names:
existing_names.extend(item.names)
existing_names = dedupe_list(existing_names)
for entry in entries:
if entry.entry_type == 'comment':
entry.comment = entry.comment.strip()
if entry.comment[0] != "#":
entry.comment = "# " + entry.comment
import_entries.append(entry)
elif entry.address in ('0.0.0.0', '127.0.0.1') or allow_address_duplication:
                # Allow duplicate entries for addresses used for adblocking
if set(entry.names).intersection(existing_names):
if force:
for name in entry.names:
self.remove_all_matching(name=name)
import_entries.append(entry)
else:
duplicate_count += 1
else:
import_entries.append(entry)
elif entry.address in existing_addresses:
if not any((force, merge_names)):
duplicate_count += 1
elif merge_names:
# get the last entry with matching address
entry_names = list()
for existing_entry in self.entries:
if entry.address == existing_entry.address:
entry_names = existing_entry.names
break
# merge names with that entry
merged_names = list(set(entry.names + entry_names))
# remove all matching
self.remove_all_matching(address=entry.address)
# append merged entry
entry.names = merged_names
import_entries.append(entry)
elif force:
self.remove_all_matching(address=entry.address)
replaced_count += 1
import_entries.append(entry)
elif set(entry.names).intersection(existing_names):
if not force:
duplicate_count += 1
else:
for name in entry.names:
self.remove_all_matching(name=name)
replaced_count += 1
import_entries.append(entry)
else:
import_entries.append(entry)
for item in import_entries:
if item.entry_type == 'comment':
comment_count += 1
self.entries.append(item)
elif item.entry_type == 'ipv4':
ipv4_count += 1
self.entries.append(item)
elif item.entry_type == 'ipv6':
ipv6_count += 1
self.entries.append(item)
return {'comment_count': comment_count,
'ipv4_count': ipv4_count,
'ipv6_count': ipv6_count,
'invalid_count': invalid_count,
'duplicate_count': duplicate_count,
'replaced_count': replaced_count}
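    # A sketch of how the add() flags interact, assuming the file already
    # contains an entry '10.0.0.1 old.local' (all values are illustrative):
    #
    #   new = HostsEntry(entry_type='ipv4', address='10.0.0.1', names=['new.local'])
    #   hosts.add([new])                    # counted as a duplicate, nothing added
    #   hosts.add([new], force=True)        # old entry removed, new entry appended
    #   hosts.add([new], merge_names=True)  # one entry kept with both names merged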
def populate_entries(self):
"""
Called by the initialiser of Hosts. This reads the entries from the local hosts file,
converts them into instances of HostsEntry and adds them to the Hosts list of entries.
:return: None
"""
try:
with open(self.hosts_path, 'r') as hosts_file:
hosts_entries = [line for line in hosts_file]
for hosts_entry in hosts_entries:
entry_type = HostsEntry.get_entry_type(hosts_entry)
if entry_type == "comment":
hosts_entry = hosts_entry.replace("\r", "")
hosts_entry = hosts_entry.replace("\n", "")
self.entries.append(HostsEntry(entry_type="comment",
comment=hosts_entry))
elif entry_type == "blank":
self.entries.append(HostsEntry(entry_type="blank"))
elif entry_type in ("ipv4", "ipv6"):
chunked_entry = hosts_entry.split()
stripped_name_list = [name.strip() for name in chunked_entry[1:]]
self.entries.append(
HostsEntry(
entry_type=entry_type,
address=chunked_entry[0].strip(),
names=stripped_name_list))
except IOError:
return {'result': 'failed',
'message': 'Cannot read: {0}.'.format(self.hosts_path)}
|
|
from typing import Optional, Dict
import abc
import os
import subprocess as sp
import uuid
import time
import copy
from shlex import quote as shq
import webbrowser
import warnings
from hailtop.config import get_deploy_config, get_user_config
import hailtop.batch_client.client as bc
from hailtop.batch_client.client import BatchClient
from . import resource, batch, job as _job # pylint: disable=unused-import
class Backend(abc.ABC):
"""
Abstract class for backends.
"""
_DEFAULT_SHELL = '/bin/bash'
@abc.abstractmethod
def _run(self, batch, dry_run, verbose, delete_scratch_on_exit, **backend_kwargs):
"""
Execute a batch.
Warning
-------
This method should not be called directly. Instead, use :meth:`.batch.Batch.run`.
"""
return
# pylint: disable=R0201
def close(self):
"""
Close a Hail Batch backend.
"""
return
class LocalBackend(Backend):
"""
Backend that executes batches on a local computer.
Examples
--------
>>> local_backend = LocalBackend(tmp_dir='/tmp/user/')
>>> b = Batch(backend=local_backend)
Parameters
----------
tmp_dir:
Temporary directory to use.
gsa_key_file:
Mount a file with a gsa key to `/gsa-key/key.json`. Only used if a
job specifies a docker image. This option will override the value set by
the environment variable `HAIL_BATCH_GSA_KEY_FILE`.
extra_docker_run_flags:
Additional flags to pass to `docker run`. Only used if a job specifies
a docker image. This option will override the value set by the environment
variable `HAIL_BATCH_EXTRA_DOCKER_RUN_FLAGS`.
"""
def __init__(self,
tmp_dir: str = '/tmp/',
gsa_key_file: Optional[str] = None,
extra_docker_run_flags: Optional[str] = None):
self._tmp_dir = tmp_dir.rstrip('/')
flags = ''
if extra_docker_run_flags is not None:
flags += extra_docker_run_flags
elif os.environ.get('HAIL_BATCH_EXTRA_DOCKER_RUN_FLAGS') is not None:
flags += os.environ['HAIL_BATCH_EXTRA_DOCKER_RUN_FLAGS']
if gsa_key_file is None:
gsa_key_file = os.environ.get('HAIL_BATCH_GSA_KEY_FILE')
if gsa_key_file is not None:
flags += f' -v {gsa_key_file}:/gsa-key/key.json'
self._extra_docker_run_flags = flags
def _run(self,
batch: 'batch.Batch',
dry_run: bool,
verbose: bool,
delete_scratch_on_exit: bool,
**backend_kwargs): # pylint: disable=R0915
"""
Execute a batch.
Warning
-------
This method should not be called directly. Instead, use :meth:`.batch.Batch.run`.
Parameters
----------
batch:
Batch to execute.
dry_run:
If `True`, don't execute code.
verbose:
If `True`, print debugging output.
delete_scratch_on_exit:
If `True`, delete temporary directories with intermediate files.
"""
if backend_kwargs:
raise ValueError(f'LocalBackend does not support any of these keywords: {backend_kwargs}')
tmpdir = self._get_scratch_dir()
lines = ['set -e' + ('x' if verbose else ''),
'\n',
                 '# change to the tmp directory',
f"cd {tmpdir}",
'\n']
copied_input_resource_files = set()
os.makedirs(tmpdir + '/inputs/', exist_ok=True)
if batch.requester_pays_project:
requester_pays_project = f'-u {batch.requester_pays_project}'
else:
requester_pays_project = ''
def copy_input(job, r):
if isinstance(r, resource.InputResourceFile):
if r not in copied_input_resource_files:
copied_input_resource_files.add(r)
if r._input_path.startswith('gs://'):
return [f'gsutil {requester_pays_project} cp {shq(r._input_path)} {shq(r._get_path(tmpdir))}']
absolute_input_path = os.path.realpath(r._input_path)
dest = r._get_path(tmpdir)
dir = os.path.dirname(dest)
os.makedirs(dir, exist_ok=True)
if job._image is not None: # pylint: disable-msg=W0640
return [f'cp {shq(absolute_input_path)} {shq(dest)}']
return [f'ln -sf {shq(absolute_input_path)} {shq(dest)}']
return []
assert isinstance(r, resource.JobResourceFile)
return []
def copy_external_output(r):
def _cp(dest):
if not dest.startswith('gs://'):
dest = os.path.abspath(dest)
directory = os.path.dirname(dest)
os.makedirs(directory, exist_ok=True)
return 'cp'
return f'gsutil {requester_pays_project} cp'
if isinstance(r, resource.InputResourceFile):
return [f'{_cp(dest)} {shq(r._input_path)} {shq(dest)}'
for dest in r._output_paths]
assert isinstance(r, resource.JobResourceFile)
return [f'{_cp(dest)} {r._get_path(tmpdir)} {shq(dest)}'
for dest in r._output_paths]
def symlink_input_resource_group(r):
symlinks = []
if isinstance(r, resource.ResourceGroup) and r._source is None:
for name, irf in r._resources.items():
src = irf._get_path(tmpdir)
dest = f'{r._get_path(tmpdir)}.{name}'
symlinks.append(f'ln -sf {shq(src)} {shq(dest)}')
return symlinks
write_inputs = [x for r in batch._input_resources for x in copy_external_output(r)]
if write_inputs:
lines += ["# Write input resources to output destinations"]
lines += write_inputs
lines += ['\n']
for job in batch._jobs:
os.makedirs(f'{tmpdir}/{job._job_id}/', exist_ok=True)
lines.append(f"# {job._job_id}: {job.name if job.name else ''}")
lines += [x for r in job._inputs for x in copy_input(job, r)]
lines += [x for r in job._mentioned for x in symlink_input_resource_group(r)]
resource_defs = [r._declare(tmpdir) for r in job._mentioned]
env = [f'export {k}={v}' for k, v in job._env.items()]
job_shell = job._shell if job._shell else self._DEFAULT_SHELL
defs = '; '.join(resource_defs) + '; ' if resource_defs else ''
joined_env = '; '.join(env) + '; ' if env else ''
cmd = " && ".join(f'{{\n{x}\n}}' for x in job._command)
quoted_job_script = shq(joined_env + defs + cmd)
if job._image:
memory = f'-m {job._memory}' if job._memory else ''
cpu = f'--cpus={job._cpu}' if job._cpu else ''
                lines.append(f"docker run "
                             "--entrypoint='' "
f"{self._extra_docker_run_flags} "
f"-v {tmpdir}:{tmpdir} "
f"-w {tmpdir} "
f"{memory} "
f"{cpu} "
f"{job._image} "
f"{job_shell} -c {quoted_job_script}")
else:
lines.append(f"{job_shell} -c {quoted_job_script}")
lines += [x for r in job._external_outputs for x in copy_external_output(r)]
lines += ['\n']
script = "\n".join(lines)
if dry_run:
            print(script)
else:
try:
sp.check_call(script, shell=True)
except sp.CalledProcessError as e:
print(e)
print(e.output)
raise
finally:
if delete_scratch_on_exit:
sp.run(f'rm -rf {tmpdir}', shell=True, check=False)
print('Batch completed successfully!')
def _get_scratch_dir(self):
def _get_random_name():
dir = f'{self._tmp_dir}/batch/{uuid.uuid4().hex[:6]}'
if os.path.isdir(dir):
return _get_random_name()
os.makedirs(dir, exist_ok=True)
return dir
return _get_random_name()
class ServiceBackend(Backend):
"""Backend that executes batches on Hail's Batch Service on Google Cloud.
Examples
--------
>>> service_backend = ServiceBackend('my-billing-account', 'my-bucket') # doctest: +SKIP
>>> b = Batch(backend=service_backend) # doctest: +SKIP
>>> b.run() # doctest: +SKIP
>>> service_backend.close() # doctest: +SKIP
If the Hail configuration parameters batch/billing_project and
batch/bucket were previously set with ``hailctl config set``, then
one may elide the `billing_project` and `bucket` parameters.
>>> service_backend = ServiceBackend()
>>> b = Batch(backend=service_backend)
>>> b.run() # doctest: +SKIP
>>> service_backend.close()
Parameters
----------
billing_project:
Name of billing project to use.
bucket:
Name of bucket to use. Should not include the ``gs://``
prefix.
"""
def __init__(self, billing_project: str = None, bucket: str = None):
if billing_project is None:
billing_project = get_user_config().get('batch', 'billing_project', fallback=None)
if billing_project is None:
raise ValueError(
'the billing_project parameter of ServiceBackend must be set '
'or run `hailctl config set batch/billing_project '
'MY_BILLING_PROJECT`')
self._batch_client = BatchClient(billing_project)
if bucket is None:
bucket = get_user_config().get('batch', 'bucket', fallback=None)
if bucket is None:
raise ValueError(
'the bucket parameter of ServiceBackend must be set '
'or run `hailctl config set batch/bucket '
'MY_BUCKET`')
self._bucket_name = bucket
def close(self):
"""
Close the connection with the Batch Service.
Notes
-----
This method should be called after executing your batches at the
end of your script.
"""
self._batch_client.close()
def _run(self,
batch: 'batch.Batch',
dry_run: bool,
verbose: bool,
delete_scratch_on_exit: bool,
wait: bool = True,
open: bool = False,
disable_progress_bar: bool = False,
callback: Optional[str] = None,
**backend_kwargs): # pylint: disable-msg=too-many-statements
"""Execute a batch.
Warning
-------
This method should not be called directly. Instead, use :meth:`.batch.Batch.run`
and pass :class:`.ServiceBackend` specific arguments as key-word arguments.
Parameters
----------
batch:
Batch to execute.
dry_run:
If `True`, don't execute code.
verbose:
If `True`, print debugging output.
delete_scratch_on_exit:
If `True`, delete temporary directories with intermediate files.
wait:
If `True`, wait for the batch to finish executing before returning.
open:
If `True`, open the UI page for the batch.
disable_progress_bar:
If `True`, disable the progress bar.
callback:
If not `None`, a URL that will receive at most one POST request
after the entire batch completes.
"""
if backend_kwargs:
raise ValueError(f'ServiceBackend does not support any of these keywords: {backend_kwargs}')
build_dag_start = time.time()
token = uuid.uuid4().hex[:6]
remote_tmpdir = f'gs://{self._bucket_name}/batch/{token}'
local_tmpdir = f'/io/batch/{token}'
default_image = 'ubuntu:18.04'
attributes = copy.deepcopy(batch.attributes)
if batch.name is not None:
attributes['name'] = batch.name
bc_batch = self._batch_client.create_batch(attributes=attributes, callback=callback)
n_jobs_submitted = 0
used_remote_tmpdir = False
job_to_client_job_mapping: Dict[_job.Job, bc.Job] = {}
jobs_to_command = {}
commands = []
bash_flags = 'set -e' + ('x' if verbose else '')
activate_service_account = 'gcloud -q auth activate-service-account ' \
'--key-file=/gsa-key/key.json'
def copy_input(r):
if isinstance(r, resource.InputResourceFile):
return [(r._input_path, r._get_path(local_tmpdir))]
assert isinstance(r, resource.JobResourceFile)
return [(r._get_path(remote_tmpdir), r._get_path(local_tmpdir))]
def copy_internal_output(r):
assert isinstance(r, resource.JobResourceFile)
return [(r._get_path(local_tmpdir), r._get_path(remote_tmpdir))]
def copy_external_output(r):
if isinstance(r, resource.InputResourceFile):
return [(r._input_path, dest) for dest in r._output_paths]
assert isinstance(r, resource.JobResourceFile)
return [(r._get_path(local_tmpdir), dest) for dest in r._output_paths]
def symlink_input_resource_group(r):
symlinks = []
if isinstance(r, resource.ResourceGroup) and r._source is None:
for name, irf in r._resources.items():
src = irf._get_path(local_tmpdir)
dest = f'{r._get_path(local_tmpdir)}.{name}'
symlinks.append(f'ln -sf {shq(src)} {shq(dest)}')
return symlinks
write_external_inputs = [x for r in batch._input_resources for x in copy_external_output(r)]
if write_external_inputs:
def _cp(src, dst):
return f'gsutil -m cp -R {shq(src)} {shq(dst)}'
write_cmd = f'''
{bash_flags}
{activate_service_account}
{' && '.join([_cp(*files) for files in write_external_inputs])}
'''
if dry_run:
commands.append(write_cmd)
else:
j = bc_batch.create_job(image='gcr.io/google.com/cloudsdktool/cloud-sdk:310.0.0-alpine',
command=['/bin/bash', '-c', write_cmd],
attributes={'name': 'write_external_inputs'})
jobs_to_command[j] = write_cmd
n_jobs_submitted += 1
for job in batch._jobs:
inputs = [x for r in job._inputs for x in copy_input(r)]
outputs = [x for r in job._internal_outputs for x in copy_internal_output(r)]
if outputs:
used_remote_tmpdir = True
outputs += [x for r in job._external_outputs for x in copy_external_output(r)]
symlinks = [x for r in job._mentioned for x in symlink_input_resource_group(r)]
env_vars = {
**job._env,
**{r._uid: r._get_path(local_tmpdir) for r in job._mentioned}}
if job._image is None:
if verbose:
print(f"Using image '{default_image}' since no image was specified.")
make_local_tmpdir = f'mkdir -p {local_tmpdir}/{job._job_id}'
job_command = [cmd.strip() for cmd in job._command]
prepared_job_command = (f'{{\n{x}\n}}' for x in job_command)
cmd = f'''
{bash_flags}
{make_local_tmpdir}
{"; ".join(symlinks)}
{" && ".join(prepared_job_command)}
'''
if dry_run:
commands.append(cmd)
continue
parents = [job_to_client_job_mapping[j] for j in job._dependencies]
attributes = copy.deepcopy(job.attributes) if job.attributes else dict()
if job.name:
attributes['name'] = job.name
resources = {}
if job._cpu:
resources['cpu'] = job._cpu
if job._memory:
resources['memory'] = job._memory
if job._storage:
resources['storage'] = job._storage
image = job._image if job._image else default_image
if not image.startswith('gcr.io/'):
warnings.warn(f'Using an image {image} not in GCR. '
f'Jobs may fail due to Docker Hub rate limits.')
j = bc_batch.create_job(image=image,
command=[job._shell if job._shell else self._DEFAULT_SHELL, '-c', cmd],
parents=parents,
attributes=attributes,
resources=resources,
input_files=inputs if len(inputs) > 0 else None,
output_files=outputs if len(outputs) > 0 else None,
always_run=job._always_run,
timeout=job._timeout,
gcsfuse=job._gcsfuse if len(job._gcsfuse) > 0 else None,
env=env_vars,
requester_pays_project=batch.requester_pays_project)
n_jobs_submitted += 1
job_to_client_job_mapping[job] = j
jobs_to_command[j] = cmd
if dry_run:
print("\n\n".join(commands))
return None
if delete_scratch_on_exit and used_remote_tmpdir:
parents = list(jobs_to_command.keys())
rm_cmd = f'gsutil -m rm -r {remote_tmpdir}'
cmd = f'''
{bash_flags}
{activate_service_account}
{rm_cmd}
'''
j = bc_batch.create_job(
image='gcr.io/google.com/cloudsdktool/cloud-sdk:310.0.0-alpine',
command=['/bin/bash', '-c', cmd],
parents=parents,
attributes={'name': 'remove_tmpdir'},
always_run=True)
jobs_to_command[j] = cmd
n_jobs_submitted += 1
if verbose:
print(f'Built DAG with {n_jobs_submitted} jobs in {round(time.time() - build_dag_start, 3)} seconds.')
submit_batch_start = time.time()
bc_batch = bc_batch.submit(disable_progress_bar=disable_progress_bar)
jobs_to_command = {j.id: cmd for j, cmd in jobs_to_command.items()}
if verbose:
print(f'Submitted batch {bc_batch.id} with {n_jobs_submitted} jobs in {round(time.time() - submit_batch_start, 3)} seconds:')
for jid, cmd in jobs_to_command.items():
print(f'{jid}: {cmd}')
print('')
deploy_config = get_deploy_config()
url = deploy_config.url('batch', f'/batches/{bc_batch.id}')
print(f'Submitted batch {bc_batch.id}, see {url}')
if open:
webbrowser.open(url)
if wait:
print(f'Waiting for batch {bc_batch.id}...')
status = bc_batch.wait()
print(f'batch {bc_batch.id} complete: {status["state"]}')
return bc_batch
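# A minimal end-to-end sketch using the local backend. This assumes the public
# hailtop.batch API (Batch, new_job) from the sibling modules of this package;
# the temporary directory and job command are illustrative only.
#
#   from hailtop.batch import Batch, LocalBackend
#
#   backend = LocalBackend(tmp_dir='/tmp/hail-batch/')
#   b = Batch(backend=backend, name='demo')
#   j = b.new_job(name='hello')
#   j.command('echo hello world')
#   b.run()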
|
|
# -*- encoding: utf-8 -*-
import json
from django.conf import settings
from django.contrib import messages
from django.contrib.auth import REDIRECT_FIELD_NAME
from django.contrib.auth.decorators import login_required
from django.core.exceptions import ValidationError
from django.core.mail import send_mail
from django.core.urlresolvers import reverse
from django.db import IntegrityError, models
from django.template.loader import render_to_string
from django.http import HttpResponse, HttpResponseRedirect
from django.utils.decorators import method_decorator
from django.utils.translation import ugettext as _
from vanilla import CreateView, DeleteView, DetailView, ListView, UpdateView
from djqscsv import render_to_csv_response
from .models import Event, Proposal, Vote, Activity
from .forms import EventForm, ProposalForm, ActivityForm, ActivityTimetableForm
class FormValidRedirectMixing(object):
def success_redirect(self, message):
messages.success(self.request, message)
return HttpResponseRedirect(self.get_success_url())
class BaseEventView(object):
model = Event
form_class = EventForm
lookup_field = 'slug'
class ListEvents(BaseEventView, ListView):
template_name = 'event/event_list.html'
queryset = Event.objects.published_ones()
def get_context_data(self, **kwargs):
context = super(ListEvents, self).get_context_data(**kwargs)
if self.request.user.is_authenticated():
event_list = Event.objects.filter(
models.Q(is_published=True) |
models.Q(author=self.request.user))
context.update(event_list=event_list)
return context
class CreateEvent(BaseEventView, CreateView, FormValidRedirectMixing):
template_name = 'event/event_form.html'
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.author = self.request.user
self.object.save()
self.send_event_creation_email()
return self.success_redirect(_(u'Event created.'))
def send_event_creation_email(self):
event = self.object
context = {'event_title': event.title}
message = render_to_string('mailing/event_created.txt', context)
subject = _(u'Your event is ready to receive proposals')
send_mail(subject, message,
settings.NO_REPLY_EMAIL, [event.author.email])
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(CreateEvent, self).dispatch(*args, **kwargs)
class DetailEvent(BaseEventView, DetailView):
template_name = 'event/event_detail.html'
def get_context_data(self, **kwargs):
context = super(DetailEvent, self).get_context_data(**kwargs)
context['vote_rates'] = Vote.VOTE_RATES
event_proposals = self.object.proposals.cached_authors()
if self.object.user_can_see_proposals(self.request.user):
if not self.request.user.is_anonymous():
event_proposals = event_proposals.order_by_never_voted(
user_id=self.request.user.id)
elif not self.request.user.is_anonymous():
event_proposals = event_proposals.filter(author=self.request.user)
else:
event_proposals = event_proposals.none()
context.update(event_proposals=event_proposals)
return context
class UpdateEvent(BaseEventView, UpdateView, FormValidRedirectMixing):
template_name = 'event/event_form.html'
def form_valid(self, form):
self.object = form.save()
return self.success_redirect(_(u'Event updated.'))
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
event = self.get_object()
if (event.author != self.request.user and
not self.request.user.is_superuser):
messages.error(
self.request, _(u'You are not allowed to see this page.'))
return HttpResponseRedirect(
reverse('view_event', kwargs={'slug': event.slug}),
)
return super(UpdateEvent, self).dispatch(*args, **kwargs)
class ExportEvent(BaseEventView, DetailView):
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
event = self.get_object()
if (event.author != self.request.user and
not self.request.user.is_superuser):
messages.error(
self.request, _(u'You are not allowed to see this page.'))
return HttpResponseRedirect(reverse('list_events'))
return super(ExportEvent, self).dispatch(*args, **kwargs)
def get(self, request, *args, **kwargs):
event = self.get_object()
filename = "event_%s_export" % event.slug.replace('-', '_')
field_header_map = {
'author__username': _('Author'),
'author__email': _('Author E-Mail'),
'votes__rate__sum': _('Vote Rate'),
'votes__count': _('Votes Count'),
}
proposals = event.get_votes_to_export()
return render_to_csv_response(
proposals,
append_datestamp=True,
filename=filename,
field_header_map=field_header_map
)
class CreateEventGrade(BaseEventView, DetailView):
template_name = 'event/event_create_grade.html'
def get_context_data(self, **kwargs):
context = super(CreateEventGrade, self).get_context_data(**kwargs)
context.update(activity_form=ActivityForm())
context.update(activity_timetable_form=ActivityTimetableForm())
return context
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
self.object = self.get_object()
in_jury = self.object.jury.users.filter(
pk=self.request.user.pk).exists()
if (not in_jury and not self.request.user.is_superuser):
messages.error(
self.request, _(u'You are not allowed to see this page.'))
return HttpResponseRedirect(
reverse('view_event', kwargs={'slug': self.object.slug}),
)
return super(CreateEventGrade, self).dispatch(*args, **kwargs)
def get(self, request, *args, **kwargs):
self.object = self.get_object()
track = self.object.tracks.first()
        # The first time through, we generate a grade based on the event's slots.
if not track.activities.exists():
top_not_approved_ones = self.object.get_not_approved_grade()
order = 0
for proposal in top_not_approved_ones[:self.object.slots]:
proposal.track = track
proposal.is_approved = True
proposal.track_order = order
proposal.save()
order += 1
return super(CreateEventGrade, self).get(request, *args, **kwargs)
def post(self, request, *args, **kwargs):
self.object = self.get_object()
approved_activities_pks = self.request.POST.getlist(
'approved_activities')
track = self.object.tracks.first()
track.proposals.update(is_approved=False)
track.activities.update(track=None, track_order=None)
if not approved_activities_pks:
return HttpResponseRedirect(
reverse('create_event_grade',
kwargs={'slug': self.object.slug}),
)
order = 0
for activity_pk in approved_activities_pks:
activity = Activity.objects.get(pk=activity_pk)
activity.track = track
activity.track_order = order
activity.save()
if activity.activity_type == Activity.PROPOSAL:
activity.proposal.is_approved = True
activity.proposal.save()
order += 1
return HttpResponseRedirect(
reverse('create_event_grade',
kwargs={'slug': self.object.slug}),
)
class DetailEventGrade(BaseEventView, DetailView):
template_name = 'event/event_detail_grade.html'
class BaseProposalView(object):
model = Proposal
form_class = ProposalForm
lookup_field = 'slug'
class CreateProposal(BaseProposalView, CreateView, FormValidRedirectMixing):
template_name = 'proposal/proposal_form.html'
def get_context_data(self, **kwargs):
context = super(CreateProposal, self).get_context_data(**kwargs)
context['event'] = Event.objects.get(slug=self.kwargs['slug'])
return context
def get(self, request, *args, **kwargs):
data = self.get_context_data()
event = data.get('event')
if event.due_date_is_passed:
messages.error(
self.request,
_(u"This Event doesn't accept Proposals anymore."))
return HttpResponseRedirect(
reverse('view_event', kwargs={'slug': event.slug}),
)
return super(CreateProposal, self).get(request, *args, **kwargs)
def form_valid(self, form):
self.object = form.save(commit=False)
self.object.author = self.request.user
self.object.event = Event.objects.get(slug=self.kwargs['slug'])
self.object.save()
self.send_new_proposal_to_jury_email()
self.send_proposal_creation_email()
return self.success_redirect(_(u'Proposal created.'))
def send_new_proposal_to_jury_email(self):
proposal = self.object
context = {
'event_title': proposal.event.title,
'proposal_title': proposal.title
}
message = render_to_string('mailing/jury_new_proposal.txt', context)
subject = _(u'Your event has new proposals')
recipients = proposal.event.jury.users.values_list('email', flat=True)
send_mail(subject, message, settings.NO_REPLY_EMAIL, recipients)
def send_proposal_creation_email(self):
proposal = self.object
context = {
'event_title': proposal.event.title,
'proposal_title': proposal.title
}
message = render_to_string(
'mailing/author_proposal_created.txt', context)
subject = _(u'Your proposal was submitted')
recipients = [proposal.author.email]
send_mail(subject, message, settings.NO_REPLY_EMAIL, recipients)
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(CreateProposal, self).dispatch(*args, **kwargs)
class ListMyProposals(BaseProposalView, ListView):
template_name = 'proposal/my_proposals.html'
def get_queryset(self):
return Proposal.objects.filter(author_id=self.request.user.id)
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(ListMyProposals, self).dispatch(*args, **kwargs)
class UpdateProposal(BaseProposalView, UpdateView, FormValidRedirectMixing):
template_name = 'proposal/proposal_form.html'
def get_context_data(self, **kwargs):
context = super(UpdateProposal, self).get_context_data(**kwargs)
context['event'] = Event.objects.get(slug=self.kwargs['event_slug'])
return context
def form_valid(self, form):
self.object = form.save()
return self.success_redirect(_(u'Proposal updated.'))
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
return super(UpdateProposal, self).dispatch(*args, **kwargs)
class DeleteProposal(BaseProposalView, DeleteView):
template_name = 'proposal/proposal_confirm_delete.html'
def post(self, request, *args, **kwargs):
proposal = self.get_object()
proposal.delete()
messages.success(self.request, _(u'Proposal deleted.'))
return HttpResponseRedirect(proposal.event.get_absolute_url())
@method_decorator(login_required)
def dispatch(self, *args, **kwargs):
proposal = self.get_object()
if (proposal.author != self.request.user and
not self.request.user.is_superuser):
messages.error(
self.request, _(u'You are not allowed to see this page.'))
return HttpResponseRedirect(proposal.event.get_absolute_url())
return super(DeleteProposal, self).dispatch(*args, **kwargs)
class RateProposal(BaseProposalView, UpdateView):
def post(self, request, *args, **kwargs):
self.object = self.get_object()
rate = kwargs.get('rate')
response_content = {}
response_status = 200
try:
self.object.rate(self.request.user, rate)
except IndexError:
response_content['message'] = _(u'Rate Index not found.')
response_status = 400
        except (IntegrityError, ValidationError) as e:
response_content['message'] = e.message
response_status = 400
else:
response_content['message'] = _(u'Proposal rated.')
return HttpResponse(
json.dumps(response_content),
status=response_status,
content_type='application/json')
def get(self, request, *args, **kwargs):
self.object = self.get_object()
rate = kwargs.get('rate')
try:
self.object.rate(self.request.user, rate)
except IndexError:
messages.error(self.request, _(u'Rate Index not found.'))
        except (IntegrityError, ValidationError) as e:
messages.error(self.request, e.message)
else:
messages.success(self.request, _(u'Proposal rated.'))
return HttpResponseRedirect(self.get_success_url())
def dispatch(self, *args, **kwargs):
proposal = self.get_object()
view_event_url = reverse(
'view_event', kwargs={'slug': proposal.event.slug})
if not self.request.user.is_authenticated():
message = _(u'You need to be logged in to '
u'continue to the next step.')
if self.request.method == 'GET':
messages.error(self.request, message)
return HttpResponseRedirect(view_event_url)
response = {}
response['message'] = message
response['redirectUrl'] = u'{}?{}={}'.format(
settings.LOGIN_URL,
REDIRECT_FIELD_NAME,
self.request.META.get('PATH_INFO')
)
return HttpResponse(
json.dumps(response),
status=401,
content_type='application/json')
elif not proposal.user_can_vote(self.request.user):
message = _(u'You are not allowed to see this page.')
if self.request.method == 'GET':
messages.error(self.request, message)
return HttpResponseRedirect(view_event_url)
response = {}
response['message'] = message
response['redirectUrl'] = ''
return HttpResponse(
json.dumps(response),
status=401,
content_type='application/json'
)
return super(RateProposal, self).dispatch(*args, **kwargs)
class ApproveProposal(BaseProposalView, UpdateView):
def post(self, request, *args, **kwargs):
self.object = self.get_object()
response_content = {}
response_status = 200
try:
self.object.approve()
        except (IntegrityError, ValidationError) as e:
response_content['message'] = e.message
response_status = 400
else:
response_content['message'] = _(u'Proposal approved.')
return HttpResponse(
json.dumps(response_content),
status=response_status,
content_type='application/json')
def get(self, request, *args, **kwargs):
self.object = self.get_object()
try:
self.object.approve()
        except (IntegrityError, ValidationError) as e:
messages.error(self.request, e.message)
else:
messages.success(self.request, _(u'Proposal approved.'))
return HttpResponseRedirect(self.get_success_url())
def dispatch(self, *args, **kwargs):
proposal = self.get_object()
view_event_url = reverse(
'view_event', kwargs={'slug': proposal.event.slug})
if not self.request.user.is_authenticated():
message = _(u'You need to be logged in to '
u'continue to the next step.')
if self.request.method == 'GET':
messages.error(self.request, message)
return HttpResponseRedirect(view_event_url)
response = {}
response['message'] = message
response['redirectUrl'] = u'{}?{}={}'.format(
settings.LOGIN_URL,
REDIRECT_FIELD_NAME,
self.request.META.get('PATH_INFO')
)
return HttpResponse(
json.dumps(response),
status=401,
content_type='application/json')
elif not proposal.user_can_approve(self.request.user):
message = _(u'You are not allowed to see this page.')
if self.request.method == 'GET':
messages.error(self.request, message)
return HttpResponseRedirect(view_event_url)
response = {}
response['message'] = message
response['redirectUrl'] = ''
return HttpResponse(
json.dumps(response),
status=401,
content_type='application/json'
)
return super(ApproveProposal, self).dispatch(*args, **kwargs)
class DisapproveProposal(BaseProposalView, UpdateView):
def post(self, request, *args, **kwargs):
self.object = self.get_object()
response_content = {}
response_status = 200
try:
self.object.disapprove()
        except (IntegrityError, ValidationError) as e:
response_content['message'] = e.message
response_status = 400
else:
response_content['message'] = _(u'Proposal disapproved.')
return HttpResponse(
json.dumps(response_content),
status=response_status,
content_type='application/json')
def get(self, request, *args, **kwargs):
self.object = self.get_object()
try:
self.object.disapprove()
        except (IntegrityError, ValidationError) as e:
messages.error(self.request, e.message)
else:
messages.success(self.request, _(u'Proposal disapproved.'))
return HttpResponseRedirect(self.get_success_url())
def dispatch(self, *args, **kwargs):
proposal = self.get_object()
view_event_url = reverse(
'view_event', kwargs={'slug': proposal.event.slug})
if not self.request.user.is_authenticated():
message = _(u'You need to be logged in to '
u'continue to the next step.')
if self.request.method == 'GET':
messages.error(self.request, message)
return HttpResponseRedirect(view_event_url)
response = {}
response['message'] = message
response['redirectUrl'] = u'{}?{}={}'.format(
settings.LOGIN_URL,
REDIRECT_FIELD_NAME,
self.request.META.get('PATH_INFO')
)
return HttpResponse(
json.dumps(response),
status=401,
content_type='application/json')
elif not proposal.user_can_approve(self.request.user):
message = _(u'You are not allowed to see this page.')
if self.request.method == 'GET':
messages.error(self.request, message)
return HttpResponseRedirect(view_event_url)
response = {}
response['message'] = message
response['redirectUrl'] = ''
return HttpResponse(
json.dumps(response),
status=401,
content_type='application/json'
)
return super(DisapproveProposal, self).dispatch(*args, **kwargs)
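# A hypothetical urls.py sketch showing how these views could be wired to the
# URL names referenced above ('list_events', 'view_event', 'create_event_grade');
# the regex patterns and module path are assumptions, not the project's actual routing:
#
#   from django.conf.urls import url
#   from . import views
#
#   urlpatterns = [
#       url(r'^$', views.ListEvents.as_view(), name='list_events'),
#       url(r'^(?P<slug>[-\w]+)/$', views.DetailEvent.as_view(), name='view_event'),
#       url(r'^(?P<slug>[-\w]+)/grade/$', views.CreateEventGrade.as_view(),
#           name='create_event_grade'),
#   ]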
|
|
#!/usr/bin/env python
import os
import shutil
import fnmatch
import json
import re
from optparse import OptionParser
import logging
section_line = re.compile('\[(?P<section>.*)\]')
import_line = re.compile('@import url\((?P<filename>.*)\)')
property_line = re.compile('(?P<id>.*)\s*[:=]\s*(?P<value>.*)')
def _get_locales(filename):
locales_list = json.load(open(filename), encoding="utf-8")
return locales_list.keys()
def find_files(dirs, pattern):
matches = []
for dir in dirs:
for current, dirnames, filenames in os.walk(dir):
for filename in fnmatch.filter(filenames, pattern):
matches.append(os.path.join(current, filename))
return matches
def parse_manifest_properties(filename):
with open(filename) as f:
data = f.readlines()
strings = {
"default": {},
"entry_points": {},
}
for line in data:
m = property_line.search(line)
if not m or line.strip().startswith('#'):
continue
value = m.group('value').strip()
if '.' in m.group('id'):
entry_point, key = m.group('id').split('.',1)
if entry_point not in strings["entry_points"]:
strings["entry_points"][entry_point] = {}
strings["entry_points"][entry_point][key.strip()] = value
else:
key = m.group('id')
strings["default"][key.strip()] = value
return strings
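# A sketch of parse_manifest_properties() on a hypothetical manifest.properties
# (keys and values below are made up for illustration):
#
#   name = Browser
#   description = Gaia Web Browser
#   dialer.name = Phone
#
# yields:
#
#   {'default': {'name': 'Browser', 'description': 'Gaia Web Browser'},
#    'entry_points': {'dialer': {'name': 'Phone'}}}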
def parse_ini(filename):
log = logging.getLogger(__name__)
with open(filename) as f:
data = f.readlines()
section = 'default'
imports = { section: [] }
for line in data:
if line.strip() == "" or line.startswith('!') or line.startswith('#'):
continue
elif line.strip().startswith('['): # Section header
section = section_line.search(line).group('section')
imports[section] = []
elif '@import' in line: # Import lines
property_file = import_line.search(line).group('filename')
imports[section].append(property_file)
else:
log.warn('parse_ini - found a line with contents '
'unaccounted for "%s"', line.strip())
return imports
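# A sketch of parse_ini() on a hypothetical locales .ini file:
#
#   @import url(browser.en-US.properties)
#
#   [fr]
#   @import url(browser.fr.properties)
#
# yields:
#
#   {'default': ['browser.en-US.properties'],
#    'fr': ['browser.fr.properties']}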
def serialize_ini(outfile, imports):
def _section(locale):
return "[%s]" % locale
def _import(path):
return "@import url(%s)" % path
output = []
for locale, paths in imports.items():
if locale == "default":
for path in paths:
output.insert(0, _import(path))
continue
output.append(_section(locale))
for path in paths:
output.append(_import(path))
with open(outfile, 'w') as o:
o.write("\n".join(output))
def add_locale_imports(locales, ini_file):
"""Recreate an ini file with all locales sections"""
log = logging.getLogger(__name__)
imports = {
"default": parse_ini(ini_file)["default"]
}
for locale in locales:
log.info("adding %s to %s" % (locale, ini_file))
imports[locale] = []
for path in imports["default"]:
locale_path = path.replace("en-US", locale)
imports[locale].append(locale_path)
log.debug("added %s" % locale_path)
serialize_ini(ini_file, imports)
log.info("updated %s saved" % ini_file)
def copy_properties(source, locales, ini_file):
log = logging.getLogger(__name__)
ini_dirname = os.path.dirname(ini_file)
imports = parse_ini(ini_file)
for locale in locales:
log.info("copying %s files as per %s" % (locale, ini_file))
for path in imports[locale]:
target_path = os.path.join(ini_dirname, path)
# apps/browser/locales/browser.fr.properties becomes
# apps/browser/browser.properties
source_path = target_path.replace(os.sep + 'locales', '') \
.replace('.%s' % locale, '')
source_path = os.path.join(source, locale, source_path)
if not os.path.exists(source_path):
log.warn('%s does not exist' % source_path)
continue
shutil.copy(source_path, target_path)
log.debug("copied %s to %s" % (source_path, target_path))
def add_locale_manifest(source, locales, manifest_file):
log = logging.getLogger(__name__)
with open(manifest_file) as f:
manifest = json.load(f, encoding="utf-8")
for locale in locales:
log.info("adding %s to %s" % (locale, manifest_file))
manifest_properties = os.path.join(source, locale,
os.path.dirname(manifest_file),
'manifest.properties')
log.debug("getting strings from %s" % manifest_properties)
if not os.path.exists(manifest_properties):
log.warn("%s does not exist" % manifest_properties)
continue
strings = parse_manifest_properties(manifest_properties)
if "entry_points" in manifest:
for name, ep in manifest["entry_points"].items():
if "locales" not in ep:
continue
log.debug("adding to entry_points.%s.locales" % name)
if name not in strings["entry_points"]:
log.warn("%s.* strings are missing from %s" %
(name, manifest_properties))
continue
ep["locales"][locale] = {}
ep["locales"][locale].update(strings["entry_points"][name])
if "locales" in manifest:
log.debug("adding to locales")
manifest["locales"][locale] = {}
manifest["locales"][locale].update(strings["default"])
f.close()
with open(manifest_file, 'w') as o:
json.dump(manifest, o, encoding="utf-8", indent=2)
log.debug("updated %s saved" % manifest_file)
def setup_logging(volume=1, console=True, filename=None):
logger = logging.getLogger(__name__)
levels = [logging.DEBUG,
logging.INFO,
logging.WARNING,
logging.ERROR,
logging.CRITICAL][::1]
if volume > len(levels):
volume = len(levels) - 1
elif volume < 0:
volume = 0
logger.setLevel(levels[len(levels)-volume])
if console:
console_handler = logging.StreamHandler()
console_formatter = logging.Formatter('%(levelname)s: %(message)s')
console_handler.setFormatter(console_formatter)
logger.addHandler(console_handler)
if filename:
file_handler = logging.FileHandler(filename)
        file_formatter = logging.Formatter('%(asctime)s - %(levelname)s: %(message)s')
        file_handler.setFormatter(file_formatter)
logger.addHandler(file_handler)
def main():
parser = OptionParser("%prog [OPTIONS] [LOCALES...] - create multilocale Gaia")
parser.add_option("-v", "--verbose",
action="count", dest="verbose", default=2,
help="use more to make louder")
parser.add_option("-i", "--ini",
action="store_true", dest="onlyini", default=False,
help=("just edit the ini files and exit; "
"use this with DEBUG=1 make profile"))
parser.add_option("--target",
action="append", dest="target",
help=("path to directory to make changes in "
"(more than one is fine)"))
parser.add_option("--source",
action="store", dest="source",
help="path to the l10n basedir")
parser.add_option("--config",
action="store", dest="config_file",
help=("path to the languages.json config file; "
"will be used instead of LOCALES"))
options, locales = parser.parse_args()
setup_logging(volume=options.verbose)
log = logging.getLogger(__name__)
if options.config_file is not None:
locales = _get_locales(options.config_file)
log.debug("config file specified; ignoring any locales passed as args")
elif len(locales) == 0:
parser.error("You need to specify --config or pass the list of locales")
if options.target is None:
parser.error("You need to specify at least one --target")
if options.source is None and not options.onlyini:
parser.error("You need to specify --source (unless you meant --ini)")
if "en-US" in locales:
locales.remove("en-US")
ini_files = find_files(options.target, "*.ini")
# 1. link properties files from the inis
for ini_file in ini_files:
log.info("########## adding locale import rules to %s" % ini_file)
add_locale_imports(locales, ini_file)
if options.onlyini:
parser.exit(1)
# 2. copy properties files as per the inis
for ini_file in ini_files:
log.info("########## copying locale files as per %s" % ini_file)
copy_properties(options.source, locales, ini_file)
# 3. edit manifests
manifest_files = find_files(options.target, 'manifest.webapp')
for manifest_file in manifest_files:
log.info("########## adding localized names to %s" % manifest_file)
add_locale_manifest(options.source, locales, manifest_file)
if __name__ == "__main__":
main()
|
|
# -*- coding: utf-8 -*-
"""
Created on Wed Nov 25 13:58:50 2015
@author: ktritz
"""
from __future__ import print_function
from builtins import str
from builtins import zip
from builtins import object
import os
import inspect
import types
import numpy as np
import xml.etree.ElementTree as ET
# import kernprof
from . import parse
from .globals import FDP_DIR
from .node import Node
from .signal import Signal
_tree_dict = {}
def initContainerClass(cls, module_tree, **kwargs):
cls._name = module_tree.get('name')
if cls not in cls._instances:
cls._instances[cls] = {}
for read_only in ['root', 'container', 'classparent']:
try:
setattr(cls, '_' + read_only, kwargs[read_only])
except KeyError:
pass
for item in ['mdstree', 'mdspath', 'units']:
getitem = module_tree.get(item)
if getitem is not None:
setattr(cls, item, getitem)
cls._base_items = set(cls.__dict__.keys())
parse.parse_submachine(cls)
class Container(object):
"""
Container class
"""
_instances = {}
_classes = {}
def __init__(self, module_tree, top=False, **kwargs):
cls = self.__class__
self._signals = {}
self._axes = {}
self._containers = {}
self._dynamic_containers = {}
self._tags = []
self._title = module_tree.get('title')
self._desc = module_tree.get('desc')
self._parent = kwargs.get('parent', None)
# print('Init container class: {}'.format(self._name))
try:
self.shot = kwargs['shot']
self.mdstree = kwargs['mdstree']
except KeyError:
pass
if self.shot is not None:
try:
cls._instances[cls][self.shot].append(self)
except KeyError:
cls._instances[cls][self.shot] = [self]
if top:
self._set_dynamic_containers()
for node in module_tree.findall('node'):
branch_str = self._get_branchstr()
NodeClassName = ''.join(['Node', branch_str])
if NodeClassName not in cls._classes:
NodeClass = type(NodeClassName, (Node, cls), {})
cls._classes[NodeClassName] = NodeClass
else:
NodeClass = cls._classes[NodeClassName]
setattr(self, node.get('name'), NodeClass(node, parent=self))
for element in module_tree.findall('defaults'):
method_defaults, defaults_dict = parse.parse_defaults(element)
if hasattr(self._parent, method_defaults):
defaults_dict.update(getattr(self._parent, method_defaults))
setattr(self, method_defaults, defaults_dict)
for element in module_tree.findall('axis'):
signal_list = parse.parse_signal(self, element)
branch_str = self._get_branchstr()
for signal_dict in signal_list:
SignalClassName = ''.join(['Axis', branch_str])
if SignalClassName in cls._classes:
SignalClass = cls._classes[SignalClassName]
else:
SignalClass = type(SignalClassName, (Signal, cls), {})
parse.parse_submachine(SignalClass)
cls._classes[SignalClassName] = SignalClass
SignalObj = SignalClass(**signal_dict)
refs = parse.parse_refs(self, element, SignalObj._transpose)
if not refs:
refs = SignalObj.axes
for axis, ref in zip(SignalObj.axes, refs):
setattr(SignalObj, axis, getattr(self, '_' + ref))
setattr(self, ''.join(['_', signal_dict['_name']]), SignalObj)
for branch in module_tree.findall('container'):
name = branch.get('name')
branch_str = self._get_branchstr()
ContainerClassName = ''.join(['Container', branch_str,
name.capitalize()])
if ContainerClassName not in cls._classes:
ContainerClass = type(ContainerClassName, (cls, Container), {})
initContainerClass(ContainerClass, branch, classparent=cls)
cls._classes[ContainerClassName] = ContainerClass
else:
ContainerClass = cls._classes[ContainerClassName]
ContainerObj = ContainerClass(branch, parent=self)
setattr(self, name, ContainerObj)
self._containers[name] = ContainerObj
for element in module_tree.findall('signal'):
signal_list = parse.parse_signal(self, element)
branch_str = self._get_branchstr()
for signal_dict in signal_list:
SignalClassName = ''.join(['Signal', branch_str])
if SignalClassName in cls._classes:
SignalClass = cls._classes[SignalClassName]
else:
SignalClass = type(SignalClassName, (Signal, cls), {})
parse.parse_submachine(SignalClass)
cls._classes[SignalClassName] = SignalClass
SignalObj = SignalClass(**signal_dict)
refs = parse.parse_refs(self, element, SignalObj._transpose)
if not refs:
refs = SignalObj.axes
for axis, ref in zip(SignalObj.axes, refs):
setattr(SignalObj, axis, getattr(self, '_' + ref))
for default in element.findall('defaults'):
method_defaults, defaults_dict = parse.parse_defaults(
default)
if hasattr(self, method_defaults):
defaults_dict.update(getattr(self, method_defaults))
setattr(SignalObj, method_defaults, defaults_dict)
setattr(self, signal_dict['_name'], SignalObj)
self._signals[signal_dict['_name']] = SignalObj
if top and hasattr(self, '_preprocess'):
self._preprocess()
def __getattr__(self, attribute):
try:
if self._dynamic_containers[attribute] is None:
branch_path = '.'.join([self._get_branch(), attribute])
self._dynamic_containers[attribute] = \
containerClassFactory(branch_path,
root=self._root,
shot=self.shot,
parent=self)
return self._dynamic_containers[attribute]
except KeyError:
pass
if not hasattr(self, '_parent') or self._parent is None:
raise AttributeError("Attribute '{}' not found".format(attribute))
if hasattr(self._parent, '_signals') and \
attribute in self._parent._signals:
raise AttributeError("Attribute '{}' not found".format(attribute))
attr = getattr(self._parent, attribute)
if 'Shot' in str(type(attr)):
raise AttributeError("Attribute '{}' not found".format(attribute))
if Container in attr.__class__.__mro__ and attribute[0] != '_':
raise AttributeError("Attribute '{}' not found".format(attribute))
if inspect.ismethod(attr):
return types.MethodType(attr.__func__, self)
else:
return attr
def _set_dynamic_containers(self):
if not self._dynamic_containers:
container_dir = self._get_path()
if not os.path.isdir(container_dir):
return
files = os.listdir(container_dir)
self._dynamic_containers = {}
for container in files:
subcontainer_dir = os.path.join(container_dir, container)
if container[0] != '_' and os.path.isdir(subcontainer_dir):
self._dynamic_containers[container] = None
# self._dynamic_containers = {container: None for container in
# files if os.path.isdir(
# os.path.join(container_dir, container)) and
# container[0] is not '_'}
@classmethod
def _get_path(cls):
branch = cls._get_branch().split('.')
path = os.path.join(FDP_DIR, 'diagnostics', cls._root._name)
for step in branch:
newpath = os.path.join(path, step)
if not os.path.isdir(newpath):
break
path = newpath
return path
def __dir__(self):
items = list(self.__dict__.keys())
items.extend(list(self.__class__.__dict__.keys()))
if Signal not in self.__class__.mro():
items.extend(list(self._dynamic_containers.keys()))
return [item for item in set(items).difference(self._base_items)
if item[0] != '_']
def __iter__(self):
if not len(self._signals):
items = sorted(list(self._containers.values()),
key=lambda obj: obj._name.lower())
# items.extend(self._dynamic_containers.values())
else:
items = sorted(list(self._signals.values()),
key=lambda obj: obj._name.lower())
return iter(items)
@classmethod
def _get_branch(cls):
if 'Shot' in str(cls):
return None
branch = cls._name
parent = cls._classparent
while 'Shot' not in str(parent) and 'Shot' not in str(parent.__class__):
branch = '.'.join([parent._name, branch])
parent = parent._classparent
return branch
@classmethod
def _get_branchstr(cls):
branch = cls._get_branch()
return ''.join([sub.capitalize() for sub in branch.split('.')])
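# Illustrative example (container names are hypothetical): a branch such as
# "transport.profiles" becomes the class-name fragment "TransportProfiles".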
@classmethod
def _is_container(cls):
return 'Container' in str(cls)
@classmethod
def _is_signal(cls):
return 'Signal' in str(cls)
@classmethod
def _is_axis(cls):
return 'Axis' in str(cls)
@classmethod
def _is_type(cls, obj_type):
method_name = '_is_{}'.format(obj_type.lower())
try:
return getattr(cls, method_name)()
except AttributeError:
return False
def _contains(self, string):
word_list = [s for s in [self._name, self._title] if s]
word_list.extend(self._tags)
return np.any([string.lower() in word.lower() for word in word_list])
def containerClassFactory(module_branch, root=None, shot=None, parent=None):
"""
Factory: build (and cache) the Container subclass for the given module
branch from its XML definition, then return an instance of it.
"""
global _tree_dict
module_branch = module_branch.lower()
module_list = module_branch.split('.')
module = module_list[-1]
branch_str = ''.join([word.capitalize() for word in module_list])
if module_branch not in _tree_dict:
module_path = os.path.join(FDP_DIR,
'diagnostics',
root._name,
*module_list)
xml_filename = module + '.xml'
parse_tree = ET.parse(os.path.join(module_path, xml_filename))
_tree_dict[module_branch] = parse_tree.getroot()
ContainerClassName = 'Container' + branch_str
if ContainerClassName in Container._classes:
ContainerClass = Container._classes[ContainerClassName]
else:
ContainerClass = type(ContainerClassName, (Container,), {})
initContainerClass(ContainerClass,
_tree_dict[module_branch],
root=root,
container=module,
classparent=parent.__class__)
Container._classes[ContainerClassName] = ContainerClass
return ContainerClass(_tree_dict[module_branch],
shot=shot,
parent=parent,
top=True)
|
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
def camel(offset, *args):
parts = []
for a in args:
parts.extend(a.split("-"))
return "".join(parts[:offset] + [p[0].upper() + p[1:] for p in parts[offset:]])
def dromedary(s):
return s[0].lower() + s[1:]
def scream(*args):
return "_".join([a.replace("-", "_").upper() for a in args])
def num(x, default=None):
if x is not None and x != "":
return int(x, 0)
else:
return default
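# Illustrative behaviour of the naming helpers (inputs are hypothetical):
#   camel(0, "queue-declare") -> "QueueDeclare"
#   camel(1, "queue-declare") -> "queueDeclare"
#   dromedary("QueueDeclare") -> "queueDeclare"
#   scream("queue-declare")   -> "QUEUE_DECLARE"
#   num("0x10")               -> 16 (base auto-detected); num(None, 0) -> 0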
def klass(nd):
parent = nd.parent
while parent is not None:
if hasattr(parent, "name") and parent.name == "class":
return parent
parent = parent.parent
untyped = -1
def code(nd):
global untyped
cd = num(nd["@code"])
if cd is None:
cd = untyped
untyped -= 1
return cd
cls = klass(nd)
if cls:
cd |= (num(cls["@code"]) << 8)
return cd
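# Sketch of the code() packing (values are illustrative): a member with
# @code 0x02 inside a class with @code 0x01 yields 0x0102 -- the class code
# occupies the high byte. Members without an @code get a unique negative
# placeholder from the module-level `untyped` counter.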
def root(nd):
if nd.parent is None:
return nd
else:
return root(nd.parent)
def qname(nd):
name = nd["@name"]
cls = klass(nd)
if cls is not None:
return "%s.%s" % (cls["@name"], name)
else:
return name
RESOLVED = {}
def resolve(node, name):
key = (node, name)
if key in RESOLVED:
return RESOLVED[key]
else:
spec = root(node)
cls = klass(node)
if cls:
for nd in cls.query["#tag"]:
if nd["@name"] == name:
RESOLVED[key] = nd
return nd
for nd in spec.query["amqp/#tag"] + spec.query["amqp/class/#tag"]:
if name == qname(nd):
RESOLVED[key] = nd
return nd
raise Exception("unresolved name: %s" % name)
def resolve_type(nd):
if hasattr(nd, "_resolved_type"):
return nd._resolved_type
else:
name = nd["@type"]
type = resolve(nd, name)
if type.name == "domain" and not type["enum"]:
type = resolve_type(type)
nd._resolved_type = type
return type
TYPES = {
"bit": "boolean",
"uint8": "short",
"uint16": "int",
"uint32": "long",
"uint64": "long",
"datetime": "long",
"uuid": "UUID",
"sequence-no": "int",
"sequence-set": "RangeSet", # XXX
"byte-ranges": "RangeSet", # XXX
"str8": "String",
"str16": "String",
"vbin8": "byte[]",
"vbin16": "byte[]",
"vbin32": "byte[]",
"struct32": "Struct",
"map": "Map<String,Object>",
"array": "List<Object>"
}
def cname(nd, field="@name"):
cls = klass(nd)
if cls:
if (nd.name in ("struct", "result") and
cls["@name"] != "session" and
nd[field] != "header"):
return camel(0, nd[field])
else:
return camel(0, cls["@name"], nd[field])
else:
return camel(0, nd[field])
def jtype(nd):
if nd.name == "struct" or nd["enum"]:
return cname(nd)
else:
return TYPES[nd["@name"]]
REFS = {
"boolean": "Boolean",
"byte": "Byte",
"short": "Short",
"int": "Integer",
"long": "Long",
"float": "Float",
"double": "Double",
"char": "Character"
}
def jref(jt):
return REFS.get(jt, jt)
def jclass(jt):
idx = jt.find('<')
if idx > 0:
return jt[:idx]
else:
return jt
DEFAULTS = {
"long": 0,
"int": 0,
"short": 0,
"byte": 0,
"char": 0,
"boolean": "false"
}
class Field:
def __init__(self, index, nd):
self.index = index
self.name = camel(1, nd["@name"])
self.type_node = resolve_type(nd)
if self.type_node.name == "domain":
self.prim_type = resolve_type(self.type_node)
else:
self.prim_type = self.type_node
self.variable_width = num(self.prim_type["@variable-width"], 0)
self.fixed_width = num(self.prim_type["@fixed-width"], 0)
self.empty = self.variable_width == 0 and self.fixed_width == 0 and self.prim_type.name != "struct"
tname = cname(self.type_node)
if self.type_node.name == "struct":
self.read = "(%s) dec.readStruct(%s.TYPE)" % (tname, tname)
self.write = "enc.writeStruct(%s.TYPE, check(struct).%s)" % (tname, self.name)
self.coder = "Struct"
elif self.type_node.name == "domain":
self.coder = camel(0, self.prim_type["@name"])
self.read = "%s.get(dec.read%s())" % (tname, self.coder)
self.write = "enc.write%s(check(struct).%s.getValue())" % (self.coder, self.name)
else:
self.coder = camel(0, self.type_node["@name"])
self.read = "dec.read%s()" % self.coder
self.write = "enc.write%s(check(struct).%s)" % (self.coder, self.name)
self.type = jtype(self.type_node)
self.ref_type = jref(self.type)
self.default = DEFAULTS.get(self.type, "null")
self.has = camel(1, "has", self.name)
self.get = camel(1, "get", self.name)
self.set = camel(1, "set", self.name)
self.clear = camel(1, "clear", self.name)
if self.type == "boolean":
self.option = scream(nd["@name"])
else:
self.option = None
def flag_mask(self, pack):
flag = pack * 8 - 8 - (self.index // 8) * 8 + (self.index % 8)
return 1 << flag
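# Worked example of flag_mask (assuming pack=2): fields 0-7 map to bits in
# the high-order byte (index 0 -> 1 << 8) and fields 8-15 to the low-order
# byte (index 8 -> 1 << 0), with the bit position inside each byte given by
# index % 8.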
def get_fields(nd):
fields = []
index = 0
for f in nd.query["field"]:
fields.append(Field(index, f))
index += 1
return fields
def get_parameters(type, fields):
params = []
options = False
for f in fields:
if f.option:
options = True
else:
params.append("%s %s" % (f.type, f.name))
if type["segments"]:
params.append("Header header")
params.append("ByteBuffer body")
if options or type.name in ("control", "command"):
params.append("Option ... _options")
return params
def get_arguments(type, fields):
args = []
options = False
for f in fields:
if f.option:
options = True
else:
args.append(f.name)
if type["segments"]:
args.append("header")
args.append("body")
if options or type.name in ("control", "command"):
args.append("_options")
return args
def get_options(fields):
return [f for f in fields if f.option]
|
|
"""
Sublime Text Package File Search.
Licensed under MIT
Copyright (c) 2012 Isaac Muse <[email protected]>
"""
import sublime
import re
from os import walk, listdir
from os.path import basename, dirname, isdir, join, normpath, splitext, exists
from fnmatch import fnmatch
import zipfile
__all__ = (
"sublime_package_paths",
"scan_for_packages",
"packagename",
"get_packages",
"get_packages_location",
"get_package_contents",
"PackageSearch"
)
EXCLUDE_PATTERN = re.compile(r"(?:/|^)(?:[^/]*\.(?:pyc|pyo)|\.git|\.svn|\.hg|\.DS_Store)(?=$|/)")
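# The pattern above skips compiled Python files (*.pyc / *.pyo) and
# VCS/OS metadata entries (.git, .svn, .hg, .DS_Store) wherever they appear
# in a path.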
def sublime_package_paths():
"""Get all the locations where plugins live."""
return [
sublime.installed_packages_path(),
join(dirname(sublime.executable_path()), "Packages"),
sublime.packages_path()
]
def scan_for_packages(file_path, archives=False):
"""Look for zipped and unzipped plugins."""
if archives:
plugins = [join(file_path, item) for item in listdir(file_path) if fnmatch(item, "*.sublime-package")]
else:
plugins = [join(file_path, item) for item in listdir(file_path) if isdir(join(file_path, item))]
return plugins
def packagename(pth, normalize=False):
"""Get the package name from the path."""
if isdir(pth):
name = basename(pth)
else:
name = splitext(basename(pth))[0]
return name.lower() if sublime.platform() == "windows" and normalize else name
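# Illustrative examples (paths are hypothetical):
#   packagename("/data/Packages/MyPackage")                           -> "MyPackage"
#   packagename("/data/Installed Packages/MyPackage.sublime-package") -> "MyPackage"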
def get_packages_location():
"""Get all packages. Optionally disable resolving override packages."""
installed_pth, default_pth, user_pth = sublime_package_paths()
installed_pkgs = scan_for_packages(installed_pth, archives=True)
default_pkgs = scan_for_packages(default_pth, archives=True)
user_pkgs = scan_for_packages(user_pth)
return default_pkgs, installed_pkgs, user_pkgs
def get_folder_resources(folder_pkg, pkg_name, content_folders, content_files):
"""Get resources in folder."""
if exists(folder_pkg):
for base, dirs, files in walk(folder_pkg):
file_objs = []
for d in dirs[:]:
if EXCLUDE_PATTERN.search(d) is not None:
dirs.remove(d)
for f in files:
if EXCLUDE_PATTERN.search(f) is None:
file_name = join(base, f).replace(folder_pkg, "Packages/%s" % pkg_name, 1).replace("\\", "/")
file_objs.append(file_name)
content_files.append(file_name)
if len(file_objs) == 0 and len(dirs) == 0:
content_folders.append(base.replace(folder_pkg, "Packages/%s" % pkg_name, 1).replace("\\", "/") + "/")
def in_list(x, l):
"""Find if x (string) is in l (list)."""
found = False
if sublime.platform() == "windows":
for item in l:
if item.lower() == x.lower():
found = True
break
else:
found = x in l
return found
def get_zip_resources(zip_pkg, pkg_name, content_folders, content_files):
"""Get resources in archive that are not already in the lists."""
if exists(zip_pkg):
with zipfile.ZipFile(zip_pkg, 'r') as z:
for item in z.infolist():
file_name = item.filename
if EXCLUDE_PATTERN.search(file_name) is None:
package_name = "Packages/%s/%s" % (pkg_name, file_name)
if package_name.endswith('/'):
if not in_list(package_name, content_folders):
content_folders.append(package_name)
else:
if not in_list(package_name, content_files):
content_files.append(package_name)
def get_package_contents(pkg):
"""Get contents of package."""
m = re.match(r"^Packages/([^/]*)/?$", pkg)
assert(m is not None)
pkg = m.group(1)
installed_pth, default_pth, user_pth = sublime_package_paths()
content_files = []
content_folders = []
get_folder_resources(join(user_pth, pkg), pkg, content_folders, content_files)
get_zip_resources(join(installed_pth, "%s.sublime-package" % pkg), pkg, content_folders, content_files)
get_zip_resources(join(default_pth, "%s.sublime-package" % pkg), pkg, content_folders, content_files)
return content_folders + content_files
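# Hedged usage sketch (the package name is illustrative and must exist):
#   get_package_contents("Packages/MyPackage")
# returns the package's folder entries followed by its file entries, each as
# a "Packages/MyPackage/..." resource path.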
def get_packages():
"""Get the package names."""
installed_pth, default_pth, user_pth = sublime_package_paths()
installed_pkgs = scan_for_packages(installed_pth, archives=True)
default_pkgs = scan_for_packages(default_pth, archives=True)
user_pkgs = scan_for_packages(user_pth)
pkgs = []
for pkg_type in [user_pkgs, installed_pkgs, default_pkgs]:
for pkg in pkg_type:
name = packagename(pkg)
if not in_list(name, pkgs):
pkgs.append(name)
pkgs.sort()
return pkgs
class PackageSearch(object):
"""Search packages."""
def pre_process(self, **kwargs):
"""Preprocess event."""
return kwargs
def on_select(self, value, settings):
"""On select event."""
def process_file(self, value, settings):
"""Handle processing the file."""
################
# Qualify Files
################
def find_files(self, files, file_path, pattern, settings, regex):
"""Find the file that matches the pattern."""
for f in files:
if regex:
if re.match(pattern, f[0], re.IGNORECASE) is not None:
settings.append([f[0].replace(file_path, "").lstrip("\\").lstrip("/"), f[1]])
else:
if fnmatch(f[0], pattern):
settings.append([f[0].replace(file_path, "").lstrip("\\").lstrip("/"), f[1]])
################
# Zipped
################
def walk_zip(self, settings, plugin, pattern, regex):
"""Walk the archived files within the plugin."""
with zipfile.ZipFile(plugin[0], 'r') as z:
zipped = [(join(basename(plugin[0]), normpath(fn)), plugin[1]) for fn in sorted(z.namelist())]
self.find_files(zipped, "", pattern, settings, regex)
def get_zip_packages(self, settings, file_path, package_type, pattern, regex=False):
"""Get all the archived plugins in the plugin folder."""
plugins = [
(join(file_path, item), package_type) for item in listdir(file_path) if fnmatch(item, "*.sublime-package")
]
for plugin in plugins:
self.walk_zip(settings, plugin, pattern.strip(), regex)
def search_zipped_files(self, settings, pattern, regex):
"""Search the plugin folders for archived plugins."""
st_packages = sublime_package_paths()
self.get_zip_packages(settings, st_packages[0], "Installed", pattern, regex)
self.get_zip_packages(settings, st_packages[1], "Default", pattern, regex)
################
# Unzipped
################
def walk(self, settings, file_path, plugin, package_type, pattern, regex=False):
"""Walk the files within the plugin."""
for base, dirs, files in walk(plugin):
files = [(join(base, f), package_type) for f in files]
self.find_files(files, file_path, pattern, settings, regex)
def get_unzipped_packages(self, settings, file_path, package_type, pattern, regex=False):
"""Get all of the plugins in the plugin folder."""
plugins = [join(file_path, item) for item in listdir(file_path) if isdir(join(file_path, item))]
for plugin in plugins:
self.walk(settings, file_path, plugin, package_type, pattern.strip(), regex)
def search_unzipped_files(self, settings, pattern, regex):
"""Search the plugin folders for unzipped packages."""
st_packages = sublime_package_paths()
self.get_unzipped_packages(settings, st_packages[2], "Packages", pattern, regex)
################
# Search All
################
def find_raw(self, pattern, regex=False):
"""Search all packages regardless of whether it is being overridden."""
settings = []
self.search_unzipped_files(settings, pattern, regex)
self.zipped_idx = len(settings)
self.search_zipped_files(settings, pattern, regex)
self.window.show_quick_panel(
settings,
lambda x: self.process_file(x, settings=settings)
)
################
# Search Override
################
def find(self, pattern, regex):
"""Search just the active packages. Not the ones that have been overridden."""
resources = []
if not regex:
resources = sublime.find_resources(pattern)
else:
temp = sublime.find_resources("*")
for t in temp:
if re.match(pattern, t, re.IGNORECASE) is not None:
resources.append(t)
self.window.show_quick_panel(
resources,
lambda x: self.process_file(x, settings=resources),
0,
0,
lambda x: self.on_select(x, settings=resources)
)
def search(self, **kwargs):
"""Search packages."""
kwargs = self.pre_process(**kwargs)
pattern = kwargs.get("pattern", None)
regex = kwargs.get("regex", False)
self.find_all = kwargs.get("find_all", False)
if not self.find_all:
self.find(pattern, regex)
else:
self.find_raw(pattern, regex)
|
|
from __future__ import absolute_import
__author__ = 'chris'
import json
import errno
import os
import re
import sys
import six
import uuid
import traceback
from operator import itemgetter
from collections import OrderedDict, defaultdict
from pkg_resources import parse_version
from django.conf import settings
from django.db import transaction
from django.db.utils import OperationalError
from django.core.files.storage import default_storage
from django.core.files import File
from django.utils.translation import ugettext_lazy as _
from django.db.models import Q
from celery.contrib import rdb
# Python2.7 encoding= support
from io import open
from clinto.parser import Parser
from .. import settings as wooey_settings
def sanitize_name(name):
return name.replace(' ', '_').replace('-', '_')
def sanitize_string(value):
return value.replace('"', '\\"')
def get_storage(local=True):
if wooey_settings.WOOEY_EPHEMERAL_FILES:
storage = default_storage.local_storage if local else default_storage
else:
storage = default_storage
return storage
def purge_output(job=None):
from ..models import WooeyFile
# cleanup the old files, we need to be somewhat aggressive here.
local_storage = get_storage(local=True)
for dj_file in WooeyFile.objects.filter(job=job):
if dj_file.parameter is None or dj_file.parameter.parameter.is_output:
wooey_file = dj_file.filepath.name
# this will delete the default file -- which if we are using an ephemeral file system will be the
# remote instance
dj_file.filepath.delete(False)
dj_file.delete()
# check our local storage and remove it if it is there as well
path = local_storage.path(wooey_file)
if local_storage.exists(path):
local_storage.delete(path)
def get_job_commands(job=None):
script_version = job.script_version
com = [sys.executable] if sys.executable else []
com.extend([script_version.get_script_path()])
parameters = job.get_parameters()
param_dict = OrderedDict()
for param in parameters:
subproc_dict = param.get_subprocess_value()
if subproc_dict is None:
continue
subproc_param = subproc_dict['parameter']
if subproc_param not in param_dict:
param_dict[subproc_param] = []
subproc_value = subproc_dict.get('value', None)
if subproc_value:
param_dict[subproc_param].append(subproc_value)
for param, values in param_dict.items():
if param and not values:
com.append(param)
else:
if param:
com.append(param)
for value in values:
com.append(value)
return com
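# Illustrative result (paths and flags are hypothetical): get_job_commands
# returns something like
#   ['/usr/bin/python', '/scripts/my_script.py', '--input', 'data.txt']
# i.e. the interpreter, the script path, then each parameter flag followed by
# its value(s).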
@transaction.atomic
def create_wooey_job(user=None, script_version_pk=None, data=None):
from ..models import Script, WooeyJob, ScriptParameter, ScriptParameters, ScriptVersion
script_version = ScriptVersion.objects.select_related('script').get(pk=script_version_pk)
if data is None:
data = {}
job = WooeyJob(user=user, job_name=data.pop('job_name', None), job_description=data.pop('job_description', None),
script_version=script_version)
job.save()
parameters = OrderedDict([(i.slug, i) for i in ScriptParameter.objects.filter(slug__in=data.keys()).order_by('pk')])
for slug, param in six.iteritems(parameters):
slug_values = data.get(slug)
slug_values = slug_values if isinstance(slug_values, list) else [slug_values]
for slug_value in slug_values:
new_param = ScriptParameters(job=job, parameter=param)
new_param.value = slug_value
new_param.save()
return job
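# Hedged usage sketch (the user, pk, slugs and values are hypothetical and
# must match existing ScriptParameter slugs):
#   job = create_wooey_job(user=request.user, script_version_pk=sv.pk,
#                          data={'job_name': 'My job', 'input-file': 'data.txt'})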
def get_master_form(script_version=None, pk=None):
from ..forms.factory import DJ_FORM_FACTORY
return DJ_FORM_FACTORY.get_master_form(script_version=script_version, pk=pk)
def get_form_groups(script_version=None, pk=None, initial_dict=None, render_fn=None):
from ..forms.factory import DJ_FORM_FACTORY
return DJ_FORM_FACTORY.get_group_forms(script_version=script_version, pk=pk, initial_dict=initial_dict, render_fn=render_fn)
def reset_form_factory(script_version=None):
from ..forms.factory import DJ_FORM_FACTORY
DJ_FORM_FACTORY.reset_forms(script_version=script_version)
def validate_form(form=None, data=None, files=None):
form.add_wooey_fields()
form.data = data if data is not None else {}
form.files = files if files is not None else {}
form.is_bound = True
form.full_clean()
def get_current_scripts():
from ..models import ScriptVersion
try:
scripts = ScriptVersion.objects.count()
except OperationalError:
# database not initialized yet
return
# get the scripts with default version
scripts = ScriptVersion.objects.select_related('script').filter(default_version=True)
# scripts for which we still need to determine the default version
non_default_scripts = ScriptVersion.objects.filter(default_version=False).exclude(script__in=[i.script for i in scripts])
script_versions = defaultdict(list)
for sv in non_default_scripts:
try:
version_string = parse_version(str(sv.script_version))
except:
sys.stderr.write('Error converting script version:\n{}'.format(traceback.format_exc()))
version_string = sv.script_version
script_versions[sv.script.script_name].append((version_string, sv.script_iteration, sv))
[script_versions[i].sort(key=itemgetter(0, 1, 2), reverse=True) for i in script_versions]
scripts = [i.script for i in scripts]
if script_versions:
for script_version_info in script_versions.values():
new_scripts = ScriptVersion.objects.select_related('script').filter(pk__in=[i[2].pk for i in script_version_info])
scripts.extend([i.script for i in new_scripts])
return scripts
def get_storage_object(path, local=False):
storage = get_storage(local=local)
obj = storage.open(path)
obj.url = storage.url(path)
obj.path = storage.path(path)
return obj
def add_wooey_script(script_version=None, script_path=None, group=None):
# There is a class called 'Script' which contains the general information about a script. However, that is not where the file details
# of the script lie. That is the ScriptVersion model. This allows the end user to tag a script as a favorite/etc. and set
# information such as script descriptions/names that do not constantly need to be updated with every version change. Thus,
# a ScriptVersion stores the file info and such.
from ..models import Script, ScriptGroup, ScriptParameter, ScriptParameterGroup, ScriptVersion
# if we are adding through the admin, at this point the file will be saved already and this method will be receiving
# the scriptversion object. Otherwise, we are adding through the management command. In this case, the file will be
# a location and we need to setup the Script and ScriptVersion in here.
local_storage = get_storage(local=True)
if script_version is not None:
# we are updating the script here or creating it through the admin
# we need to move the script to the wooey scripts directory now
# handle remotely first, because by default scripts will be saved remotely if we are using an
# ephemeral file system
old_name = script_version.script_path.name
new_name = os.path.normpath(os.path.join(wooey_settings.WOOEY_SCRIPT_DIR, old_name) if not old_name.startswith(wooey_settings.WOOEY_SCRIPT_DIR) else old_name)
current_storage = get_storage(local=not wooey_settings.WOOEY_EPHEMERAL_FILES)
current_file = current_storage.open(old_name)
if current_storage.exists(new_name):
new_name = current_storage.get_available_name(new_name)
new_path = current_storage.save(new_name, current_file)
# remove the old file
if old_name != new_name:
current_storage.delete(old_name)
script_version._rename_script = True
script_version.script_path.name = new_name
script_version.save()
# download the script locally if it doesn't exist
if not local_storage.exists(new_path):
new_path = local_storage.save(new_path, current_file)
script = get_storage_object(new_path, local=True).path
local_file = local_storage.open(new_path).name
else:
# we got a path, if we are using a remote file system, it will be located remotely by default
# make sure we have it locally as well
if wooey_settings.WOOEY_EPHEMERAL_FILES:
remote_storage = get_storage(local=False)
remote_file = remote_storage.open(script_path)
local_file = local_storage.save(script_path, remote_file)
else:
local_file = local_storage.open(script_path).name
script = get_storage_object(local_file, local=True).path
if isinstance(group, ScriptGroup):
group = group.group_name
if group is None:
group = 'Wooey Scripts'
basename, extension = os.path.splitext(script)
filename = os.path.split(basename)[1]
parser = Parser(script_name=filename, script_path=local_storage.path(local_file))
if not parser.valid:
return {'valid': False, 'errors': parser.error}
# make our script
d = parser.get_script_description()
script_group, created = ScriptGroup.objects.get_or_create(group_name=group)
version_string = d.get('version')
if version_string is None:
version_string = '1'
try:
parse_version(version_string)
except:
sys.stderr.write('Error parsing version, defaulting to 1. Error message:\n {}'.format(traceback.format_exc()))
version_string = '1'
if script_version is None:
# we are being loaded from the management command, create/update our script/version
script_kwargs = {'script_group': script_group, 'script_name': d['name']}
version_kwargs = {'script_version': version_string, 'script_path': local_file, 'default_version': True}
# does this script already exist in the database?
script_created = Script.objects.filter(**script_kwargs).count() == 0
if script_created:
# we are creating it, add the description if we can
script_kwargs.update({'script_description': d['description']})
wooey_script = Script(**script_kwargs)
wooey_script._script_cl_creation = True
wooey_script.save()
version_kwargs.update({'script_iteration': 1})
else:
# we're updating it
wooey_script = Script.objects.get(**script_kwargs)
if not wooey_script.script_description and d['description']:
wooey_script.script_description = d['description']
wooey_script.save()
# check if we have the version in our script version
current_versions = ScriptVersion.objects.filter(script=wooey_script, script_version=version_string)
if current_versions.count() == 0:
next_iteration = 1
# disable older versions
ScriptVersion.objects.filter(script=wooey_script, script_version=version_string).update(default_version=False)
else:
# get the largest iteration and add 1 to it
next_iteration = sorted([i.script_iteration for i in current_versions])[-1]+1
version_kwargs.update({'script_iteration': next_iteration})
version_kwargs.update({'script': wooey_script})
script_version = ScriptVersion(**version_kwargs)
script_version._script_cl_creation = True
script_version.save()
else:
# we are being created/updated from the admin
if not script_version.script.script_description:
script_version.script.script_description = d['description']
if not script_version.script.script_name:
script_version.script.script_name = d['name']
past_versions = ScriptVersion.objects.filter(script=script_version.script, script_version=version_string).exclude(pk=script_version.pk)
script_version.script_iteration = past_versions.count()+1
past_versions.update(default_version=False)
script_version.default_version = True
script_version.script.save()
script_version.save()
# make our parameters
for param_group_info in d['inputs']:
param_group, created = ScriptParameterGroup.objects.get_or_create(group_name=param_group_info.get('group'), script_version=script_version)
for param in param_group_info.get('nodes'):
# TODO: fix 'file' to be global in argparse
is_out = True if param.get('upload', None) is False and param.get('type') == 'file' else not param.get('upload', False)
script_param, created = ScriptParameter.objects.get_or_create(script_version=script_version, short_param=param['param'], script_param=param['name'],
is_output=is_out, required=param.get('required', False),
form_field=param['model'], default=param.get('default'), input_type=param.get('type'),
choices=json.dumps(param.get('choices')), choice_limit=json.dumps(param.get('choice_limit', 1)),
param_help=param.get('help'), is_checked=param.get('checked', False),
parameter_group=param_group)
return {'valid': True, 'errors': None, 'script': script_version}
def valid_user(obj, user):
ret = {'valid': False, 'error': '', 'display': ''}
from ..models import Script
groups = obj.user_groups.all()
if wooey_settings.WOOEY_ALLOW_ANONYMOUS or user.is_authenticated():
if isinstance(obj, Script):
from itertools import chain
groups = list(chain(groups, obj.script_group.user_groups.all()))
if not user.is_authenticated() and wooey_settings.WOOEY_ALLOW_ANONYMOUS and len(groups) == 0:
ret['valid'] = True
elif groups:
ret['error'] = _('You are not permitted to use this script')
if not groups and obj.is_active:
ret['valid'] = True
if obj.is_active is True:
if set(list(user.groups.all())) & set(list(groups)):
ret['valid'] = True
ret['display'] = 'disabled' if wooey_settings.WOOEY_SHOW_LOCKED_SCRIPTS else 'hide'
return ret
def mkdirs(path):
try:
os.makedirs(path)
except OSError as exc:
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
def get_file_info(filepath):
# returns info about the file
filetype, preview = False, None
tests = [('tabular', test_delimited), ('fasta', test_fastx)]
while filetype is False and tests:
ptype, pmethod = tests.pop()
filetype, preview = pmethod(filepath)
filetype = ptype if filetype else filetype
preview = None if filetype is False else preview
filetype = None if filetype is False else filetype
try:
json_preview = json.dumps(preview)
except:
sys.stderr.write('Error encountered in file preview:\n {}\n'.format(traceback.format_exc()))
json_preview = json.dumps(None)
return {'type': filetype, 'preview': json_preview}
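# The return value is a dict such as {'type': 'tabular', 'preview': '[...]'}
# or {'type': None, 'preview': 'null'} when neither test matches; 'preview'
# is always a JSON-encoded string.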
def test_delimited(filepath):
import csv
if six.PY3:
handle = open(filepath, 'r', newline='')
else:
handle = open(filepath, 'rb')
with handle as csv_file:
try:
dialect = csv.Sniffer().sniff(csv_file.read(1024*16), delimiters=',\t')
except Exception as e:
return False, None
csv_file.seek(0)
reader = csv.reader(csv_file, dialect)
rows = []
try:
for index, entry in enumerate(reader):
rows.append(entry)
except Exception as e:
return False, None
# If > 10 rows, generate preview by slicing top and bottom 5
# ? this might not be a great idea for massive files
if len(rows) > 10:
rows = rows[:5] + [None] + rows[-5:]
# FIXME: This should be more intelligent:
# for small files (<1000 rows?) we should take top and bottom preview 10
# for large files we should give up and present top 10 (11)
# same rules should apply to columns: this will require us to discard them as they're read
return True, rows
def test_fastx(filepath):
# if we can be delimited by + or > we're maybe a fasta/q
with open(filepath, encoding='latin-1') as fastx_file:
sequences = OrderedDict()
seq = []
header = ''
found_caret = False
for row_index, row in enumerate(fastx_file, 1):
if row_index > 30:
break
if not row.strip():
continue
if found_caret is False and row[0] != '>':
if row[0] == ';':
continue
break
elif found_caret is False and row[0] == '>':
found_caret = True
if row and row[0] == '>':
if seq:
sequences[header] = ''.join(seq)
seq = []
header = row
elif row:
# we bundle the fastq stuff in here since it's just a visual
seq.append(row)
if seq and header:
sequences[header] = ''.join(seq)
if sequences:
rows = []
[rows.extend([i, v]) for i, v in six.iteritems(sequences)]
return True, rows
return False, None
def create_job_fileinfo(job):
parameters = job.get_parameters()
from ..models import WooeyFile
# first, create references to files that were passed or produced via an explicit file parameter
files = []
for field in parameters:
try:
if field.parameter.form_field == 'FileField':
value = field.value
if value is None:
continue
local_storage = get_storage(local=True)
if isinstance(value, six.string_types):
# check if this was ever created and make a fileobject if so
if local_storage.exists(value):
if not get_storage(local=False).exists(value):
get_storage(local=False).save(value, File(local_storage.open(value)))
value = field.value
else:
field.force_value(None)
try:
with transaction.atomic():
field.save()
except:
sys.stderr.write('{}\n'.format(traceback.format_exc()))
continue
d = {'parameter': field, 'file': value}
files.append(d)
except ValueError:
continue
known_files = {i['file'].name for i in files}
# add the user_output files, these are things which may be missed by the model fields because the script
# generated them without an explicit arguments reference in the script
file_groups = {'archives': []}
absbase = os.path.join(settings.MEDIA_ROOT, job.save_path)
for filename in os.listdir(absbase):
new_name = os.path.join(job.save_path, filename)
if any([i.endswith(new_name) for i in known_files]):
continue
try:
filepath = os.path.join(absbase, filename)
if os.path.isdir(filepath):
continue
full_path = os.path.join(job.save_path, filename)
try:
storage_file = get_storage_object(full_path)
except:
sys.stderr.write('Error in accessing stored file:\n{}'.format(traceback.format_exc()))
continue
d = {'name': filename, 'file': storage_file, 'size_bytes': storage_file.size}
if filename.endswith('.tar.gz') or filename.endswith('.zip'):
file_groups['archives'].append(d)
else:
files.append(d)
except IOError:
sys.stderr.write('{}'.format(traceback.format_exc()))
continue
# establish grouping by inferring common things
file_groups['all'] = files
import imghdr
file_groups['image'] = []
for filemodel in files:
if imghdr.what(filemodel['file'].path):
file_groups['image'].append(filemodel)
file_groups['tabular'] = []
file_groups['fasta'] = []
for filemodel in files:
fileinfo = get_file_info(filemodel['file'].path)
filetype = fileinfo.get('type')
if filetype is not None:
file_groups[filetype].append(dict(filemodel, **{'preview': fileinfo.get('preview')}))
else:
filemodel['preview'] = json.dumps(None)
# Create our WooeyFile models
# mark files that already belong to a specific group so they are not duplicated in the 'all' category
grouped = set([i['file'].path for file_type, groups in six.iteritems(file_groups) for i in groups if file_type != 'all'])
for file_type, group_files in six.iteritems(file_groups):
for group_file in group_files:
if file_type == 'all' and group_file['file'].path in grouped:
continue
try:
preview = group_file.get('preview')
size_bytes = group_file.get('size_bytes')
dj_file = WooeyFile(job=job, filetype=file_type, filepreview=preview, size_bytes=size_bytes,
parameter=group_file.get('parameter'))
filepath = group_file['file'].path
save_path = job.get_relative_path(filepath)
dj_file.filepath.name = save_path
try:
with transaction.atomic():
dj_file.save()
except:
sys.stderr.write('Error in saving DJFile: {}\n'.format(traceback.format_exc()))
except:
sys.stderr.write('Error in saving DJFile: {}\n'.format(traceback.format_exc()))
continue
def get_grouped_file_previews(files):
groups = {'all': []}
for file_info in files:
filedict = {'id': file_info.id,
'object': file_info,
'name': file_info.filepath.name,
'preview': json.loads(file_info.filepreview) if file_info.filepreview else None,
'url': get_storage(local=False).url(file_info.filepath.name),
'slug': file_info.parameter.parameter.script_param if file_info.parameter else None,
'basename': os.path.basename(file_info.filepath.name),
'filetype': file_info.filetype,
'size_bytes': file_info.size_bytes,
}
try:
groups[file_info.filetype].append(filedict)
except KeyError:
groups[file_info.filetype] = [filedict]
if file_info.filetype != 'all':
groups['all'].append(filedict)
return groups
def get_file_previews(job):
from ..models import WooeyFile
files = WooeyFile.objects.filter(job=job)
return get_grouped_file_previews(files)
def get_file_previews_by_ids(ids):
from ..models import WooeyFile
files = WooeyFile.objects.filter(pk__in=ids)
return get_grouped_file_previews(files)
def normalize_query(query_string,
findterms=re.compile(r'"([^"]+)"|(\S+)').findall,
normspace=re.compile(r'\s{2,}').sub):
"""
Split the query string into individual keywords, discarding spaces
and grouping quoted words together.
>>> normalize_query(' some random words "with quotes " and spaces')
['some', 'random', 'words', 'with quotes', 'and', 'spaces']
"""
return [normspace(' ', (t[0] or t[1]).strip()) for t in findterms(query_string)]
def get_query(query_string, search_fields):
"""
Returns a query as a combination of Q objects that query the specified
search fields.
"""
query = None # Query to search for every search term
terms = normalize_query(query_string)
for term in terms:
or_query = None # Query to search for a given term in each field
for field_name in search_fields:
q = Q(**{"%s__icontains" % field_name: term})
if or_query is None:
or_query = q
else:
or_query = or_query | q
if query is None:
query = or_query
else:
query = query & or_query
if query is None:
query = Q()
return query
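# Illustrative expansion (field names are hypothetical):
#   get_query('foo "bar baz"', ['name', 'description'])
# builds (Q(name__icontains='foo') | Q(description__icontains='foo')) &
#        (Q(name__icontains='bar baz') | Q(description__icontains='bar baz'))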
|
|
# -*- coding: utf-8 -*-
import sys
exceptions = None
if sys.version_info[0] >= 3:
exceptions = (ImportError, ModuleNotFoundError)
else:
exceptions = ImportError
try:
# for Fusion 6 and 7
import PeyeonScript as bmf
except exceptions:
# for Fusion 8+
try:
# for Fusion inside Resolve
import BlackmagicFusion as bmf
except exceptions:
from anima.dcc import blackmagic as bmd
bmf = bmd.get_bmd()
from anima import logger
from anima.dcc import empty_reference_resolution
from anima.dcc.base import DCCBase
from anima.recent import RecentFileManager
class Fusion(DCCBase):
"""the fusion DCC class"""
name = "Fusion"
extensions = [".comp"]
fusion_formats = {
"Multimedia": {
"id": 0,
"Width": 320,
"Height": 240,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 15.0,
},
"NTSC (D1)": {
"id": 1,
"Width": 720,
"Height": 486,
"AspectX": 0.9,
"AspectY": 1.0,
"Rate": 29.97,
},
"NTSC (DV)": {
"id": 2,
"Width": 720,
"Height": 480,
"AspectX": 0.9,
"AspectY": 1.0,
"Rate": 29.97,
},
"NTSC (Perception)": {
"id": 3,
"Width": 720,
"Height": 480,
"AspectX": 0.9,
"AspectY": 1.0,
"Rate": 29.97,
},
"NTSC (Square Pixel)": {
"id": 4,
"Width": 640,
"Height": 480,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 29.97,
},
"NTSC 16:9": {
"id": 5,
"Width": 720,
"Height": 486,
"AspectX": 1.2,
"AspectY": 1.0,
"Rate": 29.97,
},
"PAL / SECAM (D1)": {
"id": 6,
"Width": 720,
"Height": 576,
"AspectX": 1.0,
"AspectY": 0.9375,
"Rate": 25,
},
"PAL / SECAM (Square Pixel)": {
"id": 7,
"Width": 768,
"Height": 576,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 25,
},
"PALplus 16:9": {
"id": 8,
"Width": 720,
"Height": 576,
"AspectX": 1.0,
"AspectY": 0.703125,
"Rate": 25,
},
"HDTV 720": {
"id": 9,
"Width": 1280,
"Height": 720,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 30,
},
"HDTV 1080": {
"id": 10,
"Width": 1920,
"Height": 1080,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 30,
},
"D16": {
"id": 11,
"Width": 2880,
"Height": 2304,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24,
},
"2K Full Aperture (Super 35)": {
"id": 12,
"Width": 2048,
"Height": 1556,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24,
},
"4K Full Aperture (Super 35)": {
"id": 13,
"Width": 4096,
"Height": 3112,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24,
},
"2K Academy (Regular 35)": {
"id": 14,
"Width": 1828,
"Height": 1332,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24,
},
"4K Academy (Regular 35)": {
"id": 15,
"Width": 3656,
"Height": 2664,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24,
},
"2K Academy in Full Aperture": {
"id": 16,
"Width": 2048,
"Height": 1556,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24,
},
"4K Academy in Full Aperture": {
"id": 17,
"Width": 4096,
"Height": 3112,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24,
},
"2K Anamorphic (CinemaScope)": {
"id": 18,
"Width": 1828,
"Height": 1556,
"AspectX": 2.0,
"AspectY": 1.0,
"Rate": 24,
},
"4K Anamorphic (CinemaScope)": {
"id": 19,
"Width": 3656,
"Height": 3112,
"AspectX": 2.0,
"AspectY": 1.0,
"Rate": 24,
},
"2K 1.85": {
"id": 20,
"Width": 1828,
"Height": 988,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24,
},
"4K 1.85": {
"id": 21,
"Width": 3656,
"Height": 1976,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24,
},
"3K VistaVision": {
"id": 22,
"Width": 3072,
"Height": 2048,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24,
},
"6K VistaVision": {
"id": 23,
"Width": 6144,
"Height": 4096,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24,
},
"5K IMAX 70mm": {
"id": 24,
"Width": 5464,
"Height": 4096,
"AspectX": 1.0,
"AspectY": 1.0,
"Rate": 24,
},
}
def __init__(self, name="", version=None):
"""fusion specific init"""
super(Fusion, self).__init__(name=name, version=version)
# and add you own modifications to __init__
# self.fusion = bmd.scriptapp("Fusion")
# self.fusion = bmd.get_fusion()
self.fusion = bmf.scriptapp("Fusion")
self.fusion_prefs = self.fusion.GetPrefs()["Global"]
# update name with version
self.name = "Fusion%s" % self.fusion.GetAttrs("FUSIONS_Version").split(".")[0]
self.comp = self.fusion.GetCurrentComp()
self.comp_prefs = self.comp.GetPrefs()["Comp"]
self._main_output_node_name = "Main_Output"
def save_as(self, version, run_pre_publishers=True):
"""the save action for fusion DCC
uses Fusions own python binding
"""
# set the extension to '.comp'
# refresh the current comp
self.comp = self.fusion.GetCurrentComp()
from stalker import Version
assert isinstance(version, Version)
# its a new version please update the paths
version.update_paths()
version.extension = self.extensions[0]
version.created_with = self.name
# set project_directory
import os
self.project_directory = os.path.dirname(version.absolute_path)
# set range from the shot
self.set_range_from_shot(version)
# create the main write node
self.create_main_saver_node(version)
# replace read and write node paths
# self.replace_external_paths()
# create the path before saving
try:
os.makedirs(version.absolute_path)
except OSError:
# path already exists OSError
pass
version_full_path = os.path.normpath(version.absolute_full_path)
self.comp.Lock()
self.comp.Save(
version_full_path
if sys.version_info[0] >= 3
else version_full_path.encode()
)
self.comp.Unlock()
# create a local copy
self.create_local_copy(version)
rfm = RecentFileManager()
rfm.add(self.name, version.absolute_full_path)
return True
def set_range_from_shot(self, version):
"""sets the frame range from the Shot entity if this version is related
to one.
:param version:
:return:
"""
# check if this is a shot related task
shot = self.get_shot(version)
if shot:
# use the shot image_format
fps = shot.fps
imf = shot.image_format
# set frame ranges
self.set_frame_range(
start_frame=shot.cut_in,
end_frame=shot.cut_out,
)
else:
# use the Project image_format
fps = version.task.project.fps
imf = version.task.project.image_format
# set comp resolution and fps
if imf:
self.comp.SetPrefs(
{
# Image Format
"Comp.FrameFormat.Width": imf.width,
"Comp.FrameFormat.Height": imf.height,
"Comp.FrameFormat.AspectY": imf.pixel_aspect,
"Comp.FrameFormat.AspectX": imf.pixel_aspect,
# FPS
"Comp.FrameFormat.Rate": fps,
# set project frame format to 16bit
"Comp.FrameFormat.DepthFull": 2.0,
"Comp.FrameFormat.DepthLock": True,
}
)
def set_shot_from_range(self, version):
"""sets the Shot.cut_in and Shot.cut_out attributes from the current frame range if the current task is related
to a Stalker Shot instance.
:param Stalker.Version version: A Stalker Version instance.
:return:
"""
# check if this is a shot related task
is_shot_related_task = False
shot = None
from stalker import Shot
for task in version.task.parents:
if isinstance(task, Shot):
is_shot_related_task = True
shot = task
break
if is_shot_related_task and shot:
# set frame ranges
cut_in, cut_out = self.get_frame_range()
shot.cut_in = int(cut_in)
shot.cut_out = int(cut_out)
from stalker.db.session import DBSession
DBSession.add(shot)
DBSession.commit()
def export_as(self, version):
"""the export action for nuke DCC"""
# its a new version please update the paths
version.update_paths()
# set the extension to '.comp'
version.extension = self.extensions[0]
version.created_with = self.name
raise NotImplementedError("export_as() is not implemented yet for Fusion")
# # create a local copy
# self.create_local_copy(version)
def open(
self,
version,
force=False,
representation=None,
reference_depth=0,
skip_update_check=False,
):
"""the open action for nuke DCC"""
import os
version_full_path = os.path.normpath(version.absolute_full_path)
# # delete all the comps and open new one
# comps = self.fusion.GetCompList().values()
# for comp_ in comps:
# comp_.Close()
self.fusion.LoadComp(
version_full_path
if sys.version_info[0] >= 3
else version_full_path.encode()
)
self.comp.Lock()
# set the project_directory
# get the current comp first
self.comp = self.fusion.GetCurrentComp()
self.project_directory = os.path.dirname(version.absolute_path)
# update the savers
self.create_main_saver_node(version)
# file paths coming from other operating systems should be replaced with paths suitable for the current one
# update loaders
self.fix_loader_paths()
self.comp.Unlock()
rfm = RecentFileManager()
rfm.add(self.name, version.absolute_full_path)
# return True to specify everything was ok and an empty list
# for the versions those needs to be updated
return empty_reference_resolution()
def import_(self, version):
"""the import action for nuke DCC"""
# nuke.nodePaste(version.absolute_full_path)
return True
def get_current_version(self):
"""Finds the Version instance from the current open file.
If it can't find any then returns None.
:return: :class:`~oyProjectManager.models.version.Version`
"""
# full_path = self._root.knob('name').value()
import os
full_path = os.path.normpath(self.comp.GetAttrs()["COMPS_FileName"]).replace(
"\\", "/"
)
return self.get_version_from_full_path(full_path)
def get_version_from_recent_files(self):
"""It will try to create a
:class:`~oyProjectManager.models.version.Version` instance by looking
at the recent files list.
It will return None if it can not find one.
:return: :class:`~oyProjectManager.models.version.Version`
"""
# full_path = self.fusion_prefs["LastCompFile"]
# return self.get_version_from_full_path(full_path)
version = None
rfm = RecentFileManager()
try:
recent_files = rfm[self.name]
except KeyError:
logger.debug("no recent files")
recent_files = None
if recent_files is not None:
for i in range(len(recent_files)):
version = self.get_version_from_full_path(recent_files[i])
if version is not None:
break
logger.debug("version from recent files is: %s" % version)
return version
def get_version_from_project_dir(self):
"""Tries to find a Version from the current project directory
:return: :class:`~oyProjectManager.models.version.Version`
"""
versions = self.get_versions_from_path(self.project_directory)
version = None
if versions and len(versions):
version = versions[0]
return version
def get_last_version(self):
"""gets the file name from fusion"""
version = self.get_current_version()
# read the recent file list
if version is None:
version = self.get_version_from_recent_files()
# get the latest possible Version instance by using the workspace path
if version is None:
version = self.get_version_from_project_dir()
return version
def get_frame_range(self):
"""returns the current frame range"""
start_frame = self.comp.GetAttrs()["COMPN_RenderStart"]
end_frame = self.comp.GetAttrs()["COMPN_RenderEnd"]
return start_frame, end_frame
def set_frame_range(self, start_frame=1, end_frame=100, adjust_frame_range=False):
"""sets the start and end frame range"""
self.comp.SetAttrs(
{
"COMPN_GlobalStart": start_frame,
"COMPN_RenderStart": start_frame,
"COMPN_GlobalEnd": end_frame,
"COMPN_RenderEnd": end_frame,
}
)
def set_fps(self, fps=25):
"""sets the current fps"""
pass
def get_fps(self):
"""returns the current fps"""
return None
def fix_loader_paths(self):
"""fixes loader paths mainly from one OS to another"""
import os
# get all loaders
for loader in self.comp.GetToolList(False, "Loader").values():
path = self.get_node_input_entry_value_by_name(loader, "Clip")
if os.path.sep not in path:
# normalize both '/' and '\\' separators to os.path.sep
path = path.replace("/", "\\").replace("\\", os.path.sep)
# TODO: Also replace absolute paths with proper paths for the current OS
self.set_node_input_entry_by_name(loader, "Clip", path)
def get_node_input_entry_by_name(self, node, key):
"""returns the Input List entry by input list entry name
:param node: The node
:param string key: The entry name
:return:
"""
node_input_list = node.GetInputList()
for input_entry_key in node_input_list.keys():
input_entry = node_input_list[input_entry_key]
input_id = input_entry.GetAttrs()["INPS_ID"]
if input_id == key:
return input_entry
def get_node_input_entry_value_by_name(self, node, key):
"""returns the Input List entry by input list entry name
:param node: The node
:param string key: The entry name
:return:
"""
input_entry = self.get_node_input_entry_by_name(node, key)
return input_entry[0]
def set_node_input_entry_by_name(self, node, key, value):
"""sets the Input List entry value by Input ID
:param node: The node
:param string key: The INPS_ID of the entry
:param value: The value
:return:
"""
input_entry = self.get_node_input_entry_by_name(node, key)
input_entry[0] = value
def get_main_saver_node(self):
"""Returns the main saver nodes in the scene or an empty list.
:return: list
"""
# list all the saver nodes in the current file
all_saver_nodes = self.comp.GetToolList(False, "Saver").values()
saver_nodes = []
for saver_node in all_saver_nodes:
if saver_node.GetAttrs("TOOLS_Name").startswith(
self._main_output_node_name
):
saver_nodes.append(saver_node)
return saver_nodes
def create_node_tree(self, node_tree):
"""Creates a node tree from the given node tree.
The node_tree is a Python dictionary showing node types and attribute
values. Also it can be a list of dictionaries to create more complex
trees.
Each node_tree can create only one shading network. The format of the
dictionary should be as follows.
node_tree: {
'type': <- The Fusion node type of the topmost node
'attr': {
<- A dictionary that contains attribute names and values.
'Input': {
'type': --- type name of the connected node
'attr': {
<- attribute values ->
}
}
},
}
:param [dict, list] node_tree: A dictionary showing the node tree
attributes.
:return:
"""
# allow it to accept both a list or dict
if isinstance(node_tree, list):
created_root_nodes = []
for item in node_tree:
created_root_nodes.append(self.create_node_tree(item))
return created_root_nodes
node_type = node_tree["type"]
self.comp.Lock()
node = self.comp.AddTool(node_type)
self.comp.Unlock()
# attributes
if "attr" in node_tree:
attributes = node_tree["attr"]
for key in attributes:
value = attributes[key]
if isinstance(value, dict):
new_node = self.create_node_tree(value)
node.Input = new_node
else:
node.SetAttrs({key: value})
# input lists
if "input_list" in node_tree:
input_list = node_tree["input_list"]
for key in input_list:
node_input_list = node.GetInputList()
for input_entry_key in node_input_list.keys():
input_entry = node_input_list[input_entry_key]
input_id = input_entry.GetAttrs()["INPS_ID"]
if input_id == key:
value = input_list[key]
input_entry[0] = value
break
# ref_id
if "ref_id" in node_tree:
node.SetData("ref_id", node_tree["ref_id"])
# connected to
if "connected_to" in node_tree:
connected_to = node_tree["connected_to"]
if "Input" in connected_to:
input_node = self.create_node_tree(connected_to["Input"])
node.Input = input_node
elif "ref_id" in node_tree["connected_to"]:
ref_id = node_tree["connected_to"]["ref_id"]
print("ref_id: %s" % ref_id)
# find a node with ref_id equals to ref_id that is given in the
# node tree
all_nodes = self.comp.GetToolList().values()
for r_node in all_nodes:
node_ref_id = r_node.GetData("ref_id")
print("node_ref_id: %s" % node_ref_id)
if node_ref_id == ref_id:
node.Input = r_node
break
return node
def output_path_generator(self, version, file_format):
"""helper function to generate the output path
:param version: Stalker Version instance
:param str file_format: A string showing the file format. Ex: tga, exr
etc.
:return:
"""
# generate the data needed
# the output path
file_name_buffer = []
template_kwargs = {}
# if this is a shot related task set it to shots resolution
version_sig_name = self.get_significant_name(
version, include_project_code=False
)
file_name_buffer.append("%(version_sig_name)s.001.%(format)s")
template_kwargs.update(
{"version_sig_name": version_sig_name, "format": file_format}
)
output_file_name = "".join(file_name_buffer) % template_kwargs
# check if it is a stereo comp
# if it is enable separate view rendering
import os
output_file_path = os.path.join(
version.absolute_path,
"Outputs",
version.take_name,
"v%03d" % version.version_number,
file_format,
)
# create the dir
try:
os.makedirs(output_file_path)
except OSError:
# path exists
pass
output_file_full_path = os.path.join(
output_file_path, output_file_name
).replace("\\", "/")
# make the path Project: relative
output_file_full_path = "Project:%s" % os.path.relpath(
output_file_full_path, os.path.dirname(version.absolute_path)
)
# set the output path
if sys.version_info[0] >= 3:
return "%s" % os.path.normpath(output_file_full_path)
else:
return "%s" % os.path.normpath(output_file_full_path).encode()
def output_node_name_generator(self, file_format):
return "%s_%s" % (self._main_output_node_name, file_format)
def create_slate_node(self, version, submitting_for="FINAL", submission_note=""):
"""Creates the slate node
:param version: A Stalker Version instance
:param str submitting_for: Submitting for "FINAL" or "WIP". Default is "FINAL".
:param str submission_note: Submission note.
:return:
"""
# if the channels are animated, set new keyframes
# first try to find the slate tool
slate_node = self.comp.FindTool("MainSlate")
if not slate_node:
# create one
self.comp.Lock()
self.comp.DoAction("AddSetting", {"filename": "Macros:/AnimaSlate.setting"})
slate_node = self.comp.FindTool("AnimaSlate1")
self.comp.Unlock()
slate_node.SetAttrs({"TOOLS_Name": "MainSlate", "TOOLB_Locked": False})
# set slate attributes
from anima.dcc.fusion import utils
# Thumbnail
shot = self.get_shot(version)
imf = None
if shot:
if shot.thumbnail:
import os
thumbnail_full_path = os.path.expandvars(shot.thumbnail.full_path)
slate_node.Input1 = thumbnail_full_path
if shot:
imf = shot.image_format
else:
imf = version.task.project.image_format
# Shot Types
# TODO: For now use Netflix format, extend it later on
from anima.utils.report import NetflixReporter
slate_node.Input8 = ", ".join(
NetflixReporter.generate_shot_methodologies(shot)
)
# Shot Description
from anima.utils import text_splitter
split_description = text_splitter(shot.description, 40)
slate_node.Input9 = "\n".join(split_description[0:3])
slate_node.Input10 = "\n".join(split_description[0:3])
# Submission Note
slate_node.Input11 = submission_note
# Shot Name
slate_node.Input12 = shot.name
# Episode and Sequence
seq = None
if shot.sequences:
seq = shot.sequences[0]
slate_node.Input14 = seq.name
slate_node.Input15 = seq.name
# Scene Name
# Use shot name for now
parts = shot.name.split("_")
try:
scene_name = parts[2]
except IndexError:
scene_name = ""
slate_node.Input16 = scene_name
# Frames
slate_node.Input17 = shot.cut_out - shot.cut_in + 1
else:
# Frames
slate_node.Input17 = ""
# Show Name
slate_node.Input4 = version.task.project.name
# Version Name
slate_node.Input5 = "%s_v%03d" % (version.nice_name, version.version_number)
# Submitting For
slate_node.Input6 = submitting_for
# Date
import datetime
today = datetime.datetime.today()
date_time_format = "%Y-%m-%d"
slate_node.Input7 = today.strftime(date_time_format)
# Vendor
from stalker import Studio
studio = Studio.query.first()
if studio:
slate_node.Input13 = studio.name
# Media Color
slate_node.Input18 = ""
# connect the output to MediaOut
media_out_node = None
i = 0
import time
while not media_out_node and i < 2:
media_out_node = self.comp.FindTool("MediaOut1")
if not media_out_node:
print("no MediaOut1 node, waiting for 1 sec!")
time.sleep(1)
else:
print("found MediaOut1 node!")
media_out_node.Input = slate_node
i += 1
return slate_node
def create_main_saver_node(self, version):
"""Creates the default saver node if there is no created before.
Creates the default saver nodes if there isn't any existing outputs,
and updates the ones that is already created
"""
fps = 25
if version:
project = version.task.project
fps = project.fps
import uuid
random_ref_id = uuid.uuid4().hex
output_format_data = [
{
"name": "jpg",
"node_tree": {
"type": "Saver",
"attr": {
"TOOLS_Name": self.output_node_name_generator("jpg"),
},
"input_list": {
"Clip": self.output_path_generator(version, "jpg"),
"CreateDir": 1,
"ProcessRed": 1,
"ProcessGreen": 1,
"ProcessBlue": 1,
"ProcessAlpha": 0,
"OutputFormat": "JPEGFormat",
"JpegFormat.Quality": 85,
},
"connected_to": {
"Input": {
"type": "OCIOColorSpace",
"ref_id": random_ref_id,
"input_list": {
"OCIOConfig": "LUTs:/OpenColorIO-Configs/aces_1.2/config.ocio",
"SourceSpace": "ACES - ACES2065-1",
"OutputSpace": "Output - Rec.709",
},
"connected_to": {
"Input": {
"type": "OCIOColorSpace",
"input_list": {
"OCIOConfig": "LUTs:/OpenColorIO-Configs/aces_1.2/config.ocio",
"SourceSpace": "Utility - Linear - sRGB",
"OutputSpace": "ACES - ACES2065-1",
},
}
},
}
},
},
},
{
"name": "tga",
"node_tree": {
"type": "Saver",
"attr": {
"TOOLS_Name": self.output_node_name_generator("tga"),
},
"input_list": {
"Clip": self.output_path_generator(version, "tga"),
"CreateDir": 1,
"ProcessRed": 1,
"ProcessGreen": 1,
"ProcessBlue": 1,
"ProcessAlpha": 0,
"OutputFormat": "TGAFormat",
},
"connected_to": {"ref_id": random_ref_id},
},
},
{
"name": "exr",
"node_tree": {
"type": "Saver",
"attr": {
"TOOLS_Name": self.output_node_name_generator("exr"),
},
"input_list": {
"Clip": self.output_path_generator(version, "exr"),
"CreateDir": 1,
"ProcessRed": 1,
"ProcessGreen": 1,
"ProcessBlue": 1,
"ProcessAlpha": 0,
"OutputFormat": "OpenEXRFormat",
"OpenEXRFormat.Depth": 1, # 16-bit float
"OpenEXRFormat.RedEnable": 1,
"OpenEXRFormat.GreenEnable": 1,
"OpenEXRFormat.BlueEnable": 1,
"OpenEXRFormat.AlphaEnable": 0,
"OpenEXRFormat.ZEnable": 0,
"OpenEXRFormat.CovEnable": 0,
"OpenEXRFormat.ObjIDEnable": 0,
"OpenEXRFormat.MatIDEnable": 0,
"OpenEXRFormat.UEnable": 0,
"OpenEXRFormat.VEnable": 0,
"OpenEXRFormat.XNormEnable": 0,
"OpenEXRFormat.YNormEnable": 0,
"OpenEXRFormat.ZNormEnable": 0,
"OpenEXRFormat.XVelEnable": 0,
"OpenEXRFormat.YVelEnable": 0,
"OpenEXRFormat.XRevVelEnable": 0,
"OpenEXRFormat.YRevVelEnable": 0,
"OpenEXRFormat.XPosEnable": 0,
"OpenEXRFormat.YPosEnable": 0,
"OpenEXRFormat.ZPosEnable": 0,
"OpenEXRFormat.XDispEnable": 0,
"OpenEXRFormat.YDispEnable": 0,
},
"connected_to": {"ref_id": random_ref_id},
},
},
{
"name": "mp4",
"node_tree": {
"type": "Saver",
"attr": {
"TOOLS_Name": self.output_node_name_generator("mp4"),
},
"input_list": {
"Clip": self.output_path_generator(version, "mp4"),
"CreateDir": 1,
"ProcessRed": 1,
"ProcessGreen": 1,
"ProcessBlue": 1,
"ProcessAlpha": 0,
"OutputFormat": "QuickTimeMovies",
"ProcessMode": "Auto",
"SaveFrames": "Full",
"QuickTimeMovies.Compression": "H.264_avc1",
"QuickTimeMovies.Quality": 95.0,
"QuickTimeMovies.FrameRateFps": fps,
"QuickTimeMovies.KeyFrames": 5,
"StartRenderScript": 'frames_at_once = comp:GetPrefs("Comp.Memory.FramesAtOnce")\ncomp:SetPrefs("Comp.Memory.FramesAtOnce", 1)',
"EndRenderScript": 'comp:SetPrefs("Comp.Memory.FramesAtOnce", frames_at_once)',
},
"connected_to": {"ref_id": random_ref_id},
},
},
{
"name": "mov",
"node_tree": {
"type": "Saver",
"attr": {
"TOOLS_Name": self.output_node_name_generator("mov"),
},
"input_list": {
"Clip": self.output_path_generator(version, "mov"),
"CreateDir": 1,
"ProcessRed": 1,
"ProcessGreen": 1,
"ProcessBlue": 1,
"ProcessAlpha": 0,
"OutputFormat": "QuickTimeMovies",
"ProcessMode": "Auto",
"SaveFrames": "Full",
"QuickTimeMovies.Compression": "Apple ProRes 422 HQ_apch",
"QuickTimeMovies.Quality": 95.0,
"QuickTimeMovies.FrameRateFps": fps,
"QuickTimeMovies.KeyFrames": 5,
"QuickTimeMovies.LimitDataRate": 0.0,
"QuickTimeMovies.DataRateK": 1000.0,
"QuickTimeMovies.Advanced": 1.0,
"QuickTimeMovies.Primaries": 0.0,
"QuickTimeMovies.Transfer": 0.0,
"QuickTimeMovies.Matrix": 0.0,
"QuickTimeMovies.PixelAspectRatio": 0.0,
"QuickTimeMovies.ErrorDiffusion": 1.0,
"QuickTimeMovies.SaveAlphaChannel": 1.0,
"StartRenderScript": 'frames_at_once = comp:GetPrefs("Comp.Memory.FramesAtOnce")\ncomp:SetPrefs("Comp.Memory.FramesAtOnce", 1)',
"EndRenderScript": 'comp:SetPrefs("Comp.Memory.FramesAtOnce", frames_at_once)',
},
"connected_to": {"ref_id": random_ref_id},
},
},
]
if version.task.type and version.task.type.name == "Plate":
# create a different type of outputs
output_format_data = [
{
"name": "jpg",
"node_tree": {
"type": "Saver",
"attr": {
"TOOLS_Name": self.output_node_name_generator("jpg"),
},
"input_list": {
"Clip": self.output_path_generator(version, "jpg"),
"CreateDir": 1,
"ProcessRed": 1,
"ProcessGreen": 1,
"ProcessBlue": 1,
"ProcessAlpha": 0,
"OutputFormat": "JPEGFormat",
"JpegFormat.Quality": 85,
},
"connected_to": {
"Input": {
"type": "OCIOColorSpace",
"input_list": {
"OCIOConfig": "LUTs:/OpenColorIO-Configs/aces_1.2/config.ocio",
"SourceSpace": "ACES - ACES2065-1",
"OutputSpace": "Output - sRGB",
},
"connected_to": {
"Input": {
"type": "OCIOColorSpace",
"ref_id": random_ref_id,
"input_list": {
"OCIOConfig": "LUTs:/OpenColorIO-Configs/aces_1.2/config.ocio",
"SourceSpace": "ACES - ACES2065-1",
"OutputSpace": "ACES - ACES2065-1",
},
}
},
}
},
},
},
{
"name": "exr",
"node_tree": {
"type": "Saver",
"attr": {
"TOOLS_Name": self.output_node_name_generator("exr"),
},
"input_list": {
"Clip": self.output_path_generator(version, "exr"),
"CreateDir": 1,
"ProcessRed": 1,
"ProcessGreen": 1,
"ProcessBlue": 1,
"ProcessAlpha": 0,
"OutputFormat": "OpenEXRFormat",
"OpenEXRFormat.Depth": 1, # 16-bit float
"OpenEXRFormat.RedEnable": 1,
"OpenEXRFormat.GreenEnable": 1,
"OpenEXRFormat.BlueEnable": 1,
"OpenEXRFormat.AlphaEnable": 0,
"OpenEXRFormat.ZEnable": 0,
"OpenEXRFormat.CovEnable": 0,
"OpenEXRFormat.ObjIDEnable": 0,
"OpenEXRFormat.MatIDEnable": 0,
"OpenEXRFormat.UEnable": 0,
"OpenEXRFormat.VEnable": 0,
"OpenEXRFormat.XNormEnable": 0,
"OpenEXRFormat.YNormEnable": 0,
"OpenEXRFormat.ZNormEnable": 0,
"OpenEXRFormat.XVelEnable": 0,
"OpenEXRFormat.YVelEnable": 0,
"OpenEXRFormat.XRevVelEnable": 0,
"OpenEXRFormat.YRevVelEnable": 0,
"OpenEXRFormat.XPosEnable": 0,
"OpenEXRFormat.YPosEnable": 0,
"OpenEXRFormat.ZPosEnable": 0,
"OpenEXRFormat.XDispEnable": 0,
"OpenEXRFormat.YDispEnable": 0,
},
"connected_to": {
"Input": {
"type": "OCIOColorSpace",
"input_list": {
"OCIOConfig": "LUTs:/OpenColorIO-Configs/aces_1.2/config.ocio",
"SourceSpace": "ACES - ACES2065-1",
"OutputSpace": "ACES - ACES2065-1",
},
"connected_to": {
"ref_id": random_ref_id,
},
}
},
},
},
{
"name": "mov",
"node_tree": {
"type": "Saver",
"attr": {
"TOOLS_Name": self.output_node_name_generator("mov"),
},
"input_list": {
"Clip": self.output_path_generator(version, "mov"),
"CreateDir": 1,
"ProcessRed": 1,
"ProcessGreen": 1,
"ProcessBlue": 1,
"ProcessAlpha": 0,
"OutputFormat": "QuickTimeMovies",
"ProcessMode": "Auto",
"SaveFrames": "Full",
"QuickTimeMovies.Compression": "Apple ProRes 422 HQ_apch",
"QuickTimeMovies.Quality": 95.0,
"QuickTimeMovies.FrameRateFps": fps,
"QuickTimeMovies.KeyFrames": 5,
"QuickTimeMovies.LimitDataRate": 0.0,
"QuickTimeMovies.DataRateK": 1000.0,
"QuickTimeMovies.Advanced": 1.0,
"QuickTimeMovies.Primaries": 0.0,
"QuickTimeMovies.Transfer": 0.0,
"QuickTimeMovies.Matrix": 0.0,
"QuickTimeMovies.PixelAspectRatio": 0.0,
"QuickTimeMovies.ErrorDiffusion": 1.0,
"QuickTimeMovies.SaveAlphaChannel": 1.0,
"StartRenderScript": 'frames_at_once = comp:GetPrefs("Comp.Memory.FramesAtOnce")\ncomp:SetPrefs("Comp.Memory.FramesAtOnce", 1)',
"EndRenderScript": 'comp:SetPrefs("Comp.Memory.FramesAtOnce", frames_at_once)',
},
"connected_to": {
"Input": {
"type": "OCIOColorSpace",
"input_list": {
"OCIOConfig": "LUTs:/OpenColorIO-Configs/aces_1.2/config.ocio",
"SourceSpace": "ACES - ACES2065-1",
"OutputSpace": "Output - Rec.709",
},
"connected_to": {
"ref_id": random_ref_id,
},
}
},
},
},
]
if version.take_name == "STMap":
output_format_data = [
{
"name": "exr",
"node_tree": {
"type": "Saver",
"attr": {
"TOOLS_Name": self.output_node_name_generator("exr"),
},
"input_list": {
"Clip": self.output_path_generator(version, "exr"),
"CreateDir": 1,
"ProcessRed": 1,
"ProcessGreen": 1,
"ProcessBlue": 1,
"ProcessAlpha": 0,
"OutputFormat": "OpenEXRFormat",
"OpenEXRFormat.Depth": 2, # 32-bit float
"OpenEXRFormat.RedEnable": 1,
"OpenEXRFormat.GreenEnable": 1,
"OpenEXRFormat.BlueEnable": 1,
"OpenEXRFormat.AlphaEnable": 0,
"OpenEXRFormat.ZEnable": 0,
"OpenEXRFormat.CovEnable": 0,
"OpenEXRFormat.ObjIDEnable": 0,
"OpenEXRFormat.MatIDEnable": 0,
"OpenEXRFormat.UEnable": 0,
"OpenEXRFormat.VEnable": 0,
"OpenEXRFormat.XNormEnable": 0,
"OpenEXRFormat.YNormEnable": 0,
"OpenEXRFormat.ZNormEnable": 0,
"OpenEXRFormat.XVelEnable": 0,
"OpenEXRFormat.YVelEnable": 0,
"OpenEXRFormat.XRevVelEnable": 0,
"OpenEXRFormat.YRevVelEnable": 0,
"OpenEXRFormat.XPosEnable": 0,
"OpenEXRFormat.YPosEnable": 0,
"OpenEXRFormat.ZPosEnable": 0,
"OpenEXRFormat.XDispEnable": 0,
"OpenEXRFormat.YDispEnable": 0,
},
"connected_to": {"ref_id": random_ref_id},
},
},
]
self.comp.SetPrefs(
{
# set project frame format to 32bit
"Comp.FrameFormat.DepthFull": 3.0,
"Comp.FrameFormat.DepthLock": True,
}
)
# selectively generate output format
saver_nodes = self.get_main_saver_node()
for data in output_format_data:
format_name = data["name"]
node_tree = data["node_tree"]
# now check if a node with the same name exists
format_node = None
format_node_name = self.output_node_name_generator(format_name)
for node in saver_nodes:
node_name = node.GetAttrs("TOOLS_Name")
if node_name.startswith(format_node_name):
format_node = node
break
# create the saver node for this format if missing
if not format_node:
self.create_node_tree(node_tree)
else:
# just update the input_lists
if "input_list" in node_tree:
input_list = node_tree["input_list"]
for key in input_list:
node_input_list = format_node.GetInputList()
for input_entry_key in node_input_list.keys():
input_entry = node_input_list[input_entry_key]
input_id = input_entry.GetAttrs()["INPS_ID"]
if input_id == key:
value = input_list[key]
input_entry[0] = value
break
try:
import os
os.makedirs(
os.path.dirname(self.output_path_generator(version, format_name))
)
except OSError:
# path already exists
pass
@property
def project_directory(self):
"""The project directory.
Set it to the project root, and set all your paths relative to this
directory.
"""
# try to figure it out from the maps
# search for Project path
project_dir = None
maps = self.comp_prefs["Paths"].get("Map", None)
if maps:
project_dir = maps.get("Project:", None)
# if not project_dir:
# # set the map for the project dir
# if self.version:
# project_dir = os.path.dirname(self.version.absolute_path)
# self.project_directory = project_dir
return project_dir
@project_directory.setter
def project_directory(self, project_directory_in):
"""Sets project directory
:param str project_directory_in: the project directory
:return:
"""
import os
project_directory_in = os.path.normpath(project_directory_in)
print("setting project directory to: %s" % project_directory_in)
# set a path map
self.comp.SetPrefs(
{
"Comp.Paths.Map": {
"Project:": project_directory_in
if sys.version_info[0] >= 3
else project_directory_in.encode()
}
}
)
|
|
# Copyright 2015 Internap.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import textwrap
import unittest
import mock
from flexmock import flexmock, flexmock_teardown
from hamcrest import assert_that, equal_to, is_, instance_of
from netman.adapters.switches import juniper
from netman.adapters.switches.juniper.base import Juniper
from netman.core.objects.exceptions import UnknownInterface
from netman.core.objects.port_modes import ACCESS, TRUNK, BOND_MEMBER
from netman.core.objects.switch_descriptor import SwitchDescriptor
from netman.core.objects.switch_transactional import FlowControlSwitch
from tests import ignore_deprecation_warnings
from tests.adapters.switches.juniper_test import an_ok_response, is_xml, a_configuration, an_rpc_response
@ignore_deprecation_warnings
def test_factory():
lock = mock.Mock()
switch = juniper.qfx_copper_factory(
SwitchDescriptor(hostname='hostname', model='juniper_qfx_copper', username='username', password='password',
port=22), lock)
assert_that(switch, instance_of(FlowControlSwitch))
assert_that(switch.wrapped_switch, instance_of(Juniper))
assert_that(switch.lock, is_(lock))
assert_that(switch.switch_descriptor.hostname, equal_to("hostname"))
assert_that(switch.switch_descriptor.model, equal_to("juniper_qfx_copper"))
assert_that(switch.switch_descriptor.username, equal_to("username"))
assert_that(switch.switch_descriptor.password, equal_to("password"))
assert_that(switch.switch_descriptor.port, equal_to(22))
class JuniperTest(unittest.TestCase):
def setUp(self):
self.switch = juniper.qfx_copper.netconf(SwitchDescriptor(model='juniper', hostname="toto"))
self.netconf_mock = flexmock()
self.switch.netconf = self.netconf_mock
self.switch.in_transaction = True
def tearDown(self):
flexmock_teardown()
def test_get_interface(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/1</name>
</interface>
</interfaces>
<vlans />
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/1</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/2</name>
<disable />
<description>Howdy</description>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
if1 = self.switch.get_interface('ge-0/0/1')
assert_that(if1.name, equal_to("ge-0/0/1"))
assert_that(if1.shutdown, equal_to(False))
assert_that(if1.port_mode, equal_to(ACCESS))
assert_that(if1.access_vlan, equal_to(None))
assert_that(if1.trunk_native_vlan, equal_to(None))
assert_that(if1.trunk_vlans, equal_to([]))
assert_that(if1.auto_negotiation, equal_to(None))
def test_get_nonexistent_interface_raises(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/INEXISTENT</name>
</interface>
</interfaces>
<vlans />
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces/>
<vlans/>
"""))
self.netconf_mock.should_receive("rpc").with_args(is_xml("""
<get-interface-information>
<terse/>
</get-interface-information>
""")).and_return(an_rpc_response(textwrap.dedent("""
<interface-information style="terse">
<physical-interface>
<name>
ge-0/0/1
</name>
<admin-status>
down
</admin-status>
<oper-status>
down
</oper-status>
</physical-interface>
</interface-information>
""")))
with self.assertRaises(UnknownInterface) as expect:
self.switch.get_interface('ge-0/0/INEXISTENT')
assert_that(str(expect.exception), equal_to("Unknown interface ge-0/0/INEXISTENT"))
def test_get_interfaces(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("rpc").with_args(is_xml("""
<get-interface-information>
<terse/>
</get-interface-information>
""")).and_return(an_rpc_response(textwrap.dedent("""
<interface-information style="terse">
<physical-interface>
<name>
ge-0/0/1
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
<logical-interface>
<name>
ge-0/0/1.0
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
<filter-information>
</filter-information>
<address-family>
<address-family-name>
eth-switch
</address-family-name>
</address-family>
</logical-interface>
</physical-interface>
<physical-interface>
<name>
ge-0/0/2
</name>
<admin-status>
down
</admin-status>
<oper-status>
down
</oper-status>
<logical-interface>
<name>
ge-0/0/2.0
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
<filter-information>
</filter-information>
<address-family>
<address-family-name>
eth-switch
</address-family-name>
</address-family>
</logical-interface>
</physical-interface>
<physical-interface>
<name>
ge-0/0/3
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
<logical-interface>
<name>
ge-0/0/3.0
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
<filter-information>
</filter-information>
<address-family>
<address-family-name>
eth-switch
</address-family-name>
</address-family>
</logical-interface>
</physical-interface>
<physical-interface>
<name>
ge-0/0/4
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
<logical-interface>
<name>
ge-0/0/4.0
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
<filter-information>
</filter-information>
<address-family>
<address-family-name>
eth-switch
</address-family-name>
</address-family>
</logical-interface>
</physical-interface>
<physical-interface>
<name>
ge-0/0/5
</name>
<admin-status>
up
</admin-status>
<oper-status>
down
</oper-status>
</physical-interface>
</interface-information>
""")))
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<interfaces />
<vlans />
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/1</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/2</name>
<disable />
<description>Howdy</description>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<vlan>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/3</name>
<native-vlan-id>2000</native-vlan-id>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<interface-mode>trunk</interface-mode>
<vlan>
<members>999-1001</members>
<members>1000</members>
</vlan>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/4</name>
<ether-options>
<no-auto-negotiation/>
</ether-options>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<interface-mode>trunk</interface-mode>
</ethernet-switching>
</family>
</unit>
</interface>
<interface>
<name>ge-0/0/5</name>
<ether-options>
<auto-negotiation/>
<ieee-802.3ad>
<bundle>ae10</bundle>
</ieee-802.3ad>
</ether-options>
</interface>
<interface>
<name>vlan</name>
<unit>
<name>40</name>
</unit>
</interface>
<interface>
<name>ae10</name>
<aggregated-ether-options>
<lacp>
<active/>
<periodic>slow</periodic>
</lacp>
</aggregated-ether-options>
<unit>
<name>0</name>
<family>
<ethernet-switching />
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
if1, if2, if3, if4, if5 = self.switch.get_interfaces()
assert_that(if1.name, equal_to("ge-0/0/1"))
assert_that(if1.shutdown, equal_to(False))
assert_that(if1.port_mode, equal_to(ACCESS))
assert_that(if1.access_vlan, equal_to(None))
assert_that(if1.trunk_native_vlan, equal_to(None))
assert_that(if1.trunk_vlans, equal_to([]))
assert_that(if1.auto_negotiation, equal_to(None))
assert_that(if2.name, equal_to("ge-0/0/2"))
assert_that(if2.shutdown, equal_to(True))
assert_that(if2.port_mode, equal_to(ACCESS))
assert_that(if2.access_vlan, equal_to(1000))
assert_that(if2.trunk_native_vlan, equal_to(None))
assert_that(if2.trunk_vlans, equal_to([]))
assert_that(if3.name, equal_to("ge-0/0/3"))
assert_that(if3.port_mode, equal_to(TRUNK))
assert_that(if3.access_vlan, equal_to(None))
assert_that(if3.trunk_native_vlan, equal_to(2000))
assert_that(if3.trunk_vlans, equal_to([999, 1000, 1001]))
assert_that(if4.name, equal_to("ge-0/0/4"))
assert_that(if4.trunk_native_vlan, equal_to(None))
assert_that(if4.trunk_vlans, equal_to([]))
assert_that(if4.auto_negotiation, equal_to(False))
assert_that(if5.name, equal_to("ge-0/0/5"))
assert_that(if5.port_mode, equal_to(BOND_MEMBER))
assert_that(if5.bond_master, equal_to(10))
assert_that(if5.auto_negotiation, equal_to(True))
def test_get_interface_with_trunk_native_vlan_at_root(self):
self.switch.in_transaction = False
self.netconf_mock.should_receive("get_config").with_args(source="running", filter=is_xml("""
<filter>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/1</name>
</interface>
</interfaces>
<vlans />
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/1</name>
<native-vlan-id>1000</native-vlan-id>
<unit>
<name>0</name>
<family>
<ethernet-switching>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
if1 = self.switch.get_interface('ge-0/0/1')
assert_that(if1.name, equal_to("ge-0/0/1"))
assert_that(if1.trunk_native_vlan, equal_to(1000))
def test_port_mode_access_with_no_port_mode_or_vlan_set_just_sets_the_port_mode(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ge-0/0/6</name>
<unit>
<name>0</name>
<family>
<ethernet-switching>
<interface-mode>access</interface-mode>
</ethernet-switching>
</family>
</unit>
</interface>
</interfaces>
</configuration>
</config>
""")).and_return(an_ok_response())
self.switch.set_access_mode("ge-0/0/6")
def test_add_interface_to_bond(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
<protocols>
<rstp>
<interface />
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ae10</name>
</interface>
<interface>
<name>ge-0/0/1</name>
</interface>
</interfaces>
<vlans/>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface operation="replace">
<name>ge-0/0/1</name>
<ether-options>
<auto-negotiation/>
<ieee-802.3ad>
<bundle>ae10</bundle>
</ieee-802.3ad>
</ether-options>
</interface>
</interfaces>
</configuration>
</config>""")).and_return(an_ok_response())
self.switch.add_interface_to_bond('ge-0/0/1', 10)
def test_add_interface_to_bond_gets_up_to_speed_and_removes_existing_rstp_protocol(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces/>
<vlans/>
<protocols>
<rstp>
<interface />
</rstp>
</protocols>
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ae10</name>
<aggregated-ether-options>
<link-speed>1g</link-speed>
</aggregated-ether-options>
</interface>
<interface>
<name>ge-0/0/1</name>
</interface>
</interfaces>
<vlans/>
<protocols>
<rstp>
<interface>
<name>ge-0/0/1</name>
<edge />
</interface>
</rstp>
</protocols>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface operation="replace">
<name>ge-0/0/1</name>
<ether-options>
<auto-negotiation/>
<ieee-802.3ad>
<bundle>ae10</bundle>
</ieee-802.3ad>
</ether-options>
</interface>
</interfaces>
<protocols>
<rstp>
<interface operation="delete">
<name>ge-0/0/1</name>
</interface>
</rstp>
</protocols>
</configuration>
</config>""")).and_return(an_ok_response())
self.switch.add_interface_to_bond('ge-0/0/1', 10)
def test_change_bond_speed_update_slaves_and_interface_at_same_time(self):
self.netconf_mock.should_receive("get_config").with_args(source="candidate", filter=is_xml("""
<filter>
<configuration>
<interfaces />
</configuration>
</filter>
""")).and_return(a_configuration("""
<interfaces>
<interface>
<name>ae10</name>
</interface>
<interface>
<name>ge-0/0/1</name>
<ether-options>
<ieee-802.3ad>
<bundle>ae10</bundle>
</ieee-802.3ad>
</ether-options>
</interface>
<interface>
<name>ge-0/0/2</name>
</interface>
</interfaces>
"""))
self.netconf_mock.should_receive("edit_config").once().with_args(target="candidate", config=is_xml("""
<config>
<configuration>
<interfaces>
<interface>
<name>ae10</name>
<aggregated-ether-options>
<link-speed>1g</link-speed>
</aggregated-ether-options>
</interface>
</interfaces>
</configuration>
</config>""")).and_return(an_ok_response())
self.switch.set_bond_link_speed(10, '1g')
def test_get_mac_addresses(self):
self.netconf_mock.should_receive("rpc").with_args(is_xml("""
<get-ethernet-switching-table-information>
</get-ethernet-switching-table-information>
""")).and_return(an_rpc_response(textwrap.dedent("""
<l2ng-l2ald-rtb-macdb>
<l2ng-l2ald-mac-entry-vlan style="extensive">
<l2ng-l2-mac-address>00:11:c6:01:53:a7</l2ng-l2-mac-address>
<mac-count-global>8</mac-count-global>
<learnt-mac-count>8</learnt-mac-count>
<l2ng-l2-mac-routing-instance>default-switch</l2ng-l2-mac-routing-instance>
<l2ng-l2-vlan-id>4063</l2ng-l2-vlan-id>
<l2ng-l2-mac-vlan-name>VLAN4063</l2ng-l2-mac-vlan-name>
<l2ng-l2-mac-logical-interface>ae47.0</l2ng-l2-mac-logical-interface>
<l2ng-l2-mac-ifl-generation>240</l2ng-l2-mac-ifl-generation>
<l2ng-l2-mac-entry-flags>in_hash,in_ifd,in_ifl,in_vlan,in_rtt,kernel,in_ifbd</l2ng-l2-mac-entry-flags>
<l2ng-l2-mac-epoch>1</l2ng-l2-mac-epoch>
<l2ng-l2-mac-sequence-number>2</l2ng-l2-mac-sequence-number>
<l2ng-l2-mac-learn-mask>0x00000002</l2ng-l2-mac-learn-mask>
</l2ng-l2ald-mac-entry-vlan>
<l2ng-l2ald-mac-entry-vlan style="extensive">
<l2ng-l2-mac-address>00:11:90:3d:33:58</l2ng-l2-mac-address>
<mac-count-global>6</mac-count-global>
<learnt-mac-count>6</learnt-mac-count>
<l2ng-l2-mac-routing-instance>default-switch</l2ng-l2-mac-routing-instance>
<l2ng-l2-vlan-id>4080</l2ng-l2-vlan-id>
<l2ng-l2-mac-vlan-name>VLAN4080</l2ng-l2-mac-vlan-name>
<l2ng-l2-mac-logical-interface>xe-0/0/6.0</l2ng-l2-mac-logical-interface>
<l2ng-l2-mac-ifl-generation>3045</l2ng-l2-mac-ifl-generation>
<l2ng-l2-mac-entry-flags>in_hash,in_ifd,in_ifl,in_vlan,in_rtt,kernel,in_ifbd</l2ng-l2-mac-entry-flags>
<l2ng-l2-mac-epoch>31</l2ng-l2-mac-epoch>
<l2ng-l2-mac-sequence-number>0</l2ng-l2-mac-sequence-number>
<l2ng-l2-mac-learn-mask>0x00000001</l2ng-l2-mac-learn-mask>
</l2ng-l2ald-mac-entry-vlan>
<l2ng-l2ald-mac-entry-vlan style="extensive">
<l2ng-l2-mac-address>00:11:b7:b4:74:2c</l2ng-l2-mac-address>
<mac-count-global>8</mac-count-global>
<learnt-mac-count>8</learnt-mac-count>
<l2ng-l2-mac-routing-instance>default-switch</l2ng-l2-mac-routing-instance>
<l2ng-l2-vlan-id>4063</l2ng-l2-vlan-id>
<l2ng-l2-mac-vlan-name>VLAN4063</l2ng-l2-mac-vlan-name>
<l2ng-l2-mac-logical-interface>ae47.0</l2ng-l2-mac-logical-interface>
<l2ng-l2-mac-ifl-generation>240</l2ng-l2-mac-ifl-generation>
<l2ng-l2-mac-entry-flags>in_hash,in_ifd,in_ifl,in_vlan,in_rtt,kernel,in_ifbd</l2ng-l2-mac-entry-flags>
<l2ng-l2-mac-epoch>1</l2ng-l2-mac-epoch>
<l2ng-l2-mac-sequence-number>2</l2ng-l2-mac-sequence-number>
<l2ng-l2-mac-learn-mask>0x00000002</l2ng-l2-mac-learn-mask>
</l2ng-l2ald-mac-entry-vlan>
</l2ng-l2ald-rtb-macdb>
""")))
mac_addresses = self.switch.get_mac_addresses()
assert_that(len(mac_addresses), is_(3))
for mac_address in mac_addresses:
if mac_address.mac_address == '00:11:c6:01:53:a7':
assert_that(mac_address.interface, is_('ae47'))
assert_that(mac_address.vlan, is_(4063))
assert_that(mac_address.type, is_('Agregated'))
elif mac_address.mac_address == '00:11:90:3d:33:58':
assert_that(mac_address.interface, is_('xe-0/0/6'))
assert_that(mac_address.vlan, is_(4080))
assert_that(mac_address.type, is_('Physical'))
elif mac_address.mac_address == '00:11:b7:b4:74:2c':
assert_that(mac_address.interface, is_('ae47'))
assert_that(mac_address.vlan, is_(4063))
assert_that(mac_address.type, is_('Agregated'))
else:
self.assert_(False, "Invalid mac_address returned : {}".format(mac_address.mac_address))
|
|
"""Tools to align a reconstruction to GPS and GCP data."""
import logging
import math
import numpy as np
from opensfm import multiview
from opensfm import transformations as tf
logger = logging.getLogger(__name__)
def align_reconstruction(reconstruction, gcp, config):
"""Align a reconstruction with GPS and GCP data."""
res = align_reconstruction_similarity(reconstruction, gcp, config)
if res:
s, A, b = res
apply_similarity(reconstruction, s, A, b)
def apply_similarity_pose(pose, s, A, b):
""" Apply a similarity (y = s A x + b) to an object having a 'pose' member. """
R = pose.get_rotation_matrix()
t = np.array(pose.translation)
Rp = R.dot(A.T)
tp = -Rp.dot(b) + s * t
pose.set_rotation_matrix(Rp)
pose.translation = list(tp)
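# Why Rp = R A^T and tp = s t - R A^T b (a short sketch of the algebra above):
# if the camera maps a world point X to camera coordinates as R X + t, and world
# points are transformed by the similarity X' = s A X + b, then
#
#   Rp X' + tp = R A^T (s A X + b) - R A^T b + s t = s (R X + t)
#
# so the updated pose reproduces the old camera coordinates up to the global scale s.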
def apply_similarity(reconstruction, s, A, b):
"""Apply a similarity (y = s A x + b) to a reconstruction.
:param reconstruction: The reconstruction to transform.
:param s: The scale (a scalar)
:param A: The rotation matrix (3x3)
:param b: The translation vector (3)
"""
# Align points.
for point in reconstruction.points.values():
Xp = s * A.dot(point.coordinates) + b
point.coordinates = Xp.tolist()
# Align cameras.
for shot in reconstruction.shots.values():
if shot.is_in_rig():
continue
apply_similarity_pose(shot.pose, s, A, b)
# Align rig instances
for rig_instance in reconstruction.rig_instances.values():
apply_similarity_pose(rig_instance.pose, s, A, b)
def align_reconstruction_similarity(reconstruction, gcp, config):
"""Align reconstruction with GPS and GCP data.
Config parameter `align_method` can be used to choose the alignment method.
Accepted values are
- naive: does a direct 3D-3D fit
- orientation_prior: assumes a particular camera orientation
"""
align_method = config["align_method"]
if align_method == "auto":
align_method = detect_alignment_constraints(config, reconstruction, gcp)
if align_method == "orientation_prior":
res = align_reconstruction_orientation_prior_similarity(
reconstruction, config, gcp
)
elif align_method == "naive":
res = align_reconstruction_naive_similarity(config, reconstruction, gcp)
s, A, b = res
if (s == 0) or np.isnan(A).any() or np.isnan(b).any():
logger.warning(
"Computation of alignment similarity (%s) is degenerate." % align_method
)
return None
return res
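# A hypothetical config sketch covering the keys read in this module (the real
# defaults live in OpenSfM's config handling, not here):
#
#   config = {
#       "align_method": "auto",                   # or "naive" / "orientation_prior"
#       "align_orientation_prior": "horizontal",  # "no_roll" / "horizontal" / "vertical"
#       "bundle_use_gps": True,
#       "bundle_use_gcp": False,
#   }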
def alignment_constraints(config, reconstruction, gcp):
""" Gather alignment constraints to be used by checking bundle_use_gcp and bundle_use_gps. """
X, Xp = [], []
# Get Ground Control Point correspondences
if gcp and config["bundle_use_gcp"]:
triangulated, measured = triangulate_all_gcp(reconstruction, gcp)
X.extend(triangulated)
Xp.extend(measured)
# Get camera center correspondences
if config["bundle_use_gps"]:
for shot in reconstruction.shots.values():
if shot.metadata.gps_position.has_value:
X.append(shot.pose.get_origin())
Xp.append(shot.metadata.gps_position.value)
return X, Xp
def detect_alignment_constraints(config, reconstruction, gcp):
"""Automatically pick the best alignment method, depending
if alignment data such as GPS/GCP is aligned on a single-line or not.
"""
X, Xp = alignment_constraints(config, reconstruction, gcp)
if len(X) < 3:
return "orientation_prior"
X = np.array(X)
X = X - np.average(X, axis=0)
evalues, _ = np.linalg.eig(X.T.dot(X))
evalues = np.array(sorted(evalues))
ratio_1st_2nd = math.fabs(evalues[2] / evalues[1])
epsilon_abs = 1e-10
epsilon_ratio = 5e3
is_line = sum(evalues < epsilon_abs) > 1 or ratio_1st_2nd > epsilon_ratio
if is_line:
logger.warning(
"Shots and/or GCPs are aligned on a single-line. Using %s prior",
config["align_orientation_prior"],
)
return "orientation_prior"
else:
logger.info(
"Shots and/or GCPs are well-conditioned. Using naive 3D-3D alignment."
)
return "naive"
def align_reconstruction_naive_similarity(config, reconstruction, gcp):
"""Align with GPS and GCP data using direct 3D-3D matches."""
X, Xp = alignment_constraints(config, reconstruction, gcp)
if len(X) == 0:
return 1.0, np.identity(3), np.zeros((3))
# Translation-only case, either :
# - a single value
# - identical values
same_values = np.linalg.norm(np.std(Xp, axis=0)) < 1e-10
single_value = len(X) == 1
if single_value:
logger.warning("Only 1 constraints. Using translation-only alignment.")
if same_values:
logger.warning(
"GPS/GCP data seems to have identical values. Using translation-only alignment."
)
if same_values or single_value:
t = np.array(Xp[0]) - np.array(X[0])
return 1.0, np.identity(3), t
# Will be up to some unknown rotation
if len(X) == 2:
logger.warning("Only 2 constraints. Will be up to some unknown rotation.")
X.append(X[1])
Xp.append(Xp[1])
# Compute similarity Xp = s A X + b
X = np.array(X)
Xp = np.array(Xp)
T = tf.superimposition_matrix(X.T, Xp.T, scale=True)
A, b = T[:3, :3], T[:3, 3]
s = np.linalg.det(A) ** (1.0 / 3)
A /= s
return s, A, b
def align_reconstruction_orientation_prior_similarity(reconstruction, config, gcp):
"""Align with GPS data assuming particular a camera orientation.
In some cases, using 3D-3D matches directly fails to find proper
orientation of the world. That happends mainly when all cameras lie
close to a straigh line.
In such cases, we can impose a particular orientation of the cameras
to improve the orientation of the alignment. The config parameter
`align_orientation_prior` can be used to specify such orientation.
Accepted values are:
- no_roll: assumes horizon is horizontal on the images
- horizontal: assumes cameras are looking towards the horizon
- vertical: assumes cameras are looking down towards the ground
"""
X, Xp = alignment_constraints(config, reconstruction, gcp)
X = np.array(X)
Xp = np.array(Xp)
if len(X) < 1:
return 1.0, np.identity(3), np.zeros((3))
p = estimate_ground_plane(reconstruction, config)
Rplane = multiview.plane_horizontalling_rotation(p)
X = Rplane.dot(X.T).T
# Estimate 2d similarity to align to GPS
two_shots = len(X) == 2
single_shot = len(X) < 2
same_shots = (
    X.std(axis=0).max() < 1e-8  # All points are the same.
    or Xp.std(axis=0).max() < 0.01  # All GPS points are the same.
)
if single_shot or same_shots:
s = 1.0
A = Rplane
b = Xp.mean(axis=0) - X.mean(axis=0)
# Clamp the two-shot scale to 1 km, so the
# optimizer can still catch up to an acceptable error
max_scale = 1000
current_scale = np.linalg.norm(b)
if two_shots and current_scale > max_scale:
b = max_scale * b / current_scale
s = max_scale / current_scale
else:
T = tf.affine_matrix_from_points(X.T[:2], Xp.T[:2], shear=False)
s = np.linalg.det(T[:2, :2]) ** 0.5
A = np.eye(3)
A[:2, :2] = T[:2, :2] / s
A = A.dot(Rplane)
b = np.array(
[
T[0, 2],
T[1, 2],
Xp[:, 2].mean() - s * X[:, 2].mean(), # vertical alignment
]
)
return s, A, b
def estimate_ground_plane(reconstruction, config):
"""Estimate ground plane orientation.
It assumes cameras are all at a similar height and uses the
align_orientation_prior option to enforce cameras to look
horizontally or vertically.
"""
orientation_type = config["align_orientation_prior"]
onplane, verticals = [], []
for shot in reconstruction.shots.values():
R = shot.pose.get_rotation_matrix()
x, y, z = get_horizontal_and_vertical_directions(
R, shot.metadata.orientation.value
)
if orientation_type == "no_roll":
onplane.append(x)
verticals.append(-y)
elif orientation_type == "horizontal":
onplane.append(x)
onplane.append(z)
verticals.append(-y)
elif orientation_type == "vertical":
onplane.append(x)
onplane.append(y)
verticals.append(-z)
ground_points = []
for shot in reconstruction.shots.values():
ground_points.append(shot.pose.get_origin())
ground_points = np.array(ground_points)
ground_points -= ground_points.mean(axis=0)
plane = multiview.fit_plane(ground_points, onplane, verticals)
return plane
def get_horizontal_and_vertical_directions(R, orientation):
"""Get orientation vectors from camera rotation matrix and orientation tag.
Return 3D vectors pointing to the positive XYZ directions of the image.
X points to the right, Y to the bottom, Z to the front.
"""
# See http://sylvana.net/jpegcrop/exif_orientation.html
if orientation == 1:
return R[0, :], R[1, :], R[2, :]
if orientation == 2:
return -R[0, :], R[1, :], -R[2, :]
if orientation == 3:
return -R[0, :], -R[1, :], R[2, :]
if orientation == 4:
return R[0, :], -R[1, :], R[2, :]
if orientation == 5:
return R[1, :], R[0, :], -R[2, :]
if orientation == 6:
return -R[1, :], R[0, :], R[2, :]
if orientation == 7:
return -R[1, :], -R[0, :], -R[2, :]
if orientation == 8:
return R[1, :], -R[0, :], R[2, :]
logger.error("unknown orientation {0}. Using 1 instead".format(orientation))
return R[0, :], R[1, :], R[2, :]
def triangulate_all_gcp(reconstruction, gcp):
"""Group and triangulate Ground Control Points seen in 2+ images."""
triangulated, measured = [], []
for point in gcp:
x = multiview.triangulate_gcp(
point,
reconstruction.shots,
reproj_threshold=0.004,
min_ray_angle_degrees=2.0,
)
if x is not None:
triangulated.append(x)
measured.append(point.coordinates.value)
return triangulated, measured
|
|
import os
from generic import obj
from developer import DevBase
from simulation import Simulation,SimulationInput,SimulationAnalyzer
# PLEASE READ THIS
#
# depending on what you want to do with a simulation
# you will have to implement different functions below
# here are a few use cases and the functions required
# details about each function are given within the classes
#
# use cases
# 1) standalone simulation
# nexus drives this simulation in isolation of others
# i.e., one performs parameter scans to drive several independent opium runs
# in this setting, a opium simulation does not provide information to
# other simulations (e.g. pseudopotentials to qmcpack)
#
# the input file will be read from a template file
# and modified to obtain the desired inputs
# one could also provide the input longhand in python
# in a form OpiumInput understands (this depends on your implementation)
#
# required functions to be implemented:
# OpiumInput: read_contents, write_contents
# Opium: app_command, check_sim_status
#
# 2) generated standalone simulation
# as above, but with fully generated input files
# generate functions provide a short-hand of minimal vars for input
#
# required functions to be implemented:
# OpiumInput: read_contents, write_contents
# Opium: app_command, check_sim_status
# generate_opium_input
#
# 3) simulation that provides information to subsequent chained simulations
# as above (with or without #2)
# other simulations can request and get information about
# results produced by this simulation
# (e.g. relaxed structure data, location of orbital files, etc.)
# this information is used by the others to populate input files
#
# required functions to be implemented:
# OpiumInput: read_contents, write_contents
# Opium: app_command,check_sim_status,
# check_result, get_result
#
# if required to get needed output information:
# OpiumAnalyzer: analyze
def readval(s):
try:
val = int(s)
except:
try:
val = float(s)
except:
val = s
#end try
#end try
return val
#end def readval
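# A few hypothetical examples of readval's coercion order (int, then float,
# then plain string):
#
#   readval('3')       # -> 3
#   readval('2.5e-3')  # -> 0.0025
#   readval('opt')     # -> 'opt'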
class Section(DevBase):
types = {int:'int',float:'float',str:'str',list:'list',obj:'obj'}
variables = None
def __init__(self,list_rep=None,**kwargs):
if isinstance(list_rep,list):
self.from_list_rep(list_rep)
else:
self.set(**kwargs)
#end if
#self.validate()
#for name,type in self.variables.iteritems():
# if type in ('str','int','float'):
# val = None
# elif type=='list':
# val = []
# elif type=='obj':
# val = obj()
# #end if
# self[name] = val
##end for
#end def __init__
def validate(self):
allowed = set(self.variables.keys())
if len(allowed)>0: # some have numeric rather than named entries
present = set(self.keys())
missing = allowed-present
invalid = present-allowed
if len(missing)>0:
self.error('the following variables are missing: '+str(sorted(missing)))
#end if
if len(invalid)>0:
self.error('invalid variable names encountered\ninvalid variables: {0}\nallowed variables: {1}'.format(sorted(invalid),sorted(allowed)))
#end if
for name in sorted(present):
type = self.variables[name]
if not isinstance(self[name],type):
self.error('type of variable {0} is incorrect\ntype required: {1}\ncontents: {2}'.format(name,self.types[type],str(self[name])))
#end if
#end for
#end if
#end def validate
def read(self,text):
list_rep = []
lines = text.splitlines()
for line in lines:
tokens = line.split()
vals = []
for token in tokens:
vals.append(readval(token))
#end for
list_rep.append(vals)
#end for
self.from_list_rep(list_rep)
#end def read
def write(self):
s = '['+self.__class__.__name__+']\n'
for line_list in self.list_rep():
for val in line_list:
s+=' '+str(val)
#end for
s+='\n'
#end for
s+='\n'
return s
#end def write
def list_rep(self):
self.not_implemented()
#end def list_rep
def from_list_rep(self,list_rep):
self.not_implemented()
#end def from_list_rep
#end class Section
class Atom(Section):
variables = obj(symbol=str,rorbs=int,ref=obj)
def list_rep(self):
list_rep = [[self.symbol],[self.rorbs]]
for index in sorted(self.ref.keys()):
v = self.ref[index]
list_rep.append([v.nlm,v.occ,v.eig])
#end for
return list_rep
#end def list_rep
def from_list_rep(self,list_rep):
self.symbol = list_rep[0][0]
self.rorbs = list_rep[1][0]
ref = obj()
n = 0
for nlm,occ,eig in list_rep[2:]:
ref[n] = obj(nlm=nlm,occ=occ,eig=eig)
n+=1
#end for
self.ref = ref
#end def from_list_rep
#end class Atom
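# A hypothetical [Atom] section as Atom.read()/from_list_rep() expect it: the
# element symbol, the number of reference orbitals, then one "nlm occ eig" line
# per orbital (the numeric values below are made up for illustration):
#
#   [Atom]
#   Si
#   3
#   300 2.00 -0.7967
#   310 2.00 -0.3069
#   320 0.00 -0.1131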
class Pseudo(Section):
variables = obj(porbs=int,rcuts=list,method=str)
def list_rep(self):
list_rep = [[self.porbs]+list(self.rcuts),[self.method]]
return list_rep
#end def list_rep
def from_list_rep(self,list_rep):
self.porbs = list_rep[0][0]
self.rcuts = list_rep[0][1:]
self.method = list_rep[1][0]
#end def from_list_rep
#end class Pseudo
class Optinfo(Section):
variables = obj(qcuts=list,bessels=list)
def list_rep(self):
list_rep = zip(self.qcuts,self.bessels)
return list_rep
#end def list_rep
def from_list_rep(self,list_rep):
qc = []
bs = []
for q,b in list_rep:
qc.append(q)
bs.append(b)
#end for
self.qcuts = qc
self.bessels = bs
#end def from_list_rep
#end class Optinfo
class XC(Section):
variables = obj(functional=str)
def list_rep(self):
list_rep = [[self.functional]]
return list_rep
#end def list_rep
def from_list_rep(self,list_rep):
self.functional = list_rep[0][0]
#end def from_list_rep
#end class XC
class Pcc(Section):
variables = obj(radius=float,method=str)
def list_rep(self):
list_rep = [[self.radius],[self.method]]
return list_rep
#end def list_rep
def from_list_rep(self,list_rep):
self.radius = list_rep[0][0]
self.method = list_rep[1][0]
#end def from_list_rep
#end class Pcc
class Relativity(Section):
variables = obj(rl=str)
def list_rep(self):
list_rep = [[self.rl]]
return list_rep
#end def list_rep
def from_list_rep(self,list_rep):
self.rl = list_rep[0][0]
#end def from_list_rep
#end class Relativity
class Grid(Section):
variables = obj(np=int,a=float,b=float)
def list_rep(self):
list_rep = [[self.np,self.a,self.b]]
return list_rep
#end def list_rep
def from_list_rep(self,list_rep):
np,a,b = list_rep[0]
self.np = np
self.a = a
self.b = b
#end def from_list_rep
#end class Grid
class Tol(Section):
variables = obj(aetol=float,nltol=float)
def list_rep(self):
list_rep = [[self.aetol,self.nltol]]
return list_rep
#end def list_rep
def from_list_rep(self,list_rep):
ae,nl = list_rep[0]
self.aetol = ae
self.nltol = nl
#end def from_list_rep
#end class Tol
class Configs(Section):
variables = obj()
def list_rep(self):
list_rep = [[len(self)]]
for index in sorted(self.keys()):
v = self[index]
list_rep.append([v.nlm,v.occ,v.eig])
#end for
return list_rep
#end def list_rep
def from_list_rep(self,list_rep):
nconfigs = list_rep[0][0]
orbs = list_rep[1:]
orbs_per_config = len(orbs)/nconfigs
n=1
for nlm,occ,eig in orbs:
if n%orbs_per_config==0:
self.append(obj(nlm=nlm,occ=occ,eig=eig))
#end if
n+=1
#end for
#end def from_list_rep
#end class Configs
class KBdesign(Section):
variables = obj(local=str,boxes=obj)
def list_rep(self):
list_rep = [[self.local]]
if 'boxes' in self:
list_rep.append([len(self.boxes)])
for index in sorted(self.boxes.keys()):
v = self.boxes[index]
list_rep.append([v.units,v.rmin,v.rmax,v.depth])
#end for
#end if
return list_rep
#end def list_rep
def from_list_rep(self,list_rep):
self.local = list_rep[0][0]
if len(list_rep)>1:
boxes = obj()
for units,rmin,rmax,depth in list_rep[2:]:
boxes.append(obj(units=units,rmin=rmin,rmax=rmax,depth=depth))
#end for
self.boxes = boxes
#end if
#end def from_list_rep
#end class KBdesign
class Loginfo(Section):
variables = obj(config=int,radius=float,Emin=float,Emax=float)
def list_rep(self):
list_rep = [[self.config],[self.radius,self.Emin,self.Emax]]
return list_rep
#end def list_rep
def from_list_rep(self,list_rep):
self.config = list_rep[0][0]
radius,Emin,Emax = list_rep[1]
self.radius = radius
self.Emin = Emin
self.Emax = Emax
#end def from_list_rep
#end class Loginfo
class OpiumInput(SimulationInput):
method_map = obj(o='optimized',k='kerker',t='tm')
section_map = obj(atom=Atom,pseudo=Pseudo,optinfo=Optinfo,xc=XC,pcc=Pcc,relativity=Relativity,grid=Grid,tol=Tol,configs=Configs,kbdesign=KBdesign,loginfo=Loginfo)
section_order = 'Atom Pseudo Optinfo XC Pcc Relativity Grid Tol Configs KBdesign Loginfo'.split()
def __init__(self,filepath=None,
atom = None,
pseudo = None,
optinfo = None,
xc = None,
pcc = None,
relativity = None,
grid = None,
tol = None,
configs = None,
kbdesign = None,
loginfo = None
):
if filepath!=None:
self.read(filepath)
else:
inputs = obj(atom=atom,pseudo=pseudo,optinfo=optinfo,xc=xc,pcc=pcc,relativity=relativity,grid=grid,tol=tol,configs=configs,kbdesign=kbdesign,loginfo=loginfo)
for secname,input in inputs.iteritems():
section_type = self.section_map[secname]
if isinstance(input,section_type):
self[secname]=input
elif isinstance(input,(dict,obj)):
self[secname] = section_type(**input)
elif isinstance(input,(list,tuple)):
self[secname] = section_type(list(input))
elif input!=None:
self.error('invalid type encountered for {0} input\nexpected types are dict, obj, list, or tuple\nvalue provided: {1}'.format(secname,input))
#end if
#end for
#end if
#end def __init__
def read_contents(self,contents):
lines = contents.splitlines()
sections = obj()
sec=None
secname=None
for line in lines:
ls = line.strip()
if len(ls)>0 and not ls.startswith('#'):
if ls.startswith('[') and ls.endswith(']'):
prevsecname = secname
secname = ls.strip('[]').lower()
if not secname in self.section_map:
self.error('cannot read file\n{0} is not a valid section name\nvalid options are: {1}'.format(secname,self.section_order))
#end if
if sec!=None:
sections[prevsecname]=sec
#end if
sec=''
elif sec is None:
self.error('invalid text encountered: '+line)
else:
sec+=ls+'\n'
#end if
#end if
#end for
if sec!=None and secname!=None and not secname in sections:
sections[secname]=sec
#end if
for secname,sectext in sections.iteritems():
section = self.section_map[secname]()
section.read(sectext)
self[secname] = section
#end for
#end def read_contents
def write_contents(self):
contents = ''
for secname in self.section_order:
secname = secname.lower()
if secname in self:
contents += self[secname].write()
#end if
#end for
return contents
#end def write_contents
#end class OpiumInput
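# Two hypothetical ways to build an OpiumInput, based on the __init__ above:
#
#   oi = OpiumInput(filepath='Si.param')     # parse an existing input file
#
#   oi = OpiumInput(                         # or build sections directly
#       xc   = dict(functional='lda'),
#       grid = dict(np=1200, a=0.0001, b=0.013),
#   )
#
# The section values here are illustrative only; each dict is forwarded to the
# matching Section subclass constructor via section_map.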
def generate_opium_input(selector,**kwargs):
if selector=='basic':
return generate_basic_opium_input(**kwargs)
elif selector=='full':
return generate_full_opium_input(**kwargs)
else:
OpiumInput.class_error('selection '+str(selector)+' has not been implemented for opium input generation')
#end if
#end def generate_opium_input
def generate_basic_opium_input(
):
oi = None
return oi
#end def generate_basic_opium_input
def generate_full_opium_input(
atom = None,
pseudo = None,
optinfo = None,
xc = None,
pcc = None,
relativity = None,
grid = None,
tol = None,
configs = None,
kbdesign = None,
loginfo = None
):
oi = OpiumInput(
atom = atom ,
pseudo = pseudo ,
optinfo = optinfo ,
xc = xc ,
pcc = pcc ,
relativity = relativity,
grid = grid ,
tol = tol ,
configs = configs ,
kbdesign = kbdesign ,
loginfo = loginfo
)
return oi
#end def generate_full_opium_input
class OpiumAnalyzer(SimulationAnalyzer):
def __init__(self,arg0=None):
# optional
# only necessary if you want to use results from output files
# to inform the inputs of subsequent simulations
# or if you want to have a general purpose class to scrape
# and process simulation data
# below is a reasonable default implementation
# if you don't want to implement it, just uncomment the following line
#return
self.path = None
self.input = None
infile = None
if isinstance(arg0,Simulation):
sim = arg0
infile = os.path.join(sim.locdir,sim.infile)
else:
infile = arg0
#end if
if infile!=None:
self.path = os.path.dirname(infile)
self.input = OpiumInput(infile)
#end if
#end def __init__
def analyze(self):
# optional
# only necessary if you want to use results from output files
# to inform the inputs of subsequent simulations
# or if you want to have a general purpose class to scrape
# and process simulation data
# if you don't want to implement it, no action is required
None
#end def analyze
#end class OpiumAnalyzer
class Opium(Simulation):
input_type = OpiumInput
analyzer_type = OpiumAnalyzer
generic_identifier = 'opium'
application = 'opium_exe' #replace with default name of opium executable
application_properties = set(['serial','mpi'])
application_results = set(['orbitals']) #what opium produces that other simulations can use
def check_result(self,result_name,sim):
# optional
# only necessary if another simulation depends on this one
# e.g.
# other_sim.depends(opium_sim,'pseudopotential') or similar
# if you don't want to implement it, uncomment the line below
#return False
calculating_result = False
input = self.input # an OpiumInput object
# check the input to see if result is being calculated
# (e.g. result_name='pseudopotential')
return calculating_result
#end def check_result
def get_result(self,result_name,sim):
# optional
# only necessary if another simulation depends on this one
# e.g.
# other_sim.depends(opium_sim,'pseudopotential') or similar
# if you don't want to implement it, uncomment the line below
#self.not_implemented()
result = obj()
input = self.input
#analyzer = self.load_analyzer_image()
# package information about a result/product in the result object
# for example, if pseudopotentials are requested,
# the path to the pseudopotential file might be provided:
# result.pseudopotential_file = '/path/to/pseudopotential/file'
return result
#end def get_result
def app_command(self):
# required
# specify command line arguments to the executable, such as the input file
# e.g. command_line_args = ' '+self.infile
command_line_args = ''
return self.app_name + command_line_args
#end def app_command
def check_sim_status(self):
# required
# read output/error files to check whether simulation has
# completed successfully
# one could also check whether all output files exist
output = open(os.path.join(self.locdir,self.outfile),'r').read()
errors = open(os.path.join(self.locdir,self.errfile),'r').read()
success = False
# check output and errors
# set success=True if run completed successfully
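# a minimal sketch (assumption: an empty error stream and non-empty output
# indicate success; the exact completion marker depends on the Opium version):
#success = len(errors.strip())==0 and len(output)>0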
self.finished = success and self.job.finished
#end def check_sim_status
def get_output_files(self):
# optional
# if provided, the listed output files will be copied to the results directory
# if you don't want to implement it, no action is required
output_files = []
return output_files
#end def get_output_files
#end class Opium
def generate_opium(**kwargs):
has_input = 'input_type' in kwargs
if has_input:
input_type = kwargs['input_type']
del kwargs['input_type']
else:
input_type = 'basic'
#end if
kw = set(kwargs.keys())
sim_kw = kw & Simulation.allowed_inputs
inp_kw = (kw - sim_kw)
sim_args = dict()
inp_args = dict()
for kw in sim_kw:
sim_args[kw] = kwargs[kw]
#end for
for kw in inp_kw:
inp_args[kw] = kwargs[kw]
#end for
if len(inp_args)>0:
sim_args['input'] = generate_opium_input(input_type,**inp_args)
#end if
opium = Opium(**sim_args)
return opium
#end def generate_opium
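# Illustrative usage sketch (all values are hypothetical; keywords that are
# not in Simulation.allowed_inputs are forwarded to generate_opium_input):
#
#   sim = generate_opium(
#       identifier = 'opium_test',   # assumed Simulation keyword
#       path       = 'opium_runs',   # assumed Simulation keyword
#       input_type = 'full',
#       atom       = obj(...),       # forwarded to the atom section class
#       pseudo     = obj(...),       # forwarded to the pseudo section class
#       )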
|
|
# Copyright ClusterHQ Inc. See LICENSE file for details.
"""
Low-level logic for a certificate authority.
We have three categories of certificates:
1. Control service, used by AMP and REST API servers. Needs to be
validated over HTTP by 3rd party clients, as well as by AMP clients.
2. Node agents, used by AMP clients in agents. Needs to encode a node
UUID, and is validated by the control service.
3. API clients. Used by HTTP API clients to authenticate, so the control
service REST API needs to validate them.
None of these should be able to impersonate the others. We therefore use
the following Distinguished Name scheme:
1. Control service: common name is "control-service", subjectAltName is
administrator-specifiable DNS hostname, to support standard HTTPS
client authentication.
2. Node agents: common name is "node-<uuid>".
3. API clients: Common name is set to "user-<username>", and
extendedKeyUsage is set to "clientAuth" (under no circumstances should
a client certificate ever be a server.)
It would be nice to use a custom x509v3 extension rather than abusing the
common name, but that will have to wait for some future revision.
"""
import datetime
import os
from uuid import uuid4, UUID
from ipaddr import IPAddress
from OpenSSL import crypto
from pyrsistent import PRecord, field
from twisted.internet.ssl import (
DistinguishedName, KeyPair, Certificate, CertificateOptions,
PrivateCertificate,
)
EXPIRY_20_YEARS = 60 * 60 * 24 * 365 * 20
AUTHORITY_CERTIFICATE_FILENAME = b"cluster.crt"
AUTHORITY_KEY_FILENAME = b"cluster.key"
class CertificateAlreadyExistsError(Exception):
"""
Error raised when a certificate file already exists.
"""
class KeyAlreadyExistsError(Exception):
"""
Error raised when a keypair file already exists.
"""
class PathError(Exception):
"""
Error raised when the directory for certificate files does not exist.
"""
def __init__(self, message, filename=None, code=None, failure=None):
super(PathError, self).__init__(message)
self.filename = filename
self.code = code
self.failure = failure
def __str__(self):
error = self.message
if self.failure:
error = error + b" " + self.failure
if self.filename:
error = error + b" " + self.filename
return error
class ComparableKeyPair(object):
"""
KeyPair with added functionality for comparison and signing a request
object with additional extensions for generating a self-signed CA.
Written in Twisted-style as these changes should be upstreamed to
``twisted.internet.ssl.KeyPair``
https://twistedmatrix.com/trac/ticket/7847
"""
def __init__(self, keypair):
self.keypair = keypair
def __eq__(self, other):
if isinstance(other, ComparableKeyPair):
return self.keypair.dump() == other.keypair.dump()
return False
def __ne__(self, other):
return not self.__eq__(other)
def create_certificate_authority(keypair, dn, request, serial,
validity_period, digest, start=None):
"""
Sign a CertificateRequest with extensions for use as a CA certificate.
See
https://www.openssl.org/docs/apps/x509v3_config.html#Basic-Constraints
for further information.
This code is based on ``twisted.internet.ssl.KeyPair.signRequestObject``
:param KeyPair keypair: The private/public key pair.
:param DistinguishedName dn: The ``DistinguishedName`` for the
certificate.
:param CertificateRequest request: The signing request object.
:param int serial: The certificate serial number.
:param int validity_period: The number of seconds from ``start`` after
which the certificate expires.
:param bytes digest: The digest algorithm to use.
:param datetime start: The datetime from which the certificate is valid.
Defaults to current date and time.
"""
if start is None:
start = datetime.datetime.utcnow()
expire = start + datetime.timedelta(seconds=validity_period)
start = start.strftime(b"%Y%m%d%H%M%SZ")
expire = expire.strftime(b"%Y%m%d%H%M%SZ")
req = request.original
cert = crypto.X509()
cert.set_subject(req.get_subject())
cert.set_pubkey(req.get_pubkey())
cert.set_notBefore(start)
cert.set_notAfter(expire)
cert.set_serial_number(serial)
cert.add_extensions([
crypto.X509Extension("basicConstraints", True,
"CA:TRUE, pathlen:0"),
crypto.X509Extension("keyUsage", True,
"keyCertSign, cRLSign"),
crypto.X509Extension("subjectKeyIdentifier", False, "hash",
subject=cert),
])
cert.add_extensions([
crypto.X509Extension(
"authorityKeyIdentifier", False,
"keyid:always", issuer=cert
)
])
cert.set_issuer(cert.get_subject())
cert.sign(keypair.original, digest)
return Certificate(cert)
def sign_certificate_request(keypair, dn, request, serial,
validity_period, digest, start=None,
additional_extensions=()):
"""
Sign a CertificateRequest and return a Certificate.
This code is based on ``twisted.internet.ssl.KeyPair.signRequestObject``
:param KeyPair keypair: The private/public key pair.
:param X509Name dn: The distinguished name for the
certificate.
:param CertificateRequest request: The signing request object.
:param int serial: The certificate serial number.
:param int validity_period: The number of seconds from ``start`` after
which the certificate expires.
:param bytes digest: The digest algorithm to use.
:param datetime start: The datetime from which the certificate is valid.
Defaults to current date and time.
:param additional_extensions: A sequence of additional
``X509Extension`` objects to add to the certificate.
"""
if start is None:
start = datetime.datetime.utcnow()
expire = start + datetime.timedelta(seconds=validity_period)
start = start.strftime(b"%Y%m%d%H%M%SZ")
expire = expire.strftime(b"%Y%m%d%H%M%SZ")
req = request.original
cert = crypto.X509()
cert.set_issuer(dn)
cert.set_subject(req.get_subject())
cert.set_pubkey(req.get_pubkey())
cert.set_notBefore(start)
cert.set_notAfter(expire)
cert.set_serial_number(serial)
cert.add_extensions(additional_extensions)
cert.sign(keypair.original, digest)
return Certificate(cert)
def flocker_keypair():
"""
Create a new 4096-bit RSA key pair.
"""
return ComparableKeyPair(
keypair=KeyPair.generate(crypto.TYPE_RSA, size=4096)
)
def load_certificate_from_path(path, key_filename, cert_filename):
"""
Load a certificate and keypair from a specified path.
:param FilePath path: Directory where certificate and key files
are stored.
:param bytes key_filename: The file name of the private key.
:param bytes cert_filename: The file name of the certificate.
:return: A ``tuple`` containing the loaded key and certificate
instances.
"""
certPath = path.child(cert_filename)
keyPath = path.child(key_filename)
try:
certFile = certPath.open()
except IOError as e:
code, failure = e
raise PathError(
b"Certificate file could not be opened.",
e.filename, code, failure
)
try:
keyFile = keyPath.open()
except IOError as e:
code, failure = e
raise PathError(
b"Private key file could not be opened.",
e.filename, code, failure
)
certificate = Certificate.load(
certFile.read(), format=crypto.FILETYPE_PEM)
keypair = ComparableKeyPair(
keypair=KeyPair.load(keyFile.read(), format=crypto.FILETYPE_PEM)
)
return (keypair, certificate)
class FlockerCredential(PRecord):
"""
Flocker credentials record, comprising a certificate and
public/private key pair.
:ivar FilePath path: A ``FilePath`` representing the absolute path of
a directory containing the certificate and key files.
:ivar Certificate certificate: A signed certificate.
:ivar ComparableKeyPair keypair: A private/public keypair.
"""
path = field(mandatory=True)
certificate = field(mandatory=True)
keypair = field(mandatory=True)
def write_credential_files(self, key_filename, certificate_filename):
"""
Write PEM encoded certificate and private key files for this credential
instance.
:param bytes key_filename: The name of the private key file to write,
e.g. "cluster.key"
:param bytes certificate_filename: The name of the certificate file to
write, e.g. "cluster.crt"
"""
key_path = self.path.child(key_filename)
cert_path = self.path.child(certificate_filename)
original_umask = os.umask(0)
mode = 0o600
try:
with os.fdopen(os.open(
cert_path.path, os.O_WRONLY | os.O_CREAT | os.O_EXCL, mode
), b'w') as cert_file:
cert_file.write(self.certificate.dumpPEM())
try:
with os.fdopen(os.open(
key_path.path, os.O_WRONLY | os.O_CREAT | os.O_EXCL, mode
), b'w') as key_file:
key_file.write(
self.keypair.keypair.dump(crypto.FILETYPE_PEM))
except (IOError, OSError) as e:
code, failure = e
raise PathError(
b"Unable to write private key file.",
e.filename, code, failure
)
except (IOError, OSError) as e:
code, failure = e
raise PathError(
b"Unable to write certificate file.",
e.filename, code, failure
)
finally:
os.umask(original_umask)
def private_certificate(self):
"""
Combine private key and certificate into a ``PrivateCertificate``.
:return: ``PrivateCertificate`` instance.
"""
return PrivateCertificate.fromCertificateAndKeyPair(
self.certificate, self.keypair.keypair)
class UserCredential(PRecord):
"""
A certificate for an API user, signed by a supplied certificate
authority.
:ivar FlockerCredential credential: The certificate and key pair
credential object.
:ivar bytes username: A username.
"""
credential = field(mandatory=True, type=FlockerCredential)
username = field(mandatory=True, type=unicode)
@classmethod
def from_path(cls, path, username):
"""
Load a user certificate from a specified path.
:param FilePath path: Directory where user certificate and key
files are stored.
:param unicode username: The username.
"""
key_filename = username + u".key"
cert_filename = username + u".crt"
keypair, certificate = load_certificate_from_path(
path, key_filename, cert_filename
)
credential = FlockerCredential(
path=path, keypair=keypair, certificate=certificate)
return cls(credential=credential, username=username)
@classmethod
def initialize(cls, output_path, authority, username, begin=None):
"""
Generate a certificate signed by the supplied root certificate.
:param FilePath output_path: Directory where the certificate will be
written.
:param CertificateAuthority authority: The certificate authority with
which this certificate will be signed.
:param unicode username: The username to be included in
the certificate.
:param datetime begin: The datetime from which the generated
certificate should be valid.
"""
key_filename = username + u".key"
cert_filename = username + u".crt"
# The common name for the user certificate.
name = u"user-" + username
# The organizational unit is set to the organizational unit of the
# authority, which in our case is the cluster UUID.
organizational_unit = authority.organizational_unit
dn = DistinguishedName(
commonName=name, organizationalUnitName=organizational_unit
)
keypair = flocker_keypair()
request = keypair.keypair.requestObject(dn)
serial = os.urandom(16).encode(b"hex")
serial = int(serial, 16)
cert = sign_certificate_request(
authority.credential.keypair.keypair,
authority.credential.certificate.original.get_subject(), request,
serial, EXPIRY_20_YEARS, b'sha256', start=begin,
additional_extensions=[crypto.X509Extension(
b"extendedKeyUsage", False, b"clientAuth")])
credential = FlockerCredential(
path=output_path, keypair=keypair, certificate=cert
)
credential.write_credential_files(key_filename, cert_filename)
instance = cls(credential=credential, username=username)
return instance
class NodeCredential(PRecord):
"""
A certificate for a node agent, signed by a supplied certificate
authority.
:ivar FlockerCredential credential: The certificate and key pair
credential object.
:ivar UUID uuid: A unique identifier for the node this certificate
identifies, in the form of a version 4 UUID.
:ivar UUID cluster_uuid: A unique identifier for the cluster this
certificate identifies, in the form of a version 4 UUID.
"""
credential = field(mandatory=True)
# The prefix to the UUID we store in the common name:
_UUID_PREFIX = b"node-"
@classmethod
def from_path(cls, path, uuid):
"""
Load a node certificate from a specified path.
:param FilePath path: Directory where user certificate and key
files are stored.
:param bytes uuid: The UUID of the node.
"""
key_filename = b"{uuid}.key".format(uuid=uuid)
cert_filename = b"{uuid}.crt".format(uuid=uuid)
keypair, certificate = load_certificate_from_path(
path, key_filename, cert_filename
)
credential = FlockerCredential(
path=path, keypair=keypair, certificate=certificate)
return cls(credential=credential)
@classmethod
def initialize(cls, path, authority, begin=None, uuid=None):
"""
Generate a certificate signed by the supplied root certificate.
:param FilePath path: Directory where the certificate will be stored.
:param CertificateAuthority authority: The certificate authority with
which this certificate will be signed.
:param datetime begin: The datetime from which the generated
certificate should be valid.
:param bytes uuid: The UUID to be included in this certificate.
Generated if not supplied.
"""
if uuid is None:
uuid = bytes(uuid4())
key_filename = b"{uuid}.key".format(uuid=uuid)
cert_filename = b"{uuid}.crt".format(uuid=uuid)
# The common name for the node certificate.
name = b"{prefix}{uuid}".format(prefix=cls._UUID_PREFIX, uuid=uuid)
# The organizational unit is set to the organizational unit of the
# authority, which in our case is the cluster's UUID.
organizational_unit = authority.organizational_unit
dn = DistinguishedName(
commonName=name, organizationalUnitName=organizational_unit
)
keypair = flocker_keypair()
request = keypair.keypair.requestObject(dn)
serial = os.urandom(16).encode(b"hex")
serial = int(serial, 16)
cert = sign_certificate_request(
authority.credential.keypair.keypair,
authority.credential.certificate.original.get_subject(), request,
serial, EXPIRY_20_YEARS, 'sha256', start=begin)
credential = FlockerCredential(
path=path, keypair=keypair, certificate=cert)
credential.write_credential_files(
key_filename, cert_filename)
instance = cls(credential=credential)
return instance
@property
def uuid(self):
common_name = self.credential.certificate.getSubject().CN
return UUID(hex=common_name[len(self._UUID_PREFIX):])
@property
def cluster_uuid(self):
return UUID(hex=self.credential.certificate.getSubject().OU)
class ControlCredential(PRecord):
"""
A certificate and key pair for a control service, signed by a supplied
certificate authority.
:ivar FlockerCredential credential: The certificate and key pair
credential object.
"""
credential = field(mandatory=True, type=FlockerCredential)
@classmethod
def from_path(cls, path, hostname):
"""
Load a control service certificate and key from the supplied path.
:param FilePath path: Directory where control service certificate
and key files are stored.
:param bytes hostname: The hostname of the control service certificate.
"""
keypair, certificate = load_certificate_from_path(
path, b"control-{}.key".format(hostname),
b"control-{}.crt".format(hostname)
)
credential = FlockerCredential(
path=path, keypair=keypair, certificate=certificate)
return cls(credential=credential)
@classmethod
def initialize(cls, path, authority, hostname, begin=None):
"""
Generate a certificate signed by the supplied root certificate.
:param FilePath path: Directory where the certificate will be stored.
:param RootCredential authority: The certificate authority with
which this certificate will be signed.
:param datetime begin: The datetime from which the generated
certificate should be valid.
:param bytes hostname: The hostname of the node where the control
service will be running.
"""
# The common name for the control service certificate.
# This is used to distinguish between control service and node
# certificates.
name = b"control-service"
# The organizational unit is set to the organizational_unit of the
# authority, which in our case is the cluster UUID.
organizational_unit = authority.organizational_unit
dn = DistinguishedName(
commonName=name, organizationalUnitName=organizational_unit
)
keypair = flocker_keypair()
request = keypair.keypair.requestObject(dn)
serial = os.urandom(16).encode(b"hex")
serial = int(serial, 16)
try:
IPAddress(hostname)
except ValueError:
alt_name = b"DNS:" + hostname
else:
alt_name = b"IP:" + hostname
cert = sign_certificate_request(
authority.credential.keypair.keypair,
authority.credential.certificate.original.get_subject(), request,
serial, EXPIRY_20_YEARS, 'sha256', start=begin,
additional_extensions=[
crypto.X509Extension(
b"subjectAltName", False, alt_name)
])
credential = FlockerCredential(
path=path, keypair=keypair, certificate=cert)
credential.write_credential_files(
b"control-{}.key".format(hostname),
b"control-{}.crt".format(hostname))
instance = cls(credential=credential)
return instance
def _default_options(self, trust_root):
"""
Construct a ``CertificateOptions`` that exposes this credential's
certificate and keypair.
:param trust_root: Trust root to pass to ``CertificateOptions``.
:return: ``CertificateOptions`` instance with CA validation
configured.
"""
key = self.credential.keypair.keypair.original
certificate = self.credential.certificate.original
return CertificateOptions(
privateKey=key, certificate=certificate, trustRoot=trust_root)
class RootCredential(PRecord):
"""
A credential representing a self-signed certificate authority.
:ivar FlockerCredential credential: The certificate and key pair
credential object.
"""
credential = field(mandatory=True)
@property
def common_name(self):
return self.credential.certificate.getSubject().CN
@property
def organizational_unit(self):
return self.credential.certificate.getSubject().OU
@classmethod
def from_path(cls, path):
try:
keypair, certificate = load_certificate_from_path(
path, AUTHORITY_KEY_FILENAME, AUTHORITY_CERTIFICATE_FILENAME
)
except PathError as e:
# Re-raise, but with a more specific message.
error = b"Unable to load certificate authority file."
if e.code == 2:
error = error + (b" Please run `flocker-ca initialize` to "
b"generate a new certificate authority.")
raise PathError(error, e.filename, e.code, e.failure)
credential = FlockerCredential(
path=path, keypair=keypair, certificate=certificate)
return cls(credential=credential)
@classmethod
def initialize(cls, path, name, begin=None):
"""
Generate new private/public key pair and self-sign, then store in
given directory.
:param FilePath path: Directory where private key and certificate are
stored.
:param bytes name: The name of the cluster. This is used as the
subject and issuer identities of the generated root certificate.
:param datetime begin: The datetime from which the generated
certificate should be valid.
:return RootCredential: Initialized certificate authority.
"""
dn = DistinguishedName(commonName=name,
organizationalUnitName=bytes(uuid4()))
keypair = flocker_keypair()
request = keypair.keypair.requestObject(dn)
serial = os.urandom(16).encode(b"hex")
serial = int(serial, 16)
certificate = create_certificate_authority(
keypair.keypair, dn, request, serial,
EXPIRY_20_YEARS, b'sha256', start=begin
)
credential = FlockerCredential(
path=path, keypair=keypair, certificate=certificate)
credential.write_credential_files(
AUTHORITY_KEY_FILENAME, AUTHORITY_CERTIFICATE_FILENAME)
instance = cls(credential=credential)
return instance
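# Illustrative usage sketch (directory, cluster name, hostname and username are
# hypothetical; FilePath is twisted.python.filepath.FilePath):
#
#   ca = RootCredential.initialize(FilePath(b"/etc/flocker"), b"mycluster")
#   control = ControlCredential.initialize(
#       FilePath(b"/etc/flocker"), ca, b"control.example.com")
#   node = NodeCredential.initialize(FilePath(b"/etc/flocker"), ca)
#   user = UserCredential.initialize(FilePath(b"/etc/flocker"), ca, u"alice")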
|
|
# Copyright (c) 2013 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import mock
import webob.exc
from neutron.api.v2 import attributes
from neutron import context
from neutron.plugins.nec.common import exceptions as nexc
from neutron.plugins.nec.extensions import packetfilter as ext_pf
from neutron.tests.unit.nec import test_nec_plugin
from neutron.tests.unit import test_db_plugin as test_plugin
NEC_PLUGIN_PF_INI = """
[DEFAULT]
api_extensions_path = neutron/plugins/nec/extensions
[OFC]
driver = neutron.tests.unit.nec.stub_ofc_driver.StubOFCDriver
enable_packet_filter = True
"""
class PacketfilterExtensionManager(ext_pf.Packetfilter):
@classmethod
def get_resources(cls):
# Add the resources to the global attribute map
# This is done here as the setup process won't
# initialize the main API router which extends
# the global attribute map
attributes.RESOURCE_ATTRIBUTE_MAP.update(
{'packet_filters': ext_pf.PACKET_FILTER_ATTR_MAP})
return super(PacketfilterExtensionManager, cls).get_resources()
class TestNecPluginPacketFilterBase(test_nec_plugin.NecPluginV2TestCase):
_nec_ini = NEC_PLUGIN_PF_INI
def setUp(self):
ext_mgr = PacketfilterExtensionManager()
super(TestNecPluginPacketFilterBase, self).setUp(ext_mgr=ext_mgr)
def _create_packet_filter(self, fmt, net_id, expected_res_status=None,
arg_list=None, **kwargs):
data = {'packet_filter': {'network_id': net_id,
'tenant_id': self._tenant_id,
'priority': '1',
'action': 'ALLOW'}}
for arg in (('name', 'admin_state_up', 'action', 'priority', 'in_port',
'src_mac', 'dst_mac', 'eth_type', 'src_cidr', 'dst_cidr',
'protocol', 'src_port', 'dst_port') +
(arg_list or ())):
# Only include the arg if it was supplied in kwargs
if arg in kwargs:
data['packet_filter'][arg] = kwargs[arg]
pf_req = self.new_create_request('packet_filters', data, fmt)
if (kwargs.get('set_context') and 'tenant_id' in kwargs):
# create a specific auth context for this request
pf_req.environ['neutron.context'] = context.Context(
'', kwargs['tenant_id'])
pf_res = pf_req.get_response(self.ext_api)
if expected_res_status:
self.assertEqual(pf_res.status_int, expected_res_status)
return pf_res
def _make_packet_filter(self, fmt, net_id, expected_res_status=None,
**kwargs):
res = self._create_packet_filter(fmt, net_id, expected_res_status,
**kwargs)
# Things can go wrong - raise HTTP exc with res code only
# so it can be caught by unit tests
if res.status_int >= 400:
raise webob.exc.HTTPClientError(code=res.status_int)
return self.deserialize(fmt, res)
@contextlib.contextmanager
def packet_filter_on_network(self, network=None, fmt=None, do_delete=True,
**kwargs):
with test_plugin.optional_ctx(network, self.network) as network_to_use:
net_id = network_to_use['network']['id']
pf = self._make_packet_filter(fmt or self.fmt, net_id, **kwargs)
yield pf
if do_delete:
self._delete('packet_filters', pf['packet_filter']['id'])
if not network:
self._delete('networks', network_to_use['network']['id'])
@contextlib.contextmanager
def packet_filter_on_port(self, port=None, fmt=None, do_delete=True,
set_portinfo=True, **kwargs):
with test_plugin.optional_ctx(port, self.port) as port_to_use:
net_id = port_to_use['port']['network_id']
port_id = port_to_use['port']['id']
if set_portinfo:
portinfo = {'id': port_id,
'port_no': kwargs.get('port_no', 123)}
kw = {'added': [portinfo]}
if 'datapath_id' in kwargs:
kw['datapath_id'] = kwargs['datapath_id']
self.rpcapi_update_ports(**kw)
kwargs['in_port'] = port_id
pf = self._make_packet_filter(fmt or self.fmt, net_id, **kwargs)
self.assertEqual(port_id, pf['packet_filter']['in_port'])
yield pf
if do_delete:
self._delete('packet_filters', pf['packet_filter']['id'])
class TestNecPluginPacketFilter(TestNecPluginPacketFilterBase):
def setUp(self):
super(TestNecPluginPacketFilter, self).setUp()
# Remove attributes explicitly from the mock object to check
# the case where there are no update_filter and validate_* methods.
del self.ofc.driver.update_filter
del self.ofc.driver.validate_filter_create
del self.ofc.driver.validate_filter_update
def test_list_packet_filters(self):
self._list('packet_filters')
def test_create_pf_on_network_no_ofc_creation(self):
with self.packet_filter_on_network(admin_state_up=False) as pf:
self.assertEqual(pf['packet_filter']['status'], 'DOWN')
self.assertFalse(self.ofc.create_ofc_packet_filter.called)
self.assertFalse(self.ofc.delete_ofc_packet_filter.called)
def test_create_pf_on_port_no_ofc_creation(self):
with self.packet_filter_on_port(admin_state_up=False,
set_portinfo=False) as pf:
self.assertEqual(pf['packet_filter']['status'], 'DOWN')
self.assertFalse(self.ofc.create_ofc_packet_filter.called)
self.assertFalse(self.ofc.delete_ofc_packet_filter.called)
def test_create_pf_on_network_with_ofc_creation(self):
with self.packet_filter_on_network() as pf:
pf_id = pf['packet_filter']['id']
self.assertEqual(pf['packet_filter']['status'], 'ACTIVE')
ctx = mock.ANY
pf_dict = mock.ANY
expected = [
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.create_ofc_packet_filter(ctx, pf_id, pf_dict),
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.delete_ofc_packet_filter(ctx, pf_id),
]
self.ofc.assert_has_calls(expected)
self.assertEqual(self.ofc.create_ofc_packet_filter.call_count, 1)
self.assertEqual(self.ofc.delete_ofc_packet_filter.call_count, 1)
def test_create_pf_on_port_with_ofc_creation(self):
with self.packet_filter_on_port() as pf:
pf_id = pf['packet_filter']['id']
self.assertEqual(pf['packet_filter']['status'], 'ACTIVE')
ctx = mock.ANY
pf_dict = mock.ANY
expected = [
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.create_ofc_packet_filter(ctx, pf_id, pf_dict),
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.delete_ofc_packet_filter(ctx, pf_id),
]
self.ofc.assert_has_calls(expected)
self.assertEqual(self.ofc.create_ofc_packet_filter.call_count, 1)
self.assertEqual(self.ofc.delete_ofc_packet_filter.call_count, 1)
def _test_create_pf_with_protocol(self, protocol, expected_eth_type):
with self.packet_filter_on_network(protocol=protocol) as pf:
pf_data = pf['packet_filter']
self.assertEqual(protocol, pf_data['protocol'])
self.assertEqual(expected_eth_type, pf_data['eth_type'])
def test_create_pf_with_protocol_tcp(self):
self._test_create_pf_with_protocol('TCP', 0x800)
def test_create_pf_with_protocol_udp(self):
self._test_create_pf_with_protocol('UDP', 0x800)
def test_create_pf_with_protocol_icmp(self):
self._test_create_pf_with_protocol('ICMP', 0x800)
def test_create_pf_with_protocol_arp(self):
self._test_create_pf_with_protocol('ARP', 0x806)
def test_create_pf_with_inconsistent_protocol_and_eth_type(self):
with self.packet_filter_on_network(protocol='TCP') as pf:
pf_data = pf['packet_filter']
pf_id = pf_data['id']
self.assertEqual('TCP', pf_data['protocol'])
self.assertEqual(0x800, pf_data['eth_type'])
data = {'packet_filter': {'eth_type': 0x806}}
self._update('packet_filters', pf_id, data,
expected_code=409)
def test_create_pf_with_invalid_priority(self):
with self.network() as net:
net_id = net['network']['id']
kwargs = {'priority': 'high'}
self._create_packet_filter(self.fmt, net_id,
webob.exc.HTTPBadRequest.code,
**kwargs)
self.assertFalse(self.ofc.create_ofc_packet_filter.called)
def test_create_pf_with_ofc_creation_failure(self):
self.ofc.set_raise_exc('create_ofc_packet_filter',
nexc.OFCException(reason='hoge'))
with self.packet_filter_on_network() as pf:
pf_id = pf['packet_filter']['id']
pf_ref = self._show('packet_filters', pf_id)
self.assertEqual(pf_ref['packet_filter']['status'], 'ERROR')
self.ofc.set_raise_exc('create_ofc_packet_filter', None)
# Retry activating the packet_filter (even if there is no change).
data = {'packet_filter': {}}
self._update('packet_filters', pf_id, data)
pf_ref = self._show('packet_filters', pf_id)
self.assertEqual(pf_ref['packet_filter']['status'], 'ACTIVE')
ctx = mock.ANY
pf_dict = mock.ANY
expected = [
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.create_ofc_packet_filter(ctx, pf_id, pf_dict),
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.create_ofc_packet_filter(ctx, pf_id, pf_dict),
]
self.ofc.assert_has_calls(expected)
self.assertEqual(self.ofc.create_ofc_packet_filter.call_count, 2)
def test_show_pf_on_network(self):
kwargs = {
'name': 'test-pf-net',
'admin_state_up': False,
'action': 'DENY',
'priority': '102',
'src_mac': '00:11:22:33:44:55',
'dst_mac': '66:77:88:99:aa:bb',
'eth_type': '2048',
'src_cidr': '192.168.1.0/24',
'dst_cidr': '192.168.2.0/24',
'protocol': 'TCP',
'src_port': '35001',
'dst_port': '22'
}
with self.packet_filter_on_network(**kwargs) as pf:
pf_id = pf['packet_filter']['id']
pf_ref = self._show('packet_filters', pf_id)
# convert string to int.
kwargs.update({'priority': 102, 'eth_type': 2048,
'src_port': 35001, 'dst_port': 22,
'in_port': None})
self.assertEqual(pf_id, pf_ref['packet_filter']['id'])
for key in kwargs:
self.assertEqual(kwargs[key], pf_ref['packet_filter'][key])
def test_show_pf_on_network_with_wildcards(self):
kwargs = {
'name': 'test-pf-net',
'admin_state_up': False,
'action': 'DENY',
'priority': '102',
}
with self.packet_filter_on_network(**kwargs) as pf:
pf_id = pf['packet_filter']['id']
pf_ref = self._show('packet_filters', pf_id)
# convert string to int.
kwargs.update({'priority': 102,
'in_port': None,
'src_mac': None,
'dst_mac': None,
'eth_type': None,
'src_cidr': None,
'dst_cidr': None,
'protocol': None,
'src_port': None,
'dst_port': None})
self.assertEqual(pf_id, pf_ref['packet_filter']['id'])
for key in kwargs:
self.assertEqual(kwargs[key], pf_ref['packet_filter'][key])
def test_show_pf_on_port(self):
kwargs = {
'name': 'test-pf-port',
'admin_state_up': False,
'action': 'DENY',
'priority': '0o147',
'src_mac': '00:11:22:33:44:55',
'dst_mac': '66:77:88:99:aa:bb',
'eth_type': 2048,
'src_cidr': '192.168.1.0/24',
'dst_cidr': '192.168.2.0/24',
'protocol': 'TCP',
'dst_port': '0x50'
}
with self.packet_filter_on_port(**kwargs) as pf:
pf_id = pf['packet_filter']['id']
pf_ref = self._show('packet_filters', pf_id)
# convert string to int.
kwargs.update({'priority': 103, 'eth_type': 2048,
'dst_port': 80,
# wildcard field is None in a response.
'src_port': None})
self.assertEqual(pf_id, pf_ref['packet_filter']['id'])
self.assertTrue(pf_ref['packet_filter']['in_port'])
for key in kwargs:
self.assertEqual(kwargs[key], pf_ref['packet_filter'][key])
def test_show_pf_not_found(self):
pf_id = '00000000-ffff-ffff-ffff-000000000000'
self._show('packet_filters', pf_id,
expected_code=webob.exc.HTTPNotFound.code)
def test_update_pf_on_network(self):
ctx = mock.ANY
pf_dict = mock.ANY
with self.packet_filter_on_network(admin_state_up=False) as pf:
pf_id = pf['packet_filter']['id']
self.assertFalse(self.ofc.create_ofc_packet_filter.called)
data = {'packet_filter': {'admin_state_up': True}}
self._update('packet_filters', pf_id, data)
self.ofc.create_ofc_packet_filter.assert_called_once_with(
ctx, pf_id, pf_dict)
self.assertFalse(self.ofc.delete_ofc_packet_filter.called)
data = {'packet_filter': {'admin_state_up': False}}
self._update('packet_filters', pf_id, data)
self.ofc.delete_ofc_packet_filter.assert_called_once_with(
ctx, pf_id)
def test_update_pf_on_port(self):
ctx = mock.ANY
pf_dict = mock.ANY
with self.packet_filter_on_port(admin_state_up=False) as pf:
pf_id = pf['packet_filter']['id']
self.assertFalse(self.ofc.create_ofc_packet_filter.called)
data = {'packet_filter': {'admin_state_up': True}}
self._update('packet_filters', pf_id, data)
self.ofc.create_ofc_packet_filter.assert_called_once_with(
ctx, pf_id, pf_dict)
self.assertFalse(self.ofc.delete_ofc_packet_filter.called)
data = {'packet_filter': {'admin_state_up': False}}
self._update('packet_filters', pf_id, data)
self.ofc.delete_ofc_packet_filter.assert_called_once_with(
ctx, pf_id)
def test_delete_pf_with_error_status(self):
self.ofc.set_raise_exc('create_ofc_packet_filter',
nexc.OFCException(reason='fake'))
with self.packet_filter_on_network() as pf:
pf_id = pf['packet_filter']['id']
pf_ref = self._show('packet_filters', pf_id)
self.assertEqual(pf_ref['packet_filter']['status'], 'ERROR')
ctx = mock.ANY
pf_dict = mock.ANY
expected = [
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.create_ofc_packet_filter(ctx, pf_id, pf_dict),
mock.call.exists_ofc_packet_filter(ctx, pf_id),
]
self.ofc.assert_has_calls(expected)
self.assertEqual(1, self.ofc.create_ofc_packet_filter.call_count)
self.assertEqual(0, self.ofc.delete_ofc_packet_filter.call_count)
def test_activate_pf_on_port_triggered_by_update_port(self):
ctx = mock.ANY
pf_dict = mock.ANY
with self.packet_filter_on_port(set_portinfo=False) as pf:
pf_id = pf['packet_filter']['id']
in_port_id = pf['packet_filter']['in_port']
self.assertFalse(self.ofc.create_ofc_packet_filter.called)
portinfo = {'id': in_port_id, 'port_no': 123}
kw = {'added': [portinfo]}
self.rpcapi_update_ports(**kw)
self.ofc.create_ofc_packet_filter.assert_called_once_with(
ctx, pf_id, pf_dict)
self.assertFalse(self.ofc.delete_ofc_packet_filter.called)
kw = {'removed': [in_port_id]}
self.rpcapi_update_ports(**kw)
self.ofc.delete_ofc_packet_filter.assert_called_once_with(
ctx, pf_id)
# Ensure the pf was created before the in_port was activated.
ctx = mock.ANY
pf_dict = mock.ANY
port_dict = mock.ANY
expected = [
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.create_ofc_packet_filter(ctx, pf_id, pf_dict),
mock.call.exists_ofc_port(ctx, in_port_id),
mock.call.create_ofc_port(ctx, in_port_id, port_dict),
mock.call.exists_ofc_port(ctx, in_port_id),
mock.call.delete_ofc_port(ctx, in_port_id, port_dict),
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.delete_ofc_packet_filter(ctx, pf_id),
]
self.ofc.assert_has_calls(expected)
self.assertEqual(self.ofc.create_ofc_packet_filter.call_count, 1)
self.assertEqual(self.ofc.delete_ofc_packet_filter.call_count, 1)
def test_activate_pf_while_exists_on_ofc(self):
ctx = mock.ANY
with self.packet_filter_on_network() as pf:
pf_id = pf['packet_filter']['id']
self.ofc.set_raise_exc('delete_ofc_packet_filter',
nexc.OFCException(reason='hoge'))
# This update request will make plugin reactivate pf.
data = {'packet_filter': {'priority': 1000}}
self._update('packet_filters', pf_id, data,
expected_code=webob.exc.HTTPInternalServerError.code)
self.ofc.set_raise_exc('delete_ofc_packet_filter', None)
ctx = mock.ANY
pf_dict = mock.ANY
expected = [
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.create_ofc_packet_filter(ctx, pf_id, pf_dict),
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.delete_ofc_packet_filter(ctx, pf_id),
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.delete_ofc_packet_filter(ctx, pf_id),
]
self.ofc.assert_has_calls(expected)
self.assertEqual(self.ofc.delete_ofc_packet_filter.call_count, 2)
def test_deactivate_pf_with_ofc_deletion_failure(self):
ctx = mock.ANY
with self.packet_filter_on_network() as pf:
pf_id = pf['packet_filter']['id']
self.ofc.set_raise_exc('delete_ofc_packet_filter',
nexc.OFCException(reason='hoge'))
data = {'packet_filter': {'admin_state_up': False}}
self._update('packet_filters', pf_id, data,
expected_code=webob.exc.HTTPInternalServerError.code)
pf_ref = self._show('packet_filters', pf_id)
self.assertEqual(pf_ref['packet_filter']['status'], 'ERROR')
self.ofc.set_raise_exc('delete_ofc_packet_filter', None)
data = {'packet_filter': {'priority': 1000}}
self._update('packet_filters', pf_id, data)
pf_ref = self._show('packet_filters', pf_id)
self.assertEqual(pf_ref['packet_filter']['status'], 'DOWN')
ctx = mock.ANY
pf_dict = mock.ANY
expected = [
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.create_ofc_packet_filter(ctx, pf_id, pf_dict),
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.delete_ofc_packet_filter(ctx, pf_id),
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.delete_ofc_packet_filter(ctx, pf_id),
mock.call.exists_ofc_packet_filter(ctx, pf_id),
]
self.ofc.assert_has_calls(expected)
self.assertEqual(self.ofc.delete_ofc_packet_filter.call_count, 2)
def test_delete_pf_with_ofc_deletion_failure(self):
self.ofc.set_raise_exc('delete_ofc_packet_filter',
nexc.OFCException(reason='hoge'))
with self.packet_filter_on_network() as pf:
pf_id = pf['packet_filter']['id']
self._delete('packet_filters', pf_id,
expected_code=webob.exc.HTTPInternalServerError.code)
pf_ref = self._show('packet_filters', pf_id)
self.assertEqual(pf_ref['packet_filter']['status'], 'ERROR')
self.ofc.set_raise_exc('delete_ofc_packet_filter', None)
# Then, self._delete('packet_filters', pf_id) will succeed.
ctx = mock.ANY
pf_dict = mock.ANY
expected = [
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.create_ofc_packet_filter(ctx, pf_id, pf_dict),
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.delete_ofc_packet_filter(ctx, pf_id),
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.delete_ofc_packet_filter(ctx, pf_id),
]
self.ofc.assert_has_calls(expected)
self.assertEqual(self.ofc.delete_ofc_packet_filter.call_count, 2)
def test_auto_delete_pf_in_network_deletion(self):
with self.packet_filter_on_network(admin_state_up=False,
do_delete=False) as pf:
pf_id = pf['packet_filter']['id']
self._show('packet_filters', pf_id,
expected_code=webob.exc.HTTPNotFound.code)
def test_auto_delete_pf_in_port_deletion(self):
with self.port() as port:
network = self._show('networks', port['port']['network_id'])
with self.packet_filter_on_network(network=network) as pfn:
with self.packet_filter_on_port(port=port, do_delete=False,
set_portinfo=False) as pf:
pf_id = pf['packet_filter']['id']
in_port_id = pf['packet_filter']['in_port']
self._delete('ports', in_port_id)
# Check the packet filter on the port is deleted.
self._show('packet_filters', pf_id,
expected_code=webob.exc.HTTPNotFound.code)
# Check the packet filter on the network is not deleted.
self._show('packet_filters', pfn['packet_filter']['id'])
def test_no_pf_activation_while_port_operations(self):
with self.packet_filter_on_port() as pf:
in_port_id = pf['packet_filter']['in_port']
self.assertEqual(self.ofc.create_ofc_packet_filter.call_count, 1)
self.assertEqual(self.ofc.delete_ofc_packet_filter.call_count, 0)
data = {'port': {'admin_state_up': False}}
self._update('ports', in_port_id, data)
self.assertEqual(self.ofc.create_ofc_packet_filter.call_count, 1)
self.assertEqual(self.ofc.delete_ofc_packet_filter.call_count, 0)
data = {'port': {'admin_state_up': True}}
self._update('ports', in_port_id, data)
self.assertEqual(self.ofc.create_ofc_packet_filter.call_count, 1)
self.assertEqual(self.ofc.delete_ofc_packet_filter.call_count, 0)
class TestNecPluginPacketFilterWithValidate(TestNecPluginPacketFilterBase):
def setUp(self):
super(TestNecPluginPacketFilterWithValidate, self).setUp()
# Remove attributes explicitly from the mock object to check
# the case where there is no update_filter method.
del self.ofc.driver.update_filter
self.validate_create = self.ofc.driver.validate_filter_create
self.validate_update = self.ofc.driver.validate_filter_update
def test_create_pf_on_network(self):
with self.packet_filter_on_network() as pf:
pf_id = pf['packet_filter']['id']
self.assertEqual(pf['packet_filter']['status'], 'ACTIVE')
ctx = mock.ANY
pf_dict = mock.ANY
expected = [
mock.call.driver.validate_filter_create(ctx, pf_dict),
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.create_ofc_packet_filter(ctx, pf_id, pf_dict),
mock.call.exists_ofc_packet_filter(ctx, pf_id),
mock.call.delete_ofc_packet_filter(ctx, pf_id),
]
self.ofc.assert_has_calls(expected)
self.assertEqual(self.ofc.create_ofc_packet_filter.call_count, 1)
self.assertEqual(self.ofc.delete_ofc_packet_filter.call_count, 1)
def test_update_pf_on_network(self):
ctx = mock.ANY
pf_dict = mock.ANY
with self.packet_filter_on_network(admin_state_up=False) as pf:
pf_id = pf['packet_filter']['id']
self.assertFalse(self.ofc.create_ofc_packet_filter.called)
data = {'packet_filter': {'admin_state_up': True}}
self._update('packet_filters', pf_id, data)
self.ofc.create_ofc_packet_filter.assert_called_once_with(
ctx, pf_id, pf_dict)
self.ofc.driver.validate_filter_update.assert_called_once_with(
ctx, data['packet_filter'])
self.assertFalse(self.ofc.delete_ofc_packet_filter.called)
data = {'packet_filter': {'admin_state_up': False}}
self._update('packet_filters', pf_id, data)
self.ofc.delete_ofc_packet_filter.assert_called_once_with(
ctx, pf_id)
self.assertEqual(
2, self.ofc.driver.validate_filter_update.call_count)
def test_create_pf_on_network_with_validation_error(self):
self.validate_create.side_effect = ext_pf.PacketFilterInvalidPriority(
min=1, max=65535)
with self.network() as net:
net_id = net['network']['id']
e = self.assertRaises(webob.exc.HTTPClientError,
self._make_packet_filter,
self.fmt, net_id, expected_res_status=400)
self.assertEqual(400, e.status_int)
def test_update_pf_on_network_with_validation_error(self):
self.validate_update.side_effect = (
ext_pf.PacketFilterUpdateNotSupported(field='priority'))
with self.packet_filter_on_network() as pf:
pf_id = pf['packet_filter']['id']
pf_ref = self._show('packet_filters', pf_id)
self.assertEqual(pf_ref['packet_filter']['status'], 'ACTIVE')
data = {'packet_filter': {'priority': 1000}}
self._update('packet_filters', pf_id, data,
expected_code=400)
class TestNecPluginPacketFilterWithFilterUpdate(TestNecPluginPacketFilterBase):
def setUp(self):
super(TestNecPluginPacketFilterWithFilterUpdate, self).setUp()
# Remove attributes explicitly from the mock object to check
# the case where there are no update_filter and validate_* methods.
del self.ofc.driver.validate_filter_create
del self.ofc.driver.validate_filter_update
def test_update_pf_toggle_admin_state(self):
ctx = mock.ANY
pf_dict = mock.ANY
with self.packet_filter_on_network(admin_state_up=False) as pf:
pf_id = pf['packet_filter']['id']
self.assertFalse(self.ofc.create_ofc_packet_filter.called)
data = {'packet_filter': {'admin_state_up': True}}
self._update('packet_filters', pf_id, data)
self.ofc.create_ofc_packet_filter.assert_called_once_with(
ctx, pf_id, pf_dict)
self.assertFalse(self.ofc.delete_ofc_packet_filter.called)
data = {'packet_filter': {'admin_state_up': False}}
self._update('packet_filters', pf_id, data)
self.ofc.delete_ofc_packet_filter.assert_called_once_with(
ctx, pf_id)
def test_update_pf_change_field(self):
ctx = mock.ANY
with self.packet_filter_on_network(admin_state_up=True) as pf:
pf_id = pf['packet_filter']['id']
self.assertTrue(self.ofc.create_ofc_packet_filter.called)
data = {'packet_filter': {'src_mac': '12:34:56:78:9a:bc'}}
self._update('packet_filters', pf_id, data)
self.ofc.update_ofc_packet_filter.assert_called_once_with(
ctx, pf_id, data['packet_filter'])
self.assertEqual(1, self.ofc.update_ofc_packet_filter.call_count)
self.assertFalse(self.ofc.delete_ofc_packet_filter.called)
data = {'packet_filter': {'admin_state_up': False}}
self._update('packet_filters', pf_id, data)
self.ofc.delete_ofc_packet_filter.assert_called_once_with(
ctx, pf_id)
data = {'packet_filter': {'src_mac': '11:22:33:44:55:66'}}
self._update('packet_filters', pf_id, data)
self.assertEqual(1, self.ofc.update_ofc_packet_filter.call_count)
data = {'packet_filter': {'admin_state_up': True}}
self._update('packet_filters', pf_id, data)
data = {'packet_filter': {'src_mac': '66:55:44:33:22:11'}}
self._update('packet_filters', pf_id, data)
self.assertEqual(2, self.ofc.update_ofc_packet_filter.call_count)
|
|
#!/usr/bin/python
import sys
import os
from subprocess import Popen, PIPE
import optparse
import tempfile
import shutil
def is_exe(fpath):
return os.path.isfile(fpath) and os.access(fpath, os.X_OK)
def which(program):
assert isinstance(program, str)
fpath, fname = os.path.split(program)
if fpath:
if is_exe(program):
return program
else:
for path in os.environ["PATH"].split(os.pathsep):
path = path.strip('"')
exe_file = os.path.join(path, program)
if is_exe(exe_file):
return exe_file
return None
class FslToolExecuteException(Exception):
pass
class NiftiConvention(object):
def __init__(self):
self.tmp_folder = ''
self.fslorient_bin = ''
self.fslswapdim_bin = ''
self.options = None
self.neuro_default = 'LR AP SI'
self.radio_default = 'RL AP SI'
self.valid_chars = [['L', 'R'], ['A', 'P'], ['S', 'I']]
self.fslbinary_names = [['fsl5.0-fslorient', 'fsl5.0-fslswapdim'],
['fslorient', 'fslswapdim']]
def convention_is_valid(self, convention_string):
c_in_indicator = [False, False, False]
is_valid = True
if len(convention_string) != 4:
is_valid = False
if convention_string[0] not in ['R', 'N']:
is_valid = False
for letter in convention_string[1:]:
for i in range(len(self.valid_chars)):
if letter in self.valid_chars[i] and c_in_indicator[i] is True:
is_valid = False
elif letter in self.valid_chars[i]:
c_in_indicator[i] = True
if not all(c_in_indicator):
is_valid = False
return is_valid
def short_to_long_convention(self, short_conv):
long_conv = ''
assert len(short_conv) == 3
for letter in short_conv:
for vc in self.valid_chars:
if letter in vc:
long_conv += letter
long_conv += vc[vc.index(letter) - 1] + ' '
long_conv = long_conv[0:-1]
return long_conv
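# e.g. short_to_long_convention('RAS') returns 'RL AP SI' (radio_default),
# while short_to_long_convention('LAS') returns 'LR AP SI' (neuro_default).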
@property
def check_fslbinaries(self):
"""
Checks for existence of fsl binaries in the system path.
:return: True if the binaries are found, False otherwise.
"""
binaries_found = False
for b_names in self.fslbinary_names:
if which(b_names[0]) and which(b_names[1]):
binaries_found = True
self.fslorient_bin = b_names[0]
self.fslswapdim_bin = b_names[1]
return binaries_found
def is_radiological_conv(self, filepath):
command = self.fslorient_bin + ' -getorient ' + filepath
output = self.execute(command)
if output[0].strip('\n') == 'RADIOLOGICAL':
input_is_r = True
elif output[0].strip('\n') == 'NEUROLOGICAL':
input_is_r = False
else:
raise Exception('Neither Radiological nor Neurological status could be determined')
return input_is_r
def swap_orient(self, filepath, is_radiological):
# left right needs to be first dimension
if is_radiological:
self.swap_dim(filepath, self.radio_default)
else:
self.swap_dim(filepath, self.neuro_default)
command = self.fslorient_bin + ' -swaporient ' + filepath
self.execute(command)
self.swap_dim(filepath, '-x y z')
def execute(self, command):
if self.options.verbose:
print 'Command: ' + command
stream = Popen(command, stdout=PIPE, shell=True)
output = stream.communicate()  # read output before checking the return code to avoid a PIPE deadlock
rcode = stream.returncode
if rcode:
err = FslToolExecuteException(command, output)
err.message = "FSL tool execution failed"
raise err
if self.options.verbose:
print output
return output
def swap_dim(self, filepath, convention):
command = self.fslswapdim_bin + ' ' + filepath + ' ' + convention + ' ' + filepath
self.execute(command)
def run(self):
input_file = ''
output_file = ''
# check for fsl binaries
binaries_found = self.check_fslbinaries
if not binaries_found:
sys.exit("Error: FSL binaries not found, make sure binaries are added to the system path")
usage = "usage: nifti_convention <inputfile> <outputfile> [options]"
parser = optparse.OptionParser(usage=usage,
description="Changing the convention how the data ist stored in the nifti file."
"The anatomical labels (orientation) need "
"to be set correctly for the tool to yield the desired result."
" FSL tools (http://fsl.fmrib.ox.ac.uk/) is "
"required to be installed. The tool performs a series of "
"-fslorient and -fslswapdim commands to change the "
"storage convention.")
parser.add_option('-c', help="nifti storage convention [default: RRAS] "
"4 letters [R,N] [L,R] [A,P] [S,I] defining how data is stored in the nifti file. "
"first letter: [R,N] for radiological or neurological convention. "
"letter 2-4: convention for the dimensions R=RightLeft, A=AnteriorPosterior,"
" S=SuperiorInferior",
action='store', type='string', dest='convention', default='RRAS')
parser.add_option('-v', help="verbose", action='store_true', dest='verbose', default=False)
self.options, args = parser.parse_args()
self.options.convention = self.options.convention.upper()
if not args:
parser.error('No input file given')
elif not os.path.isfile(args[0]):
parser.error('Input file does not exist!')
else:
input_file = args[0]
if len(args) < 2:
parser.error('No output file given')
else:
output_file = args[1]
if len(args) > 2:
print "Warning: Additional arguments ignored!"
if not self.convention_is_valid(self.options.convention):
parser.error(
"Convention argument is not valid! Only a combination of these 4 letters "
"[R,N] [L,R] [A,P] [I,S] is allowed!")
if self.options.verbose:
print "Input file: " + input_file
print "Output file: " + output_file
if not output_file.endswith('.nii.gz'):
print "Warning: Output file will be zipped to a .nii.gz file"
if not output_file.endswith('.nii'):
output_file += '.nii.gz'
else:
output_file += '.gz'
if not (input_file.endswith('.nii') or input_file.endswith('.nii.gz')):
parser.error('Input file must be a nifti file!')
# special case: fsl tools do not work with multiple dots in the filename
file_extension = '.nii.gz'
self.tmp_folder = tempfile.mkdtemp()
# copy input to tmp folder
working_input = os.path.join(self.tmp_folder, 'input' + file_extension)
if self.options.verbose:
print "Working input: " + working_input
shutil.copy(input_file, working_input)
# check for convention
is_radiological = self.is_radiological_conv(working_input)
# change orientation if we have to
if (self.options.convention[0] == 'R' and not is_radiological) or (
self.options.convention[0] == 'N' and is_radiological):
self.swap_orient(working_input, is_radiological)
# swap the dimensions to the desired convention
self.swap_dim(working_input, self.short_to_long_convention(self.options.convention[1:]))
# copy file to destination
shutil.copy(working_input, output_file)
def __del__(self):
if os.path.isdir(self.tmp_folder):
shutil.rmtree(self.tmp_folder, ignore_errors=True)
if __name__ == "__main__":
try:
nc = NiftiConvention()
nc.run()
exit(0)
except FslToolExecuteException, e:
print 'Error: ' + e.message
if e[1][0].startswith('Cannot perform requested swap'):
print "The given convention is not allowed! Try to use the default convention."
exit(1)
|
|
#!/usr/bin/env python3
# encoding: utf-8
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Script to convert Autosys JIL files into dag-factory yaml format
"""
import argparse
import re
from parsimonious.grammar import Grammar
from parsimonious.nodes import NodeVisitor
import yaml
class JilVisitor(NodeVisitor):
"""
NodeVisitor subclass that customizes how each element found by the parser
is handled.
"""
def visit_expr(self, _node, visited_children):
""" Returns the overall output. """
value = None
key = None
def setup_default_args():
""" Add the default args section to the result dictionary"""
default_arg_keys = ["owner"]
if "default_args" not in value.keys():
value["default_args"] = dict()
value["default_args"]["start_date"] = "1 days"
for def_arg in default_arg_keys:
if def_arg in value.keys():
value["default_args"][def_arg] = value[def_arg]
del value[def_arg]
def setup_task_group():
""" Adds a taskgroup section"""
if not value.get("task_groups"):
value["task_groups"] = dict()
if value.get("description"):
value["task_groups"][f'task_group_{key}'] = {
"tooltip": value["description"]
}
else:
value["task_groups"][f'task_group_{key}'] = {"tooltip": key}
# This creates a dependency if you have a nested box within a box
if value.get("box_name"):
dependency = value.get("box_name")
for val in result.values():
if val.get('task_groups'):
if val['task_groups'].get(f"task_group_{dependency}"):
if value["task_groups"][f'task_group_{key}'].\
get('dependencies'):
value["task_groups"][f'task_group_{key}']['dependencies'].\
append(f"task_group_{dependency}")
else:
value["task_groups"][f'task_group_{key}'].\
update({'dependencies': [f"task_group_{dependency}"]})
# check if a condition statement exists to set it as a dependency
if value.get("condition"):
create_dependencies()
def setup_task():
""" Adds a task section"""
if not value.get("tasks"):
value.update({"tasks": {f'task_{key}': dict()}})
cmd_dict = {
"operator": "airflow.operators.bash_operator.BashOperator",
"bash_command": f'echo [{value["command"]}]'
}
value["tasks"][f'task_{key}'].update(cmd_dict)
# check if a condition statement exists to set it as a dependency
if value.get("condition"):
create_dependencies()
if value.get("box_name"):
value["tasks"][f'task_{key}']["task_group_name"] = \
f'task_group_{value.get("box_name")}'
# tasks can't have descriptions; only dags/top-level boxes can
if value.get("description"):
del value["description"]
# clean up the converted field
del value["command"]
def create_dependencies():
""" Converts condition statement to dependencies"""
condition_pattern = r"s\((\w+)\)"
mat = re.findall(condition_pattern, value["condition"])
if mat:
for dep in mat:
for val in result.values():
# check if the dependency is one of the tasks
if val.get('tasks'):
if val['tasks'].get(f"task_{dep}"):
if value["tasks"][f'task_{key}'].get(
'dependencies'):
value["tasks"][f'task_{key}']['dependencies'].\
append(f"task_{dep}")
else:
value["tasks"][f'task_{key}'].\
update({'dependencies': [f"task_{dep}"]})
# check if the dependency is one of the tasksgroups
if val.get('task_groups'):
if val['task_groups'].get(f"task_group_{dep}"):
if value["task_groups"][
f'task_group_{key}'].get(
'dependencies'):
value["task_groups"][f'task_group_{key}']['dependencies'].\
append(f"task_group_{dep}")
else:
value["task_groups"][f'task_group_{key}'].\
update({'dependencies': [f"task_group_{dep}"]})
# clean up the converted field
del value["condition"]
# create the result dictionary
result = {}
for child in visited_children:
for key, value in child[0].items():
## Convert top level Box to DAG
if value['job_type'] == "box" and not value.get("box_name"):
setup_default_args()
setup_task_group()
# Clean Up
if value.get("description"):
del value["description"]
del value["job_type"]
result[f"{key}_DAG"] = value
## Convert Box inside a box into a TaskGroup
elif value['job_type'] == "box" and value.get('box_name'):
dag_name = list(result.keys())[0]
setup_task_group()
result[dag_name]["task_groups"].update(value["task_groups"])
## Convert Commands inside Boxes into Tasks of the TaskGroups
elif value.get("box_name") and value['job_type'] == "cmd":
dag_name = list(result.keys())[0]
setup_task()
if result[dag_name].get("tasks"):
result[dag_name]["tasks"].update(value["tasks"])
else:
result[dag_name]["tasks"] = value["tasks"]
# clean up
del value["box_name"]
del value["job_type"]
if value["owner"]:
del value["owner"]
## Convert Stand Alone Commands into a DAG
elif 'box_name' not in value.keys(
) and value['job_type'] == "cmd":
# Populate the Default Args
setup_default_args()
# Populate the Task
value['tasks'] = {
f"task_cmd_{key}": {
"operator":
"airflow.operators.bash_operator.BashOperator",
"bash_command":
f'echo [{value["command"]}]'
}
}
if value.get("condition"):
create_dependencies()
# Clean Up
del value["command"]
del value["job_type"]
result[f"{key}"] = value
return result
def visit_entry(self, _node, visited_children):
""" Makes a dict of the job (as key) and the key/value pairs. """
key, values = visited_children
clean_values = [x for x in values if x is not None]
return {f"{key}": dict(clean_values)}
def visit_job(self, _node, visited_children):
""" Gets the job name. """
_, _, job, *_ = visited_children
return job.text
def visit_box(self, _node, visited_children):
""" Gets the box name and task. """
_, _, box, *_ = visited_children
return box.text
def visit_pair(self, node, _visited_children):
""" Gets each key/value pair, returns a tuple. """
key, _, value, *_ = node.children
converted_fields = ("start_times",)  # trailing comma keeps this a tuple rather than a plain string
unsupported_fields = ("permission", "std_err_file", "std_out_file",
"date_conditions", "machine", "alarm_if_fail",
"alarm_if_terminated", "avg_runtime",
"max_run_alarm", "notification_alarm_types",
"notification_template", "notification_id",
"send_notification", "notification_emailaddress",
"days_of_week", "notification_msg")
if key.text not in unsupported_fields:
if key.text in converted_fields:
if key.text == "start_times":
converted_key = "schedule_interval"
hour, minute = value.text.strip('\"').split(":")
converted_value = f"{minute} {hour} * * *"
return converted_key, converted_value
else:
return key.text, value.text
def generic_visit(self, node, visited_children):
""" The generic visit method. """
return visited_children or node
def parse_jil(input_file):
"""Parse Jil file and return a python dictionary of the parsed data"""
grammar = Grammar(r"""
expr = (entry / emptyline)*
entry = job pair*
job = jobstart colon jobname ws
pair = key colon value ws?
key = !jobstart word+
value = (word / quoted)+
word = ~r"[- ,\w\(\)\@\.\/\$\*\'\&\<\>]+"
wordwild = ~r"(.*)"
quoted = ~'"+[^\"]+"+'
colon = ws? ":" ws?
jobname = ~r"[\w]+"
jobstart = "insert_job"
ws = ~"\s*"
emptyline = ws+
""")
with open(input_file, 'r') as rfh:
jil_data = rfh.read()
tree = grammar.parse(jil_data)
jil_vis = JilVisitor()
output = jil_vis.visit(tree)
return output
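# Minimal usage sketch (the JIL content and file name are hypothetical):
#
#   # nightly_load.jil
#   insert_job: nightly_load   job_type: cmd
#   command: /bin/run_load.sh
#   start_times: "02:30"
#
#   parsed = parse_jil("nightly_load.jil")
#   # roughly: {"nightly_load": {"schedule_interval": "30 2 * * *",
#   #                            "default_args": {"start_date": "1 days"},
#   #                            "tasks": {"task_cmd_nightly_load": {...}}}}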
if __name__ == "__main__":
CMD_DESC = "Convert JIL File to dag-factory yaml and airflow dag python"
parser = argparse.ArgumentParser(description=CMD_DESC)
parser.add_argument(
"-p",
"--prefix",
help='specify prefix to be used for converted files',
required=True,
)
parser.add_argument("-i",
"--input",
help='specify input JIL file',
required=True)
args = parser.parse_args()
output_dict = parse_jil(args.input)
dag_factory_yaml_file = f"{args.prefix}.yaml"
airflow_py_file = f"{args.prefix}-dag.py"
# write out the dag-factory yaml file
with open(dag_factory_yaml_file, 'w') as dfy_wfh:
yaml.safe_dump(output_dict, dfy_wfh)
# write out the airflow dag python file
airflow_py_script = f"""from airflow import DAG
import dagfactory
config_file = "/home/airflow/gcsfuse/data/{dag_factory_yaml_file}"
example_dag_factory = dagfactory.DagFactory(config_file)
# Creating task dependencies
example_dag_factory.clean_dags(globals())
example_dag_factory.generate_dags(globals())
"""
with open(airflow_py_file, 'w') as afp_wfh:
afp_wfh.write(airflow_py_script)
ENV_TEMPL = "<YOUR_ENV>"
gcloud_uri_command = (
f"gcloud composer environments describe {ENV_TEMPL}"
f" --location us-central1 --format=\"get(config.airflowUri)\"")
gcloud_gcs_command = (
f"gcloud composer environments describe {ENV_TEMPL}"
f" --location us-central1 --format=\"get(config.dagGcsPrefix)\"")
gsutil_cp_command = (
f"gsutil cp {dag_factory_yaml_file} gs://{ENV_TEMPL}/data")
gcloud_upload_command = (
f"gcloud composer environments storage dags import --environment"
f" <YOUR_ENV> --location us-central1 --source {airflow_py_file}")
mesg = (f"dag-factory yaml written to: {dag_factory_yaml_file}\n"
f"airflow python file written to: {airflow_py_file}\n\n"
f"Run the following to get your GCS Bucket \n"
f"{gcloud_gcs_command}\n\n"
f"Run the following to upload the dag-factory yaml file to the "
f"bucket:\n{gsutil_cp_command}\n\n"
f"Then run the following to upload the airflow dag python"
f" script to your composer environment: \n"
f"{gcloud_upload_command}\n\n"
f"Then run the following to get the URL of the Airflow UI:\n"
f"{gcloud_uri_command} \n\n"
f"Then visit the URL and trigger your DAG")
print(mesg)
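# Assumed command line for this converter (the script file name is an assumption):
#   python jil_to_dag_factory.py -i jobs.jil -p converted
# which writes converted.yaml and converted-dag.py as described in the message above.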
|
|
""" Karr Lab build utilities
:Author: Jonathan Karr <[email protected]>
:Date: 2016-10-27
:Copyright: 2018, Karr Lab
:License: MIT
"""
import cement
from karr_lab_build_utils.core import BuildHelper, BuildHelperError
import karr_lab_build_utils
import os
import sys
import traceback
class BaseController(cement.Controller):
""" Base controller for command line application """
class Meta:
label = 'base'
description = "Karr Lab build utilities"
help = "Karr Lab build utilities"
arguments = [
(['-v', '--version'], dict(action='version', version=karr_lab_build_utils.__version__)),
]
@cement.ex(help='Archive test report')
def archive_test_report(self):
""" Upload test report to history server """
buildHelper = BuildHelper()
buildHelper.archive_test_report()
@cement.ex(help='Install requirements')
def install_requirements(self):
""" Install requirements """
buildHelper = BuildHelper()
buildHelper.install_requirements()
@cement.ex(help="Upgrade the packages from the Karr Lab's GitHub organization")
def upgrade_karr_lab_packages(self):
""" Upgrade the packages from the Karr Lab's GitHub organization """
buildHelper = BuildHelper()
buildHelper.upgrade_karr_lab_packages()
@cement.ex(hide=True)
def _default(self):
self._parser.print_help()
class CreatePackageController(cement.Controller):
""" Create a package
* Create local and remote Git repositories;
* Setup the directory structure of the repository;
* Add the repository to CircleCI, Coveralls, Code Climate, Read the Docs, and code.karrlab.org;
* Update the downstream dependencies of the package's dependencies
"""
class Meta:
label = 'create-package'
description = (
'- Create local and remote Git repositories;\n'
' - Setup the directory structure of the repository;\n'
' - Add the repository to CircleCI, Coveralls, Code Climate, Read the Docs, and code.karrlab.org;\n'
" - Update the downstream dependencies of the package's dependencies"
)
help = (
'- Create local and remote Git repositories;\n'
' - Setup the directory structure of the repository;\n'
' - Add the repository to CircleCI, Coveralls, Code Climate, Read the Docs, and code.karrlab.org;\n'
" - Update the downstream dependencies of the package's dependencies"
)
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['--pypi-repository'], dict(
default='pypi', type=str, help='Repository to upload the package to (e.g. pypi or testpypi)')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
buildHelper.create_package(pypi_repository=args.pypi_repository)
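# Assumed invocation through the cement CLI defined in this module (the console
# script name is an assumption):
#   karr_lab_build_utils create-package --pypi-repository testpypi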
class CreateRepositoryController(cement.Controller):
""" Create a GitHub repository and clone the repository locally """
class Meta:
label = 'create-repository'
description = 'Create a GitHub repository and clone the repository locally'
help = 'Create a GitHub repository and clone the repository locally'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['name'], dict(
type=str, help='Name of the repository (i.e. repo_<name>)')),
(['--description'], dict(
default='', type=str, help='Description of the repository')),
(['--public'], dict(
default=False, action='store_true', help='if set, make the repository public')),
(['--dirname'], dict(
default=None, type=str, help='Path for the repository')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
buildHelper.create_repository(args.name, description=args.description, private=(not args.public), dirname=args.dirname)
class SetupRepositoryController(cement.Controller):
""" Setup a local Git repository with the default directory structure """
class Meta:
label = 'setup-repository'
description = 'Setup a local Git repository with the default directory structure'
help = 'Setup a local Git repository with the default directory structure'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['name'], dict(
type=str, help='Name of the repository (i.e. repo_<name>)')),
(['--description'], dict(
default='', type=str, help='Description of the repository')),
(['--keyword'], dict(
dest='keywords', default=[], type=str, action='append', help='Keyword for the repository')),
(['--dependency'], dict(
dest='dependencies', default=[], type=str, action='append', help='Karr Lab package that the package depends on')),
(['--public'], dict(
default=False, action='store_true', help='if set, make the repository public')),
(['--build-image-version'], dict(
default=None, type=str, help='Build image version')),
(['--dirname'], dict(
default=None, type=str, help='Path for the repository')),
(['--coveralls-repo-badge-token'], dict(
default=None, type=str, help='Coveralls badge token for the repository')),
(['--code-climate-repo-id'], dict(
default=None, type=str, help='Code Climate ID for the repository')),
(['--code-climate-repo-badge-token'], dict(
default=None, type=str, help='Code Climate badge token for the repository')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
buildHelper.setup_repository(
args.name, description=args.description, keywords=args.keywords, dependencies=args.dependencies,
private=(not args.public), build_image_version=args.build_image_version, dirname=args.dirname,
coveralls_repo_badge_token=args.coveralls_repo_badge_token,
code_climate_repo_id=args.code_climate_repo_id, code_climate_repo_badge_token=args.code_climate_repo_badge_token)
class CreateDocumentationTemplateController(cement.Controller):
""" Create a Sphinx documentation template for a package """
class Meta:
label = 'create-documentation-template'
description = 'Create a Sphinx documentation template for a package'
help = 'Create a Sphinx documentation template for a package'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['--dirname'], dict(
default='.', type=str, help="Path to the package; default='.'")),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
buildHelper.create_documentation_template(dirname=args.dirname)
class DownloadInstallPackageConfigFilesController(cement.Controller):
""" Download and install configuration files from GitHub configuration repository """
class Meta:
label = 'download-install-package-config-files'
description = 'Download and install configuration files from GitHub configuration repository'
help = 'Download and install configuration files from GitHub configuration repository'
stacked_on = 'base'
stacked_type = 'nested'
arguments = []
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
buildHelper.download_package_config_files()
buildHelper.install_package_config_files()
class RunTestsController(cement.Controller):
""" cement.Controller for run_tests.
Run unit tests located at `test-path`.
Optionally, generate a coverage report.
Optionally, save the results to an XML file.
"""
class Meta:
label = 'run-tests'
description = 'Run unit tests located at `test_path`'
help = 'Run unit tests located at `test_path`'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['--test-path'], dict(
type=str, default=None, help=(
'Path to tests to run. '
'The path should be relative to the current directory, or an absolute path. '
'Default: the value of the environment variable `test_path` or '
'`./tests` if the environment variable has not been set.'))),
(['--dirname'], dict(
type=str, default='.', help="Path to package to test; default='.'")),
(['--n-workers'], dict(
type=int, default=1, help='Number of workers to run tests')),
(['--i-worker'], dict(
type=int, default=0, help='Index of worker within {0 .. n-workers - 1}')),
(['--verbose'], dict(
default=False, action='store_true', help='if set, display test output')),
(['--with-xunit'], dict(
default=False, action='store_true', help='if set, save test results to an XML file')),
(['--with-coverage'], dict(
default=False, action='store_true', help='if set, assess code coverage')),
(['--coverage-dirname'], dict(
type=str, default='tests/reports', help="Directory to store coverage data; default='tests/reports'")),
(['--coverage-type'], dict(
type=str, default='branch',
help="Type of coverage analysis to run {statement, branch, or multiple-decision}; default='branch'")),
(['--environment'], dict(
type=str, default='local',
help="Environment to run tests (local, docker, or circleci); default='local'")),
(['--ssh-key-filename'], dict(
type=str, default='~/.ssh/id_rsa', help='Path to GitHub SSH key')),
(['--keep-docker-container'], dict(
dest='remove_docker_container', action='store_false', default=True, help='Keep Docker container')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
# if `test_path` was not specified at the command line, try to get it from the `test_path` environment variable
# which can be set in CircleCI via build parameters
if args.test_path is None:
if 'test_path' in os.environ:
test_path = os.getenv('test_path')
else:
test_path = 'tests'
else:
test_path = args.test_path
verbose = args.verbose or bool(int(os.getenv('verbose', '0')))
# get coverage type
coverage_type = karr_lab_build_utils.core.CoverageType[args.coverage_type.lower().replace('-', '_')]
# run tests
buildHelper = BuildHelper()
buildHelper.run_tests(dirname=args.dirname, test_path=test_path,
n_workers=args.n_workers, i_worker=args.i_worker,
verbose=verbose, with_xunit=args.with_xunit,
with_coverage=args.with_coverage, coverage_dirname=args.coverage_dirname,
coverage_type=coverage_type, environment=karr_lab_build_utils.core.Environment[args.environment],
ssh_key_filename=args.ssh_key_filename, remove_docker_container=args.remove_docker_container)
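# Assumed invocation (console script name and paths are assumptions):
#   karr_lab_build_utils run-tests --test-path tests --with-coverage --with-xunit --verbose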
class DockerController(cement.Controller):
""" Base controller for Docker tasks """
class Meta:
label = 'docker'
description = 'Docker utilities'
help = 'Docker utilities'
stacked_on = 'base'
stacked_type = 'nested'
arguments = []
@cement.ex(hide=True)
def _default(self):
self._parser.print_help()
class DockerCreateContainerController(cement.Controller):
""" Create a Docker container for running tests """
class Meta:
label = 'create-container'
description = 'Create a Docker container for running tests'
help = 'Create a Docker container for running tests'
stacked_on = 'docker'
stacked_type = 'nested'
arguments = [
(['--ssh-key-filename'], dict(
type=str, default='~/.ssh/id_rsa', help='Path to GitHub SSH key')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
container = buildHelper.create_docker_container(ssh_key_filename=args.ssh_key_filename)
print('Created Docker container {}'.format(container))
class InstallPackageToDockerContainerController(cement.Controller):
""" Copy and install a package to a Docker container """
class Meta:
label = 'install-package-to-container'
description = 'Copy and install a package to a Docker container'
help = 'Copy and install a package to a Docker container'
stacked_on = 'docker'
stacked_type = 'nested'
arguments = [
(['container'], dict(type=str, help="Container id")),
(['--dirname'], dict(
type=str, default='.', help="Path to package to test; default='.'")),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
buildHelper.install_package_to_docker_container(args.container, dirname=args.dirname)
class RunTestsInDockerContainerController(cement.Controller):
""" Run tests in a Docker container """
class Meta:
label = 'run-tests-in-container'
description = 'Run tests in a Docker container'
help = 'Run tests in a Docker container'
stacked_on = 'docker'
stacked_type = 'nested'
arguments = [
(['container'], dict(type=str, help="Container id")),
(['--test-path'], dict(
type=str, default=None, help=(
'Path to tests to run. '
'The path should be relative to the current directory, or an absolute path. '
'Default: the value of the environment variable `test_path` or '
'`./tests` if the environment variable has not been set.'))),
(['--n-workers'], dict(
type=int, default=1, help='Number of workers to run tests')),
(['--i-worker'], dict(
type=int, default=0, help='Index of worker within {0 .. n-workers - 1}')),
(['--verbose'], dict(
default=False, action='store_true', help='if set, display test output')),
(['--with-xunit'], dict(
default=False, action='store_true', help='if set, save test results to an XML file')),
(['--with-coverage'], dict(
default=False, action='store_true', help='if set, assess code coverage')),
(['--coverage-dirname'], dict(
type=str, default='tests/reports', help="Directory to store coverage data; default='tests/reports'")),
(['--coverage-type'], dict(
type=str, default='branch',
help="Type of coverage analysis to run {statement, branch, or multiple-decision}; default='branch'")),
]
@cement.ex(hide=True)
def _default(self):
# if `test_path` was not specified at the command line, try to get it from the `test_path` environment variable
# which can be set in CircleCI via build parameters
args = self.app.pargs
if args.test_path is None:
if 'test_path' in os.environ:
test_path = os.getenv('test_path')
else:
test_path = 'tests'
else:
test_path = args.test_path
verbose = args.verbose or bool(int(os.getenv('verbose', '0')))
# get coverage type
coverage_type = karr_lab_build_utils.core.CoverageType[args.coverage_type.lower().replace('-', '_')]
# run tests
buildHelper = BuildHelper()
buildHelper.run_tests_in_docker_container(args.container, test_path=test_path,
n_workers=args.n_workers, i_worker=args.i_worker,
verbose=verbose, with_xunit=args.with_xunit,
with_coverage=args.with_coverage, coverage_dirname=args.coverage_dirname,
coverage_type=coverage_type)
class DockerRemoveContainerController(cement.Controller):
""" Remove a Docker container """
class Meta:
label = 'remove-container'
description = 'Remove a Docker container'
help = 'Remove a Docker container'
stacked_on = 'docker'
stacked_type = 'nested'
arguments = [
(['container'], dict(type=str, help="Container id")),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
buildHelper.remove_docker_container(args.container)
class FollowCircleciBuildController(cement.Controller):
""" Follow a CircleCI build for a repository """
class Meta:
label = 'follow-circleci-build'
description = 'Follow a CircleCI build for a repository'
help = 'Follow a CircleCI build for a repository'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['--repo-type'], dict(
type=str, default=None, help='Repository type (e.g., github)')),
(['--repo-owner'], dict(
type=str, default=None, help='Repository owner')),
(['--repo-name'], dict(
type=str, default=None, help='Name of the repository to build. This defaults to the name of the current repository.')),
(['--has-private-dependencies'], dict(
default=False, action='store_true',
help=('Set if the build requires an SSH key for the Karr Lab machine user because the repository depends on '
'another private repository'))),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
buildHelper.follow_circleci_build(
repo_type=args.repo_type, repo_owner=args.repo_owner,
repo_name=args.repo_name,
has_private_dependencies=args.has_private_dependencies)
class GetCircleciEnvironmentVariablesController(cement.Controller):
""" Get the CircleCI environment variables for a repository and their partial values"""
class Meta:
label = 'get-circleci-environment-variables'
description = 'Get the CircleCI environment variables for a repository and their partial values'
help = 'Get the CircleCI environment variables for a repository and their partial values'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['--repo-type'], dict(
type=str, default=None, help='Repository type (e.g., github)')),
(['--repo-owner'], dict(
type=str, default=None, help='Repository owner')),
(['--repo-name'], dict(
type=str, default=None, help='Name of the repository to build. This defaults to the name of the current repository.')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
vars = buildHelper.get_circleci_environment_variables(
repo_type=args.repo_type, repo_owner=args.repo_owner,
repo_name=args.repo_name)
for key, val in vars.items():
print('{}={}'.format(key, val))
class SetCircleciEnvironmentVariableController(cement.Controller):
""" Set a CircleCI environment variable for a repository """
class Meta:
label = 'set-circleci-environment-variable'
description = 'Set a CircleCI environment variable for a repository'
help = 'Set a CircleCI environment variable for a repository'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['name'], dict(
type=str, help='Name of the environment variable.')),
(['value'], dict(
type=str, help='Value of the environment variable.')),
(['--repo-type'], dict(
type=str, default=None, help='Repository type (e.g., github)')),
(['--repo-owner'], dict(
type=str, default=None, help='Repository owner')),
(['--repo-name'], dict(
type=str, default=None, help='Name of the repository to build. This defaults to the name of the current repository.')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
buildHelper.set_circleci_environment_variables(
{args.name: args.value},
repo_type=args.repo_type, repo_owner=args.repo_owner,
repo_name=args.repo_name)
class DeleteCircleciEnvironmentVariableController(cement.Controller):
""" Delete a CircleCI environment variable for a repository """
class Meta:
label = 'delete-circleci-environment-variable'
description = 'Delete a CircleCI environment variable for a repository'
help = 'Delete a CircleCI environment variable for a repository'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['name'], dict(
type=str, help='Name of the environment variable.')),
(['--repo-type'], dict(
type=str, default=None, help='Repository type (e.g., github)')),
(['--repo-owner'], dict(
type=str, default=None, help='Repository owner')),
(['--repo-name'], dict(
type=str, default=None, help='Name of the repository to build. This defaults to the name of the current repository.')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
buildHelper.delete_circleci_environment_variable(args.name,
repo_type=args.repo_type, repo_owner=args.repo_owner,
repo_name=args.repo_name)
class CreateCodeClimateGithubWebhookController(cement.Controller):
""" Create Code Climate GitHub webhook for the current repository """
class Meta:
label = 'create-code-climate-github-webhook'
description = 'Create Code Climate GitHub webhook for the current repository'
help = 'Create Code Climate GitHub webhook for the current repository'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['--repo-type'], dict(
type=str, default=None, help='Repository type (e.g., github)')),
(['--repo-owner'], dict(
type=str, default=None, help='Repository owner')),
(['--repo-name'], dict(
type=str, default=None, help='Name of the repository to build. This defaults to the name of the current repository.')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
buildHelper.create_code_climate_github_webhook(
repo_type=args.repo_type, repo_owner=args.repo_owner, repo_name=args.repo_name)
class DoPostTestTasksController(cement.Controller):
""" Do all post-test tasks for CircleCI """
class Meta:
label = 'do-post-test-tasks'
description = 'Do all post-test tasks for CircleCI'
help = 'Do all post-test tasks for CircleCI'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['installation_exit_code'], dict(
type=int, help='Exit code of the package installation tasks')),
(['tests_exit_code'], dict(
type=int, help='Exit code of the tests')),
(['--dry-run'], dict(
default=False, dest='dry_run', action='store_true', help='If set, do not send results to Coveralls and Code Climate')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
dry_run = args.dry_run or bool(int(os.getenv('dry_run', '0')))
""" Do all post-test tasks for CircleCI """
buildHelper = BuildHelper()
triggered_packages, not_triggered_packages, status, other_exception = buildHelper.do_post_test_tasks(
args.installation_exit_code != 0, args.tests_exit_code != 0, dry_run=dry_run)
# downstream triggered tests
if triggered_packages:
print('{} downstream builds were triggered'.format(len(triggered_packages)))
for triggered_package in triggered_packages:
print(' {}'.format(triggered_package))
else:
print("No downstream builds were triggered")
if not_triggered_packages:
for key, msg in not_triggered_packages.items():
print(' {}: {}'.format(key, msg.replace('\n', '\n ')))
# email notifications
num_notifications = sum(status.values())
if num_notifications > 0:
print('{} notifications were sent'.format(num_notifications))
if status['is_fixed']:
print(' Build fixed')
if status['is_old_error']:
print(' Recurring error')
if status['is_new_error']:
print(' New error')
if status['is_other_error']:
print(' Other error')
if status['is_new_downstream_error']:
print(' Downstream error')
else:
print('No notifications were sent.')
if status['is_other_error']:
if other_exception:
traceback.print_tb(other_exception['traceback'])
raise SystemExit('Post-test tasks were not successful: {}'.format(
other_exception['exception']))
else:
raise SystemExit('Post-test tasks were not successful')
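# Assumed invocation, passing the exit codes recorded earlier in the CI job
# (console script name is an assumption):
#   karr_lab_build_utils do-post-test-tasks 0 0 --dry-run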
class MakeAndArchiveReportsController(cement.Controller):
""" Make and archive reports:
* Generate HTML test history reports
* Generate HTML API documentation
* Archive coverage report to Coveralls and Code Climate
"""
class Meta:
label = 'make-and-archive-reports'
description = 'Make and archive reports'
help = 'Make and archive reports'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['--coverage-dirname'], dict(
type=str, default='tests/reports', help="Directory to store coverage data; default='tests/reports'")),
(['--dry-run'], dict(
default=False, dest='dry_run', action='store_true', help='If set, do not send results to Coveralls and Code Climate')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
dry_run = args.dry_run or bool(int(os.getenv('dry_run', '0')))
buildHelper = BuildHelper()
buildHelper.make_and_archive_reports(coverage_dirname=args.coverage_dirname, dry_run=dry_run)
class CombineCoverageReportsController(cement.Controller):
""" Combine coverage reports """
class Meta:
label = 'combine-coverage-reports'
description = 'Combine coverage reports (.coverage.*) into a single file (.coverage)'
help = 'Combine coverage reports (.coverage.*) into a single file (.coverage)'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['--coverage-dirname'], dict(
type=str, default='tests/reports', help="Directory to store coverage data; default='tests/reports'")),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
buildHelper.combine_coverage_reports(coverage_dirname=args.coverage_dirname)
class ArchiveCoverageReportController(cement.Controller):
""" Archive a coverage report:
* Upload report to Coveralls and Code Climate
"""
class Meta:
label = 'archive-coverage-report'
description = 'Archive coverage report'
help = 'Archive coverage report'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['--coverage-dirname'], dict(
type=str, default='tests/reports', help="Directory to store coverage data; default='tests/reports'")),
(['--dry-run'], dict(
default=False, dest='dry_run', action='store_true', help='If set, do not send results to Coveralls and Code Climate')),
]
@cement.ex(hide=True)
def _default(self):
""" Archive a coverage report:
* Upload report to Coveralls and Code Climate
"""
args = self.app.pargs
dry_run = args.dry_run or bool(int(os.getenv('dry_run', '0')))
buildHelper = BuildHelper()
buildHelper.archive_coverage_report(coverage_dirname=args.coverage_dirname, dry_run=dry_run)
class UploadCoverageReportToCoverallsController(cement.Controller):
""" Upload coverage report to Code Climate """
class Meta:
label = 'upload-coverage-report-to-coveralls'
description = 'Upload coverage report to Coveralls'
help = 'Upload coverage report to Coveralls'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['--coverage-dirname'], dict(
type=str, default='tests/reports', help="Directory to store coverage data; default='tests/reports'")),
(['--dry-run'], dict(
default=False, dest='dry_run', action='store_true', help='If set, do not send results to Coveralls')),
]
@cement.ex(hide=True)
def _default(self):
""" Upload coverage report to Coveralls """
args = self.app.pargs
dry_run = args.dry_run or bool(int(os.getenv('dry_run', '0')))
buildHelper = BuildHelper()
buildHelper.upload_coverage_report_to_coveralls(coverage_dirname=args.coverage_dirname, dry_run=dry_run)
class UploadCoverageReportToCodeClimateController(cement.Controller):
""" Upload coverage report to Code Climate """
class Meta:
label = 'upload-coverage-report-to-code-climate'
description = 'Upload coverage report to Code Climate'
help = 'Upload coverage report to Code Climate'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['--coverage-dirname'], dict(
type=str, default='tests/reports', help="Directory to store coverage data; default='tests/reports'")),
(['--dry-run'], dict(
default=False, dest='dry_run', action='store_true', help='If set, do not send results to Code Climate')),
]
@cement.ex(hide=True)
def _default(self):
""" Upload coverage report to Code Climate """
args = self.app.pargs
dry_run = args.dry_run or bool(int(os.getenv('dry_run', '0')))
buildHelper = BuildHelper()
buildHelper.upload_coverage_report_to_code_climate(coverage_dirname=args.coverage_dirname, dry_run=dry_run)
class MakeDocumentationController(cement.Controller):
""" cement.Controller for make_documentation.
Make HTML documentation. Optionally, spell check documentation.
"""
class Meta:
label = 'make-documentation'
description = 'Make HTML documentation. Optionally, spell check documentation.'
help = 'Make HTML documentation. Optionally, spell check documentation.'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['--spell-check'], dict(
default=False, dest='spell_check', action='store_true', help='If set, spell check documentation')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
buildHelper.make_documentation(spell_check=args.spell_check)
class CompileDownstreamDependenciesController(cement.Controller):
""" Compile the downstream dependencies of a package by analyzing the requirements files of other packages """
class Meta:
label = 'compile-downstream-dependencies'
description = 'Compile the downstream dependencies of a package by analyzing the requirements files of other packages'
help = 'Compile the downstream dependencies of a package by analyzing the requirements files of other packages'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['--dirname'], dict(
type=str, default='.', help='Path to package')),
(['--packages-parent-dir'], dict(
type=str, default='..', help='Path to the parent directory of the other packages')),
(['--config-filename'], dict(
type=str, default=None, help='Path to save the configuration including downstream dependencies in YAML format')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
packages = buildHelper.compile_downstream_dependencies(
dirname=args.dirname,
packages_parent_dir=args.packages_parent_dir,
config_filename=args.config_filename)
if packages:
print('The following downstream dependencies were found:')
for package in packages:
print(' {}'.format(package))
else:
print('No downstream packages were found.')
class ArePackageDependenciesAcyclicController(cement.Controller):
""" Check if the package dependencies are acyclic so they are supported by CircleCI """
class Meta:
label = 'are-package-dependencies-acyclic'
description = 'Check if the package dependencies are acyclic so they are supported by CircleCI'
help = 'Check if the package dependencies are acyclic so they are supported by CircleCI'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['--packages-parent-dir'], dict(
type=str, default='..', help='Path to the parent directory of the other packages')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
if buildHelper.are_package_dependencies_acyclic(packages_parent_dir=args.packages_parent_dir):
print('The dependencies are acyclic.')
else:
print('The dependencies are cyclic. This must be corrected for CircleCI.')
class VisualizePackageDependenciesController(cement.Controller):
""" Visualize downstream package dependencies as a graph """
class Meta:
label = 'visualize-package-dependencies'
description = 'Visualize downstream package dependencies as a graph'
help = 'Visualize downstream package dependencies as a graph'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['--packages-parent-dir'], dict(
type=str, default='..', help='Path to the parent directory of the other packages')),
(['--out-filename'], dict(
type=str, default='../package_dependencies.pdf', help='Path to save the visualization')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
buildHelper.visualize_package_dependencies(packages_parent_dir=args.packages_parent_dir, out_filename=args.out_filename)
class AnalyzePackageController(cement.Controller):
""" Perform static analyses of a package using Pylint """
class Meta:
label = 'analyze-package'
description = 'Perform static analyses of a package using Pylint'
help = 'Perform static analyses of a package using Pylint'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['package_name'], dict(
type=str, help='Name of the package to analyze')),
(['--messages'], dict(
type=str, default='', help='comma-separated list of ids of Pylint checks to run')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
if args.messages:
messages = [msg.strip() for msg in args.messages.split(',')]
else:
messages = None
buildHelper.analyze_package(args.package_name, messages=messages)
class FindMissingRequirementsController(cement.Controller):
""" cement.Controller for finding missing requirements """
class Meta:
label = 'find-missing-requirements'
description = 'Find missing requirements for a package.'
help = 'Find missing requirements for a package.'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['package_name'], dict(
type=str, help='Package name')),
(['--dirname'], dict(
type=str, default='.', help='Path to package')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
missing = buildHelper.find_missing_requirements(
args.package_name, dirname=args.dirname)
missing = sorted(missing, key=lambda m: m[0])
if missing:
print('The following dependencies should likely be added to requirements.txt')
for name, uses in missing:
for use in uses:
for filename, lineno in use.locations:
print(' {:s} in {:s}:{:d} (due to `{:s}`)'.format(
name, os.path.relpath(filename), lineno, use.modname))
else:
print('requirements.txt appears to contain all of the dependencies')
class FindUnusedRequirementsController(cement.Controller):
""" cement.Controller for finding unused requirements """
class Meta:
label = 'find-unused-requirements'
description = 'Find unused requirements for a package.'
help = 'Find unused requirements for a package.'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['package_name'], dict(
type=str, help='Package name')),
(['--dirname'], dict(
type=str, default='.', help='Path to package')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
unuseds = buildHelper.find_unused_requirements(
args.package_name, dirname=args.dirname)
if unuseds:
print('The following requirements from requirements.txt may not be necessary:')
for name in sorted(unuseds):
print(' {}'.format(name))
else:
print('All of the dependencies appear to be necessary')
class UploadPackageToPypiController(cement.Controller):
""" Upload package to PyPI
"""
class Meta:
label = 'upload-package-to-pypi'
description = 'Upload package to PyPI'
help = 'Upload package to PyPI'
stacked_on = 'base'
stacked_type = 'nested'
arguments = [
(['--dirname'], dict(
type=str, default='.', help='Path to package (e.g. parent directory of setup.py)')),
(['--repository'], dict(
type=str, default='pypi', help='Repository to upload the package to (e.g. pypi or testpypi)')),
(['--do-not-upload-source'], dict(
default=False, action='store_true', help='if set, do not upload source code to PyPI')),
(['--do-not-upload-build'], dict(
default=False, action='store_true', help='if set, do not upload build to PyPI')),
]
@cement.ex(hide=True)
def _default(self):
args = self.app.pargs
buildHelper = BuildHelper()
buildHelper.upload_package_to_pypi(
dirname=args.dirname,
repository=args.repository,
upload_source=not args.do_not_upload_source,
upload_build=not args.do_not_upload_build)
class App(cement.App):
""" Command line application """
class Meta:
label = 'karr_lab_build_utils'
base_controller = 'base'
handlers = [
BaseController,
CreatePackageController,
CreateRepositoryController,
SetupRepositoryController,
CreateDocumentationTemplateController,
DownloadInstallPackageConfigFilesController,
RunTestsController,
DockerController,
DockerCreateContainerController,
InstallPackageToDockerContainerController,
RunTestsInDockerContainerController,
DockerRemoveContainerController,
FollowCircleciBuildController,
GetCircleciEnvironmentVariablesController,
SetCircleciEnvironmentVariableController,
DeleteCircleciEnvironmentVariableController,
CreateCodeClimateGithubWebhookController,
DoPostTestTasksController,
MakeAndArchiveReportsController,
CombineCoverageReportsController,
ArchiveCoverageReportController,
UploadCoverageReportToCoverallsController,
UploadCoverageReportToCodeClimateController,
MakeDocumentationController,
CompileDownstreamDependenciesController,
ArePackageDependenciesAcyclicController,
VisualizePackageDependenciesController,
AnalyzePackageController,
FindMissingRequirementsController,
FindUnusedRequirementsController,
UploadPackageToPypiController,
]
def main():
with App() as app:
app.run()
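# If this module is also meant to be executed directly (it may instead be exposed
# only as a console_scripts entry point; that is an assumption), the conventional
# guard would be:
#   if __name__ == '__main__':
#       main()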
|
|
# encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'GroupedMessage'
db.create_table(
'sentry_groupedmessage', (
(
'id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(
primary_key=True
)
), (
'logger', self.gf('django.db.models.fields.CharField')(
default='root', max_length=64, db_index=True, blank=True
)
), (
'class_name', self.gf('django.db.models.fields.CharField')(
db_index=True, max_length=128, null=True, blank=True
)
), (
'level', self.gf('django.db.models.fields.PositiveIntegerField')(
default=40, db_index=True, blank=True
)
), ('message', self.gf('django.db.models.fields.TextField')()),
('traceback',
self.gf('django.db.models.fields.TextField')(null=True, blank=True)), (
'view',
self.gf('django.db.models.fields.CharField')(max_length=200, db_index=True)
), (
'url', self.gf('django.db.models.fields.URLField')(
max_length=200, null=True, blank=True
)
), (
'server_name',
self.gf('django.db.models.fields.CharField')(max_length=128, db_index=True)
), (
'checksum',
self.gf('django.db.models.fields.CharField')(max_length=32, db_index=True)
), ('status', self.gf('django.db.models.fields.PositiveIntegerField')(default=0)),
('times_seen',
self.gf('django.db.models.fields.PositiveIntegerField')(default=1)), (
'last_seen', self.gf('django.db.models.fields.DateTimeField')(
db_index=True
)
), (
'first_seen', self.gf('django.db.models.fields.DateTimeField')(
db_index=True
)
),
)
)
db.send_create_signal('sentry', ['GroupedMessage'])
# Adding unique constraint on 'GroupedMessage', fields ['logger', 'view', 'checksum']
db.create_unique('sentry_groupedmessage', ['logger', 'view', 'checksum'])
# Adding model 'Message'
db.create_table(
'sentry_message', (
(
'id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(
primary_key=True
)
), (
'logger', self.gf('django.db.models.fields.CharField')(
default='root', max_length=64, db_index=True, blank=True
)
), (
'class_name', self.gf('django.db.models.fields.CharField')(
db_index=True, max_length=128, null=True, blank=True
)
), (
'level', self.gf('django.db.models.fields.PositiveIntegerField')(
default=40, db_index=True, blank=True
)
), ('message', self.gf('django.db.models.fields.TextField')()),
('traceback',
self.gf('django.db.models.fields.TextField')(null=True, blank=True)), (
'view',
self.gf('django.db.models.fields.CharField')(max_length=200, db_index=True)
), (
'url', self.gf('django.db.models.fields.URLField')(
max_length=200, null=True, blank=True
)
), (
'server_name',
self.gf('django.db.models.fields.CharField')(max_length=128, db_index=True)
), (
'checksum',
self.gf('django.db.models.fields.CharField')(max_length=32, db_index=True)
), (
'datetime', self.gf('django.db.models.fields.DateTimeField')(
db_index=True
)
), ('data', self.gf('django.db.models.fields.TextField')(null=True, blank=True)),
)
)
db.send_create_signal('sentry', ['Message'])
# Adding model 'User'
db.create_table(
'auth_user', (
('password', self.gf('django.db.models.fields.CharField')(max_length=128)), (
'last_login',
self.gf('django.db.models.fields.DateTimeField')()
), ('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), (
'username',
self.gf('django.db.models.fields.CharField')(unique=True, max_length=128)
), (
'first_name',
self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)
), (
'last_name',
self.gf('django.db.models.fields.CharField')(max_length=30, blank=True)
),
('email', self.gf('django.db.models.fields.EmailField')(max_length=75, blank=True)),
('is_staff', self.gf('django.db.models.fields.BooleanField')(default=False)),
('is_active', self.gf('django.db.models.fields.BooleanField')(default=True)),
('is_superuser', self.gf('django.db.models.fields.BooleanField')(default=False)), (
'date_joined',
self.gf('django.db.models.fields.DateTimeField')()
),
)
)
db.send_create_signal('sentry', ['User'])
def backwards(self, orm):
# Removing unique constraint on 'GroupedMessage', fields ['logger', 'view', 'checksum']
# (the constraint must be dropped before the table that carries it)
db.delete_unique('sentry_groupedmessage', ['logger', 'view', 'checksum'])
# Deleting model 'GroupedMessage'
db.delete_table('sentry_groupedmessage')
# Deleting model 'Message'
db.delete_table('sentry_message')
# Deleting model 'User'
db.delete_table('auth_user')
models = {
u'auth.group': {
'Meta': {
'object_name': 'Group'
},
u'id': ('django.db.models.fields.AutoField', [], {
'primary_key': 'True'
}),
'name':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '80'
}),
'permissions': (
'django.db.models.fields.related.ManyToManyField', [], {
'to': u"orm['auth.Permission']",
'symmetrical': 'False',
'blank': 'True'
}
)
},
u'auth.permission': {
'Meta': {
'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')",
'unique_together': "((u'content_type', u'codename'),)",
'object_name': 'Permission'
},
'codename': ('django.db.models.fields.CharField', [], {
'max_length': '100'
}),
'content_type': (
'sentry.db.models.fields.FlexibleForeignKey', [], {
'to': u"orm['contenttypes.ContentType']"
}
),
u'id': ('django.db.models.fields.AutoField', [], {
'primary_key': 'True'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '50'
})
},
'sentry.groupedmessage': {
'Meta': {
'unique_together': "(('logger', 'view', 'checksum'),)",
'object_name': 'GroupedMessage'
},
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'db_index': 'True'
}),
'class_name': (
'django.db.models.fields.CharField', [], {
'db_index': 'True',
'max_length': '128',
'null': 'True',
'blank': 'True'
}
),
'first_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'last_seen': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'level': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '40',
'db_index': 'True',
'blank': 'True'
}
),
'logger': (
'django.db.models.fields.CharField', [], {
'default': "'root'",
'max_length': '64',
'db_index': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.TextField', [], {}),
'server_name':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'db_index': 'True'
}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '0'
}),
'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {
'default': '1'
}),
'traceback':
('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'url': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'view':
('django.db.models.fields.CharField', [], {
'max_length': '200',
'db_index': 'True'
})
},
'sentry.message': {
'Meta': {
'object_name': 'Message'
},
'checksum':
('django.db.models.fields.CharField', [], {
'max_length': '32',
'db_index': 'True'
}),
'class_name': (
'django.db.models.fields.CharField', [], {
'db_index': 'True',
'max_length': '128',
'null': 'True',
'blank': 'True'
}
),
'data': ('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'datetime': (
'django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now',
'db_index': 'True'
}
),
'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'level': (
'django.db.models.fields.PositiveIntegerField', [], {
'default': '40',
'db_index': 'True',
'blank': 'True'
}
),
'logger': (
'django.db.models.fields.CharField', [], {
'default': "'root'",
'max_length': '64',
'db_index': 'True',
'blank': 'True'
}
),
'message': ('django.db.models.fields.TextField', [], {}),
'server_name':
('django.db.models.fields.CharField', [], {
'max_length': '128',
'db_index': 'True'
}),
'traceback':
('django.db.models.fields.TextField', [], {
'null': 'True',
'blank': 'True'
}),
'url': (
'django.db.models.fields.URLField', [], {
'max_length': '200',
'null': 'True',
'blank': 'True'
}
),
'view':
('django.db.models.fields.CharField', [], {
'max_length': '255',
'db_index': 'True'
})
},
'sentry.user': {
'Meta': {
'object_name': 'User',
'db_table': "'auth_user'"
},
'date_joined':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'email':
('django.db.models.fields.EmailField', [], {
'max_length': '75',
'blank': 'True'
}),
'first_name':
('django.db.models.fields.CharField', [], {
'max_length': '30',
'blank': 'True'
}),
'id': ('django.db.models.fields.AutoField', [], {
'primary_key': 'True'
}),
'is_active': ('django.db.models.fields.BooleanField', [], {
'default': 'True'
}),
'is_staff': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {
'default': 'False'
}),
'last_login':
('django.db.models.fields.DateTimeField', [], {
'default': 'datetime.datetime.now'
}),
'last_name':
('django.db.models.fields.CharField', [], {
'max_length': '30',
'blank': 'True'
}),
'password': ('django.db.models.fields.CharField', [], {
'max_length': '128'
}),
'username':
('django.db.models.fields.CharField', [], {
'unique': 'True',
'max_length': '128'
})
},
u'contenttypes.contenttype': {
'Meta': {
'ordering': "('name',)",
'unique_together': "(('app_label', 'model'),)",
'object_name': 'ContentType',
'db_table': "'django_content_type'"
},
'app_label': ('django.db.models.fields.CharField', [], {
'max_length': '100'
}),
u'id':
('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {
'primary_key': 'True'
}),
'model': ('django.db.models.fields.CharField', [], {
'max_length': '100'
}),
'name': ('django.db.models.fields.CharField', [], {
'max_length': '100'
})
},
}
complete_apps = ['sentry']
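# With South installed, this initial migration would typically be applied with
# the standard management command (project setup assumed):
#   python manage.py migrate sentry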
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworkGatewaysOperations(object):
"""VirtualNetworkGatewaysOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2015_06_15.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _create_or_update_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VirtualNetworkGateway"
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetworkGateway"
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2015-06-15"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VirtualNetworkGateway')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VirtualNetworkGateway"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VirtualNetworkGateway"]
"""Creates or updates a virtual network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to create or update virtual network gateway operation.
:type parameters: ~azure.mgmt.network.v2015_06_15.models.VirtualNetworkGateway
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2015_06_15.models.VirtualNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
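# Illustrative caller-side sketch (client construction and the resource names are
# assumptions, not part of this module):
#   poller = network_client.virtual_network_gateways.begin_create_or_update(
#       "my-rg", "my-gateway", gateway_parameters)
#   gateway = poller.result()  # blocks until the long-running operation completes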
def get(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.VirtualNetworkGateway"
"""Gets the specified virtual network gateway by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VirtualNetworkGateway, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2015_06_15.models.VirtualNetworkGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2015-06-15"
accept = "application/json, text/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
def _delete_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2015-06-15"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified virtual network gateway.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.VirtualNetworkGatewayListResult"]
"""Gets all virtual network gateways by resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either VirtualNetworkGatewayListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2015_06_15.models.VirtualNetworkGatewayListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGatewayListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2015-06-15"
accept = "application/json, text/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGatewayListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways'} # type: ignore
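    # Usage sketch (illustrative): list() returns an ItemPaged iterator, so callers
    # can loop over the gateways directly; page requests are issued lazily as the
    # iterator is consumed. `client` is assumed to be a NetworkManagementClient.
    #
    #     for gateway in client.virtual_network_gateways.list(resource_group_name):
    #         print(gateway.name, gateway.gateway_type)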
def _reset_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VirtualNetworkGateway"
**kwargs # type: Any
):
# type: (...) -> Optional["_models.VirtualNetworkGateway"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VirtualNetworkGateway"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2015-06-15"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self._reset_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VirtualNetworkGateway')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_reset_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/reset'} # type: ignore
def begin_reset(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VirtualNetworkGateway"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.VirtualNetworkGateway"]
"""Resets the primary of the virtual network gateway in the specified resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Virtual network gateway vip address supplied to the begin reset of the
active-active feature enabled gateway.
:type parameters: ~azure.mgmt.network.v2015_06_15.models.VirtualNetworkGateway
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either VirtualNetworkGateway or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2015_06_15.models.VirtualNetworkGateway]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.VirtualNetworkGateway"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._reset_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('VirtualNetworkGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/reset'} # type: ignore
def _generatevpnclientpackage_initial(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VpnClientParameters"
**kwargs # type: Any
):
# type: (...) -> str
cls = kwargs.pop('cls', None) # type: ClsType[str]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2015-06-15"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json, text/json"
# Construct URL
url = self._generatevpnclientpackage_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'VpnClientParameters')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_generatevpnclientpackage_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnclientpackage'} # type: ignore
def begin_generatevpnclientpackage(
self,
resource_group_name, # type: str
virtual_network_gateway_name, # type: str
parameters, # type: "_models.VpnClientParameters"
**kwargs # type: Any
):
# type: (...) -> LROPoller[str]
"""Generates VPN client package for P2S client of the virtual network gateway in the specified
resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_gateway_name: The name of the virtual network gateway.
:type virtual_network_gateway_name: str
:param parameters: Parameters supplied to the generate virtual network gateway VPN client
package operation.
:type parameters: ~azure.mgmt.network.v2015_06_15.models.VpnClientParameters
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either str or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[str]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[str]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._generatevpnclientpackage_initial(
resource_group_name=resource_group_name,
virtual_network_gateway_name=virtual_network_gateway_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('str', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_generatevpnclientpackage.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/generatevpnclientpackage'} # type: ignore
|
|
# -*- coding: utf-8 -*-
# Copyright 2015 The Chromium OS Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Install/copy the image to the device."""
from __future__ import division
from __future__ import print_function
import os
import re
import shutil
import subprocess
import sys
import tempfile
from chromite.cli.cros import cros_chrome_sdk
from chromite.lib import auto_updater
from chromite.lib import auto_updater_transfer
from chromite.lib import commandline
from chromite.lib import cros_build_lib
from chromite.lib import cros_logging as logging
from chromite.lib import dev_server_wrapper as ds_wrapper
from chromite.lib import operation
from chromite.lib import osutils
from chromite.lib import path_util
from chromite.lib import remote_access
from chromite.lib.paygen import paygen_payload_lib
from chromite.lib.paygen import paygen_stateful_payload_lib
from chromite.lib.xbuddy import artifact_info
assert sys.version_info >= (3, 6), 'This module requires Python 3.6+'
def GetDefaultBoard():
"""Look up default board.
In a chrome checkout, return $SDK_BOARD. In a chromeos checkout,
return the contents of .default_board.
"""
if path_util.DetermineCheckout().type == path_util.CHECKOUT_TYPE_GCLIENT:
return os.environ.get(cros_chrome_sdk.SDKFetcher.SDK_BOARD_ENV)
return cros_build_lib.GetDefaultBoard()
class UsbImagerOperation(operation.ProgressBarOperation):
"""Progress bar for flashing image to operation."""
def __init__(self, image):
super(UsbImagerOperation, self).__init__()
self._size = os.path.getsize(image)
self._transferred = 0
self._bytes = re.compile(r'(\d+) bytes')
def _GetDDPid(self):
"""Get the Pid of dd."""
try:
pids = cros_build_lib.run(['pgrep', 'dd'], capture_output=True,
print_cmd=False, encoding='utf-8').stdout
for pid in pids.splitlines():
if osutils.IsChildProcess(int(pid), name='dd'):
return int(pid)
return -1
except cros_build_lib.RunCommandError:
# If dd isn't still running, then we assume that it is finished.
return -1
def _PingDD(self, dd_pid):
"""Send USR1 signal to dd to get status update."""
try:
cmd = ['kill', '-USR1', str(dd_pid)]
cros_build_lib.sudo_run(cmd, print_cmd=False)
except cros_build_lib.RunCommandError:
# Here we assume that dd finished in the background.
return
def ParseOutput(self, output=None):
"""Parse the output of dd to update progress bar."""
dd_pid = self._GetDDPid()
if dd_pid == -1:
return
self._PingDD(dd_pid)
if output is None:
stdout = self._stdout.read()
stderr = self._stderr.read()
output = stdout + stderr
match = self._bytes.search(output)
if match:
self._transferred = int(match.groups()[0])
self.ProgressBar(self._transferred / self._size)
def _IsFilePathGPTDiskImage(file_path, require_pmbr=False):
"""Determines if a file is a valid GPT disk.
Args:
file_path: Path to the file to test.
require_pmbr: Whether to require a PMBR in LBA0.
"""
if os.path.isfile(file_path):
with open(file_path, 'rb') as image_file:
if require_pmbr:
# Seek to the end of LBA0 and look for the PMBR boot signature.
image_file.seek(0x1fe)
if image_file.read(2) != b'\x55\xaa':
return False
# Current file position is start of LBA1 now.
else:
# Seek to LBA1 where the GPT starts.
image_file.seek(0x200)
# See if there's a GPT here.
if image_file.read(8) == b'EFI PART':
return True
return False
def _ChooseImageFromDirectory(dir_path):
"""Lists all image files in |dir_path| and ask user to select one.
Args:
dir_path: Path to the directory.
"""
images = sorted([x for x in os.listdir(dir_path) if
_IsFilePathGPTDiskImage(os.path.join(dir_path, x))])
idx = 0
if not images:
raise ValueError('No image found in %s.' % dir_path)
elif len(images) > 1:
idx = cros_build_lib.GetChoice(
'Multiple images found in %s. Please select one to continue:' % (
(dir_path,)),
images)
return os.path.join(dir_path, images[idx])
class FlashError(Exception):
"""Thrown when there is an unrecoverable error during flash."""
class USBImager(object):
"""Copy image to the target removable device."""
def __init__(self, device, board, image, version, debug=False,
install=False, yes=False):
"""Initializes USBImager."""
self.device = device
self.board = board if board else GetDefaultBoard()
self.image = image
self.version = version
self.debug = debug
self.debug_level = logging.DEBUG if debug else logging.INFO
self.install = install
self.yes = yes
def DeviceNameToPath(self, device_name):
return '/dev/%s' % device_name
def GetRemovableDeviceDescription(self, device):
"""Returns a informational description of the removable |device|.
Args:
device: the device name (e.g. sdc).
Returns:
A string describing |device| (e.g. Patriot Memory 7918 MB).
"""
desc = [
osutils.GetDeviceInfo(device, keyword='manufacturer'),
osutils.GetDeviceInfo(device, keyword='product'),
osutils.GetDeviceSize(self.DeviceNameToPath(device)),
'(%s)' % self.DeviceNameToPath(device),
]
return ' '.join([x for x in desc if x])
def ListAllRemovableDevices(self):
"""Returns a list of removable devices.
Returns:
A list of device names (e.g. ['sdb', 'sdc']).
"""
devices = osutils.ListBlockDevices()
removable_devices = []
for d in devices:
if d.TYPE == 'disk' and d.RM == '1':
removable_devices.append(d.NAME)
return removable_devices
def ChooseRemovableDevice(self, devices):
"""Lists all removable devices and asks user to select/confirm.
Args:
devices: a list of device names (e.g. ['sda', 'sdb']).
Returns:
The device name chosen by the user.
"""
idx = cros_build_lib.GetChoice(
'Removable device(s) found. Please select/confirm to continue:',
[self.GetRemovableDeviceDescription(x) for x in devices])
return devices[idx]
def InstallImageToDevice(self, image, device):
"""Installs |image| to the removable |device|.
Args:
image: Path to the image to copy.
device: Device to copy to.
"""
cmd = [
'chromeos-install',
'--yes',
'--skip_src_removable',
'--skip_dst_removable',
'--payload_image=%s' % image,
'--dst=%s' % device,
'--skip_postinstall',
]
cros_build_lib.sudo_run(cmd,
print_cmd=True,
debug_level=logging.NOTICE,
stderr=subprocess.STDOUT,
log_output=True)
def CopyImageToDevice(self, image, device):
"""Copies |image| to the removable |device|.
Args:
image: Path to the image to copy.
device: Device to copy to.
"""
cmd = ['dd', 'if=%s' % image, 'of=%s' % device, 'bs=4M', 'iflag=fullblock',
'oflag=direct', 'conv=fdatasync']
if logging.getLogger().getEffectiveLevel() <= logging.NOTICE:
op = UsbImagerOperation(image)
op.Run(cros_build_lib.sudo_run, cmd, debug_level=logging.NOTICE,
encoding='utf-8', update_period=0.5)
else:
cros_build_lib.sudo_run(
cmd, debug_level=logging.NOTICE,
print_cmd=logging.getLogger().getEffectiveLevel() < logging.NOTICE)
# dd likely didn't put the backup GPT in the last block. sfdisk fixes this
# up for us with a 'write' command, so we have a standards-conforming GPT.
# Ignore errors because sfdisk (util-linux < v2.32) isn't always happy to
# fix GPT sanity issues.
cros_build_lib.sudo_run(['sfdisk', device], input='write\n',
check=False,
debug_level=self.debug_level)
cros_build_lib.sudo_run(['partx', '-u', device],
debug_level=self.debug_level)
cros_build_lib.sudo_run(['sync', '-d', device],
debug_level=self.debug_level)
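  # Roughly, the commands issued above are equivalent to the following shell
  # invocations (illustrative only; /dev/sdX and the image path are placeholders):
  #   sudo dd if=chromiumos_image.bin of=/dev/sdX bs=4M iflag=fullblock \
  #       oflag=direct conv=fdatasync
  #   echo write | sudo sfdisk /dev/sdX   # rewrite the backup GPT in the last block
  #   sudo partx -u /dev/sdX              # tell the kernel to re-read partitions
  #   sudo sync -d /dev/sdX               # flush outstanding writes to the device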
def _GetImagePath(self):
"""Returns the image path to use."""
image_path = translated_path = None
if os.path.isfile(self.image):
if not self.yes and not _IsFilePathGPTDiskImage(self.image):
# TODO(wnwen): Open the tarball and if there is just one file in it,
# use that instead. Existing code in upload_symbols.py.
if cros_build_lib.BooleanPrompt(
prolog='The given image file is not a valid disk image. Perhaps '
'you forgot to untar it.',
prompt='Terminate the current flash process?'):
raise FlashError('Update terminated by user.')
image_path = self.image
elif os.path.isdir(self.image):
# Ask user which image (*.bin) in the folder to use.
image_path = _ChooseImageFromDirectory(self.image)
else:
# Translate the xbuddy path to get the exact image to use.
translated_path, _ = ds_wrapper.GetImagePathWithXbuddy(
self.image, self.board, self.version)
image_path = ds_wrapper.TranslatedPathToLocalPath(translated_path)
logging.info('Using image %s', translated_path or image_path)
return image_path
def Run(self):
"""Image the removable device."""
devices = self.ListAllRemovableDevices()
if self.device:
# If user specified a device path, check if it exists.
if not os.path.exists(self.device):
raise FlashError('Device path %s does not exist.' % self.device)
# Then check if it is removable.
if self.device not in [self.DeviceNameToPath(x) for x in devices]:
msg = '%s is not a removable device.' % self.device
if not (self.yes or cros_build_lib.BooleanPrompt(
default=False, prolog=msg)):
raise FlashError('You can specify usb:// to choose from a list of '
'removable devices.')
target = None
if self.device:
# Get device name from path (e.g. sdc in /dev/sdc).
target = self.device.rsplit(os.path.sep, 1)[-1]
elif devices:
# Ask user to choose from the list.
target = self.ChooseRemovableDevice(devices)
else:
raise FlashError('No removable devices detected.')
image_path = self._GetImagePath()
try:
device = self.DeviceNameToPath(target)
if self.install:
self.InstallImageToDevice(image_path, device)
else:
self.CopyImageToDevice(image_path, device)
except cros_build_lib.RunCommandError:
logging.error('Failed copying image to device %s',
self.DeviceNameToPath(target))
class FileImager(USBImager):
"""Copy image to the target path."""
def Run(self):
"""Copy the image to the path specified by self.device."""
if not os.path.isdir(os.path.dirname(self.device)):
raise FlashError('Parent of path %s is not a directory.' % self.device)
image_path = self._GetImagePath()
if os.path.isdir(self.device):
logging.info('Copying to %s',
os.path.join(self.device, os.path.basename(image_path)))
else:
logging.info('Copying to %s', self.device)
try:
shutil.copy(image_path, self.device)
except IOError:
logging.error('Failed to copy image %s to %s', image_path, self.device)
class RemoteDeviceUpdater(object):
"""Performs update on a remote device."""
STATEFUL_UPDATE_BIN = '/usr/bin/stateful_update'
UPDATE_ENGINE_BIN = 'update_engine_client'
# Root working directory on the device. This directory is in the
# stateful partition and thus has enough space to store the payloads.
DEVICE_BASE_DIR = '/usr/local/tmp/cros-flash'
UPDATE_CHECK_INTERVAL_PROGRESSBAR = 0.5
UPDATE_CHECK_INTERVAL_NORMAL = 10
def __init__(self, ssh_hostname, ssh_port, image, stateful_update=True,
rootfs_update=True, clobber_stateful=False, reboot=True,
board=None, src_image_to_delta=None, wipe=True, debug=False,
yes=False, force=False, ssh_private_key=None, ping=True,
disable_verification=False, send_payload_in_parallel=False,
experimental_au=False, version=None):
"""Initializes RemoteDeviceUpdater"""
if not stateful_update and not rootfs_update:
raise ValueError('No update operation to perform; either stateful or'
' rootfs partitions must be updated.')
self.tempdir = tempfile.mkdtemp(prefix='cros-flash')
self.ssh_hostname = ssh_hostname
self.ssh_port = ssh_port
self.image = image
self.board = board
self.src_image_to_delta = src_image_to_delta
self.do_stateful_update = stateful_update
self.do_rootfs_update = rootfs_update
self.disable_verification = disable_verification
self.clobber_stateful = clobber_stateful
self.reboot = reboot
self.debug = debug
self.ssh_private_key = ssh_private_key
self.ping = ping
# Do not wipe if debug is set.
self.wipe = wipe and not debug
self.yes = yes
self.force = force
self.send_payload_in_parallel = send_payload_in_parallel
self.experimental_au = experimental_au
self.version = version
def Cleanup(self):
"""Cleans up the temporary directory."""
if self.wipe:
logging.info('Cleaning up temporary working directory...')
osutils.RmDir(self.tempdir)
else:
logging.info('You can find the log files and/or payloads in %s',
self.tempdir)
def GetPayloadDir(self, device):
"""Get directory of payload for update.
This method is used to obtain the directory of payload for cros-flash. The
given path 'self.image' is passed in when initializing RemoteDeviceUpdater.
If self.image is a directory, we directly use the provided update payload(s)
in this directory.
If self.image is an image, we will generate payloads for it and put them in
our temporary directory. The reason is that people may modify a local image
or override it (on the same path) with a different image, so in order to be
safe each time we need to generate the payloads and not cache them.
    If none of the above applies, we use xbuddy to first obtain the image path
    (and possibly download it). Then we generate the payloads in the same
    directory the image is located in, which is what devserver used to do. The
    path to an image produced by the devserver (or xbuddy) is unique, and
    normally nobody overrides that image with a different one, so it is safe to
    put the payloads next to the image. This is a poor man's version of caching,
    but it makes cros flash faster for users who flash the same image multiple
    times (without changing the image).
Args:
device: A ChromiumOSDevice object.
Returns:
A string payload_dir, that represents the payload directory.
"""
if os.path.isdir(self.image):
# The given path is a directory.
logging.info('Using provided payloads in %s', self.image)
return self.image
image_path = None
if os.path.isfile(self.image):
# The given path is an image.
image_path = self.image
payload_dir = self.tempdir
else:
# Assuming it is an xbuddy path.
self.board = cros_build_lib.GetBoard(
device_board=device.board or GetDefaultBoard(),
override_board=self.board,
force=self.yes,
strict=True)
if not self.force and self.board != device.board:
# If a board was specified, it must be compatible with the device.
raise FlashError('Device (%s) is incompatible with board %s' %
(device.board, self.board))
logging.info('Board is %s', self.board)
# TODO(crbug.com/872441): Once devserver code has been moved to chromite,
# use xbuddy library directly instead of the devserver_wrapper.
# Fetch the full payload and properties, and stateful files. If this
# fails, fallback to downloading the image.
try:
translated_path, _ = ds_wrapper.GetImagePathWithXbuddy(
os.path.join(self.image, artifact_info.FULL_PAYLOAD),
self.board, self.version, silent=True)
payload_dir = os.path.dirname(
ds_wrapper.TranslatedPathToLocalPath(translated_path))
ds_wrapper.GetImagePathWithXbuddy(
os.path.join(self.image, artifact_info.STATEFUL_PAYLOAD),
self.board, self.version, silent=True)
fetch_image = False
except (ds_wrapper.ImagePathError, ds_wrapper.ArtifactDownloadError):
logging.info('Could not find full_payload or stateful for "%s"',
self.image)
fetch_image = True
# We didn't find the full_payload, attempt to download the image.
if fetch_image:
translated_path, _ = ds_wrapper.GetImagePathWithXbuddy(
self.image, self.board, self.version)
image_path = ds_wrapper.TranslatedPathToLocalPath(translated_path)
payload_dir = os.path.join(os.path.dirname(image_path), 'payloads')
logging.notice('Using image path %s and payload directory %s',
image_path, payload_dir)
# Generate rootfs and stateful update payloads if they do not exist.
payload_path = os.path.join(payload_dir,
auto_updater_transfer.ROOTFS_FILENAME)
if not os.path.exists(payload_path):
paygen_payload_lib.GenerateUpdatePayload(
image_path, payload_path, src_image=self.src_image_to_delta)
if not os.path.exists(os.path.join(
payload_dir, auto_updater_transfer.STATEFUL_FILENAME)):
paygen_stateful_payload_lib.GenerateStatefulPayload(image_path,
payload_dir)
return payload_dir
def Run(self):
"""Perform remote device update.
The update process includes:
      1. Initialize a device instance for the given remote device.
      2. Obtain the payload directory containing the payloads required for the update.
      3. Initialize an auto-updater instance and run the update via RunUpdate().
      4. After the auto-update, clean up all temporary files and directories.
"""
try:
with remote_access.ChromiumOSDeviceHandler(
self.ssh_hostname, port=self.ssh_port, base_dir=self.DEVICE_BASE_DIR,
private_key=self.ssh_private_key, ping=self.ping) as device:
try:
# Get payload directory
payload_dir = self.GetPayloadDir(device)
# Do auto-update
chromeos_AU = auto_updater.ChromiumOSUpdater(
device=device,
build_name=None,
payload_dir=payload_dir,
tempdir=self.tempdir,
do_rootfs_update=self.do_rootfs_update,
do_stateful_update=self.do_stateful_update,
reboot=self.reboot,
disable_verification=self.disable_verification,
clobber_stateful=self.clobber_stateful,
yes=self.yes,
send_payload_in_parallel=self.send_payload_in_parallel,
experimental_au=self.experimental_au,
transfer_class=auto_updater_transfer.LocalTransfer)
chromeos_AU.CheckPayloads()
chromeos_AU.RunUpdate()
except Exception:
logging.error('Device update failed.')
lsb_entries = sorted(device.lsb_release.items())
logging.info(
'Following are the LSB version details of the device:\n%s',
'\n'.join('%s=%s' % (k, v) for k, v in lsb_entries))
raise
logging.notice('Update performed successfully.')
except remote_access.RemoteAccessException:
logging.error('Remote device failed to initialize.')
raise
finally:
self.Cleanup()
def Flash(device, image, board=None, install=False, src_image_to_delta=None,
rootfs_update=True, stateful_update=True, clobber_stateful=False,
reboot=True, wipe=True, ssh_private_key=None, ping=True,
disable_rootfs_verification=False, clear_cache=False, yes=False,
force=False, debug=False, send_payload_in_parallel=False,
experimental_au=False, version=None):
"""Flashes a device, USB drive, or file with an image.
This provides functionality common to `cros flash` and `brillo flash`
so that they can parse the commandline separately but still use the
same underlying functionality.
Args:
device: commandline.Device object; None to use the default device.
image: Path (string) to the update image. Can be a local or xbuddy path;
      non-existent local paths are converted to xbuddy.
board: Board to use; None to automatically detect.
install: Install to USB using base disk layout; USB |device| scheme only.
src_image_to_delta: Local path to an image to be used as the base to
generate delta payloads; SSH |device| scheme only.
rootfs_update: Update rootfs partition; SSH |device| scheme only.
stateful_update: Update stateful partition; SSH |device| scheme only.
clobber_stateful: Clobber stateful partition; SSH |device| scheme only.
reboot: Reboot device after update; SSH |device| scheme only.
wipe: Wipe temporary working directory; SSH |device| scheme only.
ssh_private_key: Path to an SSH private key file; None to use test keys.
ping: Ping the device before attempting update; SSH |device| scheme only.
disable_rootfs_verification: Remove rootfs verification after update; SSH
|device| scheme only.
clear_cache: Clear the devserver static directory.
yes: Assume "yes" for any prompt.
force: Ignore sanity checks and prompts. Overrides |yes| if True.
debug: Print additional debugging messages.
send_payload_in_parallel: Transfer payloads in chunks in parallel to speed
up transmissions for long haul between endpoints.
experimental_au: Use the experimental features auto updater. It should be
deprecated once crbug.com/872441 is fixed.
version: Default version.
Raises:
    FlashError: An unrecoverable error occurred.
ValueError: Invalid parameter combination.
"""
if force:
yes = True
if clear_cache:
ds_wrapper.DevServerWrapper.WipeStaticDirectory()
ds_wrapper.DevServerWrapper.CreateStaticDirectory()
if install:
if not device or device.scheme != commandline.DEVICE_SCHEME_USB:
raise ValueError(
'--install can only be used when writing to a USB device')
if not cros_build_lib.IsInsideChroot():
raise ValueError('--install can only be used inside the chroot')
# The user may not have specified a source image, use version as the default.
image = image or version
if not device or device.scheme == commandline.DEVICE_SCHEME_SSH:
if device:
hostname, port = device.hostname, device.port
else:
hostname, port = None, None
logging.notice('Preparing to update the remote device %s', hostname)
updater = RemoteDeviceUpdater(
hostname,
port,
image,
board=board,
src_image_to_delta=src_image_to_delta,
rootfs_update=rootfs_update,
stateful_update=stateful_update,
clobber_stateful=clobber_stateful,
reboot=reboot,
wipe=wipe,
debug=debug,
yes=yes,
force=force,
ssh_private_key=ssh_private_key,
ping=ping,
disable_verification=disable_rootfs_verification,
send_payload_in_parallel=send_payload_in_parallel,
experimental_au=experimental_au,
version=version)
updater.Run()
elif device.scheme == commandline.DEVICE_SCHEME_USB:
path = osutils.ExpandPath(device.path) if device.path else ''
logging.info('Preparing to image the removable device %s', path)
imager = USBImager(path,
board,
image,
version,
debug=debug,
install=install,
yes=yes)
imager.Run()
elif device.scheme == commandline.DEVICE_SCHEME_FILE:
logging.info('Preparing to copy image to %s', device.path)
imager = FileImager(device.path,
board,
image,
version,
debug=debug,
yes=yes)
imager.Run()
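# Usage sketch (illustrative; not part of this module): `cros flash` parses its
# positional arguments into a commandline.Device and an image spec and then
# calls Flash() roughly as below. The device address and board are placeholders.
#
#   device = commandline.DeviceParser(commandline.DEVICE_SCHEME_SSH)('192.168.1.2')
#   Flash(device, 'xbuddy://remote/eve/latest', board='eve')
#
# The device scheme (ssh://, usb://, file://) selects RemoteDeviceUpdater,
# USBImager, or FileImager respectively.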
|
|
"""Test APRS device tracker."""
from unittest.mock import Mock, patch
import aprslib
import pytest
import homeassistant.components.aprs.device_tracker as device_tracker
DEFAULT_PORT = 14580
TEST_CALLSIGN = "testcall"
TEST_COORDS_NULL_ISLAND = (0, 0)
TEST_FILTER = "testfilter"
TEST_HOST = "testhost"
TEST_PASSWORD = "testpass"
def test_make_filter():
"""Test filter."""
callsigns = ["CALLSIGN1", "callsign2"]
res = device_tracker.make_filter(callsigns)
assert res == "b/CALLSIGN1 b/CALLSIGN2"
def test_gps_accuracy_0():
"""Test GPS accuracy level 0."""
acc = device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, 0)
assert acc == 0
def test_gps_accuracy_1():
"""Test GPS accuracy level 1."""
acc = device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, 1)
assert acc == 186
def test_gps_accuracy_2():
"""Test GPS accuracy level 2."""
acc = device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, 2)
assert acc == 1855
def test_gps_accuracy_3():
"""Test GPS accuracy level 3."""
acc = device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, 3)
assert acc == 18553
def test_gps_accuracy_4():
"""Test GPS accuracy level 4."""
acc = device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, 4)
assert acc == 111319
def test_gps_accuracy_invalid_int():
    """Test GPS accuracy with an invalid int input."""
    level = 5
    with pytest.raises(ValueError):
        device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, level)
def test_gps_accuracy_invalid_string():
    """Test GPS accuracy with an invalid string input."""
    level = "not an int"
    with pytest.raises(ValueError):
        device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, level)
def test_gps_accuracy_invalid_float():
    """Test GPS accuracy with an invalid float input."""
    level = 1.2
    with pytest.raises(ValueError):
        device_tracker.gps_accuracy(TEST_COORDS_NULL_ISLAND, level)
def test_aprs_listener():
"""Test listener thread."""
with patch("aprslib.IS") as mock_ais:
callsign = TEST_CALLSIGN
password = TEST_PASSWORD
host = TEST_HOST
server_filter = TEST_FILTER
port = DEFAULT_PORT
see = Mock()
listener = device_tracker.AprsListenerThread(
callsign, password, host, server_filter, see
)
listener.run()
assert listener.callsign == callsign
assert listener.host == host
assert listener.server_filter == server_filter
assert listener.see == see
assert listener.start_event.is_set()
assert listener.start_success
assert listener.start_message == "Connected to testhost with callsign testcall."
mock_ais.assert_called_with(callsign, passwd=password, host=host, port=port)
def test_aprs_listener_start_fail():
"""Test listener thread start failure."""
with patch(
"aprslib.IS.connect", side_effect=aprslib.ConnectionError("Unable to connect.")
):
callsign = TEST_CALLSIGN
password = TEST_PASSWORD
host = TEST_HOST
server_filter = TEST_FILTER
see = Mock()
listener = device_tracker.AprsListenerThread(
callsign, password, host, server_filter, see
)
listener.run()
assert listener.callsign == callsign
assert listener.host == host
assert listener.server_filter == server_filter
assert listener.see == see
assert listener.start_event.is_set()
assert not listener.start_success
assert listener.start_message == "Unable to connect."
def test_aprs_listener_stop():
"""Test listener thread stop."""
with patch("aprslib.IS"):
callsign = TEST_CALLSIGN
password = TEST_PASSWORD
host = TEST_HOST
server_filter = TEST_FILTER
see = Mock()
listener = device_tracker.AprsListenerThread(
callsign, password, host, server_filter, see
)
listener.ais.close = Mock()
listener.run()
listener.stop()
assert listener.callsign == callsign
assert listener.host == host
assert listener.server_filter == server_filter
assert listener.see == see
assert listener.start_event.is_set()
assert listener.start_message == "Connected to testhost with callsign testcall."
assert listener.start_success
listener.ais.close.assert_called_with()
def test_aprs_listener_rx_msg():
"""Test rx_msg."""
with patch("aprslib.IS"):
callsign = TEST_CALLSIGN
password = TEST_PASSWORD
host = TEST_HOST
server_filter = TEST_FILTER
see = Mock()
sample_msg = {
device_tracker.ATTR_FORMAT: "uncompressed",
device_tracker.ATTR_FROM: "ZZ0FOOBAR-1",
device_tracker.ATTR_LATITUDE: 0.0,
device_tracker.ATTR_LONGITUDE: 0.0,
device_tracker.ATTR_ALTITUDE: 0,
}
listener = device_tracker.AprsListenerThread(
callsign, password, host, server_filter, see
)
listener.run()
listener.rx_msg(sample_msg)
assert listener.callsign == callsign
assert listener.host == host
assert listener.server_filter == server_filter
assert listener.see == see
assert listener.start_event.is_set()
assert listener.start_success
assert listener.start_message == "Connected to testhost with callsign testcall."
see.assert_called_with(
dev_id=device_tracker.slugify("ZZ0FOOBAR-1"),
gps=(0.0, 0.0),
attributes={"altitude": 0},
)
def test_aprs_listener_rx_msg_ambiguity():
"""Test rx_msg with posambiguity."""
with patch("aprslib.IS"):
callsign = TEST_CALLSIGN
password = TEST_PASSWORD
host = TEST_HOST
server_filter = TEST_FILTER
see = Mock()
sample_msg = {
device_tracker.ATTR_FORMAT: "uncompressed",
device_tracker.ATTR_FROM: "ZZ0FOOBAR-1",
device_tracker.ATTR_LATITUDE: 0.0,
device_tracker.ATTR_LONGITUDE: 0.0,
device_tracker.ATTR_POS_AMBIGUITY: 1,
}
listener = device_tracker.AprsListenerThread(
callsign, password, host, server_filter, see
)
listener.run()
listener.rx_msg(sample_msg)
assert listener.callsign == callsign
assert listener.host == host
assert listener.server_filter == server_filter
assert listener.see == see
assert listener.start_event.is_set()
assert listener.start_success
assert listener.start_message == "Connected to testhost with callsign testcall."
see.assert_called_with(
dev_id=device_tracker.slugify("ZZ0FOOBAR-1"),
gps=(0.0, 0.0),
attributes={device_tracker.ATTR_GPS_ACCURACY: 186},
)
def test_aprs_listener_rx_msg_ambiguity_invalid():
"""Test rx_msg with invalid posambiguity."""
with patch("aprslib.IS"):
callsign = TEST_CALLSIGN
password = TEST_PASSWORD
host = TEST_HOST
server_filter = TEST_FILTER
see = Mock()
sample_msg = {
device_tracker.ATTR_FORMAT: "uncompressed",
device_tracker.ATTR_FROM: "ZZ0FOOBAR-1",
device_tracker.ATTR_LATITUDE: 0.0,
device_tracker.ATTR_LONGITUDE: 0.0,
device_tracker.ATTR_POS_AMBIGUITY: 5,
}
listener = device_tracker.AprsListenerThread(
callsign, password, host, server_filter, see
)
listener.run()
listener.rx_msg(sample_msg)
assert listener.callsign == callsign
assert listener.host == host
assert listener.server_filter == server_filter
assert listener.see == see
assert listener.start_event.is_set()
assert listener.start_success
assert listener.start_message == "Connected to testhost with callsign testcall."
see.assert_called_with(
dev_id=device_tracker.slugify("ZZ0FOOBAR-1"), gps=(0.0, 0.0), attributes={}
)
def test_aprs_listener_rx_msg_no_position():
"""Test rx_msg with non-position report."""
with patch("aprslib.IS"):
callsign = TEST_CALLSIGN
password = TEST_PASSWORD
host = TEST_HOST
server_filter = TEST_FILTER
see = Mock()
sample_msg = {device_tracker.ATTR_FORMAT: "invalid"}
listener = device_tracker.AprsListenerThread(
callsign, password, host, server_filter, see
)
listener.run()
listener.rx_msg(sample_msg)
assert listener.callsign == callsign
assert listener.host == host
assert listener.server_filter == server_filter
assert listener.see == see
assert listener.start_event.is_set()
assert listener.start_success
assert listener.start_message == "Connected to testhost with callsign testcall."
see.assert_not_called()
async def test_setup_scanner(hass):
"""Test setup_scanner."""
with patch(
"homeassistant.components.aprs.device_tracker.AprsListenerThread"
) as listener:
config = {
"username": TEST_CALLSIGN,
"password": TEST_PASSWORD,
"host": TEST_HOST,
"callsigns": ["XX0FOO*", "YY0BAR-1"],
"timeout": device_tracker.DEFAULT_TIMEOUT,
}
see = Mock()
res = await hass.async_add_executor_job(
device_tracker.setup_scanner, hass, config, see
)
assert res
listener.assert_called_with(
TEST_CALLSIGN, TEST_PASSWORD, TEST_HOST, "b/XX0FOO* b/YY0BAR-1", see
)
async def test_setup_scanner_timeout(hass):
"""Test setup_scanner failure from timeout."""
with patch("aprslib.IS.connect", side_effect=TimeoutError):
config = {
"username": TEST_CALLSIGN,
"password": TEST_PASSWORD,
"host": "localhost",
"timeout": 0.01,
"callsigns": ["XX0FOO*", "YY0BAR-1"],
}
see = Mock()
assert not await hass.async_add_executor_job(
device_tracker.setup_scanner, hass, config, see
)
|
|
'''
Data classes representing references to files in model objects, and a manager
class for common operations with files. The manager encapsulates knowledge of
where and how to store transient and persistent files.
'''
import os
import base64
import errno
import mimetypes
from shutil import copyfileobj
from ...utils import cached_property
class BaseFile(object):
def __init__(self, root, name, manager=None):
        '''@root depends on the environment of the application and @name
        uniquely identifies the file.'''
self.root = root
self.name = name
self.manager = manager
@property
def path(self):
return os.path.join(self.root, self.name)
@property
def mimetype(self):
'''Guessed mimetype'''
return mimetypes.guess_type(self.path)[0]
@cached_property
def size(self):
try:
return os.path.getsize(self.path)
# Return None for non-existing file.
# There can be OSError or IOError (depending on Python version?), both
# are derived from EnvironmentError having errno property.
except EnvironmentError as exc:
if exc.errno!=errno.ENOENT:
raise # pragma: no cover
@property
def file_name(self):
return os.path.split(self.name)[1]
@property
def ext(self):
return os.path.splitext(self.name)[1]
def __repr__(self):
return '{}({!r})'.format(type(self).__name__, self.name)
class TransientFile(BaseFile):
mode = 'transient'
@property
def url(self):
return self.manager.get_transient_url(self)
class PersistentFile(BaseFile):
mode = 'existing' # XXX rename existing to persistent everywhere
@property
def url(self):
return self.manager.get_persistent_url(self)
def random_name(length=32):
# altchars - do not use "-" and "_" in file names
name = base64.b64encode(os.urandom(length), altchars="AA").rstrip('=')
return name[:length]
class BaseFileManager(object):
def __init__(self, persistent_root, persistent_url):
self.persistent_root = persistent_root
self.persistent_url = persistent_url
def get_persistent(self, name, cls=PersistentFile):
if not name or '..' in name or name[0] in '~/':
            raise ValueError('Insecure file path')
persistent = cls(self.persistent_root, name, self)
return persistent
def get_persistent_url(self, file, env=None):
return self.persistent_url + file.name
class ReadonlyFileManager(BaseFileManager):
pass
class FileManager(BaseFileManager):
transient_length = 16
persistent_length = 32
def __init__(self, transient_root, persistent_root,
transient_url, persistent_url,
transient_length=None,
persistent_length=None):
self.transient_root = transient_root
self.persistent_root = persistent_root
self.transient_url = transient_url
self.persistent_url = persistent_url
self.transient_length = transient_length or self.transient_length
self.persistent_length = persistent_length or self.persistent_length
def delete(self, file_obj):
        # XXX Is this the right place for this?
        # A "delete the file if it exists and ignore errors" helper would be
        # useful in many places, I think...
if os.path.isfile(file_obj.path):
try:
os.unlink(file_obj.path)
except OSError:
pass
def _copy_file(self, inp, path, length=None):
# works for ajax file upload
# XXX implement/debug for FieldStorage and file
with open(path, 'wb') as fp:
if length is None:
copyfileobj(inp, fp)
else:
# copyfileobj does not work on request.input_stream
# XXX check
pos, bufsize = 0, 16*1024
while pos < length:
bufsize = min(bufsize, length-pos)
data = inp.read(bufsize)
fp.write(data)
assert bufsize == len(data)
pos += bufsize
def create_transient(self, input_stream, original_name, length=None):
'''Create TransientFile and file on FS from given input stream and
original file name.'''
ext = os.path.splitext(original_name)[1]
transient = self.new_transient(ext)
if not os.path.isdir(self.transient_root):
os.makedirs(self.transient_root)
self._copy_file(input_stream, transient.path, length=length)
return transient
def new_transient(self, ext=''):
'''Creates empty TransientFile with random name and given extension.
File on FS is not created'''
name = random_name(self.transient_length) + ext
return TransientFile(self.transient_root, name, self)
def get_transient(self, name):
'''Restores TransientFile object with given name.
Should be used when form is submitted with file name and no file'''
# security checks: basically no folders are allowed
assert not ('/' in name or '\\' in name or name[0] in '.~')
transient = TransientFile(self.transient_root, name, self)
if not os.path.isfile(transient.path):
raise OSError(errno.ENOENT, 'Transient file has been lost',
transient.path)
return transient
def store(self, transient_file, persistent_file):
'''Makes PersistentFile from TransientFile'''
#for i in xrange(5):
# persistent_file = PersistentFile(self.persistent_root,
# persistent_name, self)
# if not os.path.exists(persistent_file.path):
# break
#else:
# raise Exception('Unable to find free file name')
dirname = os.path.dirname(persistent_file.path)
if not os.path.isdir(dirname):
os.makedirs(dirname)
os.rename(transient_file.path, persistent_file.path)
return persistent_file
def get_transient_url(self, file, env=None):
return self.transient_url + file.name
def new_file_name(self, name_template, inst, ext, old_name):
assert '{random}' in name_template, \
'Non-random name templates are not supported yet'
for i in xrange(5):
name = name_template.format(item=inst,
random=random_name(self.persistent_length))
name = name + ext
# XXX Must differ from old value[s].
            if name != old_name or '{random}' not in name_template:
return name
raise Exception('Unable to find new file name')
def create_symlink(self, source_file, target_file):
source_path = os.path.normpath(source_file.path)
target_path = os.path.normpath(target_file.path)
assert target_path.startswith(self.persistent_root), \
'Target file must be in %s folder' % self.persistent_root
target_dir = os.path.dirname(target_path)
source_path_rel = os.path.relpath(source_path, target_dir)
if not os.path.isdir(target_dir):
os.makedirs(target_dir)
if os.path.islink(target_path):
os.unlink(target_path)
os.symlink(source_path_rel, target_path)
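# Usage sketch (illustrative; paths and names are examples): a typical upload
# flow creates a transient file from a request stream and later promotes it to
# a persistent name chosen by the application.
#
#   manager = FileManager('/tmp/app/transient', '/var/app/media',
#                         '/transient/', '/media/')
#   with open('upload.bin', 'rb') as inp:
#       transient = manager.create_transient(inp, 'upload.bin')
#   persistent = manager.get_persistent('docs/' + transient.file_name)
#   manager.store(transient, persistent)  # moves the file under persistent_root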
|
|
# Copyright (c) 2010-2021 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
import logging
import mock
import sys
from collections import defaultdict
from swift.common import utils
from swift.common.utils import NOTICE
class WARN_DEPRECATED(Exception):
def __init__(self, msg):
self.msg = msg
print(self.msg)
class CaptureLog(object):
"""
Captures log records passed to the ``handle`` method and provides accessor
functions to the captured logs.
"""
def __init__(self):
self.clear()
def _clear(self):
self.log_dict = defaultdict(list)
self.lines_dict = {'critical': [], 'error': [], 'info': [],
'warning': [], 'debug': [], 'notice': []}
clear = _clear # this is a public interface
def get_lines_for_level(self, level):
if level not in self.lines_dict:
raise KeyError(
"Invalid log level '%s'; valid levels are %s" %
(level,
', '.join("'%s'" % lvl for lvl in sorted(self.lines_dict))))
return self.lines_dict[level]
def all_log_lines(self):
return dict((level, msgs) for level, msgs in self.lines_dict.items()
if len(msgs) > 0)
def _handle(self, record):
try:
line = record.getMessage()
except TypeError:
print('WARNING: unable to format log message %r %% %r' % (
record.msg, record.args))
raise
self.lines_dict[record.levelname.lower()].append(line)
return 0
def handle(self, record):
return self._handle(record)
class FakeLogger(logging.Logger, CaptureLog):
# a thread safe fake logger
def __init__(self, *args, **kwargs):
self._clear()
self.name = 'swift.unit.fake_logger'
self.level = logging.NOTSET
if 'facility' in kwargs:
self.facility = kwargs['facility']
self.statsd_client = None
self.thread_locals = None
self.parent = None
store_in = {
logging.ERROR: 'error',
logging.WARNING: 'warning',
logging.INFO: 'info',
logging.DEBUG: 'debug',
logging.CRITICAL: 'critical',
NOTICE: 'notice',
}
def warn(self, *args, **kwargs):
raise WARN_DEPRECATED("Deprecated Method warn use warning instead")
def notice(self, msg, *args, **kwargs):
"""
Convenience function for syslog priority LOG_NOTICE. The python
logging lvl is set to 25, just above info. SysLogHandler is
monkey patched to map this log lvl to the LOG_NOTICE syslog
priority.
"""
self.log(NOTICE, msg, *args, **kwargs)
def _log(self, level, msg, *args, **kwargs):
store_name = self.store_in[level]
cargs = [msg]
if any(args):
cargs.extend(args)
captured = dict(kwargs)
if 'exc_info' in kwargs and \
not isinstance(kwargs['exc_info'], tuple):
captured['exc_info'] = sys.exc_info()
self.log_dict[store_name].append((tuple(cargs), captured))
super(FakeLogger, self)._log(level, msg, *args, **kwargs)
def _store_in(store_name):
def stub_fn(self, *args, **kwargs):
self.log_dict[store_name].append((args, kwargs))
return stub_fn
# mock out the StatsD logging methods:
update_stats = _store_in('update_stats')
increment = _store_in('increment')
decrement = _store_in('decrement')
timing = _store_in('timing')
timing_since = _store_in('timing_since')
transfer_rate = _store_in('transfer_rate')
set_statsd_prefix = _store_in('set_statsd_prefix')
def get_increments(self):
return [call[0][0] for call in self.log_dict['increment']]
def get_increment_counts(self):
counts = {}
for metric in self.get_increments():
if metric not in counts:
counts[metric] = 0
counts[metric] += 1
return counts
def get_update_stats(self):
return [call[0] for call in self.log_dict['update_stats']]
def setFormatter(self, obj):
self.formatter = obj
def close(self):
self._clear()
def set_name(self, name):
# don't touch _handlers
self._name = name
def acquire(self):
pass
def release(self):
pass
def createLock(self):
pass
def emit(self, record):
pass
def flush(self):
pass
def handleError(self, record):
pass
def isEnabledFor(self, level):
return True
class DebugSwiftLogFormatter(utils.SwiftLogFormatter):
def format(self, record):
msg = super(DebugSwiftLogFormatter, self).format(record)
return msg.replace('#012', '\n')
class DebugLogger(FakeLogger):
"""A simple stdout logging version of FakeLogger"""
def __init__(self, *args, **kwargs):
FakeLogger.__init__(self, *args, **kwargs)
self.formatter = DebugSwiftLogFormatter(
"%(server)s %(levelname)s: %(message)s")
self.records = defaultdict(list)
def handle(self, record):
self._handle(record)
formatted = self.formatter.format(record)
print(formatted)
self.records[record.levelname].append(formatted)
class DebugLogAdapter(utils.LogAdapter):
def _send_to_logger(name):
def stub_fn(self, *args, **kwargs):
return getattr(self.logger, name)(*args, **kwargs)
return stub_fn
# delegate to FakeLogger's mocks
update_stats = _send_to_logger('update_stats')
increment = _send_to_logger('increment')
decrement = _send_to_logger('decrement')
timing = _send_to_logger('timing')
timing_since = _send_to_logger('timing_since')
transfer_rate = _send_to_logger('transfer_rate')
set_statsd_prefix = _send_to_logger('set_statsd_prefix')
def __getattribute__(self, name):
try:
return object.__getattribute__(self, name)
except AttributeError:
return getattr(self.__dict__['logger'], name)
def debug_logger(name='test'):
"""get a named adapted debug logger"""
return DebugLogAdapter(DebugLogger(), name)
class ForwardingLogHandler(logging.NullHandler):
"""
Provides a LogHandler implementation that simply forwards filtered records
to a given handler function. This can be useful to forward records to a
handler without the handler itself needing to subclass LogHandler.
"""
def __init__(self, handler_fn):
super(ForwardingLogHandler, self).__init__()
self.handler_fn = handler_fn
def handle(self, record):
return self.handler_fn(record)
class CaptureLogAdapter(utils.LogAdapter, CaptureLog):
"""
A LogAdapter that is capable of capturing logs for inspection via accessor
methods.
"""
def __init__(self, logger, name):
super(CaptureLogAdapter, self).__init__(logger, name)
self.clear()
self.handler = ForwardingLogHandler(self.handle)
def start_capture(self):
"""
Attaches the adapter's handler to the adapted logger in order to start
capturing log messages.
"""
self.logger.addHandler(self.handler)
def stop_capture(self):
"""
        Detaches the adapter's handler from the adapted logger. This should be
        called to prevent further logging to the adapted logger (possibly via
        other log adapter instances) from being captured by this instance.
"""
self.logger.removeHandler(self.handler)
@contextlib.contextmanager
def capture_logger(conf, *args, **kwargs):
"""
Yields an adapted system logger based on the conf options. The log adapter
captures logs in order to support the pattern of tests calling the log
accessor methods (e.g. get_lines_for_level) directly on the logger
instance.
"""
with mock.patch('swift.common.utils.LogAdapter', CaptureLogAdapter):
log_adapter = utils.get_logger(conf, *args, **kwargs)
log_adapter.start_capture()
try:
yield log_adapter
finally:
log_adapter.stop_capture()
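# Illustrative test usage of the helpers above (the logger name, conf keys and
# the functions under test -- thing_under_test(), run_code_that_calls_get_logger()
# -- are placeholders): debug_logger() returns an adapter whose captured lines
# can be inspected, and capture_logger() wraps utils.get_logger() so code that
# builds its own logger from conf can be checked the same way.
#
#     def test_warns(self):
#         logger = debug_logger('proxy-server')
#         thing_under_test(logger=logger)
#         self.assertTrue(logger.get_lines_for_level('warning'))
#
#     def test_with_conf_logger(self):
#         with capture_logger({'log_name': 'test'}, 'test') as logger:
#             run_code_that_calls_get_logger()
#             self.assertEqual([], logger.get_lines_for_level('error'))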
|
|
import socket
import re
import datetime
import sys
import threading
class TimeoutError(Exception): pass
def timelimit(timeout):
def internal(function):
def internal2(*args, **kw):
class Calculator(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.result = None
self.error = None
def run(self):
try:
self.result = function(*args, **kw)
except:
self.error = sys.exc_info()[0]
c = Calculator()
c.start()
c.join(timeout)
if c.isAlive():
raise TimeoutError
if c.error:
raise c.error
return c.result
return internal2
return internal
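# Illustrative use of the timelimit decorator: the wrapped call runs in a
# worker thread, and TimeoutError is raised if it has not finished within the
# given number of seconds (the DNS lookup below is just an example).
#
#     @timelimit(2)
#     def slow_lookup(host):
#         return socket.gethostbyname(host)
#
#     try:
#         addr = slow_lookup("example.org")
#     except TimeoutError:
#         addr = None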
class InvalidRequestException(Exception):
    pass
class ClientConnection:
def __init__(self, socket, address):
self.socket = socket
self.address = address
@timelimit(10)
def wait_for_request(self):
data = ""
        while True:
            recv_data = self.socket.recv(65536)
            if not recv_data:
                # Connection closed before a full request was received.
                break
            data += recv_data
            if "\r\n\r\n" in data:
                break
return ClientRequest(self, data)
class ClientRequest:
def __init__(self, client, data):
self.client = client
self.headers = {}
self.cookies = {}
lines = data.split("\r\n")
request_line = lines[0]
lines = lines[1:]
m = re.search("^(\w+) (.+) (HTTP\/...)$", request_line)
if m:
method, request_uri, version = m.group(1, 2, 3)
self.method = method
self.request_path = request_uri
self.version = version
for line in lines:
if line:
m = re.search("^(\S+): (.+)$", line)
if m:
name, value = m.group(1, 2)
self.headers[name] = value
else:
raise InvalidRequestException()
if "Cookie" in self.headers:
cookies = self.headers["Cookie"]
for trash1, key, value, trash2 in re.findall("(^|:)(.*?)=(.*)(;|$)", cookies):
self.cookies[key] = value
def get_version(self):
return self.version
def get_method(self):
return self.method
    def get_request_uri(self):
        return self.request_path
def get_header(self, name):
if name in self.headers:
return self.headers[name]
else:
return None
def get_cookie(self, name):
if name in self.cookies:
return self.cookies[name]
else:
return None
def __str__(self):
return "%s %s %s %s" % (self.method, self.request_path, self.version, self.headers)
class ServerResponse:
    def __init__(self, status_code):
        self.version = "HTTP/1.1"
        self.status_code = status_code
        self.headers = {}
        self.cookies = {}   # needed by add_cookie()
        self.content = None
def add_header(self, key, value):
self.headers[key] = value
def remove_header(self, key):
if key in self.headers:
del self.headers[key]
def set_content(self, content):
self.content = content
if content:
self.add_header("Content-Length", str(len(content)))
else:
self.remove_header("Content-Length")
def add_cookie(self, name, value, life_time=datetime.timedelta(1)):
self.cookies[name] = (value, datetime.datetime.now()+life_time)
def compile(self):
lines = []
status_line = "%s %d %s" % (self.version, self.status_code, "OK")
lines.append(status_line)
for key, value in self.headers.items():
lines.append("%s: %s" % (key, value))
data = "\r\n".join(lines) + "\r\n\r\n"
if self.content:
data += self.content
return data
class HTTPServer:
def __init__(self, port):
self.handle_request_callback = None
self.socket = socket.socket()
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.socket.bind(("", port))
self.socket.listen(10)
self.socket.setblocking(False)
self.request_queue_lock = threading.Lock()
self.request_queue = []
def register_handle_request_callback(self, callback):
self.handle_request_callback = callback
def accept_client_connection(self):
s, address = self.socket.accept()
s.setblocking(True)
return ClientConnection(s, address)
def respond_404(self, request):
response = ServerResponse(404)
response.add_header("Content-Type", "text/html")
response.add_header("Connection", "close")
response.set_content("404 etc")
self.respond(request, response)
def respond_200(self, request, data, type = "text/html"):
response = ServerResponse(200)
response.add_header("Content-Type", type)
response.add_header("Connection", "close")
response.set_content(data)
self.respond(request, response)
def respond(self, request, response):
data = response.compile()
while data:
sent = request.client.socket.send(data)
if sent <= 0:
return
else:
data = data[sent:]
request.client.socket.close()
def get_request(self, client):
try:
request = client.wait_for_request()
self.request_queue_lock.acquire()
self.request_queue.append(request)
self.request_queue_lock.release()
except TimeoutError:
client.socket.close()
except InvalidRequestException:
print "invalid request O.o"
client.socket.close()
def tick(self):
self.request_queue_lock.acquire()
while self.request_queue:
request = self.request_queue.pop()
self.handle_request_callback(request)
self.request_queue_lock.release()
try:
client = self.accept_client_connection()
thread = threading.Thread(None, self.get_request, None, (client,))
thread.start()
except socket.error:
pass # no incoming connection atm...
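# Minimal wiring sketch for this server (port, handler body and sleep interval
# are arbitrary; `time` would need to be imported): register a callback that
# answers every request, then drive the non-blocking server from a loop.
#
#     def handle(request):
#         server.respond_200(request, "<html><body>hello</body></html>")
#
#     server = HTTPServer(8080)
#     server.register_handle_request_callback(handle)
#     while True:
#         server.tick()
#         time.sleep(0.01)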
|
|
#!/usr/bin/env python
# Copyright 2014 the V8 project authors. All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# for py2/py3 compatibility
from __future__ import print_function
import argparse
from collections import OrderedDict
import sys
from common_includes import *
def IsSvnNumber(rev):
return rev.isdigit() and len(rev) < 8
class Preparation(Step):
MESSAGE = "Preparation."
def RunStep(self):
if os.path.exists(self.Config("ALREADY_MERGING_SENTINEL_FILE")):
if self._options.force:
os.remove(self.Config("ALREADY_MERGING_SENTINEL_FILE"))
elif self._options.step == 0: # pragma: no cover
self.Die("A merge is already in progress")
open(self.Config("ALREADY_MERGING_SENTINEL_FILE"), "a").close()
self.InitialEnvironmentChecks(self.default_cwd)
if self._options.branch:
self["merge_to_branch"] = self._options.branch
else: # pragma: no cover
self.Die("Please specify a branch to merge to")
self.CommonPrepare()
self.PrepareBranch()
class CreateBranch(Step):
MESSAGE = "Create a fresh branch for the patch."
def RunStep(self):
self.GitCreateBranch(self.Config("BRANCHNAME"),
self.vc.RemoteBranch(self["merge_to_branch"]))
class SearchArchitecturePorts(Step):
MESSAGE = "Search for corresponding architecture ports."
def RunStep(self):
self["full_revision_list"] = list(OrderedDict.fromkeys(
self._options.revisions))
port_revision_list = []
for revision in self["full_revision_list"]:
      # Search for commits which match the "Port XXX" pattern.
git_hashes = self.GitLog(reverse=True, format="%H",
grep="Port %s" % revision,
branch=self.vc.RemoteMasterBranch())
for git_hash in git_hashes.splitlines():
revision_title = self.GitLog(n=1, format="%s", git_hash=git_hash)
# Is this revision included in the original revision list?
if git_hash in self["full_revision_list"]:
print("Found port of %s -> %s (already included): %s"
% (revision, git_hash, revision_title))
else:
print("Found port of %s -> %s: %s"
% (revision, git_hash, revision_title))
port_revision_list.append(git_hash)
    # Did we find any ports?
if len(port_revision_list) > 0:
if self.Confirm("Automatically add corresponding ports (%s)?"
% ", ".join(port_revision_list)):
#: 'y': Add ports to revision list.
self["full_revision_list"].extend(port_revision_list)
class CreateCommitMessage(Step):
MESSAGE = "Create commit message."
def RunStep(self):
# Stringify: ["abcde", "12345"] -> "abcde, 12345"
self["revision_list"] = ", ".join(self["full_revision_list"])
if not self["revision_list"]: # pragma: no cover
self.Die("Revision list is empty.")
action_text = "Merged %s"
# The commit message title is added below after the version is specified.
msg_pieces = [
"\n".join(action_text % s for s in self["full_revision_list"]),
]
msg_pieces.append("\n\n")
for commit_hash in self["full_revision_list"]:
patch_merge_desc = self.GitLog(n=1, format="%s", git_hash=commit_hash)
msg_pieces.append("%s\n\n" % patch_merge_desc)
bugs = []
for commit_hash in self["full_revision_list"]:
msg = self.GitLog(n=1, git_hash=commit_hash)
for bug in re.findall(r"^[ \t]*BUG[ \t]*=[ \t]*(.*?)[ \t]*$", msg, re.M):
bugs.extend(s.strip() for s in bug.split(","))
bug_aggregate = ",".join(sorted(filter(lambda s: s and s != "none", bugs)))
if bug_aggregate:
msg_pieces.append("BUG=%s\n" % bug_aggregate)
self["new_commit_msg"] = "".join(msg_pieces)
class ApplyPatches(Step):
MESSAGE = "Apply patches for selected revisions."
def RunStep(self):
for commit_hash in self["full_revision_list"]:
print("Applying patch for %s to %s..."
% (commit_hash, self["merge_to_branch"]))
patch = self.GitGetPatch(commit_hash)
TextToFile(patch, self.Config("TEMPORARY_PATCH_FILE"))
self.ApplyPatch(self.Config("TEMPORARY_PATCH_FILE"))
if self._options.patch:
self.ApplyPatch(self._options.patch)
class PrepareVersion(Step):
MESSAGE = "Prepare version file."
def RunStep(self):
# This is used to calculate the patch level increment.
self.ReadAndPersistVersion()
class IncrementVersion(Step):
MESSAGE = "Increment version number."
def RunStep(self):
new_patch = str(int(self["patch"]) + 1)
if self.Confirm("Automatically increment V8_PATCH_LEVEL? (Saying 'n' will "
"fire up your EDITOR on %s so you can make arbitrary "
"changes. When you're done, save the file and exit your "
"EDITOR.)" % VERSION_FILE):
text = FileToText(os.path.join(self.default_cwd, VERSION_FILE))
text = MSub(r"(?<=#define V8_PATCH_LEVEL)(?P<space>\s+)\d*$",
r"\g<space>%s" % new_patch,
text)
TextToFile(text, os.path.join(self.default_cwd, VERSION_FILE))
else:
self.Editor(os.path.join(self.default_cwd, VERSION_FILE))
self.ReadAndPersistVersion("new_")
self["version"] = "%s.%s.%s.%s" % (self["new_major"],
self["new_minor"],
self["new_build"],
self["new_patch"])
class CommitLocal(Step):
MESSAGE = "Commit to local branch."
def RunStep(self):
# Add a commit message title.
self["commit_title"] = "Version %s (cherry-pick)" % self["version"]
self["new_commit_msg"] = "%s\n\n%s" % (self["commit_title"],
self["new_commit_msg"])
TextToFile(self["new_commit_msg"], self.Config("COMMITMSG_FILE"))
self.GitCommit(file_name=self.Config("COMMITMSG_FILE"))
class CommitRepository(Step):
MESSAGE = "Commit to the repository."
def RunStep(self):
self.GitCheckout(self.Config("BRANCHNAME"))
self.WaitForLGTM()
self.GitPresubmit()
self.vc.CLLand()
class TagRevision(Step):
MESSAGE = "Create the tag."
def RunStep(self):
print("Creating tag %s" % self["version"])
self.vc.Tag(self["version"],
self.vc.RemoteBranch(self["merge_to_branch"]),
self["commit_title"])
class CleanUp(Step):
MESSAGE = "Cleanup."
def RunStep(self):
self.CommonCleanup()
print("*** SUMMARY ***")
print("version: %s" % self["version"])
print("branch: %s" % self["merge_to_branch"])
if self["revision_list"]:
print("patches: %s" % self["revision_list"])
class RollMerge(ScriptsBase):
def _Description(self):
return ("Performs the necessary steps to merge revisions from "
"master to other branches, including candidates and roll branches.")
def _PrepareOptions(self, parser):
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("--branch", help="The branch to merge to.")
parser.add_argument("revisions", nargs="*",
help="The revisions to merge.")
parser.add_argument("-f", "--force",
help="Delete sentinel file.",
default=False, action="store_true")
parser.add_argument("-m", "--message",
help="A commit message for the patch.")
parser.add_argument("-p", "--patch",
help="A patch file to apply as part of the merge.")
def _ProcessOptions(self, options):
if len(options.revisions) < 1:
if not options.patch:
print("Either a patch file or revision numbers must be specified")
return False
if not options.message:
print("You must specify a merge comment if no patches are specified")
return False
options.bypass_upload_hooks = True
# CC ulan to make sure that fixes are merged to Google3.
options.cc = "[email protected]"
# Make sure to use git hashes in the new workflows.
for revision in options.revisions:
if (IsSvnNumber(revision) or
(revision[0:1] == "r" and IsSvnNumber(revision[1:]))):
print("Please provide full git hashes of the patches to merge.")
print("Got: %s" % revision)
return False
return True
def _Config(self):
return {
"BRANCHNAME": "prepare-merge",
"PERSISTFILE_BASENAME":
RELEASE_WORKDIR + "v8-merge-to-branch-tempfile",
"ALREADY_MERGING_SENTINEL_FILE":
RELEASE_WORKDIR + "v8-merge-to-branch-tempfile-already-merging",
"TEMPORARY_PATCH_FILE":
RELEASE_WORKDIR + "v8-prepare-merge-tempfile-temporary-patch",
"COMMITMSG_FILE": RELEASE_WORKDIR + "v8-prepare-merge-tempfile-commitmsg",
}
def _Steps(self):
return [
Preparation,
CreateBranch,
SearchArchitecturePorts,
CreateCommitMessage,
ApplyPatches,
PrepareVersion,
IncrementVersion,
CommitLocal,
UploadStep,
CommitRepository,
TagRevision,
CleanUp,
]
if __name__ == "__main__": # pragma: no cover
sys.exit(RollMerge().Run())
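# Example invocations (hypothetical script name, branch and revision values):
#
#     roll_merge.py --branch 4.5 91dceee129 a4bd74f7d8
#     roll_merge.py --branch 4.5 -p fix.patch -m "Merged hand-made fix"
#
# Revisions must be given as git hashes; SVN-style numbers such as r12345 are
# rejected by _ProcessOptions.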
|
|
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cinderclient.v2 import availability_zones
from cinderclient.v2 import qos_specs
from cinderclient.v2 import quotas
from cinderclient.v2 import services
from cinderclient.v2 import volume_backups as vol_backups
from cinderclient.v2 import volume_encryption_types as vol_enc_types
from cinderclient.v2 import volume_snapshots as vol_snaps
from cinderclient.v2 import volume_types
from cinderclient.v2 import volumes
from openstack_dashboard import api
from openstack_dashboard.usage import quotas as usage_quotas
from openstack_dashboard.test.test_data import utils
def data(TEST):
TEST.cinder_services = utils.TestDataContainer()
TEST.cinder_volumes = utils.TestDataContainer()
TEST.cinder_volume_backups = utils.TestDataContainer()
TEST.cinder_volume_encryption_types = utils.TestDataContainer()
TEST.cinder_volume_types = utils.TestDataContainer()
TEST.cinder_qos_specs = utils.TestDataContainer()
TEST.cinder_qos_spec_associations = utils.TestDataContainer()
TEST.cinder_volume_snapshots = utils.TestDataContainer()
TEST.cinder_quotas = utils.TestDataContainer()
TEST.cinder_quota_usages = utils.TestDataContainer()
TEST.cinder_availability_zones = utils.TestDataContainer()
# Services
service_1 = services.Service(services.ServiceManager(None), {
"service": "cinder-scheduler",
"status": "enabled",
"binary": "cinder-scheduler",
"zone": "internal",
"state": "up",
"updated_at": "2013-07-08T05:21:00.000000",
"host": "devstack001",
"disabled_reason": None
})
service_2 = services.Service(services.ServiceManager(None), {
"service": "cinder-volume",
"status": "enabled",
"binary": "cinder-volume",
"zone": "nova",
"state": "up",
"updated_at": "2013-07-08T05:20:51.000000",
"host": "devstack001",
"disabled_reason": None
})
TEST.cinder_services.add(service_1)
TEST.cinder_services.add(service_2)
# Volumes - Cinder v1
volume = volumes.Volume(
volumes.VolumeManager(None),
{'id': "11023e92-8008-4c8b-8059-7f2293ff3887",
'status': 'available',
'size': 40,
'display_name': 'Volume name',
'display_description': 'Volume description',
'created_at': '2014-01-27 10:30:00',
'volume_type': None,
'attachments': []})
nameless_volume = volumes.Volume(
volumes.VolumeManager(None),
{"id": "4b069dd0-6eaa-4272-8abc-5448a68f1cce",
"status": 'available',
"size": 10,
"display_name": '',
"display_description": '',
"device": "/dev/hda",
"created_at": '2010-11-21 18:34:25',
"volume_type": 'vol_type_1',
"attachments": []})
other_volume = volumes.Volume(
volumes.VolumeManager(None),
{'id': "21023e92-8008-1234-8059-7f2293ff3889",
'status': 'in-use',
'size': 10,
'display_name': u'my_volume',
'display_description': '',
'created_at': '2013-04-01 10:30:00',
'volume_type': None,
'attachments': [{"id": "1", "server_id": '1',
"device": "/dev/hda"}]})
volume_with_type = volumes.Volume(
volumes.VolumeManager(None),
{'id': "7dcb47fd-07d9-42c2-9647-be5eab799ebe",
'name': 'my_volume2',
'status': 'in-use',
'size': 10,
'display_name': u'my_volume2',
'display_description': '',
'created_at': '2013-04-01 10:30:00',
'volume_type': 'vol_type_2',
'attachments': [{"id": "2", "server_id": '2',
"device": "/dev/hdb"}]})
volume.bootable = 'true'
nameless_volume.bootable = 'true'
other_volume.bootable = 'true'
TEST.cinder_volumes.add(api.cinder.Volume(volume))
TEST.cinder_volumes.add(api.cinder.Volume(nameless_volume))
TEST.cinder_volumes.add(api.cinder.Volume(other_volume))
TEST.cinder_volumes.add(api.cinder.Volume(volume_with_type))
vol_type1 = volume_types.VolumeType(volume_types.VolumeTypeManager(None),
{'id': u'1',
'name': u'vol_type_1',
'extra_specs': {'foo': 'bar'}})
vol_type2 = volume_types.VolumeType(volume_types.VolumeTypeManager(None),
{'id': u'2',
'name': u'vol_type_2'})
TEST.cinder_volume_types.add(vol_type1, vol_type2)
# Volumes - Cinder v2
volume_v2 = volumes.Volume(
volumes.VolumeManager(None),
{'id': "31023e92-8008-4c8b-8059-7f2293ff1234",
'name': 'v2_volume',
'description': "v2 Volume Description",
'status': 'available',
'size': 20,
'created_at': '2014-01-27 10:30:00',
'volume_type': None,
'bootable': 'true',
'attachments': []})
volume_v2.bootable = 'true'
TEST.cinder_volumes.add(api.cinder.Volume(volume_v2))
snapshot = vol_snaps.Snapshot(
vol_snaps.SnapshotManager(None),
{'id': '5f3d1c33-7d00-4511-99df-a2def31f3b5d',
'display_name': 'test snapshot',
'display_description': 'volume snapshot',
'size': 40,
'status': 'available',
'volume_id': '11023e92-8008-4c8b-8059-7f2293ff3887'})
snapshot2 = vol_snaps.Snapshot(
vol_snaps.SnapshotManager(None),
{'id': 'c9d0881a-4c0b-4158-a212-ad27e11c2b0f',
'name': '',
'description': 'v2 volume snapshot description',
'size': 80,
'status': 'available',
'volume_id': '31023e92-8008-4c8b-8059-7f2293ff1234'})
snapshot.bootable = 'true'
snapshot2.bootable = 'true'
TEST.cinder_volume_snapshots.add(api.cinder.VolumeSnapshot(snapshot))
TEST.cinder_volume_snapshots.add(api.cinder.VolumeSnapshot(snapshot2))
TEST.cinder_volume_snapshots.first()._volume = volume
# Volume Type Encryption
vol_enc_type1 = vol_enc_types.VolumeEncryptionType(
vol_enc_types.VolumeEncryptionTypeManager(None),
{'volume_type_id': u'1',
'control_location': "front-end",
'key_size': 512,
'provider': "a-provider",
'cipher': "a-cipher"})
vol_enc_type2 = vol_enc_types.VolumeEncryptionType(
vol_enc_types.VolumeEncryptionTypeManager(None),
{'volume_type_id': u'2',
'control_location': "front-end",
'key_size': 256,
'provider': "a-provider",
'cipher': "a-cipher"})
vol_unenc_type1 = vol_enc_types.VolumeEncryptionType(
vol_enc_types.VolumeEncryptionTypeManager(None), {})
TEST.cinder_volume_encryption_types.add(vol_enc_type1, vol_enc_type2,
vol_unenc_type1)
volume_backup1 = vol_backups.VolumeBackup(
vol_backups.VolumeBackupManager(None),
{'id': 'a374cbb8-3f99-4c3f-a2ef-3edbec842e31',
'name': 'backup1',
'description': 'volume backup 1',
'size': 10,
'status': 'available',
'container_name': 'volumebackups',
'volume_id': '11023e92-8008-4c8b-8059-7f2293ff3887'})
volume_backup2 = vol_backups.VolumeBackup(
vol_backups.VolumeBackupManager(None),
{'id': 'c321cbb8-3f99-4c3f-a2ef-3edbec842e52',
'name': 'backup2',
'description': 'volume backup 2',
'size': 20,
'status': 'available',
'container_name': 'volumebackups',
'volume_id': '31023e92-8008-4c8b-8059-7f2293ff1234'})
TEST.cinder_volume_backups.add(volume_backup1)
TEST.cinder_volume_backups.add(volume_backup2)
# Quota Sets
quota_data = dict(volumes='1',
snapshots='1',
gigabytes='1000')
quota = quotas.QuotaSet(quotas.QuotaSetManager(None), quota_data)
TEST.cinder_quotas.add(api.base.QuotaSet(quota))
# Quota Usages
quota_usage_data = {'gigabytes': {'used': 0,
'quota': 1000},
'instances': {'used': 0,
'quota': 10},
'snapshots': {'used': 0,
'quota': 10}}
quota_usage = usage_quotas.QuotaUsage()
for k, v in quota_usage_data.items():
quota_usage.add_quota(api.base.Quota(k, v['quota']))
quota_usage.tally(k, v['used'])
TEST.cinder_quota_usages.add(quota_usage)
# Availability Zones
# Cinder returns the following structure from os-availability-zone
# {"availabilityZoneInfo":
# [{"zoneState": {"available": true}, "zoneName": "nova"}]}
# Note that the default zone is still "nova" even though this is cinder
TEST.cinder_availability_zones.add(
availability_zones.AvailabilityZone(
availability_zones.AvailabilityZoneManager(None),
{
'zoneName': 'nova',
'zoneState': {'available': True}
}
)
)
# Cinder Limits
limits = {"absolute": {"totalVolumesUsed": 1,
"totalGigabytesUsed": 5,
"maxTotalVolumeGigabytes": 1000,
"maxTotalVolumes": 10}}
TEST.cinder_limits = limits
# QOS Specs
qos_spec1 = qos_specs.QoSSpecs(
qos_specs.QoSSpecsManager(None),
{"id": "418db45d-6992-4674-b226-80aacad2073c",
"name": "high_iops",
"consumer": "back-end",
"specs": {"minIOPS": "1000", "maxIOPS": '100000'}})
qos_spec2 = qos_specs.QoSSpecs(
qos_specs.QoSSpecsManager(None),
{"id": "6ed7035f-992e-4075-8ed6-6eff19b3192d",
"name": "high_bws",
"consumer": "back-end",
"specs": {"maxBWS": '5000'}})
TEST.cinder_qos_specs.add(qos_spec1, qos_spec2)
vol_type1.associated_qos_spec = qos_spec1.name
TEST.cinder_qos_spec_associations.add(vol_type1)
|
|
# $Id: TestDomHelpers.py 1058 2009-01-26 10:39:19Z graham $
#
# Unit testing for WebBrick library functions (Functions.py)
# See http://pyunit.sourceforge.net/pyunit.html
#
import sys
import unittest
import logging
from os import *
from os.path import *
sys.path.append("../..")
from MiscLib.DomHelpers import *
class TestDomHelpers(unittest.TestCase):
def setUp(self):
self.testdoc = ( """<?xml version="1.0" encoding="iso-8859-1" ?>
<root attr="attrtext">
<child1>
some text
<child11 />
more text
<child12>child text</child12>
final text
</child1>
</root>""" )
self.testpath = "resources/"
self.testfile = self.testpath+"TestDomHelpers.xml"
self.savefile = self.testpath+"TestDomHelpersSave.xml"
return
def tearDown(self):
return
def doAssert(self, cond, msg):
assert cond , msg
# Actual tests follow
def testParseXmlString(self):
assert parseXmlString(self.testdoc), "Parse XML string failed"
def testParseXmlFile(self):
assert parseXmlFile(self.testfile), "Parse XML file failed"
def testSaveXmlToFile(self):
# ensure clean first
logging.debug( self.testfile )
logging.debug( self.savefile )
try:
remove( self.savefile )
except Exception:
pass
testDom = parseXmlFile(self.testfile)
saveXmlToFile(self.savefile, testDom, False)
assert exists(self.savefile), "save XML file failed"
# could expand test and run compare of data.
# remove( self.savefile )
def testSaveXmlToFileWithBackup(self):
# ensure clean first
try:
remove( self.savefile )
except Exception:
pass
testDom = parseXmlFile(self.testfile)
saveXmlToFile(self.savefile, testDom)
saveXmlToFile(self.savefile, testDom)
assert exists(self.savefile), "save XML file failed"
assert exists(self.savefile+".bak"), "save XML file failed"
remove( self.savefile+".bak" )
remove( self.savefile )
def testGetNamedElem1(self):
dom = parseXmlString(self.testdoc)
elm = getNamedElem(dom, "child1")
assert elm, "Node not found: child1"
def testGetNamedElem2(self):
dom = parseXmlString(self.testdoc)
elm = getNamedElem(dom, "child12")
assert elm, "Node not found: child12"
def testGetNamedElem3(self):
dom = parseXmlString(self.testdoc)
elm = getNamedElem(dom, "child99")
assert not elm, "Unexpected dode found: child99"
#TODO: deprecate me
def testGetNamedNode(self):
dom = parseXmlString(self.testdoc)
elm = getNamedNode(dom, "child1")
assert elm, "Node not found: child1"
def testElemText(self):
dom = parseXmlString(self.testdoc)
elm = getNamedElem(dom, "child1")
txt = getElemText(elm)
assert txt == "\n some text\n \n more text\n \n final text\n ", \
"Wrong element text: "+repr(txt)
def testAttrText(self):
dom = parseXmlString(self.testdoc)
elm = getNamedElem(dom, "root")
txt = getAttrText(elm,"attr")
assert txt == "attrtext", "Wrong attribute text: "+txt
def testNamedNodeAttrText(self):
dom = parseXmlString(self.testdoc)
txt = getNamedNodeAttrText(dom, "root", "attr")
self.assertEqual( txt, "attrtext" )
def testNodeListText(self):
dom = parseXmlFile(self.testfile)
elm = dom.getElementsByTagName("child1")[0]
txt = getNodeListText(elm.childNodes)
assert txt == "\n some text\n \n more text\n \n final text\n ", \
"Wrong element text: "+repr(txt)
def testGetNamedNodeText(self):
dom = parseXmlFile(self.testfile)
txt = getNamedNodeText(dom, "child1")
self.assertEqual( txt, "\n some text\n \n more text\n \n final text\n " )
def testgetElemXml(self):
dom = parseXmlFile(self.testfile)
txt = getElemXml( dom.getElementsByTagName("child1")[0] )
self.assertEqual( txt, "<child1>\n some text\n <child11/>\n more text\n <child12>child text</child12>\n final text\n </child1>" )
def testgetElemPrettyXml(self):
dom = parseXmlFile(self.testfile)
txt = getElemPrettyXml( dom.getElementsByTagName("child1")[0] )
self.assertEqual( txt, "<child1>\n \n some text\n \n <child11/>\n \n more text\n \n <child12>\n child text\n </child12>\n \n final text\n \n</child1>\n" )
def testGetNamedNodeXml(self):
dom = parseXmlFile(self.testfile)
txt = getNamedNodeXml( dom, "child1" )
self.assertEqual( txt, "<child1>\n some text\n <child11/>\n more text\n <child12>child text</child12>\n final text\n </child1>" )
# --- node type tests ---
def testIsAttribute(self):
dom = parseXmlString(self.testdoc)
elm = getNamedElem(dom, "root")
atr = elm.getAttributeNode("attr")
assert isAttribute(atr),"isAttribute test (root/@attr) failed"
def testIsAttributeElem(self):
dom = parseXmlString(self.testdoc)
elm = getNamedElem(dom, "root")
assert not isAttribute(elm),"isAttribute false test (root) failed"
def testIsAttributeText(self):
dom = parseXmlString(self.testdoc)
elm = getNamedElem(dom, "root")
txt = elm.childNodes[0]
assert not isAttribute(elm),"isAttribute false test (root/[0]) failed"
def testIsElement(self):
dom = parseXmlString(self.testdoc)
elm = getNamedElem(dom, "root")
assert isElement(elm),"isElement test (root/@attr) failed"
def testIsElementAttr(self):
dom = parseXmlString(self.testdoc)
elm = getNamedElem(dom, "root")
atr = elm.getAttributeNode("attr")
assert not isElement(atr),"isElement false test (root/@attr) failed"
def testIsElementText(self):
dom = parseXmlString(self.testdoc)
elm = getNamedElem(dom, "root")
txt = elm.childNodes[0]
assert not isElement(txt),"isElement false test (root/[0]) failed"
def testIsText(self):
dom = parseXmlString(self.testdoc)
elm = getNamedElem(dom, "root")
txt = elm.childNodes[0]
assert isText(txt),"isText test (root/[0]) failed"
def testIsTextAttr(self):
dom = parseXmlString(self.testdoc)
elm = getNamedElem(dom, "root")
atr = elm.getAttributeNode("attr")
assert not isText(atr),"isText false test (root/@attr) failed"
def testIsTextElem(self):
dom = parseXmlString(self.testdoc)
elm = getNamedElem(dom, "root")
assert not isText(elm),"isText false test (root) failed"
def testRemoveChildren(self):
dom = parseXmlString(self.testdoc)
elm = getNamedElem(dom, "root")
el1 = removeChildren(elm)
self.assertEqual(getElemXml(elm),"""<root attr="attrtext"/>""")
self.assertEqual(elm.childNodes, [])
def testReplaceChildren(self):
dom = parseXmlString(self.testdoc)
elm = getNamedElem(dom, "root")
c1 = getNamedElem(elm, "child11")
c2 = getNamedElem(elm, "child12")
replaceChildren(elm,[c1,c2])
self.assertEqual(getElemXml(elm),
"""<root attr="attrtext">"""+
"""<child11/>"""+
"""<child12>child text</child12>"""+
"""</root>""")
def testReplaceChildrenText(self):
dom = parseXmlString(self.testdoc)
elm = getNamedElem(dom, "root")
replaceChildrenText(elm,"replacement text")
self.assertEqual(getElemXml(elm),
"""<root attr="attrtext">"""+
"""replacement text"""+
"""</root>""")
def testEscapeText(self):
self.assertEqual(
escapeText("<tag>Jack & Jill</tag>"),
"<tag>Jack & Jill</tag>")
def testEscapeTextForHtml(self):
self.assertEqual(
escapeTextForHtml("<tag>\nJack & Jill\n</tag>\n"),
"<tag><br/>Jack & Jill<br/></tag><br/>")
def testPrintDictionary(self, dic):
for key in dic:
if isinstance( dic[key], list ):
logging.debug( "%s : list " % ( key ) )
for ntry in dic[key]:
# print list
if isinstance( ntry, dict ):
self.testPrintDictionary(ntry )
else:
logging.debug( "%s : %s " % ( key, ntry ) )
elif isinstance( dic[key], dict ):
logging.debug( "%s : dictionary " % ( key ) )
self.testPrintDictionary(dic[key] )
else:
logging.debug( "%s : %s " % ( key, dic[key] ) )
def testGetXmlFromDict(self):
testDict = { 'root': { '': 'string',
'child1': { '': 'string1', 'attr1':'attr1text' },
'child2': { '': 'string2', 'attr2':'attr2text',
'child21': { '': 'string21', 'attr21':'attr21text' },
},
}
}
testStr = ("""<?xml version="1.0" encoding="utf-8"?>"""+
"""<root>string<child1 attr1="attr1text">string1</child1>"""+
"""<child2 attr2="attr2text">string2"""+
"""<child21 attr21="attr21text">string21</child21>"""+
"""</child2></root>""")
dom = getXmlDomFromDict( testDict )
domTxt = getElemXml( dom )
logging.debug( repr(testDict) )
logging.debug( repr(dom) )
logging.debug( domTxt )
logging.debug( testStr )
testDict = getDictFromXml( dom )
logging.debug( repr(testDict) )
#add some checks
#order is not always the same as the dictionary has its own random order
self.assertEqual( domTxt, testStr )
def testGetXmlFromDict2(self):
testDict = { 'root': { 'simpleVal': 'string',
'listVal':[ 'string1', 'string2', 'string3' ],
'listVal2':[ { 'key11': 'string11', 'key12': 'string12', 'key13': 'string13'},
{ 'key21': 'string21', 'key22': 'string22', 'key23': 'string23'},
]
}
}
testStr = """<?xml version="1.0" encoding="utf-8"?><root><simpleVal>string</simpleVal><listVal>string1</listVal><listVal>string2</listVal><listVal>string3</listVal><listVal2><key11>string11</key11><key12>string12</key12><key13>string13'</key13><listVal2><listVal2><key21>string21</key21><key22>string22</key22><key23>string23'</key23><listVal2><root>"""
dom = getXmlDomFromDict( testDict )
domTxt = getElemXml( dom )
logging.debug( repr(testDict) )
logging.debug( repr(dom) )
logging.debug( domTxt )
logging.debug( testStr )
testDict = getDictFromXml( dom )
logging.debug( repr(testDict) )
#add some checks
#order is not always the same as the dictionary has its own random order
#self.assertEqual( domTxt, testStr )
def testGetDictFromSimpleXml(self):
self.testdoc = ( """<?xml version="1.0" encoding="iso-8859-1" ?>
<root attr="attrtext">
<child1>child1 text</child1>
<child2 attr2="attr2text">child2 text</child2>
</root>""" )
logging.debug( self.testdoc )
dom = parseXmlString(self.testdoc)
testDict = getDictFromXml( dom )
logging.debug( repr(testDict) )
#self.testPrintDictionary(testDict)
self.assert_( testDict.has_key( "root" ) )
rootDict = testDict["root"]
self.assert_( rootDict.has_key( "attr" ) )
self.assert_( isinstance(rootDict["attr"], basestring) )
self.assertEqual( rootDict["attr"], "attrtext" )
self.assert_( rootDict.has_key( "child1" ) )
self.assert_( isinstance(rootDict["child1"], dict) )
child1Dict = rootDict["child1"]
self.assert_( child1Dict.has_key( "" ) ) # text element
self.assertEqual( child1Dict[""], "child1 text" )
self.assert_( rootDict.has_key( "child2" ) )
self.assert_( isinstance(rootDict["child2"], dict) )
child2Dict = rootDict["child2"]
self.assert_( child2Dict.has_key( "" ) ) # text element
self.assertEqual( child2Dict[""], "child2 text" )
self.assert_( child2Dict.has_key( "attr2" ) ) # text element
self.assert_( isinstance(child2Dict["attr2"], basestring) )
self.assertEqual( child2Dict["attr2"], "attr2text" )
def testGetDictFromXml(self):
self.testdoc = ( """<?xml version="1.0" encoding="iso-8859-1" ?>
<root attr="attrtext">
<child1>
some text
<child11 />
more text
<child12>child text</child12>
final text
</child1>
<repeats>
<repeat>
repeat 1
</repeat>
<repeat>
repeat 2
</repeat>
</repeats>
<repeat>
repeat 1
</repeat>
<repeat>
repeat 2
</repeat>
</root>""" )
logging.debug( repr(self.testdoc) )
dom = parseXmlString(self.testdoc)
testDict = getDictFromXml( dom )
logging.debug( repr(testDict) )
self.assert_( testDict.has_key( "root" ) )
rootDict = testDict["root"]
self.assert_( rootDict.has_key( "child1" ) )
child1Dict = rootDict["child1"]
self.assertEqual( child1Dict[""],
"some text\n \n more text\n \n final text" )
self.assertEqual( rootDict["attr"],
"attrtext" )
self.assert_( child1Dict.has_key( "child11" ) )
self.assert_( child1Dict.has_key( "child12" ) )
self.assertEqual( child1Dict["child12"][''],
"child text" )
self.assert_( rootDict.has_key( "repeats" ) )
self.assert_( isinstance( rootDict["repeats"], list ) )
self.assert_( rootDict.has_key( "repeat" ) )
self.assert_( isinstance( rootDict["repeat"], list ) )
def testGetDictFromXml2(self):
self.testdoc = ( """<?xml version="1.0" encoding="iso-8859-1" ?>
<eventInterfaces>
<eventInterface module='TestDespatchTask' name='TestEventLogger'>
<!-- This saves all events -->
<eventtype type="type1">
<eventsource source="source1" >
<event>
<!-- interested in all events -->
</event>
</eventsource>
</eventtype>
</eventInterface>
<eventInterface module='TestDespatchTask2' name='TestEventLogger2'>
<!-- This saves all events -->
<eventtype type="type2">
<eventsource source="source2" >
<event>
<!-- interested in all events -->
</event>
</eventsource>
<eventsource source="source3" >
<event>
<!-- interested in all events -->
</event>
</eventsource>
</eventtype>
<eventtype type="type3">
<eventsource source="source4" >
<event>
<!-- interested in all events -->
<params>
<second type="list">5,20,35,50</second>
</params>
</event>
<event>
<!-- interested in all events -->
<params>
<second>5</second>
</params>
</event>
</eventsource>
</eventtype>
</eventInterface>
</eventInterfaces>""" )
logging.debug( repr(self.testdoc) )
dom = parseXmlString(self.testdoc)
testDict = getDictFromXml( dom )
logging.debug( repr(testDict) )
self.assert_( testDict.has_key( "eventInterfaces" ) )
self.testPrintDictionary( testDict )
# Code to run unit tests directly from command line.
# Constructing the suite manually allows control over the order of tests.
def getTestSuite():
suite = unittest.TestSuite()
suite.addTest(TestDomHelpers("testParseXmlString"))
suite.addTest(TestDomHelpers("testParseXmlFile"))
suite.addTest(TestDomHelpers("testSaveXmlToFile"))
suite.addTest(TestDomHelpers("testSaveXmlToFileWithBackup"))
suite.addTest(TestDomHelpers("testGetNamedElem1"))
suite.addTest(TestDomHelpers("testGetNamedElem2"))
suite.addTest(TestDomHelpers("testGetNamedElem3"))
suite.addTest(TestDomHelpers("testElemText"))
suite.addTest(TestDomHelpers("testAttrText"))
suite.addTest(TestDomHelpers("testNodeListText"))
suite.addTest(TestDomHelpers("testIsAttribute"))
suite.addTest(TestDomHelpers("testIsAttributeElem"))
suite.addTest(TestDomHelpers("testIsAttributeText"))
suite.addTest(TestDomHelpers("testIsElement"))
suite.addTest(TestDomHelpers("testIsElementAttr"))
suite.addTest(TestDomHelpers("testIsElementText"))
suite.addTest(TestDomHelpers("testIsText"))
suite.addTest(TestDomHelpers("testIsTextAttr"))
suite.addTest(TestDomHelpers("testIsTextElem"))
suite.addTest(TestDomHelpers("testGetNamedNodeXml"))
suite.addTest(TestDomHelpers("testgetElemXml"))
suite.addTest(TestDomHelpers("testgetElemPrettyXml"))
suite.addTest(TestDomHelpers("testGetNamedNodeText"))
suite.addTest(TestDomHelpers("testNamedNodeAttrText"))
suite.addTest(TestDomHelpers("testRemoveChildren"))
suite.addTest(TestDomHelpers("testReplaceChildren"))
suite.addTest(TestDomHelpers("testReplaceChildrenText"))
suite.addTest(TestDomHelpers("testEscapeText"))
suite.addTest(TestDomHelpers("testEscapeTextForHtml"))
return suite
if __name__ == "__main__":
# unittest.main()
if len(sys.argv) > 1:
logging.basicConfig(level=logging.DEBUG)
tests = TestDomHelpers( sys.argv[1] )
else:
tests = getTestSuite()
runner = unittest.TextTestRunner(verbosity=2)
runner.run(tests)
|
|
"""
API for the command-line I{pyflakes} tool.
"""
from __future__ import with_statement
import sys
import os
import re
import _ast
from pyflakes import checker, __version__
from pyflakes import reporter as modReporter
__all__ = ['check', 'checkPath', 'checkRecursive', 'iterSourceCode', 'main']
PYTHON_SHEBANG_REGEX = re.compile(br'^#!.*\bpython[23w]?\b\s*$')
def check(codeString, filename, reporter=None):
"""
Check the Python source given by C{codeString} for flakes.
@param codeString: The Python source to check.
@type codeString: C{str}
@param filename: The name of the file the source came from, used to report
errors.
@type filename: C{str}
@param reporter: A L{Reporter} instance, where errors and warnings will be
reported.
@return: The number of warnings emitted.
@rtype: C{int}
"""
if reporter is None:
reporter = modReporter._makeDefaultReporter()
# First, compile into an AST and handle syntax errors.
try:
tree = compile(codeString, filename, "exec", _ast.PyCF_ONLY_AST)
except SyntaxError:
value = sys.exc_info()[1]
msg = value.args[0]
(lineno, offset, text) = value.lineno, value.offset, value.text
if checker.PYPY:
if text is None:
lines = codeString.splitlines()
if len(lines) >= lineno:
text = lines[lineno - 1]
if sys.version_info >= (3, ) and isinstance(text, bytes):
try:
text = text.decode('ascii')
except UnicodeDecodeError:
text = None
offset -= 1
# If there's an encoding problem with the file, the text is None.
if text is None:
# Avoid using msg, since for the only known case, it contains a
# bogus message that claims the encoding the file declared was
# unknown.
reporter.unexpectedError(filename, 'problem decoding source')
else:
reporter.syntaxError(filename, msg, lineno, offset, text)
return 1
except Exception:
reporter.unexpectedError(filename, 'problem decoding source')
return 1
# Okay, it's syntactically valid. Now check it.
w = checker.Checker(tree, filename)
w.messages.sort(key=lambda m: m.lineno)
for warning in w.messages:
reporter.flake(warning)
return len(w.messages)
def checkPath(filename, reporter=None):
"""
Check the given path, printing out any warnings detected.
@param reporter: A L{Reporter} instance, where errors and warnings will be
reported.
@return: the number of warnings printed
"""
if reporter is None:
reporter = modReporter._makeDefaultReporter()
try:
# in Python 2.6, compile() will choke on \r\n line endings. In later
# versions of python it's smarter, and we want binary mode to give
# compile() the best opportunity to do the right thing WRT text
# encodings.
if sys.version_info < (2, 7):
mode = 'rU'
else:
mode = 'rb'
with open(filename, mode) as f:
codestr = f.read()
if sys.version_info < (2, 7):
codestr += '\n' # Work around for Python <= 2.6
except UnicodeError:
reporter.unexpectedError(filename, 'problem decoding source')
return 1
except IOError:
msg = sys.exc_info()[1]
reporter.unexpectedError(filename, msg.args[1])
return 1
return check(codestr, filename, reporter)
def isPythonFile(filename):
"""Return True if filename points to a Python file."""
if filename.endswith('.py'):
return True
max_bytes = 128
try:
with open(filename, 'rb') as f:
text = f.read(max_bytes)
if not text:
return False
except IOError:
return False
first_line = text.splitlines()[0]
return PYTHON_SHEBANG_REGEX.match(first_line)
def iterSourceCode(paths):
"""
Iterate over all Python source files in C{paths}.
@param paths: A list of paths. Directories will be recursed into and
any .py files found will be yielded. Any non-directories will be
yielded as-is.
"""
for path in paths:
if os.path.isdir(path):
for dirpath, dirnames, filenames in os.walk(path):
for filename in filenames:
full_path = os.path.join(dirpath, filename)
if isPythonFile(full_path):
yield full_path
else:
yield path
def checkRecursive(paths, reporter):
"""
Recursively check all source files in C{paths}.
@param paths: A list of paths to Python source files and directories
containing Python source files.
@param reporter: A L{Reporter} where all of the warnings and errors
will be reported to.
@return: The number of warnings found.
"""
warnings = 0
for sourcePath in iterSourceCode(paths):
warnings += checkPath(sourcePath, reporter)
return warnings
def _exitOnSignal(sigName, message):
"""Handles a signal with sys.exit.
Some of these signals (SIGPIPE, for example) don't exist or are invalid on
Windows. So, ignore errors that might arise.
"""
import signal
try:
sigNumber = getattr(signal, sigName)
except AttributeError:
# the signal constants defined in the signal module are defined by
# whether the C library supports them or not. So, SIGPIPE might not
# even be defined.
return
def handler(sig, f):
sys.exit(message)
try:
signal.signal(sigNumber, handler)
except ValueError:
# It's also possible the signal is defined, but then it's invalid. In
# this case, signal.signal raises ValueError.
pass
def main(prog=None, args=None):
"""Entry point for the script "pyflakes"."""
import optparse
# Handle "Keyboard Interrupt" and "Broken pipe" gracefully
_exitOnSignal('SIGINT', '... stopped')
_exitOnSignal('SIGPIPE', 1)
parser = optparse.OptionParser(prog=prog, version=__version__)
(__, args) = parser.parse_args(args=args)
reporter = modReporter._makeDefaultReporter()
if args:
warnings = checkRecursive(args, reporter)
else:
warnings = check(sys.stdin.read(), '<stdin>', reporter)
raise SystemExit(warnings > 0)
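# Programmatic use of this API (the source string is illustrative; Reporter is
# the class from pyflakes.reporter that writes warnings and errors to two
# streams):
#
#     import sys
#     from pyflakes.api import check
#     from pyflakes.reporter import Reporter
#
#     source = "import os\n"
#     num_warnings = check(source, "<example>", Reporter(sys.stdout, sys.stderr))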
|
|
import functools
from sqlalchemy.ext.compiler import compiles
from sqlalchemy.schema import DDLElement, Column, \
ForeignKeyConstraint, CheckConstraint
from sqlalchemy import Integer
from sqlalchemy import types as sqltypes
from sqlalchemy.sql.visitors import traverse
from .. import util
if util.sqla_09:
from sqlalchemy.sql.elements import quoted_name
class AlterTable(DDLElement):
"""Represent an ALTER TABLE statement.
Only the string name and optional schema name of the table
is required, not a full Table object.
"""
def __init__(self, table_name, schema=None):
self.table_name = table_name
self.schema = schema
class RenameTable(AlterTable):
def __init__(self, old_table_name, new_table_name, schema=None):
super(RenameTable, self).__init__(old_table_name, schema=schema)
self.new_table_name = new_table_name
class AlterColumn(AlterTable):
def __init__(self, name, column_name, schema=None,
existing_type=None,
existing_nullable=None,
existing_server_default=None):
super(AlterColumn, self).__init__(name, schema=schema)
self.column_name = column_name
self.existing_type = sqltypes.to_instance(existing_type) \
if existing_type is not None else None
self.existing_nullable = existing_nullable
self.existing_server_default = existing_server_default
class ColumnNullable(AlterColumn):
def __init__(self, name, column_name, nullable, **kw):
super(ColumnNullable, self).__init__(name, column_name,
**kw)
self.nullable = nullable
class ColumnType(AlterColumn):
def __init__(self, name, column_name, type_, **kw):
super(ColumnType, self).__init__(name, column_name,
**kw)
self.type_ = sqltypes.to_instance(type_)
class ColumnName(AlterColumn):
def __init__(self, name, column_name, newname, **kw):
super(ColumnName, self).__init__(name, column_name, **kw)
self.newname = newname
class ColumnDefault(AlterColumn):
def __init__(self, name, column_name, default, **kw):
super(ColumnDefault, self).__init__(name, column_name, **kw)
self.default = default
class AddColumn(AlterTable):
def __init__(self, name, column, schema=None):
super(AddColumn, self).__init__(name, schema=schema)
self.column = column
class DropColumn(AlterTable):
def __init__(self, name, column, schema=None):
super(DropColumn, self).__init__(name, schema=schema)
self.column = column
@compiles(RenameTable)
def visit_rename_table(element, compiler, **kw):
return "%s RENAME TO %s" % (
alter_table(compiler, element.table_name, element.schema),
format_table_name(compiler, element.new_table_name, element.schema)
)
@compiles(AddColumn)
def visit_add_column(element, compiler, **kw):
return "%s %s" % (
alter_table(compiler, element.table_name, element.schema),
add_column(compiler, element.column, **kw)
)
@compiles(DropColumn)
def visit_drop_column(element, compiler, **kw):
return "%s %s" % (
alter_table(compiler, element.table_name, element.schema),
drop_column(compiler, element.column.name, **kw)
)
@compiles(ColumnNullable)
def visit_column_nullable(element, compiler, **kw):
return "%s %s %s" % (
alter_table(compiler, element.table_name, element.schema),
alter_column(compiler, element.column_name),
"DROP NOT NULL" if element.nullable else "SET NOT NULL"
)
@compiles(ColumnType)
def visit_column_type(element, compiler, **kw):
return "%s %s %s" % (
alter_table(compiler, element.table_name, element.schema),
alter_column(compiler, element.column_name),
"TYPE %s" % format_type(compiler, element.type_)
)
@compiles(ColumnName)
def visit_column_name(element, compiler, **kw):
return "%s RENAME %s TO %s" % (
alter_table(compiler, element.table_name, element.schema),
format_column_name(compiler, element.column_name),
format_column_name(compiler, element.newname)
)
@compiles(ColumnDefault)
def visit_column_default(element, compiler, **kw):
return "%s %s %s" % (
alter_table(compiler, element.table_name, element.schema),
alter_column(compiler, element.column_name),
"SET DEFAULT %s" %
format_server_default(compiler, element.default)
if element.default is not None
else "DROP DEFAULT"
)
def _table_for_constraint(constraint):
if isinstance(constraint, ForeignKeyConstraint):
return constraint.parent
else:
return constraint.table
def _columns_for_constraint(constraint):
if isinstance(constraint, ForeignKeyConstraint):
return [fk.parent for fk in constraint.elements]
elif isinstance(constraint, CheckConstraint):
return _find_columns(constraint.sqltext)
else:
return list(constraint.columns)
def _fk_spec(constraint):
if util.sqla_100:
source_columns = [
constraint.columns[key].name for key in constraint.column_keys]
else:
source_columns = [
element.parent.name for element in constraint.elements]
source_table = constraint.parent.name
source_schema = constraint.parent.schema
target_schema = constraint.elements[0].column.table.schema
target_table = constraint.elements[0].column.table.name
target_columns = [element.column.name for element in constraint.elements]
return (
source_schema, source_table,
source_columns, target_schema, target_table, target_columns)
def _is_type_bound(constraint):
# this deals with SQLAlchemy #3260, don't copy CHECK constraints
# that will be generated by the type.
if util.sqla_100:
# new feature added for #3260
return constraint._type_bound
else:
# old way, look at what we know Boolean/Enum to use
return (
constraint._create_rule is not None and
isinstance(
getattr(constraint._create_rule, "target", None),
sqltypes.SchemaType)
)
def _find_columns(clause):
"""locate Column objects within the given expression."""
cols = set()
traverse(clause, {}, {'column': cols.add})
return cols
def quote_dotted(name, quote):
"""quote the elements of a dotted name"""
if util.sqla_09 and isinstance(name, quoted_name):
return quote(name)
result = '.'.join([quote(x) for x in name.split('.')])
return result
def format_table_name(compiler, name, schema):
quote = functools.partial(compiler.preparer.quote, force=None)
if schema:
return quote_dotted(schema, quote) + "." + quote(name)
else:
return quote(name)
def format_column_name(compiler, name):
return compiler.preparer.quote(name, None)
def format_server_default(compiler, default):
return compiler.get_column_default_string(
Column("x", Integer, server_default=default)
)
def format_type(compiler, type_):
return compiler.dialect.type_compiler.process(type_)
def alter_table(compiler, name, schema):
return "ALTER TABLE %s" % format_table_name(compiler, name, schema)
def drop_column(compiler, name):
return 'DROP COLUMN %s' % format_column_name(compiler, name)
def alter_column(compiler, name):
return 'ALTER COLUMN %s' % format_column_name(compiler, name)
def add_column(compiler, column, **kw):
return "ADD COLUMN %s" % compiler.get_column_specification(column, **kw)
|
|
import asyncio
import discord
import os
import psutil
import platform
import time
import sys
import fnmatch
import subprocess
import pyspeedtest
from PIL import Image
from discord.ext import commands
from Cogs import Settings
from Cogs import DisplayName
from Cogs import ReadableTime
from Cogs import GetImage
from Cogs import Nullify
from Cogs import ProgressBar
# This is the Bot module - it contains things like nickname, status, etc
class Bot:
# Init with the bot reference, and a reference to the settings var
def __init__(self, bot, settings, path, pypath):
self.bot = bot
self.settings = settings
self.startTime = int(time.time())
self.path = path
self.pypath = pypath
async def onready(self):
# Get ready - play game!
game = None
try:
game = self.settings.serverDict['Game']
except KeyError:
pass
if game:
await self.bot.change_presence(game=discord.Game(name=game))
else:
await self.bot.change_presence(game=None)
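	# Leave a newly-joined server automatically if the server itself - or its
	# owner - appears in the BlockedServers list (matched by name or id).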
async def onserverjoin(self, server):
try:
serverList = self.settings.serverDict['BlockedServers']
except KeyError:
self.settings.serverDict['BlockedServers'] = []
serverList = self.settings.serverDict['BlockedServers']
for serv in serverList:
serverName = str(serv).lower()
try:
serverID = int(serv)
except Exception:
serverID = None
if serverName == server.name.lower() or serverID == server.id:
# Found it
await server.leave()
return True
# Check for owner name and id quick
# Name *MUST* be case-sensitive and have the discriminator for safety
namecheck = server.owner.name + "#" + str(server.owner.discriminator)
if serv == namecheck or serverID == server.owner.id:
# Got the owner
await server.leave()
return True
return False
@commands.command(pass_context=True)
async def ping(self, ctx):
"""Feeling lonely?"""
before_typing = time.monotonic()
await ctx.trigger_typing()
after_typing = time.monotonic()
ms = int((after_typing - before_typing) * 1000)
msg = '*{}*, ***PONG!*** (~{}ms)'.format(ctx.message.author.mention, ms)
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def nickname(self, ctx, *, name : str = None):
"""Set the bot's nickname (admin-only)."""
isAdmin = ctx.message.author.permissions_in(ctx.message.channel).administrator
# Only allow admins to change server stats
if not isAdmin:
await ctx.channel.send('You do not have sufficient privileges to access this command.')
return
# Let's get the bot's member in the current server
botName = "{}#{}".format(self.bot.user.name, self.bot.user.discriminator)
botMember = ctx.message.guild.get_member_named(botName)
await botMember.edit(nick=name)
@commands.command(pass_context=True)
async def hostinfo(self, ctx):
"""List info about the bot's host environment."""
message = await ctx.channel.send('Gathering info...')
# cpuCores = psutil.cpu_count(logical=False)
# cpuThred = psutil.cpu_count()
cpuThred = os.cpu_count()
cpuUsage = psutil.cpu_percent(interval=1)
memStats = psutil.virtual_memory()
memPerc = memStats.percent
memUsed = memStats.used
memTotal = memStats.total
memUsedGB = "{0:.1f}".format(((memUsed / 1024) / 1024) / 1024)
memTotalGB = "{0:.1f}".format(((memTotal/1024)/1024)/1024)
currentOS = platform.platform()
system = platform.system()
release = platform.release()
version = platform.version()
processor = platform.processor()
botMember = DisplayName.memberForID(self.bot.user.id, ctx.message.guild)
botName = DisplayName.name(botMember)
currentTime = int(time.time())
timeString = ReadableTime.getReadableTimeBetween(self.startTime, currentTime)
pythonMajor = sys.version_info.major
pythonMinor = sys.version_info.minor
pythonMicro = sys.version_info.micro
pythonRelease = sys.version_info.releaselevel
process = subprocess.Popen(['git', 'rev-parse', '--short', 'HEAD'], shell=False, stdout=subprocess.PIPE)
git_head_hash = process.communicate()[0].strip()
threadString = 'thread'
if not cpuThred == 1:
threadString += 's'
msg = '***{}\'s*** **Home:**\n'.format(botName)
msg += '```\n'
msg += 'OS : {}\n'.format(currentOS)
msg += 'Hostname : {}\n'.format(platform.node())
msg += 'Language : Python {}.{}.{} {}\n'.format(pythonMajor, pythonMinor, pythonMicro, pythonRelease)
msg += 'Commit : {}\n\n'.format(git_head_hash.decode("utf-8"))
msg += ProgressBar.center('{}% of {} {}'.format(cpuUsage, cpuThred, threadString), 'CPU') + '\n'
msg += ProgressBar.makeBar(int(round(cpuUsage))) + "\n\n"
#msg += '{}% of {} {}\n\n'.format(cpuUsage, cpuThred, threadString)
#msg += '{}% of {} ({} {})\n\n'.format(cpuUsage, processor, cpuThred, threadString)
msg += ProgressBar.center('{} ({}%) of {}GB used'.format(memUsedGB, memPerc, memTotalGB), 'RAM') + '\n'
msg += ProgressBar.makeBar(int(round(memPerc))) + "\n\n"
#msg += '{} ({}%) of {}GB used\n\n'.format(memUsedGB, memPerc, memTotalGB)
msg += '{} uptime```'.format(timeString)
await message.edit(content=msg)
@commands.command(pass_context=True)
async def speedtest(self, ctx):
"""Run a network speed test (owner only)."""
channel = ctx.message.channel
author = ctx.message.author
server = ctx.message.guild
# Only allow owner
isOwner = self.settings.isOwner(ctx.author)
if isOwner == None:
msg = 'I have not been claimed, *yet*.'
await ctx.channel.send(msg)
return
elif isOwner == False:
msg = 'You are not the *true* owner of me. Only the rightful owner can use this command.'
await ctx.channel.send(msg)
return
message = await channel.send('Running speed test...')
st = pyspeedtest.SpeedTest()
msg = '**Speed Test Results:**\n'
msg += '```\n'
msg += ' Ping: {}\n'.format(round(st.ping(), 2))
msg += 'Download: {}MB/s\n'.format(round(st.download()/1024/1024, 2))
msg += ' Upload: {}MB/s```'.format(round(st.upload()/1024/1024, 2))
await message.edit(content=msg)
@commands.command(pass_context=True)
async def adminunlim(self, ctx, *, unlimited : str = None):
"""Sets whether or not to allow unlimited xp to admins (owner only)."""
channel = ctx.message.channel
author = ctx.message.author
server = ctx.message.guild
# Check for admin status
isAdmin = ctx.author.permissions_in(ctx.channel).administrator
if not isAdmin:
await ctx.send("You do not have permission to use this command.")
return
# Get current status
adminUnlimited = self.settings.getServerStat(ctx.guild, "AdminUnlimited")
if unlimited == None:
# Output unlimited status
if adminUnlimited.lower() == "yes":
await channel.send('Admin unlimited is enabled.')
else:
await channel.send('Admin unlimited is disabled.')
return
elif unlimited.lower() == "yes" or unlimited.lower() == "on" or unlimited.lower() == "true":
unlimited = "Yes"
elif unlimited.lower() == "no" or unlimited.lower() == "off" or unlimited.lower() == "false":
unlimited = "No"
else:
unlimited = "No"
if unlimited == "Yes":
if adminUnlimited.lower() == "yes":
msg = 'Admin unlimited remains enabled.'
else:
msg = 'Admin unlimited now enabled.'
else:
if adminUnlimited.lower() == "no":
msg = 'Admin unlimited remains disabled.'
else:
msg = 'Admin unlimited now disabled.'
self.settings.setServerStat(ctx.guild, "AdminUnlimited", unlimited)
await channel.send(msg)
@commands.command(pass_context=True)
async def basadmin(self, ctx, *, asadmin : str = None):
"""Sets whether or not to treat bot-admins as admins with regards to xp (admin only)."""
channel = ctx.message.channel
author = ctx.message.author
server = ctx.message.guild
# Check for admin status
isAdmin = ctx.author.permissions_in(ctx.channel).administrator
if not isAdmin:
await ctx.send("You do not have permission to use this command.")
return
# Get current status
botAdminAsAdmin = self.settings.getServerStat(ctx.guild, "BotAdminAsAdmin")
if asadmin == None:
# Output unlimited status
if botAdminAsAdmin.lower() == "yes":
await channel.send('Bot-admin as admin is enabled.')
else:
await channel.send('Bot-admin as admin is disabled.')
return
elif asadmin.lower() == "yes" or asadmin.lower() == "on" or asadmin.lower() == "true":
asadmin = "Yes"
elif asadmin.lower() == "no" or asadmin.lower() == "off" or asadmin.lower() == "false":
asadmin = "No"
else:
asadmin = "No"
if asadmin == "Yes":
if botAdminAsAdmin.lower() == "yes":
msg = 'Bot-admin as admin remains enabled.'
else:
msg = 'Bot-admin as admin now enabled.'
else:
if botAdminAsAdmin.lower() == "no":
msg = 'Bot-admin as admin remains disabled.'
else:
msg = 'Bot-admin as admin now disabled.'
self.settings.setServerStat(ctx.guild, "BotAdminAsAdmin", asadmin)
await channel.send(msg)
@commands.command(pass_context=True)
async def joinpm(self, ctx, *, join_pm : str = None):
"""Sets whether or not to pm the rules to new users when they join (bot-admin only)."""
channel = ctx.message.channel
author = ctx.message.author
server = ctx.message.guild
# Check for admin status
isAdmin = ctx.author.permissions_in(ctx.channel).administrator
if not isAdmin:
checkAdmin = self.settings.getServerStat(ctx.guild, "AdminArray")
for role in ctx.author.roles:
for aRole in checkAdmin:
# Get the role that corresponds to the id
if str(aRole['ID']) == str(role.id):
isAdmin = True
if not isAdmin:
await ctx.send("You do not have permission to use this command.")
return
# Get current status
join_pm_setting = self.settings.getServerStat(ctx.guild, "JoinPM")
if join_pm == None:
# Output unlimited status
if join_pm_setting.lower() == "yes":
await channel.send('New user pm is enabled.')
else:
await channel.send('New user pm is disabled.')
return
elif join_pm.lower() == "yes" or join_pm.lower() == "on" or join_pm.lower() == "true":
join_pm = "Yes"
elif join_pm.lower() == "no" or join_pm.lower() == "off" or join_pm.lower() == "false":
join_pm = "No"
else:
join_pm = "No"
if join_pm == "Yes":
if join_pm_setting.lower() == "yes":
msg = 'New user pm remains enabled.'
else:
msg = 'New user pm now enabled.'
else:
if join_pm_setting.lower() == "no":
msg = 'New user pm remains disabled.'
else:
msg = 'New user pm now disabled.'
self.settings.setServerStat(ctx.guild, "JoinPM", join_pm)
await channel.send(msg)
@commands.command(pass_context=True)
async def avatar(self, ctx, filename : str = None, sizeLimit : int = 8000000):
"""Sets the bot's avatar (owner only)."""
channel = ctx.message.channel
author = ctx.message.author
server = ctx.message.guild
# Only allow owner
isOwner = self.settings.isOwner(ctx.author)
if isOwner == None:
msg = 'I have not been claimed, *yet*.'
await ctx.channel.send(msg)
return
elif isOwner == False:
msg = 'You are not the *true* owner of me. Only the rightful owner can use this command.'
await ctx.channel.send(msg)
return
if filename is None:
await self.bot.user.edit(avatar=None)
await ctx.channel.send('Avatar removed!')
# await self.bot.edit_profile(avatar=None)
return
# Check if we created a temp folder for this image
isTemp = False
status = await channel.send('Checking if url (and downloading if valid)...')
# File name is *something* - let's first check it as a url, then a file
extList = ["jpg", "jpeg", "png", "gif", "tiff", "tif"]
if GetImage.get_ext(filename) in extList:
# URL has an image extension
file = GetImage.download(filename)
if file:
# we got a download - let's reset and continue
filename = file
isTemp = True
if not os.path.isfile(filename):
if not os.path.isfile('./{}'.format(filename)):
await status.edit(content='*{}* doesn\'t exist absolutely, or in my working directory.'.format(filename))
# File doesn't exist
return
else:
# Local file name
filename = './{}'.format(filename)
# File exists - check if image
img = Image.open(filename)
ext = img.format
if not ext:
# File isn't a valid image
await status.edit(content='*{}* isn\'t a valid image format.'.format(filename))
return
wasConverted = False
# Is an image PIL understands
		if not ext.lower() == "png":
# Not a PNG - let's convert
await status.edit(content='Converting to png...')
filename = '{}.png'.format(filename)
img.save(filename)
wasConverted = True
# We got it - crop and go from there
w, h = img.size
dw = dh = 0
if w > h:
# Wide
dw = int((w-h)/2)
elif h > w:
# Tall
dh = int((h-w)/2)
# Run the crop
await status.edit(content='Cropping (if needed)...')
img.crop((dw, dh, w-dw, h-dh)).save(filename)
# Should be a square png here - let's check size
# Let's make sure it's less than the passed limit
imageSize = os.stat(filename)
await status.edit(content='Resizing (if needed)...')
while int(imageSize.st_size) > sizeLimit:
# Image is too big - resize
myimage = Image.open(filename)
xsize, ysize = myimage.size
ratio = sizeLimit/int(imageSize.st_size)
xsize *= ratio
ysize *= ratio
myimage = myimage.resize((int(xsize), int(ysize)), Image.ANTIALIAS)
myimage.save(filename)
imageSize = os.stat(filename)
# Image is resized - let's save it
img = Image.open(filename)
ext = img.format
img.close()
await status.edit(content='Uploading and applying avatar...')
with open(filename, 'rb') as f:
newAvatar = f.read()
await self.bot.user.edit(avatar=newAvatar)
# await self.bot.edit_profile(avatar=newAvatar)
# Cleanup - try removing with shutil.rmtree, then with os.remove()
await status.edit(content='Cleaning up...')
if isTemp:
GetImage.remove(filename)
else:
if wasConverted:
os.remove(filename)
await status.edit(content='Avatar set!')
@commands.command(pass_context=True)
async def reboot(self, ctx, force = None):
"""Reboots the bot (owner only)."""
channel = ctx.message.channel
author = ctx.message.author
server = ctx.message.guild
# Only allow owner
isOwner = self.settings.isOwner(ctx.author)
if isOwner == None:
msg = 'I have not been claimed, *yet*.'
await ctx.channel.send(msg)
return
elif isOwner == False:
msg = 'You are not the *true* owner of me. Only the rightful owner can use this command.'
await ctx.channel.send(msg)
return
self.settings.flushSettings()
quiet = False
if force and force.lower() == 'force':
quiet = True
if not quiet:
msg = 'Flushed settings to disk.\nRebooting...'
await ctx.channel.send(msg)
# Logout, stop the event loop, close the loop, quit
for task in asyncio.Task.all_tasks():
try:
task.cancel()
except Exception:
continue
try:
await self.bot.logout()
self.bot.loop.stop()
self.bot.loop.close()
except Exception:
pass
try:
# Try to reboot
subprocess.Popen([self.pypath, self.path, "-reboot", "True", "-channel", str(ctx.channel.id), "-path", self.pypath])
# Kill this process
await exit(0)
except Exception:
pass
@commands.command(pass_context=True)
async def shutdown(self, ctx, force = None):
"""Shuts down the bot (owner only)."""
channel = ctx.message.channel
author = ctx.message.author
server = ctx.message.guild
# Only allow owner
isOwner = self.settings.isOwner(ctx.author)
if isOwner == None:
msg = 'I have not been claimed, *yet*.'
await ctx.channel.send(msg)
return
elif isOwner == False:
msg = 'You are not the *true* owner of me. Only the rightful owner can use this command.'
await ctx.channel.send(msg)
return
self.settings.flushSettings()
quiet = False
if force and force.lower() == 'force':
quiet = True
if not quiet:
msg = 'Flushed settings to disk.\nShutting down...'
await ctx.channel.send(msg)
# Logout, stop the event loop, close the loop, quit
for task in asyncio.Task.all_tasks():
try:
task.cancel()
except Exception:
continue
try:
await self.bot.logout()
self.bot.loop.stop()
self.bot.loop.close()
except Exception:
pass
try:
# Kill this process
await exit(0)
except Exception:
pass
@commands.command(pass_context=True)
async def servers(self, ctx):
"""Lists the number of servers I'm connected to!"""
channel = ctx.message.channel
author = ctx.message.author
server = ctx.message.guild
total = 0
for server in self.bot.guilds:
total += 1
if total == 1:
msg = 'I am a part of *1* server!'
else:
msg = 'I am a part of *{}* servers!'.format(total)
await channel.send(msg)
@commands.command(pass_context=True)
async def playgame(self, ctx, *, game : str = None):
"""Sets the playing status of the bot (owner-only)."""
channel = ctx.message.channel
author = ctx.message.author
server = ctx.message.guild
# Check if we're suppressing @here and @everyone mentions
if self.settings.getServerStat(ctx.message.guild, "SuppressMentions").lower() == "yes":
suppress = True
else:
suppress = False
# Only allow owner
isOwner = self.settings.isOwner(ctx.author)
if isOwner == None:
msg = 'I have not been claimed, *yet*.'
await ctx.channel.send(msg)
return
elif isOwner == False:
msg = 'You are not the *true* owner of me. Only the rightful owner can use this command.'
await ctx.channel.send(msg)
return
if game == None:
self.settings.serverDict['Game'] = None
msg = 'Removing my playing status...'
status = await channel.send(msg)
await self.bot.change_presence(game=None)
await status.edit(content='Playing status removed!')
self.settings.flushSettings()
return
self.settings.serverDict['Game'] = game
msg = 'Setting my playing status to *{}*...'.format(game)
# Check for suppress
if suppress:
msg = Nullify.clean(msg)
status = await channel.send(msg)
await self.bot.change_presence(game=discord.Game(name=game))
# Check for suppress
if suppress:
game = Nullify.clean(game)
await status.edit(content='Playing status set to *{}!*'.format(game))
self.settings.flushSettings()
@commands.command(pass_context=True)
async def setbotparts(self, ctx, *, parts : str = None):
"""Set the bot's parts - can be a url, formatted text, or nothing to clear."""
# Check if we're suppressing @here and @everyone mentions
if self.settings.getServerStat(ctx.message.guild, "SuppressMentions").lower() == "yes":
suppress = True
else:
suppress = False
# Only allow owner
isOwner = self.settings.isOwner(ctx.author)
if isOwner == None:
msg = 'I have not been claimed, *yet*.'
await ctx.channel.send(msg)
return
elif isOwner == False:
msg = 'You are not the *true* owner of me. Only the rightful owner can use this command.'
await ctx.channel.send(msg)
return
channel = ctx.message.channel
author = ctx.message.author
server = ctx.message.guild
if not parts:
parts = ""
self.settings.setGlobalUserStat(self.bot.user, "Parts", parts)
msg = '*{}\'s* parts have been set to:\n{}'.format(DisplayName.serverNick(self.bot.user, server), parts)
# Check for suppress
if suppress:
msg = Nullify.clean(msg)
await channel.send(msg)
@commands.command(pass_context=True)
async def source(self, ctx):
"""Link the github source."""
source = "https://github.com/corpnewt/CorpBot.py"
msg = '**My insides are located at:**\n\n{}'.format(source)
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def block(self, ctx, *, server : str = None):
"""Blocks the bot from joining a server - takes either a name or an id (owner-only).
		Can also take the id or case-sensitive name + discriminator of the owner (eg. Bob#1234)."""
# Check if we're suppressing @here and @everyone mentions
if self.settings.getServerStat(ctx.message.guild, "SuppressMentions").lower() == "yes":
suppress = True
else:
suppress = False
# Only allow owner
isOwner = self.settings.isOwner(ctx.author)
if isOwner == None:
msg = 'I have not been claimed, *yet*.'
await ctx.channel.send(msg)
return
elif isOwner == False:
msg = 'You are not the *true* owner of me. Only the rightful owner can use this command.'
await ctx.channel.send(msg)
return
if server == None:
# No server provided
await ctx.send("Usage: `{}block [server name/id or owner name#desc/id]`".format(ctx.prefix))
return
try:
serverList = self.settings.serverDict['BlockedServers']
except KeyError:
self.settings.serverDict['BlockedServers'] = []
serverList = self.settings.serverDict['BlockedServers']
for serv in serverList:
if str(serv).lower() == server.lower():
# Found a match - already blocked.
msg = "*{}* is already blocked!".format(serv)
if suppress:
msg = Nullify.clean(msg)
await ctx.channel.send(msg)
return
# Not blocked
self.settings.serverDict['BlockedServers'].append(server)
msg = "*{}* now blocked!".format(server)
# Check for suppress
if suppress:
msg = Nullify.clean(msg)
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def unblock(self, ctx, *, server : str = None):
"""Unblocks a server or owner (owner-only)."""
# Check if we're suppressing @here and @everyone mentions
if self.settings.getServerStat(ctx.message.guild, "SuppressMentions").lower() == "yes":
suppress = True
else:
suppress = False
# Only allow owner
isOwner = self.settings.isOwner(ctx.author)
if isOwner == None:
msg = 'I have not been claimed, *yet*.'
await ctx.channel.send(msg)
return
elif isOwner == False:
msg = 'You are not the *true* owner of me. Only the rightful owner can use this command.'
await ctx.channel.send(msg)
return
if server == None:
# No server provided
await ctx.send("Usage: `{}unblock [server name/id or owner name#desc/id]`".format(ctx.prefix))
return
try:
serverList = self.settings.serverDict['BlockedServers']
except KeyError:
self.settings.serverDict['BlockedServers'] = []
serverList = self.settings.serverDict['BlockedServers']
for serv in serverList:
if str(serv).lower() == server.lower():
# Found a match - already blocked.
self.settings.serverDict['BlockedServers'].remove(serv)
msg = "*{}* unblocked!".format(serv)
if suppress:
msg = Nullify.clean(msg)
await ctx.channel.send(msg)
return
# Not found
msg = "I couldn't find *{}* in my blocked list.".format(server)
# Check for suppress
if suppress:
msg = Nullify.clean(msg)
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def unblockall(self, ctx):
"""Unblocks all blocked servers and owners (owner-only)."""
# Check if we're suppressing @here and @everyone mentions
if self.settings.getServerStat(ctx.message.guild, "SuppressMentions").lower() == "yes":
suppress = True
else:
suppress = False
# Only allow owner
isOwner = self.settings.isOwner(ctx.author)
if isOwner == None:
msg = 'I have not been claimed, *yet*.'
await ctx.channel.send(msg)
return
elif isOwner == False:
msg = 'You are not the *true* owner of me. Only the rightful owner can use this command.'
await ctx.channel.send(msg)
return
self.settings.serverDict['BlockedServers'] = []
await ctx.channel.send("*All* servers and owners unblocked!")
@commands.command(pass_context=True)
async def blocked(self, ctx):
"""Lists all blocked servers and owners (owner-only)."""
# Check if we're suppressing @here and @everyone mentions
if self.settings.getServerStat(ctx.message.guild, "SuppressMentions").lower() == "yes":
suppress = True
else:
suppress = False
# Only allow owner
isOwner = self.settings.isOwner(ctx.author)
if isOwner == None:
msg = 'I have not been claimed, *yet*.'
await ctx.channel.send(msg)
return
elif isOwner == False:
msg = 'You are not the *true* owner of me. Only the rightful owner can use this command.'
await ctx.channel.send(msg)
return
try:
serverList = self.settings.serverDict['BlockedServers']
except KeyError:
self.settings.serverDict['BlockedServers'] = []
serverList = self.settings.serverDict['BlockedServers']
if not len(serverList):
msg = "There are no blocked servers or owners!"
else:
msg = "__Currently Blocked:__\n\n{}".format(', '.join(serverList))
# Check for suppress
if suppress:
msg = Nullify.clean(msg)
await ctx.channel.send(msg)
@commands.command(pass_context=True)
async def cloc(self, ctx):
"""Outputs the total count of lines of code in the currently installed repo."""
# Script pulled and edited from https://github.com/kyco/python-count-lines-of-code/blob/python3/cloc.py
# Get our current working directory - should be the bot's home
path = os.getcwd()
# Set up some lists
extensions = []
code_count = []
include = ['py','bat','sh']
# Get the extensions - include our include list
extensions = self.get_extensions(path, include)
for run in extensions:
extension = "*."+run
temp = 0
for root, dir, files in os.walk(path):
for items in fnmatch.filter(files, extension):
value = root + "/" + items
temp += sum(+1 for line in open(value, 'rb'))
code_count.append(temp)
pass
# Set up our output
msg = 'Some poor soul took the time to sloppily write the following to bring me life:\n```\n'
padTo = 0
for idx, val in enumerate(code_count):
# Find out which has the longest
tempLen = len(str('{:,}'.format(code_count[idx])))
if tempLen > padTo:
padTo = tempLen
for idx, val in enumerate(code_count):
lineWord = 'lines'
if code_count[idx] == 1:
lineWord = 'line'
# Setup a right-justified string padded with spaces
numString = str('{:,}'.format(code_count[idx])).rjust(padTo, ' ')
msg += numString + " " + lineWord + " of " + extensions[idx] + "\n"
# msg += extensions[idx] + ": " + str(code_count[idx]) + ' ' + lineWord + '\n'
# print(extensions[idx] + ": " + str(code_count[idx]))
pass
msg += '```'
await ctx.channel.send(msg)
@cloc.error
async def cloc_error(self, ctx, error):
# do stuff
		msg = 'cloc Error: {}'.format(error)
		await ctx.channel.send(msg)
# Helper function to get extensions
def get_extensions(self, path, excl):
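		# Note: despite the parameter name, `excl` is used as an include list -
		# only extensions that appear in it are collected from the file walk.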
extensions = []
for root, dir, files in os.walk(path):
for items in fnmatch.filter(files, "*"):
temp_extensions = items.rfind(".")
ext = items[temp_extensions+1:]
if ext not in extensions:
if ext in excl:
extensions.append(ext)
pass
return extensions
|
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Framework of debug-wrapped sessions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import shutil
import tempfile
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.debug import debug_data
from tensorflow.python.debug import framework
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import test_util
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
class TestDebugWrapperSession(framework.BaseDebugWrapperSession):
"""A concrete implementation of BaseDebugWrapperSession for test."""
def __init__(self, sess, dump_root, observer):
# Supply dump root.
self._dump_root = dump_root
# Supply observer.
self._obs = observer
# Invoke superclass constructor.
framework.BaseDebugWrapperSession.__init__(self, sess)
def on_session_init(self, request):
"""Override abstract on-session-init callback method."""
self._obs["sess_init_count"] += 1
self._obs["request_sess"] = request.session
return framework.OnSessionInitResponse(
framework.OnSessionInitAction.PROCEED)
def on_run_start(self, request):
"""Override abstract on-run-start callback method."""
self._obs["on_run_start_count"] += 1
self._obs["run_fetches"] = request.fetches
self._obs["run_feed_dict"] = request.feed_dict
return framework.OnRunStartResponse(
framework.OnRunStartAction.DEBUG_RUN,
["file://" + self._dump_root])
def on_run_end(self, request):
"""Override abstract on-run-end callback method."""
self._obs["on_run_end_count"] += 1
self._obs["performed_action"] = request.performed_action
return framework.OnRunEndResponse()
class TestDebugWrapperSessionBadAction(framework.BaseDebugWrapperSession):
"""A concrete implementation of BaseDebugWrapperSession for test.
This class intentionally puts a bad action value in OnSessionInitResponse
and/or in OnRunStartAction to test the handling of such invalid cases.
"""
def __init__(
self,
sess,
bad_init_action=None,
bad_run_start_action=None,
bad_debug_urls=None):
"""Constructor.
Args:
sess: The TensorFlow Session object to be wrapped.
bad_init_action: (str) bad action value to be returned during the
on-session-init callback.
bad_run_start_action: (str) bad action value to be returned during the
        on-run-start callback.
bad_debug_urls: Bad URL values to be returned during the on-run-start
callback.
"""
self._bad_init_action = bad_init_action
self._bad_run_start_action = bad_run_start_action
self._bad_debug_urls = bad_debug_urls
# Invoke superclass constructor.
framework.BaseDebugWrapperSession.__init__(self, sess)
def on_session_init(self, request):
if self._bad_init_action:
return framework.OnSessionInitResponse(self._bad_init_action)
else:
return framework.OnSessionInitResponse(
framework.OnSessionInitAction.PROCEED)
def on_run_start(self, request):
debug_urls = self._bad_debug_urls or []
if self._bad_run_start_action:
return framework.OnRunStartResponse(
self._bad_run_start_action, debug_urls)
else:
return framework.OnRunStartResponse(
framework.OnRunStartAction.DEBUG_RUN, debug_urls)
def on_run_end(self, request):
return framework.OnRunEndResponse()
class DebugWrapperSessionTest(test_util.TensorFlowTestCase):
def setUp(self):
self._observer = {
"sess_init_count": 0,
"request_sess": None,
"on_run_start_count": 0,
"run_fetches": None,
"run_feed_dict": None,
"on_run_end_count": 0,
"performed_action": None,
}
self._dump_root = tempfile.mkdtemp()
self._sess = session.Session()
self._a_init_val = np.array([[5.0, 3.0], [-1.0, 0.0]])
self._b_init_val = np.array([[2.0], [-1.0]])
self._c_val = np.array([[-4.0], [6.0]])
self._a_init = constant_op.constant(
self._a_init_val, shape=[2, 2], name="a1_init")
self._b_init = constant_op.constant(
self._b_init_val, shape=[2, 1], name="b_init")
self._a = variables.Variable(self._a_init, name="a1")
self._b = variables.Variable(self._b_init, name="b")
self._c = constant_op.constant(self._c_val, shape=[2, 1], name="c")
# Matrix product of a and b.
self._p = math_ops.matmul(self._a, self._b, name="p1")
# Sum of two vectors.
self._s = math_ops.add(self._p, self._c, name="s")
# Initialize the variables.
self._sess.run(self._a.initializer)
self._sess.run(self._b.initializer)
def tearDown(self):
# Tear down temporary dump directory.
shutil.rmtree(self._dump_root)
def testSessionInit(self):
self.assertEqual(0, self._observer["sess_init_count"])
TestDebugWrapperSession(self._sess, self._dump_root, self._observer)
# Assert that on-session-init callback is invoked.
self.assertEqual(1, self._observer["sess_init_count"])
# Assert that the request to the on-session-init callback carries the
# correct session object.
self.assertEqual(self._sess, self._observer["request_sess"])
def testInteractiveSessionInit(self):
"""The wrapper should work also on other subclassses of session.Session."""
TestDebugWrapperSession(
session.InteractiveSession(), self._dump_root, self._observer)
def testSessionRun(self):
wrapper = TestDebugWrapperSession(
self._sess, self._dump_root, self._observer)
# Check initial state of the observer.
self.assertEqual(0, self._observer["on_run_start_count"])
self.assertEqual(0, self._observer["on_run_end_count"])
s = wrapper.run(self._s)
# Assert the run return value is correct.
self.assertAllClose(np.array([[3.0], [4.0]]), s)
# Assert the on-run-start method is invoked.
self.assertEqual(1, self._observer["on_run_start_count"])
# Assert the on-run-start request reflects the correct fetch.
self.assertEqual(self._s, self._observer["run_fetches"])
# Assert the on-run-start request reflects the correct feed_dict.
self.assertIsNone(self._observer["run_feed_dict"])
# Assert the file debug URL has led to dump on the filesystem.
dump = debug_data.DebugDumpDir(self._dump_root)
self.assertEqual(7, len(dump.dumped_tensor_data))
# Assert the on-run-end method is invoked.
self.assertEqual(1, self._observer["on_run_end_count"])
# Assert the performed action field in the on-run-end callback request is
# correct.
self.assertEqual(
framework.OnRunStartAction.DEBUG_RUN,
self._observer["performed_action"])
def testSessionInitInvalidSessionType(self):
"""Attempt to wrap a non-Session-type object should cause an exception."""
wrapper = TestDebugWrapperSessionBadAction(self._sess)
with self.assertRaisesRegexp(TypeError, "Expected type .*; got type .*"):
TestDebugWrapperSessionBadAction(wrapper)
def testSessionInitBadActionValue(self):
with self.assertRaisesRegexp(
ValueError, "Invalid OnSessionInitAction value: nonsense_action"):
TestDebugWrapperSessionBadAction(
self._sess, bad_init_action="nonsense_action")
def testRunStartBadActionValue(self):
wrapper = TestDebugWrapperSessionBadAction(
self._sess, bad_run_start_action="nonsense_action")
with self.assertRaisesRegexp(
ValueError, "Invalid OnRunStartAction value: nonsense_action"):
wrapper.run(self._s)
def testRunStartBadURLs(self):
# debug_urls ought to be a list of str, not a str. So an exception should
# be raised during a run() call.
wrapper = TestDebugWrapperSessionBadAction(
self._sess, bad_debug_urls="file://foo")
with self.assertRaisesRegexp(TypeError, "Expected type .*; got type .*"):
wrapper.run(self._s)
if __name__ == "__main__":
googletest.main()
|
|
# Copyright (c) 2019-2021 Manfred Moitzi
# License: MIT License
from typing import TYPE_CHECKING, Iterable, List, Union
from ezdxf.lldxf.attributes import (
DXFAttr,
DXFAttributes,
DefSubclass,
XType,
group_code_mapping,
)
from ezdxf.lldxf.const import (
SUBCLASS_MARKER,
DXF2000,
DXFTypeError,
DXF2013,
DXFStructureError,
)
from ezdxf.lldxf.tags import Tags, DXFTag
from ezdxf.math import Matrix44
from ezdxf.tools import crypt
from .dxfentity import base_class, SubclassProcessor
from .dxfgfx import DXFGraphic, acdb_entity
from .factory import register_entity
if TYPE_CHECKING:
from ezdxf.eztypes import TagWriter, DXFNamespace
__all__ = [
"Body",
"Solid3d",
"Region",
"Surface",
"ExtrudedSurface",
"LoftedSurface",
"RevolvedSurface",
"SweptSurface",
]
acdb_modeler_geometry = DefSubclass(
"AcDbModelerGeometry",
{
"version": DXFAttr(70, default=1),
"flags": DXFAttr(290, dxfversion=DXF2013),
"uid": DXFAttr(2, dxfversion=DXF2013),
},
)
acdb_modeler_geometry_group_codes = group_code_mapping(acdb_modeler_geometry)
# With R2013/AC1027 the Modeler Geometry (ACIS data) is stored in the ACDSDATA
# section as binary encoded information. Detection of this case:
# group codes 70, 1, 3 are missing
# group codes 290, 2 are present
#
# 0
# ACDSRECORD
# 90
# 1
# 2
# AcDbDs::ID
# 280
# 10
# 320
# 19B <<< handle of associated 3DSOLID entity in model space
# 2
# ASM_Data
# 280
# 15
# 94
# 7197 <<< size in bytes ???
# 310
# 414349532042696E61727946696C6...
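# ACIS data is kept as a list of text lines for DXF R2000-R2010 and as a list
# of binary encoded chunks (bytes) for DXF R2013 and later: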
ACIS_DATA = Union[List[str], List[bytes]]
@register_entity
class Body(DXFGraphic):
"""DXF BODY entity - container entity for embedded ACIS data."""
DXFTYPE = "BODY"
DXFATTRIBS = DXFAttributes(base_class, acdb_entity, acdb_modeler_geometry)
MIN_DXF_VERSION_FOR_EXPORT = DXF2000
def __init__(self):
super().__init__()
self._acis_data: ACIS_DATA = []
@property
def acis_data(self) -> ACIS_DATA:
"""Get ACIS text data as list of strings for DXF R2000 to DXF R2010 and
binary encoded ACIS data for DXF R2013 and later as list of bytes.
"""
if self.doc is not None and self.has_binary_data:
return self.doc.acdsdata.get_acis_data(self.dxf.handle)
else:
return self._acis_data
@acis_data.setter
def acis_data(self, lines: Iterable[str]):
"""Set ACIS data as list of strings for DXF R2000 to DXF R2010. In case
of DXF R2013 and later, setting ACIS data as binary data is not
supported.
"""
if self.has_binary_data:
raise DXFTypeError(
"Setting ACIS data not supported for DXF R2013 and later."
)
else:
self._acis_data = list(lines)
@property
def has_binary_data(self):
"""Returns ``True`` if ACIS data is of type ``List[bytes]``, ``False``
if data is of type ``List[str]``.
"""
if self.doc:
return self.doc.dxfversion >= DXF2013
else:
return False
def copy(self):
"""Prevent copying. (internal interface)"""
raise DXFTypeError("Copying of ACIS data not supported.")
def load_dxf_attribs(
self, processor: SubclassProcessor = None
) -> "DXFNamespace":
"""Loading interface. (internal API)"""
dxf = super().load_dxf_attribs(processor)
if processor:
processor.fast_load_dxfattribs(
dxf, acdb_modeler_geometry_group_codes, 2, log=False
)
if not self.has_binary_data:
self.load_acis_data(processor.subclasses[2])
return dxf
def load_acis_data(self, tags: Tags):
"""Loading interface. (internal API)"""
text_lines = tags2textlines(tag for tag in tags if tag.code in (1, 3))
self.acis_data = crypt.decode(text_lines) # type: ignore
def export_entity(self, tagwriter: "TagWriter") -> None:
"""Export entity specific data as DXF tags. (internal API)"""
super().export_entity(tagwriter)
tagwriter.write_tag2(SUBCLASS_MARKER, acdb_modeler_geometry.name)
if tagwriter.dxfversion >= DXF2013:
# ACIS data stored in the ACDSDATA section as binary encoded
# information.
if self.dxf.hasattr("version"):
tagwriter.write_tag2(70, self.dxf.version)
self.dxf.export_dxf_attribs(tagwriter, ["flags", "uid"])
else:
# DXF R2000 - R2013 stores ACIS data as text in entity
self.dxf.export_dxf_attribs(tagwriter, "version")
self.export_acis_data(tagwriter)
def export_acis_data(self, tagwriter: "TagWriter") -> None:
"""Export ACIS data as DXF tags. (internal API)"""
def cleanup(lines):
for line in lines:
yield line.rstrip().replace("\n", "")
tags = Tags(textlines2tags(crypt.encode(cleanup(self.acis_data))))
tagwriter.write_tags(tags)
def set_text(self, text: str, sep: str = "\n") -> None:
"""Set ACIS data from one string."""
self.acis_data = text.split(sep)
def tostring(self) -> str:
"""Returns ACIS data as one string for DXF R2000 to R2010."""
if self.has_binary_data:
return ""
else:
return "\n".join(self.acis_data) # type: ignore
def tobytes(self) -> bytes:
"""Returns ACIS data as joined bytes for DXF R2013 and later."""
if self.has_binary_data:
return b"".join(self.acis_data) # type: ignore
else:
return b""
def tags2textlines(tags: Iterable) -> Iterable[str]:
"""Yields text lines from code 1 and 3 tags, code 1 starts a line following
code 3 tags are appended to the line.
"""
line = None
for code, value in tags:
if code == 1:
if line is not None:
yield line
line = value
elif code == 3:
line += value
if line is not None:
yield line
def textlines2tags(lines: Iterable[str]) -> Iterable[DXFTag]:
"""Yields text lines as DXFTags, splitting long lines (>255) int code 1
and code 3 tags.
"""
for line in lines:
text = line[:255]
tail = line[255:]
yield DXFTag(1, text)
while len(tail):
text = tail[:255]
tail = tail[255:]
yield DXFTag(3, text)
@register_entity
class Region(Body):
"""DXF REGION entity - container entity for embedded ACIS data."""
DXFTYPE = "REGION"
acdb_3dsolid = DefSubclass(
"AcDb3dSolid",
{
"history_handle": DXFAttr(350, default="0"),
},
)
acdb_3dsolid_group_codes = group_code_mapping(acdb_3dsolid)
@register_entity
class Solid3d(Body):
"""DXF 3DSOLID entity - container entity for embedded ACIS data."""
DXFTYPE = "3DSOLID"
DXFATTRIBS = DXFAttributes(
base_class, acdb_entity, acdb_modeler_geometry, acdb_3dsolid
)
def load_dxf_attribs(
self, processor: SubclassProcessor = None
) -> "DXFNamespace":
dxf = super().load_dxf_attribs(processor)
if processor:
processor.fast_load_dxfattribs(dxf, acdb_3dsolid_group_codes, 3)
return dxf
def export_entity(self, tagwriter: "TagWriter") -> None:
"""Export entity specific data as DXF tags."""
# base class export is done by parent class
super().export_entity(tagwriter)
# AcDbEntity export is done by parent class
# AcDbModelerGeometry export is done by parent class
tagwriter.write_tag2(SUBCLASS_MARKER, acdb_3dsolid.name)
self.dxf.export_dxf_attribs(tagwriter, "history_handle")
def load_matrix(subclass: "Tags", code: int) -> Matrix44:
values = [tag.value for tag in subclass.find_all(code)]
if len(values) != 16:
raise DXFStructureError("Invalid transformation matrix.")
return Matrix44(values)
def export_matrix(tagwriter: "TagWriter", code: int, matrix: Matrix44) -> None:
for value in list(matrix):
tagwriter.write_tag2(code, value)
acdb_surface = DefSubclass(
"AcDbSurface",
{
"u_count": DXFAttr(71),
"v_count": DXFAttr(72),
},
)
acdb_surface_group_codes = group_code_mapping(acdb_surface)
@register_entity
class Surface(Body):
"""DXF SURFACE entity - container entity for embedded ACIS data."""
DXFTYPE = "SURFACE"
DXFATTRIBS = DXFAttributes(
base_class, acdb_entity, acdb_modeler_geometry, acdb_surface
)
def load_dxf_attribs(
self, processor: SubclassProcessor = None
) -> "DXFNamespace":
dxf = super().load_dxf_attribs(processor)
if processor:
processor.fast_load_dxfattribs(dxf, acdb_surface_group_codes, 3)
return dxf
def export_entity(self, tagwriter: "TagWriter") -> None:
"""Export entity specific data as DXF tags."""
# base class export is done by parent class
super().export_entity(tagwriter)
# AcDbEntity export is done by parent class
# AcDbModelerGeometry export is done by parent class
tagwriter.write_tag2(SUBCLASS_MARKER, acdb_surface.name)
self.dxf.export_dxf_attribs(tagwriter, ["u_count", "v_count"])
acdb_extruded_surface = DefSubclass(
"AcDbExtrudedSurface",
{
"class_id": DXFAttr(90),
"sweep_vector": DXFAttr(10, xtype=XType.point3d),
# 16x group code 40: Transform matrix of extruded entity (16 floats;
# row major format; default = identity matrix)
"draft_angle": DXFAttr(42, default=0.0), # in radians
"draft_start_distance": DXFAttr(43, default=0.0),
"draft_end_distance": DXFAttr(44, default=0.0),
"twist_angle": DXFAttr(45, default=0.0), # in radians?
"scale_factor": DXFAttr(48, default=0.0),
"align_angle": DXFAttr(49, default=0.0), # in radians
# 16x group code 46: Transform matrix of sweep entity (16 floats;
# row major format; default = identity matrix)
# 16x group code 47: Transform matrix of path entity (16 floats;
# row major format; default = identity matrix)
"solid": DXFAttr(290, default=0), # bool
# 0=No alignment; 1=Align sweep entity to path:
"sweep_alignment_flags": DXFAttr(70, default=0),
"unknown1": DXFAttr(71, default=0),
# 2=Translate sweep entity to path; 3=Translate path to sweep entity:
"align_start": DXFAttr(292, default=0), # bool
"bank": DXFAttr(293, default=0), # bool
"base_point_set": DXFAttr(294, default=0), # bool
"sweep_entity_transform_computed": DXFAttr(295, default=0), # bool
"path_entity_transform_computed": DXFAttr(296, default=0), # bool
"reference_vector_for_controlling_twist": DXFAttr(
11, xtype=XType.point3d
),
},
)
acdb_extruded_surface_group_codes = group_code_mapping(acdb_extruded_surface)
@register_entity
class ExtrudedSurface(Surface):
"""DXF EXTRUDEDSURFACE entity - container entity for embedded ACIS data."""
DXFTYPE = "EXTRUDEDSURFACE"
DXFATTRIBS = DXFAttributes(
base_class,
acdb_entity,
acdb_modeler_geometry,
acdb_surface,
acdb_extruded_surface,
)
def __init__(self):
super().__init__()
self.transformation_matrix_extruded_entity = Matrix44()
self.sweep_entity_transformation_matrix = Matrix44()
self.path_entity_transformation_matrix = Matrix44()
def load_dxf_attribs(
self, processor: SubclassProcessor = None
) -> "DXFNamespace":
dxf = super().load_dxf_attribs(processor)
if processor:
processor.fast_load_dxfattribs(
dxf, acdb_extruded_surface_group_codes, 4, log=False
)
self.load_matrices(processor.subclasses[4])
return dxf
def load_matrices(self, tags: Tags):
self.transformation_matrix_extruded_entity = load_matrix(tags, code=40)
self.sweep_entity_transformation_matrix = load_matrix(tags, code=46)
self.path_entity_transformation_matrix = load_matrix(tags, code=47)
def export_entity(self, tagwriter: "TagWriter") -> None:
"""Export entity specific data as DXF tags."""
# base class export is done by parent class
super().export_entity(tagwriter)
# AcDbEntity export is done by parent class
# AcDbModelerGeometry export is done by parent class
tagwriter.write_tag2(SUBCLASS_MARKER, acdb_extruded_surface.name)
self.dxf.export_dxf_attribs(tagwriter, ["class_id", "sweep_vector"])
export_matrix(
tagwriter,
code=40,
matrix=self.transformation_matrix_extruded_entity,
)
self.dxf.export_dxf_attribs(
tagwriter,
[
"draft_angle",
"draft_start_distance",
"draft_end_distance",
"twist_angle",
"scale_factor",
"align_angle",
],
)
export_matrix(
tagwriter, code=46, matrix=self.sweep_entity_transformation_matrix
)
export_matrix(
tagwriter, code=47, matrix=self.path_entity_transformation_matrix
)
self.dxf.export_dxf_attribs(
tagwriter,
[
"solid",
"sweep_alignment_flags",
"unknown1",
"align_start",
"bank",
"base_point_set",
"sweep_entity_transform_computed",
"path_entity_transform_computed",
"reference_vector_for_controlling_twist",
],
)
acdb_lofted_surface = DefSubclass(
"AcDbLoftedSurface",
{
# 16x group code 40: Transform matrix of loft entity (16 floats;
# row major format; default = identity matrix)
"plane_normal_lofting_type": DXFAttr(70),
"start_draft_angle": DXFAttr(41, default=0.0), # in radians
"end_draft_angle": DXFAttr(42, default=0.0), # in radians
"start_draft_magnitude": DXFAttr(43, default=0.0),
"end_draft_magnitude": DXFAttr(44, default=0.0),
"arc_length_parameterization": DXFAttr(290, default=0), # bool
"no_twist": DXFAttr(291, default=1), # true/false
"align_direction": DXFAttr(292, default=1), # bool
"simple_surfaces": DXFAttr(293, default=1), # bool
"closed_surfaces": DXFAttr(294, default=0), # bool
"solid": DXFAttr(295, default=0), # true/false
"ruled_surface": DXFAttr(296, default=0), # bool
"virtual_guide": DXFAttr(297, default=0), # bool
},
)
acdb_lofted_surface_group_codes = group_code_mapping(acdb_lofted_surface)
@register_entity
class LoftedSurface(Surface):
"""DXF LOFTEDSURFACE entity - container entity for embedded ACIS data."""
DXFTYPE = "LOFTEDSURFACE"
DXFATTRIBS = DXFAttributes(
base_class,
acdb_entity,
acdb_modeler_geometry,
acdb_surface,
acdb_lofted_surface,
)
def __init__(self):
super().__init__()
self.transformation_matrix_lofted_entity = Matrix44()
def load_dxf_attribs(
self, processor: SubclassProcessor = None
) -> "DXFNamespace":
dxf = super().load_dxf_attribs(processor)
if processor:
processor.fast_load_dxfattribs(
dxf, acdb_lofted_surface_group_codes, 4, log=False
)
self.load_matrices(processor.subclasses[4])
return dxf
def load_matrices(self, tags: Tags):
self.transformation_matrix_lofted_entity = load_matrix(tags, code=40)
def export_entity(self, tagwriter: "TagWriter") -> None:
"""Export entity specific data as DXF tags."""
# base class export is done by parent class
super().export_entity(tagwriter)
# AcDbEntity export is done by parent class
# AcDbModelerGeometry export is done by parent class
tagwriter.write_tag2(SUBCLASS_MARKER, acdb_lofted_surface.name)
export_matrix(
tagwriter, code=40, matrix=self.transformation_matrix_lofted_entity
)
self.dxf.export_dxf_attribs(
tagwriter, acdb_lofted_surface.attribs.keys()
)
acdb_revolved_surface = DefSubclass(
"AcDbRevolvedSurface",
{
"class_id": DXFAttr(90, default=0.0),
"axis_point": DXFAttr(10, xtype=XType.point3d),
"axis_vector": DXFAttr(11, xtype=XType.point3d),
"revolve_angle": DXFAttr(40), # in radians
"start_angle": DXFAttr(41), # in radians
# 16x group code 42: Transform matrix of revolved entity (16 floats;
# row major format; default = identity matrix)
"draft_angle": DXFAttr(43), # in radians
"start_draft_distance": DXFAttr(44, default=0),
"end_draft_distance": DXFAttr(45, default=0),
"twist_angle": DXFAttr(46, default=0), # in radians
"solid": DXFAttr(290, default=0), # bool
"close_to_axis": DXFAttr(291, default=0), # bool
},
)
acdb_revolved_surface_group_codes = group_code_mapping(acdb_revolved_surface)
@register_entity
class RevolvedSurface(Surface):
"""DXF REVOLVEDSURFACE entity - container entity for embedded ACIS data."""
DXFTYPE = "REVOLVEDSURFACE"
DXFATTRIBS = DXFAttributes(
base_class,
acdb_entity,
acdb_modeler_geometry,
acdb_surface,
acdb_revolved_surface,
)
def __init__(self):
super().__init__()
self.transformation_matrix_revolved_entity = Matrix44()
def load_dxf_attribs(
self, processor: SubclassProcessor = None
) -> "DXFNamespace":
dxf = super().load_dxf_attribs(processor)
if processor:
processor.fast_load_dxfattribs(
dxf, acdb_revolved_surface_group_codes, 4, log=False
)
self.load_matrices(processor.subclasses[4])
return dxf
def load_matrices(self, tags: Tags):
self.transformation_matrix_revolved_entity = load_matrix(tags, code=42)
def export_entity(self, tagwriter: "TagWriter") -> None:
"""Export entity specific data as DXF tags."""
# base class export is done by parent class
super().export_entity(tagwriter)
# AcDbEntity export is done by parent class
# AcDbModelerGeometry export is done by parent class
tagwriter.write_tag2(SUBCLASS_MARKER, acdb_revolved_surface.name)
self.dxf.export_dxf_attribs(
tagwriter,
[
"class_id",
"axis_point",
"axis_vector",
"revolve_angle",
"start_angle",
],
)
export_matrix(
tagwriter,
code=42,
matrix=self.transformation_matrix_revolved_entity,
)
self.dxf.export_dxf_attribs(
tagwriter,
[
"draft_angle",
"start_draft_distance",
"end_draft_distance",
"twist_angle",
"solid",
"close_to_axis",
],
)
acdb_swept_surface = DefSubclass(
"AcDbSweptSurface",
{
"swept_entity_id": DXFAttr(90),
# 90: size of binary data (lost on saving)
# 310: binary data (lost on saving)
"path_entity_id": DXFAttr(91),
# 90: size of binary data (lost on saving)
# 310: binary data (lost on saving)
# 16x group code 40: Transform matrix of sweep entity (16 floats;
# row major format; default = identity matrix)
# 16x group code 41: Transform matrix of path entity (16 floats;
# row major format; default = identity matrix)
"draft_angle": DXFAttr(42), # in radians
"draft_start_distance": DXFAttr(43, default=0),
"draft_end_distance": DXFAttr(44, default=0),
"twist_angle": DXFAttr(45, default=0), # in radians
"scale_factor": DXFAttr(48, default=1),
"align_angle": DXFAttr(49, default=0), # in radians
        # don't know the meaning of these matrices
# 16x group code 46: Transform matrix of sweep entity (16 floats;
# row major format; default = identity matrix)
# 16x group code 47: Transform matrix of path entity (16 floats;
# row major format; default = identity matrix)
"solid": DXFAttr(290, default=0), # in radians
# 0=No alignment; 1= align sweep entity to path:
"sweep_alignment": DXFAttr(70, default=0),
"unknown1": DXFAttr(71, default=0),
# 2=Translate sweep entity to path; 3=Translate path to sweep entity:
"align_start": DXFAttr(292, default=0), # bool
"bank": DXFAttr(293, default=0), # bool
"base_point_set": DXFAttr(294, default=0), # bool
"sweep_entity_transform_computed": DXFAttr(295, default=0), # bool
"path_entity_transform_computed": DXFAttr(296, default=0), # bool
"reference_vector_for_controlling_twist": DXFAttr(
11, xtype=XType.point3d
),
},
)
acdb_swept_surface_group_codes = group_code_mapping(acdb_swept_surface)
@register_entity
class SweptSurface(Surface):
"""DXF SWEPTSURFACE entity - container entity for embedded ACIS data."""
DXFTYPE = "SWEPTSURFACE"
DXFATTRIBS = DXFAttributes(
base_class,
acdb_entity,
acdb_modeler_geometry,
acdb_surface,
acdb_swept_surface,
)
def __init__(self):
super().__init__()
self.transformation_matrix_sweep_entity = Matrix44()
self.transformation_matrix_path_entity = Matrix44()
self.sweep_entity_transformation_matrix = Matrix44()
self.path_entity_transformation_matrix = Matrix44()
def load_dxf_attribs(
self, processor: SubclassProcessor = None
) -> "DXFNamespace":
dxf = super().load_dxf_attribs(processor)
if processor:
processor.fast_load_dxfattribs(
dxf, acdb_swept_surface_group_codes, 4, log=False
)
self.load_matrices(processor.subclasses[4])
return dxf
def load_matrices(self, tags: Tags):
self.transformation_matrix_sweep_entity = load_matrix(tags, code=40)
self.transformation_matrix_path_entity = load_matrix(tags, code=41)
self.sweep_entity_transformation_matrix = load_matrix(tags, code=46)
self.path_entity_transformation_matrix = load_matrix(tags, code=47)
def export_entity(self, tagwriter: "TagWriter") -> None:
"""Export entity specific data as DXF tags."""
# base class export is done by parent class
super().export_entity(tagwriter)
# AcDbEntity export is done by parent class
# AcDbModelerGeometry export is done by parent class
tagwriter.write_tag2(SUBCLASS_MARKER, acdb_swept_surface.name)
self.dxf.export_dxf_attribs(
tagwriter,
[
"swept_entity_id",
"path_entity_id",
],
)
export_matrix(
tagwriter, code=40, matrix=self.transformation_matrix_sweep_entity
)
export_matrix(
tagwriter, code=41, matrix=self.transformation_matrix_path_entity
)
self.dxf.export_dxf_attribs(
tagwriter,
[
"draft_angle",
"draft_start_distance",
"draft_end_distance",
"twist_angle",
"scale_factor",
"align_angle",
],
)
export_matrix(
tagwriter, code=46, matrix=self.sweep_entity_transformation_matrix
)
export_matrix(
tagwriter, code=47, matrix=self.path_entity_transformation_matrix
)
self.dxf.export_dxf_attribs(
tagwriter,
[
"solid",
"sweep_alignment",
"unknown1",
"align_start",
"bank",
"base_point_set",
"sweep_entity_transform_computed",
"path_entity_transform_computed",
"reference_vector_for_controlling_twist",
],
)
|
|
# Copyright (c) 2001, Stanford University
# All rights reserved.
#
# See the file LICENSE.txt for information on redistributing this software.
import sys, re, string
sys.path.append( "../glapi_parser" )
import apiutil
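# Input file formats (one entry per non-comment line):
#   state_get.txt:            <type> <GL_PNAME> <field> [<field> ...]
#   state_extensions_get.txt: <type> <GL_PNAME> <GL_IFDEF> <field> [<field> ...]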
line_re = re.compile (r'^(\S+)\s+(GL_\S+)\s+(.*)\s*$')
extensions_line_re = re.compile(r'^(\S+)\s+(GL_\S+)\s+(GL_\S+)\s+(.*)\s*$')
params = {}
extended_params = {}
input = open( "state_get.txt", 'r' )
for line in input.readlines():
if line[0] == '#':
continue
match = line_re.match( line )
if match:
type = match.group(1)
pname = match.group(2)
fields = string.split( match.group(3) )
params[pname] = ( type, fields )
input = open( "state_extensions_get.txt", 'r' )
for line in input.readlines():
if line[0] == '#':
continue
match = extensions_line_re.match( line )
if match:
type = match.group(1)
pname = match.group(2)
ifdef = match.group(3)
fields = string.split( match.group(4) )
extended_params[pname] = ( type, ifdef, fields )
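# convert[<source GL type>][<requested return type>] is a "%s"-style template
# used to cast/convert each state field in the generated crStateGet*v()
# functions below.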
convert = {
'GLenum' : {
'Boolean' : '(GLboolean) ( %s != 0 )',
'Double' : '(GLdouble) %s',
'Float' : '(GLfloat) %s',
'Integer' : '(GLint) %s'
},
'GLboolean' : {
'Boolean' : '(GLboolean) ( %s != 0 )',
'Double' : '(GLdouble) %s',
'Float' : '(GLfloat) %s',
'Integer' : '(GLint) %s'
},
'GLint' : {
'Boolean' : '(GLboolean) ( %s != 0 )',
'Double' : '(GLdouble) %s',
'Float' : '(GLfloat) %s',
'Integer' : '(GLint) %s'
},
'GLuint' : {
'Boolean' : '(GLboolean) ( %s != 0 )',
'Double' : '(GLdouble) %s',
'Float' : '(GLfloat) %s',
'Integer' : '(GLint) %s'
},
'GLfloat' : {
'Boolean' : '(GLboolean) ( %s != 0.0f )',
'Double' : '(GLdouble) %s',
'Float' : '%s',
'Integer' : '(GLint) %s'
},
'GLdouble' : {
'Boolean' : '(GLboolean) ( %s != 0.0 )',
'Double' : '%s',
'Float' : '(GLfloat) %s',
'Integer' : '(GLint) %s'
},
'GLdefault' : {
'Boolean' : '(GLboolean) ( %s != (GLdefault) 0.0 )',
'Double' : '(GLdouble) %s',
'Float' : '(GLfloat) %s',
'Integer' : '(GLint) %s'
},
'GLclampd' : {
'Boolean' : '(GLboolean) ( %s != 0.0 )',
'Double' : '%s',
'Float' : '(GLfloat) %s',
'Integer' : '__clampd_to_int(%s)'
},
'GLclampf' : {
'Boolean' : '(GLboolean) ( %s != 0.0f )',
'Double' : '(GLdouble) %s',
'Float' : '%s',
'Integer' : '__clampf_to_int(%s)'
}
}
types = [ "Boolean", "Double", "Float", "Integer" ]
ctypes = {
'Boolean' : 'GLboolean',
'Double' : 'GLdouble',
'Float' : 'GLfloat',
'Integer' : 'GLint'
}
apiutil.CopyrightC()
print """
/* DO NOT EDIT - THIS FILE GENERATED BY state_get.txt AND THE state_get.py SCRIPT */
#include <stdio.h>
#include <math.h>
#include "state.h"
#include "state/cr_statetypes.h"
static GLint __clampd_to_int( GLdouble d )
{
/* -1.0 -> MIN_INT, 1.0 -> MAX_INT */
if ( d > 1.0 )
return 0x7fffffff;
if ( d < -1.0 )
return 0x80000000;
return (GLint) floor( d * 2147483647.5 );
}
static GLint __clampf_to_int( GLfloat f )
{
/* -1.0f -> MIN_INT, 1.0f -> MAX_INT */
if ( f > 1.0f )
return 0x7fffffff;
if ( f < -1.0f )
return 0x80000000;
return (GLint) floor( f * 2147483647.5f );
}
"""
header = """
{
CRContext *g = GetCurrentContext();
if (g->current.inBeginEnd)
{
crStateError(__LINE__, __FILE__, GL_INVALID_OPERATION,
"glGet called in Begin/End");
return;
}
if ( pname == GL_CURRENT_INDEX || pname == GL_CURRENT_COLOR ||
pname == GL_CURRENT_SECONDARY_COLOR_EXT ||
pname == GL_CURRENT_FOG_COORDINATE_EXT ||
pname == GL_CURRENT_NORMAL || pname == GL_EDGE_FLAG ||
pname == GL_CURRENT_TEXTURE_COORDS )
{
#if 0
crStateError(__LINE__,__FILE__, GL_INVALID_OPERATION,
"Unimplemented glGet of a 'current' value" );
#else
crStateCurrentRecover();/* &g->current, &sb->current, g->bitID );*/
#endif
}
switch ( pname ) {
"""
for rettype in types:
print ''
print 'void STATE_APIENTRY crStateGet%sv( GLenum pname, %s *params )' % ( rettype, ctypes[rettype] )
print header
keys = params.keys()
keys.sort()
for pname in keys:
print '\t\tcase %s:' % pname
(srctype,fields) = params[pname]
try:
cvt = convert[srctype][rettype]
i = 0
for field in fields:
expr = cvt % field
print '\t\t\tparams[%d] = %s;' % (i,expr)
i += 1
except:
print '\t\t\tcrStateError(__LINE__,__FILE__,GL_INVALID_OPERATION, "Unimplemented glGet!");'
print "\t\t\tbreak;"
keys = extended_params.keys();
keys.sort()
for pname in keys:
(srctype,ifdef,fields) = extended_params[pname]
ext = ifdef[3:] # the extension name with the "GL_" prefix removed
#print '#ifdef %s' % ifdef
print '#ifdef CR_%s' % ext
print '\t\tcase %s:' % pname
if ext != 'OPENGL_VERSION_1_2':
print '\t\t\tif (g->extensions.%s) {' % ext
try:
cvt = convert[srctype][rettype]
i = 0
for field in fields:
expr = cvt % field
if field[0] == '%':
command = string.split(field, '%')
print '\t\t\t\t%s;' % command[1]
continue
elif ext != 'OPENGL_VERSION_1_2':
print '\t\t\t\tparams[%d] = %s;' % (i,expr)
else:
print '\t\t\tparams[%d] = %s;' % (i,expr)
i += 1
except:
print '\t\t\tcrStateError(__LINE__,__FILE__,GL_INVALID_OPERATION, "Unimplemented glGet!");'
if ext != 'OPENGL_VERSION_1_2':
print "\t\t\t}"
print "\t\t\telse {"
print '\t\t\t\tcrStateError(__LINE__,__FILE__,GL_INVALID_ENUM, "glGet%sv");' % rettype
print "\t\t\t}"
print "\t\t\tbreak;"
#print '#endif /* %s */' % ifdef
print '#endif /* CR_%s */' % ext
print '\t\tdefault:'
print '\t\t\tcrStateError(__LINE__, __FILE__, GL_INVALID_ENUM, "glGet: Unknown enum: 0x%x", pname);'
print '\t\t\treturn;'
print '\t}'
print '}'
|
|
import re
import os.path as path
from twisted.words.protocols import irc
from twisted.internet import defer
from twisted.internet import protocol
from twisted.internet import reactor
from twisted.internet.interfaces import ISSLTransport
from twisted.python.util import InsensitiveDict
from mk2.plugins import Plugin
from mk2.events import PlayerChat, PlayerJoin, PlayerQuit, PlayerDeath, ServerOutput, ServerStopping, ServerStarting, StatPlayers, Hook
try:
from OpenSSL import SSL
from twisted.internet import ssl
have_ssl = True
class Mark2ClientContextFactory(ssl.ClientContextFactory):
def __init__(self, parent, fingerprint=None, cert=None):
self.parent = parent
self.fingerprint = fingerprint
self.cert = path.expanduser(cert) if cert else None
@staticmethod
def stripfp(fp):
return fp.replace(':', '').lower()
def verify(self, conn, cert, errno, errdepth, rc):
ok = self.stripfp(cert.digest("sha1")) == self.stripfp(self.fingerprint)
if self.parent and self.parent.factory.reconnect and not ok:
self.parent.console("irc: server certificate verification failed")
self.parent.factory.reconnect = False
return ok
def getContext(self):
ctx = ssl.ClientContextFactory.getContext(self)
if self.fingerprint:
ctx.set_verify(SSL.VERIFY_PEER, self.verify)
if self.cert:
ctx.use_certificate_file(self.cert)
ctx.use_privatekey_file(self.cert)
return ctx
except:
have_ssl = False
class IRCUser(object):
username = ""
hostname = ""
status = ""
oper = False
away = False
def __init__(self, parent, nick):
self.parent = parent
self.nick = nick
@property
def priority(self):
p = self.parent.priority
if self.status:
return min([p[s] for s in self.status])
else:
return None
class SASLExternal(object):
name = "EXTERNAL"
def __init__(self, username, password):
pass
def is_valid(self):
return True
def respond(self, data):
return ""
class SASLPlain(object):
name = "PLAIN"
def __init__(self, username, password):
self.response = "{0}\0{0}\0{1}".format(username, password)
def is_valid(self):
return self.response != "\0\0"
def respond(self, data):
if data:
return False
return self.response
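# Sketch of the PLAIN exchange with hypothetical credentials: SASLPlain("alice", "s3cret")
# builds the response "alice\0alice\0s3cret", which sasl_send() base64-encodes into the
# AUTHENTICATE payload; SASLExternal responds with an empty string, sent as "AUTHENTICATE +".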
SASL_MECHANISMS = (SASLExternal, SASLPlain)
class IRCBot(irc.IRCClient):
sasl_buffer = ""
sasl_result = None
sasl_login = None
def __init__(self, factory, plugin):
self.factory = factory
self.nickname = plugin.nickname.encode('ascii')
self.realname = plugin.realname.encode('ascii')
self.username = plugin.ident.encode('ascii')
self.ns_username = plugin.username
self.ns_password = plugin.password
self.password = plugin.server_password.encode('ascii')
self.channel = plugin.channel.encode('ascii')
self.key = plugin.key.encode('ascii')
self.console = plugin.console
self.irc_message = plugin.irc_message
self.irc_action = plugin.irc_action
self.irc_chat_status = plugin.irc_chat_status
self.mangle_username = plugin.mangle_username
self.users = InsensitiveDict()
self.cap_requests = set()
def register(self, nickname, hostname="foo", servername="bar"):
self.sendLine("CAP LS")
return irc.IRCClient.register(self, nickname, hostname, servername)
def sendLine(self, line):
irc.IRCClient.sendLine(self, line.encode('ascii', 'replace'))
def _parse_cap(self, cap):
mod = ''
while cap[0] in "-~=":
mod, cap = mod + cap[0], cap[1:]
if '/' in cap:
vendor, cap = cap.split('/', 1)
else:
vendor = None
return (cap, mod, vendor)
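# Examples of the assumed IRCv3 token shapes: _parse_cap("sasl") -> ("sasl", "", None),
# _parse_cap("-vendor.example/foo") -> ("foo", "-", "vendor.example").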
def request_cap(self, *caps):
self.cap_requests |= set(caps)
self.sendLine("CAP REQ :{0}".format(' '.join(caps)))
@defer.inlineCallbacks
def end_cap(self):
if self.sasl_result:
yield self.sasl_result
self.sendLine("CAP END")
def irc_CAP(self, prefix, params):
self.supports_cap = True
identifier, subcommand, args = params
args = args.split(' ')
if subcommand == "LS":
self.sasl_start(args)
if not self.cap_requests:
self.sendLine("CAP END")
elif subcommand == "ACK":
ack = []
for cap in args:
if not cap:
continue
cap, mod, vendor = self._parse_cap(cap)
if '-' in mod:
if cap in self.capabilities:
del self.capabilities[cap]
continue
self.cap_requests.remove(cap)
if cap == 'sasl':
self.sasl_next()
if ack:
self.sendLine("CAP ACK :{0}".format(' '.join(ack)))
if not self.cap_requests:
self.end_cap()
elif subcommand == "NAK":
# this implementation is probably not compliant but it will have to do for now
for cap in args:
self.cap_requests.remove(cap)
if not self.cap_requests:
self.end_cap()
def signedOn(self):
if ISSLTransport.providedBy(self.transport):
cert = self.transport.getPeerCertificate()
fp = cert.digest("sha1")
verified = "verified" if self.factory.parent.server_fingerprint else "unverified"
self.console("irc: connected securely. server fingerprint: {0} ({1})".format(fp, verified))
else:
self.console("irc: connected")
if self.ns_username and self.ns_password and not self.sasl_login:
self.msg('NickServ', 'IDENTIFY {0} {1}'.format(self.ns_username, self.ns_password))
self.join(self.channel, self.key)
def irc_JOIN(self, prefix, params):
nick = prefix.split('!')[0]
channel = params[-1]
if nick == self.nickname:
self.joined(channel)
else:
self.userJoined(prefix, channel)
def joined(self, channel):
self.console('irc: joined channel')
self.factory.client = self
def who(a):
self.sendLine("WHO " + channel)
self.factory.parent.repeating_task(who, 30, now=True)
def isupport(self, args):
self.compute_prefix_names()
def compute_prefix_names(self):
KNOWN_NAMES = {"o": "op", "h": "halfop", "v": "voice"}
prefixdata = self.supported.getFeature("PREFIX", {"o": ("@", 0), "v": ("+", 1)}).items()
op_priority = ([priority for mode, (prefix, priority) in prefixdata if mode == "o"] + [None])[0]
self.prefixes, self.statuses, self.priority = {}, {}, {}
for mode, (prefix, priority) in prefixdata:
name = "?"
if mode in KNOWN_NAMES:
name = KNOWN_NAMES[mode]
elif priority == 0:
if op_priority == 2:
name = "owner"
else:
name = "admin"
else:
name = "+" + mode
self.prefixes[mode] = prefix
self.statuses[prefix] = name
self.priority[name] = priority
self.priority[mode] = priority
self.priority[prefix] = priority
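# Example trace, assuming the server advertises PREFIX=(qaohv)~&@%+: 'q' maps to "owner"
# (because 'o' sits at priority 2), 'o'/'h'/'v' map to "op"/"halfop"/"voice", and 'a'
# falls through to the generic "+a" name.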
def parse_prefixes(self, user, nick, prefixes=''):
status = []
prefixdata = self.supported.getFeature("PREFIX", {"o": ("@", 0), "v": ("+", 1)}).items()
for mode, (prefix, priority) in prefixdata:
if prefix in prefixes + nick:
nick = nick.replace(prefix, '')
status.append((prefix, priority))
if nick == self.nickname:
return
user.status = ''.join(t[0] for t in sorted(status, key=lambda t: t[1]))
def irc_RPL_WHOREPLY(self, prefix, params):
_, channel, username, host, server, nick, status, hg = params
if nick == self.nickname:
return
hops, gecos = hg.split(' ', 1)
user = IRCUser(self, nick)
user.username = username
user.hostname = host
user.oper = '*' in status
user.away = status[0] == 'G'
self.users[nick] = user
self.parse_prefixes(user, nick, status[1:].replace('*', ''))
def modeChanged(self, user, channel, _set, modes, args):
args = list(args)
if channel.lower() != self.channel.lower():
return
for m, arg in zip(modes, args):
if m in self.prefixes and arg != self.nickname:
u = self.users.get(arg, None)
if u:
u.status = u.status.replace(self.prefixes[m], '')
if _set:
u.status = ''.join(sorted(list(u.status + self.prefixes[m]),
key=lambda k: self.priority[k]))
def has_status(self, nick, status):
if status != 0 and not status:
return True
if status not in self.priority:
return False
priority = self.priority[status]
u = self.users.get(nick, None)
return u and (u.priority is not None) and u.priority <= priority
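# Lower priority numbers mean higher rank, so e.g. has_status("Notch", "op") is True only
# when that (hypothetical) user's best prefix has a priority number no greater than op's;
# an empty/None required status always passes.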
def userJoined(self, user, channel):
nick = user.split('!')[0]
user = IRCUser(self, nick)
self.users[nick] = user
def userRenamed(self, oldname, newname):
if oldname not in self.users:
return
u = self.users[oldname]
u.nick = newname
self.users[newname] = u
del self.users[oldname]
def userLeft(self, user, channel):
if user not in self.users:
return
del self.users[user]
def userKicked(self, kickee, channel, kicker, message):
if kickee not in self.users:
return
del self.users[kickee]
def userQuit(self, user, quitMessage):
if user not in self.users:
return
del self.users[user]
def privmsg(self, user, channel, msg):
if channel != self.channel:
return
if '!' not in user:
return
nick = user.split('!')[0]
p = self.factory.parent
if not self.has_status(nick, self.irc_chat_status):
return
if p.irc_players_enabled and msg.lower() == p.irc_command_prefix + "players":
self.say(self.channel, p.irc_players_format.format(
players=', '.join(map(self.mangle_username, p.players))))
elif p.irc_command_prefix and msg.startswith(p.irc_command_prefix) and p.irc_command_status and self.has_status(nick, p.irc_command_status):
argv = msg[len(p.irc_command_prefix):].split(' ')
command = argv[0]
if command.startswith('~'):
if p.irc_command_mark2 and (command.lower() in p.irc_command_allow.lower().split(',') or p.irc_command_allow == '*'):
p.dispatch(Hook(line=' '.join(argv)))
else:
if command.lower() in p.irc_command_allow.lower().split(',') or p.irc_command_allow == '*':
p.send(' '.join(argv))
else:
self.irc_message(nick, msg)
def action(self, user, channel, msg):
self.console("%s %s %s" % (user, channel, msg))
if channel != self.channel:
return
if '!' not in user:
return
nick = user.split('!')[0]
if self.has_status(nick, self.irc_chat_status):
self.irc_action(nick, msg)
def irc_AUTHENTICATE(self, prefix, params):
self.sasl_continue(params[0])
def sasl_send(self, data):
while data and len(data) >= 400:
en, data = data[:400].encode('base64').replace('\n', ''), data[400:]
self.sendLine("AUTHENTICATE " + en)
if data:
self.sendLine("AUTHENTICATE " + data.encode('base64').replace('\n', ''))
else:
self.sendLine("AUTHENTICATE +")
def sasl_start(self, cap_list):
if 'sasl' not in cap_list:
print cap_list
return
self.request_cap('sasl')
self.sasl_result = defer.Deferred()
self.sasl_mechanisms = list(SASL_MECHANISMS)
def sasl_next(self):
mech = None
while not mech or not mech.is_valid():
if not self.sasl_mechanisms:
return False
self.sasl_auth = mech = self.sasl_mechanisms.pop(0)(self.ns_username, self.ns_password)
self.sendLine("AUTHENTICATE " + self.sasl_auth.name)
return True
def sasl_continue(self, data):
if data == '+':
data = ''
else:
data = data.decode('base64')
if len(data) == 400:
self.sasl_buffer += data
else:
response = self.sasl_auth.respond(self.sasl_buffer + data)
if response is False: # abort
self.sendLine("AUTHENTICATE *")
else:
self.sasl_send(response)
self.sasl_buffer = ""
def sasl_finish(self):
if self.sasl_result:
self.sasl_result.callback(True)
self.sasl_result = None
def sasl_failed(self, whine=True):
if self.sasl_login is False:
return
if self.sasl_next():
return
self.sasl_login = False
self.sendLine("AUTHENTICATE *")
self.sasl_finish()
if whine:
self.console("irc: failed to log in.")
def irc_904(self, prefix, params):
print params
self.sasl_failed()
def irc_905(self, prefix, params):
print params
self.sasl_failed()
def irc_906(self, prefix, params):
self.sasl_failed(False)
def irc_907(self, prefix, params):
self.sasl_failed(False)
def irc_900(self, prefix, params):
self.sasl_login = params[2]
self.console("irc: logged in as '{0}' (using {1})".format(self.sasl_login, self.sasl_auth.name))
def irc_903(self, prefix, params):
self.sasl_finish()
def alterCollidedNick(self, nickname):
return nickname + '_'
def irc_relay(self, message):
self.say(self.channel, message.encode('utf8'))
class IRCBotFactory(protocol.ClientFactory):
protocol = IRCBot
client = None
reconnect = True
def __init__(self, parent):
self.parent = parent
def clientConnectionLost(self, connector, reason):
if self.reconnect:
self.parent.console("irc: lost connection with server: %s" % reason.getErrorMessage())
self.parent.console("irc: reconnecting...")
connector.connect()
def clientConnectionFailed(self, connector, reason):
self.parent.console("irc: connection attempt failed: %s" % reason.getErrorMessage())
def buildProtocol(self, addr):
p = IRCBot(self, self.parent)
return p
def irc_relay(self, message):
if self.client:
self.client.irc_relay(message)
class IRC(Plugin):
#connection
host = Plugin.Property(required=True)
port = Plugin.Property(required=True)
server_password = Plugin.Property()
channel = Plugin.Property(required=True)
key = Plugin.Property()
certificate = Plugin.Property()
ssl = Plugin.Property(default=False)
server_fingerprint = Plugin.Property()
#user
nickname = Plugin.Property(default="RelayBot")
realname = Plugin.Property(default="mark2 IRC relay")
ident = Plugin.Property(default="RelayBot")
username = Plugin.Property(default="")
password = Plugin.Property(default="")
#general
cancel_highlight = Plugin.Property(default=False, type_=False)
cancel_highlight_str = Plugin.Property(default=u"_")
#game -> irc settings
game_columns = Plugin.Property(default=True)
game_status_enabled = Plugin.Property(default=True)
game_status_format = Plugin.Property(default=u"!, | server {what}.")
game_chat_enabled = Plugin.Property(default=True)
game_chat_format = Plugin.Property(default=u"{username}, | {message}")
game_chat_private = Plugin.Property(default=None)
game_join_enabled = Plugin.Property(default=True)
game_join_format = Plugin.Property(default=u"*, | --> {username}")
game_quit_enabled = Plugin.Property(default=True)
game_quit_format = Plugin.Property(default=u"*, | <-- {username}")
game_death_enabled = Plugin.Property(default=True)
game_death_format = Plugin.Property(default=u"*, | {text}")
game_server_message_enabled = Plugin.Property(default=True)
game_server_message_format = Plugin.Property(default=u"#server, | {message}")
#bukkit only
game_me_enabled = Plugin.Property(default=True)
game_me_format = Plugin.Property(default=u"*, | {username} {message}")
#irc -> game settings
irc_chat_enabled = Plugin.Property(default=True)
irc_chat_command = Plugin.Property(default=u"say [IRC] <{nickname}> {message}")
irc_action_command = Plugin.Property(default=u"say [IRC] * {nickname} {message}")
irc_chat_status = Plugin.Property(default=None)
irc_command_prefix = Plugin.Property(default="!")
irc_command_status = Plugin.Property(default=None)
irc_command_allow = Plugin.Property(default="")
irc_command_mark2 = Plugin.Property(default=False)
irc_players_enabled = Plugin.Property(default=True)
irc_players_format = Plugin.Property(default=u"*, | players currently in game: {players}")
def setup(self):
self.players = []
self.factory = IRCBotFactory(self)
if self.ssl:
if have_ssl:
cf = Mark2ClientContextFactory(self,
cert=self.certificate,
fingerprint=self.server_fingerprint)
reactor.connectSSL(self.host, self.port, self.factory, cf)
else:
self.parent.console("Couldn't load SSL for IRC!")
return
else:
reactor.connectTCP(self.host, self.port, self.factory)
if self.game_status_enabled:
self.register(self.handle_stopping, ServerStopping)
self.register(self.handle_starting, ServerStarting)
self.column_width = 16
if self.cancel_highlight == "insert":
self.column_width += len(self.cancel_highlight_str)
def register(event_type, format, filter_=None, *a, **k):
def handler(event, format):
d = event.match.groupdict() if hasattr(event, 'match') else event.serialize()
if filter_ and 'message' in d:
if filter_.match(d['message']):
return
if self.cancel_highlight and 'username' in d and d['username'] in self.factory.client.users:
d['username'] = self.mangle_username(d['username'])
line = self.format(format, **d)
self.factory.irc_relay(line)
self.register(lambda e: handler(e, format), event_type, *a, **k)
if self.game_chat_enabled:
if self.game_chat_private:
try:
filter_ = re.compile(self.game_chat_private)
register(PlayerChat, self.game_chat_format, filter_=filter_)
except:
self.console("plugin.irc.game_chat_private must be a valid regex")
register(PlayerChat, self.game_chat_format)
else:
register(PlayerChat, self.game_chat_format)
if self.game_join_enabled:
register(PlayerJoin, self.game_join_format)
if self.game_quit_enabled:
register(PlayerQuit, self.game_quit_format)
if self.game_death_enabled:
def handler(event):
d = event.serialize()
for k in 'username', 'killer':
if k in d and d[k] and d[k] in self.factory.client.users:
d[k] = self.mangle_username(d[k])
text = event.get_text(**d)
line = self.format(self.game_death_format, text=text)
self.factory.irc_relay(line)
self.register(handler, PlayerDeath)
if self.game_server_message_enabled and not (self.irc_chat_enabled and self.irc_chat_command.startswith('say ')):
register(ServerOutput, self.game_server_message_format, pattern=r'\[(?:Server|SERVER)\] (?P<message>.+)')
if self.game_me_enabled:
register(ServerOutput, self.game_me_format, pattern=r'\* (?P<username>[A-Za-z0-9_]{1,16}) (?P<message>.+)')
if self.irc_chat_enabled:
self.register(self.handle_players, StatPlayers)
def teardown(self):
self.factory.reconnect = False
if self.factory.client:
self.factory.client.quit("Plugin unloading.")
def mangle_username(self, username):
if not self.cancel_highlight:
return username
elif self.cancel_highlight == "insert":
return username[:-1] + self.cancel_highlight_str + username[-1:]
else:
return self.cancel_highlight_str + username[1:]
def format(self, format, **data):
if self.game_columns:
f = unicode(format).split(',', 1)
f[0] = f[0].format(**data)
if len(f) == 2:
f[0] = f[0].rjust(self.column_width)
f[1] = f[1].format(**data)
return ''.join(f)
else:
return format.format(**data)
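# Example with game_columns enabled: game_chat_format u"{username}, | {message}" renders
# roughly as "           Notch | hello": the part before the first comma is right-justified
# to column_width characters and the remainder is appended as-is ("Notch" is hypothetical).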
def handle_starting(self, event):
self.factory.irc_relay(self.format(self.game_status_format, what="starting"))
def handle_stopping(self, event):
self.factory.irc_relay(self.format(self.game_status_format, what="stopping"))
def handle_players(self, event):
self.players = sorted(event.players)
def irc_message(self, user, message):
if self.irc_chat_enabled:
self.send_format(self.irc_chat_command, nickname=user, message=message)
def irc_action(self, user, message):
if self.irc_chat_enabled:
self.console("{} {}".format(user, message))
self.send_format(self.irc_action_command, nickname=user, message=message)
|
|
# Copyright 2011 James McCauley
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
This is the main OpenFlow module.
Along with libopenflow, this is the major part of the OpenFlow API in POX.
There are a number of Events, which are generally raised on core.openflow
as well as on individual switch Connections. Many of these events have at
least some of the following properties:
.connection - a reference to the switch connection that caused the event
.dpid - the DPID of the switch that caused the event
.ofp - the OpenFlow message that caused the event (from libopenflow)
One of the more complicated aspects of OpenFlow is dealing with stats
replies, which may come in multiple parts (it shouldn't be that
difficult, really, but that hasn't stopped it from being handled wrong
more than once). In POX, the raw events are available, but you will
generally just want to listen to the aggregate stats events which take
care of this for you and are only fired when all data is available.
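A minimal listening sketch (not part of this module; it assumes the usual POX launch
conventions and only uses the public core.openflow event API):
  from pox.core import core
  log = core.getLogger()
  def _handle_PacketIn (event):
    packet = event.parsed                    # pox.lib.packet ethernet object
    log.info("packet in on port %s of switch %s", event.port, event.dpid)
  def launch ():
    core.openflow.addListenerByName("PacketIn", _handle_PacketIn)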
NOTE: This module is usually automatically loaded by pox.py
"""
from pox.lib.revent import *
from pox.lib.util import dpidToStr
import libopenflow_01 as of
from pox.lib.packet.ethernet import ethernet
class ConnectionHandshakeComplete (Event):
"""
Event when a switch handshake completes
Fired immediately before ConnectionUp
"""
def __init__ (self, connection):
self.connection = connection
self.dpid = connection.dpid
class ConnectionUp (Event):
"""
Raised when a connection to a switch has been established.
"""
def __init__ (self, connection, ofp):
self.connection = connection
self.dpid = connection.dpid
self.ofp = ofp
class FeaturesReceived (Event):
"""
Raised upon receipt of an ofp_switch_features message
This generally happens as part of a connection automatically.
"""
def __init__ (self, connection, ofp):
self.connection = connection
self.dpid = connection.dpid
self.ofp = ofp
class ConnectionDown (Event):
"""
Raised when a connection to a switch has been lost.
"""
def __init__ (self, connection):
self.connection = connection
self.dpid = connection.dpid
class PortStatus (Event):
"""
Fired in response to port status changes.
added (bool) - True if fired because a port was added
deleted (bool) - True if fired because a port was deleted
modified (bool) - True if fired because a port was modified
port (int) - number of port in question
"""
def __init__ (self, connection, ofp):
self.connection = connection
self.dpid = connection.dpid
self.ofp = ofp
self.modified = ofp.reason == of.OFPPR_MODIFY
self.added = ofp.reason == of.OFPPR_ADD
self.deleted = ofp.reason == of.OFPPR_DELETE
self.port = ofp.desc.port_no
class PortStats (Event):
def __init__ (self,connection,ofp):
Event.__init__(self)
self.connection = connection
self.dpid = connection.dpid
self.ofp = ofp
class FlowRemoved (Event):
"""
Raised when a flow entry has been removed from a flow table.
This may either be because of a timeout or because it was removed
explicitly.
Properties:
idleTimeout (bool) - True if expired because of idleness
hardTimeout (bool) - True if expired because of hard timeout
timeout (bool) - True if either of the above is true
deleted (bool) - True if deleted explicitly
"""
def __init__ (self, connection, ofp):
self.connection = connection
self.dpid = connection.dpid
self.ofp = ofp
self.idleTimeout = False
self.hardTimeout = False
self.deleted = False
self.timeout = False
if ofp.reason == of.OFPRR_IDLE_TIMEOUT:
self.timeout = True
self.idleTimeout = True
elif ofp.reason == of.OFPRR_HARD_TIMEOUT:
self.timeout = True
self.hardTimeout = True
elif ofp.reason == of.OFPRR_DELETE:
self.deleted = True
class RawStatsReply (Event):
def __init__ (self, connection, ofp):
self.connection = connection
self.ofp = ofp # Raw ofp message(s)
@property
def dpid (self):
return self.connection.dpid
class StatsReply (Event):
"""
Abstract superclass for all stats replies
"""
def __init__ (self, connection, ofp, stats):
self.connection = connection
self.ofp = ofp # Raw ofp message(s)
self.stats = stats # Processed
@property
def dpid (self):
return self.connection.dpid
class SwitchDescReceived (StatsReply):
pass
class FlowStatsReceived (StatsReply):
pass
class AggregateFlowStatsReceived (StatsReply):
pass
class TableStatsReceived (StatsReply):
pass
class PortStatsReceived (StatsReply):
pass
class QueueStatsReceived (StatsReply):
pass
class PacketIn (Event):
"""
Fired in response to PacketIn events
port (int) - number of port the packet came in on
data (bytes) - raw packet data
parsed (packet subclasses) - pox.lib.packet's parsed version
"""
def __init__ (self, connection, ofp):
self.connection = connection
self.ofp = ofp
self.port = ofp.in_port
self.data = ofp.data
self._parsed = None
self.dpid = connection.dpid
def parse (self):
if self._parsed is None:
self._parsed = ethernet(self.data)
return self._parsed
@property
def parsed (self):
"""
The packet as parsed by pox.lib.packet
"""
return self.parse()
class ErrorIn (Event):
def __init__ (self, connection, ofp):
self.connection = connection
self.ofp = ofp
self.xid = ofp.xid
self.dpid = connection.dpid
self.should_log = True # If this remains True, an error will be logged
def asString (self):
return self.ofp.show()
# def lookup (m, v):
# if v in m:
# return str(m[v])
# else:
# return "Unknown/" + str(v)
#
# #TODO: The show() in ofp_error actually does some clever
# # stuff now to stringize error messages. Refactor that and the
# # (less clever) code below.
# s = 'Type: ' + lookup(of.ofp_error_type_map, self.ofp.type)
# s += ' Code: '
#
# responses = {
# of.OFPET_HELLO_FAILED : of.ofp_hello_failed_code,
# of.OFPET_BAD_REQUEST : of.ofp_bad_request_code,
# of.OFPET_BAD_ACTION : of.ofp_bad_action_code,
# of.OFPET_FLOW_MOD_FAILED : of.ofp_flow_mod_failed_code,
# of.OFPET_PORT_MOD_FAILED : of.ofp_port_mod_failed_code,
# of.OFPET_QUEUE_OP_FAILED : of.ofp_queue_op_failed_code,
# }
#
# if self.ofp.type in responses:
# s += lookup(responses[self.ofp.type],self.ofp.code)
# else:
# s += "Unknown/" + str(self.ofp.code)
# if self.ofp.type == of.OFPET_HELLO_FAILED:
# s += lookup(of.ofp_hello_failed_code, self.ofp.type)
#
# return s
class BarrierIn (Event):
"""
Fired in response to a barrier reply
xid (int) - XID of barrier request
"""
def __init__ (self, connection, ofp):
self.connection = connection
self.ofp = ofp
self.dpid = connection.dpid
self.xid = ofp.xid
class ConnectionIn (Event):
def __init__ (self, connection):
super(ConnectionIn,self).__init__()
self.connection = connection
self.dpid = connection.dpid
self.nexus = None
class ConfigurationReceived (Event):
"""
Fired in response to OFPT_GET_CONFIG_REPLY
"""
def __init__ (self, connection, ofp):
self.connection = connection
self.ofp = ofp
self.dpid = connection.dpid
self.xid = ofp.xid
@property
def flags (self):
return self.ofp.flags
@property
def miss_send_len (self):
return self.ofp.miss_send_len
@property
def drop_fragments (self):
return (self.ofp.flags & of.OFPC_FRAG_MASK) == of.OFPC_FRAG_DROP
@property
def reassemble_fragments (self):
return (self.ofp.flags & of.OFPC_FRAG_MASK) == of.OFPC_FRAG_REASM
class OpenFlowConnectionArbiter (EventMixin):
"""
Determines which OpenFlowNexus gets the switch.
Default implementation always just gives it to core.openflow
"""
_eventMixin_events = set([
ConnectionIn,
])
def __init__ (self, default = False):
""" default as False causes it to always use core.openflow """
self._default = default
self._fallback = None
def getNexus (self, connection):
e = ConnectionIn(connection)
self.raiseEventNoErrors(e)
if e.nexus is None:
e.nexus = self._default
if e.nexus is False:
if self._fallback is None:
try:
from pox.core import core
self._fallback = core.openflow
except:
raise RuntimeError("No OpenFlow nexus for new connection")
e.nexus = self._fallback
return e.nexus
class ConnectionDict (dict):
def __iter__ (self):
return self.itervalues()
def __contains__ (self, item):
v = dict.__contains__(self, item)
if v: return v
return item in self.values()
@property
def dpids (self):
return self.keys()
def iter_dpids (self):
return self.iterkeys()
class OpenFlowNexus (EventMixin):
"""
Main point of OpenFlow interaction.
There is usually just one instance of this class, registered as
core.openflow. Most OpenFlow events fire here in addition to on their
specific connections.
"""
_eventMixin_events = set([
ConnectionHandshakeComplete,
ConnectionUp,
ConnectionDown,
FeaturesReceived,
PortStatus,
PacketIn,
BarrierIn,
ErrorIn,
RawStatsReply,
SwitchDescReceived,
FlowStatsReceived,
AggregateFlowStatsReceived,
TableStatsReceived,
PortStatsReceived,
QueueStatsReceived,
FlowRemoved,
ConfigurationReceived,
PortStats,
])
# Bytes to send to controller when a packet misses all flows
miss_send_len = of.OFP_DEFAULT_MISS_SEND_LEN
# Enable/Disable clearing of flows on switch connect
clear_flows_on_connect = True
def __init__ (self):
self._connections = ConnectionDict() # DPID -> Connection
from pox.core import core
self.listenTo(core)
@property
def connections (self):
return self._connections
def getConnection (self, dpid):
"""
Get the Connection object associated with a DPID.
"""
return self._connections.get(dpid, None)
def sendToDPID (self, dpid, data):
"""
Send data to a specific DPID.
"""
if dpid in self._connections:
self._connections[dpid].send(data)
return True
else:
import logging
log = logging.getLogger("openflow")
log.warn("Couldn't send to %s because we're not connected to it!" %
(dpidToStr(dpid),))
return False
def _handle_DownEvent (self, event):
for c in self._connections.values():
try:
c.disconnect()
except:
pass
def _connect (self, con):
self._connections[con.dpid] = con
def _disconnect (self, dpid):
if dpid in self._connections:
del self._connections[dpid]
return True
return False
def _launch (default_arbiter=True):
from pox.core import core
if default_arbiter:
core.registerNew(OpenFlowConnectionArbiter)
core.register("openflow", OpenFlowNexus())
def launch (default_arbiter=True):
from pox.core import core
if core.hasComponent("openflow"):
return
return _launch(default_arbiter)
|
|
# -*- coding: utf-8 -*-
import mechanize
import cookielib
import error
import models
import urllib2
import codecs
import datetime
import re
from lxml import etree
class MCMApi(object):
base = 'https://www.magiccardmarket.eu/'
def __init__(self, username, password):
self.username = username
self.password = password
self.br = mechanize.Browser()
self.cj = cookielib.LWPCookieJar()
self.br.set_cookiejar(self.cj)
self.br.set_handle_equiv(True)
#self.br.set_handle_gzip(True)
self.br.set_handle_redirect(True)
self.br.set_handle_referer(True)
self.br.set_handle_robots(False)
# follows refresh 0 but doesn't hang on refresh > 0
self.br.set_handle_refresh(mechanize._http.HTTPRefreshProcessor(), max_time=1)
self.br.addheaders = [('User-agent', 'Mozilla/5.0 (X11; U; Linux i686; es-VE; rv:1.9.0.1)Gecko/2008071615 Debian/6.0 Firefox/9')]
# debug
#self.br.set_debug_http(True)
#self.br.set_debug_redirects(True)
#self.br.set_debug_responses(True)
self.br.open(self.base)
def login(self):
self.br.select_form(predicate=lambda f: 'class' in f.attrs and f.attrs['class'] == 'loginForm')
self.br['username'] = self.username
self.br['userPassword'] = self.password
self.br.submit()
r = self.br.response().read()
m = re.search(r"The password you entered was not correct", r)
if m:
raise error.LoginError()
def get_wants_list(self):
link = self.br.find_link(url="/?mainPage=showWants")
self.br.follow_link(link)
# read wants lists
wants_lists = []
utf8_parser = etree.HTMLParser(encoding='utf-8')
tree = etree.fromstring(self.br.response().read().decode('utf-8'), parser=utf8_parser)
for wlnode in tree.xpath('//select[@name="sw_WantsListID"]/option'):
name = re.search(r"([\w\s]+) ", wlnode.text)
wl = models.WantList(int(wlnode.attrib['value']), name.group(1))
# read the cards wanted in this list
self.br.open(self.base + wl.url())
tree = etree.fromstring(self.br.response().read().decode('utf-8'), parser=utf8_parser)
for wantnode in tree.xpath('//table[contains(@class, "wantsTable")]/tbody/tr'):
node = wantnode.xpath('td[3]/a')[0]
card = models.Card(node.attrib['href'], node.text)
node = wantnode.xpath('td[11]')[0]
want = models.Want(card, int(node.text))
wl.wants.append(want)
wants_lists.append(wl)
return wants_lists
def get_cart(self):
utf8_parser = etree.HTMLParser(encoding='utf-8')
tree = etree.fromstring(self.br.response().read().decode('utf-8'), parser=utf8_parser)
node = tree.xpath('//*[@id="sc_menuhub"]')
if node:
m = re.search("\((\d+) articles", node[0].text)
na = 0
if m:
na = int(m.group(1))
if na == 0:
return models.Cart()
link = self.br.find_link(url="/?mainPage=showShoppingCart")
self.br.follow_link(link)
tree = etree.fromstring(self.br.response().read().decode('utf-8'), parser=utf8_parser)
# create cart
c = models.Cart(tree.xpath('//*[@id="sc_hashCode"]/@value')[0])
# ships
for shipnode in tree.xpath('//div[@class="sc_ShipTable"]'):
# id
shipid = int(shipnode.xpath('div/@id')[0].split('_')[-1])
# hash
bhash = shipnode.xpath('.//button/@onclick')[0]
m = re.search("jcp\('([^']+)'", bhash)
if m:
bhash = m.group(1)
# summary
sumarynode = shipnode.xpath('.//table[@class="nestedContent"]')[2]
# read seller
node = sumarynode.xpath('.//a[contains(@href, "showSellerChart")]')[1]
seller = models.Seller(id=node.attrib['href'], name=node.text)
# ship
s = models.Ship(shipid, bhash, c, seller)
# shipping
node = sumarynode.xpath('tr[6]/td[2]/text()')[0]
m = re.search("([\d,]+) ", node)
if m:
s.shipping = float(m.group(1).replace(',', '.'))
# shipping method
node = None
for tr in sumarynode.xpath('tr'):
td = tr.xpath('td/text()')[0]
if td.find('Shipping Method') != -1:
node = tr
if node is not None:
m = re.search("\(([\w\s]+)\)", etree.tostring(node))
if m:
s.shipping_method = m.group(1)
# items
for item in shipnode.xpath('.//form[contains(@name, "itemViewForm")]/table/tbody/tr'):
idcard = item.xpath('td[2]/a/@href')[0]
namecard = item.xpath('td[2]/a/text()')[0]
pricecard = item.xpath('td[9]/text()')[0]
m = re.search("([\d,]+) ", pricecard)
if m:
pricecard = float(m.group(1).replace(',', '.'))
langcard = item.xpath('td[5]/a/span/@onmouseover')[0]
m = re.search("\('([\w\s]+)'\)", langcard)
if m:
langcard = m.group(1)
expansion = item.xpath('td[3]/span/@onmouseover')[0]
m = re.search("\('([\w\s]+)'\)", expansion)
if m:
expansion = m.group(1)
condition = item.xpath('td[6]/a/img/@onmouseover')[0]
m = re.search("\('([\w\s]+)'\)", condition)
if m:
condition = m.group(1)
quantity = int(item.xpath('td[2]/text()')[0][0:-2])
card = models.Card(idcard, namecard)
cardarticle = models.CardArticle(card, pricecard, langcard, expansion, condition, quantity)
s.articles.append(cardarticle)
c.ships.append(s)
return c
def remove_ship_from_cart(self, ship):
if not ship.id:
return
url = "{0}iajax.php".format(self.base)
referer = "{0}?mainPage=showShoppingCart".format(self.base)
hashremove = ship.hash + urllib2.quote("{0},{1}".format(ship.id, ship.cart.hash), safe='~@#$&()*!+=:;,.?/\'')
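# hashremove reproduces the jcp('<hash>', ...) onclick handler captured in get_cart():
# the ship's hash followed by the URL-quoted "<shipId>,<cartHash>" pair, posted as "args".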
self._create_ajax_request(url, referer, "args=" + hashremove)
def search(self, query, lang='en'):
pagenow = 0
npages = None
utf8_parser = etree.HTMLParser(encoding='utf-8')
while pagenow <= npages or npages is None:
print "PAGE: {0}/{1}".format(pagenow, npages)
self.br.open("{0}?mainPage=showSearchResult&searchFor={1}&resultsPage={2}".format(self.base, query, pagenow))
tree = etree.fromstring(self.br.response().read().decode('utf-8'), parser=utf8_parser)
# number of pages
if npages is None:
href = tree.xpath('//*[@id="siteContents"]/div/div[1]/span[3]/a[2]/@href')
npages = 1
if len(href):
m = re.search('resultsPage=(\d+)', href[0])
npages = int(m.group(1)) + 1
# search table
tree = tree.xpath("//table[contains(@class, 'SearchTable')]/tbody")
if len(tree) == 0:
return
tree = tree[0]
# rows
rows = tree.xpath("tr[contains(@class, 'row_')]")
for row in rows:
result = {'img': '', 'expansion': '', 'rarity': '', 'name': '', 'id': '', 'category': '', 'available': '', 'from': 0}
data = row.xpath("td[1]//img/@onmouseover")
if data:
m = re.search("'(.+?)'", data[0])
result['img'] = m.group(1)
data = row.xpath("td[2]/span/@onmouseover")
if data:
m = re.search("'(.+?)'", data[0])
result['expansion'] = m.group(1)
data = row.xpath("td[3]/img/@onmouseover")
if data:
m = re.search("'(.+?)'", data[0])
result['rarity'] = m.group(1)
data = row.xpath("td[5]/a")
if data:
result['id'] = data[0].attrib['href']
result['name'] = data[0].text
data = row.xpath("td[6]")
if data:
result['category'] = data[0].text
data = row.xpath("td[7]")
if data:
result['available'] = int(data[0].text)
data = row.xpath("td[8]")
if data:
if data[0].text == u"N/A":
result['price_from'] = 0
else:
m = re.search("(\d+,\d+) ", data[0].text)
result['price_from'] = float(m.group(1).replace(',', '.'))
c = models.Card(result['id'], name=result['name'], img=result['img'])
yield models.SearchResult(c, result['expansion'], result['rarity'], result['category'], result['available'], result['price_from'])
# next page
pagenow += 1
def list_prices(self, card, filters={}):
self.br.open(card.url())
utf8_parser = etree.HTMLParser(encoding='utf-8')
tree = etree.fromstring(self.br.response().read().decode('utf-8'), parser=utf8_parser)
tree = tree.xpath('//table[contains(@class, "specimenTable")]')[0]
results = []
for cardnode in tree.xpath('tbody/tr'):
node = cardnode.xpath('td[2]/span/span[1]/a')[0]
sellerid = node.attrib['href']
sellertext = node.text
node = cardnode.xpath('td[2]/span/span[2]/span/@onmouseover')[0]
m = re.search('location: ([\w\s]+)', node)
sellerlang = m.group(1)
node = cardnode.xpath('td[2]/span/span[3]/img/@onmouseover')[0]
m = re.search("'([\w\s]+)'", node)
sellerclass = m.group(1) if m else 'warning'
s = models.Seller(sellerid, sellertext, country=sellerlang, cls=sellerclass)
node = cardnode.xpath('td[3]/span/@onmouseover')[0]
m = re.search("'([\w\s]+)'", node)
expansion = m.group(1)
node = cardnode.xpath('td[5]/a/span/@onmouseover')[0]
m = re.search("'([\w\s-]+)'", node)
lang = m.group(1)
node = cardnode.xpath('td[6]/a/img/@onmouseover')[0]
m = re.search("'([\w\s]+)'", node)
condition = m.group(1)
node = cardnode.xpath('td[9]/text()')[0]
m = re.search("([\d,]+) ", node)
price = float(m.group(1).replace(',', '.'))
node = cardnode.xpath('td[10]/text()')[0]
quantity = int(node)
node = cardnode.xpath('td[11]//input[@type="image"]/@value')[0]
idprice = int(node)
results.append(models.PriceCard(idprice, card, s, expansion, lang, condition, price, quantity))
return results
def add_to_cart(self, pricecard, amount=1):
if pricecard.available < 1:
return False
self.br.open(pricecard.card.url())
self.br.select_form(predicate=lambda f: 'name' in f.attrs and f.attrs['name'].find('itemViewForm') != -1)
form = None
for f in self.br.forms():
if 'name' in f.attrs and f.attrs['name'].find('itemViewForm') != -1:
form = f
break
if not form:
return False
if amount > pricecard.available:
amount = pricecard.available
self.br.submit('putSingleArticleInCart{0}'.format(pricecard.id))
def get_my_articles(self):
link = self.br.find_link(url_regex="browseUserProducts")
self.br.follow_link(link)
utf8_parser = etree.HTMLParser(encoding='utf-8')
tree = etree.fromstring(self.br.response().read().decode('utf-8'), parser=utf8_parser)
raise NotImplementedError()
def _create_ajax_request(self, url, referer, data):
req = mechanize.Request(url, data=data)
req.add_header("User-Agent", "Mozilla/5.0 (X11; U; Linux i686; es-VE; rv:1.9.0.1)Gecko/2008071615 Debian/6.0 Firefox/9")
req.add_header("Referer", referer)
self.cj.add_cookie_header(req)
return mechanize.urlopen(req)
def _page(self):
text = self.br.response().read().decode('utf-8')
t = datetime.datetime.now()
with codecs.open("last{0}{1}{2}_{3}{4}{5}.html".format(t.year, t.month, t.day, t.hour, t.minute, t.second), "w", "utf-8") as f:
f.write(text)
def _node_text(self, node):
from lxml.etree import tostring
return tostring(node)
if __name__ == '__main__':
from pprint import pprint
mcm = MCMApi(username='foo', password='bar')
mcm.login()
# my articles (get_my_articles is not implemented yet)
mcm.get_my_articles()
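# Further usage sketches (hypothetical; they require a valid account against the live site):
#   pprint(mcm.get_wants_list())
#   for result in mcm.search('Lightning Bolt'):
#       pprint(result)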
|
|
from django.db import models
from formatChecker import ContentTypeRestrictedFileField
class taxonomy(models.Model):
kingdom = models.CharField(max_length=255)
kingdomTaxonomy = models.IntegerField()
group = models.CharField(max_length=255) #fungi has no group info
# virus has family, bacteria has genus
family = models.CharField(max_length=255)
familyTaxonomy = models.IntegerField()
genus = models.CharField(max_length=255)
genusTaxonomy = models.IntegerField()
species = models.CharField(max_length=255)
speciesTaxonomy = models.IntegerField()
strain = models.CharField(max_length=255) #used in preview
pubmedId = models.CharField(max_length=255) #used to generate the gea tree and heatmap article tree
def __unicode__(self): # Python 3: def __str__(self):
return str(self.species);
#used to precalculate the common gene list
class heatmapModel(models.Model):
a = models.CharField(max_length=255)
b = models.CharField(max_length=255)
level = models.CharField(max_length=255)
commonGeneNumber = models.IntegerField()
commonGeneList = models.TextField()
def __unicode__(self): # Python 3: def __str__(self):
return str(self.a)+":"+str(self.b);
#not necessary, but can be used to simplify the id field of tree
class idNameMap(models.Model):
acc = models.CharField(max_length=255) # taxonomy, pubmed, gram, baltimore id
type = models.CharField(max_length=255) # type, kingdom, species, gram, baltimore
name = models.CharField(max_length=255) # corresponding name
#used to store the network analysis user provided data
class networkModel(models.Model):
geneList = models.TextField(blank=True)
file = ContentTypeRestrictedFileField(upload_to='networkFiles/', content_types=['text/plain', ''], max_upload_size=5242880, blank=True, null=True)  # 5242880 bytes == 5 MB
class vtpModel(models.Model):
geneSymbol = models.CharField(max_length=255)
proteinName = models.CharField(max_length=255)
uniprotId = models.CharField(max_length=255)
resources = models.CharField(max_length=255)
virusTaxid = models.CharField(max_length=255)
virusName = models.CharField(max_length=255)
note = models.CharField(max_length=255)
def __unicode__(self): # Python 3: def __str__(self):
return str(self.geneSymbol+' '+self.virusName);
class gwasOriginal(models.Model):
acc = models.CharField(max_length=255)
dataAdded = models.TimeField()
pubmedId = models.CharField(max_length=255)
firstAuthor = models.CharField(max_length=255)
date = models.TimeField()
journal = models.CharField(max_length=255)
link = models.CharField(max_length=255)
study = models.CharField(max_length=255)
disease = models.CharField(max_length=255)
initialSampleSize = models.CharField(max_length=255)
replicationSampleSize = models.CharField(max_length=255)
cytogeneticLoc = models.CharField(max_length=255)
chrId = models.CharField(max_length=255)
charPos = models.CharField(max_length=255)
reportedGene = models.CharField(max_length=255)
mappedGene = models.CharField(max_length=255)
upstreamGeneId = models.CharField(max_length=255)
downstreamGeneId = models.CharField(max_length=255)
snpGeneId = models.CharField(max_length=255)
upstreamGeneDistance = models.FloatField()
downstreamGeneDistance = models.FloatField()
strongSnpAllele = models.CharField(max_length=255)
snps = models.CharField(max_length=255)
merged = models.IntegerField()
snpIdCurrent = models.IntegerField()
context = models.CharField(max_length=255)
interGenetic = models.IntegerField()
riskAlleleFreq = models.FloatField()
pvalue = models.FloatField()
pvalueMLog = models.FloatField()
pvalueText = models.CharField(max_length=255)
orOrBeta = models.CharField(max_length=255)
ci = models.CharField(max_length=255)
platform = models.CharField(max_length=255)
cnv = models.CharField(max_length=255)
def __unicode__(self): # Python 3: def __str__(self):
return str(self.disease+' '+self.mappedGene);
#add species info in gwas
class gwas(models.Model):
acc = models.IntegerField(primary_key=True)
species = models.CharField(max_length=255)
speciesTaxonomy = models.CharField(max_length=255)
dataAdded = models.TimeField()
pubmedId = models.CharField(max_length=255)
firstAuthor = models.CharField(max_length=255)
date = models.TimeField()
journal = models.CharField(max_length=255)
link = models.CharField(max_length=255)
study = models.CharField(max_length=255)
disease = models.CharField(max_length=255)
initialSampleSize = models.CharField(max_length=255)
replicationSampleSize = models.CharField(max_length=255)
cytogeneticLoc = models.CharField(max_length=255)
chrId = models.CharField(max_length=255)
charPos = models.CharField(max_length=255)
reportedGene = models.CharField(max_length=255)
mappedGene = models.CharField(max_length=255)
upstreamGeneId = models.CharField(max_length=255)
downstreamGeneId = models.CharField(max_length=255)
snpGeneId = models.CharField(max_length=255)
upstreamGeneDistance = models.FloatField()
downstreamGeneDistance = models.FloatField()
strongSnpAllele = models.CharField(max_length=255)
snps = models.CharField(max_length=255)
merged = models.IntegerField()
snpIdCurrent = models.IntegerField()
context = models.CharField(max_length=255)
interGenetic = models.IntegerField()
riskAlleleFreq = models.FloatField()
pvalue = models.FloatField()
pvalueMLog = models.FloatField()
pvalueText = models.CharField(max_length=255)
orOrBeta = models.CharField(max_length=255)
ci = models.CharField(max_length=255)
platform = models.CharField(max_length=255)
cnv = models.CharField(max_length=255)
def __unicode__(self): # Python 3: def __str__(self):
return str(self.disease+' '+self.mappedGene);
#model for drug
class drugModel(models.Model):
acc = models.CharField(max_length=255)
geneSymbol = models.CharField(max_length=255)
hgncId = models.CharField(max_length=255)
uniprotId = models.CharField(max_length=255)
proteinName = models.CharField(max_length=255)
drugbankId = models.CharField(max_length=255)
drugName = models.CharField(max_length=255)
drugType = models.CharField(max_length=255)
def __unicode__(self): # Python 3: def __str__(self):
return str(self.geneSymbol+' '+self.drugbankId);
#the drug model above does not contain pathogen species info, so we generate this model with a mysql view
#select `mainview`.`species` AS `species`,`mainview`.`speciesTaxonomy` AS `speciesTaxonomy`,`mainview`.`strain` AS `strain`,`analysis_drugmodel`.`geneSymbol` AS `geneSymbol`,`analysis_drugmodel`.`hgncId` AS `hgncId`,`analysis_drugmodel`.`uniprotId` AS `uniprotId`,`analysis_drugmodel`.`proteinName` AS `proteinName`,`analysis_drugmodel`.`drugbankId` AS `drugbankId`,`analysis_drugmodel`.`drugName` AS `drugName`,`analysis_drugmodel`.`drugType` AS `drugType` from (`mainview` join `analysis_drugmodel`) where (`mainview`.`humanHomolog` = `analysis_drugmodel`.`geneSymbol`) order by `mainview`.`species`
class drugModelWithInt(models.Model):
acc = models.IntegerField(primary_key=True)
species = models.CharField(max_length=255)
speciesTaxonomy = models.CharField(max_length=255)
strain = models.CharField(max_length=255)
geneSymbol = models.CharField(max_length=255)
hgncId = models.CharField(max_length=255)
uniprotId = models.CharField(max_length=255)
proteinName = models.CharField(max_length=255)
drugbankId = models.CharField(max_length=255)
drugName = models.CharField(max_length=255)
drugType = models.CharField(max_length=255)
drugGroup = models.CharField(max_length=255)
def __unicode__(self): # Python 3: def __str__(self):
return str(self.species+' '+self.geneSymbol+' '+self.drugbankId+' '+self.drugGroup);
#ppi from HPRD database
class ppi(models.Model):
geneSymbol1 = models.CharField(max_length=50)
hprdId1 = models.CharField(max_length=10)
refseqId1 = models.CharField(max_length=50)
geneSymbol2 = models.CharField(max_length=50)
hprdId2 = models.CharField(max_length=10)
refseqId2 = models.CharField(max_length=50)
expType = models.CharField(max_length=50)
pubmedId = models.CharField(max_length=255)
def __unicode__(self): # Python 3: def __str__(self):
return str(self.geneSymbol1+' '+self.geneSymbol2+' '+self.expType+' '+self.pubmedId);
#statistics page, the table is filled in the view, see details there
class overlapStatistics(models.Model):
geneSymbol = models.CharField(max_length=50)
speciesNumber = models.IntegerField()
speciesList = models.TextField()
def __unicode__(self): # Python 3: def __str__(self):
return str(self.geneSymbol+' '+str(self.speciesNumber)+' '+self.speciesList);
#for distribution page, the table is filled in view
class overlapDistribution(models.Model):
pathogenNumber = models.IntegerField()
geneNumber = models.IntegerField()
geneList = models.TextField()
type = models.CharField(max_length=20) #primary or confirmed(no primary)
'''
Author: Yang Liu
Date: 20140925
Function: data models for annotations retrieved from DAVID.
We request pairwise text files from DAVID that map gene symbols to related GO (BP, CC, MF) and pathway (BBID, KEGG, PANTHER, REACTOME) information, as well as the related DAVID gene names.
'''
#gene symbol to gene name annotated by DAVID
# not accurate, use our own annotations (e.g. IL6RL1 is a previous name in DAVID)
class geneSymbolToDavidGeneName(models.Model):
geneSymbol = models.CharField(max_length=255)
davidGeneName = models.CharField(max_length=255)
def __unicode__(self): # Python 3: def __str__(self):
return str(self.geneSymbol+':'+self.davidGeneName);
#gene symbol to GO BP
class geneSymbolToGOBP(models.Model):
geneSymbol = models.CharField(max_length=255)
gobp = models.CharField(max_length=255)
gobpAnnotation = models.CharField(max_length=255)
def __unicode__(self): # Python 3: def __str__(self):
return str(self.geneSymbol+':'+self.gobp+':'+self.gobpAnnotation);
#gene symbol to GO CC
class geneSymbolToGOCC(models.Model):
geneSymbol = models.CharField(max_length=255)
gocc = models.CharField(max_length=255)
goccAnnotation = models.CharField(max_length=255)
def __unicode__(self): # Python 3: def __str__(self):
return str(self.geneSymbol+':'+self.gocc+':'+self.goccAnnotation);
#gene symbol to GO MF
class geneSymbolToGOMF(models.Model):
geneSymbol = models.CharField(max_length=255)
gomf = models.CharField(max_length=255)
gomfAnnotation = models.CharField(max_length=255)
def __unicode__(self): # Python 3: def __str__(self):
return str(self.geneSymbol+':'+self.gomf+':'+self.gomfAnnotation);
#gene symbol to BBID Pathway, we can link to BBID website simply by the id provided
class geneSymbolToPathwayBBID(models.Model):
geneSymbol = models.CharField(max_length=255)
BBID = models.CharField(max_length=255)
def __unicode__(self): # Python 3: def __str__(self):
return str(self.geneSymbol+':'+self.BBID);
#gene symbol to KEGG pathway, the kegg pathway id and description are separated, we can link to kegg website
class geneSymbolToPathwayKEGG(models.Model):
geneSymbol = models.CharField(max_length=255)
KEGG = models.CharField(max_length=255)
KEGGAnnotation = models.CharField(max_length=255)
def __unicode__(self): # Python 3: def __str__(self):
return str(self.geneSymbol+':'+self.KEGG+':'+self.KEGGAnnotation);
#gene symbol to PANTHER pathway
class geneSymbolToPathwayPANTHER(models.Model):
geneSymbol = models.CharField(max_length=255)
PANTHER = models.CharField(max_length=255)
PANTHERAnnotation = models.CharField(max_length=255)
def __unicode__(self): # Python 3: def __str__(self):
return str(self.geneSymbol+':'+self.PANTHER+':'+self.PANTHERAnnotation);
#genesymbol to REACTOME pathway
class geneSymbolToPathwayREACTOME(models.Model):
geneSymbol = models.CharField(max_length=255)
REACTOME = models.CharField(max_length=255)
REACTOMEAnnotation = models.CharField(max_length=255)
def __unicode__(self): # Python 3: def __str__(self):
return str(self.geneSymbol+':'+self.REACTOME+':'+self.REACTOMEAnnotation);
|
|
"""
Object oriented (read-only) access to CLDF data
To read ORM objects from a `pycldf.Dataset`, use two methods
* `pycldf.Dataset.objects`
* `pycldf.Dataset.get_object`
Both will return default implementations of the objects, i.e. instances of the corresponding
class defined in this module. To customize these objects,
1. subclass the default and specify \
the appropriate component (i.e. the table of the CLDF dataset which holds rows to be transformed\
to this type):
.. code-block:: python
from pycldf.orm import Language
class Variety(Language):
__component__ = 'LanguageTable'
def custom_method(self):
pass
2. pass the class into the `objects` or `get_object` method.
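A minimal sketch of option 2, assuming a dataset with a LanguageTable and the `cls`
keyword accepted by both methods:
.. code-block:: python
    dataset.objects('LanguageTable', cls=Variety)
    dataset.get_object('LanguageTable', 'some-language-id', cls=Variety)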
Limitations:
------------
* We only support foreign key constraints for CLDF reference properties targeting either a \
component's CLDF id or its primary key. This is because CSVW does not support unique constraints \
other than the one implied by the primary key declaration.
* This functionality comes with the typical "more convenient API vs. less performance and bigger \
memory footprint" trade-off. If you are running into problems with this, you might want to load \
your data into a SQLite db using the `pycldf.db` module, and access via SQL. \
Some numbers (to be interpreted relative to each other): \
Reading ~400,000 rows from a ValueTable of a StructureDataset takes
* ~2secs with csvcut, i.e. only making sure it's valid CSV
* ~15secs iterating over pycldf.Dataset['ValueTable']
* ~35secs iterating over pycldf.Dataset.objects('ValueTable')
"""
import argparse
import collections
import typing
import csvw.metadata
from tabulate import tabulate
from clldutils.misc import lazyproperty
from pycldf.terms import TERMS, term_uri
from pycldf.util import DictTuple
from pycldf.sources import Reference
class Object:
"""
Represents a row of a CLDF component table.
Subclasses of `Object` are instantiated when calling `Dataset.objects` or `Dataset.get_object`.
:ivar dataset: Reference to the `Dataset` instance, this object was loaded from.
:ivar data: An `OrderedDict` with a copy of the row the object was instantiated with.
:ivar cldf: A `dict` with CLDF-specified properties of the row, keyed with CLDF terms.
:ivar id: The value of the CLDF id property of the row.
:ivar name: The value of the CLDF name property of the row.
:ivar description: The value of the CLDF description property of the row.
:ivar pk: The value of the column specified as primary key for the table. (May differ from id)
"""
# If a subclass name can not be used to derive the CLDF component name, the component can be
# specified here:
__component__ = None
def __init__(self, dataset, row: dict):
# Get a mapping of column names to pairs (CLDF property name, list-valued) for columns
# present in the component specified by class name.
cldf_cols = {
v[0]: (k, v[1])
for k, v in vars(getattr(dataset.readonly_column_names, self.component)).items()
if v}
self._listvalued = set(v[0] for v in cldf_cols.values() if v[1])
self.cldf = {}
self.data = collections.OrderedDict()
for k, v in row.items():
# We go through the items of the row and slot them into the appropriate bags:
self.data[k] = v
if k in cldf_cols:
self.cldf[cldf_cols[k][0]] = v
# Make cldf properties accessible as attributes:
self.cldf = argparse.Namespace(**self.cldf)
self.dataset = dataset
self.id = self.cldf.id
self.pk = None
t = dataset[self.component_name()]
if t.tableSchema.primaryKey and len(t.tableSchema.primaryKey) == 1:
self.pk = self.data[dataset[self.component_name()].tableSchema.primaryKey[0]]
self.name = getattr(self.cldf, 'name', None)
self.description = getattr(self.cldf, 'description', None)
def __repr__(self):
return '<{}.{} id="{}">'.format(self.__class__.__module__, self.__class__.__name__, self.id)
@classmethod
def component_name(cls) -> str:
return cls.__component__ or (cls.__name__ + 'Table')
@property
def component(self) -> str:
"""
Name of the CLDF component the object belongs to. Can be used to lookup the corresponding \
table via `obj.dataset[obj.component_name()]`.
"""
return self.__class__.component_name()
@property
def key(self):
return id(self.dataset), self.__class__.__name__, self.id
def __hash__(self):
return hash(self.key)
def __eq__(self, other):
if isinstance(other, Object):
return self.key == other.key
return NotImplemented # pragma: no cover
def _expand_uritemplate(self, attr, col):
"""
CSVW cells can specify various URI templates which must be expanded supplying the full
row as context. Thus, expansion is available as a method on this row object.
"""
col = self.dataset[self.component, col]
variables = {k: v for k, v in vars(self.cldf).items()}
variables.update(self.data)
if getattr(col, attr, None):
return getattr(col, attr).expand(**variables)
def aboutUrl(self, col='id') -> typing.Union[str, None]:
"""
The table's `aboutUrl` property, expanded with the object's row as context.
"""
return self._expand_uritemplate('aboutUrl', col)
def valueUrl(self, col='id'):
"""
The table's `valueUrl` property, expanded with the object's row as context.
"""
return self._expand_uritemplate('valueUrl', col)
def propertyUrl(self, col='id'):
"""
The table's `propertyUrl` property, expanded with the object's row as context.
"""
return self._expand_uritemplate('propertyUrl', col)
@lazyproperty
def references(self) -> typing.Tuple[Reference]:
"""
`pycldf.Reference` instances associated with the object.
>>> obj.references[0].source['title']
>>> obj.references[0].fields.title
>>> obj.references[0].description # The "context", typically cited pages
"""
return DictTuple(
self.dataset.sources.expand_refs(getattr(self.cldf, 'source', []) or []),
key=lambda r: r.source.id,
multi=True,
)
def related(self, relation: str):
"""
The CLDF ontology specifies several "reference properties". This method returns the first
related object specified by such a property.
:param relation: a CLDF reference property name.
:return: related `Object` instance.
"""
if relation in self._listvalued:
raise ValueError(
'{} is list-valued, use `all_related` to retrieve related objects'.format(relation))
fk = getattr(self.cldf, relation, None)
if fk:
ref = self.dataset.get_foreign_key_reference(self.component_name(), relation)
if ref:
if str(ref[1].propertyUrl) == term_uri('id'):
return self.dataset.get_object(TERMS[relation].references, fk)
if [ref[1].name] == self.dataset[TERMS[relation].references].tableSchema.primaryKey:
return self.dataset.get_object(TERMS[relation].references, fk, pk=True)
raise NotImplementedError('pycldf does not support foreign key constraints '
'referencing columns other than CLDF id or primary key.')
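# For example, value.related('languageReference') resolves the languageReference foreign
# key to a single Language object (the _With*Mixin properties below build on this), while
# list-valued reference properties must go through all_related() instead.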
def all_related(self, relation: str) -> typing.Union[DictTuple, list]:
"""
CLDF reference properties can be list-valued. This method returns all related objects for
such a property.
"""
fks = getattr(self.cldf, relation, None)
if fks and not isinstance(fks, list):
fks = [fks]
if fks:
return DictTuple(self.dataset.get_object(TERMS[relation].references, fk) for fk in fks)
return []
class _WithLanguageMixin:
@property
def language(self):
return self.related('languageReference')
@property
def languages(self):
return self.all_related('languageReference')
class _WithParameterMixin:
@lazyproperty
def parameter(self):
return self.related('parameterReference')
@property
def parameters(self):
return self.all_related('parameterReference')
class Borrowing(Object):
@property
def targetForm(self):
return self.related('targetFormReference')
@property
def sourceForm(self):
return self.related('sourceFormReference')
class Code(Object, _WithParameterMixin):
pass
class Cognateset(Object):
pass
class Cognate(Object):
@property
def form(self):
return self.related('formReference')
@property
def cognateset(self):
return self.related('cognatesetReference')
class Entry(Object, _WithLanguageMixin):
@property
def senses(self):
return DictTuple(v for v in self.dataset.objects('SenseTable') if self in v.entries)
class Example(Object, _WithLanguageMixin):
@property
def metaLanguage(self):
return self.related('metaLanguageReference')
@property
def igt(self):
return '{0}\n{1}\n{2}'.format(
self.cldf.primaryText,
tabulate([self.cldf.gloss], self.cldf.analyzedWord, tablefmt='plain'),
self.cldf.translatedText,
)
class Form(Object, _WithLanguageMixin, _WithParameterMixin):
pass
class FunctionalEquivalentset(Object):
pass
class FunctionalEquivalent(Object):
@property
def form(self): # pragma: no cover
return self.related('formReference')
class Language(Object):
@property
def lonlat(self):
"""
:return: (longitude, latitude) pair
"""
if hasattr(self.cldf, 'latitude'):
return (self.cldf.longitude, self.cldf.latitude)
@property
def as_geojson_feature(self):
if self.lonlat:
return {
"type": "Feature",
"geometry": {"type": "Point", "coordinates": list(self.lonlat)},
"properties": self.cldf,
}
@property
def values(self):
return DictTuple(v for v in self.dataset.objects('ValueTable') if self in v.languages)
@property
def forms(self):
return DictTuple(v for v in self.dataset.objects('FormTable') if self in v.languages)
def glottolog_languoid(self, glottolog_api):
"""
Get a Glottolog languoid associated with the `Language`.
:param glottolog_api: `pyglottolog.Glottolog` instance or `dict` mapping glottocodes to \
`pyglottolog.langoids.Languoid` instances.
:return: `pyglottolog.langoids.Languoid` instance or `None`.
"""
if isinstance(glottolog_api, dict):
return glottolog_api.get(self.cldf.glottocode)
return glottolog_api.languoid(self.cldf.glottocode)
class Parameter(Object):
@lazyproperty
def datatype(self):
if 'datatype' in self.data \
and self.dataset['ParameterTable', 'datatype'].datatype.base == 'json':
if self.data['datatype']:
return csvw.metadata.Datatype.fromvalue(self.data['datatype'])
@property
def values(self):
return DictTuple(v for v in self.dataset.objects('ValueTable') if self in v.parameters)
@property
def forms(self):
return DictTuple(v for v in self.dataset.objects('FormTable') if self in v.parameters)
def concepticon_conceptset(self, concepticon_api):
"""
Get a Concepticon conceptset associated with the `Parameter`.
:param concepticon_api: `pyconcepticon.Concepticon` instance or `dict` mapping conceptset \
IDs to `pyconcepticon.models.Conceptset` instances.
:return: `pyconcepticon.models.Conceptset` instance or `None`.
"""
if isinstance(concepticon_api, dict):
return concepticon_api.get(self.cldf.concepticonReference)
return concepticon_api.conceptsets.get(self.cldf.concepticonReference)
class Sense(Object):
@property
def entry(self):
return self.related('entryReference')
@property
def entries(self):
return self.all_related('entryReference')
class Value(Object, _WithLanguageMixin, _WithParameterMixin):
@property
def typed_value(self):
if self.parameter.datatype:
return self.parameter.datatype.read(self.cldf.value)
return self.cldf.value
@property
def code(self):
return self.related('codeReference')
@property
def examples(self):
return self.all_related('exampleReference')
class Contribution(Object):
pass
class Media(Object):
@property
def downloadUrl(self):
if hasattr(self.cldf, 'downloadUrl'):
return self.cldf.downloadUrl
return self.valueUrl()
|
|
# -*- coding: utf-8 -*-
"""
unit tests for the InfluxDBClient.
NB/WARNING:
This module implements tests for the InfluxDBClient class, but does so
+ without any server instance running
+ by mocking all the expected responses.
So any change in the server's response format will **NOT** be
detected by this module.
See client_test_with_server.py for tests against a running server instance.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import json
import requests
import requests.exceptions
import socket
import time
import requests_mock
import random
from nose.tools import raises
from mock import patch
import warnings
import mock
import unittest
from influxdb import InfluxDBClient, InfluxDBClusterClient
from influxdb.client import InfluxDBServerError
def _build_response_object(status_code=200, content=""):
resp = requests.Response()
resp.status_code = status_code
resp._content = content.encode("utf8")
return resp
def _mocked_session(cli, method="GET", status_code=200, content=""):
method = method.upper()
def request(*args, **kwargs):
c = content
# Check method
assert method == kwargs.get('method', 'GET')
if method == 'POST':
data = kwargs.get('data', None)
if data is not None:
# Data must be a string
assert isinstance(data, str)
# Data must be a JSON string
assert c == json.loads(data, strict=True)
c = data
# Anyway, Content must be a JSON string (or empty string)
if not isinstance(c, str):
c = json.dumps(c)
return _build_response_object(status_code=status_code, content=c)
mocked = patch.object(
cli._session,
'request',
side_effect=request
)
return mocked
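# Sketch of how the helper above is meant to be used (the pattern the tests
# below follow): the client's session is patched so no real HTTP request is
# made and the canned content is returned instead.
#
#     cli = InfluxDBClient('host', 8086, 'username', 'password')
#     with _mocked_session(cli, 'get', 200, json.dumps(data)):
#         cli.get_list_database()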
class TestInfluxDBClient(unittest.TestCase):
def setUp(self):
# By default, raise exceptions on warnings
warnings.simplefilter('error', FutureWarning)
self.cli = InfluxDBClient('localhost', 8086, 'username', 'password')
self.dummy_points = [
{
"measurement": "cpu_load_short",
"tags": {
"host": "server01",
"region": "us-west"
},
"time": "2009-11-10T23:00:00.123456Z",
"fields": {
"value": 0.64
}
}
]
self.dsn_string = 'influxdb://uSr:[email protected]:1886/db'
def test_scheme(self):
cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
self.assertEqual('http://host:8086', cli._baseurl)
cli = InfluxDBClient(
'host', 8086, 'username', 'password', 'database', ssl=True
)
self.assertEqual('https://host:8086', cli._baseurl)
def test_dsn(self):
cli = InfluxDBClient.from_DSN('influxdb://192.168.0.1:1886')
self.assertEqual('http://192.168.0.1:1886', cli._baseurl)
cli = InfluxDBClient.from_DSN(self.dsn_string)
self.assertEqual('http://my.host.fr:1886', cli._baseurl)
self.assertEqual('uSr', cli._username)
self.assertEqual('pWd', cli._password)
self.assertEqual('db', cli._database)
self.assertFalse(cli.use_udp)
cli = InfluxDBClient.from_DSN('udp+' + self.dsn_string)
self.assertTrue(cli.use_udp)
cli = InfluxDBClient.from_DSN('https+' + self.dsn_string)
self.assertEqual('https://my.host.fr:1886', cli._baseurl)
cli = InfluxDBClient.from_DSN('https+' + self.dsn_string,
**{'ssl': False})
self.assertEqual('http://my.host.fr:1886', cli._baseurl)
def test_switch_database(self):
cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
cli.switch_database('another_database')
self.assertEqual('another_database', cli._database)
def test_switch_user(self):
cli = InfluxDBClient('host', 8086, 'username', 'password', 'database')
cli.switch_user('another_username', 'another_password')
self.assertEqual('another_username', cli._username)
self.assertEqual('another_password', cli._password)
def test_write(self):
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
"http://localhost:8086/write",
status_code=204
)
cli = InfluxDBClient(database='db')
cli.write(
{"database": "mydb",
"retentionPolicy": "mypolicy",
"points": [{"measurement": "cpu_load_short",
"tags": {"host": "server01",
"region": "us-west"},
"time": "2009-11-10T23:00:00Z",
"fields": {"value": 0.64}}]}
)
self.assertEqual(
m.last_request.body,
b"cpu_load_short,host=server01,region=us-west "
b"value=0.64 1257894000000000000\n",
)
def test_write_points(self):
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
"http://localhost:8086/write",
status_code=204
)
cli = InfluxDBClient(database='db')
cli.write_points(
self.dummy_points,
)
self.assertEqual(
'cpu_load_short,host=server01,region=us-west '
'value=0.64 1257894000123456000\n',
m.last_request.body.decode('utf-8'),
)
def test_write_points_toplevel_attributes(self):
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
"http://localhost:8086/write",
status_code=204
)
cli = InfluxDBClient(database='db')
cli.write_points(
self.dummy_points,
database='testdb',
tags={"tag": "hello"},
retention_policy="somepolicy"
)
self.assertEqual(
'cpu_load_short,host=server01,region=us-west,tag=hello '
'value=0.64 1257894000123456000\n',
m.last_request.body.decode('utf-8'),
)
def test_write_points_batch(self):
dummy_points = [
{"measurement": "cpu_usage", "tags": {"unit": "percent"},
"time": "2009-11-10T23:00:00Z", "fields": {"value": 12.34}},
{"measurement": "network", "tags": {"direction": "in"},
"time": "2009-11-10T23:00:00Z", "fields": {"value": 123.00}},
{"measurement": "network", "tags": {"direction": "out"},
"time": "2009-11-10T23:00:00Z", "fields": {"value": 12.00}}
]
expected_last_body = (
"network,direction=out,host=server01,region=us-west "
"value=12.0 1257894000000000000\n"
)
with requests_mock.Mocker() as m:
m.register_uri(requests_mock.POST,
"http://localhost:8086/write",
status_code=204)
cli = InfluxDBClient(database='db')
cli.write_points(points=dummy_points,
database='db',
tags={"host": "server01",
"region": "us-west"},
batch_size=2)
self.assertEqual(m.call_count, 2)
self.assertEqual(expected_last_body,
m.last_request.body.decode('utf-8'))
def test_write_points_udp(self):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
port = random.randint(4000, 8000)
s.bind(('0.0.0.0', port))
cli = InfluxDBClient(
'localhost', 8086, 'root', 'root',
'test', use_udp=True, udp_port=port
)
cli.write_points(self.dummy_points)
received_data, addr = s.recvfrom(1024)
self.assertEqual(
'cpu_load_short,host=server01,region=us-west '
'value=0.64 1257894000123456000\n',
received_data.decode()
)
def test_write_bad_precision_udp(self):
cli = InfluxDBClient(
'localhost', 8086, 'root', 'root',
'test', use_udp=True, udp_port=4444
)
with self.assertRaisesRegexp(
Exception,
"InfluxDB only supports seconds precision for udp writes"
):
cli.write_points(
self.dummy_points,
time_precision='ms'
)
@raises(Exception)
def test_write_points_fails(self):
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
with _mocked_session(cli, 'post', 500):
cli.write_points([])
def test_write_points_with_precision(self):
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
"http://localhost:8086/write",
status_code=204
)
cli = InfluxDBClient(database='db')
cli.write_points(self.dummy_points, time_precision='n')
self.assertEqual(
b'cpu_load_short,host=server01,region=us-west '
b'value=0.64 1257894000123456000\n',
m.last_request.body,
)
cli.write_points(self.dummy_points, time_precision='u')
self.assertEqual(
b'cpu_load_short,host=server01,region=us-west '
b'value=0.64 1257894000123456\n',
m.last_request.body,
)
cli.write_points(self.dummy_points, time_precision='ms')
self.assertEqual(
b'cpu_load_short,host=server01,region=us-west '
b'value=0.64 1257894000123\n',
m.last_request.body,
)
cli.write_points(self.dummy_points, time_precision='s')
self.assertEqual(
b"cpu_load_short,host=server01,region=us-west "
b"value=0.64 1257894000\n",
m.last_request.body,
)
cli.write_points(self.dummy_points, time_precision='m')
self.assertEqual(
b'cpu_load_short,host=server01,region=us-west '
b'value=0.64 20964900\n',
m.last_request.body,
)
cli.write_points(self.dummy_points, time_precision='h')
self.assertEqual(
b'cpu_load_short,host=server01,region=us-west '
b'value=0.64 349415\n',
m.last_request.body,
)
def test_write_points_bad_precision(self):
cli = InfluxDBClient()
with self.assertRaisesRegexp(
Exception,
"Invalid time precision is given. "
"\(use 'n', 'u', 'ms', 's', 'm' or 'h'\)"
):
cli.write_points(
self.dummy_points,
time_precision='g'
)
@raises(Exception)
def test_write_points_with_precision_fails(self):
cli = InfluxDBClient('host', 8086, 'username', 'password', 'db')
with _mocked_session(cli, 'post', 500):
cli.write_points_with_precision([])
def test_query(self):
example_response = (
'{"results": [{"series": [{"measurement": "sdfsdfsdf", '
'"columns": ["time", "value"], "values": '
'[["2009-11-10T23:00:00Z", 0.64]]}]}, {"series": '
'[{"measurement": "cpu_load_short", "columns": ["time", "value"], '
'"values": [["2009-11-10T23:00:00Z", 0.64]]}]}]}'
)
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.GET,
"http://localhost:8086/query",
text=example_response
)
rs = self.cli.query('select * from foo')
self.assertListEqual(
list(rs[0].get_points()),
[{'value': 0.64, 'time': '2009-11-10T23:00:00Z'}]
)
@unittest.skip('Not implemented for 0.9')
def test_query_chunked(self):
cli = InfluxDBClient(database='db')
example_object = {
'points': [
[1415206250119, 40001, 667],
[1415206244555, 30001, 7],
[1415206228241, 20001, 788],
[1415206212980, 10001, 555],
[1415197271586, 10001, 23]
],
'measurement': 'foo',
'columns': [
'time',
'sequence_number',
'val'
]
}
example_response = \
json.dumps(example_object) + json.dumps(example_object)
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.GET,
"http://localhost:8086/db/db/series",
text=example_response
)
self.assertListEqual(
cli.query('select * from foo', chunked=True),
[example_object, example_object]
)
@raises(Exception)
def test_query_fail(self):
with _mocked_session(self.cli, 'get', 401):
self.cli.query('select column_one from foo;')
def test_create_database(self):
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
"http://localhost:8086/query",
text='{"results":[{}]}'
)
self.cli.create_database('new_db')
self.assertEqual(
m.last_request.qs['q'][0],
'create database "new_db"'
)
def test_create_numeric_named_database(self):
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
"http://localhost:8086/query",
text='{"results":[{}]}'
)
self.cli.create_database('123')
self.assertEqual(
m.last_request.qs['q'][0],
'create database "123"'
)
@raises(Exception)
def test_create_database_fails(self):
with _mocked_session(self.cli, 'post', 401):
self.cli.create_database('new_db')
def test_drop_database(self):
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
"http://localhost:8086/query",
text='{"results":[{}]}'
)
self.cli.drop_database('new_db')
self.assertEqual(
m.last_request.qs['q'][0],
'drop database "new_db"'
)
def test_drop_numeric_named_database(self):
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
"http://localhost:8086/query",
text='{"results":[{}]}'
)
self.cli.drop_database('123')
self.assertEqual(
m.last_request.qs['q'][0],
'drop database "123"'
)
def test_get_list_database(self):
data = {'results': [
{'series': [
{'name': 'databases',
'values': [
['new_db_1'],
['new_db_2']],
'columns': ['name']}]}
]}
with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
self.assertListEqual(
self.cli.get_list_database(),
[{'name': 'new_db_1'}, {'name': 'new_db_2'}]
)
@raises(Exception)
def test_get_list_database_fails(self):
cli = InfluxDBClient('host', 8086, 'username', 'password')
with _mocked_session(cli, 'get', 401):
cli.get_list_database()
def test_get_list_servers(self):
data = {'results': [
{'series': [
{'columns': ['id', 'cluster_addr', 'raft', 'raft-leader'],
'values': [
[1, 'server01:8088', True, True],
[2, 'server02:8088', True, False],
[3, 'server03:8088', True, False]]}]}
]}
with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
self.assertListEqual(
self.cli.get_list_servers(),
[{'cluster_addr': 'server01:8088',
'id': 1,
'raft': True,
'raft-leader': True},
{'cluster_addr': 'server02:8088',
'id': 2,
'raft': True,
'raft-leader': False},
{'cluster_addr': 'server03:8088',
'id': 3,
'raft': True,
'raft-leader': False}]
)
@raises(Exception)
def test_get_list_servers_fails(self):
cli = InfluxDBClient('host', 8086, 'username', 'password')
with _mocked_session(cli, 'get', 401):
cli.get_list_servers()
def test_create_retention_policy_default(self):
example_response = '{"results":[{}]}'
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
"http://localhost:8086/query",
text=example_response
)
self.cli.create_retention_policy(
'somename', '1d', 4, default=True, database='db'
)
self.assertEqual(
m.last_request.qs['q'][0],
'create retention policy "somename" on '
'"db" duration 1d replication 4 default'
)
def test_create_retention_policy(self):
example_response = '{"results":[{}]}'
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
"http://localhost:8086/query",
text=example_response
)
self.cli.create_retention_policy(
'somename', '1d', 4, database='db'
)
self.assertEqual(
m.last_request.qs['q'][0],
'create retention policy "somename" on '
'"db" duration 1d replication 4'
)
def test_alter_retention_policy(self):
example_response = '{"results":[{}]}'
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
"http://localhost:8086/query",
text=example_response
)
# Test alter duration
self.cli.alter_retention_policy('somename', 'db',
duration='4d')
self.assertEqual(
m.last_request.qs['q'][0],
'alter retention policy "somename" on "db" duration 4d'
)
# Test alter replication
self.cli.alter_retention_policy('somename', 'db',
replication=4)
self.assertEqual(
m.last_request.qs['q'][0],
'alter retention policy "somename" on "db" replication 4'
)
# Test alter default
self.cli.alter_retention_policy('somename', 'db',
default=True)
self.assertEqual(
m.last_request.qs['q'][0],
'alter retention policy "somename" on "db" default'
)
@raises(Exception)
def test_alter_retention_policy_invalid(self):
cli = InfluxDBClient('host', 8086, 'username', 'password')
with _mocked_session(cli, 'post', 400):
            cli.alter_retention_policy('somename', 'db')
def test_drop_retention_policy(self):
example_response = '{"results":[{}]}'
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
"http://localhost:8086/query",
text=example_response
)
self.cli.drop_retention_policy('somename', 'db')
self.assertEqual(
m.last_request.qs['q'][0],
'drop retention policy "somename" on "db"'
)
@raises(Exception)
def test_drop_retention_policy_fails(self):
cli = InfluxDBClient('host', 8086, 'username', 'password')
with _mocked_session(cli, 'delete', 401):
cli.drop_retention_policy('default', 'db')
def test_get_list_retention_policies(self):
example_response = \
'{"results": [{"series": [{"values": [["fsfdsdf", "24h0m0s", 2]],'\
' "columns": ["name", "duration", "replicaN"]}]}]}'
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.GET,
"http://localhost:8086/query",
text=example_response
)
self.assertListEqual(
self.cli.get_list_retention_policies(),
[{'duration': '24h0m0s',
'name': 'fsfdsdf', 'replicaN': 2}]
)
@mock.patch('requests.Session.request')
def test_request_retry(self, mock_request):
"""Tests that two connection errors will be handled"""
class CustomMock(object):
i = 0
def connection_error(self, *args, **kwargs):
self.i += 1
if self.i < 3:
raise requests.exceptions.ConnectionError
else:
r = requests.Response()
r.status_code = 204
return r
mock_request.side_effect = CustomMock().connection_error
cli = InfluxDBClient(database='db')
cli.write_points(
self.dummy_points
)
@mock.patch('requests.Session.request')
def test_request_retry_raises(self, mock_request):
"""Tests that three connection errors will not be handled"""
class CustomMock(object):
i = 0
def connection_error(self, *args, **kwargs):
self.i += 1
if self.i < 4:
raise requests.exceptions.ConnectionError
else:
r = requests.Response()
r.status_code = 200
return r
mock_request.side_effect = CustomMock().connection_error
cli = InfluxDBClient(database='db')
with self.assertRaises(requests.exceptions.ConnectionError):
cli.write_points(self.dummy_points)
def test_get_list_users(self):
example_response = (
'{"results":[{"series":[{"columns":["user","admin"],'
'"values":[["test",false]]}]}]}'
)
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.GET,
"http://localhost:8086/query",
text=example_response
)
self.assertListEqual(
self.cli.get_list_users(),
[{'user': 'test', 'admin': False}]
)
def test_get_list_users_empty(self):
example_response = (
'{"results":[{"series":[{"columns":["user","admin"]}]}]}'
)
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.GET,
"http://localhost:8086/query",
text=example_response
)
self.assertListEqual(self.cli.get_list_users(), [])
def test_grant_admin_privileges(self):
example_response = '{"results":[{}]}'
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
"http://localhost:8086/query",
text=example_response
)
self.cli.grant_admin_privileges('test')
self.assertEqual(
m.last_request.qs['q'][0],
'grant all privileges to test'
)
@raises(Exception)
def test_grant_admin_privileges_invalid(self):
cli = InfluxDBClient('host', 8086, 'username', 'password')
with _mocked_session(cli, 'post', 400):
            cli.grant_admin_privileges('')
def test_revoke_admin_privileges(self):
example_response = '{"results":[{}]}'
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
"http://localhost:8086/query",
text=example_response
)
self.cli.revoke_admin_privileges('test')
self.assertEqual(
m.last_request.qs['q'][0],
'revoke all privileges from test'
)
@raises(Exception)
def test_revoke_admin_privileges_invalid(self):
cli = InfluxDBClient('host', 8086, 'username', 'password')
with _mocked_session(cli, 'post', 400):
            cli.revoke_admin_privileges('')
def test_grant_privilege(self):
example_response = '{"results":[{}]}'
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
"http://localhost:8086/query",
text=example_response
)
self.cli.grant_privilege('read', 'testdb', 'test')
self.assertEqual(
m.last_request.qs['q'][0],
'grant read on testdb to test'
)
@raises(Exception)
def test_grant_privilege_invalid(self):
cli = InfluxDBClient('host', 8086, 'username', 'password')
with _mocked_session(cli, 'post', 400):
            cli.grant_privilege('', 'testdb', 'test')
def test_revoke_privilege(self):
example_response = '{"results":[{}]}'
with requests_mock.Mocker() as m:
m.register_uri(
requests_mock.POST,
"http://localhost:8086/query",
text=example_response
)
self.cli.revoke_privilege('read', 'testdb', 'test')
self.assertEqual(
m.last_request.qs['q'][0],
'revoke read on testdb from test'
)
@raises(Exception)
def test_revoke_privilege_invalid(self):
cli = InfluxDBClient('host', 8086, 'username', 'password')
with _mocked_session(cli, 'post', 400):
            cli.revoke_privilege('', 'testdb', 'test')
def test_get_list_privileges(self):
data = {'results': [
{'series': [
{'columns': ['database', 'privilege'],
'values': [
['db1', 'READ'],
['db2', 'ALL PRIVILEGES'],
['db3', 'NO PRIVILEGES']]}
]}
]}
with _mocked_session(self.cli, 'get', 200, json.dumps(data)):
self.assertListEqual(
self.cli.get_list_privileges('test'),
[{'database': 'db1', 'privilege': 'READ'},
{'database': 'db2', 'privilege': 'ALL PRIVILEGES'},
{'database': 'db3', 'privilege': 'NO PRIVILEGES'}]
)
@raises(Exception)
def test_get_list_privileges_fails(self):
cli = InfluxDBClient('host', 8086, 'username', 'password')
with _mocked_session(cli, 'get', 401):
cli.get_list_privileges('test')
def test_invalid_port_fails(self):
with self.assertRaises(ValueError):
InfluxDBClient('host', '80/redir', 'username', 'password')
class FakeClient(InfluxDBClient):
def __init__(self, *args, **kwargs):
super(FakeClient, self).__init__(*args, **kwargs)
def query(self,
query,
params={},
expected_response_code=200,
database=None):
if query == 'Fail':
raise Exception("Fail")
elif query == 'Fail once' and self._host == 'host1':
raise Exception("Fail Once")
elif query == 'Fail twice' and self._host in 'host1 host2':
raise Exception("Fail Twice")
else:
return "Success"
class TestInfluxDBClusterClient(unittest.TestCase):
def setUp(self):
# By default, raise exceptions on warnings
warnings.simplefilter('error', FutureWarning)
self.hosts = [('host1', 8086), ('host2', 8086), ('host3', 8086)]
self.dsn_string = 'influxdb://uSr:pWd@host1:8086,uSr:pWd@host2:8086/db'
def test_init(self):
cluster = InfluxDBClusterClient(hosts=self.hosts,
username='username',
password='password',
database='database',
shuffle=False,
client_base_class=FakeClient)
self.assertEqual(3, len(cluster.hosts))
self.assertEqual(0, len(cluster.bad_hosts))
self.assertIn((cluster._client._host,
cluster._client._port), cluster.hosts)
def test_one_server_fails(self):
cluster = InfluxDBClusterClient(hosts=self.hosts,
database='database',
shuffle=False,
client_base_class=FakeClient)
self.assertEqual('Success', cluster.query('Fail once'))
self.assertEqual(2, len(cluster.hosts))
self.assertEqual(1, len(cluster.bad_hosts))
def test_two_servers_fail(self):
cluster = InfluxDBClusterClient(hosts=self.hosts,
database='database',
shuffle=False,
client_base_class=FakeClient)
self.assertEqual('Success', cluster.query('Fail twice'))
self.assertEqual(1, len(cluster.hosts))
self.assertEqual(2, len(cluster.bad_hosts))
def test_all_fail(self):
cluster = InfluxDBClusterClient(hosts=self.hosts,
database='database',
shuffle=True,
client_base_class=FakeClient)
with self.assertRaises(InfluxDBServerError):
cluster.query('Fail')
self.assertEqual(0, len(cluster.hosts))
self.assertEqual(3, len(cluster.bad_hosts))
def test_all_good(self):
cluster = InfluxDBClusterClient(hosts=self.hosts,
database='database',
shuffle=True,
client_base_class=FakeClient)
self.assertEqual('Success', cluster.query(''))
self.assertEqual(3, len(cluster.hosts))
self.assertEqual(0, len(cluster.bad_hosts))
def test_recovery(self):
cluster = InfluxDBClusterClient(hosts=self.hosts,
database='database',
shuffle=True,
client_base_class=FakeClient)
with self.assertRaises(InfluxDBServerError):
cluster.query('Fail')
self.assertEqual('Success', cluster.query(''))
self.assertEqual(1, len(cluster.hosts))
self.assertEqual(2, len(cluster.bad_hosts))
def test_healing(self):
cluster = InfluxDBClusterClient(hosts=self.hosts,
database='database',
shuffle=True,
healing_delay=1,
client_base_class=FakeClient)
with self.assertRaises(InfluxDBServerError):
cluster.query('Fail')
self.assertEqual('Success', cluster.query(''))
time.sleep(1.1)
self.assertEqual('Success', cluster.query(''))
self.assertEqual(2, len(cluster.hosts))
self.assertEqual(1, len(cluster.bad_hosts))
time.sleep(1.1)
self.assertEqual('Success', cluster.query(''))
self.assertEqual(3, len(cluster.hosts))
self.assertEqual(0, len(cluster.bad_hosts))
def test_dsn(self):
cli = InfluxDBClusterClient.from_DSN(self.dsn_string)
self.assertEqual([('host1', 8086), ('host2', 8086)], cli.hosts)
self.assertEqual('http://host1:8086', cli._client._baseurl)
self.assertEqual('uSr', cli._client._username)
self.assertEqual('pWd', cli._client._password)
self.assertEqual('db', cli._client._database)
self.assertFalse(cli._client.use_udp)
cli = InfluxDBClusterClient.from_DSN('udp+' + self.dsn_string)
self.assertTrue(cli._client.use_udp)
cli = InfluxDBClusterClient.from_DSN('https+' + self.dsn_string)
self.assertEqual('https://host1:8086', cli._client._baseurl)
cli = InfluxDBClusterClient.from_DSN('https+' + self.dsn_string,
**{'ssl': False})
self.assertEqual('http://host1:8086', cli._client._baseurl)
def test_dsn_password_caps(self):
cli = InfluxDBClusterClient.from_DSN(
'https+influxdb://usr:pWd@host:8086/db')
self.assertEqual('pWd', cli._client._password)
def test_dsn_mixed_scheme_case(self):
cli = InfluxDBClusterClient.from_DSN(
'hTTps+inFLUxdb://usr:pWd@host:8086/db')
self.assertEqual('pWd', cli._client._password)
self.assertEqual('https://host:8086', cli._client._baseurl)
cli = InfluxDBClusterClient.from_DSN(
'uDP+influxdb://usr:pwd@host1:8086,usr:pwd@host2:8086/db')
self.assertTrue(cli._client.use_udp)
|
|
#!/usr/bin/env python
#
# Use the raw transactions API to spend bitcoins received on particular addresses,
# and send any change back to that same address.
#
# Example usage:
# spendfrom.py # Lists available funds
# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to a totallylegitcoind or TotallyLegitCoin-Qt running
# on localhost.
#
# Depends on jsonrpc
#
from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
BASE_FEE=Decimal("0.001")
def check_json_precision():
"""Make sure json library being used does not lose precision converting TLC values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
"""Return the default location of the bitcoin data directory"""
if platform.system() == "Darwin":
return os.path.expanduser("~/Library/Application Support/Bitcoin/")
elif platform.system() == "Windows":
return os.path.join(os.environ['APPDATA'], "Bitcoin")
return os.path.expanduser("~/.bitcoin")
def read_totallylegitcoin_config(dbdir):
"""Read the totallylegitcoin.conf file from dbdir, returns dictionary of settings"""
from ConfigParser import SafeConfigParser
class FakeSecHead(object):
def __init__(self, fp):
self.fp = fp
self.sechead = '[all]\n'
def readline(self):
if self.sechead:
try: return self.sechead
finally: self.sechead = None
else:
s = self.fp.readline()
if s.find('#') != -1:
s = s[0:s.find('#')].strip() +"\n"
return s
config_parser = SafeConfigParser()
config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "totallylegitcoin.conf"))))
return dict(config_parser.items("all"))
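# The file parsed above is a plain key=value file; FakeSecHead injects an
# "[all]" section header so ConfigParser can read it. A hypothetical example:
#
#     rpcuser=someuser
#     rpcpassword=somepassword
#     rpcport=5554       # optional; connect_JSON falls back to 5554/15554
#     testnet=0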
def connect_JSON(config):
"""Connect to a bitcoin JSON-RPC server"""
testnet = config.get('testnet', '0')
testnet = (int(testnet) > 0) # 0/1 in config file, convert to True/False
if not 'rpcport' in config:
config['rpcport'] = 15554 if testnet else 5554
    connect = "http://%s:%s@127.0.0.1:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
try:
result = ServiceProxy(connect)
# ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
# but also make sure the totallylegitcoind we're talking to is/isn't testnet:
if result.getmininginfo()['testnet'] != testnet:
sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
sys.exit(1)
return result
except:
sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
sys.exit(1)
def unlock_wallet(totallylegitcoind):
info = totallylegitcoind.getinfo()
if 'unlocked_until' not in info:
return True # wallet is not encrypted
t = int(info['unlocked_until'])
if t <= time.time():
try:
passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
totallylegitcoind.walletpassphrase(passphrase, 5)
except:
sys.stderr.write("Wrong passphrase\n")
info = totallylegitcoind.getinfo()
return int(info['unlocked_until']) > time.time()
def list_available(totallylegitcoind):
address_summary = dict()
address_to_account = dict()
for info in totallylegitcoind.listreceivedbyaddress(0):
address_to_account[info["address"]] = info["account"]
unspent = totallylegitcoind.listunspent(0)
for output in unspent:
# listunspent doesn't give addresses, so:
rawtx = totallylegitcoind.getrawtransaction(output['txid'], 1)
vout = rawtx["vout"][output['vout']]
pk = vout["scriptPubKey"]
# This code only deals with ordinary pay-to-bitcoin-address
# or pay-to-script-hash outputs right now; anything exotic is ignored.
if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash":
continue
address = pk["addresses"][0]
if address in address_summary:
address_summary[address]["total"] += vout["value"]
address_summary[address]["outputs"].append(output)
else:
address_summary[address] = {
"total" : vout["value"],
"outputs" : [output],
"account" : address_to_account.get(address, "")
}
return address_summary
def select_coins(needed, inputs):
# Feel free to improve this, this is good enough for my simple needs:
outputs = []
have = Decimal("0.0")
n = 0
while have < needed and n < len(inputs):
outputs.append({ "txid":inputs[n]["txid"], "vout":inputs[n]["vout"]})
have += inputs[n]["amount"]
n += 1
return (outputs, have-needed)
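# Illustration of the greedy selection above (hypothetical amounts): with
# inputs of 0.5 and 0.3 TLC and needed=0.6, the loop takes both inputs
# (have=0.8) and returns them together with change of 0.2 TLC.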
def create_tx(totallylegitcoind, fromaddresses, toaddress, amount, fee):
all_coins = list_available(totallylegitcoind)
total_available = Decimal("0.0")
needed = amount+fee
potential_inputs = []
for addr in fromaddresses:
if addr not in all_coins:
continue
potential_inputs.extend(all_coins[addr]["outputs"])
total_available += all_coins[addr]["total"]
if total_available < needed:
sys.stderr.write("Error, only %f TLC available, need %f\n"%(total_available, needed));
sys.exit(1)
#
# Note:
# Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
# Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
# Decimals, I'm casting amounts to float before sending them to totallylegitcoind.
#
outputs = { toaddress : float(amount) }
(inputs, change_amount) = select_coins(needed, potential_inputs)
if change_amount > BASE_FEE: # don't bother with zero or tiny change
change_address = fromaddresses[-1]
if change_address in outputs:
outputs[change_address] += float(change_amount)
else:
outputs[change_address] = float(change_amount)
rawtx = totallylegitcoind.createrawtransaction(inputs, outputs)
signed_rawtx = totallylegitcoind.signrawtransaction(rawtx)
if not signed_rawtx["complete"]:
sys.stderr.write("signrawtransaction failed\n")
sys.exit(1)
txdata = signed_rawtx["hex"]
return txdata
def compute_amount_in(totallylegitcoind, txinfo):
result = Decimal("0.0")
for vin in txinfo['vin']:
in_info = totallylegitcoind.getrawtransaction(vin['txid'], 1)
vout = in_info['vout'][vin['vout']]
result = result + vout['value']
return result
def compute_amount_out(txinfo):
result = Decimal("0.0")
for vout in txinfo['vout']:
result = result + vout['value']
return result
def sanity_test_fee(totallylegitcoind, txdata_hex, max_fee):
class FeeError(RuntimeError):
pass
try:
txinfo = totallylegitcoind.decoderawtransaction(txdata_hex)
total_in = compute_amount_in(totallylegitcoind, txinfo)
total_out = compute_amount_out(txinfo)
if total_in-total_out > max_fee:
raise FeeError("Rejecting transaction, unreasonable fee of "+str(total_in-total_out))
        tx_size = len(txdata_hex)/2
        kb = tx_size/1000  # integer division rounds down
        fee = total_in - total_out  # the fee this transaction actually pays
        if kb > 1 and fee < BASE_FEE:
            raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
        if total_in < 0.01 and fee < BASE_FEE:
            raise FeeError("Rejecting no-fee, tiny-amount transaction")
# Exercise for the reader: compute transaction priority, and
# warn if this is a very-low-priority transaction
except FeeError as err:
sys.stderr.write((str(err)+"\n"))
sys.exit(1)
def main():
import optparse
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--from", dest="fromaddresses", default=None,
help="addresses to get bitcoins from")
parser.add_option("--to", dest="to", default=None,
help="address to get send bitcoins to")
parser.add_option("--amount", dest="amount", default=None,
help="amount to send")
parser.add_option("--fee", dest="fee", default="0.0",
help="fee to include")
parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
help="location of totallylegitcoin.conf file with RPC username/password (default: %default)")
parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
help="Use the test network")
parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
help="Don't broadcast the transaction, just create and print the transaction data")
(options, args) = parser.parse_args()
check_json_precision()
    config = read_totallylegitcoin_config(options.datadir)
if options.testnet: config['testnet'] = True
totallylegitcoind = connect_JSON(config)
if options.amount is None:
address_summary = list_available(totallylegitcoind)
for address,info in address_summary.iteritems():
n_transactions = len(info['outputs'])
if n_transactions > 1:
print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
else:
print("%s %.8f %s"%(address, info['total'], info['account']))
else:
fee = Decimal(options.fee)
amount = Decimal(options.amount)
while unlock_wallet(totallylegitcoind) == False:
pass # Keep asking for passphrase until they get it right
txdata = create_tx(totallylegitcoind, options.fromaddresses.split(","), options.to, amount, fee)
sanity_test_fee(totallylegitcoind, txdata, amount*Decimal("0.01"))
if options.dry_run:
print(txdata)
else:
txid = totallylegitcoind.sendrawtransaction(txdata)
print(txid)
if __name__ == '__main__':
main()
|
|
#
# Copyright 2012 eNovance
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Common code for working with object stores
"""
from __future__ import absolute_import
from keystoneclient import exceptions
from oslo_config import cfg
from oslo_log import log
from oslo_utils import timeutils
import six.moves.urllib.parse as urlparse
from swiftclient import client as swift
from ceilometer.agent import plugin_base
from ceilometer.i18n import _
from ceilometer import sample
LOG = log.getLogger(__name__)
OPTS = [
cfg.StrOpt('reseller_prefix',
default='AUTH_',
help="Swift reseller prefix. Must be on par with "
"reseller_prefix in proxy-server.conf."),
]
SERVICE_OPTS = [
cfg.StrOpt('swift',
default='object-store',
help='Swift service type.'),
]
cfg.CONF.register_opts(OPTS)
cfg.CONF.register_opts(SERVICE_OPTS, group='service_types')
cfg.CONF.import_group('service_credentials', 'ceilometer.service')
class _Base(plugin_base.PollsterBase):
METHOD = 'head'
_ENDPOINT = None
@property
def default_discovery(self):
return 'tenant'
@property
def CACHE_KEY_METHOD(self):
return 'swift.%s_account' % self.METHOD
@staticmethod
def _get_endpoint(ksclient):
# we store the endpoint as a base class attribute, so keystone is
# only ever called once
if _Base._ENDPOINT is None:
try:
conf = cfg.CONF.service_credentials
_Base._ENDPOINT = ksclient.service_catalog.url_for(
service_type=cfg.CONF.service_types.swift,
endpoint_type=conf.os_endpoint_type)
except exceptions.EndpointNotFound:
LOG.debug(_("Swift endpoint not found"))
return _Base._ENDPOINT
def _iter_accounts(self, ksclient, cache, tenants):
if self.CACHE_KEY_METHOD not in cache:
cache[self.CACHE_KEY_METHOD] = list(self._get_account_info(
ksclient, tenants))
return iter(cache[self.CACHE_KEY_METHOD])
def _get_account_info(self, ksclient, tenants):
endpoint = self._get_endpoint(ksclient)
if not endpoint:
            return
for t in tenants:
api_method = '%s_account' % self.METHOD
yield (t.id, getattr(swift, api_method)
(self._neaten_url(endpoint, t.id),
ksclient.auth_token))
@staticmethod
def _neaten_url(endpoint, tenant_id):
"""Transform the registered url to standard and valid format."""
return urlparse.urljoin(endpoint.split('/v1')[0].rstrip('/') + '/',
'v1/' + cfg.CONF.reseller_prefix + tenant_id)
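# For example (hypothetical endpoint), with reseller_prefix='AUTH_' the call
#   _Base._neaten_url('http://swift.example.com:8080/v1/AUTH_admin', '1234')
# yields 'http://swift.example.com:8080/v1/AUTH_1234'.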
class ObjectsPollster(_Base):
"""Iterate over all accounts, using keystone."""
def get_samples(self, manager, cache, resources):
tenants = resources
for tenant, account in self._iter_accounts(manager.keystone,
cache, tenants):
yield sample.Sample(
name='storage.objects',
type=sample.TYPE_GAUGE,
volume=int(account['x-account-object-count']),
unit='object',
user_id=None,
project_id=tenant,
resource_id=tenant,
timestamp=timeutils.isotime(),
resource_metadata=None,
)
class ObjectsSizePollster(_Base):
"""Iterate over all accounts, using keystone."""
def get_samples(self, manager, cache, resources):
tenants = resources
for tenant, account in self._iter_accounts(manager.keystone,
cache, tenants):
yield sample.Sample(
name='storage.objects.size',
type=sample.TYPE_GAUGE,
volume=int(account['x-account-bytes-used']),
unit='B',
user_id=None,
project_id=tenant,
resource_id=tenant,
timestamp=timeutils.isotime(),
resource_metadata=None,
)
class ObjectsContainersPollster(_Base):
"""Iterate over all accounts, using keystone."""
def get_samples(self, manager, cache, resources):
tenants = resources
for tenant, account in self._iter_accounts(manager.keystone,
cache, tenants):
yield sample.Sample(
name='storage.objects.containers',
type=sample.TYPE_GAUGE,
volume=int(account['x-account-container-count']),
unit='container',
user_id=None,
project_id=tenant,
resource_id=tenant,
timestamp=timeutils.isotime(),
resource_metadata=None,
)
class ContainersObjectsPollster(_Base):
"""Get info about containers using Swift API."""
METHOD = 'get'
def get_samples(self, manager, cache, resources):
tenants = resources
for tenant, account in self._iter_accounts(manager.keystone,
cache, tenants):
containers_info = account[1]
for container in containers_info:
yield sample.Sample(
name='storage.containers.objects',
type=sample.TYPE_GAUGE,
volume=int(container['count']),
unit='object',
user_id=None,
project_id=tenant,
resource_id=tenant + '/' + container['name'],
timestamp=timeutils.isotime(),
resource_metadata=None,
)
class ContainersSizePollster(_Base):
"""Get info about containers using Swift API."""
METHOD = 'get'
def get_samples(self, manager, cache, resources):
tenants = resources
for tenant, account in self._iter_accounts(manager.keystone,
cache, tenants):
containers_info = account[1]
for container in containers_info:
yield sample.Sample(
name='storage.containers.objects.size',
type=sample.TYPE_GAUGE,
volume=int(container['bytes']),
unit='B',
user_id=None,
project_id=tenant,
resource_id=tenant + '/' + container['name'],
timestamp=timeutils.isotime(),
resource_metadata=None,
)
|
|
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Prefix DAG permissions.
Revision ID: 849da589634d
Revises: 45ba3f1493b9
Create Date: 2020-10-01 17:25:10.006322
"""
from flask_appbuilder import SQLA
from airflow import settings
from airflow.security import permissions
from airflow.www.fab_security.sqla.models import Action, Permission, Resource
# revision identifiers, used by Alembic.
revision = '849da589634d'
down_revision = '45ba3f1493b9'
branch_labels = None
depends_on = None
airflow_version = '2.0.0'
def prefix_individual_dag_permissions(session):
dag_perms = ['can_dag_read', 'can_dag_edit']
prefix = "DAG:"
perms = (
session.query(Permission)
.join(Action)
.filter(Action.name.in_(dag_perms))
.join(Resource)
.filter(Resource.name != 'all_dags')
.filter(Resource.name.notlike(prefix + '%'))
.all()
)
resource_ids = {permission.resource.id for permission in perms}
vm_query = session.query(Resource).filter(Resource.id.in_(resource_ids))
vm_query.update({Resource.name: prefix + Resource.name}, synchronize_session=False)
session.commit()
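# For example, a resource named 'example_dag' holding a 'can_dag_read' or
# 'can_dag_edit' permission is renamed to 'DAG:example_dag'; the special
# 'all_dags' resource and already-prefixed names are left untouched.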
def remove_prefix_in_individual_dag_permissions(session):
dag_perms = ['can_read', 'can_edit']
prefix = "DAG:"
perms = (
session.query(Permission)
.join(Action)
.filter(Action.name.in_(dag_perms))
.join(Resource)
.filter(Resource.name.like(prefix + '%'))
.all()
)
for permission in perms:
permission.resource.name = permission.resource.name[len(prefix) :]
session.commit()
def get_or_create_dag_resource(session):
dag_resource = get_resource_query(session, permissions.RESOURCE_DAG).first()
if dag_resource:
return dag_resource
dag_resource = Resource()
dag_resource.name = permissions.RESOURCE_DAG
session.add(dag_resource)
session.commit()
return dag_resource
def get_or_create_all_dag_resource(session):
all_dag_resource = get_resource_query(session, 'all_dags').first()
if all_dag_resource:
return all_dag_resource
all_dag_resource = Resource()
all_dag_resource.name = 'all_dags'
session.add(all_dag_resource)
session.commit()
return all_dag_resource
def get_or_create_action(session, action_name):
action = get_action_query(session, action_name).first()
if action:
return action
action = Action()
action.name = action_name
session.add(action)
session.commit()
return action
def get_resource_query(session, resource_name):
return session.query(Resource).filter(Resource.name == resource_name)
def get_action_query(session, action_name):
return session.query(Action).filter(Action.name == action_name)
def get_permission_with_action_query(session, action):
return session.query(Permission).filter(Permission.action == action)
def get_permission_with_resource_query(session, resource):
return session.query(Permission).filter(Permission.resource_id == resource.id)
def update_permission_action(session, permission_query, action):
permission_query.update({Permission.action_id: action.id}, synchronize_session=False)
session.commit()
def get_permission(session, resource, action):
return (
session.query(Permission)
.filter(Permission.resource == resource)
.filter(Permission.action == action)
.first()
)
def update_permission_resource(session, permission_query, resource):
for permission in permission_query.all():
if not get_permission(session, resource, permission.action):
permission.resource = resource
else:
session.delete(permission)
session.commit()
def migrate_to_new_dag_permissions(db):
# Prefix individual dag perms with `DAG:`
prefix_individual_dag_permissions(db.session)
# Update existing permissions to use `can_read` instead of `can_dag_read`
can_dag_read_action = get_action_query(db.session, 'can_dag_read').first()
old_can_dag_read_permissions = get_permission_with_action_query(db.session, can_dag_read_action)
can_read_action = get_or_create_action(db.session, 'can_read')
update_permission_action(db.session, old_can_dag_read_permissions, can_read_action)
# Update existing permissions to use `can_edit` instead of `can_dag_edit`
can_dag_edit_action = get_action_query(db.session, 'can_dag_edit').first()
old_can_dag_edit_permissions = get_permission_with_action_query(db.session, can_dag_edit_action)
can_edit_action = get_or_create_action(db.session, 'can_edit')
update_permission_action(db.session, old_can_dag_edit_permissions, can_edit_action)
# Update existing permissions for `all_dags` resource to use `DAGs` resource.
all_dags_resource = get_resource_query(db.session, 'all_dags').first()
if all_dags_resource:
old_all_dags_permission = get_permission_with_resource_query(db.session, all_dags_resource)
dag_resource = get_or_create_dag_resource(db.session)
update_permission_resource(db.session, old_all_dags_permission, dag_resource)
# Delete the `all_dags` resource
db.session.delete(all_dags_resource)
# Delete `can_dag_read` action
if can_dag_read_action:
db.session.delete(can_dag_read_action)
# Delete `can_dag_edit` action
if can_dag_edit_action:
db.session.delete(can_dag_edit_action)
db.session.commit()
def undo_migrate_to_new_dag_permissions(session):
# Remove prefix from individual dag perms
remove_prefix_in_individual_dag_permissions(session)
# Update existing permissions to use `can_dag_read` instead of `can_read`
can_read_action = get_action_query(session, 'can_read').first()
new_can_read_permissions = get_permission_with_action_query(session, can_read_action)
can_dag_read_action = get_or_create_action(session, 'can_dag_read')
update_permission_action(session, new_can_read_permissions, can_dag_read_action)
# Update existing permissions to use `can_dag_edit` instead of `can_edit`
can_edit_action = get_action_query(session, 'can_edit').first()
new_can_edit_permissions = get_permission_with_action_query(session, can_edit_action)
can_dag_edit_action = get_or_create_action(session, 'can_dag_edit')
update_permission_action(session, new_can_edit_permissions, can_dag_edit_action)
# Update existing permissions for `DAGs` resource to use `all_dags` resource.
dag_resource = get_resource_query(session, permissions.RESOURCE_DAG).first()
if dag_resource:
new_dag_permission = get_permission_with_resource_query(session, dag_resource)
old_all_dag_resource = get_or_create_all_dag_resource(session)
update_permission_resource(session, new_dag_permission, old_all_dag_resource)
# Delete the `DAG` resource
session.delete(dag_resource)
# Delete `can_read` action
if can_read_action:
session.delete(can_read_action)
# Delete `can_edit` action
if can_edit_action:
session.delete(can_edit_action)
session.commit()
def upgrade():
db = SQLA()
db.session = settings.Session
migrate_to_new_dag_permissions(db)
db.session.commit()
db.session.close()
def downgrade():
db = SQLA()
db.session = settings.Session
undo_migrate_to_new_dag_permissions(db.session)
|
|
"""watershed.py - watershed algorithm
This module implements a watershed algorithm that apportions pixels into
marked basins. The algorithm uses a priority queue to hold the pixels
with the metric for the priority queue being pixel value, then the time
of entry into the queue - this settles ties in favor of the closest marker.
Some ideas taken from
Soille, "Automated Basin Delineation from Digital Elevation Models Using
Mathematical Morphology", Signal Processing 20 (1990) 171-182.
The most important insight in the paper is that entry time onto the queue
solves two problems: a pixel should be assigned to the neighbor with the
largest gradient or, if there is no gradient, pixels on a plateau should
be split between markers on opposite sides.
Originally part of CellProfiler, code licensed under both GPL and BSD licenses.
Website: http://www.cellprofiler.org
Copyright (c) 2003-2009 Massachusetts Institute of Technology
Copyright (c) 2009-2011 Broad Institute
All rights reserved.
Original author: Lee Kamentsky
"""
from _heapq import heappush, heappop
import numpy as np
from scipy import ndimage as ndi
from ..filters import rank_order
from . import _watershed
def watershed(image, markers, connectivity=None, offset=None, mask=None):
"""
Return a matrix labeled using the watershed segmentation algorithm
Parameters
----------
image: ndarray (2-D, 3-D, ...) of integers
Data array where the lowest value points are labeled first.
markers: ndarray of the same shape as `image`
An array marking the basins with the values to be assigned in the
label matrix. Zero means not a marker. This array should be of an
integer type.
connectivity: ndarray, optional
An array with the same number of dimensions as `image` whose
non-zero elements indicate neighbors for connection.
Following the scipy convention, default is a one-connected array of
the dimension of the image.
offset: array_like of shape image.ndim, optional
offset of the connectivity (one offset per dimension)
mask: ndarray of bools or 0s and 1s, optional
Array of same shape as `image`. Only points at which mask == True
will be labeled.
Returns
-------
out: ndarray
A labeled matrix of the same type and shape as markers
See also
--------
skimage.segmentation.random_walker: random walker segmentation
A segmentation algorithm based on anisotropic diffusion, usually
slower than the watershed but with good results on noisy data and
boundaries with holes.
Notes
-----
    This function implements a watershed algorithm [1]_ that apportions pixels
into marked basins. The algorithm uses a priority queue to hold the pixels
with the metric for the priority queue being pixel value, then the time of
entry into the queue - this settles ties in favor of the closest marker.
Some ideas taken from
Soille, "Automated Basin Delineation from Digital Elevation Models Using
Mathematical Morphology", Signal Processing 20 (1990) 171-182
The most important insight in the paper is that entry time onto the queue
solves two problems: a pixel should be assigned to the neighbor with the
largest gradient or, if there is no gradient, pixels on a plateau should
be split between markers on opposite sides.
This implementation converts all arguments to specific, lowest common
denominator types, then passes these to a C algorithm.
Markers can be determined manually, or automatically using for example
the local minima of the gradient of the image, or the local maxima of the
distance function to the background for separating overlapping objects
(see example).
References
----------
.. [1] http://en.wikipedia.org/wiki/Watershed_%28image_processing%29
.. [2] http://cmm.ensmp.fr/~beucher/wtshed.html
Examples
--------
The watershed algorithm is very useful to separate overlapping objects
>>> # Generate an initial image with two overlapping circles
>>> x, y = np.indices((80, 80))
>>> x1, y1, x2, y2 = 28, 28, 44, 52
>>> r1, r2 = 16, 20
>>> mask_circle1 = (x - x1)**2 + (y - y1)**2 < r1**2
>>> mask_circle2 = (x - x2)**2 + (y - y2)**2 < r2**2
>>> image = np.logical_or(mask_circle1, mask_circle2)
>>> # Now we want to separate the two objects in image
>>> # Generate the markers as local maxima of the distance
>>> # to the background
>>> from scipy import ndimage as ndi
>>> distance = ndi.distance_transform_edt(image)
>>> from skimage.feature import peak_local_max
>>> local_maxi = peak_local_max(distance, labels=image,
... footprint=np.ones((3, 3)),
... indices=False)
>>> markers = ndi.label(local_maxi)[0]
>>> labels = watershed(-distance, markers, mask=image)
The algorithm works also for 3-D images, and can be used for example to
separate overlapping spheres.
"""
if connectivity is None:
c_connectivity = ndi.generate_binary_structure(image.ndim, 1)
else:
c_connectivity = np.array(connectivity, bool)
if c_connectivity.ndim != image.ndim:
raise ValueError("Connectivity dimension must be same as image")
if offset is None:
if any([x % 2 == 0 for x in c_connectivity.shape]):
raise ValueError("Connectivity array must have an unambiguous "
"center")
#
# offset to center of connectivity array
#
offset = np.array(c_connectivity.shape) // 2
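        # e.g. for the default 2-D one-connectivity the structuring element has
        # shape (3, 3), so the offset becomes [1, 1] -- its central element.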
# pad the image, markers, and mask so that we can use the mask to
# keep from running off the edges
pads = offset
def pad(im):
new_im = np.zeros(
[i + 2 * p for i, p in zip(im.shape, pads)], im.dtype)
new_im[[slice(p, -p, None) for p in pads]] = im
return new_im
if mask is not None:
mask = pad(mask)
else:
mask = pad(np.ones(image.shape, bool))
image = pad(image)
markers = pad(markers)
c_image = rank_order(image)[0].astype(np.int32)
c_markers = np.ascontiguousarray(markers, dtype=np.int32)
if c_markers.ndim != c_image.ndim:
raise ValueError("markers (ndim=%d) must have same # of dimensions "
"as image (ndim=%d)" % (c_markers.ndim, c_image.ndim))
if c_markers.shape != c_image.shape:
raise ValueError("image and markers must have the same shape")
if mask is not None:
c_mask = np.ascontiguousarray(mask, dtype=bool)
if c_mask.ndim != c_markers.ndim:
raise ValueError("mask must have same # of dimensions as image")
if c_markers.shape != c_mask.shape:
raise ValueError("mask must have same shape as image")
c_markers[np.logical_not(mask)] = 0
else:
c_mask = None
c_output = c_markers.copy()
#
# We pass a connectivity array that pre-calculates the stride for each
# neighbor.
#
# The result of this bit of code is an array with one row per
# point to be considered. The first column is the pre-computed stride
# and the second through last are the x,y...whatever offsets
# (to do bounds checking).
c = []
distances = []
image_stride = np.array(image.strides) // image.itemsize
for i in range(np.product(c_connectivity.shape)):
multiplier = 1
offs = []
indexes = []
ignore = True
for j in range(len(c_connectivity.shape)):
idx = (i // multiplier) % c_connectivity.shape[j]
off = idx - offset[j]
if off:
ignore = False
offs.append(off)
indexes.append(idx)
multiplier *= c_connectivity.shape[j]
if (not ignore) and c_connectivity.__getitem__(tuple(indexes)):
stride = np.dot(image_stride, np.array(offs))
d = np.sum(np.abs(offs)) - 1
offs.insert(0, stride)
c.append(offs)
distances.append(d)
c = np.array(c, dtype=np.int32)
c = c[np.argsort(distances)]
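    # As a concrete (illustrative) case: in 2-D, the neighbor one row down has
    # offsets (1, 0), so its pre-computed stride is image_stride[0], i.e. the
    # number of elements in one (padded) image row.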
pq, age = __heapify_markers(c_markers, c_image)
pq = np.ascontiguousarray(pq, dtype=np.int32)
if np.product(pq.shape) > 0:
# If nothing is labeled, the output is empty and we don't have to
# do anything
c_output = c_output.flatten()
if c_mask is None:
c_mask = np.ones(c_image.shape, np.int8).flatten()
else:
c_mask = c_mask.astype(np.int8).flatten()
_watershed.watershed(c_image.flatten(),
pq, age, c,
c_mask,
c_output)
    c_output = c_output.reshape(c_image.shape)[tuple([slice(1, -1, None)] *
                                                     image.ndim)]
    try:
        return c_output.astype(markers.dtype)
    except Exception:
        return c_output
# ---------------------- deprecated ------------------------------
# Deprecated, slower pure-Python code, kept only for
# pedagogical purposes
def __heapify_markers(markers, image):
"""Create a priority queue heap with the markers on it"""
stride = np.array(image.strides) // image.itemsize
coords = np.argwhere(markers != 0)
ncoords = coords.shape[0]
if ncoords > 0:
pixels = image[markers != 0]
age = np.arange(ncoords)
offset = np.zeros(coords.shape[0], int)
for i in range(image.ndim):
offset = offset + stride[i] * coords[:, i]
pq = np.column_stack((pixels, age, offset, coords))
# pixels = top priority, age=second
ordering = np.lexsort((age, pixels))
pq = pq[ordering, :]
else:
pq = np.zeros((0, markers.ndim + 3), int)
return (pq, ncoords)
def _slow_watershed(image, markers, connectivity=8, mask=None):
"""Return a matrix labeled using the watershed algorithm
Use the `watershed` function for a faster execution.
This pure Python function is solely for pedagogical purposes.
Parameters
----------
image: 2-d ndarray of integers
a two-dimensional matrix where the lowest value points are
labeled first.
markers: 2-d ndarray of integers
a two-dimensional matrix marking the basins with the values
to be assigned in the label matrix. Zero means not a marker.
connectivity: {4, 8}, optional
either 4 for four-connected or 8 (default) for eight-connected
mask: 2-d ndarray of bools, optional
        only points at which mask == True will be labeled
Returns
-------
out: ndarray
A labeled matrix of the same type and shape as markers
Notes
-----
    This function implements a watershed algorithm [1]_ that apportions pixels
into marked basins. The algorithm uses a priority queue to hold the pixels
with the metric for the priority queue being pixel value, then the time of
entry into the queue - this settles ties in favor of the closest marker.
Some ideas taken from
Soille, "Automated Basin Delineation from Digital Elevation Models Using
Mathematical Morphology", Signal Processing 20 (1990) 171-182
The most important insight in the paper is that entry time onto the queue
solves two problems: a pixel should be assigned to the neighbor with the
largest gradient or, if there is no gradient, pixels on a plateau should
be split between markers on opposite sides.
    Unlike the main `watershed` function, this pure-Python implementation
    works on the arrays directly instead of delegating to a C routine.
Markers can be determined manually, or automatically using for example
the local minima of the gradient of the image, or the local maxima of the
distance function to the background for separating overlapping objects.
"""
    if connectivity not in (4, 8):
        raise ValueError("Connectivity was %d: it should be either "
                         "four or eight" % (connectivity))
image = np.array(image)
markers = np.array(markers)
labels = markers.copy()
max_x = markers.shape[0]
max_y = markers.shape[1]
if connectivity == 4:
connect_increments = ((1, 0), (0, 1), (-1, 0), (0, -1))
else:
connect_increments = ((1, 0), (1, 1), (0, 1), (-1, 1),
(-1, 0), (-1, -1), (0, -1), (1, -1))
pq, age = __heapify_markers(markers, image)
pq = pq.tolist()
#
# The second step pops a value off of the queue, then labels and pushes
# the neighbors
#
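    # Each entry produced by __heapify_markers is of the form
    # [pixel value, age, flattened offset, x, y]; an entry such as
    # [3, 0, 42, 2, 2] (illustrative values) is the marker at (2, 2) sitting
    # on a pixel of value 3 that entered the queue first. The flattened
    # offset is unused in this pure-Python version, hence the throwaway
    # `ignore` name in the unpacking below.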
while len(pq):
pix_value, pix_age, ignore, pix_x, pix_y = heappop(pq)
pix_label = labels[pix_x, pix_y]
for xi, yi in connect_increments:
x = pix_x + xi
y = pix_y + yi
if x < 0 or y < 0 or x >= max_x or y >= max_y:
continue
if labels[x, y]:
continue
if mask is not None and not mask[x, y]:
continue
# label the pixel
labels[x, y] = pix_label
# put the pixel onto the queue
heappush(pq, [image[x, y], age, 0, x, y])
age += 1
return labels
|
|
# GUI Application automation and testing library
# Copyright (C) 2006-2018 Mark Mc Mahon and Contributors
# https://github.com/pywinauto/pywinauto/graphs/contributors
# http://pywinauto.readthedocs.io/en/latest/credits.html
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of pywinauto nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for classes in controls\common_controls.py"""
from __future__ import print_function
import sys
#import ctypes
import unittest
import time
from datetime import datetime
#import pdb
import os
import win32api
import win32gui
import six
sys.path.append(".")
from pywinauto.windows.application import Application # noqa: E402
from pywinauto.windows.win32structures import RECT # noqa: E402
from pywinauto.windows import win32defines
from pywinauto import findbestmatch # noqa: E402
from pywinauto.sysinfo import is_x64_Python # noqa: E402
from pywinauto.windows.remote_memory_block import RemoteMemoryBlock # noqa: E402
from pywinauto.actionlogger import ActionLogger # noqa: E402
from pywinauto.timings import Timings # noqa: E402
from pywinauto.timings import wait_until # noqa: E402
from pywinauto import mouse # noqa: E402
controlspy_folder = os.path.join(
os.path.dirname(__file__), r"..\..\apps\controlspy0998")
controlspy_folder_32 = controlspy_folder
mfc_samples_folder = os.path.join(
os.path.dirname(__file__), r"..\..\apps\MFC_samples")
mfc_samples_folder_32 = mfc_samples_folder
winforms_folder = os.path.join(
os.path.dirname(__file__), r"..\..\apps\WinForms_samples")
winforms_folder_32 = winforms_folder
if is_x64_Python():
controlspy_folder = os.path.join(controlspy_folder, 'x64')
mfc_samples_folder = os.path.join(mfc_samples_folder, 'x64')
winforms_folder = os.path.join(winforms_folder, 'x64')
class RemoteMemoryBlockTestCases(unittest.TestCase):
def test__init__fail(self):
self.assertRaises(AttributeError, RemoteMemoryBlock, 0)
class ListViewTestCases32(unittest.TestCase):
"""Unit tests for the ListViewWrapper class"""
path = os.path.join(mfc_samples_folder_32, u"RowList.exe")
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
Timings.fast()
app = Application()
app.start(self.path)
self.texts = [
(u"Yellow", u"255", u"255", u"0", u"40", u"240", u"120", u"Neutral"),
(u"Red", u"255", u"0", u"0", u"0", u"240", u"120", u"Warm"),
(u"Green", u"0", u"255", u"0", u"80", u"240", u"120", u"Cool"),
(u"Magenta", u"255", u"0", u"255", u"200", u"240", u"120", u"Warm"),
(u"Cyan", u"0", u"255", u"255", u"120", u"240", u"120", u"Cool"),
(u"Blue", u"0", u"0", u"255", u"160", u"240", u"120", u"Cool"),
(u"Gray", u"192", u"192", u"192", u"160", u"0", u"181", u"Neutral")
]
self.app = app
self.dlg = app.RowListSampleApplication
self.ctrl = app.RowListSampleApplication.ListView.find()
self.dlg.Toolbar.button(0).click() # switch to icon view
self.dlg.Toolbar.button(6).click() # switch off states
def tearDown(self):
"""Close the application after tests"""
self.dlg.send_message(win32defines.WM_CLOSE)
def testFriendlyClass(self):
"""Make sure the ListView friendly class is set correctly"""
self.assertEqual(self.ctrl.friendly_class_name(), u"ListView")
def testColumnCount(self):
"""Test the ListView ColumnCount method"""
self.assertEqual(self.ctrl.column_count(), 8)
def testItemCount(self):
"""Test the ListView ItemCount method"""
self.assertEqual(self.ctrl.item_count(), 7)
def testItemText(self):
"""Test the ListView item.Text property"""
item = self.ctrl.get_item(1)
self.assertEqual(item.text(), u"Red")
def testItems(self):
"""Test the ListView Items method"""
flat_texts = []
for row in self.texts:
flat_texts.extend(row)
items = self.ctrl.items()
for i, item in enumerate(items):
self.assertEqual(item.text(), flat_texts[i])
self.assertEqual(len(items), len(flat_texts))
def testTexts(self):
"""Test the ListView texts method"""
flat_texts = []
for row in self.texts:
flat_texts.extend(row)
self.assertEqual(flat_texts, self.ctrl.texts()[1:])
def testGetItem(self):
"""Test the ListView get_item method"""
for row in range(self.ctrl.item_count()):
for col in range(self.ctrl.column_count()):
self.assertEqual(
self.ctrl.get_item(row, col).text(), self.texts[row][col])
def testGetItemText(self):
"""Test the ListView get_item method - with text this time"""
for text in [row[0] for row in self.texts]:
self.assertEqual(
self.ctrl.get_item(text).text(), text)
self.assertRaises(ValueError, self.ctrl.get_item, "Item not in this list")
def testColumn(self):
"""Test the ListView columns method"""
cols = self.ctrl.columns()
self.assertEqual(len(cols), self.ctrl.column_count())
# TODO: add more checking of column values
#for col in cols:
# print(col)
def testGetSelectionCount(self):
"""Test the ListView get_selected_count method"""
self.assertEqual(self.ctrl.get_selected_count(), 0)
self.ctrl.get_item(1).select()
self.ctrl.get_item(6).select()
self.assertEqual(self.ctrl.get_selected_count(), 2)
# def testGetSelectionCount(self):
# "Test the ListView get_selected_count method"
#
# self.assertEqual(self.ctrl.get_selected_count(), 0)
#
# self.ctrl.select(1)
# self.ctrl.select(7)
#
# self.assertEqual(self.ctrl.get_selected_count(), 2)
def testIsSelected(self):
"""Test ListView IsSelected for some items"""
# ensure that the item is not selected
self.assertEqual(self.ctrl.get_item(1).is_selected(), False)
# select an item
self.ctrl.get_item(1).select()
# now ensure that the item is selected
self.assertEqual(self.ctrl.get_item(1).is_selected(), True)
def _testFocused(self):
"""Test checking the focus of some ListView items"""
print("Select something quick!!")
time.sleep(3)
#self.ctrl.select(1)
print(self.ctrl.is_focused(0))
print(self.ctrl.is_focused(1))
print(self.ctrl.is_focused(2))
print(self.ctrl.is_focused(3))
print(self.ctrl.is_focused(4))
print(self.ctrl.is_focused(5))
#for col in cols:
# print(col)
def testSelect(self):
"""Test ListView Selecting some items"""
self.ctrl.get_item(1).select()
self.ctrl.get_item(3).select()
self.ctrl.get_item(4).select()
self.assertRaises(IndexError, self.ctrl.get_item(23).select)
self.assertEqual(self.ctrl.get_selected_count(), 3)
def testSelectText(self):
"""Test ListView Selecting some items"""
self.ctrl.get_item(u"Green").select()
self.ctrl.get_item(u"Yellow").select()
self.ctrl.get_item(u"Gray").select()
self.assertRaises(ValueError, self.ctrl.get_item, u"Item not in list")
self.assertEqual(self.ctrl.get_selected_count(), 3)
def testDeselect(self):
"""Test ListView Selecting some items"""
self.ctrl.get_item(1).select()
self.ctrl.get_item(4).select()
self.ctrl.get_item(3).deselect()
self.ctrl.get_item(4).deselect()
self.assertRaises(IndexError, self.ctrl.get_item(23).deselect)
self.assertEqual(self.ctrl.get_selected_count(), 1)
def testGetProperties(self):
"""Test getting the properties for the listview control"""
props = self.ctrl.get_properties()
self.assertEqual(
"ListView", props['friendly_class_name'])
self.assertEqual(
self.ctrl.texts(), props['texts'])
for prop_name in props.keys():
self.assertEqual(getattr(self.ctrl, prop_name)(), props[prop_name])
self.assertEqual(props['column_count'], 8)
self.assertEqual(props['item_count'], 7)
def testGetColumnTexts(self):
"""Test columns titles text"""
self.assertEqual(self.ctrl.get_column(0)['text'], u"Color")
self.assertEqual(self.ctrl.get_column(1)['text'], u"Red")
self.assertEqual(self.ctrl.get_column(2)['text'], u"Green")
self.assertEqual(self.ctrl.get_column(3)['text'], u"Blue")
def testItemRectangles(self):
"""Test getting item rectangles"""
yellow_rect = self.ctrl.get_item_rect('Yellow')
gold_rect = RECT(13, 0, 61, 53)
self.assertEqual(yellow_rect.left, gold_rect.left)
self.assertEqual(yellow_rect.top, gold_rect.top)
self.assertEqual(yellow_rect.right, gold_rect.right)
if yellow_rect.bottom < 53 or yellow_rect.bottom > 55:
self.assertEqual(yellow_rect.bottom, gold_rect.bottom)
self.ctrl.get_item('Green').click(where='text')
self.assertEqual(self.ctrl.get_item('Green').is_selected(), True)
self.ctrl.get_item('Magenta').click(where='icon')
self.assertEqual(self.ctrl.get_item('Magenta').is_selected(), True)
self.assertEqual(self.ctrl.get_item('Green').is_selected(), False)
self.ctrl.get_item('Green').click(where='all')
self.assertEqual(self.ctrl.get_item('Green').is_selected(), True)
self.assertEqual(self.ctrl.get_item('Magenta').is_selected(), False)
def testItemCheck(self):
"""Test checking/unchecking item"""
if not self.dlg.Toolbar.button(6).is_checked():
self.dlg.Toolbar.button(6).click()
yellow = self.ctrl.get_item('Yellow')
yellow.check()
self.assertEqual(yellow.is_checked(), True)
yellow.uncheck()
self.assertEqual(yellow.is_checked(), False)
# test legacy deprecated methods (TODO: remove later)
self.ctrl.get_item('Yellow').check()
self.assertEqual(self.ctrl.get_item('Yellow').is_checked(), True)
self.ctrl.get_item('Yellow').uncheck()
self.assertEqual(self.ctrl.get_item('Yellow').is_checked(), False)
def testItemClick(self):
"""Test clicking item rectangles by click() method"""
self.ctrl.get_item('Green').click(where='select')
self.assertEqual(self.ctrl.get_item('Green').is_selected(), True)
self.ctrl.get_item('Magenta').click(where='select')
self.assertEqual(self.ctrl.get_item('Magenta').is_selected(), True)
self.assertEqual(self.ctrl.get_item('Green').is_selected(), False)
self.assertEqual(self.ctrl.get_item('Green').is_focused(), False)
self.assertEqual(self.ctrl.get_item('Green').state() & win32defines.LVIS_FOCUSED, 0)
self.ctrl.get_item('Green').click(where='select')
self.assertEqual(self.ctrl.get_item('Green').is_selected(), True)
self.assertEqual(self.ctrl.is_selected('Green'), True) # TODO: deprecated method
self.assertEqual(self.ctrl.get_item('Green').is_focused(), True)
self.assertEqual(self.ctrl.is_focused('Green'), True) # TODO: deprecated method
self.assertEqual(self.ctrl.get_item('Magenta').is_selected(), False)
# Test click on checkboxes
if not self.dlg.Toolbar.button(6).is_checked(): # switch on states
self.dlg.Toolbar.button(6).click()
for i in range(1, 6):
self.dlg.Toolbar.button(i - 1).click()
self.ctrl.get_item(i).click(where='check') # check item
time.sleep(0.5)
self.assertEqual(self.ctrl.get_item(i).is_checked(), True)
self.assertEqual(self.ctrl.get_item(i - 1).is_checked(), False)
self.ctrl.get_item(i).click(where='check') # uncheck item
time.sleep(0.5)
self.assertEqual(self.ctrl.get_item(i).is_checked(), False)
self.ctrl.get_item(i).click(where='check') # recheck item
time.sleep(0.5)
self.assertEqual(self.ctrl.get_item(i).is_checked(), True)
self.dlg.Toolbar.button(6).click() # switch off states
self.assertRaises(RuntimeError, self.ctrl.get_item(6).click, where="check")
def testItemClickInput(self):
"""Test clicking item rectangles by click_input() method"""
Timings.defaults()
self.ctrl.get_item('Green').click_input(where='select')
self.assertEqual(self.ctrl.get_item('Green').is_selected(), True)
self.ctrl.get_item('Magenta').click_input(where='select')
self.assertEqual(self.ctrl.get_item('Magenta').is_selected(), True)
self.assertEqual(self.ctrl.get_item('Green').is_selected(), False)
self.assertEqual(self.ctrl.get_item('Green').is_focused(), False)
self.assertEqual(self.ctrl.get_item('Green').state() & win32defines.LVIS_FOCUSED, 0)
self.ctrl.get_item('Green').click_input(where='select')
self.assertEqual(self.ctrl.get_item('Green').is_selected(), True)
self.assertEqual(self.ctrl.is_selected('Green'), True) # TODO: deprecated method
self.assertEqual(self.ctrl.get_item('Green').is_focused(), True)
self.assertEqual(self.ctrl.is_focused('Green'), True) # TODO: deprecated method
self.assertEqual(self.ctrl.get_item('Magenta').is_selected(), False)
# Test click on checkboxes
if not self.dlg.Toolbar.button(6).is_checked(): # switch on states
self.dlg.Toolbar.button(6).click()
for i in range(1, 6):
self.dlg.Toolbar.button(i - 1).click()
self.ctrl.get_item(i).click_input(where='check') # check item
time.sleep(0.5)
self.assertEqual(self.ctrl.get_item(i).is_checked(), True)
self.assertEqual(self.ctrl.get_item(i - 1).is_checked(), False)
self.ctrl.get_item(i).click_input(where='check') # uncheck item
time.sleep(0.5)
self.assertEqual(self.ctrl.get_item(i).is_checked(), False)
self.ctrl.get_item(i).click_input(where='check') # recheck item
time.sleep(0.5)
self.assertEqual(self.ctrl.get_item(i).is_checked(), True)
self.dlg.Toolbar.button(6).click() # switch off states
self.assertRaises(RuntimeError, self.ctrl.get_item(6).click_input, where="check")
def testItemMethods(self):
"""Test short item methods like Text(), State() etc"""
self.assertEqual(self.ctrl.get_item('Green').text(), 'Green')
self.assertEqual(self.ctrl.get_item('Green').image(), 2)
self.assertEqual(self.ctrl.get_item('Green').indent(), 0)
def test_ensure_visible(self):
self.dlg.move_window(width=300)
# Gray is selected by click because ensure_visible() is called inside
self.ctrl.get_item('Gray').click()
self.assertEqual(self.ctrl.get_item('Gray').is_selected(), True)
self.dlg.set_focus() # just in case
self.ctrl.get_item('Green').ensure_visible()
self.ctrl.get_item('Red').click()
self.assertEqual(self.ctrl.get_item('Gray').is_selected(), False)
self.assertEqual(self.ctrl.get_item('Red').is_selected(), True)
#
# def testSubItems(self):
#
# for row in range(self.ctrl.item_count())
#
# for i in self.ctrl.items():
#
# #self.assertEqual(item.Text, texts[i])
def testEqualsItems(self):
"""
Test __eq__ and __ne__ cases for _listview_item.
"""
item1 = self.ctrl.get_item(0, 0)
item1_copy = self.ctrl.get_item(0, 0)
item2 = self.ctrl.get_item(1, 0)
self.assertEqual(item1, item1_copy)
self.assertNotEqual(item1, "Not _listview_item")
self.assertNotEqual(item1, item2)
def test_cells_rectangles(self):
"""Test the ListView get_item rectangle method for cells"""
if not self.dlg.Toolbar.button(4).is_checked():
self.dlg.Toolbar.button(4).click()
for row in range(self.ctrl.item_count() - 1):
for col in range(self.ctrl.column_count() - 1):
self.assertEqual(
self.ctrl.get_item(row, col).rectangle(area="text").right,
self.ctrl.get_item(row, col + 1).rectangle(area="text").left)
self.assertEqual(
self.ctrl.get_item(row, col).rectangle(area="text").bottom,
self.ctrl.get_item(row + 1, col).rectangle(area="text").top)
self.assertEqual(self.ctrl.get_item(1, 2).rectangle(area="text"),
RECT(200, 36, 250, 53))
self.assertEqual(self.ctrl.get_item(3, 4).rectangle(area="text"),
RECT(300, 70, 400, 87))
def test_inplace_control(self):
"""Test the ListView inplace_control method for item"""
# Item is not editable so it will raise timeout error
with self.assertRaises(Exception) as context:
self.ctrl.get_item(0).inplace_control()
self.assertTrue('In-place-edit control for item' in str(context.exception))
if is_x64_Python():
class ListViewTestCases64(ListViewTestCases32):
"""Unit tests for the 64-bit ListViewWrapper on a 32-bit sample"""
path = os.path.join(mfc_samples_folder, u"RowList.exe")
class ListViewWinFormTestCases32(unittest.TestCase):
"""Unit tests for the ListViewWrapper class with WinForm applications"""
path = os.path.join(winforms_folder_32, u"ListView_TestApp.exe")
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
Timings.defaults()
app = Application()
app.start(self.path)
self.dlg = app.ListViewEx
self.ctrl = self.dlg.ListView.find()
def tearDown(self):
"""Close the application after tests"""
self.dlg.send_message(win32defines.WM_CLOSE)
def test_cell_click_input(self):
"""Test the ListView get_item click_input method"""
self.ctrl.get_item(0,2).click_input(double=True, where="text")
self.dlg.type_keys("{ENTER}")
        # To make sure the input is finished, click on another place
self.ctrl.get_item(0,3).click_input(double=False, where="text")
self.assertEqual(str(self.ctrl.get_item(0,2).text()), u"Clicked!")
def test_get_editor_of_datetimepicker(self):
"""Test the ListView inplace_control method using DateTimePicker"""
dt_picker = self.ctrl.get_item(2,0).inplace_control("DateTimePicker")
dt_picker.set_time(year=2017, month=5, day=23)
        cur_time = dt_picker.get_time()
self.assertEqual(cur_time.wYear, 2017)
self.assertEqual(cur_time.wMonth, 5)
self.assertEqual(cur_time.wDay, 23)
def test_get_editor_of_combobox(self):
"""Test the ListView inplace_control method using ComboBox"""
combo_box = self.ctrl.get_item(1,1).inplace_control("ComboBox")
combo_box.select(combo_box.selected_index() - 1)
self.assertEqual(combo_box.selected_index(), 2)
def test_get_editor_of_editwrapper(self):
"""Test the ListView inplace_control method using EditWrapper"""
dt_picker = self.ctrl.get_item(3,4).inplace_control("Edit")
dt_picker.set_text("201")
self.assertEqual(dt_picker.text_block(), u"201")
def test_get_editor_wrong_args(self):
"""Test the ListView inplace_control case when used wrong friendly class name"""
with self.assertRaises(Exception) as context:
self.ctrl.get_item(1,1).inplace_control("Edit")
self.assertTrue('In-place-edit control "Edit"' in str(context.exception))
def test_automation_id_by_win32(self):
list_view = self.dlg.by(auto_id="listViewEx1").wait('visible')
self.assertEqual(list_view.automation_id(), "listViewEx1")
check_box = self.dlg.by(auto_id="checkBoxDoubleClickActivation").wait('visible')
self.assertEqual(check_box.automation_id(), "checkBoxDoubleClickActivation")
check_box = self.dlg.checkBoxDoubleClickActivation.wait('visible')
self.assertEqual(check_box.automation_id(), "checkBoxDoubleClickActivation")
def test_win32_control_type(self):
list_view = self.dlg.by(control_type="ListViewEx.ListViewEx").wait('visible')
self.assertEqual(list_view.control_type(), "ListViewEx.ListViewEx")
self.assertEqual(list_view.full_control_type(),
"ListViewEx.ListViewEx, ListViewEx, Version=1.0.6520.42612, " \
"Culture=neutral, PublicKeyToken=null")
check_box = self.dlg.by(control_type="System.Windows.Forms.CheckBox").wait('visible')
self.assertEqual(check_box.control_type(), "System.Windows.Forms.CheckBox")
self.assertEqual(check_box.full_control_type(),
"System.Windows.Forms.CheckBox, System.Windows.Forms, " \
"Version=2.0.0.0, Culture=neutral, PublicKeyToken=b77a5c561934e089")
if is_x64_Python():
class ListViewWinFormTestCases64(ListViewWinFormTestCases32):
"""Unit tests for the 64-bit ListViewWrapper on a 32-bit sample"""
path = os.path.join(winforms_folder, u"ListView_TestApp.exe")
class TreeViewTestCases32(unittest.TestCase):
"""Unit tests for the TreeViewWrapper class"""
path = os.path.join(controlspy_folder_32, "Tree View.exe")
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
Timings.fast()
self.root_text = "The Planets"
self.texts = [
("Mercury", '57,910,000', '4,880', '3.30e23'),
("Venus", '108,200,000', '12,103.6', '4.869e24'),
("Earth", '149,600,000', '12,756.3', '5.9736e24'),
("Mars", '227,940,000', '6,794', '6.4219e23'),
("Jupiter", '778,330,000', '142,984', '1.900e27'),
("Saturn", '1,429,400,000', '120,536', '5.68e26'),
("Uranus", '2,870,990,000', '51,118', '8.683e25'),
("Neptune", '4,504,000,000', '49,532', '1.0247e26'),
("Pluto", '5,913,520,000', '2,274', '1.27e22'),
]
self.app = Application()
self.app.start(self.path)
self.dlg = self.app.MicrosoftControlSpy
self.ctrl = self.app.MicrosoftControlSpy.TreeView.find()
def tearDown(self):
"""Close the application after tests"""
self.dlg.send_message(win32defines.WM_CLOSE)
def test_friendly_class_name(self):
"""Make sure the friendly class name is set correctly (TreeView)"""
self.assertEqual(self.ctrl.friendly_class_name(), "TreeView")
def testItemCount(self):
"""Test the TreeView ItemCount method"""
self.assertEqual(self.ctrl.item_count(), 37)
def testGetItem(self):
"""Test the get_item method"""
        self.assertRaises(RuntimeError, self.ctrl.get_item, r"test\here\please")
self.assertRaises(IndexError, self.ctrl.get_item, r"\test\here\please")
self.assertEqual(
self.ctrl.get_item((0, 1, 2)).text(), self.texts[1][3] + " kg")
self.assertEqual(
self.ctrl.get_item(r"\The Planets\Venus\4.869e24 kg", exact=True).text(), self.texts[1][3] + " kg")
self.assertEqual(
self.ctrl.get_item(["The Planets", "Venus", "4.869"]).text(),
self.texts[1][3] + " kg"
)
def testItemText(self):
"""Test the TreeView item Text() method"""
self.assertEqual(self.ctrl.tree_root().text(), self.root_text)
self.assertEqual(
self.ctrl.get_item((0, 1, 2)).text(), self.texts[1][3] + " kg")
def testSelect(self):
"""Test selecting an item"""
self.ctrl.select((0, 1, 2))
self.ctrl.get_item((0, 1, 2)).state()
self.assertEqual(True, self.ctrl.is_selected((0, 1, 2)))
def testEnsureVisible(self):
"""make sure that the item is visible"""
# TODO: note this is partially a fake test at the moment because
# just by getting an item - we usually make it visible
self.ctrl.ensure_visible((0, 8, 2))
# make sure that the item is not hidden
self.assertNotEqual(None, self.ctrl.get_item((0, 8, 2)).client_rect())
def testGetProperties(self):
"""Test getting the properties for the treeview control"""
props = self.ctrl.get_properties()
self.assertEqual(
"TreeView", props['friendly_class_name'])
self.assertEqual(
self.ctrl.texts(), props['texts'])
for prop_name in props:
self.assertEqual(getattr(self.ctrl, prop_name)(), props[prop_name])
def testItemsClick(self):
"""Test clicking of items and sub-items in the treeview control"""
planets_item_path = (0, 0)
mercury_diam_item_path = (0, 0, 1)
mars_dist_item_path = (0, 3, 0)
itm = self.ctrl.get_item(planets_item_path)
itm.ensure_visible()
time.sleep(1)
itm.click(button='left')
self.assertEqual(True, self.ctrl.is_selected(planets_item_path))
itm = self.ctrl.get_item(mars_dist_item_path)
itm.ensure_visible()
time.sleep(1)
itm.click(button='left')
self.assertEqual(True, self.ctrl.is_selected(mars_dist_item_path))
itm = self.ctrl.get_item(mercury_diam_item_path)
itm.ensure_visible()
time.sleep(1)
itm.click(button='left')
self.assertEqual(True, self.ctrl.is_selected(mercury_diam_item_path))
self.assertEqual(False, self.ctrl.is_selected(mars_dist_item_path))
itm = self.ctrl.get_item(planets_item_path)
itm.ensure_visible()
time.sleep(1)
itm.click(button='left')
self.assertEqual(True, self.ctrl.is_selected(planets_item_path))
self.assertEqual(False, self.ctrl.is_selected(mercury_diam_item_path))
if is_x64_Python():
class TreeViewTestCases64(TreeViewTestCases32):
"""Unit tests for the 64-bit TreeViewWrapper on a 32-bit sample"""
path = os.path.join(controlspy_folder, "Tree View.exe")
class TreeViewAdditionalTestCases(unittest.TestCase):
"""More unit tests for the TreeViewWrapper class (CmnCtrl1.exe)"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
Timings.fast()
self.app = Application().start(os.path.join(mfc_samples_folder, "CmnCtrl1.exe"))
self.dlg = self.app.CommonControlsSample
self.ctrl = self.app.CommonControlsSample.TreeView.find()
self.app.wait_cpu_usage_lower(threshold=1.5, timeout=30, usage_interval=1)
def tearDown(self):
"""Close the application after tests"""
self.dlg.send_message(win32defines.WM_CLOSE)
self.app.kill()
def testCheckBoxes(self):
"""Make sure tree view item method is_checked() works as expected"""
self.dlg.set_focus()
self.dlg.TVS_CHECKBOXES.uncheck_by_click()
self.dlg.TVS_CHECKBOXES.check_by_click()
birds = self.ctrl.get_item(r'\Birds')
self.ctrl.set_focus() # to make sure focus is not lost by any accident event
birds.click(where='check')
self.assertEqual(birds.is_checked(), True)
birds.click_input(where='check')
wait_until(3, 0.4, birds.is_checked, value=False)
def testPrintItems(self):
"""Test TreeView method print_items()"""
birds = self.ctrl.get_item(r'\Birds')
birds.expand()
items_str = self.ctrl.print_items()
self.assertEqual(items_str, "Treeview1\nBirds\n Eagle\n Hummingbird\n Pigeon\n" +
"Dogs\n Dalmatian\n German Shepherd\n Great Dane\n" +
"Fish\n Salmon\n Snapper\n Sole\n")
def testIsSelected(self):
"""Make sure tree view item method IsSelected() works as expected"""
birds = self.ctrl.get_item(r'\Birds')
birds.expand()
eagle = self.ctrl.get_item(r'\Birds\Eagle')
eagle.select()
self.assertEqual(eagle.is_selected(), True)
def test_expand_collapse(self):
"""Make sure tree view item methods Expand() and Collapse() work as expected"""
birds = self.ctrl.get_item(r'\Birds')
birds.expand()
self.assertEqual(birds.is_expanded(), True)
birds.collapse()
self.assertEqual(birds.is_expanded(), False)
def test_expand_collapse_buttons(self):
"""Make sure correct area is clicked"""
self.dlg.TVS_HASBUTTONS.click_input()
self.dlg.TVS_HASLINES.click_input()
self.dlg.TVS_LINESATROOT.click_input()
birds = self.ctrl.get_item(r'\Birds')
birds.click(where='button')
self.assertEqual(birds.is_expanded(), True)
birds.click(double=True, where='icon')
self.assertEqual(birds.is_expanded(), False)
birds.click_input(where='button')
self.assertEqual(birds.is_expanded(), True)
time.sleep(win32gui.GetDoubleClickTime() * 2.0 / 1000)
birds.click_input(double=True, where='icon')
self.assertEqual(birds.is_expanded(), False)
def testIncorrectAreas(self):
"""Make sure incorrect area raises an exception"""
birds = self.ctrl.get_item(r'\Birds')
self.assertRaises(RuntimeError, birds.click, where='radiob')
self.assertRaises(RuntimeError, birds.click_input, where='radiob')
def testStartDraggingAndDrop(self):
"""Make sure tree view item methods StartDragging() and drop() work as expected"""
birds = self.ctrl.get_item(r'\Birds')
birds.expand()
pigeon = self.ctrl.get_item(r'\Birds\Pigeon')
pigeon.start_dragging()
eagle = self.ctrl.get_item(r'\Birds\Eagle')
eagle.drop()
self.assertRaises(IndexError, birds.get_child, 'Pigeon')
self.assertRaises(IndexError, self.ctrl.get_item, r'\Birds\Pigeon')
self.assertRaises(IndexError, self.ctrl.get_item, [0, 2])
self.assertRaises(IndexError, self.ctrl.get_item, r'\Bread', exact=True)
new_pigeon = self.ctrl.get_item(r'\Birds\Eagle\Pigeon')
self.assertEqual(len(birds.children()), 2)
self.assertEqual(new_pigeon.children(), [])
class HeaderTestCases(unittest.TestCase):
"""Unit tests for the Header class"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
Timings.fast()
app = Application()
app.start(os.path.join(mfc_samples_folder, "RowList.exe"), timeout=20)
self.texts = [u'Color', u'Red', u'Green', u'Blue', u'Hue', u'Sat', u'Lum', u'Type']
self.item_rects = [
RECT(000, 0, 150, 19),
RECT(150, 0, 200, 19),
RECT(200, 0, 250, 19),
RECT(250, 0, 300, 19),
RECT(300, 0, 400, 19),
RECT(400, 0, 450, 19),
RECT(450, 0, 500, 19),
RECT(500, 0, 650, 19)]
self.app = app
self.dlg = app.RowListSampleApplication
self.ctrl = app.RowListSampleApplication.Header.find()
def tearDown(self):
"""Close the application after tests"""
# close the application
self.dlg.send_message(win32defines.WM_CLOSE)
def testFriendlyClass(self):
"""Make sure the friendly class is set correctly (Header)"""
self.assertEqual(self.ctrl.friendly_class_name(), "Header")
def testTexts(self):
"""Make sure the texts are set correctly"""
self.assertEqual(self.ctrl.texts()[1:], self.texts)
def testGetProperties(self):
"""Test getting the properties for the header control"""
props = self.ctrl.get_properties()
self.assertEqual(
self.ctrl.friendly_class_name(), props['friendly_class_name'])
self.assertEqual(
self.ctrl.texts(), props['texts'])
for prop_name in props:
self.assertEqual(getattr(self.ctrl, prop_name)(), props[prop_name])
def testItemCount(self):
self.assertEqual(8, self.ctrl.item_count())
def testGetColumnRectangle(self):
for i in range(0, 3):
self.assertEqual(self.item_rects[i].left, self.ctrl.get_column_rectangle(i).left)
self.assertEqual(self.item_rects[i].right, self.ctrl.get_column_rectangle(i).right)
self.assertEqual(self.item_rects[i].top, self.ctrl.get_column_rectangle(i).top)
self.assertFalse(abs(self.item_rects[i].bottom - self.ctrl.get_column_rectangle(i).bottom) > 2)
def testClientRects(self):
test_rects = self.item_rects
test_rects.insert(0, self.ctrl.client_rect())
client_rects = self.ctrl.client_rects()
self.assertEqual(len(test_rects), len(client_rects))
for i, r in enumerate(test_rects):
self.assertEqual(r.left, client_rects[i].left)
self.assertEqual(r.right, client_rects[i].right)
self.assertEqual(r.top, client_rects[i].top)
self.assertFalse(abs(r.bottom - client_rects[i].bottom) > 2) # may be equal to 17 or 19
def testGetColumnText(self):
for i in range(0, 3):
self.assertEqual(
self.texts[i],
self.ctrl.get_column_text(i))
class StatusBarTestCases(unittest.TestCase):
"""Unit tests for the TreeViewWrapper class"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
Timings.fast()
app = Application()
app.start(os.path.join(controlspy_folder, "Status bar.exe"))
self.texts = ["Long text", "", "Status Bar"]
self.part_rects = [
RECT(0, 2, 65, 22),
RECT(67, 2, 90, 22),
RECT(92, 2, 261, 22)]
self.app = app
self.dlg = app.MicrosoftControlSpy
self.ctrl = app.MicrosoftControlSpy.StatusBar.find()
def tearDown(self):
"""Close the application after tests"""
self.dlg.send_message(win32defines.WM_CLOSE)
def test_friendly_class_name(self):
"""Make sure the friendly class name is set correctly (StatusBar)"""
self.assertEqual(self.ctrl.friendly_class_name(), "StatusBar")
def test_texts(self):
"""Make sure the texts are set correctly"""
self.assertEqual(self.ctrl.texts()[1:], self.texts)
def testGetProperties(self):
"""Test getting the properties for the status bar control"""
props = self.ctrl.get_properties()
self.assertEqual(
self.ctrl.friendly_class_name(), props['friendly_class_name'])
self.assertEqual(
self.ctrl.texts(), props['texts'])
for prop_name in props:
self.assertEqual(getattr(self.ctrl, prop_name)(), props[prop_name])
def testBorderWidths(self):
"""Make sure the border widths are retrieved correctly"""
self.assertEqual(
self.ctrl.border_widths(),
dict(
Horizontal=0,
Vertical=2,
Inter=2,
)
)
def testPartCount(self):
"Make sure the number of parts is retrieved correctly"
self.assertEqual(self.ctrl.part_count(), 3)
def testPartRightEdges(self):
"Make sure the part widths are retrieved correctly"
for i in range(0, self.ctrl.part_count() - 1):
self.assertEqual(self.ctrl.part_right_edges()[i], self.part_rects[i].right)
self.assertEqual(self.ctrl.part_right_edges()[i + 1], -1)
def testGetPartRect(self):
"Make sure the part rectangles are retrieved correctly"
for i in range(0, self.ctrl.part_count()):
part_rect = self.ctrl.get_part_rect(i)
self.assertEqual(part_rect.left, self.part_rects[i].left)
if i != self.ctrl.part_count() - 1:
self.assertEqual(part_rect.right, self.part_rects[i].right)
self.assertEqual(part_rect.top, self.part_rects[i].top)
self.assertFalse(abs(part_rect.bottom - self.part_rects[i].bottom) > 2)
self.assertRaises(IndexError, self.ctrl.get_part_rect, 99)
def testClientRects(self):
self.assertEqual(self.ctrl.client_rect(), self.ctrl.client_rects()[0])
client_rects = self.ctrl.client_rects()[1:]
for i, client_rect in enumerate(client_rects):
self.assertEqual(self.part_rects[i].left, client_rect.left)
if i != len(client_rects) - 1:
self.assertEqual(self.part_rects[i].right, client_rect.right)
self.assertEqual(self.part_rects[i].top, client_rect.top)
self.assertFalse(abs(self.part_rects[i].bottom - client_rect.bottom) > 2)
def testGetPartText(self):
self.assertRaises(IndexError, self.ctrl.get_part_text, 99)
for i, text in enumerate(self.texts):
self.assertEqual(text, self.ctrl.get_part_text(i))
class TabControlTestCases(unittest.TestCase):
"""Unit tests for the TreeViewWrapper class"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
Timings.fast()
self.screen_w = win32api.GetSystemMetrics(0)
app = Application()
app.start(os.path.join(mfc_samples_folder, "CmnCtrl1.exe"))
self.texts = [
u"CTreeCtrl", u"CAnimateCtrl", u"CToolBarCtrl",
u"CDateTimeCtrl", u"CMonthCalCtrl"]
self.rects = [
RECT(2, 2, 58, 20),
RECT(58, 2, 130, 20),
RECT(130, 2, 201, 20),
RECT(201, 2, 281, 20),
RECT(281, 2, 360, 20)
]
self.app = app
self.dlg = app.CommonControlsSample
self.ctrl = app.CommonControlsSample.TabControl.find()
def tearDown(self):
"""Close the application after tests"""
# close the application
self.dlg.send_message(win32defines.WM_CLOSE)
def testFriendlyClass(self):
"""Make sure the friendly class is set correctly (TabControl)"""
self.assertEqual(self.ctrl.friendly_class_name(), "TabControl")
def testTexts(self):
"""Make sure the texts are set correctly"""
self.assertEqual(self.ctrl.texts()[1:], self.texts)
def testGetProperties(self):
"""Test getting the properties for the tabcontrol"""
props = self.ctrl.get_properties()
self.assertEqual(
self.ctrl.friendly_class_name(), props['friendly_class_name'])
self.assertEqual(
self.ctrl.texts(), props['texts'])
for prop_name in props:
self.assertEqual(getattr(self.ctrl, prop_name)(), props[prop_name])
def testRowCount(self):
self.assertEqual(1, self.ctrl.row_count())
dlgClientRect = self.ctrl.parent().rectangle() # use the parent as a reference
prev_rect = self.ctrl.rectangle() - dlgClientRect
# squeeze the tab control to force two rows
new_rect = RECT(prev_rect)
new_rect.right = int(new_rect.width() / 2)
self.ctrl.move_window(
new_rect.left,
new_rect.top,
new_rect.width(),
new_rect.height(),
)
time.sleep(0.1)
# verify two tab rows
self.assertEqual(2, self.ctrl.row_count())
# restore back the original size of the control
self.ctrl.move_window(prev_rect)
self.assertEqual(1, self.ctrl.row_count())
def testGetSelectedTab(self):
self.assertEqual(0, self.ctrl.get_selected_tab())
self.ctrl.select(1)
self.assertEqual(1, self.ctrl.get_selected_tab())
self.ctrl.select(u"CMonthCalCtrl")
self.assertEqual(4, self.ctrl.get_selected_tab())
def testTabCount(self):
"""Make sure the number of parts is retrieved correctly"""
self.assertEqual(self.ctrl.tab_count(), 5)
def testGetTabRect(self):
"""Make sure the part rectangles are retrieved correctly"""
for i, _ in enumerate(self.rects):
self.assertEqual(self.ctrl.get_tab_rect(i), self.rects[i])
self.assertRaises(IndexError, self.ctrl.get_tab_rect, 99)
# def testGetTabState(self):
# self.assertRaises(IndexError, self.ctrl.GetTabState, 99)
#
# self.dlg.StatementEdit.SetEditText ("MSG (TCM_HIGHLIGHTITEM,1,MAKELONG(TRUE,0))")
#
# time.sleep(.3)
# # use CloseClick to allow the control time to respond to the message
# self.dlg.Send.CloseClick()
# time.sleep(2)
# print("==\n",self.ctrl.TabStates())
#
# self.assertEqual(self.ctrl.GetTabState(1), 1)
#
# def testTabStates(self):
# print(self.ctrl.TabStates())
# raise "tabstates hiay"
def testGetTabText(self):
for i, text in enumerate(self.texts):
self.assertEqual(text, self.ctrl.get_tab_text(i))
self.assertRaises(IndexError, self.ctrl.get_tab_text, 99)
def testClientRects(self):
self.assertEqual(self.ctrl.client_rect(), self.ctrl.client_rects()[0])
self.assertEqual(self.rects, self.ctrl.client_rects()[1:])
def testSelect(self):
self.assertEqual(0, self.ctrl.get_selected_tab())
self.ctrl.select(1)
self.assertEqual(1, self.ctrl.get_selected_tab())
self.ctrl.select(u"CToolBarCtrl")
self.assertEqual(2, self.ctrl.get_selected_tab())
self.assertRaises(IndexError, self.ctrl.select, 99)
class ToolbarTestCases(unittest.TestCase):
"""Unit tests for the ToolbarWrapper class"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
Timings.fast()
app = Application()
app.start(os.path.join(mfc_samples_folder, "CmnCtrl1.exe"))
self.app = app
self.dlg = app.CommonControlsSample
# select a tab with toolbar controls
self.dlg.SysTabControl.select(u"CToolBarCtrl")
# see identifiers available at that tab
#self.dlg.PrintControlIdentifiers()
        # The sample app has two toolbars. The first toolbar can be
        # addressed as Toolbar, Toolbar0 or Toolbar1.
        # The second one can be addressed as Toolbar2.
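        # (Illustrative, following the note above: a best-match lookup such
        # as app.CommonControlsSample.Toolbar1.find() should resolve to the
        # same control as ToolbarNew below, and Toolbar2.find() to the same
        # one as ToolbarErase.)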
self.ctrl = app.CommonControlsSample.ToolbarNew.find()
self.ctrl2 = app.CommonControlsSample.ToolbarErase.find()
def tearDown(self):
"""Close the application after tests"""
# close the application
self.dlg.send_message(win32defines.WM_CLOSE)
def testFriendlyClass(self):
"""Make sure the friendly class is set correctly (Toolbar)"""
self.assertEqual(self.ctrl.friendly_class_name(), "Toolbar")
def testTexts(self):
"""Make sure the texts are set correctly"""
for txt in self.ctrl.texts():
self.assertEqual(isinstance(txt, six.string_types), True)
def testGetProperties(self):
"""Test getting the properties for the toolbar control"""
props = self.ctrl.get_properties()
self.assertEqual(
self.ctrl.friendly_class_name(), props['friendly_class_name'])
self.assertEqual(
self.ctrl.texts(), props['texts'])
self.assertEqual(
self.ctrl.button_count(), props['button_count'])
for prop_name in props:
self.assertEqual(getattr(self.ctrl, prop_name)(), props[prop_name])
def testButtonCount(self):
"""Test the button count method of the toolbar"""
        # TODO: for some reason the first toolbar returns button count = 12
# The same as in the second toolbar, even though their handles are different.
# Maybe the test app itself has to be fixed too.
#self.assertEqual(self.ctrl.button_count(), 9)
self.assertEqual(self.ctrl2.button_count(), 12)
def testGetButton(self):
self.assertRaises(IndexError, self.ctrl.get_button, 29)
def testGetButtonRect(self):
rect_ctrl = self.ctrl.get_button_rect(0)
self.assertEqual((rect_ctrl.left, rect_ctrl.top), (0, 0))
self.assertFalse((rect_ctrl.right - rect_ctrl.left) > 40)
self.assertFalse((rect_ctrl.right - rect_ctrl.left) < 36)
self.assertFalse((rect_ctrl.bottom - rect_ctrl.top) > 38)
self.assertFalse((rect_ctrl.bottom - rect_ctrl.top) < 36)
#self.assertEqual(rect_ctrl, RECT(0, 0, 40, 38))
rect_ctrl2 = self.ctrl2.get_button_rect(0)
self.assertEqual((rect_ctrl2.left, rect_ctrl2.top), (0, 0))
self.assertFalse((rect_ctrl2.right - rect_ctrl2.left) > 70)
self.assertFalse((rect_ctrl2.right - rect_ctrl2.left) < 64)
self.assertFalse((rect_ctrl2.bottom - rect_ctrl2.top) > 38)
self.assertFalse((rect_ctrl2.bottom - rect_ctrl2.top) < 36)
#self.assertEqual(rect_ctrl2, RECT(0, 0, 70, 38))
def testGetToolTipsControls(self):
tips = self.ctrl.get_tool_tips_control()
tt = tips.texts()
self.assertEqual(u"New" in tt, True)
self.assertEqual(u"About" in tt, True)
tips = self.ctrl2.get_tool_tips_control()
tt = tips.texts()
self.assertEqual(u"Pencil" in tt, True)
self.assertEqual(u"Ellipse" in tt, True)
def testPressButton(self):
self.ctrl.press_button(0)
#print(self.ctrl.texts())
self.assertRaises(
findbestmatch.MatchError,
self.ctrl.press_button,
"asdfdasfasdf")
# todo more tests for pressbutton
self.ctrl.press_button(u"Open")
def testCheckButton(self):
self.ctrl2.check_button('Erase', True)
self.assertEqual(self.ctrl2.button('Erase').is_checked(), True)
self.ctrl2.check_button('Pencil', True)
self.assertEqual(self.ctrl2.button('Erase').is_checked(), False)
self.ctrl2.check_button('Erase', False)
self.assertEqual(self.ctrl2.button('Erase').is_checked(), False)
# try to check separator
self.assertRaises(RuntimeError, self.ctrl.check_button, 3, True)
def testIsCheckable(self):
self.assertNotEqual(self.ctrl2.button('Erase').is_checkable(), False)
self.assertEqual(self.ctrl.button('New').is_checkable(), False)
def testIsPressable(self):
self.assertEqual(self.ctrl.button('New').is_pressable(), True)
def testButtonByTooltip(self):
self.assertEqual(self.ctrl.button('New', by_tooltip=True).text(), 'New')
self.assertEqual(self.ctrl.button('About', exact=False, by_tooltip=True).text(), 'About')
class RebarTestCases(unittest.TestCase):
"""Unit tests for the UpDownWrapper class"""
def setUp(self):
"""Start the application, set some data and wait for the state we want
        The app title can be tricky. If no document is opened, the title is
        just "RebarTest". However, if a document is created/opened in the
        child frame, the title is appended with the document name:
        "RebarTest - RebarTest1". A findbestmatch procedure does well here at
        guessing the title even though the app is started with the short
        title "RebarTest".
"""
Timings.fast()
app = Application()
app.start(os.path.join(mfc_samples_folder, "RebarTest.exe"))
mouse.move((-500, 200)) # remove the mouse from the screen to avoid side effects
self.app = app
self.dlg = app.RebarTest_RebarTest
self.dlg.wait('ready', 20)
self.ctrl = app.RebarTest_RebarTest.Rebar.find()
def tearDown(self):
"""Close the application after tests"""
self.app.kill(soft=True)
def testFriendlyClass(self):
"""Make sure the friendly class is set correctly (ReBar)"""
self.assertEqual(self.ctrl.friendly_class_name(), "ReBar")
def testTexts(self):
"""Make sure the texts are set correctly"""
for txt in self.ctrl.texts():
self.assertEqual(isinstance(txt, six.string_types), True)
def testBandCount(self):
"""Make sure band_count() returns 2"""
self.assertEqual(self.ctrl.band_count(), 2)
def testGetBand(self):
"""Check that get_band() is working corectly"""
self.assertRaises(IndexError, self.ctrl.get_band, 99)
self.assertRaises(IndexError, self.ctrl.get_band, 2)
band = self.ctrl.get_band(0)
self.assertEqual(band.hwndChild, self.dlg.MenuBar.handle)
self.assertEqual(self.ctrl.get_band(1).text, u"Tools band:")
self.assertEqual(self.ctrl.get_band(0).text, u"Menus band:")
def testGetToolTipsControl(self):
"""Make sure GetToolTipsControl() returns None"""
self.assertEqual(self.ctrl.get_tool_tips_control(), None)
def testAfxToolBarButtons(self):
"""Make sure we can click on Afx ToolBar button by index"""
Timings.closeclick_dialog_close_wait = 2.
self.dlg.StandardToolbar.button(1).click()
self.app.window(name='Open').wait('ready', timeout=30)
self.app.window(name='Open').Cancel.close_click()
def testMenuBarClickInput(self):
"""Make sure we can click on Menu Bar items by indexed path"""
self.assertRaises(TypeError, self.dlg.MenuBar.menu_bar_click_input, '#one->#0', self.app)
self.dlg.MenuBar.menu_bar_click_input('#1->#0->#0', self.app)
self.app.Customize.CloseButton.click()
self.app.Customize.wait_not('visible')
self.dlg.MenuBar.menu_bar_click_input([2, 0], self.app)
self.app.window(name='About RebarTest').OK.click()
self.app.window(name='About RebarTest').wait_not('visible')
class DatetimeTestCases(unittest.TestCase):
"""Unit tests for the DateTimePicker class"""
def setUp(self):
"""Start the application and get 'Date Time Picker' control"""
Timings.fast()
app = Application()
app.start(os.path.join(mfc_samples_folder, "CmnCtrl1.exe"))
self.app = app
self.dlg = app.CommonControlsSample
self.dlg.wait('ready', 20)
tab = app.CommonControlsSample.TabControl.find()
tab.select(3)
self.ctrl = self.dlg.DateTimePicker
def tearDown(self):
"""Close the application after tests"""
# close the application
self.dlg.send_message(win32defines.WM_CLOSE)
def testFriendlyClass(self):
"""Make sure the friendly class is set correctly (DateTimePicker)"""
self.assertEqual(self.ctrl.friendly_class_name(), "DateTimePicker")
def testGetTime(self):
"""Test reading a date from a 'Date Time Picker' control"""
# No check for seconds and milliseconds as it can slip
# These values are verified in the next 'testSetTime'
test_date_time = self.ctrl.get_time()
date_time_now = datetime.now()
self.assertEqual(test_date_time.wYear, date_time_now.year)
self.assertEqual(test_date_time.wMonth, date_time_now.month)
self.assertEqual(test_date_time.wDay, date_time_now.day)
self.assertEqual(test_date_time.wHour, date_time_now.hour)
self.assertEqual(test_date_time.wMinute, date_time_now.minute)
def testSetTime(self):
"""Test setting a date to a 'Date Time Picker' control"""
year = 2025
month = 9
day_of_week = 5
day = 19
hour = 1
minute = 2
second = 3
milliseconds = 781
self.ctrl.set_time(
year=year,
month=month,
day_of_week=day_of_week,
day=day,
hour=hour,
minute=minute,
second=second,
milliseconds=milliseconds
)
        # Retrieve back the values we set
test_date_time = self.ctrl.get_time()
self.assertEqual(test_date_time.wYear, year)
self.assertEqual(test_date_time.wMonth, month)
self.assertEqual(test_date_time.wDay, day)
self.assertEqual(test_date_time.wDayOfWeek, day_of_week)
self.assertEqual(test_date_time.wHour, hour)
self.assertEqual(test_date_time.wMinute, minute)
self.assertEqual(test_date_time.wSecond, second)
self.assertEqual(test_date_time.wMilliseconds, milliseconds)
class ToolTipsTestCases(unittest.TestCase):
"""Unit tests for the tooltips class"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
Timings.fast()
self.texts = [u'', u'New', u'Open', u'Save', u'Cut', u'Copy', u'Paste', u'Print', u'About', u'Help']
app = Application()
app.start(os.path.join(mfc_samples_folder, "CmnCtrl1.exe"))
#app.start_(os.path.join(controlspy_folder, "Tooltip.exe"))
self.app = app
self.dlg = app.Common_Controls_Sample
# Make sure the mouse doesn't hover over tested controls
# so it won't generate an unexpected tooltip
self.dlg.move_mouse_input(coords=(-100, -100), absolute=True)
self.dlg.TabControl.select(u'CToolBarCtrl')
self.ctrl = self.dlg.Toolbar.get_tool_tips_control()
def tearDown(self):
"""Close the application after tests"""
# close the application
self.app.kill()
def testFriendlyClass(self):
"""Make sure the friendly class is set correctly (ToolTips)"""
self.assertEqual(self.ctrl.friendly_class_name(), "ToolTips")
def testGetProperties(self):
"""Test getting the properties for the tooltips control"""
props = self.ctrl.get_properties()
self.assertEqual(
self.ctrl.friendly_class_name(), props['friendly_class_name'])
self.assertEqual(
self.ctrl.texts(), props['texts'])
for prop_name in props:
self.assertEqual(getattr(self.ctrl, prop_name)(), props[prop_name])
def test_get_tip(self):
"""Test that get_tip() returns correct ToolTip object"""
self.assertRaises(IndexError, self.ctrl.get_tip, 99)
tip = self.ctrl.get_tip(1)
self.assertEqual(tip.text, self.texts[1])
def test_tool_count(self):
"""Test that tool_count() returns correct value"""
self.assertEqual(10, self.ctrl.tool_count())
def test_get_tip_text(self):
"""Test that get_tip_text() returns correct text"""
self.assertEqual(self.texts[1], self.ctrl.get_tip_text(1))
def test_texts(self):
"""Make sure the texts are set correctly"""
# just to make sure a tooltip is not shown
self.dlg.move_mouse_input(coords=(0, 0), absolute=False)
ActionLogger().log('ToolTips texts = ' + ';'.join(self.ctrl.texts()))
self.assertEqual(self.ctrl.texts()[0], '')
self.assertEqual(self.ctrl.texts()[1:], self.texts)
class UpDownTestCases(unittest.TestCase):
"""Unit tests for the UpDownWrapper class"""
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
Timings.fast()
app = Application()
app.start(os.path.join(controlspy_folder, "Up-Down.exe"))
self.app = app
self.dlg = app.MicrosoftControlSpy
self.ctrl = app.MicrosoftControlSpy.UpDown2.find()
def tearDown(self):
"""Close the application after tests"""
# close the application
self.dlg.send_message(win32defines.WM_CLOSE)
def testFriendlyClass(self):
"""Make sure the friendly class is set correctly (UpDown)"""
self.assertEqual(self.ctrl.friendly_class_name(), "UpDown")
def testTexts(self):
"""Make sure the texts are set correctly"""
self.assertEqual(self.ctrl.texts()[1:], [])
def testGetProperties(self):
"""Test getting the properties for the updown control"""
props = self.ctrl.get_properties()
self.assertEqual(
self.ctrl.friendly_class_name(), props['friendly_class_name'])
self.assertEqual(
self.ctrl.texts(), props['texts'])
for prop_name in props:
self.assertEqual(getattr(self.ctrl, prop_name)(), props[prop_name])
def testGetValue(self):
"""Test getting up-down position"""
self.assertEqual(self.ctrl.get_value(), 0)
self.ctrl.set_value(23)
self.assertEqual(self.ctrl.get_value(), 23)
def testSetValue(self):
"""Test setting up-down position"""
self.assertEqual(self.ctrl.get_value(), 0)
self.ctrl.set_value(23)
self.assertEqual(self.ctrl.get_value(), 23)
self.assertEqual(
int(self.ctrl.get_buddy_control().texts()[1]),
23)
def testGetBase(self):
"""Test getting the base of the up-down control"""
self.assertEqual(self.ctrl.get_base(), 10)
#self.dlg.StatementEdit.SetEditText ("MSG (UDM_SETBASE, 16, 0)")
# use CloseClick to allow the control time to respond to the message
#self.dlg.Send.click_input()
self.ctrl.set_base(16)
self.assertEqual(self.ctrl.get_base(), 16)
def testGetRange(self):
"""Test getting the range of the up-down control"""
self.assertEqual((0, 9999), self.ctrl.get_range())
def testGetBuddy(self):
"""Test getting the buddy control"""
self.assertEqual(self.ctrl.get_buddy_control().handle, self.dlg.Edit6.handle)
def testIncrement(self):
"""Test incremementing up-down position"""
Timings.defaults()
self.ctrl.increment()
self.assertEqual(self.ctrl.get_value(), 1)
def testDecrement(self):
"""Test decrementing up-down position"""
Timings.defaults()
self.ctrl.set_value(23)
self.ctrl.decrement()
self.assertEqual(self.ctrl.get_value(), 22)
class TrackbarWrapperTestCases(unittest.TestCase):
def setUp(self):
"""Set some data and ensure the application is in the state we want"""
app = Application()
app.start(os.path.join(mfc_samples_folder, u"CmnCtrl2.exe"))
dlg = app.top_window()
dlg.TabControl.select(1)
ctrl = dlg.Trackbar.find()
self.app = app
self.dlg = dlg
self.ctrl = ctrl
def tearDown(self):
"""Close the application after tests"""
# close the application
self.dlg.send_message(win32defines.WM_CLOSE)
def test_friendly_class(self):
"""Make sure the Trackbar friendly class is set correctly"""
self.assertEqual(self.ctrl.friendly_class_name(), u"Trackbar")
def test_get_range_max(self):
"""Test the get_range_max method"""
self.ctrl.set_range_max(100)
self.assertEqual(self.ctrl.get_range_max(), 100)
def test_get_range_min(self):
"""Test the get_range_min method"""
self.ctrl.set_range_min(25)
self.assertEqual(self.ctrl.get_range_min(), 25)
def test_set_range_min_more_then_range_max(self):
"""Test the set_range_min method with error"""
self.assertRaises(ValueError, self.ctrl.set_range_min, self.ctrl.get_range_max() + 1)
def test_set_position_more_than_max_range(self):
"""Test the set_position method with error"""
self.ctrl.set_range_max(100)
self.assertRaises(ValueError, self.ctrl.set_position, 110)
def test_set_position_less_than_min_range(self):
"""Test the set_position method with error"""
self.assertRaises(ValueError, self.ctrl.set_position, self.ctrl.get_range_min() - 10)
def test_set_correct_position(self):
"""Test the set_position method"""
self.ctrl.set_position(23)
self.assertEqual(self.ctrl.get_position(), 23)
def test_get_num_ticks(self):
"""Test the get_num_ticks method"""
self.assertEqual(self.ctrl.get_num_ticks(), 6)
def test_get_channel_rect(self):
"""Test the get_channel_rect method"""
system_rect = RECT()
system_rect.left = 8
system_rect.top = 19
system_rect.right = 249
system_rect.bottom = 23
self.assert_channel_rect(self.ctrl.get_channel_rect(), system_rect)
def assert_channel_rect(self, first_rect, second_rect):
"""Compare two rect structures"""
self.assertEqual(first_rect.height(), second_rect.height())
self.assertEqual(first_rect.width(), second_rect.width())
def test_get_line_size(self):
"""Test the get_line_size method"""
self.ctrl.set_line_size(10)
self.assertEqual(self.ctrl.get_line_size(), 10)
def test_get_page_size(self):
"""Test the set_page_size method"""
self.ctrl.set_page_size(14)
self.assertEqual(self.ctrl.get_page_size(), 14)
def test_get_tool_tips_control(self):
"""Test the get_tooltips_control method"""
self.assertRaises(RuntimeError, self.ctrl.get_tooltips_control)
def test_set_sel(self):
"""Test the set_sel method"""
self.assertRaises(RuntimeError, self.ctrl.set_sel, 22, 55)
def test_get_sel_start(self):
"""Test the get_sel_start method"""
self.assertRaises(RuntimeError, self.ctrl.get_sel_start)
def test_get_sel_end(self):
"""Test the get_sel_end method"""
self.assertRaises(RuntimeError, self.ctrl.get_sel_end)
if __name__ == "__main__":
unittest.main()
|
|
#
# Copyright (c) SAS Institute Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Unit tests for front-end build functionality
"""
import os
import re
import shutil
from conary import state
from conary.build import cook
from conary.deps import deps
from conary.lib import util
from conary.state import ConaryStateFromFile
from conary_test import recipes
from rmake import errors
from rmake.cmdline import buildcmd
from rmake_test import fixtures
from rmake_test import resources
from rmake_test import rmakehelp
class BuildCmdLineTest(rmakehelp.RmakeHelper):
def testChangeFactory(self):
repos = self.openRmakeRepository()
helper = self.getRmakeHelper()
# Commit factory to use
# Warning: can't use this factory unless its factory is set to "factory"
self.addComponent('factory-test:source', '0', '',
[('factory-test.recipe', localFactory)],
factory='factory')
# Commit recipe which won't cook successfully
self.addComponent('simple:source', '1', '',
[ ('simple.recipe', recipes.simpleRecipe)])
os.chdir(self.workDir)
self.checkout('simple')
os.chdir('simple')
# Hack around conary's bug of not noticing a factory-only change during checkin
self.writeFile('simple.recipe',
recipes.simpleRecipe + '\tr.Create("bar")\n')
# Load CONARY state file
stateFile = "CONARY"
conaryState = ConaryStateFromFile(stateFile)
sourceState = conaryState.getSourceState()
# Verify no factory
assert(sourceState.getFactory() == '')
# Set factory
sourceState.setFactory('test')
conaryState.write(stateFile)
# Verify build is successful
(name,version,flavor) = buildcmd.getTrovesToBuild(self.buildCfg,
helper.getConaryClient(),
['simple.recipe'], message='foo')[0]
# checkout newly shadowed package
self.checkout(name,versionStr=version.asString())
# get state file object of newly shadowed package
os.chdir('simple')
conaryState = ConaryStateFromFile(stateFile)
sourceState = conaryState.getSourceState()
# check factory matches
assert(sourceState.getFactory() == 'test')
# TODO test for adding and removing files while changing factory
def testGetTrovesToBuild(self):
repos = self.openRmakeRepository()
helper = self.getRmakeHelper()
self.addComponent('simple:source', '1', '',
[ ('simple.recipe', recipes.simpleRecipe),
('foo', 'content\n')])
os.chdir(self.workDir)
self.checkout('simple')
os.chdir('simple')
self.writeFile('simple.recipe',
recipes.simpleRecipe + '\tr.AddSource("bar")\n')
self.remove('foo')
self.writeFile('bar', 'content\n')
self.addfile('bar', text=True)
os.mkdir('baz') # just make a random dir - rmake should
# ignore this.
(n,v,f) = buildcmd.getTrovesToBuild(self.buildCfg,
helper.getConaryClient(),
['simple.recipe[bam]'], message='foo')[0]
assert(f == deps.parseFlavor('bam'))
f = deps.parseFlavor('')
assert(v.trailingLabel().getHost() == self.rmakeCfg.reposName)
# make sure we can commit this uphill
assert(len(list(v.iterLabels())) == 2)
trove = repos.getTrove(n,v,f, withFiles=True)
assert(set(x[1] for x in list(trove.iterFileList())) == set(['simple.recipe', 'bar']))
# okay now let's do it again, no change this time
v2 = buildcmd.getTrovesToBuild(self.buildCfg, helper.getConaryClient(),
['simple.recipe'], message='foo')[0][1]
assert(v == v2)
# this time, change the config setting for bar
self.setSourceFlag('bar', binary=True)
v3 = buildcmd.getTrovesToBuild(self.buildCfg, helper.getConaryClient(),
['simple.recipe'], message='foo')[0][1]
assert(v3 != v2)
# one more time, revert back to original.
self.writeFile('simple.recipe', recipes.simpleRecipe)
v4 = buildcmd.getTrovesToBuild(self.buildCfg, helper.getConaryClient(),
['simple.recipe'], message='foo')[0][1]
assert(v4.trailingLabel().getHost() == self.rmakeCfg.reposName)
assert(v4 != v3)
assert(helper.buildConfig.buildTroveSpecs ==
[(self.workDir + '/simple/simple.recipe', None, deps.parseFlavor(''))])
def testGetInfoRecipe(self):
recipePath = self.workDir + '/info-foo.recipe'
self.writeFile(recipePath, infoRecipe)
self.openRepository()
self.openRmakeRepository()
helper = self.getRmakeHelper()
self.logFilter.add()
v = buildcmd.getTrovesToBuild(self.buildCfg, helper.getConaryClient(),
[recipePath])[0][1]
assert(v.trailingLabel().getHost() == self.rmakeCfg.reposName)
def testGetTrovesWithFileWithPackageNameInSameDirectory(self):
simpleRecipe = recipes.simpleRecipe
self.addComponent('simple:source', '1-1',
[('simple.recipe', simpleRecipe)])
os.chdir(self.workDir)
self.checkout('simple')
os.chdir('simple')
self.addComponent('simple:source', '1-2',
[('simple.recipe', simpleRecipe)])
# file with the name of the package we're trying to build.
# If this is working correctly, "rmake build simple" should build the
# file in the repository (1-2), not the local .recipe (which is 1-1)
self.writeFile('simple', 'foo\n')
self.writeFile('simple.recipe', simpleRecipe + '#change\n')
self.openRepository()
self.openRmakeRepository()
helper = self.getRmakeHelper()
v = buildcmd.getTrovesToBuild(self.buildCfg, helper.getConaryClient(),
['simple'])[0][1]
self.assertEquals(v.getHost(), 'localhost')
def testGetTrovesWithOtherDirectory(self):
simpleRecipe = recipes.simpleRecipe
self.addComponent('simple:source', '1-1',
[('simple.recipe', simpleRecipe)])
os.chdir(self.workDir)
self.checkout('simple')
self.addComponent('simple:source', '1-2',
[('simple.recipe', simpleRecipe)])
# file with the name of the package we're trying to build.
# If this is working correctly, "rmake build simple" should build the
# file in the repository (1-2), not the local .recipe (which is 1-1)
self.writeFile('simple/simple', 'foo\n')
self.writeFile('simple/simple.recipe', simpleRecipe + '#change\n')
self.openRepository()
self.openRmakeRepository()
helper = self.getRmakeHelper()
_gifp = cook.getRecipeInfoFromPath
try:
def checkCWD(*args, **kwargs):
self.failUnlessEqual(os.getcwd(),
os.path.join(self.workDir, 'simple'))
return _gifp(*args, **kwargs)
cook.getRecipeInfoFromPath = checkCWD
v = buildcmd.getTrovesToBuild(self.buildCfg,
helper.getConaryClient(), ['simple/simple.recipe'])[0][1]
finally:
cook.getRecipeInfoFromPath = _gifp
self.assertEquals(v.getHost(), 'rmakehost')
def testGetTrovesToBuildWithRecipeAndRPM(self):
recipePath = self.workDir + '/local.recipe'
self.writeFile(recipePath, localSourceRecipe)
self.writeFile(self.workDir + '/foo', 'Contents\n')
shutil.copyfile(resources.get_archive('/tmpwatch-2.9.0-2.src.rpm'),
self.workDir + '/tmpwatch-2.9.0-2.src.rpm')
self.openRepository()
self.openRmakeRepository()
helper = self.getRmakeHelper()
self.logFilter.add()
v = buildcmd.getTrovesToBuild(self.buildCfg, helper.getConaryClient(),
[recipePath])[0][1]
assert(v.trailingLabel().getHost() == self.rmakeCfg.reposName)
self.writeFile(recipePath, localSourceRecipe + '\n')
v2 = buildcmd.getTrovesToBuild(self.buildCfg, helper.getConaryClient(),
[recipePath])[0][1]
assert(v2.trailingLabel().getHost() == self.rmakeCfg.reposName)
assert(v2 != v)
self.writeFile(recipePath, localSourceRecipe + '\n')
v3 = buildcmd.getTrovesToBuild(self.buildCfg, helper.getConaryClient(),
[recipePath])[0][1]
assert(v3.trailingLabel().getHost() == self.rmakeCfg.reposName)
assert(v3 == v2)
def testGetTrovesToBuildNewPackage(self):
self.openRmakeRepository()
# create another version in that repository on another label.
# this triggers RMK-685
self.addComponent(
'simple:source=/localhost@foo:branch//rmakehost@local:branch/1:1-1')
helper = self.getRmakeHelper()
os.chdir(self.workDir)
self.newpkg('simple=localhost@rpl:branch')
os.chdir('simple')
self.writeFile('simple.recipe',
recipes.simpleRecipe + '\tr.Create("/bar")\n\tr.addAction("echo 1")\n')
v = buildcmd.getTrovesToBuild(self.buildCfg, helper.getConaryClient(),
['simple.recipe'], message='foo')[0][1]
assert(v.trailingLabel().getHost() == self.rmakeCfg.reposName)
assert(str(v.branch().parentBranch()) == '/localhost@rpl:branch')
def testGetTrovesToBuildNoPackageWithTemplate(self):
repos = self.openRepository()
self.openRmakeRepository()
helper = self.getRmakeHelper()
templateDir = resources.get_archive('recipeTemplates')
oldRTD = self.cfg.recipeTemplateDirs
oldTemplate = self.cfg.recipeTemplate
self.buildCfg.recipeTemplateDirs = [templateDir]
self.buildCfg.recipeTemplate = 'test'
os.chdir(self.workDir)
self.writeFile('simple.recipe',
recipes.simpleRecipe + '\tr.Create("/bar")\n')
v = buildcmd.getTrovesToBuild(self.buildCfg,
helper.getConaryClient(),
['simple.recipe'], message='foo')[0][1]
assert(v.trailingLabel().getHost() == self.rmakeCfg.reposName)
assert(len(list(v.iterLabels())) == 2)
assert(str(v.branch().parentBranch()) == '/localhost@rpl:linux')
fileList = repos.iterFilesInTrove('simple:source', v,
deps.parseFlavor(''), withFiles=True)
for item in fileList:
assert(item[4].flags.isConfig())
def testGetTrovesToBuildNoPackage(self):
repos = self.openRepository()
self.openRmakeRepository()
helper = self.getRmakeHelper()
templateDir = resources.get_archive('recipeTemplates')
os.chdir(self.workDir)
self.writeFile('simple.recipe',
recipes.simpleRecipe + '\tr.Create("/bar")\n')
v = buildcmd.getTrovesToBuild(self.buildCfg,
helper.getConaryClient(),
['simple.recipe'], message='foo')[0][1]
assert(v.trailingLabel().getHost() == self.rmakeCfg.reposName)
assert(len(list(v.iterLabels())) == 2)
assert(str(v.branch().parentBranch()) == '/localhost@rpl:linux')
fileList = repos.iterFilesInTrove('simple:source', v,
deps.parseFlavor(''), withFiles=True)
for item in fileList:
assert(item[4].flags.isConfig())
def testGetTrovesFromBinaryGroup(self):
helper = self.getRmakeHelper()
self.addComponent('group-foo:source', '1')
binTrv = self.addCollection('simple', '1', [':runtime'],
createComps=True,
defaultFlavor='readline')
branchBinTrv = self.addCollection('simple', ':branch/1', [':runtime'],
createComps=True,
defaultFlavor='!readline')
self.addCollection('group-foo', '1', ['simple',
('simple', ':branch', '!readline')],
defaultFlavor='readline')
sourceTrv = self.addComponent('simple:source', '2',
[('simple.recipe', recipes.simpleRecipe)])
branchTrv = self.addComponent('simple:source', ':branch/2',
[('simple.recipe', recipes.simpleRecipe)])
def _getTroves(*matchSpecs):
return buildcmd.getTrovesToBuild(self.buildCfg,
helper.getConaryClient(),
['group-foo'], recurseGroups=True,
matchSpecs=matchSpecs)
trvs = _getTroves('simple=:linux', '-group-foo')
assert(trvs == [('simple:source', sourceTrv.getVersion(),
binTrv.getFlavor())])
trvs = _getTroves('simple=:branch', '-group-foo')
assert(trvs == [('simple:source', branchTrv.getVersion(),
branchBinTrv.getFlavor())])
trvs = _getTroves('=[readline]', '-group-foo')
assert(trvs == [('simple:source', sourceTrv.getVersion(),
binTrv.getFlavor())])
trvs = _getTroves('si*[readline]', '-group-*')
assert(trvs == [('simple:source', sourceTrv.getVersion(),
binTrv.getFlavor())])
def testRestartCookSourceGroup(self):
self.addComponent('foo:source=1')
self.addComponent('bar:source=1')
self.addComponent('group-foo:source=1',
[('group-foo.recipe', groupFooRecipe)])
helper = self.getRmakeHelper()
job = helper.createBuildJob('group-foo',
recurseGroups=helper.BUILD_RECURSE_GROUPS_SOURCE)
db = self.openRmakeDatabase()
db.addJob(job)
jobId = job.jobId
self.addComponent('group-foo:source=2',
[('group-foo.recipe', groupFooRecipe2)])
job = helper.createRestartJob(jobId)
assert(sorted([x[0] for x in job.iterTroveList()]) == ['bar:source',
'group-foo:source'])
job = helper.createRestartJob(jobId, updateSpecs=['-*'])
assert(sorted([x[0] for x in job.iterTroveList()])
== ['foo:source', 'group-foo:source'])
assert(job.getMainConfig().jobContext == [jobId])
job = helper.createRestartJob(jobId, clearPrebuiltList=True)
assert(job.getMainConfig().jobContext == [])
def testCookSourceGroup(self):
self.openRepository()
repos = self.openRepository(1)
trv0 = self.addComponent('test0:source', '1').getNameVersionFlavor()
trv1 = self.addComponent('test:source', '1').getNameVersionFlavor()
trv2 = self.addComponent('test2:source', '1').getNameVersionFlavor()
trv5 = self.addComponent('test5:source', '1').getNameVersionFlavor()
self.addComponent('test4:source',
'/localhost1@rpl:linux/1-1').getNameVersionFlavor()
self.addComponent('group-foo:source', '1',
[('group-foo.recipe', groupRecipe),
('preupdate.sh', '#!/bin/sh\necho "hello"\n')])
self.openRmakeRepository()
helper = self.getRmakeHelper()
self.buildCfg.limitToHosts('localhost')
self.buildCfg.addMatchRule('-group-foo')
job = buildcmd.getBuildJob(self.buildCfg,
helper.getConaryClient(),
['group-foo[ssl]',
'group-foo[!ssl]'],
recurseGroups=buildcmd.BUILD_RECURSE_GROUPS_SOURCE)
trvs = set(job.iterTroveList())
assert(trvs == set([(trv0[0], trv0[1], deps.parseFlavor('')),
(trv1[0], trv1[1], deps.parseFlavor('ssl')),
(trv2[0], trv2[1], deps.parseFlavor('readline')),
(trv5[0], trv5[1], deps.parseFlavor(''))]))
helper = self.getRmakeHelper()
db = self.openRmakeDatabase()
db.addJob(job)
jobId = job.jobId
job = helper.createRestartJob(jobId)
os.chdir(self.workDir)
self.checkout('group-foo')
os.chdir('group-foo')
self.writeFile('group-foo.recipe', groupRecipe + '#\n') # change
self.buildCfg.matchTroveRule = []
self.buildCfg.limitToLabels('localhost@rpl:linux')
self.buildCfg.addMatchRule('-group-foo')
self.buildCfg.addMatchRule('-[readline]')
trvs = buildcmd.getTrovesToBuild(
self.buildCfg, helper.getConaryClient(),
['group-foo.recipe[ssl]',
'group-foo.recipe[!ssl]'],
recurseGroups=buildcmd.BUILD_RECURSE_GROUPS_SOURCE,
matchSpecs=self.buildCfg.matchTroveRule)
trvs = set(trvs)
assert(trvs == set([(trv0[0], trv0[1], deps.parseFlavor('')),
(trv1[0], trv1[1], deps.parseFlavor('ssl')),
(trv5[0], trv5[1], deps.parseFlavor(''))]))
# Build the actual group, and this time let's do a fresh commit
# instead of a shadow + commit.
os.remove('CONARY')
self.buildCfg.matchTroveRule = []
trvs = buildcmd.getTrovesToBuild(
self.buildCfg, helper.getConaryClient(),
['group-foo.recipe[ssl]',
'group-foo.recipe[!ssl]'],
recurseGroups=buildcmd.BUILD_RECURSE_GROUPS_NONE,
matchSpecs=self.buildCfg.matchTroveRule)
trvs = set(trvs)
assert(len(trvs) == 2)
assert([x[0] for x in trvs] == ['group-foo:source', 'group-foo:source'])
def testGetTrovesToBuildFailedPackage(self):
self.openRmakeRepository()
helper = self.getRmakeHelper()
os.chdir(self.workDir)
self.newpkg('simple')
os.chdir('simple')
self.writeFile('simple.recipe',
recipes.simpleRecipe + '\ta = b # NameError\n')
try:
v = buildcmd.getTrovesToBuild(self.buildCfg,
helper.getConaryClient(),
['simple.recipe'], message='foo')[0][1]
except errors.RmakeError, msg:
assert(str(msg) == "could not initialize recipe: %s/simple/simple.recipe:8:\n NameError: global name 'b' is not defined" % self.workDir)
else:
assert 0, "expected RmakeError"
self.writeFile('simple.recipe',
recipes.simpleRecipe + '\tr.addArchive("blammo")\n')
try:
v = buildcmd.getTrovesToBuild(self.buildCfg,
helper.getConaryClient(),
['simple.recipe'], message='foo')[0][1]
except errors.RmakeError, msg:
assert(str(msg) == 'Could not commit changes to build recipe %s/simple/simple.recipe: Source file blammo does not exist' % self.workDir)
else:
assert 0, "expected RmakeError"
def testCookBinaryGroup(self):
repos = self.openRmakeRepository()
self.startRmakeServer()
helper = self.getRmakeHelper()
bamTrv = self.addComponent('group-bam:source', '1')
self.addComponent('group-bam:source', '2')
binTrv = self.addCollection('simple', '1', [':runtime'],
createComps=True,
defaultFlavor='readline')
self.addCollection('group-foo', '1', ['simple'],
defaultFlavor='readline',
sourceName='group-bam:source')
sourceTrv = self.addComponent('simple:source', '2',
[('simple.recipe', recipes.simpleRecipe)])
trvs = buildcmd.getTrovesToBuild(self.buildCfg,
helper.getConaryClient(),
['group-foo[ssl]'],
matchSpecs=['-group-foo'],
recurseGroups=True)
assert(len(trvs) == 2)
assert(set(trvs) == set([('simple:source', sourceTrv.getVersion(),
binTrv.getFlavor()),
('group-bam:source', bamTrv.getVersion(),
deps.parseFlavor('ssl'))]))
jobId, txt = self.captureOutput(helper.buildTroves,
['group-foo'],
matchSpecs=['-group-bam'],
recurseGroups=True)
helper.waitForJob(jobId)
assert(helper.client.getJob(jobId).isBuilt())
self.addComponent('foo:source=2')
binTrv = self.addCollection('foo', '2', [':runtime'],
createComps=True,
defaultFlavor='~readline,~ssl')
self.addCollection('group-foo', '2', ['foo=2'],
defaultFlavor='~readline,~ssl',
sourceName='group-bam:source')
job = helper.createRestartJob(jobId, updateSpecs=['-group-*'])
assert(sorted(x.getName() for x in job.iterTroves()) == ['simple:source'])
job = helper.createRestartJob(jobId)
assert(sorted(x.getName() for x in job.iterTroves()) == ['foo:source'])
def testResolveTroveList(self):
repos = self.openRepository()
self.addComponent('foo:run', '1')
grp = self.addCollection('group-dist', '1', ['foo:run'])
oldResolveTroves = self.buildCfg.resolveTroves
self.buildCfg.resolveTroves = [[('group-dist', None, None)]]
try:
resolveTroveTups = buildcmd._getResolveTroveTups(self.buildCfg, repos)
finally:
self.buildCfg.resolveTroves = oldResolveTroves
assert(resolveTroveTups == [[grp.getNameVersionFlavor()]])
def testResolveTroveListError(self):
repos = self.openRepository()
oldResolveTroves = self.buildCfg.resolveTroves
self.buildCfg.resolveTroves = [[('group-dist', None, None)]]
try:
try:
buildcmd._getResolveTroveTups(self.buildCfg, repos)
except errors.RmakeError, msg:
assert(str(msg) == 'Could not find resolve troves for [default] context: group-dist'
' was not found on path localhost@rpl:linux\n')
else:
assert 0, "didn't raise expected exception"
finally:
self.buildCfg.resolveTroves = oldResolveTroves
def testRemovefile(self):
repos = self.openRmakeRepository()
helper = self.getRmakeHelper()
self.buildCfg.configLine('[foo]')
self.buildCfg.configLine('flavor ssl')
self.addComponent('local:source', '1', '',
[ ('local.recipe', localSourceRecipe2),
('foo', 'content\n')])
os.chdir(self.workDir)
self.checkout('local')
os.chdir('local')
self.writeFile('local.recipe',
'\n'.join(localSourceRecipe2.split('\n')[:-1]))
self.remove('foo')
job = self.captureOutput(buildcmd.getBuildJob,
self.buildCfg,
helper.getConaryClient(),
['local.recipe{foo}'], message='foo')
# make sure that making no changes works as well
job = self.captureOutput(buildcmd.getBuildJob,
self.buildCfg,
helper.getConaryClient(),
['local.recipe{foo}'], message='foo')
def testLoadJob(self):
self.addComponent('simple:source', '1', '',
[ ('simple.recipe', recipes.simpleRecipe)])
helper = self.getRmakeHelper()
job = helper.createBuildJob('simple')
job.writeToFile(self.workDir + '/foo.job')
helper.buildConfig.user.append(('localhost', 'bam', 'newpass'))
job2 = helper.loadJobFromFile(self.workDir + '/foo.job')
assert(list(job2.iterTroveList()) == list(job.iterTroveList()))
assert(job2.iterConfigList().next().user
!= job.iterConfigList().next().user)
def testSubDirectories(self):
repos = self.openRmakeRepository()
helper = self.getRmakeHelper()
self.addComponent('local:source', '1', '',
[ ('local.recipe', localSourceRecipe3),
('subdir/foo', 'content\n')])
os.chdir(self.workDir)
self.checkout('local')
os.chdir('local')
self.writeFile('local.recipe',
(localSourceRecipe3 + '\tr.addSource("bar/bam")\n'))
os.mkdir('bar')
self.writeFile('bar/bam', 'content2\n')
self.addfile('bar')
self.addfile('bar/bam', text=True)
(n,v,f) = self.captureOutput(buildcmd.getTrovesToBuild,
self.buildCfg,
helper.getConaryClient(),
['local.recipe'], message='foo')[0][0]
# make sure that making no changes works as well
(n,v,f) = self.captureOutput(buildcmd.getTrovesToBuild,
self.buildCfg,
helper.getConaryClient(),
['local.recipe'], message='foo')[0][0]
def testBuildRecipeWithMissingFile(self):
self.openRmakeRepository()
self.addComponent('simple:source',
[('simple.recipe', recipes.simpleRecipe)])
os.chdir(self.workDir)
self.checkout('simple')
os.chdir('simple')
self.writeFile('simple.recipe',
recipes.simpleRecipe + '\tr.addSource("foo")\n')
self.writeFile('foo', 'foo\n')
helper = self.getRmakeHelper()
self.buildCfg.configLine('sourceSearchDir .')
(n,v,f) = self.captureOutput(buildcmd.getTrovesToBuild,
self.buildCfg,
helper.getConaryClient(),
['simple.recipe'], message='foo')[0][0]
os.chdir('..')
self.checkout('simple=%s' % v)
os.chdir('simple')
stateFile = state.ConaryStateFromFile('CONARY').getSourceState()
pathId, = [x[0] for x in stateFile.iterFileList() if x[1] == 'foo']
assert(not stateFile.fileIsAutoSource(pathId))
def testRefreshRecipe(self):
self.cfg.sourceSearchDir = self.workDir + '/source'
self.buildCfg.sourceSearchDir = self.workDir + '/source'
util.mkdirChain(self.cfg.sourceSearchDir)
autoSourceFile = self.cfg.sourceSearchDir + '/autosource'
self.writeFile(autoSourceFile, 'contents\n')
self.makeSourceTrove('auto', autoSourceRecipe)
os.chdir(self.workDir)
self.checkout('auto')
os.chdir('auto')
self.writeFile(autoSourceFile, 'contents2\n')
self.refresh()
repos = self.openRmakeRepository()
helper = self.getRmakeHelper()
(n,v,f) = self.captureOutput(buildcmd.getTrovesToBuild,
self.buildCfg,
helper.getConaryClient(),
['auto.recipe'], message='foo')[0][0]
trv = repos.getTrove(n,v,f)
filesToGet = []
for pathId, path, fileId, fileVersion in trv.iterFileList():
if path == 'autosource':
filesToGet.append((fileId, fileVersion))
contents = repos.getFileContents(filesToGet)[0]
assert(contents.get().read() == 'contents2\n')
def testRebuild(self):
self.addComponent('foo:source')
self.addComponent('bar:source')
self.buildCfg.configLine('[x86]')
helper = self.getRmakeHelper()
job = buildcmd.getBuildJob(self.buildCfg,
helper.getConaryClient(),
['foo'],
rebuild=True)
assert(not job.getMainConfig().prebuiltBinaries)
self.addComponent('foo:runtime')
self.addCollection('foo', [':runtime'])
self.addComponent('bar:runtime')
self.addCollection('bar', [':runtime'])
self.addComponent('bar:runtime[is:x86_64]')
self.addCollection('bar[is:x86_64]', [':runtime'])
job = helper.createBuildJob(['foo', 'bar{x86}'],
rebuild=True)
assert(sorted([x[0] for x in job.getMainConfig().prebuiltBinaries]) == ['bar', 'bar', 'foo'])
def testBuildInfo(self):
self.addComponent('bar:runtime')
self.addComponent('foo:source')
self.openRepository()
self.openRmakeRepository()
self.buildCfg.flavor = [deps.parseFlavor('is:x86')]
self.buildCfg.configLine('resolveTroves bar:runtime')
self.buildCfg.configLine('[nossl]')
self.buildCfg.configLine('flavor !ssl is: x86')
client = self.startRmakeServer()
helper = self.getRmakeHelper(client.uri)
rc, txt = self.captureOutput(helper.buildTroves,['foo{nossl}', 'foo[ssl]'], infoOnly=True, limitToLabels='localhost@rpl:linux')
txt = re.sub('flavor.*', 'flavor <flavor>', txt)
txt = re.sub('buildFlavor .*', 'buildFlavor <flavor>', txt)
assert('repositoryMap' in txt)
txt = re.sub('repositoryMap .*?\n', '', txt)
self.assertEquals(txt, '''
{Default Context}
ResolveTroves:
bar:runtime=/localhost@rpl:linux/1.0-1-1[]
Configuration:
copyInConfig False
copyInConary False
buildFlavor <flavor>
flavor <flavor>
installLabelPath localhost@rpl:linux
resolveTrovesOnly False
user rmakehost rmake <password>
user * test <password>
Building:
foo:source=localhost@rpl:linux/1.0-1[ssl]
{nossl}
ResolveTroves:
bar:runtime=/localhost@rpl:linux/1.0-1-1[]
Configuration:
buildFlavor <flavor>
flavor <flavor>
installLabelPath localhost@rpl:linux
resolveTrovesOnly False
user rmakehost rmake <password>
user * test <password>
Building:
foo:source=localhost@rpl:linux/1.0-1{nossl}
''')
rc, txt = self.captureOutput(helper.buildTroves,['foo{nossl}', 'foo[ssl]'], quiet=True, infoOnly=True)
self.assertEquals(txt, '''\
foo:source=localhost@rpl:linux/1.0-1[ssl]
foo:source=localhost@rpl:linux/1.0-1{nossl}
''')
job = fixtures.addBuiltJob1(self)
rc, txt = self.captureOutput(helper.restartJob, 1, infoOnly=True,
quiet=True)
self.assertEquals(txt, '''\
testcase:source=localhost@rpl:linux/1-1[ssl]
''')
infoRecipe = """\
class InfoRecipe(UserInfoRecipe):
name = 'info-foo'
version = '1.0'
clearBuildReqs()
def setup(r):
r.User('foo', 500)
"""
groupRecipe = """
class FooGroup(GroupRecipe):
name = 'group-foo'
version = '1'
def setup(r):
r.add('test0')
r.addAll('test5')
if Use.ssl:
r.add('test-derived', '1', 'ssl', source='test')
else:
r.add('test2', '1', 'readline')
r.add('test3', '1', 'readline')
r.add('test4', 'localhost1@rpl:linux/1', 'readline')
r.addPostUpdateScript(contents = '''#!/bin/bash
/sbin/service foundation-config start
''')
r.addPreUpdateScript('preupdate.sh')
"""
groupRecipe2 = """
class FooGroup(GroupRecipe):
name = 'group-foo'
version = '2'
def setup(r):
r.add('test0')
r.add('test6')
"""
groupFooRecipe = """
class FooGroup(GroupRecipe):
name = 'group-foo'
version = '1'
def setup(r):
r.add('foo')
"""
groupFooRecipe2 = """
class FooGroup(GroupRecipe):
name = 'group-foo'
version = '2'
def setup(r):
r.add('bar')
"""
autoSourceRecipe = """\
class AutoSource(PackageRecipe):
name = 'auto'
version = '1.0'
clearBuildReqs()
def setup(r):
r.addSource('autosource', dest='/foo/autosource')
"""
localSourceRecipe = """\
class LocalSource(PackageRecipe):
name = 'local'
version = '1.0'
clearBuildReqs()
def setup(r):
r.addSource('dc_client.init', rpm='distcache-1.4.5-2.src.rpm')
r.addSource('foo', dest='/')
r.addArchive('tmpwatch-2.9.0.tar.gz', rpm='tmpwatch-2.9.0-2.src.rpm')
del r.NormalizeManPages
"""
localSourceRecipe2 = """\
class LocalSource(PackageRecipe):
name = 'local'
version = '1.0'
clearBuildReqs()
def setup(r):
r.addSource('dc_client.init', rpm='distcache-1.4.5-2.src.rpm')
r.addArchive('tmpwatch-2.9.0.tar.gz', rpm='tmpwatch-2.9.0-2.src.rpm')
r.addSource('foo', dest='/')
"""
localSourceRecipe3 = """\
class LocalSource(PackageRecipe):
name = 'local'
version = '1.0'
clearBuildReqs()
def setup(r):
r.addSource('subdir/foo', dest='/')
r.addAction('echo "hello"')
"""
localFactory = """\
class FactoryTest(Factory):
name = 'factory-test'
version = '0'
def getRecipeClass(self):
class TestRecipe(PackageRecipe):
name = self.packageName
version = 'setbysuperclass'
def setup(r):
r.Create('/etc/foo')
return TestRecipe
"""
|
|
#!/usr/bin/env python
# Copyright 2013 Google Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
'''
GCE external inventory script
=================================
Generates inventory that Ansible can understand by making API requests
to Google Compute Engine via the libcloud library. Full install/configuration
instructions for the gce* modules can be found in the comments of
ansible/test/gce_tests.py.
When run against a specific host, this script returns the following variables
based on the data obtained from the libcloud Node object:
- gce_uuid
- gce_id
- gce_image
- gce_machine_type
- gce_private_ip
- gce_public_ip
- gce_name
- gce_description
- gce_status
- gce_zone
- gce_tags
- gce_metadata
- gce_network
When run in --list mode, instances are grouped by the following categories:
- zone:
zone group name examples are us-central1-b, europe-west1-a, etc.
- instance tags:
An entry is created for each tag. For example, if you have two instances
with a common tag called 'foo', they will both be grouped together under
the 'tag_foo' name.
- network name:
the name of the network is appended to 'network_' (e.g. the 'default'
network will result in a group named 'network_default')
- machine type:
types follow a pattern like n1-standard-4, g1-small, etc.
- running status:
group name prefixed with 'status_' (e.g. status_running, status_stopped, ...)
- image:
when using an ephemeral/scratch disk, this will be set to the image name
used when creating the instance (e.g. debian-7-wheezy-v20130816). when
your instance was created with a root persistent disk it will be set to
'persistent_disk' since there is no current way to determine the image.
Examples:
Execute uname on all instances in the us-central1-a zone
$ ansible -i gce.py us-central1-a -m shell -a "/bin/uname -a"
Use the GCE inventory script to print out instance specific information
$ contrib/inventory/gce.py --host my_instance
Author: Eric Johnson <[email protected]>
Version: 0.0.1
'''
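# Illustrative sketch (not part of the original script): the --list output has
# the standard Ansible dynamic-inventory shape, one list of host names per
# group plus a "_meta" section of per-host variables built by node_to_dict().
# Group names follow the categories documented above; the host name, address
# and values below are placeholders, not data returned by any real project.
_EXAMPLE_LIST_OUTPUT = {
    'us-central1-a': ['web-1'],       # zone group
    'tag_foo': ['web-1'],             # instance tag group
    'network_default': ['web-1'],     # network group
    'n1-standard-1': ['web-1'],       # machine type group
    'status_running': ['web-1'],      # running status group
    '_meta': {
        'hostvars': {
            'web-1': {
                'gce_status': 'RUNNING',
                'ansible_ssh_host': '203.0.113.10',
            },
        },
    },
}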
__requires__ = ['pycrypto>=2.6']
try:
import pkg_resources
except ImportError:
# Use pkg_resources to find the correct versions of libraries and set
# sys.path appropriately when there are multiversion installs. We don't
# fail here as there is code that better expresses the errors where the
# library is used.
pass
USER_AGENT_PRODUCT="Ansible-gce_inventory_plugin"
USER_AGENT_VERSION="v1"
import sys
import os
import argparse
import ConfigParser
try:
import json
except ImportError:
import simplejson as json
try:
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver
_ = Provider.GCE
except:
print("GCE inventory script requires libcloud >= 0.13")
sys.exit(1)
class GceInventory(object):
def __init__(self):
# Read settings and parse CLI arguments
self.parse_cli_args()
self.driver = self.get_gce_driver()
# Just display data for specific host
if self.args.host:
print(self.json_format_dict(self.node_to_dict(
self.get_instance(self.args.host)),
pretty=self.args.pretty))
sys.exit(0)
# Otherwise, assume user wants all instances grouped
print(self.json_format_dict(self.group_instances(),
pretty=self.args.pretty))
sys.exit(0)
def get_gce_driver(self):
"""Determine the GCE authorization settings and return a
libcloud driver.
"""
gce_ini_default_path = os.path.join(
os.path.dirname(os.path.realpath(__file__)), "gce.ini")
gce_ini_path = os.environ.get('GCE_INI_PATH', gce_ini_default_path)
# Create a ConfigParser.
# This provides empty defaults to each key, so that environment
# variable configuration (as opposed to INI configuration) is able
# to work.
config = ConfigParser.SafeConfigParser(defaults={
'gce_service_account_email_address': '',
'gce_service_account_pem_file_path': '',
'gce_project_id': '',
'libcloud_secrets': '',
})
if 'gce' not in config.sections():
config.add_section('gce')
config.read(gce_ini_path)
# Attempt to get GCE params from a configuration file, if one
# exists.
secrets_path = config.get('gce', 'libcloud_secrets')
secrets_found = False
try:
import secrets
args = list(getattr(secrets, 'GCE_PARAMS', []))
kwargs = getattr(secrets, 'GCE_KEYWORD_PARAMS', {})
secrets_found = True
except:
pass
if not secrets_found and secrets_path:
if not secrets_path.endswith('secrets.py'):
err = "Must specify libcloud secrets file as "
err += "/absolute/path/to/secrets.py"
print(err)
sys.exit(1)
sys.path.append(os.path.dirname(secrets_path))
try:
import secrets
args = list(getattr(secrets, 'GCE_PARAMS', []))
kwargs = getattr(secrets, 'GCE_KEYWORD_PARAMS', {})
secrets_found = True
except:
pass
if not secrets_found:
args = [
config.get('gce','gce_service_account_email_address'),
config.get('gce','gce_service_account_pem_file_path')
]
kwargs = {'project': config.get('gce', 'gce_project_id')}
# If the appropriate environment variables are set, they override
# other configuration; process those into our args and kwargs.
args[0] = os.environ.get('GCE_EMAIL', args[0])
args[1] = os.environ.get('GCE_PEM_FILE_PATH', args[1])
kwargs['project'] = os.environ.get('GCE_PROJECT', kwargs['project'])
# Retrieve and return the GCE driver.
gce = get_driver(Provider.GCE)(*args, **kwargs)
gce.connection.user_agent_append(
'%s/%s' % (USER_AGENT_PRODUCT, USER_AGENT_VERSION),
)
return gce
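# Illustrative gce.ini sketch (an assumption for documentation only; the
# values are placeholders, not real credentials). The keys mirror the
# defaults declared above, and the GCE_EMAIL, GCE_PEM_FILE_PATH,
# GCE_PROJECT and GCE_INI_PATH environment variables override them:
#
#   [gce]
#   gce_service_account_email_address = <service-account-email>
#   gce_service_account_pem_file_path = /path/to/key.pem
#   gce_project_id = <project-id>
#   libcloud_secrets =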
def parse_cli_args(self):
''' Command line argument processing '''
parser = argparse.ArgumentParser(
description='Produce an Ansible Inventory file based on GCE')
parser.add_argument('--list', action='store_true', default=True,
help='List instances (default: True)')
parser.add_argument('--host', action='store',
help='Get all information about an instance')
parser.add_argument('--pretty', action='store_true', default=False,
help='Pretty format (default: False)')
self.args = parser.parse_args()
def node_to_dict(self, inst):
md = {}
if inst is None:
return {}
if inst.extra['metadata'].has_key('items'):
for entry in inst.extra['metadata']['items']:
md[entry['key']] = entry['value']
net = inst.extra['networkInterfaces'][0]['network'].split('/')[-1]
return {
'gce_uuid': inst.uuid,
'gce_id': inst.id,
'gce_image': inst.image,
'gce_machine_type': inst.size,
'gce_private_ip': inst.private_ips[0],
'gce_public_ip': inst.public_ips[0] if len(inst.public_ips) >= 1 else None,
'gce_name': inst.name,
'gce_description': inst.extra['description'],
'gce_status': inst.extra['status'],
'gce_zone': inst.extra['zone'].name,
'gce_tags': inst.extra['tags'],
'gce_metadata': md,
'gce_network': net,
# Hosts don't have a public name, so we add an IP
'ansible_ssh_host': inst.public_ips[0] if len(inst.public_ips) >= 1 else inst.private_ips[0]
}
def get_instance(self, instance_name):
'''Gets details about a specific instance '''
try:
return self.driver.ex_get_node(instance_name)
except Exception as e:
return None
def group_instances(self):
'''Group all instances'''
groups = {}
meta = {}
meta["hostvars"] = {}
for node in self.driver.list_nodes():
name = node.name
meta["hostvars"][name] = self.node_to_dict(node)
zone = node.extra['zone'].name
if groups.has_key(zone): groups[zone].append(name)
else: groups[zone] = [name]
tags = node.extra['tags']
for t in tags:
if t.startswith('group-'):
tag = t[6:]
else:
tag = 'tag_%s' % t
if groups.has_key(tag): groups[tag].append(name)
else: groups[tag] = [name]
net = node.extra['networkInterfaces'][0]['network'].split('/')[-1]
net = 'network_%s' % net
if groups.has_key(net): groups[net].append(name)
else: groups[net] = [name]
machine_type = node.size
if groups.has_key(machine_type): groups[machine_type].append(name)
else: groups[machine_type] = [name]
image = node.image and node.image or 'persistent_disk'
if groups.has_key(image): groups[image].append(name)
else: groups[image] = [name]
status = node.extra['status']
stat = 'status_%s' % status.lower()
if groups.has_key(stat): groups[stat].append(name)
else: groups[stat] = [name]
groups["_meta"] = meta
return groups
def json_format_dict(self, data, pretty=False):
''' Converts a dict to a JSON object and dumps it as a formatted
string '''
if pretty:
return json.dumps(data, sort_keys=True, indent=2)
else:
return json.dumps(data)
# Run the script
GceInventory()
|
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Document matcher for Full Text Search API stub.
DocumentMatcher provides an approximation of the Full Text Search API's query
matching.
"""
from google.appengine.datastore import document_pb
from google.appengine._internal.antlr3 import tree
from google.appengine.api.search import query_parser
from google.appengine.api.search import QueryParser
from google.appengine.api.search import search_util
from google.appengine.api.search.stub import simple_tokenizer
from google.appengine.api.search.stub import tokens
class DocumentMatcher(object):
"""A class to match documents with a query."""
def __init__(self, query, inverted_index):
self._query = query
self._inverted_index = inverted_index
self._parser = simple_tokenizer.SimpleTokenizer()
def _PostingsForToken(self, token):
"""Returns the postings for the token."""
return self._inverted_index.GetPostingsForToken(token)
def _PostingsForFieldToken(self, field, value):
"""Returns postings for the value occurring in the given field."""
value = simple_tokenizer.NormalizeString(value)
return self._PostingsForToken(
tokens.Token(chars=value, field_name=field))
def _MatchPhrase(self, field, match, document):
"""Match a textual field with a phrase query node."""
field_text = field.value().string_value()
phrase_text = query_parser.GetPhraseQueryNodeText(match)
if field.value().type() == document_pb.FieldValue.ATOM:
return (field_text == phrase_text)
phrase = self._parser.TokenizeText(phrase_text)
field_text = self._parser.TokenizeText(field_text)
if not phrase:
return True
posting = None
for post in self._PostingsForFieldToken(field.name(), phrase[0].chars):
if post.doc_id == document.id():
posting = post
break
if not posting:
return False
def ExtractWords(token_list):
return (token.chars for token in token_list)
for position in posting.positions:
match_words = zip(ExtractWords(field_text[position:]),
ExtractWords(phrase))
if len(match_words) != len(phrase):
continue
match = True
for doc_word, match_word in match_words:
if doc_word != match_word:
match = False
if match:
return True
return False
def _MatchTextField(self, field, match, document):
"""Check if a textual field matches a query tree node."""
if (match.getType() in (QueryParser.TEXT, QueryParser.NAME) or
match.getType() in search_util.NUMBER_QUERY_TYPES):
if field.value().type() == document_pb.FieldValue.ATOM:
return (field.value().string_value() ==
query_parser.GetQueryNodeText(match))
query_tokens = self._parser.TokenizeText(
query_parser.GetQueryNodeText(match))
if not query_tokens:
return True
if len(query_tokens) > 1:
def QueryNode(token):
return query_parser.CreateQueryNode(token.chars, QueryParser.TEXT)
return all(self._MatchTextField(field, QueryNode(token), document)
for token in query_tokens)
token_text = query_tokens[0].chars
matching_docids = [
post.doc_id for post in self._PostingsForFieldToken(
field.name(), token_text)]
return document.id() in matching_docids
if match.getType() is QueryParser.PHRASE:
return self._MatchPhrase(field, match, document)
if match.getType() is QueryParser.CONJUNCTION:
return all(self._MatchTextField(field, child, document)
for child in match.children)
if match.getType() is QueryParser.DISJUNCTION:
return any(self._MatchTextField(field, child, document)
for child in match.children)
if match.getType() is QueryParser.NEGATION:
return not self._MatchTextField(field, match.children[0], document)
return False
def _MatchDateField(self, field, match, document):
"""Check if a date field matches a query tree node."""
return self._MatchComparableField(
field, match, search_util.DeserializeDate,
search_util.TEXT_QUERY_TYPES, document)
def _MatchNumericField(self, field, match, document):
"""Check if a numeric field matches a query tree node."""
return self._MatchComparableField(
field, match, float, search_util.NUMBER_QUERY_TYPES, document)
def _MatchComparableField(
self, field, match, cast_to_type, query_node_types,
document):
"""A generic method to test matching for comparable types.
Comparable types are defined to be anything that supports <, >, <=, >=, ==
and !=. For our purposes, this is numbers and dates.
Args:
field: The document_pb.Field to test
match: The query node to match against
cast_to_type: The type to cast the node string values to
query_node_types: The query node types that would be valid matches
document: The document that the field is in
Returns:
True iff the field matches the query.
Raises:
UnsupportedOnDevError: Raised when an unsupported operator is used, or
when the query node is of the wrong type.
"""
field_val = cast_to_type(field.value().string_value())
op = QueryParser.EQ
if match.getType() in query_node_types:
try:
match_val = cast_to_type(query_parser.GetQueryNodeText(match))
except ValueError:
return False
elif match.children:
op = match.getType()
try:
match_val = cast_to_type(
query_parser.GetQueryNodeText(match.children[0]))
except ValueError:
return False
else:
return False
if op is QueryParser.EQ:
return field_val == match_val
if op is QueryParser.NE:
return field_val != match_val
if op is QueryParser.GT:
return field_val > match_val
if op is QueryParser.GE:
return field_val >= match_val
if op is QueryParser.LT:
return field_val < match_val
if op is QueryParser.LE:
return field_val <= match_val
raise search_util.UnsupportedOnDevError(
'Operator %s not supported for numerical fields on development server.'
% match.getText())
def _MatchField(self, field, match, document):
"""Check if a field matches a query tree.
Args:
field: Either a string containing the name of a field, a query
node whose text is the name of the field, or a document_pb.Field.
match: A query node to match the field with.
document: The document to match.
"""
if isinstance(field, (basestring, tree.CommonTree)):
if isinstance(field, tree.CommonTree):
field = field.getText()
fields = search_util.GetAllFieldInDocument(document, field)
return any(self._MatchField(f, match, document) for f in fields)
if field.value().type() in search_util.TEXT_DOCUMENT_FIELD_TYPES:
return self._MatchTextField(field, match, document)
if field.value().type() in search_util.NUMBER_DOCUMENT_FIELD_TYPES:
return self._MatchNumericField(field, match, document)
if field.value().type() == document_pb.FieldValue.DATE:
return self._MatchDateField(field, match, document)
raise search_util.UnsupportedOnDevError(
'Matching to field type of field "%s" (type=%d) is unsupported on '
'dev server' % (field.name(), field.value().type()))
def _MatchGlobal(self, match, document):
for field in document.field_list():
if self._MatchField(field.name(), match, document):
return True
return False
def _CheckMatch(self, node, document):
"""Check if a document matches a query tree."""
if node.getType() is QueryParser.CONJUNCTION:
return all(self._CheckMatch(child, document) for child in node.children)
if node.getType() is QueryParser.DISJUNCTION:
return any(self._CheckMatch(child, document) for child in node.children)
if node.getType() is QueryParser.NEGATION:
return not self._CheckMatch(node.children[0], document)
if node.getType() is QueryParser.RESTRICTION:
field, match = node.children
return self._MatchField(field, match, document)
return self._MatchGlobal(node, document)
def Matches(self, document):
return self._CheckMatch(self._query, document)
def FilterDocuments(self, documents):
return (doc for doc in documents if self.Matches(doc))
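# Hypothetical usage sketch, added for illustration only (not part of the
# stub): 'query_tree' is assumed to be the parsed query tree form handled by
# _CheckMatch above, and 'inverted_index' any object exposing
# GetPostingsForToken() as used by _PostingsForToken.
def _example_filter(query_tree, inverted_index, documents):
  """Return the documents in 'documents' that match 'query_tree'."""
  matcher = DocumentMatcher(query_tree, inverted_index)
  return list(matcher.FilterDocuments(documents))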
|
|
#
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
#
#
# gen_win.py -- base class for generating windows projects
#
import os
from hashlib import md5 as hashlib_md5
import sys
import fnmatch
import re
import subprocess
import glob
import string
import generator.swig.header_wrappers
import generator.swig.checkout_swig_header
import generator.swig.external_runtime
import gen_win_dependencies
if sys.version_info[0] >= 3:
# Python >=3.0
from io import StringIO
else:
# Python <3.0
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import gen_base
import ezt
class WinGeneratorBase(gen_win_dependencies.GenDependenciesBase):
"Base class for all Windows project files generators"
def __init__(self, fname, verfname, options, subdir):
"""
Do some Windows-specific setup:
build the list of platforms and configurations and
create the necessary paths.
"""
# Initialize parent
gen_win_dependencies.GenDependenciesBase.__init__(self, fname, verfname,
options, find_libs=False)
# On Windows we create svn_private_config.h in the output directory since
# r1370526.
#
# Without this replacement all projects include a not-existing file,
# which makes the MSBuild calculation to see whether a project is changed
# far more expensive than necessary.
self.private_built_includes.append('$(Configuration)/svn_private_config.h')
self.private_built_includes.remove('subversion/svn_private_config.h')
if subdir == 'vcnet-vcproj':
print('Generating for Visual Studio %s\n' % self.vs_version)
self.find_libraries(True)
# Print list of identified libraries
printed = []
for lib in sorted(self._libraries.values(),
key = lambda s: (s.internal, s.name)):
if lib.name in printed:
continue
printed.append(lib.name)
if lib.internal:
print('Using bundled %s %s' % (lib.name, lib.version))
else:
print('Found %s %s' % (lib.name, lib.version))
#Make some files for the installer so that we don't need to
#require sed or some other command to do it
### GJS: don't do this right now
if 0:
buf = open(os.path.join("packages","win32-innosetup","svn.iss.in"), 'rb').read()
buf = buf.replace("@VERSION@", "0.16.1+").replace("@RELEASE@", "4365")
buf = buf.replace("@DBBINDLL@", self.dbbindll)
svnissrel = os.path.join("packages","win32-innosetup","svn.iss.release")
svnissdeb = os.path.join("packages","win32-innosetup","svn.iss.debug")
if self.write_file_if_changed(svnissrel, buf.replace("@CONFIG@", "Release")):
print('Wrote %s' % svnissrel)
if self.write_file_if_changed(svnissdeb, buf.replace("@CONFIG@", "Debug")):
print('Wrote %s' % svnissdeb)
#Make the project files directory if it doesn't exist
#TODO win32 might not be the best path as win64 stuff will go here too
self.projfilesdir=os.path.join("build","win32",subdir)
self.rootpath = self.find_rootpath()
if not os.path.exists(self.projfilesdir):
os.makedirs(self.projfilesdir)
# Generate the build_locale.bat file
if self.enable_nls:
pofiles = []
for po in os.listdir(os.path.join('subversion', 'po')):
if fnmatch.fnmatch(po, '*.po'):
pofiles.append(POFile(po[:-3]))
data = {'pofiles': pofiles}
self.write_with_template(os.path.join(self.projfilesdir,
'build_locale.bat'),
'templates/build_locale.ezt', data)
#Here we can add additional platforms to compile for
self.platforms = ['Win32']
# VC 2002 and VC 2003 only allow a single platform per project file
if subdir == 'vcnet-vcproj':
if self.vcproj_version != '7.00' and self.vcproj_version != '7.10':
self.platforms = ['Win32','x64']
#Here we can add additional modes to compile for
self.configs = ['Debug','Release']
if 'swig' in self._libraries:
# Generate SWIG header wrappers and external runtime
for swig in (generator.swig.header_wrappers,
generator.swig.checkout_swig_header,
generator.swig.external_runtime):
swig.Generator(self.conf, self.swig_exe).write()
else:
print("%s not found; skipping SWIG file generation..." % self.swig_exe)
def errno_filter(self, codes):
"Callback for gen_base.write_errno_table()."
# Filter out python's SOC* codes, which alias the windows API names.
return set(filter(lambda code: not (10000 <= code <= 10100), codes))
def find_rootpath(self):
"Gets the root path as understand by the project system"
return os.path.relpath('.', self.projfilesdir) + "\\"
def makeguid(self, data):
"Generate a windows style GUID"
### blah. this function can generate invalid GUIDs. leave it for now,
### but we need to fix it. we can wrap the apr UUID functions, or
### implement this from scratch using the algorithms described in
### http://www.webdav.org/specs/draft-leach-uuids-guids-01.txt
myhash = hashlib_md5(data).hexdigest()
guid = ("{%s-%s-%s-%s-%s}" % (myhash[0:8], myhash[8:12],
myhash[12:16], myhash[16:20],
myhash[20:32])).upper()
return guid
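  # A possible fix sketched here as an assumption (not what the generator
  # currently does): uuid.uuid5() in the standard library derives a valid
  # RFC 4122 name-based UUID deterministically from the same input data.
  #
  #   import uuid
  #   guid = ('{%s}' % uuid.uuid5(uuid.NAMESPACE_URL, data)).upper()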
def path(self, *paths):
"""Convert build path to msvc path and prepend root"""
return self.rootpath + msvc_path_join(*list(map(msvc_path, paths)))
def apath(self, path, *paths):
"""Convert build path to msvc path and prepend root if not absolute"""
### On Unix, os.path.isabs won't do the right thing if "item"
### contains backslashes or drive letters
if os.path.isabs(path):
return msvc_path_join(msvc_path(path), *list(map(msvc_path, paths)))
else:
return self.rootpath + msvc_path_join(msvc_path(path),
*list(map(msvc_path, paths)))
def get_install_targets(self):
"Generate the list of targets"
# Get list of targets to generate project files for
install_targets = self.graph.get_all_sources(gen_base.DT_INSTALL) \
+ self.projects
install_targets = [x for x in install_targets if not x.when or
x.when in self._windows_when]
# Don't create projects for scripts
install_targets = [x for x in install_targets if not isinstance(x, gen_base.TargetScript)]
if not self.enable_nls:
install_targets = [x for x in install_targets if x.name != 'locale']
# Drop the libsvn_fs_base target and tests if we don't have BDB
if 'db' not in self._libraries:
install_targets = [x for x in install_targets if x.name != 'libsvn_fs_base']
install_targets = [x for x in install_targets if not (isinstance(x, gen_base.TargetExe)
and x.install == 'bdb-test')]
# Drop the ra_serf target if we don't have serf
if 'serf' not in self._libraries:
install_targets = [x for x in install_targets if x.name != 'libsvn_ra_serf']
# Drop the swig targets if we don't have swig or language support
install_targets = [x for x in install_targets
if (not (isinstance(x, gen_base.TargetSWIG)
or isinstance(x, gen_base.TargetSWIGLib)
or isinstance(x, gen_base.TargetSWIGProject))
or (x.lang in self._libraries
and 'swig' in self._libraries))]
# Drop the Java targets if we don't have a JDK
if 'java_sdk' not in self._libraries:
install_targets = [x for x in install_targets
if not (isinstance(x, gen_base.TargetJava)
or isinstance(x, gen_base.TargetJavaHeaders)
or x.name == '__JAVAHL__'
or x.name == '__JAVAHL_TESTS__'
or x.name == 'libsvnjavahl')]
# Create DLL targets for libraries
dll_targets = []
for target in install_targets:
if isinstance(target, gen_base.TargetLib):
if target.msvc_fake:
install_targets.append(self.create_fake_target(target))
if target.msvc_export:
if self.disable_shared:
target.disable_shared()
else:
dll_targets.append(self.create_dll_target(target))
install_targets.extend(dll_targets)
# Fix up targets that can't be linked to libraries
if not self.disable_shared:
for target in install_targets:
if isinstance(target, gen_base.TargetExe) and target.msvc_force_static:
# Make direct dependencies of all the indirect dependencies
linked_deps = {}
self.get_linked_win_depends(target, linked_deps)
for lk in linked_deps.keys():
if not isinstance(lk, gen_base.TargetLib) or not lk.msvc_export:
self.graph.add(gen_base.DT_LINK, target.name, lk)
else:
self.graph.remove(gen_base.DT_LINK, target.name, lk)
for target in install_targets:
target.project_guid = self.makeguid(target.name)
# sort these for output stability, to watch out for regressions.
install_targets.sort(key = lambda t: t.name)
return install_targets
def create_fake_target(self, dep):
"Return a new target which depends on another target but builds nothing"
section = gen_base.TargetProject.Section(gen_base.TargetProject,
dep.name + "_fake",
{'path': 'build/win32'}, self)
section.create_targets()
section.target.msvc_name = dep.msvc_name and dep.msvc_name + "_fake"
self.graph.add(gen_base.DT_LINK, section.target.name, dep)
dep.msvc_fake = section.target
return section.target
def create_dll_target(self, dep):
"Return a dynamic library that depends on a static library"
target = gen_base.TargetLib(dep.name,
{ 'path' : dep.path,
'msvc-name' : dep.name + "_dll" },
self)
target.msvc_export = dep.msvc_export
target.msvc_delayload = dep.msvc_delayload
# move the description from the static library target to the dll.
target.desc = dep.desc
dep.desc = None
# The dependency should now be static.
dep.msvc_export = None
dep.msvc_static = True
dep.msvc_delayload = False
# Remove the 'lib' prefix, so that the static library will be called
# svn_foo.lib
dep.name = dep.name[3:]
# However, its name should still be 'libsvn_foo' in Visual Studio
dep.msvc_name = target.name
# We renamed dep, so right now it has no dependencies. Because target has
# dep's old dependencies, transfer them over to dep.
deps = self.graph.deps[gen_base.DT_LINK]
deps[dep.name] = deps[target.name]
for key in deps.keys():
# Link everything except tests against the dll. Tests need to be linked
# against the static libraries because they sometimes access internal
# library functions.
if dep in deps[key]:
deps[key].remove(dep)
deps[key].append(target)
# The dll has exactly one dependency, the static library.
deps[target.name] = [ dep ]
return target
def get_configs(self, target):
"Get the list of configurations for the project"
configs = [ ]
for cfg in self.configs:
configs.append(
ProjectItem(name=cfg,
lower=cfg.lower(),
defines=self.get_win_defines(target, cfg),
libdirs=self.get_win_lib_dirs(target, cfg),
libs=self.get_win_libs(target, cfg),
includes=self.get_win_includes(target, cfg),
forced_include_files
=self.get_win_forced_includes(target, cfg),
))
return configs
def get_proj_sources(self, quote_path, target):
"Get the list of source files for each project"
sources = [ ]
javac_exe = "javac"
javah_exe = "javah"
jar_exe = "jar"
if self.jdk_path:
javac_exe = os.path.join(self.jdk_path, "bin", javac_exe)
javah_exe = os.path.join(self.jdk_path, "bin", javah_exe)
jar_exe = os.path.join(self.jdk_path, "bin", jar_exe)
if not isinstance(target, gen_base.TargetProject):
for source, object, reldir in self.get_win_sources(target):
cbuild = None
ctarget = None
cdesc = None
cignore = None
if isinstance(target, gen_base.TargetJavaHeaders):
classes = self.path(target.classes)
if self.junit_path is not None:
classes = "%s;%s" % (classes, self.junit_path)
headers = self.path(target.headers)
classname = target.package + "." + source.class_name
cbuild = "%s -verbose -force -classpath %s -d %s %s" \
% (self.quote(javah_exe), self.quote(classes),
self.quote(headers), classname)
ctarget = self.path(object.filename_win)
cdesc = "Generating %s" % (object.filename_win)
elif isinstance(target, gen_base.TargetJavaClasses):
classes = targetdir = self.path(target.classes)
if self.junit_path is not None:
classes = "%s;%s" % (classes, self.junit_path)
sourcepath = self.path(source.sourcepath)
per_project_flags = ""
if target.name.find("-compat-") != -1:
per_project_flags += "-Xlint:-deprecation -Xlint:-dep-ann" \
" -Xlint:-rawtypes"
cbuild = ("%s -g -Xlint -Xlint:-options " +
per_project_flags +
" -target 1.5 -source 1.5 -classpath "
" %s -d %s "
" -sourcepath %s $(InputPath)") \
% tuple(map(self.quote, (javac_exe, classes,
targetdir, sourcepath)))
ctarget = self.path(object.filename)
cdesc = "Compiling %s" % (source)
rsrc = self.path(str(source))
if quote_path and '-' in rsrc:
rsrc = '"%s"' % rsrc
if (not isinstance(source, gen_base.SourceFile)
and cbuild is None and ctarget is None and cdesc is None
and source in self._excluded_from_build):
# Make sure include dependencies are excluded from the build.
# This is an 'orrible 'ack that relies on the source being a
# string if it's an include dependency, or a SourceFile object
# otherwise.
cignore = 'yes'
sources.append(ProjectItem(path=rsrc, reldir=reldir, user_deps=[],
custom_build=cbuild, custom_target=ctarget,
custom_desc=cdesc, ignored = cignore,
extension=os.path.splitext(rsrc)[1]))
if isinstance(target, gen_base.TargetJavaClasses) and target.jar:
classdir = self.path(target.classes)
jarfile = msvc_path_join(classdir, target.jar)
cbuild = "%s cf %s -C %s %s" \
% (self.quote(jar_exe), jarfile, classdir,
" ".join(target.packages))
deps = [x.custom_target for x in sources]
sources.append(ProjectItem(path='makejar', reldir='', user_deps=deps,
custom_build=cbuild, custom_target=jarfile,
extension=''))
if isinstance(target, gen_base.TargetSWIG):
swig_options = self.swig.opts[target.lang].split()
swig_options.append('-DWIN32')
swig_deps = []
for include_dir in self.get_win_includes(target):
swig_options.append("-I%s" % self.quote(include_dir))
for obj in self.graph.get_sources(gen_base.DT_LINK, target.name):
if isinstance(obj, gen_base.SWIGObject):
for cobj in self.graph.get_sources(gen_base.DT_OBJECT, obj):
if isinstance(cobj, gen_base.SWIGObject):
csrc = self.path(cobj.filename)
cout = csrc
# included header files that the generated c file depends on
user_deps = swig_deps[:]
for iobj in self.graph.get_sources(gen_base.DT_SWIG_C, cobj):
isrc = self.path(str(iobj))
if not isinstance(iobj, gen_base.SWIGSource):
user_deps.append(isrc)
continue
cbuild = '%s %s -o %s $(InputPath)' \
% (self.swig_exe, " ".join(swig_options), cout)
cdesc = 'Generating %s' % cout
sources.append(ProjectItem(path=isrc, reldir=None,
custom_build=cbuild,
custom_target=csrc,
custom_desc=cdesc,
user_deps=user_deps,
extension=''))
def_file = self.get_def_file(target)
if def_file is not None:
gsrc = self.path("build/generator/extractor.py")
deps = [self.path('build.conf')]
for header in target.msvc_export:
deps.append(self.path('subversion/include', header))
cbuild = "%s $(InputPath) %s > %s" \
% (self.quote(sys.executable), " ".join(deps), def_file)
cdesc = 'Generating %s ' % def_file
sources.append(ProjectItem(path=gsrc, reldir=None,
custom_build=cbuild,
custom_target=def_file,
custom_desc=cdesc,
user_deps=deps,
extension=''))
sources.append(ProjectItem(path=def_file, reldir=None,
custom_build=None, user_deps=[],
extension=''))
sources.sort(key = lambda x: x.path)
return sources
def get_output_name(self, target):
if isinstance(target, gen_base.TargetExe):
return target.name + '.exe'
elif isinstance(target, gen_base.TargetJava):
### This target file is not actually built, but we need it to keep
### the VC Express build happy.
return target.name
elif isinstance(target, gen_base.TargetApacheMod):
return target.name + '.so'
elif isinstance(target, gen_base.TargetLib):
if target.msvc_static:
return '%s-%d.lib' % (target.name, self.version)
else:
return os.path.basename(target.filename)
elif isinstance(target, gen_base.TargetProject):
### Since this target type doesn't produce any output, we shouldn't
### need to specify an output filename. But to keep the VC.NET template
### happy for now we have to return something
return target.name + '.exe'
elif isinstance(target, gen_base.TargetI18N):
return target.name
def get_output_pdb(self, target):
name = self.get_output_name(target)
name = os.path.splitext(name)
return name[0] + '.pdb'
def get_output_dir(self, target):
if isinstance(target, gen_base.TargetJavaHeaders):
return msvc_path("../" + target.headers)
elif isinstance(target, gen_base.TargetJavaClasses):
return msvc_path("../" + target.classes)
else:
return msvc_path(target.path)
def get_intermediate_dir(self, target):
if isinstance(target, gen_base.TargetSWIG):
return msvc_path_join(msvc_path(target.path), target.name)
else:
return self.get_output_dir(target)
def get_def_file(self, target):
if isinstance(target, gen_base.TargetLib) and target.msvc_export:
return target.name + ".def"
return None
def gen_proj_names(self, install_targets):
"Generate project file names for the targets"
# Generate project file names for the targets: replace dashes with
# underscores and replace *-test with test_* (so that the test
# programs are visually separate from the rest of the projects)
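# For example (illustrative names): 'libsvn-fs-test' maps to 'test_libsvn_fs'
# and 'svn-client' maps to 'svn_client'; SWIG targets additionally get a
# 'swig_' prefix, and an explicit msvc_name always takes precedence.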
for target in install_targets:
if target.msvc_name:
target.proj_name = target.msvc_name
continue
name = target.name
pos = name.find('-test')
if pos >= 0:
proj_name = 'test_' + name[:pos].replace('-', '_')
elif isinstance(target, gen_base.TargetSWIG):
proj_name = 'swig_' + name.replace('-', '_')
else:
proj_name = name.replace('-', '_')
target.proj_name = proj_name
def get_external_project(self, target, proj_ext):
if not ((isinstance(target, gen_base.TargetLinked)
or isinstance(target, gen_base.TargetI18N))
and target.external_project):
return None
if target.external_project.find('/') != -1:
path = target.external_project
else:
path = os.path.join(self.projfilesdir, target.external_project)
return "%s.%s" % (gen_base.native_path(path), proj_ext)
def adjust_win_depends(self, target, name):
"Handle special dependencies if needed"
if name == '__CONFIG__':
depends = []
else:
depends = self.sections['__CONFIG__'].get_dep_targets(target)
depends.extend(self.get_win_depends(target, FILTER_PROJECTS))
# Make the default target generate the .mo files, too
if self.enable_nls and name == '__ALL__':
depends.extend(self.sections['locale'].get_targets())
# To set the correct build order of the JavaHL targets, the javahl-javah
# and libsvnjavahl targets are defined with extra dependencies in build.conf
# like this:
# add-deps = $(javahl_javah_DEPS) $(javahl_java_DEPS)
#
# This section parses those dependencies and adds them to the dependency list
# for this target.
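# For example (illustrative): an add-deps value of '$(javahl_javah_DEPS)'
# yields the captured group 'javahl_javah', which is mapped back to the
# section name 'javahl-javah' and expanded to that section's targets.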
if name.startswith('javahl') or name == 'libsvnjavahl':
for dep in re.findall(r'\$\(([^\)]*)_DEPS\)', target.add_deps):
dep = dep.replace('_', '-')
depends.extend(self.sections[dep].get_targets())
return depends
def get_win_depends(self, target, mode):
"""Return the list of dependencies for target"""
dep_dict = {}
if mode == FILTER_EXTERNALLIBS:
self.get_externallib_depends(target, dep_dict)
elif isinstance(target, gen_base.TargetLib) and target.msvc_static:
self.get_static_win_depends(target, dep_dict)
else:
self.get_linked_win_depends(target, dep_dict)
deps = []
if mode == FILTER_PROJECTS:
for dep, (is_proj, is_lib, is_static) in dep_dict.items():
if is_proj:
deps.append(dep)
elif mode == FILTER_LIBS or mode == FILTER_EXTERNALLIBS:
for dep, (is_proj, is_lib, is_static) in dep_dict.items():
if is_static or (is_lib and not is_proj):
# Filter explicit msvc libraries of optional dependencies
if (dep.name in self._libraries
or dep.name not in self._optional_libraries):
deps.append(dep)
else:
raise NotImplementedError
deps.sort(key = lambda d: d.name)
return deps
def get_direct_depends(self, target):
"""Read target dependencies from graph
return value is list of (dependency, (is_project, is_lib, is_static)) tuples
"""
deps = []
for dep in self.graph.get_sources(gen_base.DT_LINK, target.name):
if not isinstance(dep, gen_base.Target):
continue
is_project = hasattr(dep, 'proj_name')
is_lib = isinstance(dep, gen_base.TargetLib)
is_static = is_lib and dep.msvc_static
deps.append((dep, (is_project, is_lib, is_static)))
for dep in self.graph.get_sources(gen_base.DT_NONLIB, target.name):
is_project = hasattr(dep, 'proj_name')
is_lib = isinstance(dep, gen_base.TargetLib)
is_static = is_lib and dep.msvc_static
deps.append((dep, (is_project, is_lib, is_static)))
return deps
def get_static_win_depends(self, target, deps):
"""Find project dependencies for a static library project"""
for dep, dep_kind in self.get_direct_depends(target):
is_proj, is_lib, is_static = dep_kind
# recurse for projectless targets
if not is_proj:
self.get_static_win_depends(dep, deps)
# Only add project dependencies on non-library projects. If we added
# project dependencies on libraries, MSVC would copy those libraries
# into the static archive. This would waste space and lead to linker
# warnings about multiply defined symbols. Instead, the library
# dependencies get added to any DLLs or EXEs that depend on this static
# library (see get_linked_win_depends() implementation).
if not is_lib:
deps[dep] = dep_kind
# a static library can depend on another library through a fake project
elif dep.msvc_fake:
deps[dep.msvc_fake] = dep_kind
def get_linked_win_depends(self, target, deps, static_recurse=0):
"""Find project dependencies for a DLL or EXE project"""
direct_deps = self.get_direct_depends(target)
for dep, dep_kind in direct_deps:
is_proj, is_lib, is_static = dep_kind
# add all top level dependencies
if not static_recurse or is_lib:
# We need to guard against linking both a static and a dynamic library
# into a project (this is mainly a concern for tests). To do this, for
# every dll dependency we first check to see if its corresponding
# static library is already in the list of dependencies. If it is,
# we don't add the dll to the list.
if is_lib and dep.msvc_export:
static_dep = self.graph.get_sources(gen_base.DT_LINK, dep.name)[0]
if static_dep in deps:
continue
deps[dep] = dep_kind
# add any libraries that static library dependencies depend on
for dep, dep_kind in direct_deps:
is_proj, is_lib, is_static = dep_kind
# recurse for projectless dependencies
if not is_proj:
self.get_linked_win_depends(dep, deps, 0)
# also recurse into static library dependencies
elif is_static:
self.get_linked_win_depends(dep, deps, 1)
# and recurse over the external library dependencies for swig libraries,
# to include the language runtime
elif isinstance(dep, gen_base.TargetSWIGLib):
self.get_externallib_depends(dep, deps)
def get_externallib_depends(self, target, deps):
"""Find externallib dependencies for a project"""
direct_deps = self.get_direct_depends(target)
for dep, dep_kind in direct_deps:
self.get_externallib_depends(dep, deps)
if isinstance(target, gen_base.TargetLinked) and dep.external_lib:
deps[dep] = dep_kind
def get_win_defines(self, target, cfg):
"Return the list of defines for target"
fakedefines = ["WIN32","_WINDOWS","alloca=_alloca",
"_CRT_SECURE_NO_DEPRECATE=",
"_CRT_NONSTDC_NO_DEPRECATE=",
"_CRT_SECURE_NO_WARNINGS="]
if cfg == 'Debug':
fakedefines.extend(["_DEBUG","SVN_DEBUG"])
elif cfg == 'Release':
fakedefines.append("NDEBUG")
if isinstance(target, gen_base.TargetApacheMod):
if target.name == 'mod_dav_svn':
fakedefines.extend(["AP_DECLARE_EXPORT"])
if self.cpp_defines:
fakedefines.extend(self.cpp_defines)
if isinstance(target, gen_base.TargetSWIG):
fakedefines.append("SWIG_GLOBAL")
for dep in self.get_win_depends(target, FILTER_EXTERNALLIBS):
if dep.external_lib:
for elib in re.findall(r'\$\(SVN_([^\)]*)_LIBS\)', dep.external_lib):
external_lib = elib.lower()
if external_lib in self._libraries:
lib = self._libraries[external_lib]
if lib.defines:
fakedefines.extend(lib.defines)
# check if they wanted nls
if self.enable_nls:
fakedefines.append("ENABLE_NLS")
if target.name.endswith('svn_subr'):
fakedefines.append("SVN_USE_WIN32_CRASHHANDLER")
return fakedefines
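# The external_lib values parsed above (and in the helpers below) are
# build.conf references such as '$(SVN_APR_LIBS)' (library name illustrative):
# the regex captures 'APR', which is lower-cased and used as the key into
# self._libraries to pull in that library's defines, include directories,
# library directories and link libraries.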
def get_win_includes(self, target, cfg='Release'):
"Return the list of include directories for target"
fakeincludes = [ "subversion/include" ]
for dep in self.get_win_depends(target, FILTER_EXTERNALLIBS):
if dep.external_lib:
for elib in re.findall(r'\$\(SVN_([^\)]*)_LIBS\)', dep.external_lib):
external_lib = elib.lower()
if external_lib in self._libraries:
lib = self._libraries[external_lib]
fakeincludes.extend(lib.include_dirs)
if (isinstance(target, gen_base.TargetSWIG)
or isinstance(target, gen_base.TargetSWIGLib)):
util_includes = "subversion/bindings/swig/%s/libsvn_swig_%s" \
% (target.lang,
gen_base.lang_utillib_suffix[target.lang])
fakeincludes.append(util_includes)
if (isinstance(target, gen_base.TargetSWIG)
or isinstance(target, gen_base.TargetSWIGLib)):
# Projects aren't generated unless we have swig
assert self.swig_libdir
if target.lang == "perl" and self.swig_version >= (1, 3, 28):
# At least swigwin 1.3.38+ uses perl5 as directory name.
lang_subdir = 'perl5'
else:
lang_subdir = target.lang
# After the language specific includes include the generic libdir,
# to allow overriding a generic with a per language include
fakeincludes.append(os.path.join(self.swig_libdir, lang_subdir))
fakeincludes.append(self.swig_libdir)
if 'cxxhl' in target.name:
fakeincludes.append("subversion/bindings/cxxhl/include")
return gen_base.unique(map(self.apath, fakeincludes))
def get_win_lib_dirs(self, target, cfg):
"Return the list of library directories for target"
debug = (cfg == 'Debug')
if not isinstance(target, gen_base.TargetLinked):
return []
if isinstance(target, gen_base.TargetLib) and target.msvc_static:
return []
fakelibdirs = []
# When nls is enabled, all our projects use it directly via the _() macro,
# even though only libsvn_subr references it in build.conf
if self.enable_nls:
lib = self._libraries['intl']
if debug and lib.debug_lib_dir:
fakelibdirs.append(lib.debug_lib_dir)
else:
fakelibdirs.append(lib.lib_dir)
if (isinstance(target, gen_base.TargetSWIG)
or isinstance(target, gen_base.TargetSWIGLib)):
if target.lang in self._libraries:
lib = self._libraries[target.lang]
if debug and lib.debug_lib_dir:
fakelibdirs.append(lib.debug_lib_dir)
elif lib.lib_dir:
fakelibdirs.append(lib.lib_dir)
for dep in self.get_win_depends(target, FILTER_LIBS):
if dep.external_lib:
for elib in re.findall(r'\$\(SVN_([^\)]*)_LIBS\)', dep.external_lib):
external_lib = elib.lower()
if external_lib not in self._libraries:
continue
lib = self._libraries[external_lib]
if debug and lib.debug_lib_dir:
lib_dir = self.apath(lib.debug_lib_dir)
elif lib.lib_dir:
lib_dir = self.apath(lib.lib_dir)
else:
continue # Dependency without library (E.g. JDK)
fakelibdirs.append(lib_dir)
return gen_base.unique(fakelibdirs)
def get_win_libs(self, target, cfg):
"Return the list of external libraries needed for target"
debug = (cfg == 'Debug')
if not isinstance(target, gen_base.TargetLinked):
return []
if isinstance(target, gen_base.TargetLib) and target.msvc_static:
return []
nondeplibs = target.msvc_libs[:]
# When nls is enabled, all our projects use it directly via the _() macro,
# even though only libsvn_subr references it in build.conf
if self.enable_nls:
lib = self._libraries['intl']
if debug and lib.debug_lib_name:
nondeplibs.append(lib.debug_lib_name)
else:
nondeplibs.append(lib.lib_name)
if (isinstance(target, gen_base.TargetSWIG)
or isinstance(target, gen_base.TargetSWIGLib)):
if target.lang in self._libraries:
lib = self._libraries[target.lang]
if debug and lib.debug_lib_name:
nondeplibs.append(lib.debug_lib_name)
elif lib.lib_name:
nondeplibs.append(lib.lib_name)
for dep in self.get_win_depends(target, FILTER_LIBS):
nondeplibs.extend(dep.msvc_libs)
if dep.external_lib:
for elib in re.findall(r'\$\(SVN_([^\)]*)_LIBS\)', dep.external_lib):
external_lib = elib.lower()
if external_lib not in self._libraries:
if external_lib not in self._optional_libraries:
print('Warning: Using undeclared dependency \'$(SVN_%s_LIBS)\'.'
% (elib,))
continue
lib = self._libraries[external_lib]
if debug:
nondeplibs.append(lib.debug_lib_name)
else:
nondeplibs.append(lib.lib_name)
return gen_base.unique(nondeplibs)
def get_win_sources(self, target, reldir_prefix=''):
"Return the list of source files that need to be compliled for target"
sources = { }
for obj in self.graph.get_sources(gen_base.DT_LINK, target.name):
if isinstance(obj, gen_base.Target):
continue
for src in self.graph.get_sources(gen_base.DT_OBJECT, obj):
if isinstance(src, gen_base.SourceFile):
if reldir_prefix:
if src.reldir:
reldir = reldir_prefix + '\\' + src.reldir
else:
reldir = reldir_prefix
else:
reldir = src.reldir
else:
reldir = ''
sources[src] = src, obj, reldir
return list(sources.values())
def get_win_forced_includes(self, target, cfg):
"""Return a list of include files that need to be included before any
other header in every c/c++ file"""
fakeincludes = []
for dep in self.get_win_depends(target, FILTER_EXTERNALLIBS):
if dep.external_lib:
for elib in re.findall(r'\$\(SVN_([^\)]*)_LIBS\)', dep.external_lib):
external_lib = elib.lower()
if external_lib in self._libraries:
lib = self._libraries[external_lib]
fakeincludes.extend(lib.forced_includes)
return gen_base.unique(fakeincludes)
def write_with_template(self, fname, tname, data):
fout = StringIO()
template = ezt.Template(compress_whitespace = 0)
template.parse_file(os.path.join('build', 'generator', tname))
template.generate(fout, data)
self.write_file_if_changed(fname, fout.getvalue())
def move_proj_file(self, path, name, params=()):
### Move our slightly templatized pre-built project files into place --
### these projects include zlib, serf, locale, config, etc.
dest_file = os.path.join(path, name)
source_template = os.path.join('templates', name + '.ezt')
data = {
'version' : self.vcproj_version,
'configs' : self.configs,
'platforms' : self.platforms,
'toolset_version' : 'v' + self.vcproj_version.replace('.',''),
}
for key, val in params:
data[key] = val
self.write_with_template(dest_file, source_template, data)
def write(self):
"Override me when creating a new project type"
raise NotImplementedError
class ProjectItem:
"A generic item class for holding sources info, config info, etc for a project"
def __init__(self, **kw):
self.ignored = None
vars(self).update(kw)
# ============================================================================
FILTER_LIBS = 1
FILTER_PROJECTS = 2
FILTER_EXTERNALLIBS = 3
class POFile:
"Item class for holding po file info"
def __init__(self, base):
self.po = base + '.po'
self.mo = base + '.mo'
# MSVC paths always use backslashes regardless of current platform
def msvc_path(path):
"""Convert a build path to an msvc path"""
return path.replace('/', '\\')
def msvc_path_join(*path_parts):
"""Join path components into an msvc path"""
return '\\'.join(path_parts)
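# Illustrative usage of the helpers above (results contain single backslashes):
#   msvc_path('build/generator')            -> build\generator
#   msvc_path_join('subversion', 'include') -> subversion\include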
import datetime
import pytest
import random
import string
import subprocess
import time
try:
from urllib.parse import quote_plus
except ImportError:
from urllib import quote_plus
from symantecssl.order import ModifyOperation
from symantecssl.order import ProductCode
from symantecssl.exceptions import SymantecError
try:
from subprocess import check_output
except ImportError:
def check_output(*popenargs, **kwargs):
if 'stdout' in kwargs:
raise ValueError(
'stdout argument not allowed, it will be overridden.'
)
process = subprocess.Popen(
stdout=subprocess.PIPE,
*popenargs,
**kwargs
)
output, unused_err = process.communicate()
retcode = process.poll()
if retcode:
cmd = kwargs.get("args")
if cmd is None:
cmd = popenargs[0]
raise subprocess.CalledProcessError(retcode, cmd, output=output)
return output
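# create_csr() below shells out to openssl to build a throwaway 2048-bit RSA
# certificate signing request (the private key goes to /dev/null); partition()
# drops the human-readable dump that "-text" prepends, so only the PEM block
# starting at '-----BEGIN CERTIFICATE REQUEST-----' is returned, decoded to
# ASCII.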
def create_csr():
return b"".join(
check_output([
"openssl", "req", "-new", "-newkey", "rsa:2048", "-nodes",
"-keyout", "/dev/null", "-text", "-batch", "-subj",
"/C=US/ST=Texas/L=San Antonio/O=MyOrg/CN=example.com",
]).partition(b"-----BEGIN CERTIFICATE REQUEST-----")[1:]
).decode("ascii")
def order_with_order_id(symantec, order_id, csr):
return symantec.order(
partnercode=symantec.partner_code,
productcode=ProductCode.QuickSSLPremium,
partnerorderid=order_id,
organizationname="MyOrg",
addressline1="5000 Walzem",
city="San Antonio",
region="TX",
postalcode="78218",
country="US",
organizationphone="2103124000",
validityperiod="12",
serverCount="1",
webservertype="20",
admincontactfirstname="John",
admincontactlastname="Doe",
admincontactphone="2103122400",
admincontactemail="[email protected]",
admincontacttitle="Caesar",
admincontactaddressline1="123 Road",
admincontactcity="San Antonio",
admincontactregion="TX",
admincontactpostalcode="78218",
admincontactcountry="US",
techsameasadmin="True",
billsameastech="True",
approveremail="[email protected]",
csr=csr,
)
@pytest.fixture
def order_kwargs():
order_id = "".join(random.choice(string.ascii_letters) for _ in range(30))
return {
"partnerorderid": order_id,
"productcode": ProductCode.QuickSSLPremium,
"organizationname": "MyOrg",
"addressline1": "5000 Walzem",
"city": "San Antonio",
"region": "TX",
"postalcode": "78218",
"country": "US",
"organizationphone": "2103124000",
"validityperiod": "12",
"serverCount": "1",
"webservertype": "20",
"admincontactfirstname": "John",
"admincontactlastname": "Doe",
"admincontactphone": "2103122400",
"admincontactemail": "[email protected]",
"admincontacttitle": "Caesar",
"admincontactaddressline1": "123 Road",
"admincontactcity": "San Antonio",
"admincontactregion": "TX",
"admincontactpostalcode": "78218",
"admincontactcountry": "US",
"techsameasadmin": "True",
"billsameastech": "True",
"approveremail": "[email protected]",
"csr": create_csr(),
}
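# The tests below record/replay HTTP traffic through VCR cassettes (the `vcr`
# and `symantec` fixtures are assumed to be provided by the suite's conftest).
# The placeholders passed to use_cassette() substitute the randomly generated
# order ids and CSRs (plus their URL-quoted forms) in the stored interactions,
# keeping cassettes stable and free of request-specific data.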
def test_get_orders_by_date_range(symantec, vcr):
now = datetime.datetime.now()
date = now.strftime("%Y-%m-%d")
orderids = []
csrs = []
for _ in range(2):
orderids.append("".join(random.choice(string.ascii_letters)
for _ in range(30)))
csrs.append(create_csr())
with vcr.use_cassette(
placeholders=(
[{"placeholder": "<<CSR{0}>>".format(i), "replace": csr}
for i, csr in enumerate(csrs)]
+
[{"placeholder": "<<CSR{0}_QUOTED>>".format(i),
"replace": quote_plus(csr)}
for i, csr in enumerate(csrs)]
+
[{"placeholder": "<<OrderID{0}>>".format(i), "replace": oid}
for i, oid in enumerate(orderids)])):
for order_id, csr in zip(orderids, csrs):
order_with_order_id(symantec, order_id, csr)
order_list = symantec.get_orders_by_date_range(
fromdate=date,
todate=date,
partnercode=symantec.partner_code
)
assert len(order_list) > 1
order_data = order_list.pop()
assert order_data["PartnerOrderID"]
assert order_data["OrderDate"]
def test_get_order_by_partner_order_id(symantec, vcr):
order_id = "".join(random.choice(string.ascii_letters) for _ in range(30))
csr = create_csr()
with vcr.use_cassette(
placeholders=(
[
{"placeholder": "<<CSR>>", "replace": csr},
{"placeholder": "<<CSR_QUOTED>>", "replace": quote_plus(csr)},
{"placeholder": "<<OrderID>>", "replace": order_id},
])):
order_with_order_id(symantec, order_id, csr)
order_data = symantec.get_order_by_partner_order_id(
partnerorderid=order_id,
partnercode=symantec.partner_code
)
assert order_data["OrderInfo"]["PartnerOrderID"] == order_id
def test_modify_order(symantec, vcr):
order_id = "".join(random.choice(string.ascii_letters) for _ in range(30))
csr = create_csr()
with vcr.use_cassette(
placeholders=(
[
{"placeholder": "<<CSR>>", "replace": csr},
{"placeholder": "<<CSR_QUOTED>>", "replace": quote_plus(csr)},
{"placeholder": "<<OrderID>>", "replace": order_id},
])):
order_with_order_id(symantec, order_id, csr)
symantec.modify_order(
partnerorderid=order_id,
partnercode=symantec.partner_code,
productcode=ProductCode.QuickSSLPremium,
modifyorderoperation=ModifyOperation.Cancel,
)
order_data = symantec.get_order_by_partner_order_id(
partnerorderid=order_id,
partnercode=symantec.partner_code,
)
assert order_data["OrderInfo"]["OrderStatusMajor"] == "CANCELLED"
def test_get_modified_orders(symantec, vcr):
now = datetime.datetime.now()
date1 = now.strftime("%Y-%m-%d")
date2 = (now + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
orderids = []
csrs = []
for _ in range(2):
orderids.append("".join(random.choice(string.ascii_letters)
for _ in range(30)))
csrs.append(create_csr())
with vcr.use_cassette(
placeholders=(
[{"placeholder": "<<CSR{0}>>".format(i), "replace": csr}
for i, csr in enumerate(csrs)]
+
[{"placeholder": "<<CSR{0}_QUOTED>>".format(i),
"replace": quote_plus(csr)}
for i, csr in enumerate(csrs)]
+
[{"placeholder": "<<OrderID{0}>>".format(i), "replace": oid}
for i, oid in enumerate(orderids)])):
for order_id, csr in zip(orderids, csrs):
order_with_order_id(symantec, order_id, csr)
for order_id in orderids:
symantec.modify_order(
partnerorderid=order_id,
partnercode=symantec.partner_code,
productcode=ProductCode.QuickSSLPremium,
modifyorderoperation=ModifyOperation.Cancel,
)
order_list = symantec.get_modified_orders(
fromdate=date1,
todate=date2,
partnercode=symantec.partner_code
)
assert len(order_list) > 1
order_data = order_list.pop()
assert order_data["OrderInfo"]["PartnerOrderID"]
assert order_data["ModificationEvents"].pop()
def test_get_quick_approver_list(symantec, vcr):
with vcr.use_cassette():
approver_list = symantec.get_quick_approver_list(
partnercode=symantec.partner_code,
domain="testingsymantecssl.com"
)
assert len(approver_list) > 0
for approver in approver_list:
assert set(approver.keys()) == set(["ApproverType", "ApproverEmail"])
def test_change_approver_email(symantec, vcr):
order_id = "".join(random.choice(string.ascii_letters) for _ in range(30))
csr = create_csr()
with vcr.use_cassette(
placeholders=(
[
{"placeholder": "<<CSR>>", "replace": csr},
{"placeholder": "<<CSR_QUOTED>>", "replace": quote_plus(csr)},
{"placeholder": "<<OrderID>>", "replace": order_id},
])):
order_with_order_id(symantec, order_id, csr)
symantec.change_approver_email(
partnercode=symantec.partner_code,
partnerorderid=order_id,
approveremail="[email protected]"
)
new_email = symantec.get_order_by_partner_order_id(
partnerorderid=order_id,
partnercode=symantec.partner_code,
returnproductdetail=True
)["QuickOrderDetail"]["ApproverEmailAddress"]
assert new_email == "[email protected]"
def test_order_call(symantec, order_kwargs, vcr):
order_kwargs["partnercode"] = symantec.partner_code
order_id = order_kwargs["partnerorderid"]
csr = order_kwargs["csr"]
with vcr.use_cassette(
placeholders=(
[
{"placeholder": "<<CSR>>", "replace": csr},
{"placeholder": "<<CSR_QUOTED>>", "replace": quote_plus(csr)},
{"placeholder": "<<OrderID>>", "replace": order_id},
])):
order_data = symantec.order(**order_kwargs)
assert set(order_data.keys()) == set(["GeoTrustOrderID", "PartnerOrderID"])
assert order_data["PartnerOrderID"] == order_id
def test_validate_order_parameters_success(
symantec, order_kwargs, vcr):
order_kwargs["partnercode"] = symantec.partner_code
order_id = order_kwargs["partnerorderid"]
csr = order_kwargs["csr"]
with vcr.use_cassette(
placeholders=(
[
{"placeholder": "<<CSR>>", "replace": csr},
{"placeholder": "<<CSR_QUOTED>>", "replace": quote_plus(csr)},
{"placeholder": "<<OrderID>>", "replace": order_id},
])):
response = symantec.validate_order_parameters(**order_kwargs)
expected_keys = set(["ValidityPeriod", "Price", "ParsedCSR", "RenewalInfo",
"CertificateSignatureHashAlgorithm"])
assert set(response.keys()) == set(expected_keys)
def test_validate_order_parameters_error(symantec, order_kwargs, vcr):
order_kwargs["partnercode"] = symantec.partner_code
order_kwargs["webservertype"] = "9999"
order_id = order_kwargs["partnerorderid"]
csr = order_kwargs["csr"]
with vcr.use_cassette(
placeholders=(
[
{"placeholder": "<<CSR>>", "replace": csr},
{"placeholder": "<<CSR_QUOTED>>", "replace": quote_plus(csr)},
{"placeholder": "<<OrderID>>", "replace": order_id},
])):
with pytest.raises(SymantecError) as e:
symantec.validate_order_parameters(**order_kwargs)
assert str(e).endswith("The Symantec API call ValidateOrderParameters"
" returned an error: 'Missing or Invalid Field:"
" WebServerType'")
def test_reissue(symantec, order_kwargs, vcr):
one_minute = 60
order_kwargs["partnercode"] = symantec.partner_code
product_code = order_kwargs["productcode"]
order_id = order_kwargs['partnerorderid']
csr = order_kwargs["csr"]
def query_order():
return symantec.get_order_by_partner_order_id(
partnercode=symantec.partner_code,
productcode=product_code,
partnerorderid=order_id,
returncertificateinfo="true",
)
def modify_order(operation):
return symantec.modify_order(
partnercode=symantec.partner_code,
productcode=product_code,
partnerorderid=order_id,
modifyorderoperation=operation,
)
def ensure_order_completed():
query_resp = query_order()
order_status = query_resp["OrderInfo"]["OrderStatusMajor"].lower()
if order_status == 'complete':
return
# Force approval of the order. The approval initially fails with a
# 'SECURITY REVIEW FAILED'. Per the API docs, do a push_order_state
# to force the order to COMPLETED state.
#
# For SSL123, this takes up to 15 minutes or so.
# For QuickSSLPremium and RapidSSL, this happens immediately.
try:
modify_order(ModifyOperation.Approve)
except SymantecError:
modify_order(ModifyOperation.PushState)
# wait until the order is finished; timeout after five minutes
start = time.time()
while True:
query_resp = query_order()
order_status = query_resp["OrderInfo"]["OrderStatusMajor"].lower()
if order_status == 'complete':
break
elif time.time() - start > 5 * one_minute:
raise Exception("Order approval timed out")
time.sleep(10)
with vcr.use_cassette(
placeholders=(
[
{"placeholder": "<<CSR>>", "replace": csr},
{"placeholder": "<<CSR_QUOTED>>", "replace": quote_plus(csr)},
{"placeholder": "<<OrderID>>", "replace": order_id},
])):
symantec.order(**order_kwargs)
ensure_order_completed()
reissue_resp = symantec.reissue(
partnercode=symantec.partner_code,
productcode=product_code,
partnerorderid=order_id,
reissueemail=order_kwargs["admincontactemail"],
csr=order_kwargs['csr'],
)
assert set(reissue_resp.keys()) == set(['GeoTrustOrderID',
'PartnerOrderID'])
assert reissue_resp['PartnerOrderID'] == order_id
query_resp = query_order()
cert_status = query_resp['CertificateInfo']['CertificateStatus']
assert cert_status == 'PENDING_REISSUE'
# -*- coding: utf-8 -*-
import datetime as dt
import itertools
import logging
import re
import urlparse
from copy import deepcopy
import bson
import pytz
import itsdangerous
from modularodm import fields, Q
from modularodm.exceptions import NoResultsFound
from modularodm.exceptions import ValidationError, ValidationValueError, QueryException
from modularodm.validators import URLValidator
import framework
from framework.addons import AddonModelMixin
from framework import analytics
from framework.auth import signals, utils
from framework.auth.exceptions import (ChangePasswordError, ExpiredTokenError, InvalidTokenError,
MergeConfirmedRequiredError, MergeConflictError)
from framework.bcrypt import generate_password_hash, check_password_hash
from framework.exceptions import PermissionsError
from framework.guid.model import GuidStoredObject
from framework.mongo.validators import string_required
from framework.sentry import log_exception
from framework.sessions import session
from framework.sessions.model import Session
from framework.sessions.utils import remove_sessions_for_user
from website import mails, settings, filters, security
name_formatters = {
'long': lambda user: user.fullname,
'surname': lambda user: user.family_name if user.family_name else user.fullname,
'initials': lambda user: u'{surname}, {initial}.'.format(
surname=user.family_name,
initial=user.given_name_initial,
),
}
logger = logging.getLogger(__name__)
# Hide implementation of token generation
def generate_confirm_token():
return security.random_string(30)
def generate_claim_token():
return security.random_string(30)
def validate_history_item(item):
string_required(item.get('institution'))
startMonth = item.get('startMonth')
startYear = item.get('startYear')
endMonth = item.get('endMonth')
endYear = item.get('endYear')
validate_year(startYear)
validate_year(endYear)
if startYear and endYear:
if endYear < startYear:
raise ValidationValueError('End date must be later than start date.')
elif endYear == startYear:
if endMonth and startMonth and endMonth < startMonth:
raise ValidationValueError('End date must be later than start date.')
def validate_year(item):
if item:
try:
int(item)
except ValueError:
raise ValidationValueError('Please enter a valid year.')
else:
if len(item) != 4:
raise ValidationValueError('Please enter a valid year.')
validate_url = URLValidator()
def validate_profile_websites(profile_websites):
for value in profile_websites or []:
try:
validate_url(value)
except ValidationError:
# Reraise with a better message
raise ValidationError('Invalid personal URL.')
def validate_social(value):
validate_profile_websites(value.get('profileWebsites'))
# TODO - rename to _get_current_user_from_session /HRYBACKI
def _get_current_user():
uid = session._get_current_object() and session.data.get('auth_user_id')
return User.load(uid)
# TODO: This should be a class method of User?
def get_user(email=None, password=None, verification_key=None):
"""Get an instance of User matching the provided params.
:return: The instance of User requested
:rtype: User or None
"""
# tag: database
if password and not email:
raise AssertionError('If a password is provided, an email must also '
'be provided.')
query_list = []
if email:
email = email.strip().lower()
query_list.append(Q('emails', 'eq', email) | Q('username', 'eq', email))
if password:
password = password.strip()
try:
query = query_list[0]
for query_part in query_list[1:]:
query = query & query_part
user = User.find_one(query)
except Exception as err:
logger.error(err)
user = None
if user and not user.check_password(password):
return False
return user
if verification_key:
query_list.append(Q('verification_key', 'eq', verification_key))
try:
query = query_list[0]
for query_part in query_list[1:]:
query = query & query_part
user = User.find_one(query)
return user
except Exception as err:
logger.error(err)
return None
class Auth(object):
def __init__(self, user=None, api_node=None,
private_key=None):
self.user = user
self.api_node = api_node
self.private_key = private_key
def __repr__(self):
return ('<Auth(user="{self.user}", '
'private_key={self.private_key})>').format(self=self)
@property
def logged_in(self):
return self.user is not None
@property
def private_link(self):
if not self.private_key:
return None
try:
# Avoid circular import
from website.project.model import PrivateLink
private_link = PrivateLink.find_one(
Q('key', 'eq', self.private_key)
)
if private_link.is_deleted:
return None
except QueryException:
return None
return private_link
@classmethod
def from_kwargs(cls, request_args, kwargs):
user = request_args.get('user') or kwargs.get('user') or _get_current_user()
private_key = request_args.get('view_only')
return cls(
user=user,
private_key=private_key,
)
class User(GuidStoredObject, AddonModelMixin):
# Node fields that trigger an update to the search engine on save
SEARCH_UPDATE_FIELDS = {
'fullname',
'given_name',
'middle_names',
'family_name',
'suffix',
'merged_by',
'date_disabled',
'date_confirmed',
'jobs',
'schools',
'social',
}
# TODO: Add SEARCH_UPDATE_NODE_FIELDS, for fields that should trigger a
# search update for all nodes to which the user is a contributor.
SOCIAL_FIELDS = {
'orcid': u'http://orcid.org/{}',
'github': u'http://github.com/{}',
'scholar': u'http://scholar.google.com/citations?user={}',
'twitter': u'http://twitter.com/{}',
'profileWebsites': [],
'linkedIn': u'https://www.linkedin.com/{}',
'impactStory': u'https://impactstory.org/{}',
'researcherId': u'http://researcherid.com/rid/{}',
'researchGate': u'https://researchgate.net/profile/{}',
'academiaInstitution': u'https://{}',
'academiaProfileID': u'.academia.edu/{}',
'baiduScholar': u'http://xueshu.baidu.com/scholarID/{}'
}
# This is a GuidStoredObject, so this will be a GUID.
_id = fields.StringField(primary=True)
# The primary email address for the account.
# This value is unique, but multiple "None" records exist for:
# * unregistered contributors where an email address was not provided.
# TODO: Update mailchimp subscription on username change in user.save()
username = fields.StringField(required=False, unique=True, index=True)
# Hashed. Use `User.set_password` and `User.check_password`
password = fields.StringField()
fullname = fields.StringField(required=True, validate=string_required)
# user has taken action to register the account
is_registered = fields.BooleanField(index=True)
# user has claimed the account
# TODO: This should be retired - it always reflects is_registered.
# While a few entries exist where this is not the case, they appear to be
# the result of a bug, as they were all created over a small time span.
is_claimed = fields.BooleanField(default=False, index=True)
# a list of strings - for internal use
system_tags = fields.StringField(list=True)
# security emails that have been sent
# TODO: This should be removed and/or merged with system_tags
security_messages = fields.DictionaryField()
# Format: {
# <message label>: <datetime>
# ...
# }
# user was invited (as opposed to registered unprompted)
is_invited = fields.BooleanField(default=False, index=True)
# Per-project unclaimed user data:
# TODO: add validation
unclaimed_records = fields.DictionaryField(required=False)
# Format: {
# <project_id>: {
# 'name': <name that referrer provided>,
# 'referrer_id': <user ID of referrer>,
# 'token': <token used for verification urls>,
# 'email': <email the referrer provided or None>,
# 'claimer_email': <email the claimer entered or None>,
# 'last_sent': <timestamp of last email sent to referrer or None>
# }
# ...
# }
# Time of last sent notification email to newly added contributors
# Format : {
# <project_id>: {
# 'last_sent': time.time()
# }
# ...
# }
contributor_added_email_records = fields.DictionaryField(default=dict)
# The user into which this account was merged
merged_by = fields.ForeignField('user', default=None, index=True)
# verification key used for resetting password
verification_key = fields.StringField()
email_last_sent = fields.DateTimeField()
# confirmed emails
# emails should be stripped of whitespace and lower-cased before appending
# TODO: Add validator to ensure an email address only exists once across
# all User's email lists
emails = fields.StringField(list=True)
# email verification tokens
# see also ``unconfirmed_emails``
email_verifications = fields.DictionaryField(default=dict)
# Format: {
# <token> : {'email': <email address>,
# 'expiration': <datetime>}
# }
# TODO remove this field once migration (scripts/migration/migrate_mailing_lists_to_mailchimp_fields.py)
# has been run. This field is deprecated and replaced with mailchimp_mailing_lists
mailing_lists = fields.DictionaryField()
# email lists to which the user has chosen a subscription setting
mailchimp_mailing_lists = fields.DictionaryField()
# Format: {
# 'list1': True,
# 'list2': False,
# ...
# }
# email lists to which the user has chosen a subscription setting, being sent from osf, rather than mailchimp
osf_mailing_lists = fields.DictionaryField(default=lambda: {settings.OSF_HELP_LIST: True})
# Format: {
# 'list1': True,
# 'list2': False,
# ...
# }
# the date this user was registered
# TODO: consider removal - this can be derived from date_registered
date_registered = fields.DateTimeField(auto_now_add=dt.datetime.utcnow,
index=True)
# watched nodes are stored via a list of WatchConfigs
watched = fields.ForeignField('WatchConfig', list=True)
# list of collaborators that this user recently added to nodes as a contributor
recently_added = fields.ForeignField('user', list=True)
# Attached external accounts (OAuth)
external_accounts = fields.ForeignField('externalaccount', list=True)
# CSL names
given_name = fields.StringField()
middle_names = fields.StringField()
family_name = fields.StringField()
suffix = fields.StringField()
# Employment history
jobs = fields.DictionaryField(list=True, validate=validate_history_item)
# Format: {
# 'title': <position or job title>,
# 'institution': <institution or organization>,
# 'department': <department>,
# 'location': <location>,
# 'startMonth': <start month>,
# 'startYear': <start year>,
# 'endMonth': <end month>,
# 'endYear': <end year>,
# 'ongoing': <boolean>
# }
# Educational history
schools = fields.DictionaryField(list=True, validate=validate_history_item)
# Format: {
# 'degree': <position or job title>,
# 'institution': <institution or organization>,
# 'department': <department>,
# 'location': <location>,
# 'startMonth': <start month>,
# 'startYear': <start year>,
# 'endMonth': <end month>,
# 'endYear': <end year>,
# 'ongoing': <boolean>
# }
# Social links
social = fields.DictionaryField(validate=validate_social)
# Format: {
# 'profileWebsites': <list of profile websites>
# 'twitter': <twitter id>,
# }
# hashed password used to authenticate to Piwik
piwik_token = fields.StringField()
# date the user last sent a request
date_last_login = fields.DateTimeField()
# date the user first successfully confirmed an email address
date_confirmed = fields.DateTimeField(index=True)
# When the user was disabled.
date_disabled = fields.DateTimeField(index=True)
# when comments were last viewed
comments_viewed_timestamp = fields.DictionaryField()
# Format: {
# 'Comment.root_target._id': 'timestamp',
# ...
# }
# timezone for user's locale (e.g. 'America/New_York')
timezone = fields.StringField(default='Etc/UTC')
# user language and locale data (e.g. 'en_US')
locale = fields.StringField(default='en_US')
# whether the user has requested to deactivate their account
requested_deactivation = fields.BooleanField(default=False)
_meta = {'optimistic': True}
def __repr__(self):
return '<User({0!r}) with id {1!r}>'.format(self.username, self._id)
def __str__(self):
return self.fullname.encode('ascii', 'replace')
__unicode__ = __str__
# For compatibility with Django auth
@property
def pk(self):
return self._id
@property
def email(self):
return self.username
def is_authenticated(self): # Needed for django compat
return True
def is_anonymous(self):
return False
@property
def absolute_api_v2_url(self):
from website import util
return util.api_v2_url('users/{}/'.format(self.pk))
# used by django and DRF
def get_absolute_url(self):
if not self.is_registered:
return None
return self.absolute_api_v2_url
@classmethod
def create_unregistered(cls, fullname, email=None):
"""Create a new unregistered user.
"""
user = cls(
username=email,
fullname=fullname,
is_invited=True,
is_registered=False,
)
user.update_guessed_names()
return user
@classmethod
def create(cls, username, password, fullname):
user = cls(
username=username,
fullname=fullname,
)
user.update_guessed_names()
user.set_password(password)
return user
@classmethod
def create_unconfirmed(cls, username, password, fullname, do_confirm=True,
campaign=None):
"""Create a new user who has begun registration but needs to verify
their primary email address (username).
"""
user = cls.create(username, password, fullname)
user.add_unconfirmed_email(username)
user.is_registered = False
if campaign:
# needed to prevent circular import
from framework.auth.campaigns import system_tag_for_campaign # skipci
user.system_tags.append(system_tag_for_campaign(campaign))
return user
@classmethod
def create_confirmed(cls, username, password, fullname):
user = cls.create(username, password, fullname)
user.is_registered = True
user.is_claimed = True
user.date_confirmed = user.date_registered
user.emails.append(username)
return user
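# from_cookie() and get_or_create_cookie() below form a round trip: the cookie
# value is the Session id signed with itsdangerous.Signer using
# settings.SECRET_KEY (or an explicit secret); unsigning it loads the Session
# and, from its data, the authenticated user.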
@classmethod
def from_cookie(cls, cookie, secret=None):
"""Attempt to load a user from their signed cookie
:returns: The User, or None if one cannot be loaded
"""
if not cookie:
return None
secret = secret or settings.SECRET_KEY
try:
token = itsdangerous.Signer(secret).unsign(cookie)
except itsdangerous.BadSignature:
return None
user_session = Session.load(token)
if user_session is None:
return None
return cls.load(user_session.data.get('auth_user_id'))
def get_or_create_cookie(self, secret=None):
"""Find the cookie for the given user
Create a new session if no cookie is found
:param str secret: The key to sign the cookie with
:returns: The signed cookie
"""
secret = secret or settings.SECRET_KEY
sessions = Session.find(
Q('data.auth_user_id', 'eq', self._id)
).sort(
'-date_modified'
).limit(1)
if sessions.count() > 0:
user_session = sessions[0]
else:
user_session = Session(data={
'auth_user_id': self._id,
'auth_user_username': self.username,
'auth_user_fullname': self.fullname,
})
user_session.save()
signer = itsdangerous.Signer(secret)
return signer.sign(user_session._id)
def update_guessed_names(self):
"""Updates the CSL name fields inferred from the the full name.
"""
parsed = utils.impute_names(self.fullname)
self.given_name = parsed['given']
self.middle_names = parsed['middle']
self.family_name = parsed['family']
self.suffix = parsed['suffix']
def register(self, username, password=None):
"""Registers the user.
"""
self.username = username
if password:
self.set_password(password)
if username not in self.emails:
self.emails.append(username)
self.is_registered = True
self.is_claimed = True
self.date_confirmed = dt.datetime.utcnow()
self.update_search()
self.update_search_nodes()
# Emit signal that a user has confirmed
signals.user_confirmed.send(self)
return self
def add_unclaimed_record(self, node, referrer, given_name, email=None):
"""Add a new project entry in the unclaimed records dictionary.
:param Node node: Node this unclaimed user was added to.
:param User referrer: User who referred this user.
:param str given_name: The full name that the referrer gave for this user.
:param str email: The given email address.
:returns: The added record
"""
if not node.can_edit(user=referrer):
raise PermissionsError('Referrer does not have permission to add a contributor '
'to project {0}'.format(node._primary_key))
project_id = node._primary_key
referrer_id = referrer._primary_key
if email:
clean_email = email.lower().strip()
else:
clean_email = None
record = {
'name': given_name,
'referrer_id': referrer_id,
'token': generate_confirm_token(),
'email': clean_email
}
self.unclaimed_records[project_id] = record
return record
def display_full_name(self, node=None):
"""Return the full name , as it would display in a contributor list for a
given node.
NOTE: Unclaimed users may have a different name for different nodes.
"""
if node:
unclaimed_data = self.unclaimed_records.get(node._primary_key, None)
if unclaimed_data:
return unclaimed_data['name']
return self.fullname
@property
def is_active(self):
"""Returns True if the user is active. The user must have activated
their account, must not be deleted, suspended, etc.
:return: bool
"""
return (self.is_registered and
self.password is not None and
not self.is_merged and
not self.is_disabled and
self.is_confirmed)
def get_unclaimed_record(self, project_id):
"""Get an unclaimed record for a given project_id.
:raises: ValueError if there is no record for the given project.
"""
try:
return self.unclaimed_records[project_id]
except KeyError: # reraise as ValueError
raise ValueError('No unclaimed record for user {self._id} on node {project_id}'
.format(**locals()))
def get_claim_url(self, project_id, external=False):
"""Return the URL that an unclaimed user should use to claim their
account. Return ``None`` if there is no unclaimed_record for the given
project ID.
:param project_id: The project ID for the unclaimed record
:raises: ValueError if a record doesn't exist for the given project ID
:rtype: str
:returns: The claim URL for the given project
"""
uid = self._primary_key
base_url = settings.DOMAIN if external else '/'
unclaimed_record = self.get_unclaimed_record(project_id)
token = unclaimed_record['token']
return '{base_url}user/{uid}/{project_id}/claim/?token={token}'\
.format(**locals())
def set_password(self, raw_password, notify=True):
"""Set the password for this user to the hash of ``raw_password``.
If this is a new user, we're done. If this is a password change,
then email the user about the change and clear all the old sessions
so that users will have to log in again with the new password.
:param raw_password: the plaintext value of the new password
:param notify: Only meant for unit tests to keep extra notifications from being sent
:rtype: list
:returns: Changed fields from the user save
"""
had_existing_password = bool(self.password)
self.password = generate_password_hash(raw_password)
if had_existing_password and notify:
mails.send_mail(
to_addr=self.username,
mail=mails.PASSWORD_RESET,
mimetype='plain',
user=self
)
remove_sessions_for_user(self)
def check_password(self, raw_password):
"""Return a boolean of whether ``raw_password`` was correct."""
if not self.password or not raw_password:
return False
return check_password_hash(self.password, raw_password)
@property
def csl_given_name(self):
parts = [self.given_name]
if self.middle_names:
parts.extend(each[0] for each in re.split(r'\s+', self.middle_names))
return ' '.join(parts)
@property
def csl_name(self):
return {
'family': self.family_name,
'given': self.csl_given_name,
}
@property
def created(self):
from website.project.model import Node
return Node.find(Q('creator', 'eq', self._id))
# TODO: This should not be on the User object.
def change_password(self, raw_old_password, raw_new_password, raw_confirm_password):
"""Change the password for this user to the hash of ``raw_new_password``."""
raw_old_password = (raw_old_password or '').strip()
raw_new_password = (raw_new_password or '').strip()
raw_confirm_password = (raw_confirm_password or '').strip()
issues = []
if not self.check_password(raw_old_password):
issues.append('Old password is invalid')
elif raw_old_password == raw_new_password:
issues.append('Password cannot be the same')
if not raw_old_password or not raw_new_password or not raw_confirm_password:
issues.append('Passwords cannot be blank')
elif len(raw_new_password) < 6:
issues.append('Password should be at least six characters')
elif len(raw_new_password) > 256:
issues.append('Password should not be longer than 256 characters')
if raw_new_password != raw_confirm_password:
issues.append('Password does not match the confirmation')
if issues:
raise ChangePasswordError(issues)
self.set_password(raw_new_password)
def _set_email_token_expiration(self, token, expiration=None):
"""Set the expiration date for given email token.
:param str token: The email token to set the expiration for.
:param datetime expiration: Datetime at which to expire the token. If ``None``, the
token will expire after ``settings.EMAIL_TOKEN_EXPIRATION`` hours. This is only
used for testing purposes.
"""
expiration = expiration or (dt.datetime.utcnow() + dt.timedelta(hours=settings.EMAIL_TOKEN_EXPIRATION))
self.email_verifications[token]['expiration'] = expiration
return expiration
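# add_unconfirmed_email() below creates the verification token for an address:
# the token is stored in email_verifications along with the email, a
# 'confirmed' flag and an expiration set via _set_email_token_expiration()
# above; get_confirmation_token() later looks it up and confirm_email()
# consumes it.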
def add_unconfirmed_email(self, email, expiration=None):
"""Add an email verification token for a given email."""
# TODO: This is technically not compliant with RFC 822, which requires
# that case be preserved in the "local-part" of an address. From
# a practical standpoint, the vast majority of email servers do
# not preserve case.
# ref: https://tools.ietf.org/html/rfc822#section-6
email = email.lower().strip()
if email in self.emails:
raise ValueError('Email already confirmed to this user.')
utils.validate_email(email)
# If the unconfirmed email is already present, refresh the token
if email in self.unconfirmed_emails:
self.remove_unconfirmed_email(email)
token = generate_confirm_token()
# handle when email_verifications is None
if not self.email_verifications:
self.email_verifications = {}
# confirmed used to check if link has been clicked
self.email_verifications[token] = {'email': email,
'confirmed': False}
self._set_email_token_expiration(token, expiration=expiration)
return token
def remove_unconfirmed_email(self, email):
"""Remove an unconfirmed email addresses and their tokens."""
for token, value in self.email_verifications.iteritems():
if value.get('email') == email:
del self.email_verifications[token]
return True
return False
def remove_email(self, email):
"""Remove a confirmed email"""
if email == self.username:
raise PermissionsError("Can't remove primary email")
if email in self.emails:
self.emails.remove(email)
signals.user_email_removed.send(self, email=email)
@signals.user_email_removed.connect
def _send_email_removal_confirmations(self, email):
mails.send_mail(to_addr=self.username,
mail=mails.REMOVED_EMAIL,
user=self,
removed_email=email,
security_addr='alternate email address ({})'.format(email))
mails.send_mail(to_addr=email,
mail=mails.REMOVED_EMAIL,
user=self,
removed_email=email,
security_addr='primary email address ({})'.format(self.username))
def get_confirmation_token(self, email, force=False):
"""Return the confirmation token for a given email.
:param str email: Email to get the token for.
:param bool force: If an expired token exists for the given email, generate a new
token and return that token.
:raises: ExpiredTokenError if trying to access a token that is expired and force=False.
:raises: KeyError if there is no token for the email.
"""
# TODO: Refactor "force" flag into User.get_or_add_confirmation_token
for token, info in self.email_verifications.items():
if info['email'].lower() == email.lower():
# Old records will not have an expiration key. If it's missing,
# assume the token is expired
expiration = info.get('expiration')
if not expiration or (expiration and expiration < dt.datetime.utcnow()):
if not force:
raise ExpiredTokenError('Token for email "{0}" is expired'.format(email))
else:
new_token = self.add_unconfirmed_email(email)
self.save()
return new_token
return token
raise KeyError('No confirmation token for email "{0}"'.format(email))
def get_confirmation_url(self, email, external=True, force=False):
"""Return the confirmation url for a given email.
:raises: ExpiredTokenError if trying to access a token that is expired.
:raises: KeyError if there is no token for the email.
"""
base = settings.DOMAIN if external else '/'
token = self.get_confirmation_token(email, force=force)
return '{0}confirm/{1}/{2}/'.format(base, self._primary_key, token)
def get_unconfirmed_email_for_token(self, token):
"""Return email if valid.
:rtype: bool
:raises: ExpiredTokenError if trying to access a token that is expired.
:raises: InvalidTokenError if trying to access a token that is invalid.
"""
if token not in self.email_verifications:
raise InvalidTokenError
verification = self.email_verifications[token]
# Not all tokens are guaranteed to have expiration dates
if (
'expiration' in verification and
verification['expiration'] < dt.datetime.utcnow()
):
raise ExpiredTokenError
return verification['email']
def clean_email_verifications(self, given_token=None):
email_verifications = deepcopy(self.email_verifications or {})
for token in self.email_verifications or {}:
try:
self.get_unconfirmed_email_for_token(token)
except (KeyError, ExpiredTokenError):
email_verifications.pop(token)
continue
if token == given_token:
email_verifications.pop(token)
self.email_verifications = email_verifications
def verify_claim_token(self, token, project_id):
"""Return whether or not a claim token is valid for this user for
a given node to which they were added as an unregistered contributor.
"""
try:
record = self.get_unclaimed_record(project_id)
except ValueError: # No unclaimed record for given pid
return False
return record['token'] == token
def confirm_email(self, token, merge=False):
"""Confirm the email address associated with the token"""
email = self.get_unconfirmed_email_for_token(token)
# If this email is confirmed on another account, abort
try:
user_to_merge = User.find_one(Q('emails', 'iexact', email))
except NoResultsFound:
user_to_merge = None
if user_to_merge and merge:
self.merge_user(user_to_merge)
elif user_to_merge:
raise MergeConfirmedRequiredError(
'Merge requires confirmation',
user=self,
user_to_merge=user_to_merge,
)
# If another user has this email as its username, get it
try:
unregistered_user = User.find_one(Q('username', 'eq', email) &
Q('_id', 'ne', self._id))
except NoResultsFound:
unregistered_user = None
if unregistered_user:
self.merge_user(unregistered_user)
self.save()
unregistered_user.username = None
if email not in self.emails:
self.emails.append(email)
# Complete registration if primary email
if email.lower() == self.username.lower():
self.register(self.username)
self.date_confirmed = dt.datetime.utcnow()
# Revoke token
del self.email_verifications[token]
# TODO: We can't assume that all unclaimed records are now claimed.
# Clear unclaimed records, so user's name shows up correctly on
# all projects
self.unclaimed_records = {}
self.save()
self.update_search_nodes()
return True
@property
def unconfirmed_emails(self):
# Handle when email_verifications field is None
email_verifications = self.email_verifications or {}
return [
each['email']
for each
in email_verifications.values()
]
def update_search_nodes(self):
"""Call `update_search` on all nodes on which the user is a
contributor. Needed to add self to contributor lists in search upon
registration or claiming.
"""
for node in self.contributed:
node.update_search()
def update_search_nodes_contributors(self):
"""
Bulk update contributor name on all nodes on which the user is
a contributor.
:return:
"""
from website.search import search
search.update_contributors(self.visible_contributor_to)
def update_affiliated_institutions_by_email_domain(self):
"""
Append affiliated_institutions by email domain.
:return:
"""
# Avoid circular import
from website.project.model import Institution
try:
email_domains = [email.split('@')[1] for email in self.emails]
insts = Institution.find(Q('email_domains', 'in', email_domains))
for inst in insts:
if inst not in self.affiliated_institutions:
self.affiliated_institutions.append(inst)
except (IndexError, NoResultsFound):
pass
@property
def is_confirmed(self):
return bool(self.date_confirmed)
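# social_links below expands stored handles via SOCIAL_FIELDS, e.g.
# (illustrative) {'github': 'jdoe'} -> {'github': 'http://github.com/jdoe'};
# list-valued entries such as profileWebsites are passed through unchanged.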
@property
def social_links(self):
social_user_fields = {}
for key, val in self.social.items():
if val and key in self.SOCIAL_FIELDS:
if not isinstance(val, basestring):
social_user_fields[key] = val
else:
social_user_fields[key] = self.SOCIAL_FIELDS[key].format(val)
return social_user_fields
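# biblio_name below renders "Family, Initials": following the example used in
# given_name_initial, given_name 'Robert', middle_names 'H.' and family_name
# 'Walker' produce u'Walker, R. H.'.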
@property
def biblio_name(self):
given_names = self.given_name + ' ' + self.middle_names
surname = self.family_name
if surname != given_names:
initials = [
name[0].upper() + '.'
for name in given_names.split(' ')
if name and re.search(r'\w', name[0], re.I)
]
return u'{0}, {1}'.format(surname, ' '.join(initials))
return surname
@property
def given_name_initial(self):
"""
The user's preferred initialization of their given name.
Some users with common names may choose to distinguish themselves from
their colleagues in this way. For instance, there could be two
well-known researchers in a single field named "Robert Walker".
"Walker, R" could then refer to either of them. "Walker, R.H." could
provide easy disambiguation.
NOTE: The internal representation for this should never end with a
period. "R" and "R.H" would be correct in the prior case, but
"R.H." would not.
"""
return self.given_name[0]
@property
def url(self):
return '/{}/'.format(self._primary_key)
@property
def api_url(self):
return '/api/v1/profile/{0}/'.format(self._primary_key)
@property
def absolute_url(self):
return urlparse.urljoin(settings.DOMAIN, self.url)
@property
def display_absolute_url(self):
url = self.absolute_url
if url is not None:
return re.sub(r'https?:', '', url).strip('/')
@property
def deep_url(self):
return '/profile/{}/'.format(self._primary_key)
@property
def unconfirmed_email_info(self):
"""Return a list of dictionaries containing information about each of this
user's unconfirmed emails.
"""
unconfirmed_emails = []
email_verifications = self.email_verifications or []
for token in email_verifications:
if self.email_verifications[token].get('confirmed', False):
try:
user_merge = User.find_one(Q('emails', 'eq', self.email_verifications[token]['email'].lower()))
except NoResultsFound:
user_merge = False
unconfirmed_emails.append({'address': self.email_verifications[token]['email'],
'token': token,
'confirmed': self.email_verifications[token]['confirmed'],
'user_merge': user_merge.email if user_merge else False})
return unconfirmed_emails
def profile_image_url(self, size=None):
"""A generalized method for getting a user's profile picture urls.
We may choose to use some service other than gravatar in the future,
and should not commit ourselves to using a specific service (mostly
an API concern).
As long as we use gravatar, this is just a proxy to User.gravatar_url
"""
return self._gravatar_url(size)
def _gravatar_url(self, size):
return filters.gravatar(
self,
use_ssl=True,
size=size
)
def get_activity_points(self, db=None):
db = db or framework.mongo.database
return analytics.get_total_activity_count(self._primary_key, db=db)
def disable_account(self):
"""
        Disables the user account by setting ``is_disabled`` to True, and
        unsubscribes the user from MailChimp emails.
"""
from website import mailchimp_utils
try:
mailchimp_utils.unsubscribe_mailchimp(
list_name=settings.MAILCHIMP_GENERAL_LIST,
user_id=self._id,
username=self.username
)
except mailchimp_utils.mailchimp.ListNotSubscribedError:
pass
except mailchimp_utils.mailchimp.InvalidApiKeyError:
if not settings.ENABLE_EMAIL_SUBSCRIPTIONS:
pass
else:
raise
self.is_disabled = True
@property
def is_disabled(self):
"""Whether or not this account has been disabled.
Abstracts ``User.date_disabled``.
:return: bool
"""
return self.date_disabled is not None
@is_disabled.setter
def is_disabled(self, val):
"""Set whether or not this account has been disabled."""
if val and not self.date_disabled:
self.date_disabled = dt.datetime.utcnow()
elif val is False:
self.date_disabled = None
@property
def is_merged(self):
'''Whether or not this account has been merged into another account.
'''
return self.merged_by is not None
@property
def profile_url(self):
return '/{}/'.format(self._id)
@property
def contributed(self):
from website.project.model import Node
return Node.find(Q('contributors', 'eq', self._id))
@property
def contributor_to(self):
from website.project.model import Node
return Node.find(
Q('contributors', 'eq', self._id) &
Q('is_deleted', 'ne', True) &
Q('is_collection', 'ne', True)
)
@property
def visible_contributor_to(self):
from website.project.model import Node
return Node.find(
Q('contributors', 'eq', self._id) &
Q('is_deleted', 'ne', True) &
Q('is_collection', 'ne', True) &
Q('visible_contributor_ids', 'eq', self._id)
)
def get_summary(self, formatter='long'):
return {
'user_fullname': self.fullname,
'user_profile_url': self.profile_url,
'user_display_name': name_formatters[formatter](self),
'user_is_claimed': self.is_claimed
}
def save(self, *args, **kwargs):
# TODO: Update mailchimp subscription on username change
# Avoid circular import
from framework.analytics import tasks as piwik_tasks
self.username = self.username.lower().strip() if self.username else None
ret = super(User, self).save(*args, **kwargs)
if self.SEARCH_UPDATE_FIELDS.intersection(ret) and self.is_confirmed:
self.update_search()
self.update_search_nodes_contributors()
if settings.PIWIK_HOST and not self.piwik_token:
piwik_tasks.update_user(self._id)
return ret
def update_search(self):
from website import search
try:
search.search.update_user(self)
except search.exceptions.SearchUnavailableError as e:
logger.exception(e)
log_exception()
@classmethod
def find_by_email(cls, email):
try:
user = cls.find_one(
Q('emails', 'eq', email)
)
return [user]
except:
return []
def serialize(self, anonymous=False):
return {
'id': utils.privacy_info_handle(self._primary_key, anonymous),
'fullname': utils.privacy_info_handle(self.fullname, anonymous, name=True),
'registered': self.is_registered,
'url': utils.privacy_info_handle(self.url, anonymous),
'api_url': utils.privacy_info_handle(self.api_url, anonymous),
}
###### OSF-Specific methods ######
def watch(self, watch_config):
"""Watch a node by adding its WatchConfig to this user's ``watched``
list. Raises ``ValueError`` if the node is already watched.
:param watch_config: The WatchConfig to add.
:param save: Whether to save the user.
"""
watched_nodes = [each.node for each in self.watched]
if watch_config.node in watched_nodes:
raise ValueError('Node is already being watched.')
watch_config.save()
self.watched.append(watch_config)
return None
def unwatch(self, watch_config):
"""Unwatch a node by removing its WatchConfig from this user's ``watched``
list. Raises ``ValueError`` if the node is not already being watched.
:param watch_config: The WatchConfig to remove.
:param save: Whether to save the user.
"""
for each in self.watched:
if watch_config.node._id == each.node._id:
from framework.transactions.context import TokuTransaction # Avoid circular import
with TokuTransaction():
# Ensure that both sides of the relationship are removed
each.__class__.remove_one(each)
self.watched.remove(each)
self.save()
return None
raise ValueError('Node not being watched.')
def is_watching(self, node):
        '''Return whether or not a user is watching a Node.'''
watched_node_ids = set([config.node._id for config in self.watched])
return node._id in watched_node_ids
def get_recent_log_ids(self, since=None):
'''Return a generator of recent logs' ids.
:param since: A datetime specifying the oldest time to retrieve logs
from. If ``None``, defaults to 60 days before today. Must be a tz-aware
datetime because PyMongo's generation times are tz-aware.
:rtype: generator of log ids (strings)
'''
log_ids = []
# Default since to 60 days before today if since is None
# timezone aware utcnow
utcnow = dt.datetime.utcnow().replace(tzinfo=pytz.utc)
since_date = since or (utcnow - dt.timedelta(days=60))
for config in self.watched:
# Extract the timestamps for each log from the log_id (fast!)
# The first 4 bytes of Mongo's ObjectId encodes time
# This prevents having to load each Log Object and access their
# date fields
node_log_ids = [log.pk for log in config.node.logs
if bson.ObjectId(log.pk).generation_time > since_date and
log.pk not in log_ids]
# Log ids in reverse chronological order
log_ids = _merge_into_reversed(log_ids, node_log_ids)
return (l_id for l_id in log_ids)
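    # Illustrative note: the timestamp check above works because BSON ObjectIds
    # embed their creation time in the first four bytes, e.g.
    #     bson.ObjectId('507f1f77bcf86cd799439011').generation_time
    # yields a tz-aware datetime, which is why `since` must be tz-aware as well.
    # The id shown here is an arbitrary, made-up example.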
def get_daily_digest_log_ids(self):
'''Return a generator of log ids generated in the past day
(starting at UTC 00:00).
'''
utcnow = dt.datetime.utcnow()
midnight = dt.datetime(
utcnow.year, utcnow.month, utcnow.day,
0, 0, 0, tzinfo=pytz.utc
)
return self.get_recent_log_ids(since=midnight)
@property
def can_be_merged(self):
"""The ability of the `merge_user` method to fully merge the user"""
return all((addon.can_be_merged for addon in self.get_addons()))
def merge_user(self, user):
"""Merge a registered user into this account. This user will be
a contributor on any project. if the registered user and this account
are both contributors of the same project. Then it will remove the
registered user and set this account to the highest permission of the two
and set this account to be visible if either of the two are visible on
the project.
:param user: A User object to be merged.
"""
# Fail if the other user has conflicts.
if not user.can_be_merged:
raise MergeConflictError('Users cannot be merged')
# Move over the other user's attributes
# TODO: confirm
for system_tag in user.system_tags:
if system_tag not in self.system_tags:
self.system_tags.append(system_tag)
self.is_claimed = self.is_claimed or user.is_claimed
self.is_invited = self.is_invited or user.is_invited
# copy over profile only if this user has no profile info
if user.jobs and not self.jobs:
self.jobs = user.jobs
if user.schools and not self.schools:
self.schools = user.schools
if user.social and not self.social:
self.social = user.social
unclaimed = user.unclaimed_records.copy()
unclaimed.update(self.unclaimed_records)
self.unclaimed_records = unclaimed
# - unclaimed records should be connected to only one user
user.unclaimed_records = {}
security_messages = user.security_messages.copy()
security_messages.update(self.security_messages)
self.security_messages = security_messages
for key, value in user.mailchimp_mailing_lists.iteritems():
# subscribe to each list if either user was subscribed
subscription = value or self.mailchimp_mailing_lists.get(key)
signals.user_merged.send(self, list_name=key, subscription=subscription)
# clear subscriptions for merged user
signals.user_merged.send(user, list_name=key, subscription=False, send_goodbye=False)
for target_id, timestamp in user.comments_viewed_timestamp.iteritems():
if not self.comments_viewed_timestamp.get(target_id):
self.comments_viewed_timestamp[target_id] = timestamp
elif timestamp > self.comments_viewed_timestamp[target_id]:
self.comments_viewed_timestamp[target_id] = timestamp
self.emails.extend(user.emails)
user.emails = []
for k, v in user.email_verifications.iteritems():
email_to_confirm = v['email']
if k not in self.email_verifications and email_to_confirm != user.username:
self.email_verifications[k] = v
user.email_verifications = {}
for institution in user.affiliated_institutions:
self.affiliated_institutions.append(institution)
user._affiliated_institutions = []
# FOREIGN FIELDS
for watched in user.watched:
if watched not in self.watched:
self.watched.append(watched)
user.watched = []
for account in user.external_accounts:
if account not in self.external_accounts:
self.external_accounts.append(account)
user.external_accounts = []
# - addons
# Note: This must occur before the merged user is removed as a
# contributor on the nodes, as an event hook is otherwise fired
# which removes the credentials.
for addon in user.get_addons():
user_settings = self.get_or_add_addon(addon.config.short_name)
user_settings.merge(addon)
user_settings.save()
# Disconnect signal to prevent emails being sent about being a new contributor when merging users
# be sure to reconnect it at the end of this code block. Import done here to prevent circular import error.
from website.addons.osfstorage.listeners import checkin_files_by_user
from website.project.signals import contributor_added, contributor_removed
from website.project.views.contributor import notify_added_contributor
from website.util import disconnected_from
# - projects where the user was a contributor
with disconnected_from(signal=contributor_added, listener=notify_added_contributor):
for node in user.contributed:
# Skip bookmark collection node
if node.is_bookmark_collection:
continue
# if both accounts are contributor of the same project
if node.is_contributor(self) and node.is_contributor(user):
if node.permissions[user._id] > node.permissions[self._id]:
permissions = node.permissions[user._id]
else:
permissions = node.permissions[self._id]
node.set_permissions(user=self, permissions=permissions)
visible1 = self._id in node.visible_contributor_ids
visible2 = user._id in node.visible_contributor_ids
if visible1 != visible2:
node.set_visible(user=self, visible=True, log=True, auth=Auth(user=self))
else:
node.add_contributor(
contributor=self,
permissions=node.get_permissions(user),
visible=node.get_visible(user),
log=False,
)
with disconnected_from(signal=contributor_removed, listener=checkin_files_by_user):
try:
node.remove_contributor(
contributor=user,
auth=Auth(user=self),
log=False,
)
except ValueError:
logger.error('Contributor {0} not in list on node {1}'.format(
user._id, node._id
))
node.save()
# - projects where the user was the creator
for node in user.created:
node.creator = self
node.save()
# - file that the user has checked_out, import done here to prevent import error
from website.files.models.base import FileNode
for file_node in FileNode.files_checked_out(user=user):
file_node.checkout = self
file_node.save()
# finalize the merge
remove_sessions_for_user(user)
# - username is set to None so the resultant user can set it primary
# in the future.
user.username = None
user.password = None
user.verification_key = None
user.osf_mailing_lists = {}
user.merged_by = self
user.save()
def get_projects_in_common(self, other_user, primary_keys=True):
"""Returns either a collection of "shared projects" (projects that both users are contributors for)
or just their primary keys
"""
if primary_keys:
projects_contributed_to = set(self.contributed.get_keys())
other_projects_primary_keys = set(other_user.contributed.get_keys())
return projects_contributed_to.intersection(other_projects_primary_keys)
else:
projects_contributed_to = set(self.contributed)
return projects_contributed_to.intersection(other_user.contributed)
def n_projects_in_common(self, other_user):
"""Returns number of "shared projects" (projects that both users are contributors for)"""
return len(self.get_projects_in_common(other_user, primary_keys=True))
def is_affiliated_with_institution(self, inst):
return inst in self.affiliated_institutions
def remove_institution(self, inst_id):
removed = False
for inst in self.affiliated_institutions:
if inst._id == inst_id:
self.affiliated_institutions.remove(inst)
removed = True
return removed
_affiliated_institutions = fields.ForeignField('node', list=True)
@property
def affiliated_institutions(self):
from website.institutions.model import Institution, AffiliatedInstitutionsList
return AffiliatedInstitutionsList([Institution(inst) for inst in self._affiliated_institutions], obj=self, private_target='_affiliated_institutions')
def get_node_comment_timestamps(self, target_id):
""" Returns the timestamp for when comments were last viewed on a node, file or wiki.
"""
default_timestamp = dt.datetime(1970, 1, 1, 12, 0, 0)
return self.comments_viewed_timestamp.get(target_id, default_timestamp)
def _merge_into_reversed(*iterables):
'''Merge multiple sorted inputs into a single output in reverse order.
'''
return sorted(itertools.chain(*iterables), reverse=True)
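# Illustrative example (made-up ids): the chained inputs are simply re-sorted
# newest-first, e.g.
#     _merge_into_reversed(['5f2', '5e9'], ['5f4', '5d1'])
# returns ['5f4', '5f2', '5e9', '5d1'].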
|
|
"""
sentry.templatetags.sentry_helpers
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
# XXX: Import django-paging's template tags so we don't have to worry about
# INSTALLED_APPS
import datetime
import os.path
from collections import namedtuple
from paging.helpers import paginate as paginate_func
from pkg_resources import parse_version as Version
from urllib import quote
from django import template
from django.template import RequestContext
from django.template.defaultfilters import stringfilter
from django.template.loader import render_to_string
from django.utils.html import escape
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext as _
from sentry.constants import STATUS_MUTED, EVENTS_PER_PAGE, MEMBER_OWNER
from sentry.models import Team, Group, Option
from sentry.web.helpers import group_is_public
from sentry.utils import to_unicode
from sentry.utils.avatar import get_gravatar_url
from sentry.utils.http import absolute_uri
from sentry.utils.javascript import to_json
from sentry.utils.safe import safe_execute
from sentry.utils.strings import truncatechars
from templatetag_sugar.register import tag
from templatetag_sugar.parser import Name, Variable, Constant, Optional
SentryVersion = namedtuple('SentryVersion', ['current', 'latest',
'update_available'])
register = template.Library()
truncatechars = register.filter(stringfilter(truncatechars))
truncatechars.is_safe = True
register.filter(to_json)
register.simple_tag(absolute_uri)
@register.filter
def pprint(value, break_after=10):
"""
break_after is used to define how often a <span> is
inserted (for soft wrapping).
"""
value = to_unicode(value)
return mark_safe(u'<span></span>'.join(
[escape(value[i:(i + break_after)]) for i in xrange(0, len(value), break_after)]
))
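# Illustrative example (made-up value): with break_after=5, a zero-width
# <span> marker is inserted every five characters, e.g.
#     pprint('abcdefghij', 5) -> u'abcde<span></span>fghij'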
@register.filter
def is_url(value):
if not isinstance(value, basestring):
return False
if not value.startswith(('http://', 'https://')):
return False
if ' ' in value:
return False
return True
# seriously Django?
@register.filter
def subtract(value, amount):
return int(value) - int(amount)
@register.filter
def has_charts(group):
from sentry.utils.db import has_charts
if hasattr(group, '_state'):
db = group._state.db or 'default'
else:
db = 'default'
return has_charts(db)
@register.filter
def as_sorted(value):
return sorted(value)
@register.filter
def small_count(v):
z = [
(1000000000, _('b')),
(1000000, _('m')),
(1000, _('k')),
]
v = int(v)
for x, y in z:
o, p = divmod(v, x)
if o:
if len(str(o)) > 2 or not p:
return '%d%s' % (o, y)
return '%.1f%s' % (v / float(x), y)
return v
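# Illustrative examples (made-up values):
#     small_count(999)     -> 999
#     small_count(1234)    -> '1.2k'
#     small_count(2000000) -> '2m'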
@register.filter
def num_digits(value):
return len(str(value))
@register.filter
def to_str(data):
return str(data)
@register.filter
def is_none(value):
return value is None
@register.simple_tag(takes_context=True)
def get_sentry_version(context):
import sentry
current = sentry.get_version()
latest = Option.objects.get_value('sentry:latest_version', current)
update_available = Version(latest) > Version(current)
context['sentry_version'] = SentryVersion(
current, latest, update_available
)
return ''
@register.filter
def timesince(value, now=None):
from django.template.defaultfilters import timesince
from django.utils import timezone
if now is None:
now = timezone.now()
if not value:
return _('never')
if value < (now - datetime.timedelta(days=5)):
return value.date()
value = (' '.join(timesince(value, now).split(' ')[0:2])).strip(',')
if value == _('0 minutes'):
return _('just now')
if value == _('1 day'):
return _('yesterday')
return value + _(' ago')
@register.filter
def duration(value):
if not value:
return '0s'
hours, minutes, seconds = 0, 0, 0
if value > 3600:
hours = value / 3600
value = value % 3600
if value > 60:
minutes = value / 60
value = value % 60
seconds = value
output = []
if hours:
output.append('%dh' % hours)
if minutes:
output.append('%dm' % minutes)
if seconds > 1:
output.append('%0.2fs' % seconds)
elif seconds:
output.append('%dms' % (seconds * 1000))
return ''.join(output)
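# Illustrative examples, assuming the filter receives a duration in seconds
# (made-up values):
#     duration(3750) -> '1h2m30.00s'
#     duration(0.5)  -> '500ms'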
# XXX: this is taken from django-paging so that we may render
# a custom template, and not worry about INSTALLED_APPS
@tag(register, [Variable('queryset_or_list'),
Constant('from'), Variable('request'),
Optional([Constant('as'), Name('asvar')]),
Optional([Constant('per_page'), Variable('per_page')])])
def paginate(context, queryset_or_list, request, asvar=None, per_page=EVENTS_PER_PAGE):
"""{% paginate queryset_or_list from request as foo[ per_page 25] %}"""
result = paginate_func(request, queryset_or_list, per_page, endless=True)
context_instance = RequestContext(request)
paging = mark_safe(render_to_string('sentry/partial/_pager.html', result, context_instance))
result = dict(objects=result['paginator'].get('objects', []), paging=paging)
if asvar:
context[asvar] = result
return ''
return result
@tag(register, [Variable('queryset_or_list'),
Constant('from'), Variable('request'),
Optional([Constant('as'), Name('asvar')]),
Optional([Constant('per_page'), Variable('per_page')])])
def paginator(context, queryset_or_list, request, asvar=None, per_page=EVENTS_PER_PAGE):
"""{% paginator queryset_or_list from request as foo[ per_page 25] %}"""
result = paginate_func(request, queryset_or_list, per_page, endless=True)
if asvar:
context[asvar] = result
return ''
return result
@tag(register, [Constant('from'), Variable('request'),
Optional([Constant('without'), Name('withoutvar')]),
Optional([Constant('as'), Name('asvar')])])
def querystring(context, request, withoutvar, asvar=None):
params = request.GET.copy()
if withoutvar in params:
del params[withoutvar]
result = params.urlencode()
if asvar:
context[asvar] = result
return ''
return result
@register.inclusion_tag('sentry/partial/_form.html')
def render_form(form):
return {'form': form}
@register.filter
def as_bookmarks(group_list, user):
group_list = list(group_list)
if user.is_authenticated() and group_list:
project = group_list[0].project
bookmarks = set(project.bookmark_set.filter(
user=user,
group__in=group_list,
).values_list('group_id', flat=True))
else:
bookmarks = set()
for g in group_list:
yield g, g.pk in bookmarks
@register.filter
def is_bookmarked(group, user):
if user.is_authenticated():
return group.bookmark_set.filter(
user=user,
group=group,
).exists()
return False
@register.filter
def date(datetime, arg=None):
from django.template.defaultfilters import date
from django.utils import timezone
if not timezone.is_aware(datetime):
datetime = datetime.replace(tzinfo=timezone.utc)
return date(datetime, arg)
@tag(register, [Constant('for'), Variable('user'),
Constant('from'), Variable('project'),
Constant('as'), Name('asvar')])
def get_project_dsn(context, user, project, asvar):
from sentry.models import ProjectKey
if not user.is_authenticated():
context[asvar] = None
return ''
    try:
        key = ProjectKey.objects.filter(user=None, project=project)[0]
    except IndexError:
        try:
            key = ProjectKey.objects.get(user=user, project=project)
        except ProjectKey.DoesNotExist:
            context[asvar] = None
        else:
            context[asvar] = key.get_dsn()
    else:
        context[asvar] = key.get_dsn()
return ''
# Adapted from http://en.gravatar.com/site/implement/images/django/
# The "mm" default is for the grey, "mystery man" icon. See:
# http://en.gravatar.com/site/implement/images/
@tag(register, [Variable('email'),
Optional([Constant('size'), Variable('size')]),
Optional([Constant('default'), Variable('default')])])
def gravatar_url(context, email, size=None, default='mm'):
return get_gravatar_url(email, size, default)
@register.filter
def trim_schema(value):
return value.split('//', 1)[-1]
@register.filter
def with_metadata(group_list, request):
group_list = list(group_list)
if request.user.is_authenticated() and group_list:
project = group_list[0].project
bookmarks = set(project.bookmark_set.filter(
user=request.user,
group__in=group_list,
).values_list('group_id', flat=True))
else:
bookmarks = set()
if group_list:
historical_data = Group.objects.get_chart_data_for_group(
instances=group_list,
max_days=1,
key='group',
)
else:
historical_data = {}
for g in group_list:
yield g, {
'is_bookmarked': g.pk in bookmarks,
'historical_data': ','.join(str(x[1]) for x in historical_data.get(g.id, [])),
}
@register.inclusion_tag('sentry/plugins/bases/tag/widget.html')
def render_tag_widget(group, tag):
return {
'title': tag.replace('_', ' ').title(),
'tag_name': tag,
'unique_tags': list(group.get_unique_tags(tag)[:10]),
'group': group,
}
@register.filter
def titlize(value):
return value.replace('_', ' ').title()
@register.filter
def is_muted(value):
return value == STATUS_MUTED
@register.filter
def split(value, delim=''):
return value.split(delim)
@register.filter
def get_rendered_interfaces(event, request):
interface_list = []
is_public = group_is_public(event.group, request.user)
for interface in event.interfaces.itervalues():
html = safe_execute(interface.to_html, event, is_public=is_public)
if not html:
continue
interface_list.append((interface, mark_safe(html)))
return sorted(interface_list, key=lambda x: x[0].get_display_score(), reverse=True)
@register.inclusion_tag('sentry/partial/github_button.html')
def github_button(user, repo):
return {
'user': user,
'repo': repo,
}
@register.inclusion_tag('sentry/partial/data_values.html')
def render_values(value, threshold=5, collapse_to=3):
if isinstance(value, (list, tuple)):
value = dict(enumerate(value))
is_list, is_dict = True, True
else:
is_list, is_dict = False, isinstance(value, dict)
context = {
'is_dict': is_dict,
'is_list': is_list,
'threshold': threshold,
'collapse_to': collapse_to,
}
if is_dict:
value = sorted(value.iteritems())
value_len = len(value)
over_threshold = value_len > threshold
if over_threshold:
context.update({
'over_threshold': over_threshold,
'hidden_values': value_len - collapse_to,
'value_before_expand': value[:collapse_to],
'value_after_expand': value[collapse_to:],
})
else:
context.update({
'over_threshold': over_threshold,
'hidden_values': 0,
'value_before_expand': value,
'value_after_expand': [],
})
else:
context['value'] = value
return context
@register.inclusion_tag('sentry/partial/_client_config.html')
def client_help(user, project):
from sentry.web.frontend.docs import get_key_context
context = get_key_context(user, project)
context['project'] = project
return context
@tag(register, [Constant('from'), Variable('project'),
Constant('as'), Name('asvar')])
def recent_alerts(context, project, asvar):
from sentry.models import Alert
context[asvar] = list(Alert.get_recent_for_project(project.id))
return ''
@register.filter
def reorder_teams(team_list, team):
pending = []
for t, p_list in team_list:
if t == team:
pending.insert(0, (t, p_list))
else:
pending.append((t, p_list))
return pending
@register.filter
def urlquote(value, safe=''):
return quote(value.encode('utf8'), safe)
@register.filter
def basename(value):
return os.path.basename(value)
@register.filter
def can_admin_team(user, team):
if user.is_superuser:
return True
if team.owner == user:
return True
if team in Team.objects.get_for_user(user, access=MEMBER_OWNER):
return True
return False
|
|
from __future__ import division, absolute_import, print_function
import collections
import pickle
import sys
from os import path
import numpy as np
from numpy.compat import asbytes
from numpy.testing import (
TestCase, run_module_suite, assert_, assert_equal, assert_array_equal,
assert_array_almost_equal, assert_raises, assert_warns
)
class TestFromrecords(TestCase):
def test_fromrecords(self):
r = np.rec.fromrecords([[456, 'dbe', 1.2], [2, 'de', 1.3]],
names='col1,col2,col3')
assert_equal(r[0].item(), (456, 'dbe', 1.2))
assert_equal(r['col1'].dtype.kind, 'i')
if sys.version_info[0] >= 3:
assert_equal(r['col2'].dtype.kind, 'U')
assert_equal(r['col2'].dtype.itemsize, 12)
else:
assert_equal(r['col2'].dtype.kind, 'S')
assert_equal(r['col2'].dtype.itemsize, 3)
assert_equal(r['col3'].dtype.kind, 'f')
def test_method_array(self):
r = np.rec.array(asbytes('abcdefg') * 100, formats='i2,a3,i4', shape=3, byteorder='big')
assert_equal(r[1].item(), (25444, asbytes('efg'), 1633837924))
def test_method_array2(self):
r = np.rec.array([(1, 11, 'a'), (2, 22, 'b'), (3, 33, 'c'), (4, 44, 'd'), (5, 55, 'ex'),
(6, 66, 'f'), (7, 77, 'g')], formats='u1,f4,a1')
assert_equal(r[1].item(), (2, 22.0, asbytes('b')))
def test_recarray_slices(self):
r = np.rec.array([(1, 11, 'a'), (2, 22, 'b'), (3, 33, 'c'), (4, 44, 'd'), (5, 55, 'ex'),
(6, 66, 'f'), (7, 77, 'g')], formats='u1,f4,a1')
assert_equal(r[1::2][1].item(), (4, 44.0, asbytes('d')))
def test_recarray_fromarrays(self):
x1 = np.array([1, 2, 3, 4])
x2 = np.array(['a', 'dd', 'xyz', '12'])
x3 = np.array([1.1, 2, 3, 4])
r = np.rec.fromarrays([x1, x2, x3], names='a,b,c')
assert_equal(r[1].item(), (2, 'dd', 2.0))
x1[1] = 34
assert_equal(r.a, np.array([1, 2, 3, 4]))
def test_recarray_fromfile(self):
data_dir = path.join(path.dirname(__file__), 'data')
filename = path.join(data_dir, 'recarray_from_file.fits')
fd = open(filename, 'rb')
fd.seek(2880 * 2)
r1 = np.rec.fromfile(fd, formats='f8,i4,a5', shape=3, byteorder='big')
fd.seek(2880 * 2)
r2 = np.rec.array(fd, formats='f8,i4,a5', shape=3, byteorder='big')
fd.close()
assert_equal(r1, r2)
def test_recarray_from_obj(self):
count = 10
a = np.zeros(count, dtype='O')
b = np.zeros(count, dtype='f8')
c = np.zeros(count, dtype='f8')
for i in range(len(a)):
a[i] = list(range(1, 10))
mine = np.rec.fromarrays([a, b, c], names='date,data1,data2')
for i in range(len(a)):
assert_((mine.date[i] == list(range(1, 10))))
assert_((mine.data1[i] == 0.0))
assert_((mine.data2[i] == 0.0))
def test_recarray_from_repr(self):
a = np.array([(1, 'ABC'), (2, "DEF")],
dtype=[('foo', int), ('bar', 'S4')])
recordarr = np.rec.array(a)
recarr = a.view(np.recarray)
recordview = a.view(np.dtype((np.record, a.dtype)))
recordarr_r = eval("numpy." + repr(recordarr), {'numpy': np})
recarr_r = eval("numpy." + repr(recarr), {'numpy': np})
recordview_r = eval("numpy." + repr(recordview), {'numpy': np})
assert_equal(type(recordarr_r), np.recarray)
assert_equal(recordarr_r.dtype.type, np.record)
assert_equal(recordarr, recordarr_r)
assert_equal(type(recarr_r), np.recarray)
assert_equal(recarr_r.dtype.type, np.record)
assert_equal(recarr, recarr_r)
assert_equal(type(recordview_r), np.ndarray)
assert_equal(recordview.dtype.type, np.record)
assert_equal(recordview, recordview_r)
def test_recarray_views(self):
a = np.array([(1, 'ABC'), (2, "DEF")],
dtype=[('foo', int), ('bar', 'S4')])
b = np.array([1, 2, 3, 4, 5], dtype=np.int64)
# check that np.rec.array gives right dtypes
assert_equal(np.rec.array(a).dtype.type, np.record)
assert_equal(type(np.rec.array(a)), np.recarray)
assert_equal(np.rec.array(b).dtype.type, np.int64)
assert_equal(type(np.rec.array(b)), np.recarray)
# check that viewing as recarray does the same
assert_equal(a.view(np.recarray).dtype.type, np.record)
assert_equal(type(a.view(np.recarray)), np.recarray)
assert_equal(b.view(np.recarray).dtype.type, np.int64)
assert_equal(type(b.view(np.recarray)), np.recarray)
# check that view to non-structured dtype preserves type=np.recarray
r = np.rec.array(np.ones(4, dtype="f4,i4"))
rv = r.view('f8').view('f4,i4')
assert_equal(type(rv), np.recarray)
assert_equal(rv.dtype.type, np.record)
# check that getitem also preserves np.recarray and np.record
r = np.rec.array(np.ones(4, dtype=[('a', 'i4'), ('b', 'i4'),
('c', 'i4,i4')]))
assert_equal(r['c'].dtype.type, np.record)
assert_equal(type(r['c']), np.recarray)
# suppress deprecation warning in 1.12 (remove in 1.13)
with assert_warns(FutureWarning):
assert_equal(r[['a', 'b']].dtype.type, np.record)
assert_equal(type(r[['a', 'b']]), np.recarray)
# and that it preserves subclasses (gh-6949)
class C(np.recarray):
pass
c = r.view(C)
assert_equal(type(c['c']), C)
# check that accessing nested structures keep record type, but
# not for subarrays, non-void structures, non-structured voids
test_dtype = [('a', 'f4,f4'), ('b', 'V8'), ('c', ('f4', 2)),
('d', ('i8', 'i4,i4'))]
r = np.rec.array([((1, 1), b'11111111', [1, 1], 1),
((1, 1), b'11111111', [1, 1], 1)], dtype=test_dtype)
assert_equal(r.a.dtype.type, np.record)
assert_equal(r.b.dtype.type, np.void)
assert_equal(r.c.dtype.type, np.float32)
assert_equal(r.d.dtype.type, np.int64)
# check the same, but for views
r = np.rec.array(np.ones(4, dtype='i4,i4'))
assert_equal(r.view('f4,f4').dtype.type, np.record)
assert_equal(r.view(('i4', 2)).dtype.type, np.int32)
assert_equal(r.view('V8').dtype.type, np.void)
assert_equal(r.view(('i8', 'i4,i4')).dtype.type, np.int64)
# check that we can undo the view
arrs = [np.ones(4, dtype='f4,i4'), np.ones(4, dtype='f8')]
for arr in arrs:
rec = np.rec.array(arr)
# recommended way to view as an ndarray:
arr2 = rec.view(rec.dtype.fields or rec.dtype, np.ndarray)
assert_equal(arr2.dtype.type, arr.dtype.type)
assert_equal(type(arr2), type(arr))
def test_recarray_repr(self):
# make sure non-structured dtypes also show up as rec.array
a = np.array(np.ones(4, dtype='f8'))
assert_(repr(np.rec.array(a)).startswith('rec.array'))
# check that the 'np.record' part of the dtype isn't shown
a = np.rec.array(np.ones(3, dtype='i4,i4'))
assert_equal(repr(a).find('numpy.record'), -1)
a = np.rec.array(np.ones(3, dtype='i4'))
assert_(repr(a).find('dtype=int32') != -1)
def test_recarray_from_names(self):
ra = np.rec.array([
(1, 'abc', 3.7000002861022949, 0),
(2, 'xy', 6.6999998092651367, 1),
(0, ' ', 0.40000000596046448, 0)],
names='c1, c2, c3, c4')
pa = np.rec.fromrecords([
(1, 'abc', 3.7000002861022949, 0),
(2, 'xy', 6.6999998092651367, 1),
(0, ' ', 0.40000000596046448, 0)],
names='c1, c2, c3, c4')
assert_(ra.dtype == pa.dtype)
assert_(ra.shape == pa.shape)
for k in range(len(ra)):
assert_(ra[k].item() == pa[k].item())
def test_recarray_conflict_fields(self):
ra = np.rec.array([(1, 'abc', 2.3), (2, 'xyz', 4.2),
(3, 'wrs', 1.3)],
names='field, shape, mean')
ra.mean = [1.1, 2.2, 3.3]
assert_array_almost_equal(ra['mean'], [1.1, 2.2, 3.3])
assert_(type(ra.mean) is type(ra.var))
ra.shape = (1, 3)
assert_(ra.shape == (1, 3))
ra.shape = ['A', 'B', 'C']
assert_array_equal(ra['shape'], [['A', 'B', 'C']])
ra.field = 5
assert_array_equal(ra['field'], [[5, 5, 5]])
assert_(isinstance(ra.field, collections.Callable))
def test_fromrecords_with_explicit_dtype(self):
a = np.rec.fromrecords([(1, 'a'), (2, 'bbb')],
dtype=[('a', int), ('b', np.object)])
assert_equal(a.a, [1, 2])
assert_equal(a[0].a, 1)
assert_equal(a.b, ['a', 'bbb'])
assert_equal(a[-1].b, 'bbb')
#
ndtype = np.dtype([('a', int), ('b', np.object)])
a = np.rec.fromrecords([(1, 'a'), (2, 'bbb')], dtype=ndtype)
assert_equal(a.a, [1, 2])
assert_equal(a[0].a, 1)
assert_equal(a.b, ['a', 'bbb'])
assert_equal(a[-1].b, 'bbb')
def test_recarray_stringtypes(self):
# Issue #3993
a = np.array([('abc ', 1), ('abc', 2)],
dtype=[('foo', 'S4'), ('bar', int)])
a = a.view(np.recarray)
assert_equal(a.foo[0] == a.foo[1], False)
def test_recarray_returntypes(self):
qux_fields = {'C': (np.dtype('S5'), 0), 'D': (np.dtype('S5'), 6)}
a = np.rec.array([('abc ', (1, 1), 1, ('abcde', 'fgehi')),
('abc', (2, 3), 1, ('abcde', 'jklmn'))],
dtype=[('foo', 'S4'),
('bar', [('A', int), ('B', int)]),
('baz', int), ('qux', qux_fields)])
assert_equal(type(a.foo), np.ndarray)
assert_equal(type(a['foo']), np.ndarray)
assert_equal(type(a.bar), np.recarray)
assert_equal(type(a['bar']), np.recarray)
assert_equal(a.bar.dtype.type, np.record)
assert_equal(type(a['qux']), np.recarray)
assert_equal(a.qux.dtype.type, np.record)
assert_equal(dict(a.qux.dtype.fields), qux_fields)
assert_equal(type(a.baz), np.ndarray)
assert_equal(type(a['baz']), np.ndarray)
assert_equal(type(a[0].bar), np.record)
assert_equal(type(a[0]['bar']), np.record)
assert_equal(a[0].bar.A, 1)
assert_equal(a[0].bar['A'], 1)
assert_equal(a[0]['bar'].A, 1)
assert_equal(a[0]['bar']['A'], 1)
assert_equal(a[0].qux.D, asbytes('fgehi'))
assert_equal(a[0].qux['D'], asbytes('fgehi'))
assert_equal(a[0]['qux'].D, asbytes('fgehi'))
assert_equal(a[0]['qux']['D'], asbytes('fgehi'))
def test_zero_width_strings(self):
# Test for #6430, based on the test case from #1901
cols = [['test'] * 3, [''] * 3]
rec = np.rec.fromarrays(cols)
assert_equal(rec['f0'], ['test', 'test', 'test'])
assert_equal(rec['f1'], ['', '', ''])
dt = np.dtype([('f0', '|S4'), ('f1', '|S')])
rec = np.rec.fromarrays(cols, dtype=dt)
assert_equal(rec.itemsize, 4)
assert_equal(rec['f0'], [b'test', b'test', b'test'])
assert_equal(rec['f1'], [b'', b'', b''])
class TestRecord(TestCase):
def setUp(self):
self.data = np.rec.fromrecords([(1, 2, 3), (4, 5, 6)],
dtype=[("col1", "<i4"),
("col2", "<i4"),
("col3", "<i4")])
def test_assignment1(self):
a = self.data
assert_equal(a.col1[0], 1)
a[0].col1 = 0
assert_equal(a.col1[0], 0)
def test_assignment2(self):
a = self.data
assert_equal(a.col1[0], 1)
a.col1[0] = 0
assert_equal(a.col1[0], 0)
def test_invalid_assignment(self):
a = self.data
def assign_invalid_column(x):
x[0].col5 = 1
self.assertRaises(AttributeError, assign_invalid_column, a)
def test_nonwriteable_setfield(self):
# gh-8171
r = np.rec.array([(0,), (1,)], dtype=[('f', 'i4')])
r.flags.writeable = False
with assert_raises(ValueError):
r.f = [2, 3]
with assert_raises(ValueError):
r.setfield([2, 3], *r.dtype.fields['f'])
def test_out_of_order_fields(self):
"""Ticket #1431."""
# this test will be invalid in 1.13
# suppress deprecation warning in 1.12 (remove in 1.13)
with assert_warns(FutureWarning):
x = self.data[['col1', 'col2']]
y = self.data[['col2', 'col1']]
assert_equal(x[0][0], y[0][1])
def test_pickle_1(self):
# Issue #1529
a = np.array([(1, [])], dtype=[('a', np.int32), ('b', np.int32, 0)])
assert_equal(a, pickle.loads(pickle.dumps(a)))
assert_equal(a[0], pickle.loads(pickle.dumps(a[0])))
def test_pickle_2(self):
a = self.data
assert_equal(a, pickle.loads(pickle.dumps(a)))
assert_equal(a[0], pickle.loads(pickle.dumps(a[0])))
def test_pickle_3(self):
# Issue #7140
a = self.data
pa = pickle.loads(pickle.dumps(a[0]))
assert_(pa.flags.c_contiguous)
assert_(pa.flags.f_contiguous)
assert_(pa.flags.writeable)
assert_(pa.flags.aligned)
def test_objview_record(self):
# https://github.com/numpy/numpy/issues/2599
dt = np.dtype([('foo', 'i8'), ('bar', 'O')])
r = np.zeros((1, 3), dtype=dt).view(np.recarray)
r.foo = np.array([1, 2, 3]) # TypeError?
# https://github.com/numpy/numpy/issues/3256
ra = np.recarray((2,), dtype=[('x', object), ('y', float), ('z', int)])
with assert_warns(FutureWarning):
ra[['x', 'y']] # TypeError?
def test_record_scalar_setitem(self):
# https://github.com/numpy/numpy/issues/3561
rec = np.recarray(1, dtype=[('x', float, 5)])
rec[0].x = 1
assert_equal(rec[0].x, np.ones(5))
def test_missing_field(self):
# https://github.com/numpy/numpy/issues/4806
arr = np.zeros((3,), dtype=[('x', int), ('y', int)])
assert_raises(ValueError, lambda: arr[['nofield']])
def test_find_duplicate():
l1 = [1, 2, 3, 4, 5, 6]
assert_(np.rec.find_duplicate(l1) == [])
l2 = [1, 2, 1, 4, 5, 6]
assert_(np.rec.find_duplicate(l2) == [1])
l3 = [1, 2, 1, 4, 1, 6, 2, 3]
assert_(np.rec.find_duplicate(l3) == [1, 2])
l3 = [2, 2, 1, 4, 1, 6, 2, 3]
assert_(np.rec.find_duplicate(l3) == [2, 1])
if __name__ == "__main__":
run_module_suite()
|
|
# Authors: Denis Engemann <[email protected]>
#
# License: BSD-3-Clause
from io import BytesIO
import os
import os.path as op
from functools import reduce, partial
import numpy as np
from numpy.testing import (assert_array_almost_equal, assert_array_equal,
assert_allclose, assert_equal)
import pytest
from mne.datasets import testing
from mne.io import read_raw_fif, read_raw_bti
from mne.io._digitization import _make_bti_dig_points
from mne.io.bti.bti import (_read_config,
_read_bti_header, _get_bti_dev_t,
_correct_trans, _get_bti_info,
_loc_to_coil_trans, _convert_coil_trans,
_check_nan_dev_head_t, _rename_channels)
from mne.io.bti.bti import _read_head_shape
from mne.io.tests.test_raw import _test_raw_reader
from mne.io.pick import pick_info
from mne.io.constants import FIFF
from mne import pick_types
from mne.utils import assert_dig_allclose
from mne.transforms import Transform, combine_transforms, invert_transform
base_dir = op.join(op.abspath(op.dirname(__file__)), 'data')
archs = 'linux', 'solaris'
pdf_fnames = [op.join(base_dir, 'test_pdf_%s' % a) for a in archs]
config_fnames = [op.join(base_dir, 'test_config_%s' % a) for a in archs]
hs_fnames = [op.join(base_dir, 'test_hs_%s' % a) for a in archs]
exported_fnames = [op.join(base_dir, 'exported4D_%s_raw.fif' % a)
for a in archs]
tmp_raw_fname = op.join(base_dir, 'tmp_raw.fif')
fname_2500 = op.join(testing.data_path(download=False), 'BTi', 'erm_HFH',
'c,rfDC')
fname_sim = op.join(testing.data_path(download=False), 'BTi', '4Dsim',
'c,rfDC')
fname_sim_filt = op.join(testing.data_path(download=False), 'BTi', '4Dsim',
'c,rfDC,fn50,o')
# the 4D exporter doesn't export all channels, so we confine our comparison
NCH = 248
@testing.requires_testing_data
def test_read_2500():
"""Test reading data from 2500 system."""
_test_raw_reader(read_raw_bti, pdf_fname=fname_2500, head_shape_fname=None)
def test_read_config():
"""Test read bti config file."""
# for config in config_fname, config_solaris_fname:
for config in config_fnames:
cfg = _read_config(config)
assert all('unknown' not in block.lower() and block != ''
for block in cfg['user_blocks'])
def test_crop_append():
"""Test crop and append raw."""
raw = _test_raw_reader(
read_raw_bti, pdf_fname=pdf_fnames[0],
config_fname=config_fnames[0], head_shape_fname=hs_fnames[0])
y, t = raw[:]
t0, t1 = 0.25 * t[-1], 0.75 * t[-1]
mask = (t0 <= t) * (t <= t1)
raw_ = raw.copy().crop(t0, t1)
y_, _ = raw_[:]
assert (y_.shape[1] == mask.sum())
assert (y_.shape[0] == y.shape[0])
def test_transforms():
"""Test transformations."""
bti_trans = (0.0, 0.02, 0.11)
bti_dev_t = Transform('ctf_meg', 'meg', _get_bti_dev_t(0.0, bti_trans))
    for pdf, config, hs in zip(pdf_fnames, config_fnames, hs_fnames):
raw = read_raw_bti(pdf, config, hs, preload=False)
dev_ctf_t = raw.info['dev_ctf_t']
dev_head_t_old = raw.info['dev_head_t']
ctf_head_t = raw.info['ctf_head_t']
# 1) get BTI->Neuromag
bti_dev_t = Transform('ctf_meg', 'meg', _get_bti_dev_t(0.0, bti_trans))
# 2) get Neuromag->BTI head
t = combine_transforms(invert_transform(bti_dev_t), dev_ctf_t,
'meg', 'ctf_head')
# 3) get Neuromag->head
dev_head_t_new = combine_transforms(t, ctf_head_t, 'meg', 'head')
assert_array_equal(dev_head_t_new['trans'], dev_head_t_old['trans'])
@pytest.mark.slowtest
def test_raw():
"""Test bti conversion to Raw object."""
for pdf, config, hs, exported in zip(pdf_fnames, config_fnames, hs_fnames,
exported_fnames):
# rx = 2 if 'linux' in pdf else 0
pytest.raises(ValueError, read_raw_bti, pdf, 'eggs', preload=False)
pytest.raises(ValueError, read_raw_bti, pdf, config, 'spam',
preload=False)
if op.exists(tmp_raw_fname):
os.remove(tmp_raw_fname)
ex = read_raw_fif(exported, preload=True)
ra = read_raw_bti(pdf, config, hs, preload=False)
assert ('RawBTi' in repr(ra))
assert_equal(ex.ch_names[:NCH], ra.ch_names[:NCH])
assert_array_almost_equal(ex.info['dev_head_t']['trans'],
ra.info['dev_head_t']['trans'], 7)
assert len(ex.info['dig']) in (3563, 5154)
assert_dig_allclose(ex.info, ra.info, limit=100)
coil1, coil2 = [np.concatenate([d['loc'].flatten()
for d in r_.info['chs'][:NCH]])
for r_ in (ra, ex)]
assert_array_almost_equal(coil1, coil2, 7)
loc1, loc2 = [np.concatenate([d['loc'].flatten()
for d in r_.info['chs'][:NCH]])
for r_ in (ra, ex)]
assert_allclose(loc1, loc2)
assert_allclose(ra[:NCH][0], ex[:NCH][0])
assert_array_equal([c['range'] for c in ra.info['chs'][:NCH]],
[c['range'] for c in ex.info['chs'][:NCH]])
assert_array_equal([c['cal'] for c in ra.info['chs'][:NCH]],
[c['cal'] for c in ex.info['chs'][:NCH]])
assert_array_equal(ra._cals[:NCH], ex._cals[:NCH])
# check our transforms
for key in ('dev_head_t', 'dev_ctf_t', 'ctf_head_t'):
if ex.info[key] is None:
pass
else:
assert (ra.info[key] is not None)
for ent in ('to', 'from', 'trans'):
assert_allclose(ex.info[key][ent],
ra.info[key][ent])
ra.save(tmp_raw_fname)
re = read_raw_fif(tmp_raw_fname)
print(re)
for key in ('dev_head_t', 'dev_ctf_t', 'ctf_head_t'):
assert (isinstance(re.info[key], dict))
this_t = re.info[key]['trans']
assert_equal(this_t.shape, (4, 4))
            # check that the matrix is not the identity
assert (not np.allclose(this_t, np.eye(4)))
os.remove(tmp_raw_fname)
def test_info_no_rename_no_reorder_no_pdf():
"""Test private renaming, reordering and partial construction option."""
for pdf, config, hs in zip(pdf_fnames, config_fnames, hs_fnames):
info, bti_info = _get_bti_info(
pdf_fname=pdf, config_fname=config, head_shape_fname=hs,
rotation_x=0.0, translation=(0.0, 0.02, 0.11), convert=False,
ecg_ch='E31', eog_ch=('E63', 'E64'),
rename_channels=False, sort_by_ch_name=False)
info2, bti_info = _get_bti_info(
pdf_fname=None, config_fname=config, head_shape_fname=hs,
rotation_x=0.0, translation=(0.0, 0.02, 0.11), convert=False,
ecg_ch='E31', eog_ch=('E63', 'E64'),
rename_channels=False, sort_by_ch_name=False)
assert_equal(info['ch_names'],
[ch['ch_name'] for ch in info['chs']])
assert_equal([n for n in info['ch_names'] if n.startswith('A')][:5],
['A22', 'A2', 'A104', 'A241', 'A138'])
assert_equal([n for n in info['ch_names'] if n.startswith('A')][-5:],
['A133', 'A158', 'A44', 'A134', 'A216'])
info = pick_info(info, pick_types(info, meg=True, stim=True,
resp=True))
info2 = pick_info(info2, pick_types(info2, meg=True, stim=True,
resp=True))
assert (info['sfreq'] is not None)
assert (info['lowpass'] is not None)
assert (info['highpass'] is not None)
assert (info['meas_date'] is not None)
assert_equal(info2['sfreq'], None)
assert_equal(info2['lowpass'], None)
assert_equal(info2['highpass'], None)
assert_equal(info2['meas_date'], None)
assert_equal(info['ch_names'], info2['ch_names'])
assert_equal(info['ch_names'], info2['ch_names'])
for key in ['dev_ctf_t', 'dev_head_t', 'ctf_head_t']:
assert_array_equal(info[key]['trans'], info2[key]['trans'])
assert_array_equal(
np.array([ch['loc'] for ch in info['chs']]),
np.array([ch['loc'] for ch in info2['chs']]))
# just check reading data | corner case
raw1 = read_raw_bti(
pdf_fname=pdf, config_fname=config, head_shape_fname=None,
sort_by_ch_name=False, preload=True)
# just check reading data | corner case
raw2 = read_raw_bti(
pdf_fname=pdf, config_fname=config, head_shape_fname=None,
rename_channels=False,
sort_by_ch_name=True, preload=True)
sort_idx = [raw1.bti_ch_labels.index(ch) for ch in raw2.bti_ch_labels]
raw1._data = raw1._data[sort_idx]
assert_array_equal(raw1._data, raw2._data)
assert_array_equal(raw2.bti_ch_labels, raw2.ch_names)
def test_no_conversion():
"""Test bti no-conversion option."""
get_info = partial(
_get_bti_info,
rotation_x=0.0, translation=(0.0, 0.02, 0.11), convert=False,
ecg_ch='E31', eog_ch=('E63', 'E64'),
rename_channels=False, sort_by_ch_name=False)
for pdf, config, hs in zip(pdf_fnames, config_fnames, hs_fnames):
raw_info, _ = get_info(pdf, config, hs, convert=False)
raw_info_con = read_raw_bti(
pdf_fname=pdf, config_fname=config, head_shape_fname=hs,
convert=True, preload=False).info
pick_info(raw_info_con,
pick_types(raw_info_con, meg=True, ref_meg=True),
copy=False)
pick_info(raw_info,
pick_types(raw_info, meg=True, ref_meg=True), copy=False)
bti_info = _read_bti_header(pdf, config)
dev_ctf_t = _correct_trans(bti_info['bti_transform'][0])
assert_array_equal(dev_ctf_t, raw_info['dev_ctf_t']['trans'])
assert_array_equal(raw_info['dev_head_t']['trans'], np.eye(4))
assert_array_equal(raw_info['ctf_head_t']['trans'], np.eye(4))
nasion, lpa, rpa, hpi, dig_points = _read_head_shape(hs)
dig, t, _ = _make_bti_dig_points(nasion, lpa, rpa, hpi, dig_points,
convert=False, use_hpi=False)
assert_array_equal(t['trans'], np.eye(4))
for ii, (old, new, con) in enumerate(zip(
dig, raw_info['dig'], raw_info_con['dig'])):
assert_equal(old['ident'], new['ident'])
assert_array_equal(old['r'], new['r'])
assert (not np.allclose(old['r'], con['r']))
if ii > 10:
break
ch_map = {ch['chan_label']: ch['loc'] for ch in bti_info['chs']}
for ii, ch_label in enumerate(raw_info['ch_names']):
if not ch_label.startswith('A'):
continue
t1 = ch_map[ch_label] # correction already performed in bti_info
t2 = raw_info['chs'][ii]['loc']
t3 = raw_info_con['chs'][ii]['loc']
assert_allclose(t1, t2, atol=1e-15)
assert (not np.allclose(t1, t3))
idx_a = raw_info_con['ch_names'].index('MEG 001')
idx_b = raw_info['ch_names'].index('A22')
assert_equal(
raw_info_con['chs'][idx_a]['coord_frame'],
FIFF.FIFFV_COORD_DEVICE)
assert_equal(
raw_info['chs'][idx_b]['coord_frame'],
FIFF.FIFFV_MNE_COORD_4D_HEAD)
def test_bytes_io():
"""Test bti bytes-io API."""
for pdf, config, hs in zip(pdf_fnames, config_fnames, hs_fnames):
raw = read_raw_bti(pdf, config, hs, convert=True, preload=False)
with open(pdf, 'rb') as fid:
pdf = BytesIO(fid.read())
with open(config, 'rb') as fid:
config = BytesIO(fid.read())
with open(hs, 'rb') as fid:
hs = BytesIO(fid.read())
raw2 = read_raw_bti(pdf, config, hs, convert=True, preload=False)
repr(raw2)
assert_array_equal(raw[:][0], raw2[:][0])
def test_setup_headshape():
"""Test reading bti headshape."""
for hs in hs_fnames:
nasion, lpa, rpa, hpi, dig_points = _read_head_shape(hs)
dig, t, _ = _make_bti_dig_points(nasion, lpa, rpa, hpi, dig_points)
expected = {'kind', 'ident', 'r'}
found = set(reduce(lambda x, y: list(x) + list(y),
[d.keys() for d in dig]))
assert (not expected - found)
def test_nan_trans():
"""Test unlikely case that the device to head transform is empty."""
for ii, pdf_fname in enumerate(pdf_fnames):
bti_info = _read_bti_header(
pdf_fname, config_fnames[ii], sort_by_ch_name=True)
dev_ctf_t = Transform('ctf_meg', 'ctf_head',
_correct_trans(bti_info['bti_transform'][0]))
# reading params
convert = True
rotation_x = 0.
translation = (0.0, 0.02, 0.11)
bti_dev_t = _get_bti_dev_t(rotation_x, translation)
bti_dev_t = Transform('ctf_meg', 'meg', bti_dev_t)
ecg_ch = 'E31'
eog_ch = ('E63', 'E64')
# read parts of info to get trans
bti_ch_names = list()
for ch in bti_info['chs']:
ch_name = ch['name']
if not ch_name.startswith('A'):
ch_name = ch.get('chan_label', ch_name)
bti_ch_names.append(ch_name)
neuromag_ch_names = _rename_channels(
bti_ch_names, ecg_ch=ecg_ch, eog_ch=eog_ch)
ch_mapping = zip(bti_ch_names, neuromag_ch_names)
# add some nan in some locations!
dev_ctf_t['trans'][:, 3] = np.nan
_check_nan_dev_head_t(dev_ctf_t)
for idx, (chan_4d, chan_neuromag) in enumerate(ch_mapping):
loc = bti_info['chs'][idx]['loc']
if loc is not None:
if convert:
t = _loc_to_coil_trans(bti_info['chs'][idx]['loc'])
t = _convert_coil_trans(t, dev_ctf_t, bti_dev_t)
@testing.requires_testing_data
@pytest.mark.parametrize('fname', (fname_sim, fname_sim_filt))
@pytest.mark.parametrize('preload', (True, False))
def test_bti_ch_data(fname, preload):
"""Test for gh-6048."""
read_raw_bti(fname, preload=preload) # used to fail with ascii decode err
@testing.requires_testing_data
def test_bti_set_eog():
"""Check that EOG channels can be set (gh-10092)."""
raw = read_raw_bti(fname_sim,
preload=False,
eog_ch=('X65', 'X67', 'X69', 'X66', 'X68'))
assert_equal(len(pick_types(raw.info, eog=True)), 5)
|
|
#!/usr/bin/env python3
# Provides utilities for standalone test scripts.
# This script is not intended to be run directly.
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), '..', '..'))
import stat
import odrive
from odrive.enums import *
import odrive.utils
import fibre
from fibre import Logger, Event
import argparse
import yaml
from inspect import signature
import itertools
import time
import tempfile
import io
import re
import datetime
from typing import Union, Tuple
import traceback
# needed for curve fitting
import numpy as np
import scipy.optimize
import scipy.ndimage.filters
# Assert utils ----------------------------------------------------------------#
class TestFailed(Exception):
def __init__(self, message):
Exception.__init__(self, message)
def test_assert_eq(observed, expected, range=None, accuracy=None):
sign = lambda x: 1 if x >= 0 else -1
    # Comparison with absolute range
if not range is None:
if (observed < expected - range) or (observed > expected + range):
raise TestFailed("value out of range: expected {}+-{} but observed {}".format(expected, range, observed))
    # Comparison with relative range
elif not accuracy is None:
if sign(observed) != sign(expected) or (abs(observed) < abs(expected) * (1 - accuracy)) or (abs(observed) > abs(expected) * (1 + accuracy)):
raise TestFailed("value out of range: expected {}+-{}% but observed {}".format(expected, accuracy*100.0, observed))
    # Exact comparison
else:
if observed != expected:
raise TestFailed("value mismatch: expected {} but observed {}".format(expected, observed))
def test_assert_within(observed, lower_bound, upper_bound, accuracy=0.0):
"""
    Checks if the value is within the closed interval [lower_bound, upper_bound].
    The permissible range can be expanded in both directions by the coefficient "accuracy".
    E.g. an accuracy of 1.0 expands the range by a total factor of 3.0.
"""
lower_bound, upper_bound = (
(lower_bound - (upper_bound - lower_bound) * accuracy),
(upper_bound + (upper_bound - lower_bound) * accuracy)
)
if (observed < lower_bound) or (observed > upper_bound):
raise TestFailed(f"the oberved value {observed} is outside the interval [{lower_bound}, {upper_bound}]")
# Other utils -----------------------------------------------------------------#
def disjoint_sets(list_of_sets: list):
while len(list_of_sets):
current_set, list_of_sets = list_of_sets[0], list_of_sets[1:]
updated = True
while updated:
updated = False
for i, s in enumerate(list_of_sets):
if len(current_set.intersection(s)):
current_set = current_set.union(s)
list_of_sets = list_of_sets[:i] + list_of_sets[(i+1):]
updated = True
yield current_set
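# Illustrative example (made-up sets): overlapping sets are merged transitively,
#     list(disjoint_sets([{1, 2}, {2, 3}, {4}])) -> [{1, 2, 3}, {4}]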
def is_list_like(arg):
return hasattr(arg, '__iter__') and not isinstance(arg, str)
def all_unique(lst):
seen = list()
return not any(i in seen or seen.append(i) for i in lst)
def modpm(val, range):
return ((val + (range / 2)) % range) - (range / 2)
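# Illustrative examples (made-up values): modpm wraps a value into the
# half-open interval [-range/2, +range/2), which is handy for angle errors:
#     modpm(350, 360) -> -10
#     modpm(10, 360)  -> 10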
def clamp(val, lower_bound, upper_bound):
return min(max(val, lower_bound), upper_bound)
def record_log(data_getter, duration=5.0):
logger.debug(f"Recording log for {duration}s...")
data = []
start = time.monotonic()
while time.monotonic() - start < duration:
data.append((time.monotonic() - start,) + tuple(data_getter()))
return np.array(data)
def save_log(data, id=None):
import json
filename = '/tmp/log{}.json'.format('' if id is None else str(id))
with open(filename, 'w+') as fp:
json.dump(data.tolist(), fp, indent=2)
print(f'data saved to {filename}')
def fit_line(data):
func = lambda x, a, b: x*a + b
slope, offset = scipy.optimize.curve_fit(func, data[:,0], data[:,1], [1.0, 0])[0]
return slope, offset, func(data[:,0], slope, offset)
def fit_sawtooth(data, min_val, max_val, sigma=10):
"""
Fits the data to a sawtooth function.
    Returns the fitted slope and offset, and the fitted curve evaluated at the sample times.
The sample data must span at least one full period.
data is expected to contain one row (t, y) for each sample.
"""
# Sawtooth function with free parameters for period and x-shift
func = lambda x, a, b: np.mod(a * x + b, max_val - min_val) + min_val
# Fit period and x-shift
mid_point = (min_val + max_val) / 2
filtered_data = scipy.ndimage.filters.gaussian_filter(data[:,1], sigma=sigma)
if max_val > min_val:
zero_crossings = data[np.where((filtered_data[:-1] > mid_point) & (filtered_data[1:] < mid_point))[0], 0]
else:
zero_crossings = data[np.where((filtered_data[:-1] < mid_point) & (filtered_data[1:] > mid_point))[0], 0]
if len(zero_crossings) == 0:
# No zero-crossing - fit simple line
slope, offset, _ = fit_line(data)
elif len(zero_crossings) == 1:
# One zero-crossing - fit line based on the longer half
z_index = np.where(data[:,0] > zero_crossings[0])[0][0]
        if z_index > len(data[:,0]) // 2:
slope, offset, _ = fit_line(data[:z_index])
else:
slope, offset, _ = fit_line(data[z_index:])
else:
# Two or more zero-crossings - determine period based on average distance between zero-crossings
period = (zero_crossings[1:] - zero_crossings[:-1]).mean()
slope = (max_val - min_val) / period
#shift = scipy.optimize.curve_fit(lambda x, b: func(x, period, b), data[:,0], data[:,1], [0.0])[0][0]
if np.std(np.mod(zero_crossings, period)) < np.std(np.mod(zero_crossings + period/2, period)):
shift = np.mean(np.mod(zero_crossings, period))
else:
shift = np.mean(np.mod(zero_crossings + period/2, period)) - period/2
offset = -slope * shift
return slope, offset, func(data[:,0], slope, offset)
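# Minimal usage sketch for fit_sawtooth on synthetic data; the sample timing
# and value range below are made up for illustration only.
def _example_fit_sawtooth():
    t = np.linspace(0.0, 5.0, 1000)
    data = np.stack([t, np.mod(2.0 * t, 1.0)], axis=1)  # sawtooth in [0, 1) with slope 2
    slope, offset, fitted = fit_sawtooth(data, 0.0, 1.0)
    # slope should come out close to 2.0; `fitted` holds one model value per sample
    return slope, offset, fitted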
def test_curve_fit(data, fitted_curve, max_mean_err, inlier_range, max_outliers):
diffs = data[:,1] - fitted_curve
mean_err = np.abs(diffs).mean()
if mean_err > max_mean_err:
save_log(np.concatenate([data, np.array([fitted_curve]).transpose()], 1))
raise TestFailed("curve fit has too large mean error: {} > {}".format(mean_err, max_mean_err))
outliers = np.count_nonzero((diffs > inlier_range) | (diffs < -inlier_range))
if outliers > max_outliers:
save_log(np.concatenate([data, np.array([fitted_curve]).transpose()], 1))
raise TestFailed("curve fit has too many outliers (err > {}): {} > {}".format(inlier_range, outliers, max_outliers))
def test_watchdog(axis, feed_func, logger: Logger):
"""
Tests the watchdog of one axis, using the provided function to feed the watchdog.
    This test assumes that the testing host adds no more than 300 ms of random delay.
"""
start = time.monotonic()
axis.config.enable_watchdog = False
axis.error = 0
axis.config.watchdog_timeout = 1.0
axis.watchdog_feed()
axis.config.enable_watchdog = True
test_assert_eq(axis.error, 0)
for _ in range(5): # keep the watchdog alive for 3.5 seconds
time.sleep(0.7)
logger.debug('feeding watchdog at {}s'.format(time.monotonic() - start))
feed_func()
err = axis.error
logger.debug('checking error at {}s'.format(time.monotonic() - start))
test_assert_eq(err, 0)
logger.debug('letting watchdog expire...')
time.sleep(1.3) # let the watchdog expire
test_assert_eq(axis.error, AXIS_ERROR_WATCHDOG_TIMER_EXPIRED)
class SafeTerminator():
"""
Context that can be used in a "with" statement to facilitate safe shutdown
of the test rig when the test ends (succeeds or fails).
"""
def __init__(self, logger, *axes):
self.logger = logger
self.axes = axes
def __enter__(self):
pass
def __exit__(self, exc_type, exc_val, exc_tb):
# TODO: cut power supply
self.logger.debug('clearing config...')
idle_axes = []
for axis_ctx in self.axes:
try:
axis_ctx.handle.requested_state = AXIS_STATE_IDLE
idle_axes.append(axis_ctx)
except:
self.logger.error(f"can't put axis {axis_ctx} into idle")
# TODO: review if erase_configuration is safe during active PWM
time.sleep(0.005)
        for odrv in set(axis_ctx.parent for axis_ctx in idle_axes):
            odrv.erase_config_and_reboot()
# Test Components -------------------------------------------------------------#
class Component(object):
def __init__(self, parent):
self.parent = parent
class ODriveComponent(Component):
def __init__(self, yaml: dict):
self.handle = None
self.yaml = yaml
if yaml['board-version'].startswith("v3."):
self.encoders = [ODriveEncoderComponent(self, 0, yaml['encoder0']), ODriveEncoderComponent(self, 1, yaml['encoder1'])]
self.axes = [ODriveAxisComponent(self, 0, yaml['motor0']), ODriveAxisComponent(self, 1, yaml['motor1'])]
gpio_nums = range(1,9)
elif yaml['board-version'].startswith("v4."):
self.encoders = [ODriveEncoderComponent(self, 0, yaml['encoder0'])]
self.axes = [ODriveAxisComponent(self, 0, yaml['motor0'])]
gpio_nums = range(23)
else:
raise Exception("unknown board version {}".format(yaml['board-version']))
for i in gpio_nums:
self.__setattr__('gpio' + str(i), Component(self))
self.can = Component(self)
self.sck = Component(self)
self.miso = Component(self)
self.mosi = Component(self)
def get_subcomponents(self):
for enc_ctx in self.encoders:
yield 'encoder' + str(enc_ctx.num), enc_ctx
for axis_ctx in self.axes:
yield 'axis' + str(axis_ctx.num), axis_ctx
for k in dir(self):
if k.startswith('gpio'):
yield k, getattr(self, k)
yield 'can', self.can
yield 'spi.sck', self.sck
yield 'spi.miso', self.miso
yield 'spi.mosi', self.mosi
def prepare(self, logger: Logger):
"""
Connects to the ODrive
"""
        if self.handle is not None:
return
logger.debug('waiting for {} ({})'.format(self.yaml['name'], self.yaml['serial-number']))
self.handle = odrive.find_any(channel_termination_token=shutdown_token, serial_number=self.yaml['serial-number'], timeout=60)#, printer=print)
assert(self.handle)
#for axis_idx, axis_ctx in enumerate(self.axes):
# axis_ctx.handle = getattr(self.handle, f'axis{axis_idx}')
for encoder_idx, encoder_ctx in enumerate(self.encoders):
encoder_ctx.handle = getattr(self.handle, f'axis{encoder_idx}').encoder
# TODO: distinguish between axis and motor context
for axis_idx, axis_ctx in enumerate(self.axes):
axis_ctx.handle = getattr(self.handle, f'axis{axis_idx}')
def disable_mappings(self):
for k in dir(self.handle.config):
if re.match(r'gpio[0-9]+_pwm_mapping', k):
getattr(self.handle.config, k).endpoint = None
for k in dir(self.handle.config):
if re.match(r'gpio[0-9]+_analog_mapping', k):
getattr(self.handle.config, k).endpoint = None
def save_config_and_reboot(self):
try:
self.handle.save_configuration()
except fibre.ObjectLostError:
pass # this is expected
self.handle = None
time.sleep(2)
self.prepare(logger)
def erase_config_and_reboot(self):
try:
self.handle.erase_configuration()
except fibre.ObjectLostError:
pass # this is expected
self.handle = None
time.sleep(2)
self.prepare(logger)
class MotorComponent(Component):
def __init__(self, yaml: dict):
self.yaml = yaml
self.shaft = Component(self)
self.phases = Component(self)
def get_subcomponents(self):
return [('shaft', self.shaft), ('phases', self.phases)]
def prepare(self, logger: Logger):
pass
class ODriveAxisComponent(Component):
def __init__(self, parent: ODriveComponent, num: int, yaml: dict):
Component.__init__(self, parent)
self.handle = None
self.yaml = yaml # TODO: this is bad naming
self.num = num
def prepare(self, logger: Logger):
self.parent.prepare(logger)
class ODriveEncoderComponent(Component):
def __init__(self, parent: ODriveComponent, num: int, yaml: dict):
Component.__init__(self, parent)
self.handle = None
self.yaml = yaml
self.num = num
self.z = Component(self)
self.a = Component(self)
self.b = Component(self)
def get_subcomponents(self):
return [('z', self.z), ('a', self.a), ('b', self.b)]
def prepare(self, logger: Logger):
self.parent.prepare(logger)
class EncoderComponent(Component):
def __init__(self, parent: Component, yaml: dict):
Component.__init__(self, parent)
self.yaml = yaml
self.z = Component(self)
self.a = Component(self)
self.b = Component(self)
self.shaft = Component(self)
def get_subcomponents(self):
return [('z', self.z), ('a', self.a), ('b', self.b), ('shaft', self.shaft)]
class GeneralPurposeComponent(Component):
def __init__(self, yaml: dict):
self.components = {}
for component_yaml in yaml.get('components', []):
if component_yaml['type'] == 'can':
self.components[component_yaml['name']] = CanInterfaceComponent(self, component_yaml)
if component_yaml['type'] == 'uart':
self.components[component_yaml['name']] = SerialPortComponent(self, component_yaml)
if component_yaml['type'] == 'gpio':
self.components['gpio' + str(component_yaml['num'])] = LinuxGpioComponent(self, component_yaml)
def get_subcomponents(self):
return self.components.items()
class LinuxGpioComponent(Component):
def __init__(self, parent: Component, yaml: dict):
Component.__init__(self, parent)
self.num = int(yaml['num'])
def config(self, output: bool):
with open("/sys/class/gpio/gpio{}/direction".format(self.num), "w") as fp:
            fp.write('out' if output else 'in')
def write(self, state: bool):
with open("/sys/class/gpio/gpio{}/value".format(self.num), "w") as fp:
fp.write('1' if state else '0')
class SerialPortComponent(Component):
def __init__(self, parent: Component, yaml: dict):
Component.__init__(self, parent)
self.yaml = yaml
self.tx = Component(self)
self.rx = Component(self)
def get_subcomponents(self):
yield 'tx', self.tx
yield 'rx', self.rx
def open(self, baudrate: int):
import serial
return serial.Serial(self.yaml['port'], baudrate, timeout=1)
class CanInterfaceComponent(Component):
def __init__(self, parent: Component, yaml: dict):
Component.__init__(self, parent)
self.handle = None
self.yaml = yaml
def prepare(self, logger: Logger):
        if self.handle is not None:
return
import can
self.handle = can.interface.Bus(bustype='socketcan', channel=self.yaml['interface'], bitrate=250000)
class TeensyGpio(Component):
def __init__(self, parent: Component, num: int):
Component.__init__(self, parent)
self.num = num
class TeensyComponent(Component):
def __init__(self, testrig, yaml: dict):
self.testrig = testrig
self.yaml = yaml
if self.yaml['board-version'] == 'teensy:avr:teensy40':
self.gpios = [TeensyGpio(self, i) for i in range(24)]
elif self.yaml['board-version'] == 'teensy:avr:teensy41':
self.gpios = [TeensyGpio(self, i) for i in range(42)]
else:
raise Exception(f"unknown Arduino board {self.yaml['board-version']}")
self.routes = []
self.previous_routes = object()
def get_subcomponents(self):
for i, gpio in enumerate(self.gpios):
yield ('gpio' + str(i)), gpio
yield 'program', Component(self)
def add_route(self, input: TeensyGpio, output: TeensyGpio, noise_enable: TeensyGpio):
self.routes.append((input, output, noise_enable))
def commit_routing_config(self, logger: Logger):
if self.previous_routes == self.routes:
self.routes = []
return
code = ''
code += 'bool noise = false;\n'
code += 'void setup() {\n'
for i, o, n in self.routes:
code += ' pinMode({}, OUTPUT);\n'.format(o.num)
code += '}\n'
code += 'void loop() {\n'
code += ' noise = !noise;\n'
for i, o, n in self.routes:
if n:
# with noise enable
code += ' digitalWrite({}, digitalRead({}) ? noise : digitalRead({}));\n'.format(o.num, n.num, i.num)
else:
# no noise enable
code += ' digitalWrite({}, digitalRead({}));\n'.format(o.num, i.num)
code += '}\n'
self.compile_and_program(code)
self.previous_routes = self.routes
self.routes = []
def compile(self, sketchfile, hexfile):
env = os.environ.copy()
env['ARDUINO_COMPILE_DESTINATION'] = hexfile
run_shell(
['arduino', '--board', self.yaml['board-version'], '--verify', sketchfile],
logger, env = env, timeout = 120)
def program(self, hex_file_path: str, logger: Logger):
"""
Programs the specified hex file onto the Teensy.
To reset the Teensy, a GPIO of the local system must be connected to the
Teensy's "Program" pin.
"""
# todo: this should be treated like a regular setup resource
program_gpio = self.testrig.get_directly_connected_components(self.testrig.get_component_name(self) + '.program')[0]
        # Put the Teensy into program mode by pulling its program pin down
program_gpio.config(output = True)
program_gpio.write(False)
time.sleep(0.1)
program_gpio.write(True)
run_shell(["teensy-loader-cli", "-mmcu=" + self.yaml['board-version'].rpartition(':')[2].upper(), "-w", hex_file_path], logger, timeout = 5)
time.sleep(0.5) # give it some time to boot
def compile_and_program(self, code: str):
with tempfile.TemporaryDirectory() as temp_dir:
with open(os.path.join(temp_dir, 'code.ino'), 'w+') as code_fp:
code_fp.write(code)
code_fp.flush()
code_fp.seek(0)
print('Writing code to teensy: ')
print(code_fp.read())
with tempfile.NamedTemporaryFile(suffix='.hex') as hex_fp:
self.compile(code_fp.name, hex_fp.name)
self.program(hex_fp.name, logger)
class LowPassFilterComponent(Component):
def __init__(self, parent: Component):
Component.__init__(self, parent)
self.en = Component(self)
def get_subcomponents(self):
yield 'en', self.en
class TestFixture(object):
"""
Base class for test fixtures. A test fixture is what defines the
    prerequisites for a particular test.
"""
def all_of(*test_fixtures):
test_fixtures = [(tf._subfixtures if isinstance(tf, CompositeTestFixture) else [[tf]])
for tf in test_fixtures if not tf is None]
# Each item in test_fixtures is now in disjunctive normal form and we
# flatten this into a single expression in disjunctive normal form
combinations = []
for combination in itertools.product([[]], *test_fixtures):
# flatten list of lists
combination = [item for c in combination for item in c]
combinations.append(combination)
if len(combinations) == 1 and len(combinations[0]) == 0:
return None
elif len(combinations) == 1 and len(combinations[0]) == 1:
return combinations[0][0]
else:
return CompositeTestFixture(combinations)
def any_of(*test_fixtures):
test_fixtures = [(tf._subfixtures if isinstance(tf, CompositeTestFixture) else [[tf]])
for tf in test_fixtures]
# Flatten list of lists into list
combinations = [item for c in test_fixtures for item in c]
if len(combinations) == 1 and len(combinations[0]) == 0:
return None
elif len(combinations) == 1 and len(combinations[0]) == 1:
return combinations[0][0]
else:
return CompositeTestFixture(combinations)
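# Sketch of how the combinators above compose (illustration only; A, B and C stand for
# arbitrary TestFixture instances). Results are kept in disjunctive normal form: a list
# of alternative combinations, where each combination is a list of fixtures that must
# all be prepared.
#
#   TestFixture.any_of(A, B)                         -> CompositeTestFixture([[A], [B]])
#   TestFixture.all_of(A, C)                         -> CompositeTestFixture([[A, C]])
#   TestFixture.all_of(TestFixture.any_of(A, B), C)  -> CompositeTestFixture([[A, C], [B, C]])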
class CompositeTestFixture(TestFixture):
"""
Represents the combination of several test fixtures. The combinations are
stored as a list of lists representing a disjunctive normal form.
Do not construct this class directly, use TestFixture.any_of or
TestFixture.all_of instead.
"""
def __init__(self, subfixtures: list):
self._subfixtures = subfixtures
class TeensyForwardingFixture(TestFixture):
def __init__(self, teensy: TeensyComponent, high_z: TeensyGpio, low_z: TeensyGpio):
self.teensy = teensy
self.high_z = high_z
self.low_z = low_z
self.noise_enable = None
def prepare(self, logger: Logger):
self.teensy.add_route(self.high_z, self.low_z, self.noise_enable)
def get_resources(self):
return [(self.teensy, False), (self.high_z, True), (self.low_z, True), (self.noise_enable, True)]
class ClosedLoopControlFixture(TestFixture):
def __init__(self, axis_ctx: ODriveAxisComponent, motor_ctx: MotorComponent, enc_ctx: EncoderComponent):
self.axis_ctx, self.motor_ctx, self.enc_ctx = (axis_ctx, motor_ctx, enc_ctx)
def get_resources(self):
return []
def prepare(self, logger: Logger):
# Make sure no funny configuration is active
logger.debug('Setting up clean configuration...')
self.axis_ctx.parent.erase_config_and_reboot()
# Set motor calibration values
self.axis_ctx.handle.motor.config.phase_resistance = float(self.motor_ctx.yaml['phase-resistance'])
self.axis_ctx.handle.motor.config.phase_inductance = float(self.motor_ctx.yaml['phase-inductance'])
self.axis_ctx.handle.motor.config.pre_calibrated = True
# Set brake resistor settings
if 'brake-resistance' in self.axis_ctx.parent.yaml:
logger.debug(f"brake resistor set to {self.axis_ctx.parent.yaml['brake-resistance']} Ohm")
self.axis_ctx.parent.handle.config.brake_resistance = float(self.axis_ctx.parent.yaml['brake-resistance'])
# The docs say this requires a reboot but here's a small secret:
# Since the brake resistor is also started in clear_errors() this
# circumvents the need for a reboot.
self.axis_ctx.parent.handle.config.enable_brake_resistor = True
else:
logger.debug("brake resistor disabled")
# TODO: set vbus voltage trip level based on yaml
self.axis_ctx.parent.handle.config.dc_max_negative_current = -1
self.axis_ctx.parent.handle.config.enable_brake_resistor = False
# Set calibration settings
self.axis_ctx.handle.encoder.config.direction = 0
self.axis_ctx.handle.encoder.config.use_index = False
self.axis_ctx.handle.encoder.config.calib_scan_omega = 12.566 # 2 electrical revolutions per second
self.axis_ctx.handle.encoder.config.calib_scan_distance = 50.265 # 8 revolutions
self.axis_ctx.handle.encoder.config.bandwidth = 1000
self.axis_ctx.parent.handle.clear_errors()
logger.debug('Calibrating encoder offset...')
request_state(self.axis_ctx, AXIS_STATE_ENCODER_OFFSET_CALIBRATION)
time.sleep(9) # actual calibration takes 8 seconds
test_assert_eq(self.axis_ctx.handle.current_state, AXIS_STATE_IDLE)
test_assert_no_error(self.axis_ctx)
class AnyTestCase():
"""
    Helper to specify that the test case may be run with any of the specified
parameter combinations.
"""
def __init__(self, *alternatives):
self.alternatives = alternatives
class TestRig():
def __init__(self, yaml: dict, logger: Logger):
# Contains all components (including subcomponents).
# Ports are components too.
self.components_by_name = {} # {'name': object, ...}
        self.names_by_component = {} # {object: 'name', ...}
def add_component(name, component):
self.components_by_name[name] = component
self.names_by_component[component] = name
if hasattr(component, 'get_subcomponents'):
for subname, subcomponent in component.get_subcomponents():
add_component(name + '.' + subname, subcomponent)
for component_yaml in yaml['components']:
if component_yaml['type'] == 'odrive':
add_component(component_yaml['name'], ODriveComponent(component_yaml))
elif component_yaml['type'] == 'generalpurpose':
add_component(component_yaml['name'], GeneralPurposeComponent(component_yaml))
elif component_yaml['type'] == 'arduino':
add_component(component_yaml['name'], TeensyComponent(self, component_yaml))
elif component_yaml['type'] == 'motor':
add_component(component_yaml['name'], MotorComponent(component_yaml))
elif component_yaml['type'] == 'encoder':
add_component(component_yaml['name'], EncoderComponent(self, component_yaml))
elif component_yaml['type'] == 'lpf':
add_component(component_yaml['name'], LowPassFilterComponent(self))
else:
logger.warn('test rig has unsupported component ' + component_yaml['type'])
continue
        # List of disjoint sets, where each set holds references to the mutually connected components
self.connections = []
for connection_yaml in yaml['connections']:
self.connections.append(set(self.components_by_name[name] for name in connection_yaml))
self.connections = list(disjoint_sets(self.connections))
# Dict for fast lookup of the connection sets for each port
self.net_by_component = {}
for s in self.connections:
for port in s:
self.net_by_component[port] = s
def get_closed_loop_combos(self, init: bool = True):
"""
Fetches all connected (odrive axis, motor, encoder) combos in the test rig.
Returns a generator of tuples where each tuple follows the form:
(axis, motor, encoder, test_fixture).
"""
all_axes = self.get_components(ODriveAxisComponent)
all_motors = self.get_components(MotorComponent)
all_encoders = self.get_components(EncoderComponent)
for axis, motor, encoder in itertools.product(all_axes, all_motors, all_encoders):
is_connected, tf1 = self.check_connections([
(axis, motor.phases),
(motor.shaft, encoder.shaft),
(encoder.a, axis.parent.encoders[axis.num].a),
(encoder.b, axis.parent.encoders[axis.num].b)
])
if not is_connected:
continue
tf2 = ClosedLoopControlFixture(axis, motor, encoder) if init else None
test_fixture = TestFixture.all_of(tf1, tf2)
yield axis, motor, encoder, test_fixture
def check_connection(self, component1: Component, component2: Component, mode: str = 'indirect'):
"""
Checks if the specified components are connected or connectable through
a test fixture.
If there are one-directional connections, they must point from
component1 to component2.
        Returns: A tuple (connectable: bool, test_fixture: TestFixture). If
connectable is True then the test_fixture, if not None, must
be prepared before the components are actually connected.
"""
assert(mode in ['direct', 'indirect'])
net1 = self.net_by_component.get(component1, set([component1]))
if component2 in net1:
return True, None # The components are directly connected
if mode == 'direct':
return False, None
net2 = self.net_by_component.get(component2, set([component2]))
possible_test_fixtures = []
for port1, port2 in itertools.product(net1, net2):
if (isinstance(port1, TeensyGpio) and isinstance(port2, TeensyGpio) and port1.parent == port2.parent):
possible_test_fixtures.append(TeensyForwardingFixture(port1.parent, port1, port2))
if not len(possible_test_fixtures):
return False, None # no forwarding is possible between the two components
else:
return True, TestFixture.any_of(*possible_test_fixtures)
def check_connections(self, components: list, mode: str = 'indirect'):
tfs = []
for component1, component2 in components:
is_connected, tf = self.check_connection(component1, component2, mode)
if not is_connected:
return False, None
tfs.append(tf)
return True, TestFixture.all_of(*tfs)
def get_components(self, t: type):
"""Returns a tuple (name, component) for all components that are of the specified type"""
return (comp for comp in self.names_by_component.keys() if isinstance(comp, t))
def get_component_name(self, component: Component):
#if isinstance(component, ProxiedComponent):
# return self.names_by_component[component.impl]
#else:
return self.names_by_component[component]
def get_directly_connected_components(self, component: Union[str, Component]):
"""
Returns all components that are directly connected to the specified
component, excluding the specified component itself.
"""
if isinstance(component, str):
component = self.components_by_name[component]
result = self.net_by_component.get(component, set([component]))
return [c for c in result if (c != component)]
def get_connected_components(self, src: Union[dict, Tuple[Union[Component, str], bool]], comp_type: type = None):
"""
Returns all components that are either directly or indirectly (through a
Teensy) connected to the specified component(s).
component: Either:
- A component object.
- A component name given as string.
- A tuple of the form (comp, dir) where comp is a component object
or name and dir specifies the data direction.
The direction is required if routing through a Teensy should be
considered.
            - A dict {subcomponent: val} where subcomponent is a string
such as 'tx' or 'rx' and val is of one of the forms described above.
A type can be specified to filter the connected components.
"""
candidates = self.get_components(comp_type)
if isinstance(src, dict):
for candidate in candidates:
subcomponents = dict(candidate.get_subcomponents())
if len(set(src.keys()) - set(subcomponents.keys())):
continue # candidate doesn't have all of the requested ports
test_fixtures = []
for name, subsrc in src.items():
srcport, direction = subsrc if isinstance(subsrc, tuple) else (subsrc, False)
is_connected, test_fixture = (self.check_connection(srcport, subcomponents[name]) if direction else
self.check_connection(subcomponents[name], srcport))
if not is_connected:
break
test_fixtures.append(test_fixture)
if len(test_fixtures) < len(src.items()):
continue # not all of the ports are connected to the candidate's ports
yield candidate, TestFixture.all_of(*test_fixtures)
else:
src, direction = src if isinstance(src, tuple) else (src, False)
assert(isinstance(src, Component))
for candidate in candidates:
is_connected, test_fixture = (self.check_connection(src, candidate) if direction else
self.check_connection(candidate, src))
if is_connected:
yield candidate, test_fixture
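# Usage sketch for TestRig.get_connected_components() (hypothetical component names,
# illustration only):
#
#   # every ODrive axis whose phase terminals are wired to this motor:
#   for axis, tf in testrig.get_connected_components(motor_ctx.phases, ODriveAxisComponent):
#       ...
#   # a serial port whose rx is driven by gpio_a and whose tx drives gpio_b:
#   for port, tf in testrig.get_connected_components(
#           {'rx': (gpio_a, True), 'tx': (gpio_b, False)}, SerialPortComponent):
#       ...
#
# In both cases tf is a TestFixture (or None) that must be prepared before the
# connection is actually usable.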
# Helper functions ------------------------------------------------------------#
def request_state(axis_ctx: ODriveAxisComponent, state, expect_success=True):
axis_ctx.handle.requested_state = state
time.sleep(0.001)
if expect_success:
test_assert_eq(axis_ctx.handle.current_state, state)
else:
test_assert_eq(axis_ctx.handle.current_state, AXIS_STATE_IDLE)
test_assert_eq(axis_ctx.handle.error, AXIS_ERROR_INVALID_STATE)
axis_ctx.handle.error = AXIS_ERROR_NONE # reset error
def test_assert_no_error(axis_ctx: ODriveAxisComponent):
any_error = (axis_ctx.handle.motor.error |
axis_ctx.handle.encoder.error |
#axis_ctx.handle.sensorless_estimator.error | # TODO: reenable
axis_ctx.handle.error) != 0 # TODO: this is not the complete list of components
if any_error:
lines = []
odrive.utils.dump_errors(axis_ctx.parent.handle, printfunc = lines.append)
raise TestFailed("\n".join(lines))
def run_shell(command_line, logger, env=None, timeout=None):
"""
Runs a shell command in the current directory
"""
import shlex
import subprocess
logger.debug("invoke: " + str(command_line))
if isinstance(command_line, list):
cmd = command_line
else:
cmd = shlex.split(command_line)
result = subprocess.run(cmd, timeout=timeout,
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
env=env)
if result.returncode != 0:
logger.error(result.stdout.decode(sys.stdout.encoding))
raise TestFailed("command {} failed".format(command_line))
def render_html_summary(status, test_results, output_file):
import jinja2
with open(os.path.join(os.path.dirname(__file__), "results.html.j2")) as fp:
env = jinja2.Environment()
env.filters['passes'] = lambda x: [res for res in x if res == True]
env.filters['fails'] = lambda x: [res for res in x if res != True]
template = env.from_string(fp.read())
html = template.render(
status=status,
date=datetime.datetime.utcnow(),
test_results=test_results
)
with open(output_file, 'w') as fp:
fp.write(html)
def is_feasible(params, test_fixtures):
"""
Checks if the specified parameter and test fixture combination is feasible.
A combination is feasible if none of the test fixture resources appear in
the parameters and if all of the exclusive-use test fixture resources are
only used by one test fixture.
"""
exclusive_tf_resources = []
shared_tf_resources = set()
for r, ex in [(r, ex) for tf in test_fixtures if not tf is None for r, ex in tf.get_resources() if not r is None]:
if ex:
exclusive_tf_resources.append(r)
else:
shared_tf_resources.add(r)
if len(exclusive_tf_resources + list(shared_tf_resources)) > len(set(exclusive_tf_resources).union(shared_tf_resources)):
return False # At least one exclusive-use resource is used twice in the test fixtures
if len(set(exclusive_tf_resources).union(shared_tf_resources).intersection(params)):
return False # At least one test fixture resource appears in the params too
if len(shared_tf_resources.intersection(params)):
return False # At least one test fixture resource appears in the params too
return True
def run(tests):
test_results = []
if not isinstance(tests, list):
tests = [tests]
for test in tests:
# The result of get_test_cases must be a generator or a list of which
# each item is one of the following:
# - A tuple, each element of which is to be passed as an argument to
# the run_test() function, except the last argument. The last
# argument must be a TestFixture object.
# - An AnyTestCase object. In this case, each of the alternatives in
# the test case must follow the tuple form described above.
#
# All of the provided test-cases are executed. If none is provided,
# a warning is reported. A warning is also reported if for a particular
# test case no alternative is feasible (e.g. because there are component
# conflicts).
logger.debug("loading...")
test_cases = list(test.get_test_cases(testrig))
test_name = type(test).__name__
test_case_results = []
test_results.append((test_name, test_case_results))
if len(test_cases) == 0:
logger.warn(f'no test cases are available to conduct the test {test_name}')
continue
for test_case in test_cases:
# Flatten all test case options and test fixture options into a list of candidates
candidates = []
for candidate in test_case.alternatives if isinstance(test_case, AnyTestCase) else [test_case]:
test_fixture = candidate[-1]
assert(isinstance(test_fixture, TestFixture) or test_fixture is None)
if isinstance(test_fixture, CompositeTestFixture):
candidates += [tuple(candidate[:-1]) + (tf,) for tf in test_fixture._subfixtures]
else:
candidates.append(tuple(candidate[:-1]) + (([] if test_fixture is None else [test_fixture]),))
# Select the first candidate that is feasible
params, test_fixture = (None, None)
for candidate in candidates:
if is_feasible(candidate[:-1], candidate[-1]):
params, test_fixture = (candidate[:-1], candidate[-1])
break
if params is None:
logger.warn(f'I found a {type(test).__name__} test case with {len(candidates)} possible parameter combination candidates but none of them is feasible.')
continue
logger.notify('* preparing {} with {}...'.format(type(test).__name__,
[(testrig.get_component_name(p) if isinstance(p, Component) else str(p)) for p in params]))
# Prepare all components
# TODO: less hardcoded priority assignment
teensies = set()
for param in params:
#if isinstance(param, ProxiedComponent):
# continue
if hasattr(param, 'prepare'):
param.prepare(logger)
teensies = set()
for tf in test_fixture:
if isinstance(tf, ClosedLoopControlFixture):
continue
tf.prepare(logger)
if isinstance(tf, TeensyForwardingFixture):
teensies.add(tf.teensy)
# Post-prepare step required if teensy-forwarding is involved
for teensy in teensies:
teensy.commit_routing_config(logger)
for tf in test_fixture:
if not isinstance(tf, ClosedLoopControlFixture):
continue
tf.prepare(logger)
logger.notify('* running {} on {}...'.format(type(test).__name__,
[(testrig.get_component_name(p) if isinstance(p, Component) else str(p)) for p in params]))
try:
test.run_test(*params, logger)
test_case_results.append(True)
except Exception as ex:
traceback.print_exc()
test_case_results.append(ex)
if args.html:
render_html_summary('running...', test_results, args.html)
if args.html:
render_html_summary('finished', test_results, args.html)
passes = [res for t in test_results for res in t[1] if res == True]
fails = [res for t in test_results for res in t[1] if res != True]
if len(fails):
logger.error(f'{len(fails)} out of {len(fails) + len(passes)} test cases failed.')
else:
logger.success('All tests passed!')
shutdown_token.set()
return test_results
# Load test engine ------------------------------------------------------------#
# Parse arguments
parser = argparse.ArgumentParser(description='ODrive automated test tool\n')
parser.add_argument("--ignore", metavar='DEVICE', action='store', nargs='+',
help="Ignore (disable) one or more components of the test rig")
# TODO: implement
parser.add_argument("--test-rig-yaml", type=argparse.FileType('r'),
help="Test rig YAML file. Can be omitted if the environment variable ODRIVE_TEST_RIG_NAME is set.")
parser.add_argument("--setup-host", action='store_true', default=False,
help="configure operating system functions such as GPIOs (requires root)")
parser.add_argument("--all", action='store_true', default=False,
help="Run all tests in the test runner's directory")
parser.add_argument("--html", type=str,
help="If provided, the an HTML summary is written to the specified file.")
parser.set_defaults(ignore=[])
args = parser.parse_args()
if args.test_rig_yaml is None:
test_rig_name = os.environ.get('ODRIVE_TEST_RIG_NAME', '')
if test_rig_name == '':
print("You must either provide a --test-rig-yaml argument or set the environment variable ODRIVE_TEST_RIG_NAME.")
sys.exit(1)
path = os.path.join(os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__)))), test_rig_name + '.yaml')
args.test_rig_yaml = open(path, 'r')
# Load objects
test_rig_yaml = yaml.load(args.test_rig_yaml, Loader=yaml.BaseLoader)
logger = Logger()
testrig = TestRig(test_rig_yaml, logger)
shutdown_token = fibre.Event()
if args.setup_host:
for gpio in testrig.get_components(LinuxGpioComponent):
num = gpio.num
logger.debug('exporting GPIO ' + str(num) + ' to user space...')
if not os.path.isdir("/sys/class/gpio/gpio{}".format(num)):
with open("/sys/class/gpio/export", "w") as fp:
fp.write(str(num))
os.chmod("/sys/class/gpio/gpio{}/value".format(num), stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
os.chmod("/sys/class/gpio/gpio{}/direction".format(num), stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
for port in testrig.get_components(SerialPortComponent):
logger.debug('changing permissions on ' + port.yaml['port'] + '...')
os.chmod(port.yaml['port'], stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
if len(list(testrig.get_components(TeensyComponent))):
# This breaks the annoying teensy loader that shows up on every compile
logger.debug('modifying teensyduino installation...')
if not os.path.isfile('/usr/share/arduino/hardware/tools/teensy_post_compile_old'):
os.rename('/usr/share/arduino/hardware/tools/teensy_post_compile', '/usr/share/arduino/hardware/tools/teensy_post_compile_old')
with open('/usr/share/arduino/hardware/tools/teensy_post_compile', 'w') as scr:
scr.write('#!/usr/bin/env bash\n')
scr.write('if [ "$ARDUINO_COMPILE_DESTINATION" != "" ]; then\n')
scr.write(' cp -r ${2#-path=}/*.ino.hex ${ARDUINO_COMPILE_DESTINATION}\n')
scr.write('fi\n')
os.chmod('/usr/share/arduino/hardware/tools/teensy_post_compile', stat.S_IRWXU | stat.S_IRGRP | stat.S_IXGRP | stat.S_IROTH | stat.S_IXOTH)
# Bring up CAN interface(s)
for intf in testrig.get_components(CanInterfaceComponent):
name = intf.yaml['interface']
path = intf.yaml.get('path', None)
logger.debug('bringing up {}...'.format(name))
run_shell('ip link set dev {} down'.format(name), logger)
    if path is not None:
run_shell(f'slcand -o -c -s5 \'{path}\' {name}', logger)
else:
run_shell('ip link set dev {} type can bitrate 250000'.format(name), logger)
run_shell('ip link set dev {} type can loopback off'.format(name), logger)
run_shell('ip link set dev {} up'.format(name), logger)
if __name__ == '__main__':
test_scripts = []
tests = []
if args.all:
test_scripts = [file for file in os.listdir(os.path.dirname(__file__))
if file.lower().endswith("_test.py")]
for script in test_scripts:
import importlib.util
spec = importlib.util.spec_from_file_location("test_module", script)
test_module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(test_module)
        if not hasattr(test_module, 'tests') or not isinstance(test_module.tests, list):
            logger.error(f"{script} does not have a list named `tests`")
            continue
        tests += test_module.tests
logger.notify(f"found {len(tests)} tests in {len(test_scripts)} modules")
if any(tests):
test_results = test_module.test_runner.run(tests)
|
|
# Copyright (c) 2016 Santosh Philip
# =======================================================================
# Distributed under the MIT License.
# (See accompanying file LICENSE or copy at
# http://opensource.org/licenses/MIT)
# =======================================================================
"""py.test for idd_index"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from eppy.EPlusInterfaceFunctions import iddindex
commdct = [[{u'format': [u'singleLine'],
u'group': u'Simulation Parameters',
u'idfobj': u'Version',
u'memo': [u'Specifies the EnergyPlus version of the IDF file.'],
u'unique-object': [u'']},
{u'default': [u'7.0'],
u'field': [u'Version Identifier'],
u'required-field': [u'']}],
[{u'group': u'Simulation Parameters',
u'idfobj': u'Building',
u'memo': [u'Describes parameters that are used during the simulation',
u'of the building. There are necessary correlations between the entries for',
u'this object and some entries in the Site:WeatherStation and',
u'Site:HeightVariation objects, specifically the Terrain field.'],
u'min-fields': [u'8'],
u'required-object': [u''],
u'unique-object': [u'']},
{u'default': [u'NONE'],
u'field': [u'Name'],
u'required-field': [u''],
u'retaincase': [u'']},
{u'default': [u'0.0'],
u'field': [u'North Axis'],
u'note': [u'degrees from true North'],
u'type': [u'real'],
u'units': [u'deg']}],
[{u'format': [u'vertices'],
u'group': u'Thermal Zones and Surfaces',
u'idfobj': u'Zone',
u'memo': [u'Defines a thermal zone of the building.']},
{u'field': [u'Name'],
u'reference': [u'ZoneNames',
u'OutFaceEnvNames',
u'ZoneAndZoneListNames',
u'AirflowNetworkNodeAndZoneNames'],
u'required-field': [u''],
u'type': [u'alpha']},
{u'default': [u'0'],
u'field': [u'Direction of Relative North'],
u'type': [u'real'],
u'units': [u'deg']},
],
[{u'extensible:3': [u'-- duplicate last set of x,y,z coordinates (last 3 fields), remembering to remove ; from "inner" fields.'],
u'format': [u'vertices'],
u'group': u'Thermal Zones and Surfaces',
u'idfobj': u'BuildingSurface:Detailed',
u'memo': [u'Allows for detailed entry of building heat transfer surfaces. Does not include subsurfaces such as windows or doors.'],
u'min-fields': [u'19']},
{u'field': [u'Name'],
u'reference': [u'SurfaceNames',
u'SurfAndSubSurfNames',
u'AllHeatTranSurfNames',
u'HeatTranBaseSurfNames',
u'OutFaceEnvNames',
u'AllHeatTranAngFacNames',
u'RadGroupAndSurfNames',
u'SurfGroupAndHTSurfNames',
u'AllShadingAndHTSurfNames'],
u'required-field': [u''],
u'type': [u'alpha']},
{u'field': [u'Surface Type'],
u'key': [u'Floor', u'Wall', u'Ceiling', u'Roof'],
u'required-field': [u''],
u'type': [u'choice']},
],
[{u'format': [u'vertices'],
u'group': u'Thermal Zones and Surfaces',
u'idfobj': u'FenestrationSurface:Detailed',
u'memo': [u'Allows for detailed entry of subsurfaces',
u'(windows, doors, glass doors, tubular daylighting devices).'],
u'min-fields': [u'19']},
{u'field': [u'Name'],
u'reference': [u'SubSurfNames',
u'SurfAndSubSurfNames',
u'AllHeatTranSurfNames',
u'OutFaceEnvNames',
u'AllHeatTranAngFacNames',
u'RadGroupAndSurfNames',
u'SurfGroupAndHTSurfNames',
u'AllShadingAndHTSurfNames'],
u'required-field': [u''],
u'type': [u'alpha']},
{u'field': [u'Surface Type'],
u'key': [u'Window',
u'Door',
u'GlassDoor',
u'TubularDaylightDome',
u'TubularDaylightDiffuser'],
u'required-field': [u''],
u'type': [u'choice']},
],
[{u'group': u'Thermal Zones and Surfaces',
u'idfobj': u'Wall:Exterior',
u'memo': [u'Allows for simplified entry of exterior walls.',
u'View Factor to Ground is automatically calculated.']},
{u'field': [u'Name'],
u'reference': [u'SurfaceNames',
u'SurfAndSubSurfNames',
u'AllHeatTranSurfNames',
u'HeatTranBaseSurfNames',
u'AllHeatTranAngFacNames',
u'RadGroupAndSurfNames',
u'SurfGroupAndHTSurfNames',
u'AllShadingAndHTSurfNames'],
u'required-field': [u''],
u'type': [u'alpha']},
{u'field': [u'Construction Name'],
u'note': [u'To be matched with a construction in this input file'],
u'object-list': [u'ConstructionNames'],
u'required-field': [u''],
u'type': [u'object-list']}],
[{u'group': u'Thermal Zones and Surfaces',
u'idfobj': u'Window',
u'memo': [u'Allows for simplified entry of Windows.']},
{u'field': [u'Name'],
u'reference': [u'SubSurfNames',
u'SurfAndSubSurfNames',
u'AllHeatTranSurfNames',
u'OutFaceEnvNames',
u'AllHeatTranAngFacNames',
u'RadGroupAndSurfNames',
u'SurfGroupAndHTSurfNames',
u'AllShadingAndHTSurfNames'],
u'required-field': [u''],
u'type': [u'alpha']},
{u'field': [u'Construction Name'],
u'note': [u'To be matched with a construction in this input file'],
u'object-list': [u'ConstructionNames'],
u'required-field': [u''],
u'type': [u'object-list']}]
]
def test_makename2refdct():
"""py.test for makename2refdct"""
# do a simple test
thedata = (
(
{'ZONE':['Z1', 'Z2'], },
[
[{'idfobj':'zone'}, {'field':['Name'], 'reference':['Z1', 'Z2']}],
]
), # expected, simpledct
(
{'ZONE':['Z1', 'Z2'], 'WALL':['W1', 'W2']},
[
[{'idfobj':'zone'}, {'field':['Name'], 'reference':['Z1', 'Z2']}],
[{'idfobj':'wall'}, {'field':['Name'], 'reference':['W1', 'W2']}],
]
), # expected, simpledct
(
{'ZONE':['Z1', 'Z2'], 'WALL':['W1', 'W2']},
[
[{'idfobj':'zone'}, {'field':['Name'], 'reference':['Z1', 'Z2']}],
[{'idfobj':'wall'}, {'field':['Name'], 'reference':['W1', 'W2']}],
[], # put in random stuff
]
), # expected, simpledct
(
{'WALL':['W1', 'W2']},
[
[{'idfobj':'zone'}, {'field':['notName'], 'reference':['Z1', 'Z2']}],
[{'idfobj':'wall'}, {'field':['Name'], 'reference':['W1', 'W2']}],
[], # put in random stuff
]
), # expected, simpledct
(
{},
[
[{'idfobj':'zone'}, {'field':['notName'], 'reference':['Z1', 'Z2']}],
[{'idfobj':'wall'}, {'field':['Name'], 'noreference':['W1', 'W2']}],
[], # put in random stuff
]
), # expected, simpledct
)
for expected, simpledct in thedata:
result = iddindex.makename2refdct(simpledct)
assert result == expected
# the test with real data
expected = {
'ZONE':[u'ZoneNames',
u'OutFaceEnvNames',
u'ZoneAndZoneListNames',
u'AirflowNetworkNodeAndZoneNames'],
'WINDOW':[u'SubSurfNames',
u'SurfAndSubSurfNames',
u'AllHeatTranSurfNames',
u'OutFaceEnvNames',
u'AllHeatTranAngFacNames',
u'RadGroupAndSurfNames',
u'SurfGroupAndHTSurfNames',
u'AllShadingAndHTSurfNames'],
'WALL:EXTERIOR':[u'SurfaceNames',
u'SurfAndSubSurfNames',
u'AllHeatTranSurfNames',
u'HeatTranBaseSurfNames',
u'AllHeatTranAngFacNames',
u'RadGroupAndSurfNames',
u'SurfGroupAndHTSurfNames',
u'AllShadingAndHTSurfNames'],
'FENESTRATIONSURFACE:DETAILED':[u'SubSurfNames',
u'SurfAndSubSurfNames',
u'AllHeatTranSurfNames',
u'OutFaceEnvNames',
u'AllHeatTranAngFacNames',
u'RadGroupAndSurfNames',
u'SurfGroupAndHTSurfNames',
u'AllShadingAndHTSurfNames'],
'BUILDINGSURFACE:DETAILED':[u'SurfaceNames',
u'SurfAndSubSurfNames',
u'AllHeatTranSurfNames',
u'HeatTranBaseSurfNames',
u'OutFaceEnvNames',
u'AllHeatTranAngFacNames',
u'RadGroupAndSurfNames',
u'SurfGroupAndHTSurfNames',
u'AllShadingAndHTSurfNames'],
}
result = iddindex.makename2refdct(commdct)
assert result == expected
def test_makeref2namesdct():
"""pytest for makeref2namesdct"""
thedata = (
(
{
'wall':['surface', 'surfandsubsurf'],
'roof':['surface', 'surfandsubsurf'],
'window':['surfandsubsurf', 'subsurf'],
'skylight':['surfandsubsurf', 'subsurf'],
'zone':['zname',]
},
{
'surface':set(['wall', 'roof']),
'subsurf':set(['window', 'skylight']),
'surfandsubsurf':set(['wall', 'roof', 'window', 'skylight']),
'zname':set(['zone']),
}
), # name2refdct, expected
(
{
'ZONE':[u'ZoneNames',
u'OutFaceEnvNames',
u'ZoneAndZoneListNames',
u'AirflowNetworkNodeAndZoneNames'],
'WINDOW':[u'SubSurfNames',
u'SurfAndSubSurfNames',
u'AllHeatTranSurfNames',
u'OutFaceEnvNames',
u'AllHeatTranAngFacNames',
u'RadGroupAndSurfNames',
u'SurfGroupAndHTSurfNames',
u'AllShadingAndHTSurfNames'],
'WALL:EXTERIOR':[u'SurfaceNames',
u'SurfAndSubSurfNames',
u'AllHeatTranSurfNames',
u'HeatTranBaseSurfNames',
u'AllHeatTranAngFacNames',
u'RadGroupAndSurfNames',
u'SurfGroupAndHTSurfNames',
u'AllShadingAndHTSurfNames'],
'FENESTRATIONSURFACE:DETAILED':[u'SubSurfNames',
u'SurfAndSubSurfNames',
u'AllHeatTranSurfNames',
u'OutFaceEnvNames',
u'AllHeatTranAngFacNames',
u'RadGroupAndSurfNames',
u'SurfGroupAndHTSurfNames',
u'AllShadingAndHTSurfNames'],
'BUILDINGSURFACE:DETAILED':[u'SurfaceNames',
u'SurfAndSubSurfNames',
u'AllHeatTranSurfNames',
u'HeatTranBaseSurfNames',
u'OutFaceEnvNames',
u'AllHeatTranAngFacNames',
u'RadGroupAndSurfNames',
u'SurfGroupAndHTSurfNames',
u'AllShadingAndHTSurfNames'],
},
{u'AirflowNetworkNodeAndZoneNames': {'ZONE'},
u'AllHeatTranAngFacNames': {'BUILDINGSURFACE:DETAILED',
'FENESTRATIONSURFACE:DETAILED',
'WALL:EXTERIOR',
'WINDOW'},
u'AllHeatTranSurfNames': {'BUILDINGSURFACE:DETAILED',
'FENESTRATIONSURFACE:DETAILED',
'WALL:EXTERIOR',
'WINDOW'},
u'AllShadingAndHTSurfNames': {'BUILDINGSURFACE:DETAILED',
'FENESTRATIONSURFACE:DETAILED',
'WALL:EXTERIOR',
'WINDOW'},
u'HeatTranBaseSurfNames': {'BUILDINGSURFACE:DETAILED', 'WALL:EXTERIOR'},
u'OutFaceEnvNames': {'BUILDINGSURFACE:DETAILED',
'FENESTRATIONSURFACE:DETAILED',
'WINDOW',
'ZONE'},
u'RadGroupAndSurfNames': {'BUILDINGSURFACE:DETAILED',
'FENESTRATIONSURFACE:DETAILED',
'WALL:EXTERIOR',
'WINDOW'},
u'SubSurfNames': {'FENESTRATIONSURFACE:DETAILED', 'WINDOW'},
u'SurfAndSubSurfNames': {'BUILDINGSURFACE:DETAILED',
'FENESTRATIONSURFACE:DETAILED',
'WALL:EXTERIOR',
'WINDOW'},
u'SurfGroupAndHTSurfNames': {'BUILDINGSURFACE:DETAILED',
'FENESTRATIONSURFACE:DETAILED',
'WALL:EXTERIOR',
'WINDOW'},
u'SurfaceNames': {'BUILDINGSURFACE:DETAILED', 'WALL:EXTERIOR'},
u'ZoneAndZoneListNames': {'ZONE'},
u'ZoneNames': {'ZONE'}}
), # name2refdct, expected
)
for name2refdct, expected in thedata:
result = iddindex.makeref2namesdct(name2refdct)
assert result == expected
def test_ref2names2commdct():
"""py.test for ref2names2commdct"""
thedata = (
(
# ------------
[
[
{'idfobj':'referedto1'},
{
'field':['Name'],
'reference':['rname11', 'rname12', 'rname_both'],
},
],
[
{'idfobj':'referedto2'},
{
'field':['Name'],
'reference':['rname21', 'rname22', 'rname_both'],
},
],
[
{'idfobj':'referingobj1'},
{'field':['Name']},
{
'field':['referingfield'],
'type':['object-list'],
'object-list':['rname11'],
}
],
[
{'idfobj':'referingobj2'},
{'field':['Name']},
{
'field':['referingfield'],
'type':['object-list'],
'object-list':['rname_both'],
}
],
],
# ------------
[
[
{'idfobj':'referedto1'},
{
'field':['Name'],
'reference':['rname11', 'rname12', 'rname_both'],
},
],
[
{'idfobj':'referedto2'},
{
'field':['Name'],
'reference':['rname21', 'rname22', 'rname_both'],
},
],
[
{'idfobj':'referingobj1'},
{'field':['Name']},
{
'field':['referingfield'],
'type':['object-list'],
'object-list':['rname11'],
'validobjects':set(['referedto1'.upper()]),
}
],
[
{'idfobj':'referingobj2'},
{'field':['Name']},
{
'field':['referingfield'],
'type':['object-list'],
'object-list':['rname_both'],
'validobjects':set(['REFEREDTO1', 'REFEREDTO2'])
}
],
],
), # commdct, expected
)
for commdct, expected in thedata:
name2refdct = iddindex.makename2refdct(commdct)
ref2names = iddindex.makeref2namesdct(name2refdct)
result = iddindex.ref2names2commdct(ref2names, commdct)
for r_item, e_item in zip(result, expected):
assert r_item == e_item
        # the test below is to ensure that the embedded data is not a copy,
# but is pointing to the set in ref2names
for item in r_item:
try:
reference = item['object-list'][0]
validobjects = item['validobjects']
assert id(ref2names[reference]) == id(validobjects)
except KeyError as e:
continue
|
|
'''
.. currentmodule:: skrf.util
========================================
util (:mod:`skrf.util`)
========================================
Holds utility functions that are general conveniences.
General
------------
.. autosummary::
:toctree: generated/
now_string
find_nearest
find_nearest_index
get_fid
get_extn
'''
from . import mathFunctions as mf
import matplotlib as mpl
import warnings
import os, fnmatch
try:
import cPickle as pickle
except ImportError:
import pickle as pickle
import pylab as plb
import numpy as npy
from scipy.constants import mil
from datetime import datetime
import collections, pprint
from subprocess import Popen,PIPE
# globals
# other
def now_string():
'''
returns a unique sortable string, representing the current time
nice for generating date-time stamps to be used in file-names,
the companion function :func:`now_string_2_dt` can be used
to read these string back into datetime objects.
See Also
------------
now_string_2_dt
'''
return datetime.now().__str__().replace('-','.').replace(':','.').replace(' ','.')
def now_string_2_dt(s):
'''
Converts the output of :func:`now_string` to a datetime object.
See Also
-----------
now_string
'''
return datetime(*[int(k) for k in s.split('.')])
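# Example round trip (illustration):
#   >>> s = now_string()      # e.g. '2021.03.01.12.30.05.123456'
#   >>> now_string_2_dt(s)
#   datetime.datetime(2021, 3, 1, 12, 30, 5, 123456)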
def find_nearest(array,value):
'''
find nearest value in array.
taken from http://stackoverflow.com/questions/2566412/find-nearest-value-in-numpy-array
Parameters
----------
array : numpy.ndarray
array we are searching for a value in
value : element of the array
value to search for
Returns
--------
found_value : an element of the array
the value that is numerically closest to `value`
'''
idx=(npy.abs(array-value)).argmin()
return array[idx]
def find_nearest_index(array,value):
'''
find nearest value in array.
Parameters
----------
array : numpy.ndarray
array we are searching for a value in
value : element of the array
value to search for
Returns
--------
found_index : int
the index at which the numerically closest element to `value`
was found at
taken from http://stackoverflow.com/questions/2566412/find-nearest-value-in-numpy-array
'''
return (npy.abs(array-value)).argmin()
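# Example (illustration):
#   >>> a = npy.array([0.0, 0.5, 1.0, 1.5])
#   >>> find_nearest(a, 0.7)
#   0.5
#   >>> find_nearest_index(a, 0.7)
#   1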
def slice_domain(x,domain):
'''
Returns a slice object closest to the `domain` of `x`
domain = x[slice_domain(x, (start, stop))]
Parameters
-----------
    x : array-like
an array of values
domain : tuple
tuple of (start,stop) values defining the domain over
which to slice
Examples
-----------
>>> x = linspace(0,10,101)
>>> idx = slice_domain(x, (2,6))
>>> x[idx]
'''
start = find_nearest_index(x, domain[0])
stop = find_nearest_index(x, domain[1])
return slice(start,stop+1)
# file IO
def get_fid(file, *args, **kwargs):
'''
Returns a file object, given a filename or file object
Useful when you want to allow the arguments of a function to
be either files or filenames
Parameters
-------------
file : str or file-object
file to open
\*args, \*\*kwargs : arguments and keyword arguments
passed through to pickle.load
'''
if isinstance(file, str):
return open(file, *args, **kwargs)
else:
return file
def get_extn(filename):
'''
Get the extension from a filename.
    The extension is defined as everything past the last '.'.
    Returns None if there is no extension.
Parameters
------------
filename : string
the filename
Returns
--------
ext : string, None
either the extension (not including '.') or None if there
isn't one
'''
ext = os.path.splitext(filename)[-1]
if len(ext)==0:
return None
else:
return ext[1:]
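# Example (illustration):
#   >>> get_extn('measurement.s2p')
#   's2p'
#   >>> get_extn('README') is None
#   True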
def basename_noext(filename):
'''
gets the basename and strips extension
'''
return os.path.splitext(os.path.basename(filename))[0]
# git
def git_version( modname):
'''
    Returns the output of 'git describe', executed in a module's root directory.
'''
mod = __import__(modname)
mod_dir =os.path.split(mod.__file__)[0]
p = Popen(['git', 'describe'], stdout = PIPE,stderr=PIPE, cwd =mod_dir )
try:
out,er = p.communicate()
except(OSError):
return None
out = out.strip('\n')
if out == '':
return None
return out
def stylely(rc_dict={}, style_file = 'skrf.mplstyle'):
'''
loads the rc-params from the specified file (file must be located in skrf/data)
'''
from skrf.data import pwd # delayed to solve circular import
rc = mpl.rc_params_from_file(os.path.join(pwd, style_file))
mpl.rcParams.update(rc)
mpl.rcParams.update(rc_dict)
def dict_2_recarray(d, delim, dtype):
'''
Turns a dictionary of structured keys to a record array of objects
    This is useful if you save database-like metadata in the form
    of file-naming conventions, aka 'the poor man's database'.
Examples
-------------
given a directory of networks like:
>>> ls
a1,0.0,0.0.s1p a1,3.0,3.0.s1p a2,3.0,-3.0.s1p b1,-3.0,3.0.s1p
...
    you can sort based on the values of each field, after defining their
type with `dtype`. The `values` field accesses the objects.
>>>d =rf.ran('/tmp/' )
>>>delim =','
>>>dtype = [('name', object),('voltage',float),('current',float)]
>>>ra = dict_2_recarray(d=rf.ran(dir), delim=delim, dtype =dtype)
then you can sift like you do with numpy arrays
>>>ra[ra['voltage']<3]['values']
array([1-Port Network: 'a2,0.0,-3.0', 450-800 GHz, 101 pts, z0=[ 50.+0.j],
1-Port Network: 'b1,0.0,3.0', 450-800 GHz, 101 pts, z0=[ 50.+0.j],
1-Port Network: 'a1,0.0,-3.0', 450-800 GHz, 101 pts, z0=[ 50.+0.j],
'''
split_keys = [tuple(k.split(delim)+[d[k]]) for k in d.keys()]
x = npy.array(split_keys, dtype=dtype+[('values',object)])
return x
def findReplace(directory, find, replace, filePattern):
'''
Find/replace some txt in all files in a directory, recursively
This was found in [1]_.
Examples
-----------
findReplace("some_dir", "find this", "replace with this", "*.txt")
.. [1] http://stackoverflow.com/questions/4205854/python-way-to-recursively-find-and-replace-string-in-text-files
'''
for path, dirs, files in os.walk(os.path.abspath(directory)):
for filename in fnmatch.filter(files, filePattern):
filepath = os.path.join(path, filename)
with open(filepath) as f:
s = f.read()
s = s.replace(find, replace)
with open(filepath, "w") as f:
f.write(s)
# general purpose objects
class HomoList(collections.Sequence):
'''
A Homogeneous Sequence
Provides a class for a list-like object which contains
homogeneous values. Attributes of the values can be accessed through
the attributes of HomoList. Searching is done like numpy arrays.
Initialized from a list of all the same type
    >>> h = HomoList([Foo(...), Foo(...)])
    The individual values of `h` can be accessed in identical fashion to
    lists.
>>> h[0]
Assuming that `Foo` has property `prop` and function `func` ...
Access elements' properties:
>>> h.prop
Access elements' functions:
>>> h.func()
Searching:
>>> h[h.prop == value]
>>> h[h.prop < value]
Multiple search:
>>> h[set(h.prop==value1) & set( h.prop2==value2)]
Combos:
>>> h[h.prop==value].func()
'''
def __init__(self, list_):
self.store = list(list_)
def __eq__(self, value):
return [k for k in range(len(self)) if self.store[k] == value ]
def __ne__(self, value):
return [k for k in range(len(self)) if self.store[k] != value ]
def __gt__(self, value):
return [k for k in range(len(self)) if self.store[k] > value ]
def __ge__(self, value):
return [k for k in range(len(self)) if self.store[k] >= value ]
def __lt__(self, value):
return [k for k in range(len(self)) if self.store[k] < value ]
def __le__(self, value):
return [k for k in range(len(self)) if self.store[k] <= value ]
def __getattr__(self, name):
return self.__class__(
[k.__getattribute__(name) for k in self.store])
def __getitem__(self, idx):
try:
return self.store[idx]
except(TypeError):
return self.__class__([self.store[k] for k in idx])
def __call__(self, *args, **kwargs):
return self.__class__(
[k(*args,**kwargs) for k in self.store])
def __setitem__(self, idx, value):
self.store[idx] = value
def __delitem__(self, idx):
del self.store[idx]
def __iter__(self):
return iter(self.store)
def __len__(self):
return len(self.store)
def __str__(self):
return pprint.pformat(self.store)
def __repr__(self):
return pprint.pformat(self.store)
class HomoDict(collections.MutableMapping):
'''
A Homogeneous Mutable Mapping
Provides a class for a dictionary-like object which contains
homogeneous values. Attributes of the values can be accessed through
the attributes of HomoDict. Searching is done like numpy arrays.
Initialized from a dictionary containing values of all the same type
>>> h = HomoDict({'a':Foo(...),'b': Foo(...), 'c':Foo(..)})
    The individual values of `h` can be accessed in identical fashion to
Dictionaries.
>>> h['key']
Assuming that `Foo` has property `prop` and function `func` ...
Access elements' properties:
>>> h.prop
Access elements' functions:
>>> h.func()
Searching:
>>> h[h.prop == value]
>>> h[h.prop < value]
Multiple search:
>>> h[set(h.prop==value1) & set( h.prop2==value2)]
Combos:
>>> h[h.prop==value].func()
'''
def __init__(self, dict_):
self.store = dict(dict_)
def __eq__(self, value):
return [k for k in self.store if self.store[k] == value ]
def __ne__(self, value):
return [k for k in self.store if self.store[k] != value ]
def __gt__(self, value):
return [k for k in self.store if self.store[k] > value ]
def __ge__(self, value):
return [k for k in self.store if self.store[k] >= value ]
def __lt__(self, value):
return [k for k in self.store if self.store[k] < value ]
def __le__(self, value):
return [k for k in self.store if self.store[k] <= value ]
def __getattr__(self, name):
return self.__class__(
{k: getattr(self.store[k],name) for k in self.store})
def __getitem__(self, key):
if isinstance(key, str):
return self.store[key]
else:
c = self.__class__({k:self.store[k] for k in key})
return c
#if len(c) == 1:
# return c.store.values()[0]
#else:
# return c
def __call__(self, *args, **kwargs):
return self.__class__(
{k: self.store[k](*args, **kwargs) for k in self.store})
def __setitem__(self, key, value):
self.store[key] = value
def __delitem__(self, key):
del self.store[key]
def __iter__(self):
return iter(self.store)
def __len__(self):
return len(self.store)
def __str__(self):
return pprint.pformat(self.store)
def __repr__(self):
return pprint.pformat(self.store)
def copy(self):
return HomoDict(self.store)
def filter_nones(self):
self.store = {k:self.store[k] for k in self.store \
if self.store[k] is not None}
def filter(self, **kwargs):
'''
Filter self based on kwargs
This is equivalent to:
>>> h = HomoDict(...)
>>> for k in kwargs:
>>> h = h[k ==kwargs[k]]
>>> return h
prefixing the kwarg value with a '!' causes a not equal test (!=)
Examples
----------
>>> h = HomoDict(...)
>>> h.filter(name='jean', age = '18', gender ='!female')
'''
a = self
for k in kwargs:
if kwargs[k][0] == '!':
a = a[a.__getattr__(k) != kwargs[k][1:]]
else:
a = a[a.__getattr__(k) == kwargs[k]]
return a
|
|
from copy import copy
from inspect import getmembers
from warnings import warn
from makefun import wraps, with_signature
try:
from inspect import signature, Parameter, Signature
except ImportError:
from funcsigs import signature, Parameter, Signature
try:
from typing import Any, Tuple, Callable, Union, TypeVar, Iterable, Dict
try:
from typing import Type
except ImportError:
pass
T = TypeVar('T')
except ImportError:
pass
from decopatch import DECORATED, function_decorator, class_decorator
from autoclass.utils import check_known_decorators, AUTO, read_fields_from_init, DuplicateOverrideError
__GETTER_OVERRIDE_ANNOTATION = '__getter_override__'
__SETTER_OVERRIDE_ANNOTATION = '__setter_override__'
class IllegalGetterSignatureException(Exception):
""" This is raised whenever an overridden getter has an illegal signature"""
class IllegalSetterSignatureException(Exception):
""" This is raised whenever an overridden setter has an illegal signature"""
@class_decorator
def autoprops(include=None, # type: Union[str, Tuple[str]]
exclude=None, # type: Union[str, Tuple[str]]
cls=DECORATED):
"""
    A decorator to automatically generate all property getters and setters from the class constructor.
    * if a @contract annotation exists on the __init__ method, mentioning a contract for a given parameter, the
      parameter contract will be added to the generated setter method
* The user may override the generated getter and/or setter by creating them explicitly in the class and annotating
them with @getter_override or @setter_override. Note that the contract will still be dynamically added on the
setter, even if the setter already has one (in such case a `UserWarning` will be issued)
:param include: a tuple of explicit attribute names to include (None means all)
:param exclude: a tuple of explicit attribute names to exclude. In such case, include should be None.
:return:
"""
return autoprops_decorate(cls, include=include, exclude=exclude)
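# Usage sketch (hypothetical class, illustration only): @autoprops generates one
# read/write property per constructor argument, backed by a private '_<name>' field,
# so assignments in __init__ and on instances go through the generated setters.
#
#   @autoprops
#   class HouseConfiguration(object):
#       def __init__(self, name, surface=100):
#           self.name = name
#           self.surface = surface
#
#   h = HouseConfiguration('mine')
#   h.surface = 150   # uses the generated setter (and any contract/validator on it)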
def autoprops_decorate(cls, # type: Type[T]
include=None, # type: Union[str, Tuple[str]]
exclude=None # type: Union[str, Tuple[str]]
):
# type: (...) -> Type[T]
"""
    To automatically generate all property getters and setters from the class constructor manually, without using
    the @autoprops decorator.
    * if a @contract annotation exists on the __init__ method, mentioning a contract for a given parameter, the
      parameter contract will be added to the generated setter method
* The user may override the generated getter and/or setter by creating them explicitly in the class and annotating
them with @getter_override or @setter_override. Note that the contract will still be dynamically added on the
setter, even if the setter already has one (in such case a `UserWarning` will be issued)
:param cls: the class on which to execute. Note that it won't be wrapped.
:param include: a tuple of explicit attribute names to include (None means all)
:param exclude: a tuple of explicit attribute names to exclude. In such case, include should be None.
:return:
"""
# first check that we do not conflict with other known decorators
check_known_decorators(cls, '@autoprops')
# retrieve and filter the names
init_fun = cls.__init__
selected_names, init_fun_sig = read_fields_from_init(init_fun, include=include, exclude=exclude,
caller="@autoprops")
# perform the class mod
execute_autoprops_on_class(cls, init_fun=init_fun, init_fun_sig=init_fun_sig, prop_names=selected_names)
return cls
def execute_autoprops_on_class(cls, # type: Type[T]
init_fun, # type: Callable
init_fun_sig, # type: Signature
prop_names # type: Iterable[str]
):
"""
This method will automatically add one getter and one setter for each constructor argument, except for those
overridden using autoprops_override_decorate(), @getter_override or @setter_override.
It will add a @contract on top of all setters (generated or overridden, if they don't already have one)
:param cls: the class on which to execute.
:param init_fun:
:param init_fun_sig:
:param prop_names:
:return: nothing (`cls` is modified in-place)
"""
# gather all information required: attribute names, type hints, and potential pycontracts/validators
att_type_hints_and_defaults = {att_name: (init_fun_sig.parameters[att_name].annotation,
init_fun_sig.parameters[att_name].default)
for att_name in prop_names}
pycontracts_dict = init_fun.__contracts__ if hasattr(init_fun, '__contracts__') else {}
valid8ors_dict = init_fun.__validators__ if hasattr(init_fun, '__validators__') else {}
# 1. Retrieve overridden getters/setters and check that there is no one that does not correspond to an attribute
overridden_getters = dict()
overridden_setters = dict()
for m_name, m in getmembers(cls, predicate=callable):
# Overridden getter ?
try:
overriden_getter_att_name = getattr(m, __GETTER_OVERRIDE_ANNOTATION)
except AttributeError:
pass # no annotation
else:
if overriden_getter_att_name not in att_type_hints_and_defaults:
raise AttributeError("Invalid getter function %r: attribute %r was not found in constructor "
"signature." % (m.__name__, overriden_getter_att_name))
elif overriden_getter_att_name in overridden_getters:
raise DuplicateOverrideError("Getter is overridden more than once for attribute name : %s"
% overriden_getter_att_name)
else:
overridden_getters[overriden_getter_att_name] = m
# Overridden setter ?
try:
overriden_setter_att_name = getattr(m, __SETTER_OVERRIDE_ANNOTATION)
except AttributeError:
pass # no annotation
else:
if overriden_setter_att_name not in att_type_hints_and_defaults:
raise AttributeError("Invalid setter function %r: attribute %r was not found in constructor "
"signature." % (m.__name__, overriden_setter_att_name))
elif overriden_setter_att_name in overridden_setters:
raise DuplicateOverrideError("Setter is overridden more than once for attribute name : %s"
% overriden_setter_att_name)
else:
overridden_setters[overriden_setter_att_name] = m
# 2. For each attribute to consider, create the corresponding property and add it to the class
for attr_name, (type_hint, default_value) in att_type_hints_and_defaults.items():
# valid8 validators: create copies, because we will modify them (changing the validated function ref)
if valid8ors_dict is not None and attr_name in valid8ors_dict:
validators = [copy(v) for v in valid8ors_dict[attr_name]]
else:
validators = None
# create and add the property
_add_property(cls, attr_name, type_hint, default_value,
overridden_getter=overridden_getters.get(attr_name, None),
overridden_setter=overridden_setters.get(attr_name, None),
pycontract=pycontracts_dict.get(attr_name, None) if pycontracts_dict is not None else None,
validators=validators)
def _add_property(cls, # type: Type[T]
property_name, # type: str
type_hint, # type: Any
default_value, # type: Any
overridden_getter=None, # type: Callable
overridden_setter=None, # type: Callable
pycontract=None, # type: Any
validators=None # type: Any
):
"""
A method to dynamically add a property to a class with the optional given pycontract or validators.
If the property getter and/or setter have been overridden, it is taken into account too.
:param cls: the class on which to execute.
:param property_name:
:param type_hint:
:param default_value: this is not really needed by property setter/getter but may be used by type checkers to
determine from the signature if something is nonable.
:param pycontract:
:param validators:
:return:
"""
# 1. create the private field name, e.g. '_foobar'
private_property_name = '_%s' % property_name
# 2. property getter (@property) and setter (@property_name.setter) - create or use overridden
getter_fun = _get_getter_fun(cls, property_name, type_hint, private_property_name,
overridden_getter=overridden_getter)
setter_fun, var_name = _get_setter_fun(cls, property_name, type_hint, default_value, private_property_name,
overridden_setter=overridden_setter)
# 3. add the contract to the setter, if any
setter_fun_with_possible_contract = setter_fun
if pycontract is not None:
setter_fun_with_possible_contract = _add_contract_to_setter(setter_fun, var_name, pycontract, property_name)
elif validators is not None:
setter_fun_with_possible_contract = _add_validators_to_setter(setter_fun, var_name, validators, property_name)
# 4. change the function name to make it look nice
# TODO in which case is this really needed ?
setter_fun_with_possible_contract.__name__ = property_name
setter_fun_with_possible_contract.__module__ = cls.__module__
setter_fun_with_possible_contract.__qualname__ = cls.__name__ + '.' + property_name
# __annotations__
# __doc__
# __dict__
# 5. Create the property with getter and setter
# WARNING : property_obj.setter(f) does absolutely nothing :) > we have to assign the result
new_prop = property(fget=getter_fun, fset=setter_fun_with_possible_contract)
# specific for enforce: here we might wrap the overridden setter on which enforce has already written something.
# if hasattr(setter_fun_with_possible_contract, '__enforcer__'):
# new_prop.__enforcer__ = setter_fun_with_possible_contract.__enforcer__
# DESIGN DECISION > although this would probably work, it is probably better to 'force' users to always use the
# @autoprops annotation BEFORE any other annotation. This is now done in autoprops_decorate
# 6. Finally add the property to the class
setattr(cls, property_name, new_prop)
def _has_annotation(annotation, value):
"""
Returns a function that can be used as a predicate in getmembers. Used in _get_getter_fun and _get_setter_fun
"""
def matches_property_name(fun):
""" return true if fun is a callable that has the correct annotation with value """
return callable(fun) and getattr(fun, annotation, None) == value
return matches_property_name
def _get_getter_fun(cls, # type: Type
property_name, # type: str
type_hint, # type: Any
private_property_name, # type: str
overridden_getter=AUTO # type: Callable
):
"""
Utility method to find the overridden getter function for a given property, or generate a new one
:param cls:
:param property_name:
:param type_hint:
:param private_property_name:
:return:
"""
if overridden_getter is AUTO:
# If not provided - look for an overridden getter in the class
overridden_getters = getmembers(cls, predicate=_has_annotation(__GETTER_OVERRIDE_ANNOTATION, property_name))
if len(overridden_getters) > 1:
raise DuplicateOverrideError('Getter is overridden more than once for attribute name : %s' % property_name)
else:
try:
overridden_getter = overridden_getters[0][1]
except IndexError:
pass
if overridden_getter is not None:
# --use the overridden getter found/provided
getter_fun = overridden_getter
try: # python 2 - possibly unbind the function
getter_fun = getter_fun.im_func
except AttributeError:
pass
# --check its signature
s = signature(getter_fun)
if not ('self' in s.parameters.keys() and len(s.parameters.keys()) == 1):
raise IllegalGetterSignatureException("overridden getter '%s' should have 0 non-self arguments, found %s"
% (getter_fun.__name__, s))
else:
# -- generate the getter :
def autoprops_generated_getter(self):
""" generated by `autoprops` - getter for a property """
return getattr(self, private_property_name)
# -- use the generated getter
getter_fun = autoprops_generated_getter
# -- add type hint to output declaration
try:
annotations = getter_fun.__annotations__
except AttributeError:
pass # python 2 - no return type hint
else:
annotations['return'] = type_hint
return getter_fun
def _get_setter_fun(cls, # type: Type
property_name, # type: str
type_hint, # type: Any
default_value, # type: Any
private_property_name, # type: str
overridden_setter=AUTO # type: Callable
):
"""
Utility method to find the overridden setter function for a given property, or generate a new one
:param cls:
:param property_name:
:param type_hint:
:param default_value:
:param private_property_name:
:param overridden_setter: an already found overridden setter to use. If AUTO is provided (default), the class will
be inspected to find it
:return:
"""
if overridden_setter is AUTO:
# If not provided - look for an overridden setter in the class
overridden_setters = getmembers(cls, predicate=_has_annotation(__SETTER_OVERRIDE_ANNOTATION, property_name))
if len(overridden_setters) > 1:
raise DuplicateOverrideError('Setter is overridden more than once for attribute name : %s' % property_name)
else:
try:
overridden_setter = overridden_setters[0][1]
except IndexError:
pass
if overridden_setter is not None:
# --use the overridden setter found/provided
setter_fun = overridden_setter
try: # python 2 - possibly unbind the function
setter_fun = setter_fun.im_func
except AttributeError:
pass
# --find the parameter name and check the signature
s = signature(setter_fun)
p = [attribute_name for attribute_name, param in s.parameters.items() if attribute_name != 'self']
if len(p) != 1:
raise IllegalSetterSignatureException('overridden setter %s should have 1 and only 1 non-self argument, '
'found %s' % (setter_fun.__name__, s))
actual_arg_name = p[0]
else:
# --create the setter: Dynamically compile a wrapper with correct argument name
sig = Signature(parameters=[Parameter('self', kind=Parameter.POSITIONAL_OR_KEYWORD),
Parameter(property_name, kind=Parameter.POSITIONAL_OR_KEYWORD,
annotation=type_hint, default=default_value)])
@with_signature(sig)
def autoprops_generated_setter(self, **kwargs):
""" generated by `autoprops` - setter for a property """
setattr(self, private_property_name, kwargs[property_name])
setter_fun = autoprops_generated_setter
actual_arg_name = property_name
return setter_fun, actual_arg_name
def _add_contract_to_setter(setter_fun, var_name, property_contract, property_name):
"""
Utility function to add a pycontract contract to a setter
:param setter_fun:
:param var_name:
:param property_contract:
:param property_name:
:return:
"""
# 0. check that we can import contracts
try:
# noinspection PyUnresolvedReferences
from contracts import ContractNotRespected, contract
except ImportError as e:
raise Exception('Use of _add_contract_to_setter requires that the PyContracts library is installed. Check that you '
'can \'import contracts\'')
try:
# python 2
setter_fun = setter_fun.im_func
except AttributeError:
pass
# -- check if a contract already exists on the function
if hasattr(setter_fun, '__contracts__'):
try:
qname = str(setter_fun.__qualname__)
except AttributeError:
qname = setter_fun.__name__
msg = "overridden setter for attribute %s implemented by function %s has a contract while there is a " \
"contract already defined for this property in the __init__ constructor. This will lead to " \
"double-contract in the final setter, please remove the one on the overridden setter." \
"" % (property_name, qname)
warn(msg)
# -- add the generated contract
setter_fun_with_possible_contract = contract(setter_fun, **{var_name: property_contract})
# the only thing we can't do is to replace the function's parameter name dynamically in the error messages
# so we wrap the function again to catch the potential pycontracts error :(
@wraps(setter_fun_with_possible_contract)
def _contracts_parser_interceptor(self, *args, **kwargs):
try:
return setter_fun_with_possible_contract(self, *args, **kwargs)
except ContractNotRespected as er:
er.error = er.error.replace('\'val\'', '\'' + property_name + '\'')
raise er
return _contracts_parser_interceptor
def _add_validators_to_setter(setter_fun, var_name, validators, property_name):
"""
Utility function to add valid8 validators to a setter
:param setter_fun:
:param var_name:
:param validators:
:param property_name:
:return:
"""
# 0. check that we can import valid8
try:
# noinspection PyUnresolvedReferences
from valid8 import decorate_with_validators
except ImportError:
raise Exception('Use of _add_validators_to_setter requires that the valid8 library is installed. Check that you can'
' \'import valid8\'')
# -- check if validators already exist on the function
if hasattr(setter_fun, '__validators__'):
msg = 'overridden setter for attribute ' + property_name + ' implemented by function ' \
+ str(setter_fun.__qualname__) + ' has validators while there are validators already defined ' \
'for this property in the __init__ constructor. This will lead to double-contract in the final ' \
'setter, please remove the one on the overridden setter.'
warn(msg)
# -- add the generated validators
setter_fun_with_validation = decorate_with_validators(setter_fun, **{var_name: validators})
# bind the validators to the setter function so that error message is correct
for v in validators:
v.validated_func = setter_fun_with_validation
# # the only thing we can't do is to replace the function's parameter name dynamically in the validation error
# # messages so we wrap the function again to catch the potential pycontracts error :(
# # old:
# # @functools.wraps(func) -> to make the wrapper function look like the wrapped function
# # def wrapper(self, *args, **kwargs):
# # new:
# # we now use 'decorate' to have a wrapper that has the same signature, see below
# def _contracts_parser_interceptor(func, self, *args, **kwargs):
# try:
# return func(self, *args, **kwargs)
# except ContractNotRespected as e:
# e.error = e.error.replace('\'val\'', '\'' + property_name + '\'')
# raise e
# f = _contracts_parser_interceptor(f)
# setter_fun_with_possible_contract = decorate(setter_fun_with_possible_contract, _contracts_parser_interceptor)
return setter_fun_with_validation
@function_decorator
def getter_override(attribute=None, # type: str
f=DECORATED
):
"""
A decorator to indicate an overridden getter for a given attribute. If the attribute name is None, the function name
will be used as the attribute name.
:param attribute: the attribute name for which the decorated function is an overridden getter
:return:
"""
return autoprops_override_decorate(f, attribute=attribute, is_getter=True)
@function_decorator
def setter_override(attribute=None, # type: str
f=DECORATED
):
"""
A decorator to indicate an overridden setter for a given attribute. If the attribute name is None, the function name
will be used as the attribute name. The @contract will still be dynamically added.
:param attribute: the attribute name for which the decorated function is an overridden setter
:return:
"""
return autoprops_override_decorate(f, attribute=attribute, is_getter=False)
def autoprops_override_decorate(func, # type: Callable
attribute=None, # type: str
is_getter=True # type: bool
):
# type: (...) -> Callable
"""
Used to decorate a function as an overridden getter or setter, without using the @getter_override or
@setter_override annotations. If the overridden setter has no @contract, the contract will still be
dynamically added. Note: this should be executed BEFORE @autoprops or autoprops_decorate().
:param func: the function on which to execute. Note that it won't be wrapped but simply annotated.
:param attribute: the attribute name. If None, the function name will be used
:param is_getter: True for a getter override, False for a setter override.
:return:
"""
# default attribute name is getter/setter function name
if attribute is None:
attribute = func.__name__
if is_getter:
# Simply annotate the fact that this is a getter function for this attribute
# (a) check that there is no annotation yet
if hasattr(func, __GETTER_OVERRIDE_ANNOTATION):
already_name = getattr(func, __GETTER_OVERRIDE_ANNOTATION)
raise DuplicateOverrideError('Function %s is already an overridden getter for attribute %s'
% (func, already_name))
# (b) set it
# func.__getter_override__ = attribute
setattr(func, __GETTER_OVERRIDE_ANNOTATION, attribute)
else:
# Simply annotate the fact that this is a setter function for this attribute
# (a) check that there is no annotation yet
if hasattr(func, __SETTER_OVERRIDE_ANNOTATION):
already_name = getattr(func, __SETTER_OVERRIDE_ANNOTATION)
raise DuplicateOverrideError('Function %s is already an overridden setter for attribute %s'
% (func, already_name))
# (b) set it
# func.__setter_override__ = attribute
setattr(func, __SETTER_OVERRIDE_ANNOTATION, attribute)
return func
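# Override sketch (hypothetical `Flat` class, helper never called at import time): a getter or
# setter written by hand and annotated with @getter_override / @setter_override is picked up by
# @autoprops instead of the generated one. The getter must take only `self`, the setter exactly
# one non-self argument, otherwise IllegalGetterSignatureException / IllegalSetterSignatureException
# is raised by the signature checks above.
def _autoprops_override_example():
    @autoprops
    class Flat(object):
        def __init__(self, surface):
            self.surface = surface

        @getter_override
        def surface(self):
            # custom read logic could go here
            return self._surface

        @setter_override(attribute='surface')
        def set_surface(self, surface):
            # custom write logic could go here
            self._surface = surface

    f = Flat(60)
    f.surface = 75
    assert f.surface == 75
    return f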
|
|
# wireproto.py - generic wire protocol support functions
#
# Copyright 2005-2010 Matt Mackall <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
import urllib, tempfile, os, sys
from i18n import _
from node import bin, hex
import changegroup as changegroupmod
import peer, error, encoding, util, store
# abstract batching support
class future(object):
'''placeholder for a value to be set later'''
def set(self, value):
if util.safehasattr(self, 'value'):
raise error.RepoError("future is already set")
self.value = value
class batcher(object):
'''base class for batches of commands submittable in a single request
All methods invoked on instances of this class are simply queued and
return a future for the result. Once you call submit(), all the queued
calls are performed and the results set in their respective futures.
'''
def __init__(self):
self.calls = []
def __getattr__(self, name):
def call(*args, **opts):
resref = future()
self.calls.append((name, args, opts, resref,))
return resref
return call
def submit(self):
pass
class localbatch(batcher):
'''performs the queued calls directly'''
def __init__(self, local):
batcher.__init__(self)
self.local = local
def submit(self):
for name, args, opts, resref in self.calls:
resref.set(getattr(self.local, name)(*args, **opts))
class remotebatch(batcher):
'''batches the queued calls; uses as few roundtrips as possible'''
def __init__(self, remote):
'''remote must support _submitbatch(encbatch) and
_submitone(op, encargs)'''
batcher.__init__(self)
self.remote = remote
def submit(self):
req, rsp = [], []
for name, args, opts, resref in self.calls:
mtd = getattr(self.remote, name)
batchablefn = getattr(mtd, 'batchable', None)
if batchablefn is not None:
batchable = batchablefn(mtd.im_self, *args, **opts)
encargsorres, encresref = batchable.next()
if encresref:
req.append((name, encargsorres,))
rsp.append((batchable, encresref, resref,))
else:
resref.set(encargsorres)
else:
if req:
self._submitreq(req, rsp)
req, rsp = [], []
resref.set(mtd(*args, **opts))
if req:
self._submitreq(req, rsp)
def _submitreq(self, req, rsp):
encresults = self.remote._submitbatch(req)
for encres, r in zip(encresults, rsp):
batchable, encresref, resref = r
encresref.set(encres)
resref.set(batchable.next())
def batchable(f):
'''annotation for batchable methods
Such methods must implement a coroutine as follows:
@batchable
def sample(self, one, two=None):
# Handle locally computable results first:
if not one:
yield "a local result", None
# Build list of encoded arguments suitable for your wire protocol:
encargs = [('one', encode(one),), ('two', encode(two),)]
# Create future for injection of encoded result:
encresref = future()
# Return encoded arguments and future:
yield encargs, encresref
# Assuming the future to be filled with the result from the batched
# request now. Decode it:
yield decode(encresref.value)
The decorator returns a function which wraps this coroutine as a plain
method, but adds the original method as an attribute called "batchable",
which is used by remotebatch to split the call into separate encoding and
decoding phases.
'''
def plain(*args, **opts):
batchable = f(*args, **opts)
encargsorres, encresref = batchable.next()
if not encresref:
return encargsorres # a local result in this case
self = args[0]
encresref.set(self._submitone(f.func_name, encargsorres))
return batchable.next()
setattr(plain, 'batchable', f)
return plain
# list of nodes encoding / decoding
def decodelist(l, sep=' '):
if l:
return map(bin, l.split(sep))
return []
def encodelist(l, sep=' '):
return sep.join(map(hex, l))
# batched call argument encoding
def escapearg(plain):
return (plain
.replace(':', '::')
.replace(',', ':,')
.replace(';', ':;')
.replace('=', ':='))
def unescapearg(escaped):
return (escaped
.replace(':=', '=')
.replace(':;', ';')
.replace(':,', ',')
.replace('::', ':'))
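def _escapearg_roundtrip_example():
    # Illustrative only: the batch protocol delimiters (':', ',', ';', '=') are escaped so that
    # argument values survive being packed into a single batch string and can be split apart again.
    value = 'a=b;c'
    assert escapearg(value) == 'a:=b:;c'
    assert unescapearg(escapearg(value)) == value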
# client side
def todict(**args):
return args
class wirepeer(peer.peerrepository):
def batch(self):
return remotebatch(self)
def _submitbatch(self, req):
cmds = []
for op, argsdict in req:
args = ','.join('%s=%s' % p for p in argsdict.iteritems())
cmds.append('%s %s' % (op, args))
rsp = self._call("batch", cmds=';'.join(cmds))
return rsp.split(';')
def _submitone(self, op, args):
return self._call(op, **args)
@batchable
def lookup(self, key):
self.requirecap('lookup', _('look up remote revision'))
f = future()
yield todict(key=encoding.fromlocal(key)), f
d = f.value
success, data = d[:-1].split(" ", 1)
if int(success):
yield bin(data)
self._abort(error.RepoError(data))
@batchable
def heads(self):
f = future()
yield {}, f
d = f.value
try:
yield decodelist(d[:-1])
except ValueError:
self._abort(error.ResponseError(_("unexpected response:"), d))
@batchable
def known(self, nodes):
f = future()
yield todict(nodes=encodelist(nodes)), f
d = f.value
try:
yield [bool(int(f)) for f in d]
except ValueError:
self._abort(error.ResponseError(_("unexpected response:"), d))
@batchable
def branchmap(self):
f = future()
yield {}, f
d = f.value
try:
branchmap = {}
for branchpart in d.splitlines():
branchname, branchheads = branchpart.split(' ', 1)
branchname = encoding.tolocal(urllib.unquote(branchname))
branchheads = decodelist(branchheads)
branchmap[branchname] = branchheads
yield branchmap
except TypeError:
self._abort(error.ResponseError(_("unexpected response:"), d))
def branches(self, nodes):
n = encodelist(nodes)
d = self._call("branches", nodes=n)
try:
br = [tuple(decodelist(b)) for b in d.splitlines()]
return br
except ValueError:
self._abort(error.ResponseError(_("unexpected response:"), d))
def between(self, pairs):
batch = 8 # avoid giant requests
r = []
for i in xrange(0, len(pairs), batch):
n = " ".join([encodelist(p, '-') for p in pairs[i:i + batch]])
d = self._call("between", pairs=n)
try:
r.extend(l and decodelist(l) or [] for l in d.splitlines())
except ValueError:
self._abort(error.ResponseError(_("unexpected response:"), d))
return r
@batchable
def pushkey(self, namespace, key, old, new):
if not self.capable('pushkey'):
yield False, None
f = future()
self.ui.debug('preparing pushkey for "%s:%s"\n' % (namespace, key))
yield todict(namespace=encoding.fromlocal(namespace),
key=encoding.fromlocal(key),
old=encoding.fromlocal(old),
new=encoding.fromlocal(new)), f
d = f.value
d, output = d.split('\n', 1)
try:
d = bool(int(d))
except ValueError:
raise error.ResponseError(
_('push failed (unexpected response):'), d)
for l in output.splitlines(True):
self.ui.status(_('remote: '), l)
yield d
@batchable
def listkeys(self, namespace):
if not self.capable('pushkey'):
yield {}, None
f = future()
self.ui.debug('preparing listkeys for "%s"\n' % namespace)
yield todict(namespace=encoding.fromlocal(namespace)), f
d = f.value
r = {}
for l in d.splitlines():
k, v = l.split('\t')
r[encoding.tolocal(k)] = encoding.tolocal(v)
yield r
def stream_out(self):
return self._callstream('stream_out')
def changegroup(self, nodes, kind):
n = encodelist(nodes)
f = self._callstream("changegroup", roots=n)
return changegroupmod.unbundle10(self._decompress(f), 'UN')
def changegroupsubset(self, bases, heads, kind):
self.requirecap('changegroupsubset', _('look up remote changes'))
bases = encodelist(bases)
heads = encodelist(heads)
f = self._callstream("changegroupsubset",
bases=bases, heads=heads)
return changegroupmod.unbundle10(self._decompress(f), 'UN')
def getbundle(self, source, heads=None, common=None):
self.requirecap('getbundle', _('look up remote changes'))
opts = {}
if heads is not None:
opts['heads'] = encodelist(heads)
if common is not None:
opts['common'] = encodelist(common)
f = self._callstream("getbundle", **opts)
return changegroupmod.unbundle10(self._decompress(f), 'UN')
def unbundle(self, cg, heads, source):
'''Send cg (a readable file-like object representing the
changegroup to push, typically a chunkbuffer object) to the
remote server as a bundle. Return an integer indicating the
result of the push (see localrepository.addchangegroup()).'''
if heads != ['force'] and self.capable('unbundlehash'):
heads = encodelist(['hashed',
util.sha1(''.join(sorted(heads))).digest()])
else:
heads = encodelist(heads)
ret, output = self._callpush("unbundle", cg, heads=heads)
if ret == "":
raise error.ResponseError(
_('push failed:'), output)
try:
ret = int(ret)
except ValueError:
raise error.ResponseError(
_('push failed (unexpected response):'), ret)
for l in output.splitlines(True):
self.ui.status(_('remote: '), l)
return ret
def debugwireargs(self, one, two, three=None, four=None, five=None):
# don't pass optional arguments left at their default value
opts = {}
if three is not None:
opts['three'] = three
if four is not None:
opts['four'] = four
return self._call('debugwireargs', one=one, two=two, **opts)
# server side
class streamres(object):
def __init__(self, gen):
self.gen = gen
class pushres(object):
def __init__(self, res):
self.res = res
class pusherr(object):
def __init__(self, res):
self.res = res
class ooberror(object):
def __init__(self, message):
self.message = message
def dispatch(repo, proto, command):
repo = repo.filtered("served")
func, spec = commands[command]
args = proto.getargs(spec)
return func(repo, proto, *args)
def options(cmd, keys, others):
opts = {}
for k in keys:
if k in others:
opts[k] = others[k]
del others[k]
if others:
sys.stderr.write("abort: %s got unexpected arguments %s\n"
% (cmd, ",".join(others)))
return opts
def batch(repo, proto, cmds, others):
repo = repo.filtered("served")
res = []
for pair in cmds.split(';'):
op, args = pair.split(' ', 1)
vals = {}
for a in args.split(','):
if a:
n, v = a.split('=')
vals[n] = unescapearg(v)
func, spec = commands[op]
if spec:
keys = spec.split()
data = {}
for k in keys:
if k == '*':
star = {}
for key in vals.keys():
if key not in keys:
star[key] = vals[key]
data['*'] = star
else:
data[k] = vals[k]
result = func(repo, proto, *[data[k] for k in keys])
else:
result = func(repo, proto)
if isinstance(result, ooberror):
return result
res.append(escapearg(result))
return ';'.join(res)
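# Wire-format sketch (hypothetical payload): one "batch" request carries several commands at once.
# A parameter-less 'heads' plus a 'known' query would arrive in the cmds argument as
#
#   heads ;known nodes=<hex-node> <hex-node>
#
# and batch() above splits it on ';', ' ', ',' and '=' (unescaping each value with unescapearg)
# before dispatching every command, then joins the escaped per-command results with ';'.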
def between(repo, proto, pairs):
pairs = [decodelist(p, '-') for p in pairs.split(" ")]
r = []
for b in repo.between(pairs):
r.append(encodelist(b) + "\n")
return "".join(r)
def branchmap(repo, proto):
branchmap = repo.branchmap()
heads = []
for branch, nodes in branchmap.iteritems():
branchname = urllib.quote(encoding.fromlocal(branch))
branchnodes = encodelist(nodes)
heads.append('%s %s' % (branchname, branchnodes))
return '\n'.join(heads)
def branches(repo, proto, nodes):
nodes = decodelist(nodes)
r = []
for b in repo.branches(nodes):
r.append(encodelist(b) + "\n")
return "".join(r)
def capabilities(repo, proto):
caps = ('lookup changegroupsubset branchmap pushkey known getbundle '
'unbundlehash batch').split()
if _allowstream(repo.ui):
if repo.ui.configbool('server', 'preferuncompressed', False):
caps.append('stream-preferred')
requiredformats = repo.requirements & repo.supportedformats
# if our local revlogs are just revlogv1, add 'stream' cap
if not requiredformats - set(('revlogv1',)):
caps.append('stream')
# otherwise, add 'streamreqs' detailing our local revlog format
else:
caps.append('streamreqs=%s' % ','.join(requiredformats))
caps.append('unbundle=%s' % ','.join(changegroupmod.bundlepriority))
caps.append('httpheader=1024')
return ' '.join(caps)
def changegroup(repo, proto, roots):
nodes = decodelist(roots)
cg = repo.changegroup(nodes, 'serve')
return streamres(proto.groupchunks(cg))
def changegroupsubset(repo, proto, bases, heads):
bases = decodelist(bases)
heads = decodelist(heads)
cg = repo.changegroupsubset(bases, heads, 'serve')
return streamres(proto.groupchunks(cg))
def debugwireargs(repo, proto, one, two, others):
# only accept optional args from the known set
opts = options('debugwireargs', ['three', 'four'], others)
return repo.debugwireargs(one, two, **opts)
def getbundle(repo, proto, others):
opts = options('getbundle', ['heads', 'common'], others)
for k, v in opts.iteritems():
opts[k] = decodelist(v)
cg = repo.getbundle('serve', **opts)
return streamres(proto.groupchunks(cg))
def heads(repo, proto):
h = repo.heads()
return encodelist(h) + "\n"
def hello(repo, proto):
'''the hello command returns a set of lines describing various
interesting things about the server, in an RFC822-like format.
Currently the only one defined is "capabilities", which
consists of a line in the form:
capabilities: space separated list of tokens
'''
return "capabilities: %s\n" % (capabilities(repo, proto))
def listkeys(repo, proto, namespace):
d = repo.listkeys(encoding.tolocal(namespace)).items()
t = '\n'.join(['%s\t%s' % (encoding.fromlocal(k), encoding.fromlocal(v))
for k, v in d])
return t
def lookup(repo, proto, key):
try:
k = encoding.tolocal(key)
c = repo[k]
r = c.hex()
success = 1
except Exception, inst:
r = str(inst)
success = 0
return "%s %s\n" % (success, r)
def known(repo, proto, nodes, others):
return ''.join(b and "1" or "0" for b in repo.known(decodelist(nodes)))
def pushkey(repo, proto, namespace, key, old, new):
# compatibility with pre-1.8 clients which were accidentally
# sending raw binary nodes rather than utf-8-encoded hex
if len(new) == 20 and new.encode('string-escape') != new:
# looks like it could be a binary node
try:
new.decode('utf-8')
new = encoding.tolocal(new) # but cleanly decodes as UTF-8
except UnicodeDecodeError:
pass # binary, leave unmodified
else:
new = encoding.tolocal(new) # normal path
if util.safehasattr(proto, 'restore'):
proto.redirect()
try:
r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
encoding.tolocal(old), new) or False
except util.Abort:
r = False
output = proto.restore()
return '%s\n%s' % (int(r), output)
r = repo.pushkey(encoding.tolocal(namespace), encoding.tolocal(key),
encoding.tolocal(old), new)
return '%s\n' % int(r)
def _allowstream(ui):
return ui.configbool('server', 'uncompressed', True, untrusted=True)
def stream(repo, proto):
'''If the server supports streaming clone, it advertises the "stream"
capability with a value representing the version and flags of the repo
it is serving. Client checks to see if it understands the format.
The format is simple: the server writes out a line with the number
of files, then the total number of bytes to be transferred (separated
by a space). Then, for each file, the server first writes the filename
and filesize (separated by the null character), then the file contents.
'''
if not _allowstream(repo.ui):
return '1\n'
entries = []
total_bytes = 0
try:
# get consistent snapshot of repo, lock during scan
lock = repo.lock()
try:
repo.ui.debug('scanning\n')
for name, ename, size in repo.store.walk():
if size:
entries.append((name, size))
total_bytes += size
finally:
lock.release()
except error.LockError:
return '2\n' # error: 2
def streamer(repo, entries, total):
'''stream out all metadata files in repository.'''
yield '0\n' # success
repo.ui.debug('%d files, %d bytes to transfer\n' %
(len(entries), total_bytes))
yield '%d %d\n' % (len(entries), total_bytes)
sopener = repo.sopener
oldaudit = sopener.mustaudit
debugflag = repo.ui.debugflag
sopener.mustaudit = False
try:
for name, size in entries:
if debugflag:
repo.ui.debug('sending %s (%d bytes)\n' % (name, size))
# partially encode name over the wire for backwards compat
yield '%s\0%d\n' % (store.encodedir(name), size)
if size <= 65536:
fp = sopener(name)
try:
data = fp.read(size)
finally:
fp.close()
yield data
else:
for chunk in util.filechunkiter(sopener(name), limit=size):
yield chunk
# replace with "finally:" when support for python 2.4 has been dropped
except Exception:
sopener.mustaudit = oldaudit
raise
sopener.mustaudit = oldaudit
return streamres(streamer(repo, entries, total_bytes))
def unbundle(repo, proto, heads):
their_heads = decodelist(heads)
def check_heads():
heads = repo.heads()
heads_hash = util.sha1(''.join(sorted(heads))).digest()
return (their_heads == ['force'] or their_heads == heads or
their_heads == ['hashed', heads_hash])
proto.redirect()
# fail early if possible
if not check_heads():
return pusherr('repository changed while preparing changes - '
'please try again')
# write bundle data to temporary file because it can be big
fd, tempname = tempfile.mkstemp(prefix='hg-unbundle-')
fp = os.fdopen(fd, 'wb+')
r = 0
try:
proto.getfile(fp)
lock = repo.lock()
try:
if not check_heads():
# someone else committed/pushed/unbundled while we
# were transferring data
return pusherr('repository changed while uploading changes - '
'please try again')
# push can proceed
fp.seek(0)
gen = changegroupmod.readbundle(fp, None)
try:
r = repo.addchangegroup(gen, 'serve', proto._client())
except util.Abort, inst:
sys.stderr.write("abort: %s\n" % inst)
finally:
lock.release()
return pushres(r)
finally:
fp.close()
os.unlink(tempname)
commands = {
'batch': (batch, 'cmds *'),
'between': (between, 'pairs'),
'branchmap': (branchmap, ''),
'branches': (branches, 'nodes'),
'capabilities': (capabilities, ''),
'changegroup': (changegroup, 'roots'),
'changegroupsubset': (changegroupsubset, 'bases heads'),
'debugwireargs': (debugwireargs, 'one two *'),
'getbundle': (getbundle, '*'),
'heads': (heads, ''),
'hello': (hello, ''),
'known': (known, 'nodes *'),
'listkeys': (listkeys, 'namespace'),
'lookup': (lookup, 'key'),
'pushkey': (pushkey, 'namespace key old new'),
'stream_out': (stream, ''),
'unbundle': (unbundle, 'heads'),
}
|
|
# -*- coding: utf-8 -*-
# Copyright (c) 2015, imageio contributors
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
""" Functionality used for testing. This code itself is not covered in tests.
"""
from __future__ import absolute_import, print_function, division
import os
import sys
import inspect
import shutil
import atexit
import pytest
# Get root dir
THIS_DIR = os.path.abspath(os.path.dirname(__file__))
ROOT_DIR = THIS_DIR
for i in range(9):
ROOT_DIR = os.path.dirname(ROOT_DIR)
if os.path.isfile(os.path.join(ROOT_DIR, '.gitignore')):
break
STYLE_IGNORES = ['E226',
'E241',
'E265',
'E266', # too many leading '#' for block comment
'E402', # module level import not at top of file
'E731', # do not assign a lambda expression, use a def
'W291',
'W293',
]
## Functions to use in tests
def run_tests_if_main(show_coverage=False):
""" Run tests in a given file if it is run as a script
Coverage is reported for running this single test. Set show_coverage to
launch the report in the web browser.
"""
local_vars = inspect.currentframe().f_back.f_locals
if not local_vars.get('__name__', '') == '__main__':
return
# we are in a "__main__"
os.chdir(ROOT_DIR)
fname = str(local_vars['__file__'])
_clear_imageio()
_enable_faulthandler()
pytest.main('-v -x --color=yes --cov imageio '
'--cov-config .coveragerc --cov-report html %s' % repr(fname))
if show_coverage:
import webbrowser
fname = os.path.join(ROOT_DIR, 'htmlcov', 'index.html')
webbrowser.open_new_tab(fname)
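# Typical use (hypothetical test module tests/test_foo.py): call this at the very bottom of the
# file so that `python tests/test_foo.py` runs only that file's tests, with coverage reported:
#
#   run_tests_if_main()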
_the_test_dir = None
def get_test_dir():
global _the_test_dir
if _the_test_dir is None:
# Define dir
from imageio.core import appdata_dir
_the_test_dir = os.path.join(appdata_dir('imageio'), 'testdir')
# Clear and create it now
clean_test_dir(True)
os.makedirs(_the_test_dir)
os.makedirs(os.path.join(_the_test_dir, 'images'))
# And later
atexit.register(clean_test_dir)
return _the_test_dir
def clean_test_dir(strict=False):
if os.path.isdir(_the_test_dir):
try:
shutil.rmtree(_the_test_dir)
except Exception:
if strict:
raise
def need_internet():
if os.getenv('IMAGEIO_NO_INTERNET', '').lower() in ('1', 'true', 'yes'):
pytest.skip('No internet')
## Functions to use from make
def test_unit(cov_report='term'):
""" Run all unit tests. Returns exit code.
"""
orig_dir = os.getcwd()
os.chdir(ROOT_DIR)
try:
_clear_imageio()
_enable_faulthandler()
return pytest.main('-v --cov imageio --cov-config .coveragerc '
'--cov-report %s tests' % cov_report)
finally:
os.chdir(orig_dir)
import imageio
print('Tests were performed on', str(imageio))
def test_style():
""" Test style using flake8
"""
# Test if flake is there
try:
from flake8.main import main # noqa
except ImportError:
print('Skipping flake8 test, flake8 not installed')
return
# Reporting
print('Running flake8 on %s' % ROOT_DIR)
sys.stdout = FileForTesting(sys.stdout)
# Init
ignores = STYLE_IGNORES.copy()
fail = False
count = 0
# Iterate over files
for dir, dirnames, filenames in os.walk(ROOT_DIR):
dir = os.path.relpath(dir, ROOT_DIR)
# Skip this dir?
exclude_dirs = set(['.git', 'docs', 'build', 'dist', '__pycache__'])
if exclude_dirs.intersection(dir.split(os.path.sep)):
continue
# Check all files ...
for fname in filenames:
if fname.endswith('.py'):
# Get test options for this file
filename = os.path.join(ROOT_DIR, dir, fname)
skip, extra_ignores = _get_style_test_options(filename)
if skip:
continue
# Test
count += 1
thisfail = _test_style(filename, ignores + extra_ignores)
if thisfail:
fail = True
print('----')
sys.stdout.flush()
# Report result
sys.stdout.revert()
if not count:
raise RuntimeError(' Arg! flake8 did not check any files')
elif fail:
raise RuntimeError(' Arg! flake8 failed (checked %i files)' % count)
else:
print(' Hooray! flake8 passed (checked %i files)' % count)
## Requirements
def _enable_faulthandler():
""" Enable faulthandler (if we can), so that we get tracebacks
on segfaults.
"""
try:
import faulthandler
faulthandler.enable()
print('Faulthandler enabled')
except Exception:
print('Could not enable faulthandler')
def _clear_imageio():
# Remove ourselves from sys.modules to force an import
for key in list(sys.modules.keys()):
if key.startswith('imageio'):
del sys.modules[key]
class FileForTesting(object):
""" Alternative to stdout that makes path relative to ROOT_DIR
"""
def __init__(self, original):
self._original = original
def write(self, msg):
if msg.startswith(ROOT_DIR):
msg = os.path.relpath(msg, ROOT_DIR)
self._original.write(msg)
self._original.flush()
def flush(self):
self._original.flush()
def revert(self):
sys.stdout = self._original
def _get_style_test_options(filename):
""" Returns (skip, ignores) for the specifies source file.
"""
skip = False
ignores = []
text = open(filename, 'rb').read().decode('utf-8')
# Iterate over lines
for i, line in enumerate(text.splitlines()):
if i > 20:
break
if line.startswith('# styletest:'):
if 'skip' in line:
skip = True
elif 'ignore' in line:
words = line.replace(',', ' ').split(' ')
words = [w.strip() for w in words if w.strip()]
words = [w for w in words if
(w[1:].isnumeric() and w[0] in 'EWFCN')]
ignores.extend(words)
return skip, ignores
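# The directives parsed above live in the first ~20 lines of a source file, e.g. (hypothetical):
#
#   # styletest: skip                 -> exclude this file from the flake8 run entirely
#   # styletest: ignore E501, W605    -> additionally ignore these codes for this file only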
def _test_style(filename, ignore):
""" Test style for a certain file.
"""
if isinstance(ignore, (list, tuple)):
ignore = ','.join(ignore)
orig_dir = os.getcwd()
orig_argv = sys.argv
os.chdir(ROOT_DIR)
sys.argv[1:] = [filename]
sys.argv.append('--ignore=' + ignore)
try:
from flake8.main import main
main()
except SystemExit as ex:
if ex.code in (None, 0):
return False
else:
return True
finally:
os.chdir(orig_dir)
sys.argv[:] = orig_argv
|
|
from datetime import date, datetime
from functools import partial
from django.urls import reverse
from trainings.filters import filter_trainees_by_instructor_status
from trainings.views import all_trainees_queryset
from workshops.models import (
Award,
Event,
Organization,
Person,
Role,
Tag,
TrainingProgress,
TrainingRequirement,
)
from workshops.tests.base import TestBase
class TestTraineesView(TestBase):
def setUp(self):
self._setUpUsersAndLogin()
self._setUpAirports()
self._setUpNonInstructors()
self._setUpTags()
self._setUpRoles()
self.training = TrainingRequirement.objects.get(name="Training")
self.homework = TrainingRequirement.objects.get(name="SWC Homework")
self.discussion = TrainingRequirement.objects.get(name="Discussion")
self.ttt_event = Event.objects.create(
start=datetime(2018, 7, 14),
slug="2018-07-14-training",
host=Organization.objects.first(),
)
self.ttt_event.tags.add(Tag.objects.get(name="TTT"))
def test_view_loads(self):
rv = self.client.get(reverse("all_trainees"))
self.assertEqual(rv.status_code, 200)
def test_bulk_add_progress(self):
TrainingProgress.objects.create(
trainee=self.spiderman, requirement=self.discussion, state="n"
)
data = {
"trainees": [self.spiderman.pk, self.ironman.pk],
"requirement": self.discussion.pk,
"state": "a",
"submit": "",
}
# all trainees need to have a training task to assign a training
# progress to them
self.ironman.task_set.create(
event=self.ttt_event,
role=Role.objects.get(name="learner"),
)
self.spiderman.task_set.create(
event=self.ttt_event,
role=Role.objects.get(name="learner"),
)
rv = self.client.post(reverse("all_trainees"), data, follow=True)
self.assertEqual(rv.resolver_match.view_name, "all_trainees")
msg = "Successfully changed progress of all selected trainees."
self.assertContains(rv, msg)
got = set(
TrainingProgress.objects.values_list(
"trainee", "requirement", "state", "evaluated_by"
)
)
expected = {
(self.spiderman.pk, self.discussion.pk, "n", None),
(self.spiderman.pk, self.discussion.pk, "a", self.admin.pk),
(self.ironman.pk, self.discussion.pk, "a", self.admin.pk),
}
self.assertEqual(got, expected)
def test_bulk_discard_progress(self):
spiderman_progress = TrainingProgress.objects.create(
trainee=self.spiderman, requirement=self.discussion, state="n"
)
ironman_progress = TrainingProgress.objects.create(
trainee=self.ironman, requirement=self.discussion, state="n"
)
blackwidow_progress = TrainingProgress.objects.create(
trainee=self.blackwidow, requirement=self.discussion, state="n"
)
data = {
"trainees": [self.spiderman.pk, self.ironman.pk],
"discard": "",
}
rv = self.client.post(reverse("all_trainees"), data, follow=True)
self.assertEqual(rv.status_code, 200)
self.assertEqual(rv.resolver_match.view_name, "all_trainees")
msg = "Successfully discarded progress of all selected trainees."
self.assertContains(rv, msg)
spiderman_progress.refresh_from_db()
self.assertTrue(spiderman_progress.discarded)
ironman_progress.refresh_from_db()
self.assertTrue(ironman_progress.discarded)
blackwidow_progress.refresh_from_db()
self.assertFalse(blackwidow_progress.discarded)
class TestFilterTraineesByInstructorStatus(TestBase):
def _setUpPermissions(self):
pass
def _setUpNonInstructors(self):
pass
def _setUpTrainingRequirements(self):
self.swc_demo = TrainingRequirement.objects.get(name="SWC Demo")
self.swc_homework = TrainingRequirement.objects.get(name="SWC Homework")
self.dc_demo = TrainingRequirement.objects.get(name="DC Demo")
self.dc_homework = TrainingRequirement.objects.get(name="DC Homework")
self.lc_demo = TrainingRequirement.objects.get(name="LC Demo")
self.lc_homework = TrainingRequirement.objects.get(name="LC Homework")
self.discussion = TrainingRequirement.objects.get(name="Discussion")
self.training = TrainingRequirement.objects.get(name="Training")
def _setUpInstructors(self):
# prepare data
# 1 SWC/DC/LC instructor
self.instructor1 = Person.objects.create(
personal="Instructor1",
family="Instructor1",
email="[email protected]",
username="instructor1_instructor1",
)
Award.objects.create(
person=self.instructor1, badge=self.swc_instructor, awarded=date(2014, 1, 1)
)
self.instructor2 = Person.objects.create(
personal="Instructor2",
family="Instructor2",
email="[email protected]",
username="instructor2_instructor2",
)
Award.objects.create(
person=self.instructor2, badge=self.dc_instructor, awarded=date(2014, 1, 1)
)
self.instructor3 = Person.objects.create(
personal="Instructor3",
family="Instructor3",
email="[email protected]",
username="instructor3_instructor3",
)
Award.objects.create(
person=self.instructor3, badge=self.lc_instructor, awarded=date(2014, 1, 1)
)
# 1 combined instructor (SWC-DC-LC)
self.instructor4 = Person.objects.create(
personal="Instructor4",
family="Instructor4",
email="[email protected]",
username="instructor4_instructor4",
)
Award.objects.create(
person=self.instructor4, badge=self.swc_instructor, awarded=date(2014, 1, 1)
)
Award.objects.create(
person=self.instructor4, badge=self.dc_instructor, awarded=date(2014, 1, 1)
)
Award.objects.create(
person=self.instructor4, badge=self.lc_instructor, awarded=date(2014, 1, 1)
)
# 1 eligible trainee with no instructor badges
self.trainee1 = Person.objects.create(
personal="Trainee1",
family="Trainee1",
email="[email protected]",
username="trainee1_trainee1",
)
TrainingProgress.objects.bulk_create(
[
TrainingProgress(
trainee=self.trainee1,
evaluated_by=None,
requirement=self.training,
state="p", # passed
),
TrainingProgress(
trainee=self.trainee1,
evaluated_by=None,
requirement=self.discussion,
state="p",
),
TrainingProgress(
trainee=self.trainee1,
evaluated_by=None,
requirement=self.swc_homework,
state="p",
),
TrainingProgress(
trainee=self.trainee1,
evaluated_by=None,
requirement=self.dc_homework,
state="p",
),
TrainingProgress(
trainee=self.trainee1,
evaluated_by=None,
requirement=self.lc_demo,
state="p",
),
]
)
# 1 eligible trainee with instructor badge
self.trainee2 = Person.objects.create(
personal="Trainee2",
family="Trainee2",
email="[email protected]",
username="trainee2_trainee2",
)
TrainingProgress.objects.bulk_create(
[
TrainingProgress(
trainee=self.trainee2,
evaluated_by=None,
requirement=self.training,
state="p", # passed
),
TrainingProgress(
trainee=self.trainee2,
evaluated_by=None,
requirement=self.discussion,
state="p",
),
TrainingProgress(
trainee=self.trainee2,
evaluated_by=None,
requirement=self.dc_homework,
state="p",
),
TrainingProgress(
trainee=self.trainee2,
evaluated_by=None,
requirement=self.swc_demo,
state="p",
),
TrainingProgress(
trainee=self.trainee2,
evaluated_by=None,
requirement=self.lc_demo,
state="p",
),
]
)
Award.objects.create(
person=self.trainee2, badge=self.lc_instructor, awarded=date(2014, 1, 1)
)
# 1 non-eligible trainee
self.trainee3 = Person.objects.create(
personal="Trainee3",
family="Trainee3",
email="[email protected]",
username="trainee3_trainee3",
)
TrainingProgress.objects.bulk_create(
[
TrainingProgress(
trainee=self.trainee3,
evaluated_by=None,
requirement=self.training,
state="p", # passed
),
TrainingProgress(
trainee=self.trainee3,
evaluated_by=None,
requirement=self.discussion,
state="f", # failed
notes="Failed",
),
TrainingProgress(
trainee=self.trainee3,
evaluated_by=None,
requirement=self.lc_homework,
state="p",
),
TrainingProgress(
trainee=self.trainee3,
evaluated_by=None,
requirement=self.lc_demo,
state="p",
),
]
)
def setUp(self):
self._setUpTrainingRequirements()
super().setUp()
# `filter_trainees_by_instructor_status` takes 3 parameters (queryset,
# name and choice), but only 1 is used for tests (choice)
self.queryset = all_trainees_queryset()
self.filter = partial(
filter_trainees_by_instructor_status, queryset=self.queryset, name=""
)
def test_no_choice(self):
# result should be the same as original queryset
rv = self.filter(choice="")
self.assertEqual(rv, self.queryset)
self.assertQuerysetEqual(rv, list(self.queryset), transform=lambda x: x)
def test_all_instructors(self):
# only instructors who have all 3 badges should be returned
rv = self.filter(choice="all")
values = [self.instructor4]
self.assertQuerysetEqual(rv, values, transform=lambda x: x)
def test_any_instructors(self):
# any instructor should be returned
rv = self.filter(choice="any")
values = [
self.instructor1,
self.instructor2,
self.instructor3,
self.instructor4,
]
self.assertQuerysetEqual(rv, values, transform=lambda x: x)
def test_swc_instructors(self):
# only SWC instructors should be returned
rv = self.filter(choice="swc")
values = [self.instructor1, self.instructor4]
self.assertQuerysetEqual(rv, values, transform=lambda x: x)
def test_dc_instructors(self):
# only DC instructors should be returned
rv = self.filter(choice="dc")
values = [self.instructor2, self.instructor4]
self.assertQuerysetEqual(rv, values, transform=lambda x: x)
def test_lc_instructors(self):
# only LC instructors should be returned
rv = self.filter(choice="lc")
values = [self.instructor3, self.instructor4]
self.assertQuerysetEqual(rv, values, transform=lambda x: x)
def test_eligible_trainees(self):
# only 1 eligible trainee should be returned
rv = self.filter(choice="eligible")
values = [self.trainee1]
self.assertQuerysetEqual(rv, values, transform=lambda x: x)
def test_eligibility_query(self):
# check if eligibility query works correctly
self.assertEqual(Person.objects.all().count(), 7)
rv = all_trainees_queryset().order_by("pk")
conditions_per_person = [
# self.instructor1
dict(
username="instructor1_instructor1",
is_swc_instructor=1,
is_dc_instructor=0,
is_lc_instructor=0,
is_instructor=1,
instructor_eligible=0,
),
# self.instructor2
dict(
username="instructor2_instructor2",
is_swc_instructor=0,
is_dc_instructor=1,
is_lc_instructor=0,
is_instructor=1,
instructor_eligible=0,
),
# self.instructor3
dict(
username="instructor3_instructor3",
is_swc_instructor=0,
is_dc_instructor=0,
is_lc_instructor=1,
is_instructor=1,
instructor_eligible=0,
),
# self.instructor4
dict(
username="instructor4_instructor4",
is_swc_instructor=1,
is_dc_instructor=1,
is_lc_instructor=1,
is_instructor=3,
instructor_eligible=0,
),
# self.trainee1
dict(
username="trainee1_trainee1",
is_swc_instructor=0,
is_dc_instructor=0,
is_lc_instructor=0,
is_instructor=0,
passed_training=1,
passed_discussion=1,
passed_swc_homework=1,
passed_dc_homework=1,
passed_lc_homework=0,
passed_homework=2,
passed_swc_demo=0,
passed_dc_demo=0,
passed_lc_demo=1,
passed_demo=1,
instructor_eligible=2,
),
# self.trainee2
dict(
username="trainee2_trainee2",
is_swc_instructor=0,
is_dc_instructor=0,
# no idea why this is counting this way, but
# bool(5) == True so we're cool?
is_lc_instructor=5,
is_instructor=5,
passed_training=1,
passed_discussion=1,
passed_swc_homework=0,
passed_dc_homework=1,
passed_lc_homework=0,
passed_homework=1,
passed_swc_demo=1,
passed_dc_demo=0,
passed_lc_demo=1,
passed_demo=2,
instructor_eligible=2,
),
# self.trainee3
dict(
username="trainee3_trainee3",
is_swc_instructor=0,
is_dc_instructor=0,
is_lc_instructor=0,
is_instructor=0,
passed_training=1,
passed_discussion=0,
passed_swc_homework=0,
passed_dc_homework=0,
passed_lc_homework=1,
passed_homework=1,
passed_swc_demo=0,
passed_dc_demo=0,
passed_lc_demo=1,
passed_demo=1,
instructor_eligible=0,
),
]
for person, conditions in zip(rv, conditions_per_person):
for k, v in conditions.items():
self.assertEqual(
getattr(person, k),
v,
f"{person.username} attr {k} doesn't have value {v}",
)
def test_no_instructors(self):
# only non-instructors should be returned
rv = self.filter(choice="no")
values = [self.trainee1, self.trainee3]
self.assertQuerysetEqual(rv, values, transform=lambda x: x)
|
|
__author__ = 'jtromo'
# Developed at SEFCOM Labs by James Romo
from bluetooth import *
from Crypto.Cipher import AES
import threading
import time
import base64
import os
import uuid
import sys  # needed by the error handlers below, which call sys.exc_info()
# /////////////////////////////////////////////////////////////////////////////
# Configuration
# /////////////////////////////////////////////////////////////////////////////
# ---------------------------------- Flags ------------------------------------
# Logging Level Controls
LOGGING_INFO = 1
LOGGING_DEBUG = 1
LOGGING_ERROR = 1
# Determines which protocol components need creation (if not, read from disk)
REQUIRES_SHARED_SECRET_GENERATION = 0
REQUIRES_SMARTKEY_ID_GENERATION = 0
REQUIRES_TRUSTED_DEVICES_GENERATION = 0
# Causes the service to use a sample message from the device instead of real
DEBUG_USE_SAMPLE_DEVICE_MESSAGE = 1
if DEBUG_USE_SAMPLE_DEVICE_MESSAGE:
print '////////////////////////////////////////////////////////////////////'
print '----------------------------- WARNING ------------------------------'
print ' ******* Testing Mode Enabled *******'
print ' Using sample device message'
print ' Must be disabled for demo'
print '////////////////////////////////////////////////////////////////////'
# -------------------------------- Encryption ---------------------------------
# the block size for the cipher object; must be 16, 24, or 32 for AES
BLOCK_SIZE = 32
# the character used for padding--with a block cipher such as AES, the value
# you encrypt must be a multiple of BLOCK_SIZE in length. This character is
# used to ensure that your value is always a multiple of BLOCK_SIZE
PADDING = '{'
# one-liner to sufficiently pad the text to be encrypted
pad = lambda s: s + (BLOCK_SIZE - len(s) % BLOCK_SIZE) * PADDING
# one-liners to encrypt/encode and decrypt/decode a string
# encrypt with AES, encode with base64
EncodeAES = lambda c, s: base64.b64encode(c.encrypt(pad(s)))
DecodeAES = lambda c, e: c.decrypt(base64.b64decode(e)).rstrip(PADDING)
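# Round-trip sketch (hypothetical 32-byte key, same helpers as above; pycrypto's AES.new defaults
# to ECB mode when only a key is given, as in step 4 below):
#
#   demo_cipher = AES.new('0' * BLOCK_SIZE)
#   assert DecodeAES(demo_cipher, EncodeAES(demo_cipher, 'hello')) == 'hello'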
# ---------------------- Protocol Components Generation -----------------------
# ********* shared_secretKey **********
# 1) Generate a new shared_secretKey (if required)
if REQUIRES_SHARED_SECRET_GENERATION:
new_shared_secretKey = os.urandom(BLOCK_SIZE)
if LOGGING_DEBUG:
print 'Newly generated shared_secretKey:', new_shared_secretKey
# 2) Save new shared_secretKey to a file (if required)
if REQUIRES_SHARED_SECRET_GENERATION:
with open('shared_secretKey', 'w') as file_: file_.write(new_shared_secretKey)
if LOGGING_DEBUG:
print 'Successfully saved new shared_secretKey'
# 3) Read shared_secretKey from file
shared_secretKeyFile = open('shared_secretKey')
read_shared_secretKey = shared_secretKeyFile.read()
if LOGGING_DEBUG:
print 'shared_secretKey from file:', read_shared_secretKey
# 4) Create a cipher object using shared_secretKey
cipher = AES.new(read_shared_secretKey)
# *********** smartKey_id *************
# 5) Generate a new smartKey_id (if required)
if REQUIRES_SMARTKEY_ID_GENERATION:
new_smartKey_id = uuid.uuid4()
if LOGGING_DEBUG:
print 'Newly generated smartKey_id:', new_smartKey_id
# 6) Save new smartKey_id to a file (if required)
if REQUIRES_SMARTKEY_ID_GENERATION:
with open('smartKey_id', 'w') as file_: file_.write(str(new_smartKey_id))
if LOGGING_DEBUG:
print 'Successfully saved new smartKey_id'
# 7) Read smartKey_id from file
smartKey_idFile = open('smartKey_id')
read_smartKey_id = smartKey_idFile.read()
if LOGGING_DEBUG:
print 'smartKey_id from file:', read_smartKey_id
# ********** trusted_devices **********
# 8) Generate a new list of trusted_devices (if required)
if REQUIRES_TRUSTED_DEVICES_GENERATION:
new_trusted_devices = [uuid.uuid4(), uuid.uuid4(), uuid.uuid4(), uuid.uuid4()]
if LOGGING_DEBUG:
print 'Newly generated trusted_devices:', new_trusted_devices
# 9) Save new list of trusted_devices to a file (if required)
if REQUIRES_TRUSTED_DEVICES_GENERATION:
with open('trusted_devices', 'w') as file_:
for trusted_device in new_trusted_devices:
file_.write(str(trusted_device))
if LOGGING_DEBUG:
print 'Successfully saved new trusted_devices'
# -------------------------- Protocol Components ------------------------------
smartKey_id = read_smartKey_id
shared_secretKey = read_shared_secretKey
# Sample from phone key
sample_device_id = uuid.uuid4()
# List of trusted device_ids
if DEBUG_USE_SAMPLE_DEVICE_MESSAGE:
trusted_devices = [sample_device_id]
else:
# TODO: Place generated uuid's here
trusted_devices = ['850b6beb-58a9-486a-8de0-c68e25f2b619', '644c98e6-7e86-4bd1-a4f2-88fd9ed80407', '61f126e4-bbc3-4857-8857-56f54049a747', '3026a7f9-e640-4a02-9e62-8851020ab873']
# /////////////////////////////////////////////////////////////////////////////
# Client Response Thread
# /////////////////////////////////////////////////////////////////////////////
class ClientResponseThread (threading.Thread):
def __init__ (self, socket):
self.socket = socket
threading.Thread.__init__(self)
def run (self):
try:
# Generate timestamp for protocol
timestamp = time.time()
if LOGGING_DEBUG:
print 'timestamp:', timestamp
# Create decodedMsg: smartKey_id, timestamp
decodedMsg = smartKey_id + '-' + str(timestamp)
if LOGGING_DEBUG:
print 'decodedMsg:', decodedMsg
# Encode message: secretKey(smartKey_id, timestamp)
encodedMsg = EncodeAES(cipher, decodedMsg)
if LOGGING_DEBUG:
print 'encodedMsg:', encodedMsg
# Send response
self.socket.send (encodedMsg)
except:
print "Fatal error has occurred during client response. Closing socket..."
if LOGGING_ERROR:
e = sys.exc_info()
print 'Response Thread:', e
self.socket.close()
# /////////////////////////////////////////////////////////////////////////////
# Smart Key Service
# /////////////////////////////////////////////////////////////////////////////
# ---------------------- Bluetooth Socket Configuration -----------------------
server_sock = BluetoothSocket(RFCOMM)
server_sock.bind(("", PORT_ANY))
server_sock.listen(1)
port = server_sock.getsockname()[1]
uuid = "a60f35f0-b93a-11de-8a39-08002009c666"
advertise_service(server_sock, "SmartKey", service_id=uuid,
service_classes=[uuid, SERIAL_PORT_CLASS], profiles=[SERIAL_PORT_PROFILE],
)
if LOGGING_INFO:
print("Waiting for connection on RFCOMM channel %d" % port)
client_sock, client_info = server_sock.accept()
if LOGGING_INFO:
print("Accepted connection from ", client_info)
# ---------------------------- Bluetooth Service ------------------------------
try:
while True:
# Retrieve data sent by mobile device
data = client_sock.recv(1024)
if len(data) > 0:
if LOGGING_DEBUG:
print("Encoded message received from mobile device: [%s]" % data)
# ****** Verify the message came from trusted valid device ******
try:
# Decode encrypted message
if DEBUG_USE_SAMPLE_DEVICE_MESSAGE:
# In place until Android encryption is finished
decoded_device_id = sample_device_id
else:
decoded_device_id = DecodeAES(cipher, data)
if LOGGING_DEBUG:
print 'Decoded device_id', decoded_device_id
# Verify if device_id is in trusted_devices
if decoded_device_id in trusted_devices:
if LOGGING_INFO:
print decoded_device_id, 'verified as a trusted device. Sending response...'
ClientResponseThread(client_sock).start()
else:
if LOGGING_INFO:
print decoded_device_id, 'not found in list of trusted_devices'
except:
print 'Incorrect encrypted data format. Cannot be decoded.'
if LOGGING_ERROR:
e = sys.exc_info()
print 'Decode:', e
else:
if LOGGING_DEBUG:
print 'No data has been received'
except:
print 'Fatal error has occurred during bluetooth service. Disconnecting...'
if LOGGING_ERROR:
e = sys.exc_info()
print 'Bluetooth Service:', e
# --------------------------------- Exiting -----------------------------------
print("Smart Key Bluetooth Service has stopped.")
# Close all sockets
client_sock.close()
server_sock.close()
print("Please Restart service.")
|
|
# Copyright 2015 ETH Zurich
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
:mod:`lib_path_store_test` --- lib.path_store unit tests
==========================================================================
"""
# Stdlib
import math
from unittest.mock import patch, MagicMock
# External packages
import nose
import nose.tools as ntools
# SCION
from lib.errors import SCIONPathPolicyViolated
from lib.packet.pcb import PathSegment
from lib.path_store import (
PathPolicy,
PathStore,
PathStoreRecord
)
from test.testcommon import create_mock, create_mock_full
class TestPathPolicyCheckFilters(object):
"""
Unit tests for lib.path_store.PathPolicy.check_filters
"""
def _setup(self, unwanted=None, reasons=None, remote_ia=None):
inst = PathPolicy()
inst._check_unwanted_ases = create_mock()
inst._check_unwanted_ases.return_value = unwanted
inst._check_property_ranges = create_mock()
inst._check_property_ranges.return_value = reasons
inst._check_remote_ifid = create_mock()
inst._check_remote_ifid.return_value = remote_ia
pcb = create_mock(["short_desc"], class_=PathSegment)
return inst, pcb
def test_basic(self):
inst, pcb = self._setup()
# Call
inst.check_filters(pcb)
def test_unwanted_ases(self):
inst, pcb = self._setup("unwanted AS")
# Call
ntools.assert_raises(SCIONPathPolicyViolated, inst.check_filters, pcb)
def test_property_ranges(self):
inst, pcb = self._setup(reasons="reasons")
ntools.assert_raises(SCIONPathPolicyViolated, inst.check_filters, pcb)
class TestPathPolicyCheckPropertyRanges(object):
"""
Unit tests for lib.path_store.PathPolicy._check_property_ranges
"""
def _setup(self, max_bw=20):
inst = PathPolicy()
inst.property_ranges = {
'PeerLinks': [0, 1], 'HopsLength': [0, 1], 'DelayTime': [0, 1],
'GuaranteedBandwidth': [0, max_bw],
'AvailableBandwidth': [0, max_bw], 'TotalBandwidth': [0, max_bw]
}
pcb = create_mock(["get_n_peer_links", "get_n_hops", "get_timestamp"])
return inst, pcb
@patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
def test_success(self, get_time):
inst, pcb = self._setup()
pcb.get_n_peer_links.return_value = 0.5
pcb.get_n_hops.return_value = 0.5
pcb.get_timestamp.return_value = 0.5
# Call
ntools.eq_(inst._check_property_ranges(pcb), [])
@patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
def test_failure(self, get_time):
inst, pcb = self._setup(max_bw=9)
pcb.get_n_peer_links.return_value = 2
pcb.get_n_hops.return_value = -1
pcb.get_timestamp.return_value = -0.1
# Call
ntools.eq_(len(inst._check_property_ranges(pcb)), 6)
@patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
def test_no_checks(self, get_time):
inst, pcb = self._setup(max_bw=9)
for key in inst.property_ranges:
inst.property_ranges[key] = []
pcb.get_n_peer_links.return_value = 2
pcb.get_n_hops.return_value = -1
pcb.get_timestamp.return_value = -0.1
# Call
ntools.eq_(inst._check_property_ranges(pcb), [])
class TestPathPolicyParseDict(object):
"""
Unit tests for lib.path_store.PathPolicy.parse_dict
"""
def test_basic(self):
dict_ = {}
dict_['BestSetSize'] = "best_set_size"
dict_['CandidatesSetSize'] = "candidates_set_size"
dict_['HistoryLimit'] = "history_limit"
dict_['UpdateAfterNumber'] = "update_after_number"
dict_['UpdateAfterTime'] = "update_after_time"
dict_['UnwantedASes'] = "1-ff00:0:311,2-ff00:0:312"
dict_['PropertyRanges'] = {'key1': "1-11", 'key2': "2-12", 'DelayTime': "0-100"}
dict_['PropertyWeights'] = "property_weights"
pth_pol2 = PathPolicy()
pth_pol2.parse_dict(dict_)
ntools.eq_(pth_pol2.best_set_size, "best_set_size")
ntools.eq_(pth_pol2.candidates_set_size, "candidates_set_size")
ntools.eq_(pth_pol2.history_limit, "history_limit")
ntools.eq_(pth_pol2.update_after_number, "update_after_number")
ntools.eq_(pth_pol2.update_after_time, "update_after_time")
ntools.eq_(pth_pol2.property_ranges, {
'key1': (1, 11), 'key2': (2, 12), 'DelayTime': (-1, 101),
})
ntools.eq_(pth_pol2.property_weights, "property_weights")
class TestPathStoreRecordInit(object):
"""
Unit tests for lib.path_store.PathStoreRecord.__init__
"""
@patch("lib.path_store.PathStoreRecord.update", autospec=True)
@patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
def test(self, get_time, update):
pcb = create_mock(['get_hops_hash', 'get_n_hops', 'get_n_peer_links'],
class_=PathSegment)
get_time.return_value = PathStoreRecord.DEFAULT_OFFSET + 1
# Call
inst = PathStoreRecord(pcb)
# Tests
ntools.eq_(inst.id, pcb.get_hops_hash.return_value)
ntools.eq_(inst.peer_links, pcb.get_n_peer_links.return_value)
ntools.eq_(inst.hops_length, pcb.get_n_hops.return_value)
ntools.eq_(inst.fidelity, 0)
ntools.eq_(inst.disjointness, 0)
ntools.eq_(inst.last_sent_time, 1)
ntools.eq_(inst.guaranteed_bandwidth, 0)
ntools.eq_(inst.available_bandwidth, 0)
ntools.eq_(inst.total_bandwidth, 0)
update.assert_called_once_with(inst, pcb)
class TestPathStoreRecordUpdate(object):
"""
Unit tests for lib.path_store.PathStoreRecord.update
"""
@patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
@patch("lib.path_store.PathStoreRecord.__init__", autospec=True,
return_value=None)
def test(self, init, get_time):
inst = PathStoreRecord("pcb")
get_time.return_value = 100
pcb = create_mock(["copy", "get_hops_hash", "get_timestamp",
"get_expiration_time"])
inst.id = pcb.get_hops_hash.return_value
pcb.get_timestamp.return_value = 95
# Call
inst.update(pcb)
# Tests
pcb.copy.assert_called_once_with()
ntools.eq_(inst.delay_time, 5)
ntools.eq_(inst.last_seen_time, 100)
ntools.eq_(inst.expiration_time, pcb.get_expiration_time.return_value)
class TestPathStoreRecordUpdateFidelity(object):
"""
Unit tests for lib.path_store.PathStoreRecord.update_fidelity
"""
@patch("lib.path_store.SCIONTime.get_time", new_callable=create_mock)
@patch("lib.path_store.PathStoreRecord.__init__", autospec=True,
return_value=None)
def test_basic(self, init, time_):
path_policy = PathPolicy()
path_policy.property_weights['PeerLinks'] = 10
path_policy.property_weights['HopsLength'] = 1
path_policy.property_weights['Disjointness'] = 2
path_policy.property_weights['LastSentTime'] = 3
path_policy.property_weights['LastSeenTime'] = 4
path_policy.property_weights['DelayTime'] = 5
path_policy.property_weights['ExpirationTime'] = 6
path_policy.property_weights['GuaranteedBandwidth'] = 7
path_policy.property_weights['AvailableBandwidth'] = 8
path_policy.property_weights['TotalBandwidth'] = 9
pth_str_rec = PathStoreRecord("pcb")
pth_str_rec.peer_links = 10 ** 5
pth_str_rec.hops_length = (1 / (10 ** 4))
pth_str_rec.disjointness = 10 ** 3
pth_str_rec.last_sent_time = -99
pth_str_rec.last_seen_time = 10
pth_str_rec.delay_time = 1
pth_str_rec.expiration_time = 10 / 9
pth_str_rec.guaranteed_bandwidth = 10 ** -2
pth_str_rec.available_bandwidth = 10 ** -3
pth_str_rec.total_bandwidth = 10 ** -4
time_.return_value = 1
pth_str_rec.update_fidelity(path_policy)
ntools.assert_almost_equal(pth_str_rec.fidelity, 1012345.6789)
class TestPathStoreAddSegment(object):
"""
Unit tests for lib.path_store.PathStore.add_segment
"""
def _setup(self, filter_=True):
inst = PathStore("path_policy")
inst.path_policy = create_mock(["check_filters"])
if not filter_:
inst.path_policy.check_filters.side_effect = SCIONPathPolicyViolated()
pcb = create_mock(["get_hops_hash", "get_timestamp"],
class_=PathSegment)
return inst, pcb
@patch("lib.path_store.PathStore.__init__", autospec=True,
return_value=None)
def test_filters(self, psi):
"""
Try to add a path that does not meet the filter requirements.
"""
inst, pcb = self._setup(filter_=False)
# Call
inst.add_segment(pcb)
# Tests
inst.path_policy.check_filters.assert_called_once_with(pcb)
@patch("lib.path_store.PathStore.__init__", autospec=True,
return_value=None)
def test_already_in_store(self, init):
"""
Try to add a path that is already in the path store.
"""
inst, pcb = self._setup()
candidate = create_mock(['id', 'update'])
candidate.id = pcb.get_hops_hash.return_value
inst.candidates = [candidate]
# Call
inst.add_segment(pcb)
# Tests
candidate.update.assert_called_once_with(pcb)
@patch("lib.path_store.PathStoreRecord", autospec=True)
@patch("lib.path_store.PathStore.__init__", autospec=True,
return_value=None)
def test_adding(self, psi, psr):
"""
Add a single path segment to the set of candidate paths.
"""
inst, pcb = self._setup()
inst.candidates = []
inst._trim_candidates = create_mock()
# Call
inst.add_segment(pcb)
# Tests
ntools.eq_(inst.candidates, [psr.return_value])
inst._trim_candidates.assert_called_once_with()
class TestPathStoreTrimCandidates(object):
"""
Unit tests for lib.path_store.PathStore._trim_candidates
"""
@patch("lib.path_store.PathStore.__init__", autospec=True,
return_value=None)
def test_expire_paths(self, psi):
"""
Test trimming the size of the candidate set by removing an expired
segment.
"""
pth_str = PathStore("path_policy")
pth_str.path_policy = MagicMock(spec_set=['candidates_set_size'])
pth_str.path_policy.candidates_set_size = 0
pth_str.candidates = [0]
pth_str._remove_expired_segments = (
lambda: pth_str.candidates.pop())
pth_str._trim_candidates()
ntools.eq_(pth_str.candidates, [])
@patch("lib.path_store.PathStore.__init__", autospec=True,
return_value=None)
def test_remove_low_fidelity_path(self, psi):
"""
Add a path, find that the candidate set size is too large, and remove
the lowest-fidelity path.
"""
pth_str = PathStore("path_policy")
pth_str.path_policy = MagicMock(spec_set=['candidates_set_size'])
pth_str.path_policy.candidates_set_size = 2
pth_str.candidates = [create_mock(['fidelity']) for i in range(3)]
pth_str.candidates[0].fidelity = 2
pth_str.candidates[1].fidelity = 0
pth_str.candidates[2].fidelity = 1
remainder = [pth_str.candidates[0], pth_str.candidates[2]]
pth_str._remove_expired_segments = create_mock()
pth_str._update_all_fidelity = create_mock()
pth_str._trim_candidates()
pth_str._remove_expired_segments.assert_called_once_with()
pth_str._update_all_fidelity.assert_called_once_with()
ntools.eq_(pth_str.candidates, remainder)
class TestPathStoreUpdateDisjointnessDB(object):
"""
Unit tests for lib.path_store._update_disjointness_db
"""
@patch("lib.path_store.SCIONTime.get_time", spec_set=[],
new_callable=MagicMock)
def test_basic(self, time_):
path_policy = MagicMock(spec_set=['history_limit'])
path_policy.history_limit = 3
pth_str = PathStore(path_policy)
pth_str.disjointness = {0: math.e, 1: math.e**2}
pth_str.last_dj_update = 22
time_.return_value = 23
pth_str._update_disjointness_db()
ntools.eq_(pth_str.last_dj_update, time_.return_value)
ntools.assert_almost_equal(pth_str.disjointness[0], 1.0)
ntools.assert_almost_equal(pth_str.disjointness[1], math.e)
class TestPathStoreUpdateAllDisjointness(object):
"""
Unit tests for lib.path_store._update_all_disjointness
"""
def test(self):
inst = PathStore(create_mock_full({'history_limit': 3}))
numCandidates = 5
pathLength = 5
inst.candidates = []
inst.disjointness = {}
for i in range(numCandidates):
id_ = i * (2 * pathLength + 1)
asms = []
for j in range(pathLength):
isdas = 9, id_ + j + 1
hof = create_mock_full({'egress_if': isdas[1] + pathLength})
pcbm = create_mock_full({'hof()': hof})
asms.append(create_mock_full({
"isd_as()": isdas, "pcbm()": pcbm}))
inst.disjointness[isdas[1]] = 1.0
inst.disjointness[hof.egress_if] = 1.0
pcb = create_mock_full({"iter_asms()": asms})
record = create_mock_full(
{'pcb': pcb, 'disjointness': 0, 'id': id_})
inst.disjointness[id_] = 1.0
inst.candidates.append(record)
inst._update_disjointness_db = create_mock()
inst._update_all_disjointness()
for i in range(numCandidates):
ntools.assert_almost_equal(inst.candidates[i].disjointness, 1.0)
class TestPathStoreUpdateAllDelayTime(object):
"""
Unit tests for lib.path_store._update_all_delay_time
"""
def test_basic(self):
path_policy = MagicMock(spec_set=['history_limit'])
path_policy.history_limit = 3
pth_str = PathStore(path_policy)
pth_str.candidates = [MagicMock(spec_set=['pcb', 'delay_time',
'last_seen_time'])
for i in range(5)]
for i in range(5):
pcb = MagicMock(spec_set=['get_timestamp'])
pcb.get_timestamp.return_value = 0
pth_str.candidates[i].pcb = pcb
pth_str.candidates[i].last_seen_time = 2 * i + 2
pth_str._update_all_delay_time()
for i in range(5):
pth_str.candidates[i].pcb.get_timestamp.assert_called_once_with()
ntools.assert_almost_equal(pth_str.candidates[i].delay_time,
((2 * i + 2) / 10))
def test_pcbs_from_future(self):
path_policy = MagicMock(spec_set=['history_limit'])
path_policy.history_limit = 3
pth_str = PathStore(path_policy)
pth_str.candidates = [MagicMock(spec_set=['pcb', 'delay_time',
'last_seen_time'])
for i in range(5)]
for i in range(5):
pcb = MagicMock(spec_set=['get_timestamp'])
pcb.get_timestamp.return_value = 2
pth_str.candidates[i].pcb = pcb
pth_str.candidates[i].last_seen_time = i
pth_str._update_all_delay_time()
for i in range(5):
pth_str.candidates[i].pcb.get_timestamp.assert_called_once_with()
ntools.assert_true(pth_str.candidates[i].delay_time > 0)
class TestPathStoreUpdateAllFidelity(object):
"""
Unit tests for lib.path_store._update_all_fidelity
"""
def test_basic(self):
path_policy = MagicMock(spec_set=['history_limit'])
path_policy.history_limit = 3
pth_str = PathStore(path_policy)
pth_str._update_all_disjointness = MagicMock(spec_set=[])
pth_str._update_all_delay_time = MagicMock(spec_set=[])
pth_str.candidates = [MagicMock(spec_set=['update_fidelity'])
for i in range(5)]
pth_str._update_all_fidelity()
pth_str._update_all_disjointness.assert_called_once_with()
pth_str._update_all_delay_time.assert_called_once_with()
for i in range(5):
pth_str.candidates[i].update_fidelity.assert_called_once_with(
path_policy)
class TestPathStoreGetBestSegments(object):
"""
Unit tests for lib.path_store.PathStore.get_best_segments
"""
def _setup(self):
inst = PathStore("path_policy")
inst._remove_expired_segments = create_mock()
inst._update_all_fidelity = create_mock()
inst.candidates = []
for i, fidelity in enumerate([0, 5, 2, 6, 3]):
candidate = create_mock(["pcb", "fidelity", "sending"])
candidate.pcb = "pcb%d" % i
candidate.fidelity = fidelity
inst.candidates.append(candidate)
return inst
@patch("lib.path_store.PathStore.__init__", autospec=True,
return_value=None)
def test_full(self, init):
inst = self._setup()
# Call
ntools.eq_(inst.get_best_segments(k=3, sending=False),
["pcb3", "pcb1", "pcb4"])
# Tests
inst._remove_expired_segments.assert_called_once_with()
inst._update_all_fidelity.assert_called_once_with()
for i in inst.candidates:
ntools.assert_false(i.sending.called)
@patch("lib.path_store.PathStore.__init__", autospec=True,
return_value=None)
def test_less_arg(self, init):
inst = self._setup()
inst.path_policy = create_mock(["best_set_size"])
inst.path_policy.best_set_size = 1
# Call
ntools.eq_(inst.get_best_segments(), ["pcb3"])
# Tests
for i in inst.candidates:
if i.fidelity == 6:
i.sending.assert_called_once_with()
else:
ntools.assert_false(i.sending.called)
class TestPathStoreGetLatestHistorySnapshot(object):
"""
Unit tests for lib.path_store.get_latest_history_snapshot
"""
def _setup(self, attrs=None):
def_attrs = {'history_limit': 3}
if attrs:
def_attrs.update(attrs)
path_policy = MagicMock(spec_set=list(def_attrs.keys()))
path_policy.history_limit = 3
for k, v in def_attrs.items():
setattr(path_policy, k, v)
return path_policy
def test_basic(self):
pth_str = PathStore(self._setup())
pth_str.best_paths_history = []
pth_str.best_paths_history.append([MagicMock(spec_set=['pcb'])
for i in range(5)])
for i in range(5):
pth_str.best_paths_history[0][i].pcb = i
ntools.eq_(pth_str.get_latest_history_snapshot(3), [0, 1, 2])
def test_less_arg(self):
pth_str = PathStore(self._setup({'best_set_size': 4}))
pth_str.best_paths_history = []
pth_str.best_paths_history.append([MagicMock(spec_set=['pcb'])
for i in range(5)])
for i in range(5):
pth_str.best_paths_history[0][i].pcb = i
ntools.eq_(pth_str.get_latest_history_snapshot(), [0, 1, 2, 3])
def test_false(self):
pth_str = PathStore(self._setup())
ntools.eq_(pth_str.get_latest_history_snapshot(3), [])
class TestPathStoreRemoveExpiredSegments(object):
"""
Unit tests for lib.path_store._remove_expired_segments
"""
@patch("lib.path_store.SCIONTime.get_time", spec_set=[],
new_callable=MagicMock)
def test_basic(self, time_):
path_policy = MagicMock(spec_set=['history_limit'])
path_policy.history_limit = 3
pth_str = PathStore(path_policy)
pth_str.candidates = [MagicMock(spec_set=['expiration_time', 'id'])
for i in range(5)]
for i in range(5):
pth_str.candidates[i].expiration_time = i
pth_str.candidates[i].id = i
time_.return_value = 2
pth_str.remove_segments = MagicMock(spec_set=[])
pth_str._remove_expired_segments()
pth_str.remove_segments.assert_called_once_with([0, 1, 2])
class TestPathStoreRemoveSegments(object):
"""
Unit tests for lib.path_store.remove_segments
"""
def setUp(self):
self.path_policy = MagicMock(spec_set=['history_limit'])
self.path_policy.history_limit = 3
def tearDown(self):
del self.path_policy
def test_basic(self):
pth_str = PathStore(self.path_policy)
pth_str.candidates = [MagicMock(spec_set=['id', 'fidelity'])
for i in range(5)]
for i in range(5):
pth_str.candidates[i].id = i
pth_str.candidates[i].fidelity = i
pth_str._update_all_fidelity = MagicMock(spec_set=[])
pth_str.remove_segments([1, 2, 3])
ntools.eq_(len(pth_str.candidates), 2)
ntools.eq_(pth_str.candidates[0].id, 4)
ntools.eq_(pth_str.candidates[1].id, 0)
pth_str._update_all_fidelity.assert_called_once_with()
def test_none(self):
pth_str = PathStore(self.path_policy)
pth_str.candidates = [MagicMock(spec_set=['id']) for i in range(5)]
for i in range(5):
pth_str.candidates[i].id = i
pth_str.remove_segments([0, 1, 2, 3, 4])
ntools.eq_(pth_str.candidates, [])
class TestPathStoreGetSegment(object):
"""
Unit tests for lib.path_store.get_segment
"""
def setUp(self):
self.path_policy = MagicMock(spec_set=['history_limit'])
self.path_policy.history_limit = 3
def tearDown(self):
del self.path_policy
def test_basic(self):
pth_str = PathStore(self.path_policy)
pth_str.candidates = [MagicMock(spec_set=['id', 'pcb'])
for i in range(5)]
for i in range(5):
pth_str.candidates[i].id = i
pth_str.candidates[i].pcb = i
ntools.eq_(pth_str.get_segment(2), 2)
def test_not_present(self):
pth_str = PathStore(self.path_policy)
pth_str.candidates = [MagicMock(spec_set=['id']) for i in range(5)]
ntools.assert_is_none(pth_str.get_segment(2))
if __name__ == "__main__":
nose.run(defaultTest=__name__)
|
|
#!/usr/bin/python
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
#
# Automatically update TensorFlow version in source files
#
# Usage:
# ./tensorflow/tools/ci_build/update_version.py --version 1.4.0-rc1
# ./tensorflow/tools/ci_build/update_version.py --nightly
#
"""Update version of TensorFlow script."""
# pylint: disable=superfluous-parens
import argparse
import os
import re
import subprocess
import time
# File parameters.
TF_SRC_DIR = "tensorflow"
VERSION_H = "%s/core/public/version.h" % TF_SRC_DIR
SETUP_PY = "%s/tools/pip_package/setup.py" % TF_SRC_DIR
README_MD = "./README.md"
TENSORFLOW_BZL = "%s/tensorflow.bzl" % TF_SRC_DIR
RELEVANT_FILES = [TF_SRC_DIR, VERSION_H, SETUP_PY, README_MD, TENSORFLOW_BZL]
# Version type parameters.
NIGHTLY_VERSION = 1
REGULAR_VERSION = 0
def check_existence(filename):
"""Check the existence of file or dir."""
if not os.path.exists(filename):
raise RuntimeError("%s not found. Are you under the TensorFlow source root"
" directory?")
def check_all_files():
"""Check all relevant files necessary for upgrade."""
for file_name in RELEVANT_FILES:
check_existence(file_name)
def replace_string_in_line(search, replace, filename):
"""Replace with sed when regex is required."""
with open(filename, "r") as source:
content = source.read()
with open(filename, "w") as source:
source.write(re.sub(search, replace, content))
class Version(object):
"""Version class object that stores SemVer version information."""
def __init__(self, major, minor, patch, identifier_string, version_type):
"""Constructor.
Args:
major: major string eg. (1)
minor: minor string eg. (3)
patch: patch string eg. (1)
identifier_string: extension string eg. (-rc0)
version_type: version parameter ((REGULAR|NIGHTLY)_VERSION)
"""
self.major = major
self.minor = minor
self.patch = patch
self.identifier_string = identifier_string
self.version_type = version_type
self._update_string()
def _update_string(self):
self.string = "%s.%s.%s%s" % (self.major,
self.minor,
self.patch,
self.identifier_string)
def __str__(self):
return self.string
def set_identifier_string(self, identifier_string):
self.identifier_string = identifier_string
self._update_string()
@property
def pep_440_str(self):
if self.version_type == REGULAR_VERSION:
return_string = "%s.%s.%s%s" % (self.major,
self.minor,
self.patch,
self.identifier_string)
return return_string.replace("-", "")
else:
return_string = "%s.%s.%s" % (self.major,
self.minor,
self.identifier_string)
return return_string.replace("-", "")
@staticmethod
def parse_from_string(string, version_type):
"""Returns version object from Semver string.
Args:
string: version string
version_type: version parameter
Raises:
RuntimeError: If the version string is not valid.
"""
# Check validity of new version string.
if not re.search(r"[0-9]+\.[0-9]+\.[a-zA-Z0-9]+", string):
raise RuntimeError("Invalid version string: %s" % string)
major, minor, extension = string.split(".", 2)
# Isolate patch and identifier string if identifier string exists.
extension_split = extension.split("-", 1)
patch = extension_split[0]
if len(extension_split) == 2:
identifier_string = "-" + extension_split[1]
else:
identifier_string = ""
return Version(major,
minor,
patch,
identifier_string,
version_type)
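# Illustrative usage (assumed input string):
#   >>> v = Version.parse_from_string("1.4.0-rc1", REGULAR_VERSION)
#   >>> (v.major, v.minor, v.patch, v.identifier_string)
#   ('1', '4', '0', '-rc1')
#   >>> str(v)
#   '1.4.0-rc1'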
def get_current_semver_version():
"""Returns a Version object of current version.
Returns:
version: Version object of current SemVer string based on information from
core/public/version.h
"""
# Get current version information.
version_file = open(VERSION_H, "r")
for line in version_file:
major_match = re.search("^#define TF_MAJOR_VERSION ([0-9]+)", line)
minor_match = re.search("^#define TF_MINOR_VERSION ([0-9]+)", line)
patch_match = re.search("^#define TF_PATCH_VERSION ([0-9]+)", line)
extension_match = re.search("^#define TF_VERSION_SUFFIX \"(.*)\"", line)
if major_match:
old_major = major_match.group(1)
if minor_match:
old_minor = minor_match.group(1)
if patch_match:
old_patch_num = patch_match.group(1)
if extension_match:
old_extension = extension_match.group(1)
break
if "dev" in old_extension:
version_type = NIGHTLY_VERSION
else:
version_type = REGULAR_VERSION
return Version(old_major,
old_minor,
old_patch_num,
old_extension,
version_type)
def update_version_h(old_version, new_version):
"""Update tensorflow/core/public/version.h."""
replace_string_in_line("#define TF_MAJOR_VERSION %s" % old_version.major,
"#define TF_MAJOR_VERSION %s" % new_version.major,
VERSION_H)
replace_string_in_line("#define TF_MINOR_VERSION %s" % old_version.minor,
"#define TF_MINOR_VERSION %s" % new_version.minor,
VERSION_H)
replace_string_in_line("#define TF_PATCH_VERSION %s" % old_version.patch,
"#define TF_PATCH_VERSION %s" % new_version.patch,
VERSION_H)
replace_string_in_line(
"#define TF_VERSION_SUFFIX \"%s\"" % old_version.identifier_string,
"#define TF_VERSION_SUFFIX \"%s\"" % new_version.identifier_string,
VERSION_H)
def update_setup_dot_py(old_version, new_version):
"""Update setup.py."""
replace_string_in_line("_VERSION = '%s'" % old_version.string,
"_VERSION = '%s'" % new_version.string, SETUP_PY)
def update_readme(old_version, new_version):
"""Update README."""
pep_440_str = new_version.pep_440_str
replace_string_in_line(r"%s\.%s\.([[:alnum:]]+)-" % (old_version.major,
old_version.minor),
"%s-" % pep_440_str, README_MD)
def update_tensorflow_bzl(old_version, new_version):
"""Update tensorflow.bzl."""
old_mmp = "%s.%s.%s" % (old_version.major, old_version.minor,
old_version.patch)
new_mmp = "%s.%s.%s" % (new_version.major, new_version.minor,
new_version.patch)
replace_string_in_line('VERSION = "%s"' % old_mmp,
'VERSION = "%s"' % new_mmp, TENSORFLOW_BZL)
def major_minor_change(old_version, new_version):
"""Check if a major or minor change occurred."""
major_mismatch = old_version.major != new_version.major
minor_mismatch = old_version.minor != new_version.minor
if major_mismatch or minor_mismatch:
return True
return False
def check_for_lingering_string(lingering_string):
"""Check for given lingering strings."""
formatted_string = lingering_string.replace(".", r"\.")
try:
linger_str_output = subprocess.check_output(
["grep", "-rnoH", formatted_string, TF_SRC_DIR])
linger_strs = linger_str_output.decode("utf8").split("\n")
except subprocess.CalledProcessError:
linger_strs = []
if linger_strs:
print("WARNING: Below are potentially instances of lingering old version "
"string \"%s\" in source directory \"%s/\" that are not "
"updated by this script. Please check them manually!"
% (lingering_string, TF_SRC_DIR))
for linger_str in linger_strs:
print(linger_str)
else:
print("No lingering old version strings \"%s\" found in source directory"
" \"%s/\". Good." % (lingering_string, TF_SRC_DIR))
def check_for_old_version(old_version, new_version):
"""Check for old version references."""
for old_ver in [old_version.string, old_version.pep_440_str]:
check_for_lingering_string(old_ver)
if major_minor_change(old_version, new_version):
old_r_major_minor = "r%s.%s" % (old_version.major, old_version.minor)
check_for_lingering_string(old_r_major_minor)
def main():
"""This script updates all instances of version in the tensorflow directory.
Requirements:
version: The version tag
OR
nightly: Create a nightly tag with current date
Raises:
RuntimeError: If the script is not being run from tf source dir
"""
parser = argparse.ArgumentParser(description="Cherry picking automation.")
# Arg information
parser.add_argument("--version",
help="<new_major_ver>.<new_minor_ver>.<new_patch_ver>",
default="")
parser.add_argument("--nightly",
help="disable the service provisioning step",
action="store_true")
args = parser.parse_args()
check_all_files()
old_version = get_current_semver_version()
if args.nightly:
if args.version:
new_version = Version.parse_from_string(args.version, NIGHTLY_VERSION)
new_version.set_identifier_string("-dev" + time.strftime("%Y%m%d"))
else:
new_version = Version(old_version.major,
str(old_version.minor),
old_version.patch,
"-dev" + time.strftime("%Y%m%d"),
NIGHTLY_VERSION)
else:
new_version = Version.parse_from_string(args.version, REGULAR_VERSION)
update_version_h(old_version, new_version)
update_setup_dot_py(old_version, new_version)
update_readme(old_version, new_version)
update_tensorflow_bzl(old_version, new_version)
# Print transition details.
print("Major: %s -> %s" % (old_version.major, new_version.major))
print("Minor: %s -> %s" % (old_version.minor, new_version.minor))
print("Patch: %s -> %s\n" % (old_version.patch, new_version.patch))
check_for_old_version(old_version, new_version)
if __name__ == "__main__":
main()
|
|
#!/usr/bin/python
import logging
class NullHandler(logging.Handler):
def emit(self, record):
pass
log = logging.getLogger('NetworkStateAnalyzer')
log.setLevel(logging.ERROR)
log.addHandler(NullHandler())
import inspect
import threading
from DustLinkData import DustLinkData
from EventBus import EventBusClient
from SmartMeshSDK import FormatUtils
class NetworkStateAnalyzer(EventBusClient.EventBusClient):
QUEUESIZE = 100
def __init__(self,connectParams):
# log
log.info("creating instance")
# store params
self.connectParams = connectParams
# local variables
self.netname = FormatUtils.formatConnectionParams(self.connectParams)
self.busyTesting = threading.Lock()
# initialize parent class
EventBusClient.EventBusClient.__init__(self,
signal = 'snapShotEnd_{0}'.format(self.netname),
cb = self._ebHandler_snapShotEnd,
teardown_cb = self._cleanup,
queuesize = self.QUEUESIZE,
)
self.name = '{0}_NetworkStateAnalyzer'.format(self.netname)
def _cleanup(self):
pass
#======================== public ==========================================
#======================== eventBus handlers ===============================
#===== snapShotEnd
def _ebHandler_snapShotEnd(self,sender,signal,data):
dld = DustLinkData.DustLinkData()
testsFailed = False
# discover and run all the network tests
with self.busyTesting:
for f in dir(self):
if f.startswith("_nettest_"):
# execute the test
(outcome,description) = getattr(self,f)()
assert outcome in dld.TEST_OUTCOME_ALL
assert type(description)==str
# remember if test failed
if outcome==dld.TEST_OUTCOME_FAIL:
testsFailed = True
# log
if log.isEnabledFor(logging.DEBUG):
log.debug('testResult outcome={0} description={1}'.format(outcome,description))
# dispatch
self._dispatch(
signal = 'testResult_{0}'.format(self.netname),
data = {
'testName': f,
'testDesc': getattr(self,f).__doc__,
'outcome': outcome,
'description': description,
},
)
# write to banner if tests failed
if testsFailed:
dld._setBanner(
'some tests failed for network <a href="/health/_{0}">{1}</a>'.format(
FormatUtils.quote(self.netname),
self.netname
)
)
#======================== network tests ===================================
#===== network availability
MIN_NETWORKAVAILABILITY = 0.99
def _nettest_networkAvailability(self):
'''
<p>
This test verifies that the overall network availability is above
MIN_NETWORKAVAILABILITY.
</p>
<p>
The network availability is the portion of the packets generated by
the motes' apps that were actually sent into the network. If the
protocol stack is busy, it will reject the application's data,
resulting in a lower availability.
</p>
<p>
This test is run once for the whole network.
</p>
'''
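# Worked example (illustrative numbers, not from a real network):
#   >>> 1 - float(50) / float(10000)
#   0.995
# 0.995 >= MIN_NETWORKAVAILABILITY (0.99), so such a network would PASS.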
dld = DustLinkData.DustLinkData()
descPASS = []
descFAIL = []
descNOTRUN = []
# count the network-wide number of packets generated/failed
numTxOk = 0
numTxFail = 0
for mac in dld.getNetworkMotes(self.netname):
moteinfo = dld.getMoteInfo(mac)
if (('numTxOk' in moteinfo) and ('numTxFail' in moteinfo)):
numTxOk += moteinfo['numTxOk']
numTxFail += moteinfo['numTxFail']
# stop here if both counters are 0
if not numTxOk:
descNOTRUN += ['This test could not run because no packets were sent in the network (yet?) (numTxOk=={0} for the network) and so it is impossible to calculate a ratio.'.format(numTxOk)]
if not descNOTRUN:
# calculate resulting network availability
networkAvailability = (1-float(numTxFail)/float(numTxOk))
# make sure about threshold
if (networkAvailability>=self.MIN_NETWORKAVAILABILITY):
descPASS += [
'networkAvailability={0} is better than the expected {1}'.format(
networkAvailability,
self.MIN_NETWORKAVAILABILITY
)
]
else:
descFAIL += [
'networkAvailability={0} is below the expected {1}'.format(
networkAvailability,
self.MIN_NETWORKAVAILABILITY
)
]
# decide outcome and write report
if descNOTRUN:
outcome = dld.TEST_OUTCOME_NOTRUN
description = ''.join(descNOTRUN)
elif descPASS:
outcome = dld.TEST_OUTCOME_PASS
description = ''.join(descPASS)
elif descFAIL:
outcome = dld.TEST_OUTCOME_FAIL
description = ''.join(descFAIL)
# return test result
return (outcome,description)
#===== network reliability
MIN_NETWORKRELIABILITY = 0.999
def _nettest_networkReliability(self):
'''
<p>
This test verifies that the overall network reliability is above
MIN_NETWORKRELIABILITY.
</p>
<p>
The network reliability is the portion of the packets injected into
the network that were received by their final destination. If the
network loses data, the network reliability goes down.
</p>
<p>
This test is run once for the whole network.
</p>
'''
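# Worked example (illustrative numbers, not from a real network):
#   >>> 1 - float(2) / float(2 + 1998)
#   0.999
# 0.999 >= MIN_NETWORKRELIABILITY (0.999), so such a network would PASS.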
dld = DustLinkData.DustLinkData()
descPASS = []
descFAIL = []
descNOTRUN = []
# count the network-wide number of packets generated/lost
numPktsGenerated = 0
numPktsLost = 0
for mac in dld.getNetworkMotes(self.netname):
moteinfo = dld.getMoteInfo(mac)
if ('packetsReceived' in moteinfo):
numPktsGenerated += moteinfo['packetsReceived']
if ('packetsLost' in moteinfo):
numPktsLost += moteinfo['packetsLost']
# stop here if both counters are 0
if (not numPktsGenerated) and (not numPktsLost):
descNOTRUN += [
'This test could not run because numPktsGenerated=={0} and numPktsLost=={1} and so it\'s impossible to calculate a ratio.'.format(
numPktsGenerated,
numPktsLost,
)
]
if not descNOTRUN:
# calculate resulting network reliability
networkReliability = (1-float(numPktsLost)/float(numPktsLost + numPktsGenerated))
# make sure about threshold
if (networkReliability>=self.MIN_NETWORKRELIABILITY):
descPASS += [
'networkReliability={0} is better than the expected {1}'.format(
networkReliability,
self.MIN_NETWORKRELIABILITY,
)
]
else:
descFAIL += [
'networkReliability={0} is below the expected {1}'.format(
networkReliability,
self.MIN_NETWORKRELIABILITY,
)
]
# decide outcome and write report
if descNOTRUN:
outcome = dld.TEST_OUTCOME_NOTRUN
description = ''.join(descNOTRUN)
elif descPASS:
outcome = dld.TEST_OUTCOME_PASS
description = ''.join(descPASS)
elif descFAIL:
outcome = dld.TEST_OUTCOME_FAIL
description = ''.join(descFAIL)
# return test result
return (outcome,description)
#===== multiple joins
def _nettest_multipleJoins(self):
'''
<p>
This test verifies that each mote has joined exactly once.
</p>
<p>
In a normal deployment, all motes should join exactly once. Joining
more than once may indicate a mote reset.
</p>
<p>
This test is run once for each node in the network (both AP and
mote).
</p>
'''
dld = DustLinkData.DustLinkData()
descPASS = []
descFAIL = []
descNOTRUN = []
# run test
motesExactlyOnce = []
motesNotExactlyOnce = []
motesNotRun = []
for mac in dld.getNetworkMotes(self.netname):
moteinfo = dld.getMoteInfo(mac)
if ('numOperationalEvents' in moteinfo):
if moteinfo['numOperationalEvents']==1:
descPASS += ['- {0} has numOperationalEvents=={1}'.format(
FormatUtils.formatMacString(mac),
moteinfo['numOperationalEvents'],
)
]
else:
descFAIL += ['- {0} has numOperationalEvents=={1}'.format(
FormatUtils.formatMacString(mac),
moteinfo['numOperationalEvents'],
)
]
else:
if (('state' in moteinfo) and (moteinfo['state']==4)):
descPASS += ['- {0} has no numOperationalEvents parameters, but its state is {1}'.format(
FormatUtils.formatMacString(mac),
moteinfo['state'],
)
]
else:
descNOTRUN += ['- {0} has neither numOperationalEvents, nor state attribute'.format(
FormatUtils.formatMacString(mac),
)
]
# decide outcome
if descFAIL:
outcome = dld.TEST_OUTCOME_FAIL
elif descPASS:
outcome = dld.TEST_OUTCOME_PASS
else:
outcome = dld.TEST_OUTCOME_NOTRUN
# write report
description = []
if descPASS:
description += ["PASS</b>:"]
description += descPASS
if descFAIL:
description += ["FAIL:"]
description += descFAIL
if descNOTRUN:
description += ["NOTRUN:"]
description += descNOTRUN
description = '<br/>'.join(description)
# return test result
return (outcome,description)
#===== number of links
MAX_AP_RXLINKS = 140
MAX_MOTE_LINKS = 180
def _nettest_numLinks(self):
'''
<p>
This test verifies that the number of links assigned to each mote
does not exceed the maximum limit.
</p>
<p>
The manager is never supposed to allocate more than MAX_AP_RXLINKS
receive links to the AP, nor more than MAX_MOTE_LINKS links (both
transmit and receive) for a non-AP mote.
</p>
<p>
This test is run once for each node in the network (both AP and
mote).
</p>
'''
dld = DustLinkData.DustLinkData()
descPASS = []
descFAIL = []
descNOTRUN = []
# get all the paths in the network
currentPaths = dld.getNetworkPaths(self.netname)
for mac in dld.getNetworkMotes(self.netname):
(numTx,numRx) = self._countTxRxLinks(currentPaths,mac)
moteinfo = dld.getMoteInfo(mac)
if moteinfo['isAP']:
if numRx<self.MAX_AP_RXLINKS:
descPASS += [
'AP {0} has {1} RX links, less than maximum {2}'.format(
FormatUtils.formatMacString(mac),
numRx,
self.MAX_AP_RXLINKS
)
]
else:
descFAIL += [
'AP {0} has {1} RX links, more than maximum {2}'.format(
FormatUtils.formatMacString(mac),
numRx,
self.MAX_AP_RXLINKS
)
]
else:
numLinks = numTx+numRx
if numLinks<self.MAX_MOTE_LINKS:
descPASS += [
'mote {0} has {1} links, less than maximum {2}'.format(
FormatUtils.formatMacString(mac),
numLinks,
self.MAX_MOTE_LINKS
)
]
else:
descFAIL += [
'mote {0} has {1} links, more than maximum {2}'.format(
FormatUtils.formatMacString(mac),
numLinks,
self.MAX_MOTE_LINKS
)
]
# decide outcome
if descFAIL:
outcome = dld.TEST_OUTCOME_FAIL
elif descPASS:
outcome = dld.TEST_OUTCOME_PASS
else:
outcome = dld.TEST_OUTCOME_NOTRUN
# write report
description = []
if descPASS:
description += ["PASS:"]
description += descPASS
if descFAIL:
description += ["FAIL:"]
description += descFAIL
if descNOTRUN:
description += ["NOTRUN:"]
description += descNOTRUN
description = '<br/>'.join(description)
# return test result
return (outcome,description)
def _countTxRxLinks(self,paths,mac):
numTx = 0
numRx = 0
dld = DustLinkData.DustLinkData()
for (fromMote,toMote) in paths:
if mac!=fromMote and mac!=toMote:
continue
pathInfo = dld.getPathInfo(self.netname,fromMote,toMote)
if mac==fromMote:
numTx += pathInfo['numLinks']
if mac==toMote:
numRx += pathInfo['numLinks']
return (numTx,numRx)
#===== number of good neighbors
MIN_NUMGOODNEIGHBORS = 3
def _nettest_numGoodNeighbors(self):
'''
<p>
This test verifies that each mote has enough good neighbors.
</p>
<p>
The manager can build a robust network if each mote in the network
has at least MIN_NUMGOODNEIGHBORS neighbors.
</p>
<p>
This test is run once for each mote in the network.
</p>
'''
dld = DustLinkData.DustLinkData()
descPASS = []
descFAIL = []
descNOTRUN = []
for mac in dld.getNetworkMotes(self.netname):
moteinfo = dld.getMoteInfo(mac)
if 'numGoodNbrs' not in moteinfo:
descNOTRUN += [
'This test could not run because mote {0} did not report any numGoodNbrs counter (the counters it did report are {1}).'.format(
FormatUtils.formatMacString(mac),
moteinfo.keys()
)
]
elif moteinfo['numGoodNbrs']<self.MIN_NUMGOODNEIGHBORS:
descFAIL += [
'mote {0} has {1} good neighbors, expected at least {2}.'.format(
FormatUtils.formatMacString(mac),
moteinfo['numGoodNbrs'],
self.MIN_NUMGOODNEIGHBORS
)
]
else:
descPASS += [
'mote {0} has {1} good neighbors, which is more than {2}.'.format(
FormatUtils.formatMacString(mac),
moteinfo['numGoodNbrs'],
self.MIN_NUMGOODNEIGHBORS
)
]
# decide outcome
if descFAIL:
outcome = dld.TEST_OUTCOME_FAIL
elif descPASS:
outcome = dld.TEST_OUTCOME_PASS
else:
outcome = dld.TEST_OUTCOME_NOTRUN
# write report
description = []
if descPASS:
description += ["PASS:"]
description += descPASS
if descFAIL:
description += ["FAIL:"]
description += descFAIL
if descNOTRUN:
description += ["NOTRUN:"]
description += descNOTRUN
description = '<br/>'.join(description)
# return test result
return (outcome,description)
#===== mote availability
MIN_MOTEAVAILABILITY = 0.99
def _nettest_perMoteAvailability(self):
'''
<p>
This test verifies that the availability for each mote is above
MIN_MOTEAVAILABILITY.
</p>
<p>
The mote availability is the portion of the packets generated by
the mote's application which were actually sent into the network.
If the mote's protocol stack is busy, it will reject the
application's data, resulting in a lower availability.
</p>
<p>
This test is run once for each mote in the network.
</p>
'''
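# Worked example (illustrative numbers for a single mote):
#   >>> 1 - float(4) / float(200)
#   0.98
# 0.98 < MIN_MOTEAVAILABILITY (0.99), so such a mote would FAIL the test.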
dld = DustLinkData.DustLinkData()
descPASS = []
descFAIL = []
descNOTRUN = []
for mac in dld.getNetworkMotes(self.netname):
moteinfo = dld.getMoteInfo(mac)
#==== filter edge cases where the test can not be run
if ('isAP' in moteinfo) and moteinfo['isAP']==True:
# don't run test on AP
continue
if 'numTxOk' not in moteinfo:
descNOTRUN += [
'This test could not run because mote {0} did not report any numTxOk counter (the counters it did report are {1}).'.format(
FormatUtils.formatMacString(mac),
moteinfo.keys()
)
]
continue
if 'numTxFail' not in moteinfo:
descNOTRUN += [
'This test could not run because mote {0} did not report any numTxFail counter (the counters it did report are {1}).'.format(
FormatUtils.formatMacString(mac),
moteinfo.keys()
)
]
continue
if not moteinfo['numTxOk']:
descNOTRUN += [
'This test could not run because mote {0} did not send any packets successfully (yet?) (numTxOk=={1}) and so it\'s impossible to calculate a ratio.'.format(
FormatUtils.formatMacString(mac),
moteinfo['numTxOk']
)
]
continue
#==== run the test
availability = (1-float(moteinfo['numTxFail'])/float(moteinfo['numTxOk']))
if availability<self.MIN_MOTEAVAILABILITY:
descFAIL += [
'availability for mote {0} is {1}, expected at least {2}.'.format(
FormatUtils.formatMacString(mac),
availability,
self.MIN_MOTEAVAILABILITY
)
]
else:
descPASS += [
'availability for mote {0} is {1}, which is better than {2}.'.format(
FormatUtils.formatMacString(mac),
availability,
self.MIN_MOTEAVAILABILITY
)
]
# decide outcome
if descFAIL:
outcome = dld.TEST_OUTCOME_FAIL
elif descPASS:
outcome = dld.TEST_OUTCOME_PASS
else:
outcome = dld.TEST_OUTCOME_NOTRUN
# write report
description = []
if descPASS:
description += ["PASS:"]
description += descPASS
if descFAIL:
description += ["FAIL:"]
description += descFAIL
if descNOTRUN:
description += ["NOTRUN:"]
description += descNOTRUN
description = '<br/>'.join(description)
# return test result
return (outcome,description)
#===== single single parent
def _nettest_oneSingleParentMote(self):
'''
<p>
This test verifies that there is exactly one mote with only one parent.
</p>
<p>
Graph theory indicates that, when building a bi-DAG, exactly one
node ends up with one parent (it will be a one-hop neighbor of the
root). This test verifies that this is the case in this network.
</p>
<p>
This test is run once for the whole network.
</p>
'''
dld = DustLinkData.DustLinkData()
descPASS = []
descFAIL = []
descNOTRUN = []
numParents = {}
singleParentMotes = []
# get all the paths in the network
currentPaths = dld.getNetworkPaths(self.netname)
# count number of parents for each mote
for mac in dld.getNetworkMotes(self.netname):
numParents[mac] = 0
for (fromMote,toMote) in currentPaths:
pathInfo = dld.getPathInfo(self.netname,fromMote,toMote)
if fromMote==mac and pathInfo['direction']==2 and pathInfo['numLinks']>0:
numParents[mac] += 1
# count number of single-parents motes
for (mac,n) in numParents.items():
if n==1:
singleParentMotes += [mac]
# run test
if len(singleParentMotes)==1:
descPASS += [
'only mote {0} has a single parent'.format(
FormatUtils.formatMacString(singleParentMotes[0]),
)
]
else:
description = []
description += ['The following {0} motes have one parent only: '.format(len(singleParentMotes))]
description += [' '.join(FormatUtils.formatMacString(m) for m in singleParentMotes)]
description = ''.join(description)
descFAIL += [description]
# decide outcome
if descFAIL:
outcome = dld.TEST_OUTCOME_FAIL
elif descPASS:
outcome = dld.TEST_OUTCOME_PASS
else:
outcome = dld.TEST_OUTCOME_NOTRUN
# write report
description = []
if descPASS:
description += ["PASS:"]
description += descPASS
if descFAIL:
description += ["FAIL:"]
description += descFAIL
if descNOTRUN:
description += ["NOTRUN:"]
description += descNOTRUN
description = '<br/>'.join(description)
# return test result
return (outcome,description)
#===== stability vs. RSSI
THRES_NUM_PACKETS = 30
THRES_HIGH_RSSI = -70
THRES_HIGH_STAB = 0.70
THRES_LOW_RSSI = -80
THRES_LOW_STAB = 0.50
def _nettest_stabilityVsRssi(self):
'''
<p>
This test verifies that stability of a path is plausible given its
RSSI.
</p>
<p>
In the absence of heavy interference, there is a straightforward
relationship between the RSSI and stability of a path:
<ul>
<li>if the RSSI is above THRES_HIGH_RSSI, the stability is
expected to be above THRES_HIGH_STAB.</li>
<li>if the RSSI is below THRES_LOW_RSSI, the stability is expected to
be below THRES_LOW_STAB.</li>
</ul>
</p>
<p>
The stability is calculated as the ratio between the number of
packets transmitted successfully and transmission attempts; it is
also known as Packet Delivery Ratio (PDR).
</p>
<p>
This test is run once for each path in the network over which at
least THRES_NUM_PACKETS packets have been transmitted.
</p>
'''
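# Worked example (illustrative numbers for a single path):
#   >>> 1 - float(10) / float(100)
#   0.9
# With rssi == -65 (> THRES_HIGH_RSSI) and stability 0.9 (> THRES_HIGH_STAB),
# such a path would PASS the high-RSSI check.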
dld = DustLinkData.DustLinkData()
descPASS = []
descFAIL = []
descNOTRUN = []
currentPaths = dld.getNetworkPaths(self.netname)
for (fromMote,toMote) in currentPaths:
pathInfo = dld.getPathInfo(self.netname,fromMote,toMote)
# make sure path information contains all the counters
if ('rssi' not in pathInfo) or ('numTxPackets' not in pathInfo) or ('numTxFailures' not in pathInfo):
continue
# make sure source has sent enough packets to destination
if pathInfo['numTxPackets']<self.THRES_NUM_PACKETS:
continue
# calculate link stability and RSSI
linkStability = 1-float(pathInfo['numTxFailures'])/float(pathInfo['numTxPackets'])
linkRssi = pathInfo['rssi']
# test for high RSSI
if linkRssi>self.THRES_HIGH_RSSI:
if linkStability>self.THRES_HIGH_STAB:
descPASS += [
'link {0}->{1} has RSSI {2} (>{3}) and stability {4} (>{5})'.format(
FormatUtils.formatMacString(fromMote),
FormatUtils.formatMacString(toMote),
linkRssi,
self.THRES_HIGH_RSSI,
linkStability,
self.THRES_HIGH_STAB,
)
]
else:
descFAIL += [
'link {0}->{1} has RSSI {2} (>{3}) and stability {4} (<{5})'.format(
FormatUtils.formatMacString(fromMote),
FormatUtils.formatMacString(toMote),
linkRssi,
self.THRES_HIGH_RSSI,
linkStability,
self.THRES_HIGH_STAB,
)
]
# test for low RSSI
if linkRssi<self.THRES_LOW_RSSI:
if linkStability<self.THRES_LOW_STAB:
descPASS += [
'link {0}->{1} has RSSI {2} (<{3}) and stability {4} (<{5})'.format(
FormatUtils.formatMacString(fromMote),
FormatUtils.formatMacString(toMote),
linkRssi,
self.THRES_LOW_RSSI,
linkStability,
self.THRES_LOW_STAB,
)
]
else:
descFAIL += [
'link {0}->{1} has RSSI {2} (<{3}) and stability {4} (>{5})'.format(
FormatUtils.formatMacString(fromMote),
FormatUtils.formatMacString(toMote),
linkRssi,
self.THRES_LOW_RSSI,
linkStability,
self.THRES_LOW_STAB,
)
]
# decide outcome
if descFAIL:
outcome = dld.TEST_OUTCOME_FAIL
elif descPASS:
outcome = dld.TEST_OUTCOME_PASS
else:
outcome = dld.TEST_OUTCOME_NOTRUN
# write report
description = []
if descPASS:
description += ["PASS:"]
description += descPASS
if descFAIL:
description += ["FAIL:"]
description += descFAIL
if descNOTRUN:
description += ["NOTRUN:"]
description += descNOTRUN
description = '<br/>'.join(description)
# return test result
return (outcome,description)
'''
#===== dump info
def _nettest_dumpInfo(self):
# dump moteInfo
dld = DustLinkData.DustLinkData()
output = []
for mac in dld.getNetworkMotes(self.netname):
moteinfo = dld.getMoteInfo(mac)
output += ['']
output += ['{0}:'.format(FormatUtils.formatMacString(mac))]
for (k,v) in moteinfo.items():
output += ['- {0}: {1}'.format(k,v)]
output = '\n'.join(output)
f = open('poipoi_moteinfo.txt','w')
f.write(output)
f.close()
print 'moteInfo dumped'
# dump pathInfo
dld = DustLinkData.DustLinkData()
currentPaths = dld.getNetworkPaths(self.netname)
output = []
for (fromMote,toMote) in currentPaths:
output += ['']
output += ['{0} -> {1}'.format(
FormatUtils.formatMacString(fromMote),
FormatUtils.formatMacString(toMote)
)
]
for (k,v) in dld.getPathInfo(self.netname,fromMote,toMote).items():
output += ['- {0}: {1}'.format(k,v)]
output = '\n'.join(output)
f = open('poipoi_pathinfo.txt','w')
f.write(output)
f.close()
print 'pathInfo dumped'
'''
#======================== helpers =========================================
|
|
from functools import reduce, partial
import inspect
import operator
from operator import attrgetter
from textwrap import dedent
from .compatibility import PY3, PY33, PY34, PYPY, import_module
from .utils import no_default
__all__ = ('identity', 'thread_first', 'thread_last', 'memoize', 'compose',
'pipe', 'complement', 'juxt', 'do', 'curry', 'flip', 'excepts')
def identity(x):
""" Identity function. Return x
>>> identity(3)
3
"""
return x
def thread_first(val, *forms):
""" Thread value through a sequence of functions/forms
>>> def double(x): return 2*x
>>> def inc(x): return x + 1
>>> thread_first(1, inc, double)
4
If the function expects more than one input you can specify those inputs
in a tuple. The value is used as the first input.
>>> def add(x, y): return x + y
>>> def pow(x, y): return x**y
>>> thread_first(1, (add, 4), (pow, 2)) # pow(add(1, 4), 2)
25
So in general
thread_first(x, f, (g, y, z))
expands to
g(f(x), y, z)
See Also:
thread_last
"""
def evalform_front(val, form):
if callable(form):
return form(val)
if isinstance(form, tuple):
func, args = form[0], form[1:]
args = (val,) + args
return func(*args)
return reduce(evalform_front, forms, val)
def thread_last(val, *forms):
""" Thread value through a sequence of functions/forms
>>> def double(x): return 2*x
>>> def inc(x): return x + 1
>>> thread_last(1, inc, double)
4
If the function expects more than one input you can specify those inputs
in a tuple. The value is used as the last input.
>>> def add(x, y): return x + y
>>> def pow(x, y): return x**y
>>> thread_last(1, (add, 4), (pow, 2)) # pow(2, add(4, 1))
32
So in general
thread_last(x, f, (g, y, z))
expands to
g(y, z, f(x))
>>> def iseven(x):
... return x % 2 == 0
>>> list(thread_last([1, 2, 3], (map, inc), (filter, iseven)))
[2, 4]
See Also:
thread_first
"""
def evalform_back(val, form):
if callable(form):
return form(val)
if isinstance(form, tuple):
func, args = form[0], form[1:]
args = args + (val,)
return func(*args)
return reduce(evalform_back, forms, val)
def instanceproperty(fget=None, fset=None, fdel=None, doc=None, classval=None):
""" Like @property, but returns ``classval`` when used as a class attribute
>>> class MyClass(object):
... '''The class docstring'''
... @instanceproperty(classval=__doc__)
... def __doc__(self):
... return 'An object docstring'
... @instanceproperty
... def val(self):
... return 42
...
>>> MyClass.__doc__
'The class docstring'
>>> MyClass.val is None
True
>>> obj = MyClass()
>>> obj.__doc__
'An object docstring'
>>> obj.val
42
"""
if fget is None:
return partial(instanceproperty, fset=fset, fdel=fdel, doc=doc,
classval=classval)
return InstanceProperty(fget=fget, fset=fset, fdel=fdel, doc=doc,
classval=classval)
class InstanceProperty(property):
""" Like @property, but returns ``classval`` when used as a class attribute
Should not be used directly. Use ``instanceproperty`` instead.
"""
def __init__(self, fget=None, fset=None, fdel=None, doc=None,
classval=None):
self.classval = classval
property.__init__(self, fget=fget, fset=fset, fdel=fdel, doc=doc)
def __get__(self, obj, type=None):
if obj is None:
return self.classval
return property.__get__(self, obj, type)
def __reduce__(self):
state = (self.fget, self.fset, self.fdel, self.__doc__, self.classval)
return InstanceProperty, state
class curry(object):
""" Curry a callable function
Enables partial application of arguments through calling a function with an
incomplete set of arguments.
>>> def mul(x, y):
... return x * y
>>> mul = curry(mul)
>>> double = mul(2)
>>> double(10)
20
Also supports keyword arguments
>>> @curry # Can use curry as a decorator
... def f(x, y, a=10):
... return a * (x + y)
>>> add = f(a=1)
>>> add(2, 3)
5
See Also:
toolz.curried - namespace of curried functions
https://toolz.readthedocs.io/en/latest/curry.html
"""
def __init__(self, *args, **kwargs):
if not args:
raise TypeError('__init__() takes at least 2 arguments (1 given)')
func, args = args[0], args[1:]
if not callable(func):
raise TypeError("Input must be callable")
# curry- or functools.partial-like object? Unpack and merge arguments
if (
hasattr(func, 'func')
and hasattr(func, 'args')
and hasattr(func, 'keywords')
and isinstance(func.args, tuple)
):
_kwargs = {}
if func.keywords:
_kwargs.update(func.keywords)
_kwargs.update(kwargs)
kwargs = _kwargs
args = func.args + args
func = func.func
if kwargs:
self._partial = partial(func, *args, **kwargs)
else:
self._partial = partial(func, *args)
self.__doc__ = getattr(func, '__doc__', None)
self.__name__ = getattr(func, '__name__', '<curry>')
self.__module__ = getattr(func, '__module__', None)
self.__qualname__ = getattr(func, '__qualname__', None)
self._sigspec = None
self._has_unknown_args = None
@instanceproperty
def func(self):
return self._partial.func
if PY3: # pragma: py2 no cover
@instanceproperty
def __signature__(self):
sig = inspect.signature(self.func)
args = self.args or ()
keywords = self.keywords or {}
if is_partial_args(self.func, args, keywords, sig) is False:
raise TypeError('curry object has incorrect arguments')
params = list(sig.parameters.values())
skip = 0
for param in params[:len(args)]:
if param.kind == param.VAR_POSITIONAL:
break
skip += 1
kwonly = False
newparams = []
for param in params[skip:]:
kind = param.kind
default = param.default
if kind == param.VAR_KEYWORD:
pass
elif kind == param.VAR_POSITIONAL:
if kwonly:
continue
elif param.name in keywords:
default = keywords[param.name]
kind = param.KEYWORD_ONLY
kwonly = True
else:
if kwonly:
kind = param.KEYWORD_ONLY
if default is param.empty:
default = no_default
newparams.append(param.replace(default=default, kind=kind))
return sig.replace(parameters=newparams)
@instanceproperty
def args(self):
return self._partial.args
@instanceproperty
def keywords(self):
return self._partial.keywords
@instanceproperty
def func_name(self):
return self.__name__
def __str__(self):
return str(self.func)
def __repr__(self):
return repr(self.func)
def __hash__(self):
return hash((self.func, self.args,
frozenset(self.keywords.items()) if self.keywords
else None))
def __eq__(self, other):
return (isinstance(other, curry) and self.func == other.func and
self.args == other.args and self.keywords == other.keywords)
def __ne__(self, other):
return not self.__eq__(other)
def __call__(self, *args, **kwargs):
try:
return self._partial(*args, **kwargs)
except TypeError as exc:
if self._should_curry(args, kwargs, exc):
return self.bind(*args, **kwargs)
raise
def _should_curry(self, args, kwargs, exc=None):
func = self.func
args = self.args + args
if self.keywords:
kwargs = dict(self.keywords, **kwargs)
if self._sigspec is None:
sigspec = self._sigspec = _sigs.signature_or_spec(func)
self._has_unknown_args = has_varargs(func, sigspec) is not False
else:
sigspec = self._sigspec
if is_partial_args(func, args, kwargs, sigspec) is False:
# Nothing can make the call valid
return False
elif self._has_unknown_args:
# The call may be valid and raised a TypeError, but we curry
# anyway because the function may have `*args`. This is useful
# for decorators with signature `func(*args, **kwargs)`.
return True
elif not is_valid_args(func, args, kwargs, sigspec):
# Adding more arguments may make the call valid
return True
else:
# There was a genuine TypeError
return False
def bind(self, *args, **kwargs):
return type(self)(self, *args, **kwargs)
def call(self, *args, **kwargs):
return self._partial(*args, **kwargs)
def __get__(self, instance, owner):
if instance is None:
return self
return curry(self, instance)
def __reduce__(self):
func = self.func
modname = getattr(func, '__module__', None)
qualname = getattr(func, '__qualname__', None)
if qualname is None: # pragma: py3 no cover
qualname = getattr(func, '__name__', None)
is_decorated = None
if modname and qualname:
attrs = []
obj = import_module(modname)
for attr in qualname.split('.'):
if isinstance(obj, curry): # pragma: py2 no cover
attrs.append('func')
obj = obj.func
obj = getattr(obj, attr, None)
if obj is None:
break
attrs.append(attr)
if isinstance(obj, curry) and obj.func is func:
is_decorated = obj is self
qualname = '.'.join(attrs)
func = '%s:%s' % (modname, qualname)
# functools.partial objects can't be pickled
userdict = tuple((k, v) for k, v in self.__dict__.items()
if k not in ('_partial', '_sigspec'))
state = (type(self), func, self.args, self.keywords, userdict,
is_decorated)
return (_restore_curry, state)
def _restore_curry(cls, func, args, kwargs, userdict, is_decorated):
if isinstance(func, str):
modname, qualname = func.rsplit(':', 1)
obj = import_module(modname)
for attr in qualname.split('.'):
obj = getattr(obj, attr)
if is_decorated:
return obj
func = obj.func
obj = cls(func, *args, **(kwargs or {}))
obj.__dict__.update(userdict)
return obj
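# A minimal usage sketch (not part of the library itself) of the behaviour
# implemented by ``curry.__call__`` and ``_should_curry`` above: a call that
# fails with TypeError because arguments are still missing returns a new bound
# curry object via ``bind()`` instead of raising, while ``.call()`` always
# invokes the underlying function directly.
#
#     >>> @curry
#     ... def add3(x, y, z):
#     ...     return x + y + z
#     >>> add3(1)(2)(3)
#     6
#     >>> add3(1, 2)(3)
#     6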
@curry
def memoize(func, cache=None, key=None):
""" Cache a function's result for speedy future evaluation
Considerations:
Trades memory for speed.
Only use on pure functions.
>>> def add(x, y): return x + y
>>> add = memoize(add)
Or use as a decorator
>>> @memoize
... def add(x, y):
... return x + y
Use the ``cache`` keyword to provide a dict-like object as an initial cache
>>> @memoize(cache={(1, 2): 3})
... def add(x, y):
... return x + y
Note that the above works as a decorator because ``memoize`` is curried.
It is also possible to provide a ``key(args, kwargs)`` function that
calculates keys used for the cache, which receives an ``args`` tuple and
``kwargs`` dict as input, and must return a hashable value. However,
the default key function should be sufficient most of the time.
>>> # Use key function that ignores extraneous keyword arguments
>>> @memoize(key=lambda args, kwargs: args)
... def add(x, y, verbose=False):
... if verbose:
... print('Calculating %s + %s' % (x, y))
... return x + y
"""
if cache is None:
cache = {}
try:
may_have_kwargs = has_keywords(func) is not False
# Is unary function (single arg, no variadic argument or keywords)?
is_unary = is_arity(1, func)
except TypeError: # pragma: no cover
may_have_kwargs = True
is_unary = False
if key is None:
if is_unary:
def key(args, kwargs):
return args[0]
elif may_have_kwargs:
def key(args, kwargs):
return (
args or None,
frozenset(kwargs.items()) if kwargs else None,
)
else:
def key(args, kwargs):
return args
def memof(*args, **kwargs):
k = key(args, kwargs)
try:
return cache[k]
except TypeError:
raise TypeError("Arguments to memoized function must be hashable")
except KeyError:
cache[k] = result = func(*args, **kwargs)
return result
try:
memof.__name__ = func.__name__
except AttributeError:
pass
memof.__doc__ = func.__doc__
memof.__wrapped__ = func
return memof
class Compose(object):
""" A composition of functions
See Also:
compose
"""
__slots__ = 'first', 'funcs'
def __init__(self, funcs):
funcs = tuple(reversed(funcs))
self.first = funcs[0]
self.funcs = funcs[1:]
def __call__(self, *args, **kwargs):
ret = self.first(*args, **kwargs)
for f in self.funcs:
ret = f(ret)
return ret
def __getstate__(self):
return self.first, self.funcs
def __setstate__(self, state):
self.first, self.funcs = state
@instanceproperty(classval=__doc__)
def __doc__(self):
def composed_doc(*fs):
"""Generate a docstring for the composition of fs.
"""
if not fs:
# Argument name for the docstring.
return '*args, **kwargs'
return '{f}({g})'.format(f=fs[0].__name__, g=composed_doc(*fs[1:]))
try:
return (
'lambda *args, **kwargs: ' +
composed_doc(*reversed((self.first,) + self.funcs))
)
except AttributeError:
# One of our callables does not have a `__name__`, whatever.
return 'A composition of functions'
@property
def __name__(self):
try:
return '_of_'.join(
(f.__name__ for f in reversed((self.first,) + self.funcs)),
)
except AttributeError:
return type(self).__name__
def compose(*funcs):
""" Compose functions to operate in series.
Returns a function that applies other functions in sequence.
Functions are applied from right to left so that
``compose(f, g, h)(x, y)`` is the same as ``f(g(h(x, y)))``.
If no arguments are provided, the identity function (f(x) = x) is returned.
>>> inc = lambda i: i + 1
>>> compose(str, inc)(3)
'4'
See Also:
pipe
"""
if not funcs:
return identity
if len(funcs) == 1:
return funcs[0]
else:
return Compose(funcs)
def pipe(data, *funcs):
""" Pipe a value through a sequence of functions
I.e. ``pipe(data, f, g, h)`` is equivalent to ``h(g(f(data)))``
We think of the value as progressing through a pipe of several
transformations, much like pipes in UNIX
``$ cat data | f | g | h``
>>> double = lambda i: 2 * i
>>> pipe(3, double, str)
'6'
See Also:
compose
thread_first
thread_last
"""
for func in funcs:
data = func(data)
return data
def complement(func):
""" Convert a predicate function to its logical complement.
In other words, return a function that, for inputs that normally
yield True, yields False, and vice-versa.
>>> def iseven(n): return n % 2 == 0
>>> isodd = complement(iseven)
>>> iseven(2)
True
>>> isodd(2)
False
"""
return compose(operator.not_, func)
class juxt(object):
""" Creates a function that calls several functions with the same arguments
Takes several functions and returns a function that applies its arguments
to each of those functions then returns a tuple of the results.
Name comes from juxtaposition: the fact of two things being seen or placed
close together with contrasting effect.
>>> inc = lambda x: x + 1
>>> double = lambda x: x * 2
>>> juxt(inc, double)(10)
(11, 20)
>>> juxt([inc, double])(10)
(11, 20)
"""
__slots__ = ['funcs']
def __init__(self, *funcs):
if len(funcs) == 1 and not callable(funcs[0]):
funcs = funcs[0]
self.funcs = tuple(funcs)
def __call__(self, *args, **kwargs):
return tuple(func(*args, **kwargs) for func in self.funcs)
def __getstate__(self):
return self.funcs
def __setstate__(self, state):
self.funcs = state
def do(func, x):
""" Runs ``func`` on ``x``, returns ``x``
Because the results of ``func`` are not returned, only the side
effects of ``func`` are relevant.
Logging functions can be made by composing ``do`` with a storage function
like ``list.append`` or ``file.write``
>>> from toolz import compose
>>> from toolz.curried import do
>>> log = []
>>> inc = lambda x: x + 1
>>> inc = compose(inc, do(log.append))
>>> inc(1)
2
>>> inc(11)
12
>>> log
[1, 11]
"""
func(x)
return x
@curry
def flip(func, a, b):
""" Call the function call with the arguments flipped
This function is curried.
>>> def div(a, b):
... return a // b
...
>>> flip(div, 2, 6)
3
>>> div_by_two = flip(div, 2)
>>> div_by_two(4)
2
    This is particularly useful for built-in functions and functions defined
    in C extensions that accept positional-only arguments. For example:
    isinstance, issubclass.
>>> data = [1, 'a', 'b', 2, 1.5, object(), 3]
>>> only_ints = list(filter(flip(isinstance, int), data))
>>> only_ints
[1, 2, 3]
"""
return func(b, a)
def return_none(exc):
""" Returns None.
"""
return None
class excepts(object):
"""A wrapper around a function to catch exceptions and
dispatch to a handler.
This is like a functional try/except block, in the same way that
ifexprs are functional if/else blocks.
Examples
--------
>>> excepting = excepts(
... ValueError,
... lambda a: [1, 2].index(a),
... lambda _: -1,
... )
>>> excepting(1)
0
>>> excepting(3)
-1
Multiple exceptions and default except clause.
>>> excepting = excepts((IndexError, KeyError), lambda a: a[0])
>>> excepting([])
>>> excepting([1])
1
>>> excepting({})
>>> excepting({0: 1})
1
"""
def __init__(self, exc, func, handler=return_none):
self.exc = exc
self.func = func
self.handler = handler
def __call__(self, *args, **kwargs):
try:
return self.func(*args, **kwargs)
except self.exc as e:
return self.handler(e)
@instanceproperty(classval=__doc__)
def __doc__(self):
exc = self.exc
try:
if isinstance(exc, tuple):
exc_name = '(%s)' % ', '.join(
map(attrgetter('__name__'), exc),
)
else:
exc_name = exc.__name__
return dedent(
"""\
A wrapper around {inst.func.__name__!r} that will except:
{exc}
and handle any exceptions with {inst.handler.__name__!r}.
Docs for {inst.func.__name__!r}:
{inst.func.__doc__}
Docs for {inst.handler.__name__!r}:
{inst.handler.__doc__}
"""
).format(
inst=self,
exc=exc_name,
)
except AttributeError:
return type(self).__doc__
@property
def __name__(self):
exc = self.exc
try:
if isinstance(exc, tuple):
exc_name = '_or_'.join(map(attrgetter('__name__'), exc))
else:
exc_name = exc.__name__
return '%s_excepting_%s' % (self.func.__name__, exc_name)
except AttributeError:
return 'excepting'
if PY3: # pragma: py2 no cover
def _check_sigspec(sigspec, func, builtin_func, *builtin_args):
if sigspec is None:
try:
sigspec = inspect.signature(func)
except (ValueError, TypeError) as e:
sigspec = e
if isinstance(sigspec, ValueError):
return None, builtin_func(*builtin_args)
elif not isinstance(sigspec, inspect.Signature):
if (
func in _sigs.signatures
and ((
hasattr(func, '__signature__')
and hasattr(func.__signature__, '__get__')
) or (
PY33
and hasattr(func, '__wrapped__')
and hasattr(func.__wrapped__, '__get__')
and not callable(func.__wrapped__)
))
): # pragma: no cover (not covered in Python 3.4)
val = builtin_func(*builtin_args)
return None, val
return None, False
return sigspec, None
else: # pragma: py3 no cover
def _check_sigspec(sigspec, func, builtin_func, *builtin_args):
if sigspec is None:
try:
sigspec = inspect.getargspec(func)
except TypeError as e:
sigspec = e
if isinstance(sigspec, TypeError):
if not callable(func):
return None, False
return None, builtin_func(*builtin_args)
return sigspec, None
if PY34 or PYPY: # pragma: no cover
_check_sigspec_orig = _check_sigspec
def _check_sigspec(sigspec, func, builtin_func, *builtin_args):
# Python 3.4 and PyPy may lie, so use our registry for builtins instead
if func in _sigs.signatures:
val = builtin_func(*builtin_args)
return None, val
return _check_sigspec_orig(sigspec, func, builtin_func, *builtin_args)
_check_sigspec.__doc__ = """ \
Private function to aid in introspection compatibly across Python versions.
If a callable doesn't have a signature (Python 3) or an argspec (Python 2),
the signature registry in toolz._signatures is used.
"""
if PY3: # pragma: py2 no cover
def num_required_args(func, sigspec=None):
sigspec, rv = _check_sigspec(sigspec, func, _sigs._num_required_args,
func)
if sigspec is None:
return rv
return sum(1 for p in sigspec.parameters.values()
if p.default is p.empty
and p.kind in (p.POSITIONAL_OR_KEYWORD, p.POSITIONAL_ONLY))
def has_varargs(func, sigspec=None):
sigspec, rv = _check_sigspec(sigspec, func, _sigs._has_varargs, func)
if sigspec is None:
return rv
return any(p.kind == p.VAR_POSITIONAL
for p in sigspec.parameters.values())
def has_keywords(func, sigspec=None):
sigspec, rv = _check_sigspec(sigspec, func, _sigs._has_keywords, func)
if sigspec is None:
return rv
return any(p.default is not p.empty
or p.kind in (p.KEYWORD_ONLY, p.VAR_KEYWORD)
for p in sigspec.parameters.values())
def is_valid_args(func, args, kwargs, sigspec=None):
sigspec, rv = _check_sigspec(sigspec, func, _sigs._is_valid_args,
func, args, kwargs)
if sigspec is None:
return rv
try:
sigspec.bind(*args, **kwargs)
except TypeError:
return False
return True
def is_partial_args(func, args, kwargs, sigspec=None):
sigspec, rv = _check_sigspec(sigspec, func, _sigs._is_partial_args,
func, args, kwargs)
if sigspec is None:
return rv
try:
sigspec.bind_partial(*args, **kwargs)
except TypeError:
return False
return True
else: # pragma: py3 no cover
def num_required_args(func, sigspec=None):
sigspec, rv = _check_sigspec(sigspec, func, _sigs._num_required_args,
func)
if sigspec is None:
return rv
num_defaults = len(sigspec.defaults) if sigspec.defaults else 0
return len(sigspec.args) - num_defaults
def has_varargs(func, sigspec=None):
sigspec, rv = _check_sigspec(sigspec, func, _sigs._has_varargs, func)
if sigspec is None:
return rv
return sigspec.varargs is not None
def has_keywords(func, sigspec=None):
sigspec, rv = _check_sigspec(sigspec, func, _sigs._has_keywords, func)
if sigspec is None:
return rv
return sigspec.defaults is not None or sigspec.keywords is not None
def is_valid_args(func, args, kwargs, sigspec=None):
sigspec, rv = _check_sigspec(sigspec, func, _sigs._is_valid_args,
func, args, kwargs)
if sigspec is None:
return rv
spec = sigspec
defaults = spec.defaults or ()
num_pos = len(spec.args) - len(defaults)
missing_pos = spec.args[len(args):num_pos]
if any(arg not in kwargs for arg in missing_pos):
return False
if spec.varargs is None:
num_extra_pos = max(0, len(args) - num_pos)
else:
num_extra_pos = 0
kwargs = dict(kwargs)
# Add missing keyword arguments (unless already included in `args`)
missing_kwargs = spec.args[num_pos + num_extra_pos:]
kwargs.update(zip(missing_kwargs, defaults[num_extra_pos:]))
# Convert call to use positional arguments
args = args + tuple(kwargs.pop(key) for key in spec.args[len(args):])
if (
not spec.keywords and kwargs
or not spec.varargs and len(args) > len(spec.args)
or set(spec.args[:len(args)]) & set(kwargs)
):
return False
else:
return True
def is_partial_args(func, args, kwargs, sigspec=None):
sigspec, rv = _check_sigspec(sigspec, func, _sigs._is_partial_args,
func, args, kwargs)
if sigspec is None:
return rv
spec = sigspec
defaults = spec.defaults or ()
num_pos = len(spec.args) - len(defaults)
if spec.varargs is None:
num_extra_pos = max(0, len(args) - num_pos)
else:
num_extra_pos = 0
kwargs = dict(kwargs)
# Add missing keyword arguments (unless already included in `args`)
missing_kwargs = spec.args[num_pos + num_extra_pos:]
kwargs.update(zip(missing_kwargs, defaults[num_extra_pos:]))
# Add missing position arguments as keywords (may already be in kwargs)
missing_args = spec.args[len(args):num_pos + num_extra_pos]
kwargs.update((x, None) for x in missing_args)
# Convert call to use positional arguments
args = args + tuple(kwargs.pop(key) for key in spec.args[len(args):])
if (
not spec.keywords and kwargs
or not spec.varargs and len(args) > len(spec.args)
or set(spec.args[:len(args)]) & set(kwargs)
):
return False
else:
return True
def is_arity(n, func, sigspec=None):
""" Does a function have only n positional arguments?
This function relies on introspection and does not call the function.
Returns None if validity can't be determined.
>>> def f(x):
... return x
>>> is_arity(1, f)
True
>>> def g(x, y=1):
... return x + y
>>> is_arity(1, g)
False
"""
sigspec, rv = _check_sigspec(sigspec, func, _sigs._is_arity, n, func)
if sigspec is None:
return rv
num = num_required_args(func, sigspec)
if num is not None:
num = num == n
if not num:
return False
varargs = has_varargs(func, sigspec)
if varargs:
return False
keywords = has_keywords(func, sigspec)
if keywords:
return False
if num is None or varargs is None or keywords is None: # pragma: no cover
return None
return True
num_required_args.__doc__ = """ \
Number of required positional arguments
This function relies on introspection and does not call the function.
Returns None if validity can't be determined.
>>> def f(x, y, z=3):
... return x + y + z
>>> num_required_args(f)
2
>>> def g(*args, **kwargs):
... pass
>>> num_required_args(g)
0
"""
has_varargs.__doc__ = """ \
Does a function have variadic positional arguments?
This function relies on introspection and does not call the function.
Returns None if validity can't be determined.
>>> def f(*args):
... return args
>>> has_varargs(f)
True
>>> def g(**kwargs):
... return kwargs
>>> has_varargs(g)
False
"""
has_keywords.__doc__ = """ \
Does a function have keyword arguments?
This function relies on introspection and does not call the function.
Returns None if validity can't be determined.
>>> def f(x, y=0):
... return x + y
>>> has_keywords(f)
True
"""
is_valid_args.__doc__ = """ \
Is ``func(*args, **kwargs)`` a valid function call?
This function relies on introspection and does not call the function.
Returns None if validity can't be determined.
>>> def add(x, y):
... return x + y
>>> is_valid_args(add, (1,), {})
False
>>> is_valid_args(add, (1, 2), {})
True
>>> is_valid_args(map, (), {})
False
**Implementation notes**
Python 2 relies on ``inspect.getargspec``, which only works for
user-defined functions. Python 3 uses ``inspect.signature``, which
works for many more types of callables.
Many builtins in the standard library are also supported.
"""
is_partial_args.__doc__ = """ \
Can partial(func, *args, **kwargs)(*args2, **kwargs2) be a valid call?
Returns True *only* if the call is valid or if it is possible for the
call to become valid by adding more positional or keyword arguments.
This function relies on introspection and does not call the function.
Returns None if validity can't be determined.
>>> def add(x, y):
... return x + y
>>> is_partial_args(add, (1,), {})
True
>>> is_partial_args(add, (1, 2), {})
True
>>> is_partial_args(add, (1, 2, 3), {})
False
>>> is_partial_args(map, (), {})
True
**Implementation notes**
Python 2 relies on ``inspect.getargspec``, which only works for
user-defined functions. Python 3 uses ``inspect.signature``, which
works for many more types of callables.
Many builtins in the standard library are also supported.
"""
from . import _signatures as _sigs
|
|
# Copyright 2011 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import itertools
import logging
import socket
import ssl
import time
import uuid
import kombu
import kombu.connection
import kombu.entity
import kombu.messaging
from oslo.config import cfg
import six
from oslo.messaging._drivers import amqp as rpc_amqp
from oslo.messaging._drivers import amqpdriver
from oslo.messaging._drivers import common as rpc_common
from oslo.messaging.openstack.common import network_utils
# FIXME(markmc): remove this
_ = lambda s: s
rabbit_opts = [
cfg.StrOpt('kombu_ssl_version',
default='',
help='SSL version to use (valid only if SSL enabled). '
                    'Valid values are TLSv1, SSLv23 and SSLv3. SSLv2 may '
'be available on some distributions.'
),
cfg.StrOpt('kombu_ssl_keyfile',
default='',
help='SSL key file (valid only if SSL enabled).'),
cfg.StrOpt('kombu_ssl_certfile',
default='',
help='SSL cert file (valid only if SSL enabled).'),
cfg.StrOpt('kombu_ssl_ca_certs',
default='',
help=('SSL certification authority file '
'(valid only if SSL enabled).')),
cfg.FloatOpt('kombu_reconnect_delay',
default=1.0,
help='How long to wait before reconnecting in response to an '
'AMQP consumer cancel notification.'),
cfg.StrOpt('rabbit_host',
default='localhost',
help='The RabbitMQ broker address where a single node is '
'used.'),
cfg.IntOpt('rabbit_port',
default=5672,
help='The RabbitMQ broker port where a single node is used.'),
cfg.ListOpt('rabbit_hosts',
default=['$rabbit_host:$rabbit_port'],
help='RabbitMQ HA cluster host:port pairs.'),
cfg.BoolOpt('rabbit_use_ssl',
default=False,
help='Connect over SSL for RabbitMQ.'),
cfg.StrOpt('rabbit_userid',
default='guest',
help='The RabbitMQ userid.'),
cfg.StrOpt('rabbit_password',
default='guest',
help='The RabbitMQ password.',
secret=True),
cfg.StrOpt('rabbit_login_method',
default='AMQPLAIN',
               help='The RabbitMQ login method.'),
cfg.StrOpt('rabbit_virtual_host',
default='/',
help='The RabbitMQ virtual host.'),
cfg.IntOpt('rabbit_retry_interval',
default=1,
help='How frequently to retry connecting with RabbitMQ.'),
cfg.IntOpt('rabbit_retry_backoff',
default=2,
help='How long to backoff for between retries when connecting '
'to RabbitMQ.'),
cfg.IntOpt('rabbit_max_retries',
default=0,
help='Maximum number of RabbitMQ connection retries. '
'Default is 0 (infinite retry count).'),
cfg.BoolOpt('rabbit_ha_queues',
default=False,
help='Use HA queues in RabbitMQ (x-ha-policy: all). '
'If you change this option, you must wipe the '
'RabbitMQ database.'),
# FIXME(markmc): this was toplevel in openstack.common.rpc
cfg.BoolOpt('fake_rabbit',
default=False,
help='If passed, use a fake RabbitMQ provider.'),
]
LOG = logging.getLogger(__name__)
def _get_queue_arguments(conf):
"""Construct the arguments for declaring a queue.
If the rabbit_ha_queues option is set, we declare a mirrored queue
as described here:
http://www.rabbitmq.com/ha.html
Setting x-ha-policy to all means that the queue will be mirrored
to all nodes in the cluster.
"""
return {'x-ha-policy': 'all'} if conf.rabbit_ha_queues else {}
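# Illustrative sketch only (not part of this driver): the dict returned above
# is what the consumers below pass to kombu as ``queue_arguments`` when a
# queue is declared, e.g.
#
#     queue = kombu.entity.Queue(name='mytopic',
#                                channel=channel,
#                                queue_arguments=_get_queue_arguments(conf))
#     queue.declare()
#
# so with ``rabbit_ha_queues=True`` every queue declared this way carries
# ``x-ha-policy: all`` and is mirrored across the cluster.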
class RabbitMessage(dict):
def __init__(self, raw_message):
super(RabbitMessage, self).__init__(
rpc_common.deserialize_msg(raw_message.payload))
self._raw_message = raw_message
def acknowledge(self):
self._raw_message.ack()
def requeue(self):
self._raw_message.requeue()
class ConsumerBase(object):
"""Consumer base class."""
def __init__(self, channel, callback, tag, **kwargs):
"""Declare a queue on an amqp channel.
'channel' is the amqp channel to use
'callback' is the callback to call when messages are received
'tag' is a unique ID for the consumer on the channel
queue name, exchange name, and other kombu options are
passed in here as a dictionary.
"""
self.callback = callback
self.tag = str(tag)
self.kwargs = kwargs
self.queue = None
self.reconnect(channel)
def reconnect(self, channel):
"""Re-declare the queue after a rabbit reconnect."""
self.channel = channel
self.kwargs['channel'] = channel
self.queue = kombu.entity.Queue(**self.kwargs)
self.queue.declare()
def _callback_handler(self, message, callback):
"""Call callback with deserialized message.
        Messages that fail to process are logged and ack'ed so that they
        are skipped rather than redelivered.
"""
try:
callback(RabbitMessage(message))
except Exception:
LOG.exception(_("Failed to process message"
" ... skipping it."))
message.ack()
def consume(self, *args, **kwargs):
"""Actually declare the consumer on the amqp channel. This will
start the flow of messages from the queue. Using the
Connection.iterconsume() iterator will process the messages,
calling the appropriate callback.
If a callback is specified in kwargs, use that. Otherwise,
use the callback passed during __init__()
        If kwargs['nowait'] is True, the consumer is declared without
        waiting for a confirmation from the broker.
"""
options = {'consumer_tag': self.tag}
options['nowait'] = kwargs.get('nowait', False)
callback = kwargs.get('callback', self.callback)
if not callback:
raise ValueError("No callback defined")
def _callback(raw_message):
message = self.channel.message_to_python(raw_message)
self._callback_handler(message, callback)
self.queue.consume(*args, callback=_callback, **options)
def cancel(self):
"""Cancel the consuming from the queue, if it has started."""
try:
self.queue.cancel(self.tag)
except KeyError as e:
# NOTE(comstud): Kludge to get around a amqplib bug
if str(e) != "u'%s'" % self.tag:
raise
self.queue = None
class DirectConsumer(ConsumerBase):
"""Queue/consumer class for 'direct'."""
def __init__(self, conf, channel, msg_id, callback, tag, **kwargs):
"""Init a 'direct' queue.
'channel' is the amqp channel to use
'msg_id' is the msg_id to listen on
'callback' is the callback to call when messages are received
'tag' is a unique ID for the consumer on the channel
Other kombu options may be passed
"""
# Default options
options = {'durable': False,
'queue_arguments': _get_queue_arguments(conf),
'auto_delete': True,
'exclusive': False}
options.update(kwargs)
exchange = kombu.entity.Exchange(name=msg_id,
type='direct',
durable=options['durable'],
auto_delete=options['auto_delete'])
super(DirectConsumer, self).__init__(channel,
callback,
tag,
name=msg_id,
exchange=exchange,
routing_key=msg_id,
**options)
class TopicConsumer(ConsumerBase):
"""Consumer class for 'topic'."""
def __init__(self, conf, channel, topic, callback, tag, name=None,
exchange_name=None, **kwargs):
"""Init a 'topic' queue.
:param channel: the amqp channel to use
:param topic: the topic to listen on
:paramtype topic: str
:param callback: the callback to call when messages are received
:param tag: a unique ID for the consumer on the channel
:param name: optional queue name, defaults to topic
:paramtype name: str
Other kombu options may be passed as keyword arguments
"""
# Default options
options = {'durable': conf.amqp_durable_queues,
'queue_arguments': _get_queue_arguments(conf),
'auto_delete': conf.amqp_auto_delete,
'exclusive': False}
options.update(kwargs)
exchange_name = exchange_name or rpc_amqp.get_control_exchange(conf)
exchange = kombu.entity.Exchange(name=exchange_name,
type='topic',
durable=options['durable'],
auto_delete=options['auto_delete'])
super(TopicConsumer, self).__init__(channel,
callback,
tag,
name=name or topic,
exchange=exchange,
routing_key=topic,
**options)
class FanoutConsumer(ConsumerBase):
"""Consumer class for 'fanout'."""
def __init__(self, conf, channel, topic, callback, tag, **kwargs):
"""Init a 'fanout' queue.
'channel' is the amqp channel to use
'topic' is the topic to listen on
'callback' is the callback to call when messages are received
'tag' is a unique ID for the consumer on the channel
Other kombu options may be passed
"""
unique = uuid.uuid4().hex
exchange_name = '%s_fanout' % topic
queue_name = '%s_fanout_%s' % (topic, unique)
# Default options
options = {'durable': False,
'queue_arguments': _get_queue_arguments(conf),
'auto_delete': True,
'exclusive': False}
options.update(kwargs)
exchange = kombu.entity.Exchange(name=exchange_name, type='fanout',
durable=options['durable'],
auto_delete=options['auto_delete'])
super(FanoutConsumer, self).__init__(channel, callback, tag,
name=queue_name,
exchange=exchange,
routing_key=topic,
**options)
class Publisher(object):
"""Base Publisher class."""
def __init__(self, channel, exchange_name, routing_key, **kwargs):
"""Init the Publisher class with the exchange_name, routing_key,
and other options
"""
self.exchange_name = exchange_name
self.routing_key = routing_key
self.kwargs = kwargs
self.reconnect(channel)
def reconnect(self, channel):
"""Re-establish the Producer after a rabbit reconnection."""
self.exchange = kombu.entity.Exchange(name=self.exchange_name,
**self.kwargs)
self.producer = kombu.messaging.Producer(exchange=self.exchange,
channel=channel,
routing_key=self.routing_key)
def send(self, msg, timeout=None):
"""Send a message."""
if timeout:
#
# AMQP TTL is in milliseconds when set in the header.
#
self.producer.publish(msg, headers={'ttl': (timeout * 1000)})
else:
self.producer.publish(msg)
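# Worked example (illustrative only) of the TTL conversion in Publisher.send()
# above: AMQP expects the per-message TTL header in milliseconds, so an RPC
# timeout given in seconds is scaled by 1000 before publishing, e.g.
#
#     publisher.send(msg, timeout=60)   # publishes with headers={'ttl': 60000}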
class DirectPublisher(Publisher):
"""Publisher class for 'direct'."""
def __init__(self, conf, channel, msg_id, **kwargs):
"""Init a 'direct' publisher.
Kombu options may be passed as keyword args to override defaults
"""
options = {'durable': False,
'auto_delete': True,
'exclusive': False}
options.update(kwargs)
super(DirectPublisher, self).__init__(channel, msg_id, msg_id,
type='direct', **options)
class TopicPublisher(Publisher):
"""Publisher class for 'topic'."""
def __init__(self, conf, channel, topic, **kwargs):
"""Init a 'topic' publisher.
Kombu options may be passed as keyword args to override defaults
"""
options = {'durable': conf.amqp_durable_queues,
'auto_delete': conf.amqp_auto_delete,
'exclusive': False}
options.update(kwargs)
exchange_name = rpc_amqp.get_control_exchange(conf)
super(TopicPublisher, self).__init__(channel,
exchange_name,
topic,
type='topic',
**options)
class FanoutPublisher(Publisher):
"""Publisher class for 'fanout'."""
def __init__(self, conf, channel, topic, **kwargs):
"""Init a 'fanout' publisher.
Kombu options may be passed as keyword args to override defaults
"""
options = {'durable': False,
'auto_delete': True,
'exclusive': False}
options.update(kwargs)
super(FanoutPublisher, self).__init__(channel, '%s_fanout' % topic,
None, type='fanout', **options)
class NotifyPublisher(TopicPublisher):
"""Publisher class for 'notify'."""
def __init__(self, conf, channel, topic, **kwargs):
self.durable = kwargs.pop('durable', conf.amqp_durable_queues)
self.queue_arguments = _get_queue_arguments(conf)
super(NotifyPublisher, self).__init__(conf, channel, topic, **kwargs)
def reconnect(self, channel):
super(NotifyPublisher, self).reconnect(channel)
# NOTE(jerdfelt): Normally the consumer would create the queue, but
# we do this to ensure that messages don't get dropped if the
# consumer is started after we do
queue = kombu.entity.Queue(channel=channel,
exchange=self.exchange,
durable=self.durable,
name=self.routing_key,
routing_key=self.routing_key,
queue_arguments=self.queue_arguments)
queue.declare()
class Connection(object):
"""Connection object."""
pool = None
def __init__(self, conf, server_params=None):
self.consumers = []
self.conf = conf
self.max_retries = self.conf.rabbit_max_retries
# Try forever?
if self.max_retries <= 0:
self.max_retries = None
self.interval_start = self.conf.rabbit_retry_interval
self.interval_stepping = self.conf.rabbit_retry_backoff
# max retry-interval = 30 seconds
self.interval_max = 30
self.memory_transport = False
if server_params is None:
server_params = {}
# Keys to translate from server_params to kombu params
server_params_to_kombu_params = {'username': 'userid'}
ssl_params = self._fetch_ssl_params()
params_list = []
for adr in self.conf.rabbit_hosts:
hostname, port = network_utils.parse_host_port(
adr, default_port=self.conf.rabbit_port)
params = {
'hostname': hostname,
'port': port,
'userid': self.conf.rabbit_userid,
'password': self.conf.rabbit_password,
'login_method': self.conf.rabbit_login_method,
'virtual_host': self.conf.rabbit_virtual_host,
}
for sp_key, value in six.iteritems(server_params):
p_key = server_params_to_kombu_params.get(sp_key, sp_key)
params[p_key] = value
if self.conf.fake_rabbit:
params['transport'] = 'memory'
if self.conf.rabbit_use_ssl:
params['ssl'] = ssl_params
params_list.append(params)
self.params_list = itertools.cycle(params_list)
self.memory_transport = self.conf.fake_rabbit
self.connection = None
self.do_consume = None
self.reconnect()
# FIXME(markmc): use oslo sslutils when it is available as a library
_SSL_PROTOCOLS = {
"tlsv1": ssl.PROTOCOL_TLSv1,
"sslv23": ssl.PROTOCOL_SSLv23,
"sslv3": ssl.PROTOCOL_SSLv3
}
try:
_SSL_PROTOCOLS["sslv2"] = ssl.PROTOCOL_SSLv2
except AttributeError:
pass
@classmethod
def validate_ssl_version(cls, version):
key = version.lower()
try:
return cls._SSL_PROTOCOLS[key]
except KeyError:
            raise RuntimeError(_("Invalid SSL version: %s") % version)
def _fetch_ssl_params(self):
"""Handles fetching what ssl params should be used for the connection
(if any).
"""
ssl_params = dict()
# http://docs.python.org/library/ssl.html - ssl.wrap_socket
if self.conf.kombu_ssl_version:
ssl_params['ssl_version'] = self.validate_ssl_version(
self.conf.kombu_ssl_version)
if self.conf.kombu_ssl_keyfile:
ssl_params['keyfile'] = self.conf.kombu_ssl_keyfile
if self.conf.kombu_ssl_certfile:
ssl_params['certfile'] = self.conf.kombu_ssl_certfile
if self.conf.kombu_ssl_ca_certs:
ssl_params['ca_certs'] = self.conf.kombu_ssl_ca_certs
# We might want to allow variations in the
# future with this?
ssl_params['cert_reqs'] = ssl.CERT_REQUIRED
# Return the extended behavior or just have the default behavior
return ssl_params or True
def _connect(self, params):
"""Connect to rabbit. Re-establish any queues that may have
been declared before if we are reconnecting. Exceptions should
be handled by the caller.
"""
if self.connection:
LOG.info(_("Reconnecting to AMQP server on "
"%(hostname)s:%(port)d") % params)
try:
# XXX(nic): when reconnecting to a RabbitMQ cluster
# with mirrored queues in use, the attempt to release the
# connection can hang "indefinitely" somewhere deep down
# in Kombu. Blocking the thread for a bit prior to
# release seems to kludge around the problem where it is
                # otherwise reproducible.
if self.conf.kombu_reconnect_delay > 0:
LOG.info(_("Delaying reconnect for %1.1f seconds...") %
self.conf.kombu_reconnect_delay)
time.sleep(self.conf.kombu_reconnect_delay)
self.connection.release()
except self.connection_errors:
pass
# Setting this in case the next statement fails, though
# it shouldn't be doing any network operations, yet.
self.connection = None
self.connection = kombu.connection.BrokerConnection(**params)
self.connection_errors = self.connection.connection_errors
self.channel_errors = self.connection.channel_errors
if self.memory_transport:
# Kludge to speed up tests.
self.connection.transport.polling_interval = 0.0
self.do_consume = True
self.consumer_num = itertools.count(1)
self.connection.connect()
self.channel = self.connection.channel()
# work around 'memory' transport bug in 1.1.3
if self.memory_transport:
self.channel._new_queue('ae.undeliver')
for consumer in self.consumers:
consumer.reconnect(self.channel)
LOG.info(_('Connected to AMQP server on %(hostname)s:%(port)d') %
params)
def reconnect(self):
"""Handles reconnecting and re-establishing queues.
Will retry up to self.max_retries number of times.
self.max_retries = 0 means to retry forever.
Sleep between tries, starting at self.interval_start
seconds, backing off self.interval_stepping number of seconds
each attempt.
"""
attempt = 0
while True:
params = six.next(self.params_list)
attempt += 1
try:
self._connect(params)
return
except IOError as e:
pass
except self.connection_errors as e:
pass
except Exception as e:
# NOTE(comstud): Unfortunately it's possible for amqplib
# to return an error not covered by its transport
# connection_errors in the case of a timeout waiting for
# a protocol response. (See paste link in LP888621)
# So, we check all exceptions for 'timeout' in them
# and try to reconnect in this case.
if 'timeout' not in str(e):
raise
log_info = {}
log_info['err_str'] = str(e)
log_info['max_retries'] = self.max_retries
log_info.update(params)
if self.max_retries and attempt == self.max_retries:
msg = _('Unable to connect to AMQP server on '
'%(hostname)s:%(port)d after %(max_retries)d '
'tries: %(err_str)s') % log_info
LOG.error(msg)
raise rpc_common.RPCException(msg)
if attempt == 1:
sleep_time = self.interval_start or 1
elif attempt > 1:
sleep_time += self.interval_stepping
if self.interval_max:
sleep_time = min(sleep_time, self.interval_max)
log_info['sleep_time'] = sleep_time
LOG.error(_('AMQP server on %(hostname)s:%(port)d is '
'unreachable: %(err_str)s. Trying again in '
'%(sleep_time)d seconds.') % log_info)
time.sleep(sleep_time)
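    # Worked example (illustrative only) of the retry arithmetic above, using
    # the default options rabbit_retry_interval=1, rabbit_retry_backoff=2 and
    # interval_max=30:
    #
    #     attempt 1: sleep_time = interval_start        -> 1 second
    #     attempt 2: sleep_time += interval_stepping    -> 3 seconds
    #     attempt 3: sleep_time += interval_stepping    -> 5 seconds
    #     ...
    #     sleep_time = min(sleep_time, interval_max)    -> capped at 30 seconds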
def ensure(self, error_callback, method, *args, **kwargs):
while True:
try:
return method(*args, **kwargs)
except self.connection_errors as e:
if error_callback:
error_callback(e)
except self.channel_errors as e:
if error_callback:
error_callback(e)
except (socket.timeout, IOError) as e:
if error_callback:
error_callback(e)
except Exception as e:
# NOTE(comstud): Unfortunately it's possible for amqplib
# to return an error not covered by its transport
# connection_errors in the case of a timeout waiting for
# a protocol response. (See paste link in LP888621)
# So, we check all exceptions for 'timeout' in them
# and try to reconnect in this case.
if 'timeout' not in str(e):
raise
if error_callback:
error_callback(e)
self.reconnect()
def get_channel(self):
"""Convenience call for bin/clear_rabbit_queues."""
return self.channel
def close(self):
"""Close/release this connection."""
self.connection.release()
self.connection = None
def reset(self):
"""Reset a connection so it can be used again."""
self.channel.close()
self.channel = self.connection.channel()
# work around 'memory' transport bug in 1.1.3
if self.memory_transport:
self.channel._new_queue('ae.undeliver')
self.consumers = []
def declare_consumer(self, consumer_cls, topic, callback):
"""Create a Consumer using the class that was passed in and
add it to our list of consumers
"""
def _connect_error(exc):
log_info = {'topic': topic, 'err_str': str(exc)}
LOG.error(_("Failed to declare consumer for topic '%(topic)s': "
"%(err_str)s") % log_info)
def _declare_consumer():
consumer = consumer_cls(self.conf, self.channel, topic, callback,
six.next(self.consumer_num))
self.consumers.append(consumer)
return consumer
return self.ensure(_connect_error, _declare_consumer)
def iterconsume(self, limit=None, timeout=None):
"""Return an iterator that will consume from all queues/consumers."""
def _error_callback(exc):
if isinstance(exc, socket.timeout):
LOG.debug(_('Timed out waiting for RPC response: %s') %
str(exc))
raise rpc_common.Timeout()
else:
LOG.exception(_('Failed to consume message from queue: %s') %
str(exc))
self.do_consume = True
def _consume():
if self.do_consume:
queues_head = self.consumers[:-1] # not fanout.
queues_tail = self.consumers[-1] # fanout
for queue in queues_head:
queue.consume(nowait=True)
queues_tail.consume(nowait=False)
self.do_consume = False
return self.connection.drain_events(timeout=timeout)
for iteration in itertools.count(0):
if limit and iteration >= limit:
raise StopIteration
yield self.ensure(_error_callback, _consume)
def publisher_send(self, cls, topic, msg, timeout=None, **kwargs):
"""Send to a publisher based on the publisher class."""
def _error_callback(exc):
log_info = {'topic': topic, 'err_str': str(exc)}
LOG.exception(_("Failed to publish message to topic "
"'%(topic)s': %(err_str)s") % log_info)
def _publish():
publisher = cls(self.conf, self.channel, topic, **kwargs)
publisher.send(msg, timeout)
self.ensure(_error_callback, _publish)
def declare_direct_consumer(self, topic, callback):
"""Create a 'direct' queue.
In nova's use, this is generally a msg_id queue used for
responses for call/multicall
"""
self.declare_consumer(DirectConsumer, topic, callback)
def declare_topic_consumer(self, topic, callback=None, queue_name=None,
exchange_name=None):
"""Create a 'topic' consumer."""
self.declare_consumer(functools.partial(TopicConsumer,
name=queue_name,
exchange_name=exchange_name,
),
topic, callback)
def declare_fanout_consumer(self, topic, callback):
"""Create a 'fanout' consumer."""
self.declare_consumer(FanoutConsumer, topic, callback)
def direct_send(self, msg_id, msg):
"""Send a 'direct' message."""
self.publisher_send(DirectPublisher, msg_id, msg)
def topic_send(self, topic, msg, timeout=None):
"""Send a 'topic' message."""
self.publisher_send(TopicPublisher, topic, msg, timeout)
def fanout_send(self, topic, msg):
"""Send a 'fanout' message."""
self.publisher_send(FanoutPublisher, topic, msg)
def notify_send(self, topic, msg, **kwargs):
"""Send a notify message on a topic."""
self.publisher_send(NotifyPublisher, topic, msg, None, **kwargs)
def consume(self, limit=None, timeout=None):
"""Consume from all queues/consumers."""
it = self.iterconsume(limit=limit, timeout=timeout)
while True:
try:
six.next(it)
except StopIteration:
return
class RabbitDriver(amqpdriver.AMQPDriverBase):
def __init__(self, conf, url, default_exchange=None,
allowed_remote_exmods=[]):
conf.register_opts(rabbit_opts)
conf.register_opts(rpc_amqp.amqp_opts)
connection_pool = rpc_amqp.get_connection_pool(conf, Connection)
super(RabbitDriver, self).__init__(conf, url,
connection_pool,
default_exchange,
allowed_remote_exmods)
def require_features(self, requeue=True):
pass
|
|
# C++ original by Pavel Strakhov ( https://github.com/Riateche/toolwindowmanager )
from ToolWindowManagerWrapper import ToolWindowManagerWrapper
from ToolWindowManagerArea import ToolWindowManagerArea
from PyQt4 import QtCore, QtGui, uic
from PyQt4.QtGui import QApplication, QCursor, QSplitter, QWidget
from PyQt4.QtCore import qWarning, QPoint, QRect, QSize, pyqtSignal, Qt
def cast( obj, clas ):
if type( obj ) == clas: return obj
if isinstance( obj, clas ): return obj
return None
def findClosestParent( widget, clas ):
    while widget is not None:
if isinstance( widget, clas ):
return widget
widget = widget.parentWidget()
return None
##----------------------------------------------------------------##
class AreaReference( object ):
'''The AreaReference class represents a place where tool windows should be moved'''
def __init__( self, type, widget = None ):
self.type = type
self.setWidget( widget )
def setWidget( self, widget ):
if self.type in [
ToolWindowManager.LastUsedArea,
ToolWindowManager.NewFloatingArea,
ToolWindowManager.NoArea,
ToolWindowManager.EmptySpace ]:
if widget:
qWarning( 'area parameter ignored for this type' )
self.widget = None
elif self.type == ToolWindowManager.AddTo:
if isinstance( widget, ToolWindowManagerArea ):
self.widget = widget
else:
                qWarning( 'only ToolWindowManagerArea can be used with this type' )
                self.widget = None
else:
if isinstance( widget, ToolWindowManagerArea ) or isinstance( widget, QSplitter ):
self.widget = widget
else:
qWarning( 'only ToolWindowManagerArea or splitter can be used with this type' )
self.widget = None
def area( self ):
if isinstance( self.widget, ToolWindowManagerArea ):
return self.widget
return None
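# Usage sketch (illustrative only; 'editor', 'console' and 'inspector' are
# hypothetical QWidget instances, and ToolWindowManager is defined below):
# tool windows are added either with a bare area type or with an explicit
# AreaReference, e.g.
#
#     manager = ToolWindowManager()
#     manager.addToolWindow( editor, ToolWindowManager.EmptySpace )
#     manager.addToolWindow( console, ToolWindowManager.LastUsedArea )
#     # dock 'inspector' to the right of the area currently holding 'editor'
#     manager.moveToolWindow( inspector,
#         AreaReference( ToolWindowManager.RightOf, manager.areaOf( editor ) ) )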
##----------------------------------------------------------------##
class ToolWindowManager( QtGui.QWidget ):
'''docstring for ToolWindowManager'''
    #! The area tool windows have been added to most recently.
LastUsedArea = 1
#! New area in a detached window.
NewFloatingArea = 2
    #! Area inside the manager widget (only available when there are no tool windows in it).
EmptySpace = 3
#! Tool window is hidden.
NoArea = 4
#! Existing area specified in AreaReference argument.
AddTo = 5
#! New area to the left of the area specified in AreaReference argument.
LeftOf = 6
#! New area to the right of the area specified in AreaReference argument.
RightOf = 7
#! New area to the top of the area specified in AreaReference argument.
TopOf = 8
#! New area to the bottom of the area specified in AreaReference argument.
BottomOf = 9
#signal
toolWindowVisibilityChanged = pyqtSignal( QWidget, bool )
def __init__( self, parent = None ):
super( ToolWindowManager, self ).__init__( parent )
self.lastUsedArea = None
self.suggestions = []
self.wrappers = []
self.areas = []
self.draggedToolWindows = []
#----
self.borderSensitivity = 12
testSplitter = QSplitter()
self.rubberBandLineWidth = testSplitter.handleWidth()
self.dragIndicator = QtGui.QLabel( None, Qt.ToolTip)
self.dragIndicator.setAttribute( Qt.WA_ShowWithoutActivating)
mainLayout = QtGui.QVBoxLayout( self )
mainLayout.setContentsMargins( 0, 0, 0, 0 )
wrapper = ToolWindowManagerWrapper( self )
wrapper.setWindowFlags( wrapper.windowFlags() & ~Qt.Tool )
mainLayout.addWidget( wrapper )
self.dropSuggestionSwitchTimer = QtCore.QTimer( self )
self.dropSuggestionSwitchTimer.timeout.connect( self.showNextDropSuggestion )
self.dropSuggestionSwitchTimer.setInterval( 800 )
self.dropCurrentSuggestionIndex = 0
palette = QtGui.QPalette()
color = QtGui.QColor( Qt.blue )
color.setAlpha(80)
palette.setBrush( QtGui.QPalette.Highlight, QtGui.QBrush( color ) )
self.rectRubberBand = QtGui.QRubberBand( QtGui.QRubberBand.Rectangle, self )
self.lineRubberBand = QtGui.QRubberBand( QtGui.QRubberBand.Line, self )
self.rectRubberBand.setPalette( palette )
self.lineRubberBand.setPalette( palette )
self.toolWindowList = []
def hideToolWindow( self, toolWindow ):
self.moveToolWindow( toolWindow, ToolWindowManager.NoArea )
def toolWindows( self ):
return self.toolWindowList
def hasToolWindow( self, toolWindow ):
return toolWindow in self.toolWindowList
def addToolWindow( self, toolWindow, area ):
return self.addToolWindows( [toolWindow], area )
def addToolWindows( self, toolWindows, area ):
for toolWindow in toolWindows:
if self.hasToolWindow( toolWindow ): continue
toolWindow.hide()
toolWindow.setParent( None )
self.toolWindowList.append( toolWindow )
self.moveToolWindows( toolWindows, area )
def areaOf( self, toolWindow ):
return findClosestParent( toolWindow, ToolWindowManagerArea )
def moveToolWindow( self, toolWindow, area ):
self.moveToolWindows( [ toolWindow ], area )
def moveToolWindows( self, toolWindows, area ):
if type( area ) == int:
area = AreaReference( area )
for toolWindow in toolWindows:
if not self.hasToolWindow( toolWindow ): return
if toolWindow.parentWidget():
self.releaseToolWindow( toolWindow )
areaType = area.type
if areaType == ToolWindowManager.LastUsedArea and not self.lastUsedArea:
            foundArea = self.findChild( ToolWindowManagerArea )
            if foundArea:
                area = AreaReference( ToolWindowManager.AddTo, foundArea )
            else:
                area = AreaReference( ToolWindowManager.EmptySpace )
            areaType = area.type
if areaType == ToolWindowManager.NoArea:
#do nothing
pass
elif areaType == ToolWindowManager.NewFloatingArea:
area = self.createArea()
area.addToolWindows( toolWindows )
wrapper = ToolWindowManagerWrapper( self )
wrapper.layout().addWidget( area )
wrapper.move( QCursor.pos() )
wrapper.show()
elif areaType == ToolWindowManager.AddTo:
area.area().addToolWindows( toolWindows )
elif areaType in ( ToolWindowManager.LeftOf, ToolWindowManager.RightOf, ToolWindowManager.TopOf, ToolWindowManager.BottomOf ):
parentSplitter = cast( area.widget.parentWidget(), QSplitter )
wrapper = cast( area.widget.parentWidget(), ToolWindowManagerWrapper )
if not ( parentSplitter or wrapper ):
qWarning( 'unknown parent type' )
return
# import pudb; pu.db
useParentSplitter = False
indexInParentSplitter = 0
if parentSplitter:
indexInParentSplitter = parentSplitter.indexOf( area.widget )
if parentSplitter.orientation() == Qt.Vertical:
useParentSplitter = areaType in ( ToolWindowManager.TopOf, ToolWindowManager.BottomOf )
else:
useParentSplitter = areaType in ( ToolWindowManager.LeftOf, ToolWindowManager.RightOf )
if useParentSplitter:
if areaType in ( ToolWindowManager.BottomOf , ToolWindowManager.RightOf ):
indexInParentSplitter += 1
newArea = self.createArea()
newArea.addToolWindows( toolWindows )
parentSplitter.insertWidget( indexInParentSplitter, newArea )
else:
area.widget.hide()
area.widget.setParent( None )
splitter = self.createSplitter()
if areaType in ( ToolWindowManager.TopOf, ToolWindowManager.BottomOf ):
splitter.setOrientation(Qt.Vertical)
else:
splitter.setOrientation(Qt.Horizontal)
splitter.addWidget( area.widget )
area.widget.show()
newArea = self.createArea()
if areaType in ( ToolWindowManager.TopOf, ToolWindowManager.LeftOf ):
splitter.insertWidget( 0, newArea )
else:
splitter.addWidget( newArea )
if parentSplitter:
parentSplitter.insertWidget( indexInParentSplitter, splitter )
else:
wrapper.layout().addWidget( splitter )
newArea.addToolWindows( toolWindows )
elif areaType == ToolWindowManager.EmptySpace:
wrapper = self.findChild( ToolWindowManagerWrapper )
if wrapper.isOccupied():
self.lastUsedArea.addToolWindows( toolWindows )
else:
newArea = self.createArea()
wrapper.layout().addWidget( newArea )
newArea.addToolWindows( toolWindows )
elif areaType == ToolWindowManager.LastUsedArea:
self.lastUsedArea.addToolWindows( toolWindows )
else:
qWarning( 'invalid type' )
self.simplifyLayout()
for toolWindow in toolWindows:
self.toolWindowVisibilityChanged.emit( toolWindow, toolWindow.parent() != None )
def removeToolWindow( self, toolWindow ):
        if toolWindow not in self.toolWindowList:
qWarning( 'unknown tool window' )
return
self.moveToolWindow( toolWindow, ToolWindowManager.NoArea )
        self.toolWindowList.remove(toolWindow)
def setSuggestionSwitchInterval( self, msec ):
self.dropSuggestionSwitchTimer.setInterval(msec)
def suggestionSwitchInterval( self ):
return self.dropSuggestionSwitchTimer.interval()
def setBorderSensitivity( self, pixels ):
self.borderSensitivity = pixels
def setRubberBandLineWidth( self, pixels ):
self.rubberBandLineWidth = pixels
def saveState( self ):
result = {}
result[ 'toolWindowManagerStateFormat' ] = 1
mainWrapper = self.findChild( ToolWindowManagerWrapper )
if not mainWrapper:
qWarning( 'can not find main wrapper' )
return {}
result[ 'mainWrapper' ] = mainWrapper.saveState()
floatingWindowsData = []
for wrapper in self.wrappers:
if not wrapper.isWindow(): continue
floatingWindowsData.append( wrapper.saveState() )
result['floatingWindows'] = floatingWindowsData
return result
def restoreState( self, data ):
if not isinstance( data, dict ): return
if data[ 'toolWindowManagerStateFormat' ] != 1:
qWarning( 'state format is not recognized' )
return
self.moveToolWindows( self.toolWindowList, ToolWindowManager.NoArea )
mainWrapper = self.findChild( ToolWindowManagerWrapper )
if not mainWrapper:
            qWarning( 'can not find main wrapper' )
            return
mainWrapper.restoreState( data['mainWrapper'] )
for windowData in data['floatingWindows']:
wrapper = ToolWindowManagerWrapper( self )
wrapper.restoreState( windowData )
wrapper.show()
self.simplifyLayout()
for toolWindow in self.toolWindowList:
self.toolWindowVisibilityChanged.emit( toolWindow, toolWindow.parentWidget() != None )
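    # Round-trip sketch (illustrative only): the layout produced by saveState()
    # can be re-applied later with restoreState(). Note that the state dict is
    # assumed to contain QByteArray blobs from QSplitter.saveState(), so pickle
    # or QSettings is a better fit for persistence than plain JSON.
    #
    #     state = manager.saveState()
    #     ...                              # e.g. application restart
    #     manager.restoreState( state )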
def createArea( self ):
area = ToolWindowManagerArea( self, None )
area.tabCloseRequested.connect( self.tabCloseRequested )
return area
def handleNoSuggestions( self ):
self.rectRubberBand.hide()
self.lineRubberBand.hide()
self.lineRubberBand.setParent(self)
self.rectRubberBand.setParent(self)
self.suggestions = []
self.dropCurrentSuggestionIndex = 0
if self.dropSuggestionSwitchTimer.isActive():
self.dropSuggestionSwitchTimer.stop()
def releaseToolWindow( self, toolWindow ):
previousTabWidget = findClosestParent( toolWindow, ToolWindowManagerArea )
if not previousTabWidget:
qWarning( 'cannot find tab widget for tool window' )
return
previousTabWidget.removeTab( previousTabWidget.indexOf(toolWindow) )
toolWindow.hide()
toolWindow.setParent( None )
def removeArea( self, area ):
area.manager = None
area.deleteLater()
def removeWrapper( self, wrapper ):
wrapper.deleteLater()
self.wrappers.remove( wrapper )
def simplifyLayout( self ):
newAreas = []
currentAreas = self.areas
for area in currentAreas:
if area.parentWidget() is None:
if area.count() == 0:
if area == self.lastUsedArea: self.lastUsedArea = None
self.removeArea( area )
continue
splitter = cast( area.parentWidget(), QSplitter )
validSplitter = None # least top level splitter that should remain
invalidSplitter = None #most top level splitter that should be deleted
while( splitter ):
if splitter.count() > 1:
validSplitter = splitter
break
else:
invalidSplitter = splitter
splitter = cast( splitter.parentWidget(), QSplitter )
if not validSplitter:
wrapper = findClosestParent( area, ToolWindowManagerWrapper )
if not wrapper:
qWarning( 'can not find wrapper' )
print findClosestParent( area, ToolWindowManagerWrapper )
print type( area.parentWidget() ) == ToolWindowManagerWrapper
return
if area.count() == 0 and wrapper.isWindow():
wrapper.hide()
wrapper.setParent( None )
# can not deleteLater immediately (strange MacOS bug)
self.removeWrapper( wrapper )
elif area.parent() != wrapper:
wrapper.layout().addWidget( area )
else:
if area.count() > 0:
if validSplitter and area.parent() != validSplitter:
index = validSplitter.indexOf( invalidSplitter )
validSplitter.insertWidget( index, area )
            if invalidSplitter is not None:
invalidSplitter.hide()
invalidSplitter.setParent( None )
invalidSplitter.deleteLater()
if area.count() == 0:
area.hide()
area.setParent( None )
if area == self.lastUsedArea: self.lastUsedArea = None
self.removeArea( area )
continue
newAreas.append( area )
#keep
self.areas = newAreas
def dragInProgress( self ):
return len( self.draggedToolWindows ) > 0
def startDrag( self, toolWindows ):
if self.dragInProgress():
qWarning( 'ToolWindowManager::execDrag: drag is already in progress' )
return
if not toolWindows: return
self.draggedToolWindows = toolWindows
self.dragIndicator.setPixmap( self.generateDragPixmap( toolWindows ) )
self.updateDragPosition()
self.dragIndicator.show()
def saveSplitterState( self, splitter ):
result = {}
result['state'] = splitter.saveState()
result['type'] = 'splitter'
items = []
for i in range( splitter.count() ):
item = splitter.widget(i)
area = cast( item, ToolWindowManagerArea )
if area:
items.append( area.saveState() )
else:
childSplitter = cast( item, QSplitter )
if childSplitter:
                    items.append( self.saveSplitterState( childSplitter ) )
else:
qWarning( 'unknown splitter item' )
result['items'] = items
return result
def restoreSplitterState( self, data ):
        if len( data[ 'items' ] ) < 2:
qWarning( 'invalid splitter encountered' )
splitter = self.createSplitter()
for itemData in data[ 'items' ]:
itemType = itemData['type']
if itemType == 'splitter':
splitter.addWidget( self.restoreSplitterState( itemData ) )
elif itemType == 'area':
area = self.createArea()
area.restoreState( itemData )
splitter.addWidget( area )
else:
qWarning( 'unknown item type' )
splitter.restoreState( data['state'] )
return splitter
def generateDragPixmap( self, toolWindows ):
widget = QtGui.QTabBar()
widget.setDocumentMode(True)
for toolWindow in toolWindows:
widget.addTab(toolWindow.windowIcon(), toolWindow.windowTitle())
#if QT_VERSION >= 0x050000 # Qt5
# return widget.grab()
#else #Qt4
return QtGui.QPixmap.grabWidget( widget )
#endif
def showNextDropSuggestion( self ):
if len( self.suggestions ) == 0:
qWarning( 'showNextDropSuggestion called but no suggestions' )
return
self.dropCurrentSuggestionIndex += 1
if self.dropCurrentSuggestionIndex >= len( self.suggestions ):
self.dropCurrentSuggestionIndex = 0
suggestion = self.suggestions[ self.dropCurrentSuggestionIndex ]
if suggestion.type in ( ToolWindowManager.AddTo , ToolWindowManager.EmptySpace ):
if suggestion.type == ToolWindowManager.EmptySpace:
widget = self.findChild( ToolWindowManagerWrapper )
else:
widget = suggestion.widget
if widget.topLevelWidget() == self.topLevelWidget():
placeHolderParent = self
else:
placeHolderParent = widget.topLevelWidget()
placeHolderGeometry = widget.rect()
placeHolderGeometry.moveTopLeft(
widget.mapTo( placeHolderParent, placeHolderGeometry.topLeft() )
)
self.rectRubberBand.setGeometry( placeHolderGeometry )
self.rectRubberBand.setParent( placeHolderParent )
self.rectRubberBand.show()
self.lineRubberBand.hide()
elif suggestion.type in (
ToolWindowManager.LeftOf , ToolWindowManager.RightOf,
ToolWindowManager.TopOf , ToolWindowManager.BottomOf ):
if suggestion.widget.topLevelWidget() == self.topLevelWidget():
placeHolderParent = self
else:
placeHolderParent = suggestion.widget.topLevelWidget()
placeHolderGeometry = self.sidePlaceHolderRect( suggestion.widget, suggestion.type )
placeHolderGeometry.moveTopLeft(
suggestion.widget.mapTo( placeHolderParent, placeHolderGeometry.topLeft() )
)
self.lineRubberBand.setGeometry(placeHolderGeometry)
self.lineRubberBand.setParent(placeHolderParent)
self.lineRubberBand.show()
self.rectRubberBand.hide()
else:
qWarning( 'unsupported suggestion type' )
def findSuggestions( self, wrapper ):
self.suggestions = []
self.dropCurrentSuggestionIndex = -1
globalPos = QCursor.pos()
candidates = []
for splitter in wrapper.findChildren( QSplitter ):
candidates.append( splitter )
for area in self.areas:
if area.topLevelWidget() == wrapper.topLevelWidget():
candidates.append( area )
for widget in candidates:
splitter = cast( widget, QSplitter )
area = cast( widget, ToolWindowManagerArea )
if not ( splitter or area ):
qWarning( 'unexpected widget type' )
continue
parentSplitter = cast( widget.parentWidget(), QSplitter )
lastInSplitter = parentSplitter and \
parentSplitter.indexOf(widget) == parentSplitter.count() - 1
allowedSides = []
if not splitter or splitter.orientation() == Qt.Vertical:
allowedSides.append( ToolWindowManager.LeftOf )
if not splitter or splitter.orientation() == Qt.Horizontal:
allowedSides.append( ToolWindowManager.TopOf )
if not parentSplitter or parentSplitter.orientation() == Qt.Vertical or lastInSplitter:
if not splitter or splitter.orientation() == Qt.Vertical:
allowedSides.append( ToolWindowManager.RightOf )
if not parentSplitter or parentSplitter.orientation() == Qt.Horizontal or lastInSplitter:
if not splitter or splitter.orientation() == Qt.Horizontal:
allowedSides.append( ToolWindowManager.BottomOf )
for side in allowedSides:
rect = self.sideSensitiveArea( widget, side )
pos = widget.mapFromGlobal( globalPos )
if rect.contains( pos ):
self.suggestions.append( AreaReference( side, widget ) )
if area:
rect = area.rect()
pos = area.mapFromGlobal( globalPos )
if rect.contains( pos ):
self.suggestions.append( AreaReference( ToolWindowManager.AddTo, area ) )
#end of for
if not candidates:
self.suggestions.append( AreaReference( ToolWindowManager.EmptySpace ) )
if len( self.suggestions ) == 0:
self.handleNoSuggestions()
else:
self.showNextDropSuggestion()
def sideSensitiveArea( self, widget, side ):
widgetRect = widget.rect()
if side == ToolWindowManager.TopOf:
return QRect(QPoint(widgetRect.left(), widgetRect.top() - self.borderSensitivity),
QSize(widgetRect.width(), self.borderSensitivity * 2))
elif side == ToolWindowManager.LeftOf:
return QRect(QPoint(widgetRect.left() - self.borderSensitivity, widgetRect.top()),
QSize(self.borderSensitivity * 2, widgetRect.height()))
elif side == ToolWindowManager.BottomOf:
return QRect(QPoint(widgetRect.left(), widgetRect.top() + widgetRect.height() - self.borderSensitivity),
QSize(widgetRect.width(), self.borderSensitivity * 2))
elif side == ToolWindowManager.RightOf:
return QRect(QPoint(widgetRect.left() + widgetRect.width() - self.borderSensitivity, widgetRect.top()),
QSize(self.borderSensitivity * 2, widgetRect.height()))
else:
qWarning( 'invalid side' )
return QRect()
def sidePlaceHolderRect( self, widget, side ):
widgetRect = widget.rect()
parentSplitter = cast( widget.parentWidget(), QSplitter )
if parentSplitter and parentSplitter.indexOf(widget) > 0:
delta = parentSplitter.handleWidth() / 2 + self.rubberBandLineWidth / 2
if side == ToolWindowManager.TopOf and parentSplitter.orientation() == Qt.Vertical:
return QRect(QPoint( widgetRect.left(), widgetRect.top() - delta ),
QSize( widgetRect.width(), self.rubberBandLineWidth ) )
elif side == ToolWindowManager.LeftOf and parentSplitter.orientation() == Qt.Horizontal:
return QRect(QPoint(widgetRect.left() - delta, widgetRect.top()),
QSize(self.rubberBandLineWidth, widgetRect.height()))
if side == ToolWindowManager.TopOf:
return QRect(QPoint(widgetRect.left(), widgetRect.top()),
QSize(widgetRect.width(), self.rubberBandLineWidth))
elif side == ToolWindowManager.LeftOf:
return QRect(QPoint(widgetRect.left(), widgetRect.top()),
QSize(self.rubberBandLineWidth, widgetRect.height()))
elif side == ToolWindowManager.BottomOf:
return QRect(QPoint(widgetRect.left(), widgetRect.top() + widgetRect.height() - self.rubberBandLineWidth),
QSize(widgetRect.width(), self.rubberBandLineWidth))
elif side == ToolWindowManager.RightOf:
return QRect(QPoint(widgetRect.left() + widgetRect.width() - self.rubberBandLineWidth, widgetRect.top()),
QSize(self.rubberBandLineWidth, widgetRect.height()))
else:
qWarning( 'invalid side' )
return QRect()
def updateDragPosition( self ):
if not self.dragInProgress(): return
if not QApplication.mouseButtons() & Qt.LeftButton :
self.finishDrag()
return
pos = QCursor.pos()
self.dragIndicator.move( pos + QPoint(1, 1) )
foundWrapper = False
window = QApplication.topLevelAt( pos )
for wrapper in self.wrappers:
if wrapper.window() == window:
if wrapper.rect().contains( wrapper.mapFromGlobal(pos) ):
self.findSuggestions( wrapper )
if len( self.suggestions ) > 0:
#starting or restarting timer
if self.dropSuggestionSwitchTimer.isActive():
self.dropSuggestionSwitchTimer.stop()
self.dropSuggestionSwitchTimer.start()
foundWrapper = True
break
if not foundWrapper:
self.handleNoSuggestions()
def finishDrag( self ):
if not self.dragInProgress():
qWarning( 'unexpected finishDrag' )
return
if len( self.suggestions ) == 0:
self.moveToolWindows( self.draggedToolWindows, ToolWindowManager.NewFloatingArea )
else:
if self.dropCurrentSuggestionIndex >= len( self.suggestions ):
qWarning( 'invalid self.dropCurrentSuggestionIndex' )
return
suggestion = self.suggestions[ self.dropCurrentSuggestionIndex ]
self.handleNoSuggestions()
self.moveToolWindows( self.draggedToolWindows, suggestion )
self.dragIndicator.hide()
self.draggedToolWindows = []
def tabCloseRequested( self, index ):
if not isinstance( self.sender(), ToolWindowManagerArea ):
qWarning( 'sender is not a ToolWindowManagerArea' )
return
area = self.sender()
toolWindow = area.widget( index )
if not self.hasToolWindow( toolWindow ):
qWarning( 'unknown tab in tab widget' )
return
self.hideToolWindow( toolWindow )
def createSplitter( self ):
splitter = QSplitter()
splitter.setChildrenCollapsible( False )
return splitter
#TEST
if __name__ == '__main__':
import sys
app = QtGui.QApplication( sys.argv )
styleSheetName = 'gii.qss'
app.setStyleSheet(
open( '/Users/tommo/prj/gii/data/theme/' + styleSheetName ).read()
)
class Test( QtGui.QMainWindow ):
def __init__(self, *args ):
super(Test, self).__init__( *args )
mgr = ToolWindowManager( self )
self.setCentralWidget( mgr )
widget = QtGui.QPushButton( 'hello' )
widget.setWindowTitle( 'hello' )
widget.setObjectName( 'hello' )
mgr.addToolWindow( widget, ToolWindowManager.EmptySpace )
widget = QtGui.QPushButton( 'world' )
widget.setWindowTitle( 'world' )
widget.setObjectName( 'world' )
mgr.addToolWindow( widget, ToolWindowManager.NewFloatingArea )
widget = QtGui.QPushButton( 'happy' )
widget.setWindowTitle( 'happy' )
widget.setObjectName( 'happy' )
mgr.addToolWindow( widget, ToolWindowManager.EmptySpace )
widget = QtGui.QPushButton( 'goodness' )
widget.setWindowTitle( 'goodness' )
widget.setObjectName( 'goodness' )
mgr.addToolWindow( widget, ToolWindowManager.LastUsedArea )
result = mgr.saveState()
for w in mgr.toolWindows():
mgr.moveToolWindow( w, ToolWindowManager.NewFloatingArea )
mgr.restoreState( result )
area = mgr.areaOf( widget )
mgr.hideToolWindow( widget )
area.addToolWindow( widget )
window = Test()
window.show()
window.raise_()
app.exec_()
|
|
import os
from datetime import datetime
from time import time
from django.http import JsonResponse
from django.shortcuts import render, redirect
from django.core.urlresolvers import reverse
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from xlrd import open_workbook, xldate_as_tuple
from itertools import izip
from ..model import ManagementScenario, SessionMaker
from ..model import (LITTLE_DELL_VOLUME,
LITTLE_DELL_RELEASE,
LITTLE_DELL_SPILL,
MOUNTAIN_DELL_VOLUME,
MOUNTAIN_DELL_RELEASE,
MOUNTAIN_DELL_SPILL,
DELL_CREEK_INFLOW,
LAMBS_CREEK_INFLOW,
RELIABILITY)
from ..lib.goldsim import runLittleDellGoldSim
from ..lib import get_package_name, CKAN_ENGINE
def jobs(request):
"""
Start a new scenario in the scenario table
"""
# Get user id
user_id = request.user.id
# Get a session
session = SessionMaker()
scenarios_list = session.query(ManagementScenario.id,
ManagementScenario.name,
ManagementScenario.description,
ManagementScenario.last_updated,
ManagementScenario.job_status,
ManagementScenario.percentage,
ManagementScenario.results_link). \
filter(ManagementScenario.user_id == str(user_id)). \
order_by(ManagementScenario.last_updated.desc()). \
all()
# Initialize paginator
page_number = request.GET.get('page')
paginator = Paginator(scenarios_list, 10)
# Define pager format
pager_format = '''
<ul class="pagination">
<li><a href="#">1</a></li>
<li><a href="#">1</a></li>
<li><a href="#">1</a></li>
</ul>
'''
try:
# Return the requested page
scenarios = paginator.page(page_number)
except PageNotAnInteger:
# Deliver first page if page is not an integer
scenarios = paginator.page(1)
except EmptyPage:
# Deliver last page if page number is out of range
        scenarios = paginator.page(paginator.num_pages)
# Template context
context = {'scenarios': scenarios,
'paginator': paginator,
'statuses': ('pending', 'success', 'error'),
'nav': 'scenarios'}
return render(request, 'parleys_creek_management/jobs/jobs.html', context)
def delete(request, scenario_id):
"""
Delete the scenario
"""
# Retrieve the scenario
session = SessionMaker()
scenario = session.query(ManagementScenario).filter(ManagementScenario.id == scenario_id).one()
# Delete the current scenario
session.delete(scenario)
session.commit()
return redirect('parleys_creek_management:jobs')
def status(request, scenario_id):
"""
Return job status information for a job
"""
# Get user id
user_id = str(request.user.id)
# Get a session
session = SessionMaker()
scenario = session.query(ManagementScenario).get(scenario_id)
# Defaults
job_status = None
percentage = None
link = None
if scenario and scenario.user_id == user_id:
job_status = scenario.job_status
percentage = scenario.percentage
link = reverse('parleys_creek_management:results_view',
kwargs={'scenario_id': scenario_id, 'plot_name': 'little-dell-volume'})
# Form response
if percentage >= 100:
json_response = {'status': job_status, 'percentage': percentage, 'link': link}
else:
json_response = {'status': job_status, 'percentage': percentage, 'link': None}
return JsonResponse(json_response)
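# Illustrative JSON payloads returned by the view above (values are placeholders):
#   while running:  {"status": "processing", "percentage": 50, "link": null}
#   when finished:  {"status": "success", "percentage": 100, "link": "<results url>"}
# The results link is only included once the scenario reports 100 percent completion.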
def run(request, scenario_id):
"""
Run the model action
"""
# Get user id
user_id = str(request.user.id)
# Get a session
session = SessionMaker()
scenario = session.query(ManagementScenario). \
filter(ManagementScenario.user_id == user_id). \
filter(ManagementScenario.id == scenario_id). \
one()
scenario.job_status = 'processing'
scenario.percentage = 0
session.commit()
# Get arguments for the web service
arguments = scenario.get_web_service_inputs()
# Get Path to Workspace and unique file name
workspace_dir = os.path.join(os.path.abspath(os.path.dirname(os.path.dirname(__file__))), 'workspace')
unique_file_name = request.user.username + datetime.now().strftime('%Y%d%m%H%M%S') + '.xls'
out_path = os.path.join(workspace_dir, unique_file_name)
# Update status of scenario in the database to processing
scenario.percentage = 25
session.commit()
    # Set timeout to be 3 minutes
timeout = time() + 3 * 60 # seconds
frequency = 3 # seconds
    # If timeout occurs, will be marked as error
    job_status = 'error'
    error_message = ''
    results_link = None
# Start execution
execution = runLittleDellGoldSim(arguments, out_path)
# Check status until time-out happens
while not execution.isComplete():
if time() >= timeout:
# kill request
break
execution.checkStatus(sleepSecs=frequency)
if execution.isSucceded():
# Update status in db
scenario.job_status = 'downloading results'
scenario.percentage = 50
session.commit()
# Get results
execution.getOutput(out_path)
job_status = 'success'
# Get package name from app.ini
package_name = get_package_name()
result = {'success': False}
# Push file to ckan dataset
try:
# Push file to ckan dataset
resource_name = scenario.name
        description = '{0} <Created by {1} on {2}>'.format(scenario.description, request.user.username,
                                                           datetime.now().strftime('%B, %d %Y @ %H:%M'))
result = CKAN_ENGINE.create_resource(dataset_id=package_name, file=out_path, name=resource_name,
format='xls', model='PCMT-GOLDSIM', description=description)
except Exception as e:
error_message = 'PCMT RUN WARNING: {0}'.format(e.message)
job_status = 'error'
print(error_message)
# Get link of the resource
if result['success']:
results_link = result['result']['url']
else:
error_message = 'PCMT RUN WARNING: Job execution failed.'
results_link = None
job_status = 'error'
print(error_message)
# Parse results into python data structures and cache in database for visualization
scenario.job_status = 'processing results'
scenario.percentage = 75
session.commit()
try:
parsed_results = parse_results(out_path)
scenario.set_results(parsed_results)
except Exception as e:
error_message = 'PCMT RUN WARNING: {0}'.format(e.message)
job_status = 'error'
print(error_message)
# Delete temp file in workspace
try:
os.remove(out_path)
except Exception as e:
error_message = 'PCMT RUN WARNING: {0}'.format(e.message)
print(error_message)
# Update the scenario job status
scenario.results_link = results_link
# Update status in db
scenario.job_status = job_status
scenario.percentage = 100
session.commit()
results_link = scenario.results_link
# Assemble response object
    if error_message != '':
        json_response = {'status': job_status, 'link': results_link, 'message': error_message}
    else:
        json_response = {'status': job_status, 'link': results_link}
session.close()
return JsonResponse(json_response)
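# The view above walks scenario.percentage through fixed checkpoints so the jobs
# page can poll status(): 0 when the run is queued, 25 once the GoldSim web
# service call starts, 50 while results are downloaded, 75 while they are parsed
# and cached, and 100 when the scenario row is finalized with its job_status and
# results link.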
def parse_results(filename):
"""
This method is used to parse the results into Python data structures.
"""
results = dict()
# Get a handle on the workbook
workbook = open_workbook(filename)
# Get handles on the sheets
little_dell = workbook.sheet_by_index(0)
mountain_dell = workbook.sheet_by_index(1)
inflows = workbook.sheet_by_index(2)
reliability = workbook.sheet_by_index(3)
for sheet_index in range(workbook.nsheets):
sheet = workbook.sheet_by_index(sheet_index)
sheet_name = sheet.name
if sheet_name == 'Little Dell':
little_dell = sheet
elif sheet_name == 'Mountain Dell':
mountain_dell = sheet
elif sheet_name == 'Lambs and Dell Creeks':
inflows = sheet
elif sheet_name == 'Reliability':
reliability = sheet
##
# Little Dell
##
    # Parse sheet and strip off the headers (top three rows)
ld_time = little_dell.col_values(0)[3:]
ld_volume = little_dell.col_values(1)[3:]
ld_release = little_dell.col_values(2)[3:]
ld_spill = little_dell.col_values(3)[3:]
# Convert decimal date to datetime
ld_datetime = []
for dec_time in ld_time:
time_tuple = xldate_as_tuple(dec_time, workbook.datemode)
ld_datetime.append(datetime(*time_tuple))
# Stitch together
ld_volume_series = [list(i) for i in izip(ld_datetime, ld_volume)]
ld_release_series = [list(i) for i in izip(ld_datetime, ld_release)]
ld_spill_series = [list(i) for i in izip(ld_datetime, ld_spill)]
# Create series dictionaries
ld_volume_dict = {'title': 'Little Dell Volume',
'subtitle': '',
'y_axis_title': 'Volume',
'y_axis_units': 'kaf',
'series': ld_volume_series}
results[LITTLE_DELL_VOLUME] = ld_volume_dict
ld_release_dict = {'title': 'Little Dell Release',
'subtitle': '',
'y_axis_title': 'Flowrate',
'y_axis_units': 'af/d',
'series': ld_release_series}
results[LITTLE_DELL_RELEASE] = ld_release_dict
ld_spill_dict = {'title': 'Little Dell Spills',
'subtitle': '',
'y_axis_title': 'Flowrate',
'y_axis_units': 'af/d',
'series': ld_spill_series}
results[LITTLE_DELL_SPILL] = ld_spill_dict
##
# Mountain Dell
##
    # Parse sheet and strip off the headers (top three rows)
md_time = mountain_dell.col_values(0)[3:]
md_volume = mountain_dell.col_values(1)[3:]
md_release = mountain_dell.col_values(2)[3:]
md_spill = mountain_dell.col_values(3)[3:]
# Convert decimal date to datetime
md_datetime = []
for dec_time in md_time:
time_tuple = xldate_as_tuple(dec_time, workbook.datemode)
md_datetime.append(datetime(*time_tuple))
# Stitch together
md_volume_series = [list(i) for i in izip(md_datetime, md_volume)]
md_release_series = [list(i) for i in izip(md_datetime, md_release)]
md_spill_series = [list(i) for i in izip(md_datetime, md_spill)]
# Create series dictionaries
md_volume_dict = {'title': 'Mountain Dell Volume',
'subtitle': '',
'y_axis_title': 'Volume',
'y_axis_units': 'kaf',
'series': md_volume_series}
results[MOUNTAIN_DELL_VOLUME] = md_volume_dict
md_release_dict = {'title': 'Mountain Dell Release',
'subtitle': '',
'y_axis_title': 'Flowrate',
'y_axis_units': 'af/d',
'series': md_release_series}
results[MOUNTAIN_DELL_RELEASE] = md_release_dict
md_spill_dict = {'title': 'Mountain Dell Spills',
'subtitle': '',
'y_axis_title': 'Flowrate',
'y_axis_units': 'af/d',
'series': md_spill_series}
results[MOUNTAIN_DELL_SPILL] = md_spill_dict
##
# Inflows
##
    # Parse sheet and strip off the headers (top three rows)
inflow_time = inflows.col_values(0)[3:]
inflow_dell_creek = inflows.col_values(1)[3:]
inflow_lamb_creek = inflows.col_values(2)[3:]
# Convert decimal date to datetime
inflow_datetime = []
for dec_time in inflow_time:
time_tuple = xldate_as_tuple(dec_time, workbook.datemode)
inflow_datetime.append(datetime(*time_tuple))
# Stitch together
dell_creek_series = [list(i) for i in izip(inflow_datetime, inflow_dell_creek)]
lamb_creek_series = [list(i) for i in izip(inflow_datetime, inflow_lamb_creek)]
# Create series dictionaries
dell_creek_dict = {'title': 'Dell Creek Inflow',
'subtitle': '',
'y_axis_title': 'Flowrate',
'y_axis_units': 'cfs',
'series': dell_creek_series}
results[DELL_CREEK_INFLOW] = dell_creek_dict
lamb_creek_dict = {'title': 'Lambs Creek Inflow',
'subtitle': '',
'y_axis_title': 'Flowrate',
'y_axis_units': 'cfs',
'series': lamb_creek_series}
results[LAMBS_CREEK_INFLOW] = lamb_creek_dict
##
# Reliability
##
results[RELIABILITY] = reliability.cell_value(3, 6)
    print(results[RELIABILITY])
return results
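# parse_results() returns a dict keyed by the series constants imported from
# ..model (LITTLE_DELL_VOLUME, MOUNTAIN_DELL_RELEASE, DELL_CREEK_INFLOW, etc.),
# each mapping to a plot-ready dict of the form
# {'title': ..., 'subtitle': '', 'y_axis_title': ..., 'y_axis_units': ...,
#  'series': [[datetime, value], ...]}, plus the scalar RELIABILITY value read
# from the Reliability sheet.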
|
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class AzureFirewallsOperations(object):
"""AzureFirewallsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_07_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
azure_firewall_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-07-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
azure_firewall_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified Azure Firewall.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param azure_firewall_name: The name of the Azure Firewall.
:type azure_firewall_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
azure_firewall_name=azure_firewall_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
azure_firewall_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.AzureFirewall"
"""Gets the specified Azure Firewall.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param azure_firewall_name: The name of the Azure Firewall.
:type azure_firewall_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AzureFirewall, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_07_01.models.AzureFirewall
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewall"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-07-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('AzureFirewall', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
azure_firewall_name, # type: str
parameters, # type: "_models.AzureFirewall"
**kwargs # type: Any
):
# type: (...) -> "_models.AzureFirewall"
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewall"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str', max_length=56, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'AzureFirewall')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('AzureFirewall', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('AzureFirewall', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
azure_firewall_name, # type: str
parameters, # type: "_models.AzureFirewall"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.AzureFirewall"]
"""Creates or updates the specified Azure Firewall.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param azure_firewall_name: The name of the Azure Firewall.
:type azure_firewall_name: str
:param parameters: Parameters supplied to the create or update Azure Firewall operation.
:type parameters: ~azure.mgmt.network.v2020_07_01.models.AzureFirewall
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either AzureFirewall or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_07_01.models.AzureFirewall]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewall"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
azure_firewall_name=azure_firewall_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('AzureFirewall', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str', max_length=56, min_length=1),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
def _update_tags_initial(
self,
resource_group_name, # type: str
azure_firewall_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> Optional["_models.AzureFirewall"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.AzureFirewall"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-07-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._update_tags_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('AzureFirewall', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
def begin_update_tags(
self,
resource_group_name, # type: str
azure_firewall_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.AzureFirewall"]
"""Updates tags of an Azure Firewall resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param azure_firewall_name: The name of the Azure Firewall.
:type azure_firewall_name: str
:param parameters: Parameters supplied to update azure firewall tags.
:type parameters: ~azure.mgmt.network.v2020_07_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either AzureFirewall or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2020_07_01.models.AzureFirewall]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewall"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._update_tags_initial(
resource_group_name=resource_group_name,
azure_firewall_name=azure_firewall_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('AzureFirewall', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.AzureFirewallListResult"]
"""Lists all Azure Firewalls in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AzureFirewallListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_07_01.models.AzureFirewallListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewallListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('AzureFirewallListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls'} # type: ignore
def list_all(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.AzureFirewallListResult"]
"""Gets all the Azure Firewalls in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AzureFirewallListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2020_07_01.models.AzureFirewallListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewallListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-07-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('AzureFirewallListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/azureFirewalls'} # type: ignore
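# Hedged usage sketch, not part of the generated client: this operations class is
# normally reached through NetworkManagementClient rather than constructed by hand.
# The subscription id below is a placeholder, and azure-identity is assumed to be
# installed.
if __name__ == '__main__':
    from azure.identity import DefaultAzureCredential
    from azure.mgmt.network import NetworkManagementClient

    _network_client = NetworkManagementClient(DefaultAzureCredential(), '<subscription-id>')
    for _firewall in _network_client.azure_firewalls.list_all():
        print(_firewall.name)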
|
|
"""
Support for RFXtrx components.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/rfxtrx/
"""
import logging
from collections import OrderedDict
import voluptuous as vol
import homeassistant.helpers.config_validation as cv
from homeassistant.util import slugify
from homeassistant.const import EVENT_HOMEASSISTANT_STOP
from homeassistant.helpers.entity import Entity
from homeassistant.const import (ATTR_ENTITY_ID, TEMP_CELSIUS)
REQUIREMENTS = ['pyRFXtrx==0.17.0']
DOMAIN = "rfxtrx"
DEFAULT_SIGNAL_REPETITIONS = 1
ATTR_AUTOMATIC_ADD = 'automatic_add'
ATTR_DEVICE = 'device'
ATTR_DEBUG = 'debug'
ATTR_STATE = 'state'
ATTR_NAME = 'name'
ATTR_FIREEVENT = 'fire_event'
ATTR_DATA_TYPE = 'data_type'
ATTR_DUMMY = 'dummy'
CONF_SIGNAL_REPETITIONS = 'signal_repetitions'
CONF_DEVICES = 'devices'
EVENT_BUTTON_PRESSED = 'button_pressed'
DATA_TYPES = OrderedDict([
('Temperature', TEMP_CELSIUS),
('Temperature2', TEMP_CELSIUS),
('Humidity', '%'),
('Barometer', ''),
('Wind direction', ''),
('Rain rate', ''),
('Energy usage', 'W'),
('Total usage', 'W'),
('Sound', ''),
('Sensor Status', ''),
('Counter value', '')])
RECEIVED_EVT_SUBSCRIBERS = []
RFX_DEVICES = {}
_LOGGER = logging.getLogger(__name__)
RFXOBJECT = None
def _valid_device(value, device_type):
"""Validate a dictionary of devices definitions."""
config = OrderedDict()
for key, device in value.items():
# Still accept old configuration
if 'packetid' in device.keys():
msg = 'You are using an outdated configuration of the rfxtrx ' +\
'device, {}.'.format(key) +\
' Your new config should be:\n {}: \n name: {}'\
.format(device.get('packetid'),
                            device.get(ATTR_NAME, 'device_name'))
_LOGGER.warning(msg)
key = device.get('packetid')
device.pop('packetid')
key = str(key)
        if len(key) % 2 != 0:
key = '0' + key
if get_rfx_object(key) is None:
raise vol.Invalid('Rfxtrx device {} is invalid: '
'Invalid device id for {}'.format(key, value))
if device_type == 'sensor':
config[key] = DEVICE_SCHEMA_SENSOR(device)
elif device_type == 'light_switch':
config[key] = DEVICE_SCHEMA(device)
else:
raise vol.Invalid('Rfxtrx device is invalid')
if not config[key][ATTR_NAME]:
config[key][ATTR_NAME] = key
return config
def valid_sensor(value):
"""Validate sensor configuration."""
return _valid_device(value, "sensor")
def _valid_light_switch(value):
return _valid_device(value, "light_switch")
DEVICE_SCHEMA = vol.Schema({
vol.Required(ATTR_NAME): cv.string,
vol.Optional(ATTR_FIREEVENT, default=False): cv.boolean,
})
DEVICE_SCHEMA_SENSOR = vol.Schema({
vol.Optional(ATTR_NAME, default=None): cv.string,
vol.Optional(ATTR_FIREEVENT, default=False): cv.boolean,
vol.Optional(ATTR_DATA_TYPE, default=[]):
vol.All(cv.ensure_list, [vol.In(DATA_TYPES.keys())]),
})
DEFAULT_SCHEMA = vol.Schema({
vol.Required("platform"): DOMAIN,
vol.Optional(CONF_DEVICES, default={}): vol.All(dict, _valid_light_switch),
vol.Optional(ATTR_AUTOMATIC_ADD, default=False): cv.boolean,
vol.Optional(CONF_SIGNAL_REPETITIONS, default=DEFAULT_SIGNAL_REPETITIONS):
vol.Coerce(int),
})
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(ATTR_DEVICE): cv.string,
vol.Optional(ATTR_DEBUG, default=False): cv.boolean,
vol.Optional(ATTR_DUMMY, default=False): cv.boolean,
}),
}, extra=vol.ALLOW_EXTRA)
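# Illustrative configuration shape accepted by the schemas above (values are
# placeholders): the rfxtrx domain itself needs only the transceiver device path
# plus the optional debug/dummy flags, while platform entries built on
# DEFAULT_SCHEMA key each device by packet id and validate it with DEVICE_SCHEMA
# or DEVICE_SCHEMA_SENSOR.
#
# rfxtrx:
#   device: /dev/ttyUSB0
#   debug: false
#
# switch:
#   platform: rfxtrx
#   automatic_add: true
#   signal_repetitions: 2
#   devices:
#     <packet id>:
#       name: livingroom_lamp
#       fire_event: true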
def setup(hass, config):
"""Setup the RFXtrx component."""
# Declare the Handle event
def handle_receive(event):
"""Callback all subscribers for RFXtrx gateway."""
# Log RFXCOM event
if not event.device.id_string:
return
_LOGGER.debug("Receive RFXCOM event from "
"(Device_id: %s Class: %s Sub: %s, Pkt_id: %s)",
slugify(event.device.id_string.lower()),
event.device.__class__.__name__,
event.device.subtype,
"".join("{0:02x}".format(x) for x in event.data))
# Callback to HA registered components.
for subscriber in RECEIVED_EVT_SUBSCRIBERS:
subscriber(event)
# Try to load the RFXtrx module.
import RFXtrx as rfxtrxmod
# Init the rfxtrx module.
global RFXOBJECT
device = config[DOMAIN][ATTR_DEVICE]
debug = config[DOMAIN][ATTR_DEBUG]
dummy_connection = config[DOMAIN][ATTR_DUMMY]
if dummy_connection:
RFXOBJECT =\
rfxtrxmod.Connect(device, handle_receive, debug=debug,
transport_protocol=rfxtrxmod.DummyTransport2)
else:
RFXOBJECT = rfxtrxmod.Connect(device, handle_receive, debug=debug)
def _shutdown_rfxtrx(event):
"""Close connection with RFXtrx."""
RFXOBJECT.close_connection()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, _shutdown_rfxtrx)
return True
def get_rfx_object(packetid):
"""Return the RFXObject with the packetid."""
import RFXtrx as rfxtrxmod
try:
binarypacket = bytearray.fromhex(packetid)
except ValueError:
return None
pkt = rfxtrxmod.lowlevel.parse(binarypacket)
if pkt is None:
return None
if isinstance(pkt, rfxtrxmod.lowlevel.SensorPacket):
obj = rfxtrxmod.SensorEvent(pkt)
elif isinstance(pkt, rfxtrxmod.lowlevel.Status):
obj = rfxtrxmod.StatusEvent(pkt)
else:
obj = rfxtrxmod.ControlEvent(pkt)
return obj
def get_devices_from_config(config, device):
"""Read rfxtrx configuration."""
signal_repetitions = config[CONF_SIGNAL_REPETITIONS]
devices = []
for packet_id, entity_info in config[CONF_DEVICES].items():
event = get_rfx_object(packet_id)
device_id = slugify(event.device.id_string.lower())
if device_id in RFX_DEVICES:
continue
_LOGGER.info("Add %s rfxtrx", entity_info[ATTR_NAME])
        # Check if the device must fire an event
fire_event = entity_info[ATTR_FIREEVENT]
datas = {ATTR_STATE: False, ATTR_FIREEVENT: fire_event}
new_device = device(entity_info[ATTR_NAME], event, datas,
signal_repetitions)
RFX_DEVICES[device_id] = new_device
devices.append(new_device)
return devices
def get_new_device(event, config, device):
"""Add entity if not exist and the automatic_add is True."""
device_id = slugify(event.device.id_string.lower())
if device_id in RFX_DEVICES:
return
if not config[ATTR_AUTOMATIC_ADD]:
return
pkt_id = "".join("{0:02x}".format(x) for x in event.data)
_LOGGER.info(
"Automatic add %s rfxtrx device (Class: %s Sub: %s Packet_id: %s)",
device_id,
event.device.__class__.__name__,
event.device.subtype,
pkt_id
)
datas = {ATTR_STATE: False, ATTR_FIREEVENT: False}
signal_repetitions = config[CONF_SIGNAL_REPETITIONS]
new_device = device(pkt_id, event, datas,
signal_repetitions)
RFX_DEVICES[device_id] = new_device
return new_device
def apply_received_command(event):
"""Apply command from rfxtrx."""
device_id = slugify(event.device.id_string.lower())
# Check if entity exists or previously added automatically
if device_id not in RFX_DEVICES:
return
_LOGGER.debug(
"Device_id: %s device_update. Command: %s",
device_id,
event.values['Command']
)
if event.values['Command'] == 'On'\
or event.values['Command'] == 'Off':
# Update the rfxtrx device state
is_on = event.values['Command'] == 'On'
RFX_DEVICES[device_id].update_state(is_on)
elif hasattr(RFX_DEVICES[device_id], 'brightness')\
and event.values['Command'] == 'Set level':
_brightness = (event.values['Dim level'] * 255 // 100)
# Update the rfxtrx device state
is_on = _brightness > 0
RFX_DEVICES[device_id].update_state(is_on, _brightness)
# Fire event
if RFX_DEVICES[device_id].should_fire_event:
RFX_DEVICES[device_id].hass.bus.fire(
EVENT_BUTTON_PRESSED, {
ATTR_ENTITY_ID:
RFX_DEVICES[device_id].entity_id,
ATTR_STATE: event.values['Command'].lower()
}
)
_LOGGER.info(
"Rfxtrx fired event: (event_type: %s, %s: %s, %s: %s)",
EVENT_BUTTON_PRESSED,
ATTR_ENTITY_ID,
RFX_DEVICES[device_id].entity_id,
ATTR_STATE,
event.values['Command'].lower()
)
class RfxtrxDevice(Entity):
"""Represents a Rfxtrx device.
Contains the common logic for Rfxtrx lights and switches.
"""
def __init__(self, name, event, datas, signal_repetitions):
"""Initialize the device."""
self.signal_repetitions = signal_repetitions
self._name = name
self._event = event
self._state = datas[ATTR_STATE]
self._should_fire_event = datas[ATTR_FIREEVENT]
self._brightness = 0
@property
def should_poll(self):
"""No polling needed for a RFXtrx switch."""
return False
@property
def name(self):
"""Return the name of the device if any."""
return self._name
@property
def should_fire_event(self):
"""Return is the device must fire event."""
return self._should_fire_event
@property
def is_on(self):
"""Return true if device is on."""
return self._state
@property
def assumed_state(self):
"""Return true if unable to access real state of entity."""
return True
def turn_off(self, **kwargs):
"""Turn the device off."""
self._send_command("turn_off")
def update_state(self, state, brightness=0):
"""Update det state of the device."""
self._state = state
self._brightness = brightness
self.update_ha_state()
def _send_command(self, command, brightness=0):
if not self._event:
return
if command == "turn_on":
for _ in range(self.signal_repetitions):
self._event.device.send_on(RFXOBJECT.transport)
self._state = True
elif command == "dim":
for _ in range(self.signal_repetitions):
self._event.device.send_dim(RFXOBJECT.transport,
brightness)
self._state = True
elif command == 'turn_off':
for _ in range(self.signal_repetitions):
self._event.device.send_off(RFXOBJECT.transport)
self._state = False
self._brightness = 0
elif command == "roll_up":
for _ in range(self.signal_repetitions):
self._event.device.send_open(RFXOBJECT.transport)
elif command == "roll_down":
for _ in range(self.signal_repetitions):
self._event.device.send_close(RFXOBJECT.transport)
elif command == "stop_roll":
for _ in range(self.signal_repetitions):
self._event.device.send_stop(RFXOBJECT.transport)
self.update_ha_state()
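# Hedged decoding sketch: the packet id below is an assumed example, not a value
# taken from this module. get_rfx_object() parses the hex string into an RFXtrx
# event, and the slugified id_string is the key used in RFX_DEVICES above.
if __name__ == '__main__':
    _example_event = get_rfx_object('0b1100cd0213c7f210010f70')
    if _example_event is not None:
        print(slugify(_example_event.device.id_string.lower()))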
|
|
from scipy.misc import imread, imresize, imsave, fromimage, toimage
from scipy.optimize import fmin_l_bfgs_b
import scipy.interpolate
import scipy.ndimage
import numpy as np
import time
import argparse
import warnings
from sklearn.feature_extraction.image import reconstruct_from_patches_2d, extract_patches_2d
from keras.models import Model
from keras.layers import Input
from keras.layers.convolutional import Convolution2D, AveragePooling2D, MaxPooling2D
from keras import backend as K
from keras.utils.data_utils import get_file
from keras.utils.layer_utils import convert_all_kernels_in_model
TF_WEIGHTS_PATH_NO_TOP = 'https://github.com/fchollet/deep-learning-models/releases/download/v0.1/vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5'
parser = argparse.ArgumentParser(description='Neural style transfer with Keras.')
parser.add_argument('base_image_path', metavar='base', type=str,
help='Path to the image to transform.')
parser.add_argument('style_image_paths', metavar='ref', nargs='+', type=str,
help='Path to the style reference image.')
parser.add_argument('result_prefix', metavar='res_prefix', type=str,
help='Prefix for the saved results.')
parser.add_argument("--image_size", dest="img_size", default=400, type=int,
help='Minimum image size')
parser.add_argument("--content_weight", dest="content_weight", default=0.025, type=float,
help="Weight of content")
parser.add_argument("--style_weight", dest="style_weight", nargs='+', default=[1], type=float,
help="Weight of style, can be multiple for multiple styles")
parser.add_argument("--total_variation_weight", dest="tv_weight", default=8.5e-5, type=float,
help="Total Variation weight")
parser.add_argument("--style_scale", dest="style_scale", default=1.0, type=float,
help="Scale the weighing of the style")
parser.add_argument("--num_iter", dest="num_iter", default=10, type=int,
help="Number of iterations")
parser.add_argument("--content_loss_type", default=0, type=int,
help='Can be one of 0, 1 or 2. Readme contains the required information of each mode.')
parser.add_argument("--content_layer", dest="content_layer", default="conv5_2", type=str,
help="Content layer used for content loss.")
parser.add_argument("--init_image", dest="init_image", default="content", type=str,
help="Initial image used to generate the final image. Options are 'content', 'noise', or 'gray'")
def _calc_patch_grid_dims(shape, patch_size, patch_stride):
x_w, x_h, x_c = shape
num_rows = 1 + (x_h - patch_size) // patch_stride
num_cols = 1 + (x_w - patch_size) // patch_stride
return num_rows, num_cols
def make_patch_grid(x, patch_size, patch_stride=1):
'''x shape: (num_channels, rows, cols)'''
x = x.transpose(2, 1, 0)
patches = extract_patches_2d(x, (patch_size, patch_size))
x_w, x_h, x_c = x.shape
num_rows, num_cols = _calc_patch_grid_dims(x.shape, patch_size, patch_stride)
patches = patches.reshape((num_rows, num_cols, patch_size, patch_size, x_c))
patches = patches.transpose((0, 1, 4, 2, 3))
#patches = np.rollaxis(patches, -1, 2)
return patches
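# make_patch_grid() hands back a 5-D grid intended to be laid out as
# (rows, cols, channels, patch_row, patch_col); combine_patches_grid() below
# expects that same layout when it stitches patches back into an image.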
def combine_patches_grid(in_patches, out_shape):
'''Reconstruct an image from these `patches`
input shape: (rows, cols, channels, patch_row, patch_col)
'''
num_rows, num_cols = in_patches.shape[:2]
num_channels = in_patches.shape[-3]
patch_size = in_patches.shape[-1]
num_patches = num_rows * num_cols
in_patches = np.reshape(in_patches, (num_patches, num_channels, patch_size, patch_size)) # (patches, channels, pr, pc)
in_patches = np.transpose(in_patches, (0, 2, 3, 1)) # (patches, p, p, channels)
recon = reconstruct_from_patches_2d(in_patches, out_shape)
return recon.transpose(2, 1, 0)
class PatchMatcher(object):
'''A matcher of image patches inspired by the PatchMatch algorithm.
image shape: (width, height, channels)
'''
def __init__(self, input_shape, target_img, patch_size=1, patch_stride=1, jump_size=0.5,
num_propagation_steps=5, num_random_steps=5, random_max_radius=1.0, random_scale=0.5):
self.input_shape = input_shape
self.patch_size = patch_size
self.patch_stride = patch_stride
self.jump_size = jump_size
self.num_propagation_steps = num_propagation_steps
self.num_random_steps = num_random_steps
self.random_max_radius = random_max_radius
self.random_scale = random_scale
self.num_input_rows, self.num_input_cols = _calc_patch_grid_dims(input_shape, patch_size, patch_stride)
self.target_patches = make_patch_grid(target_img, patch_size)
self.target_patches_normed = self.normalize_patches(self.target_patches)
self.coords = np.random.uniform(0.0, 1.0, # TODO: switch to pixels
(2, self.num_input_rows, self.num_input_cols))# * [[[self.num_input_rows]],[[self.num_input_cols]]]
self.similarity = np.zeros(input_shape[:2:-1], dtype ='float32')
self.min_propagration_row = 1.0 / self.num_input_rows
self.min_propagration_col = 1.0 / self.num_input_cols
self.delta_row = np.array([[[self.min_propagration_row]], [[0.0]]])
self.delta_col = np.array([[[0.0]], [[self.min_propagration_col]]])
def update(self, input_img, reverse_propagation=False):
input_patches = self.get_patches_for(input_img)
self.update_with_patches(self.normalize_patches(input_patches), reverse_propagation=reverse_propagation)
def update_with_patches(self, input_patches, reverse_propagation=False):
self._propagate(input_patches, reverse_propagation=reverse_propagation)
self._random_update(input_patches)
def get_patches_for(self, img):
return make_patch_grid(img, self.patch_size)
def normalize_patches(self, patches):
norm = np.sqrt(np.sum(np.square(patches), axis=(2, 3, 4), keepdims=True))
return patches / norm
def _propagate(self, input_patches, reverse_propagation=False):
if reverse_propagation:
roll_direction = 1
else:
roll_direction = -1
sign = float(roll_direction)
for step_i in range(self.num_propagation_steps):
new_coords = self.clip_coords(np.roll(self.coords, roll_direction, 1) + self.delta_row * sign)
coords_row, similarity_row = self.eval_state(new_coords, input_patches)
new_coords = self.clip_coords(np.roll(self.coords, roll_direction, 2) + self.delta_col * sign)
coords_col, similarity_col = self.eval_state(new_coords, input_patches)
self.coords, self.similarity = self.take_best(coords_row, similarity_row, coords_col, similarity_col)
def _random_update(self, input_patches):
for alpha in range(1, self.num_random_steps + 1): # NOTE this should actually stop when the move is < 1
new_coords = self.clip_coords(self.coords + np.random.uniform(-self.random_max_radius, self.random_max_radius, self.coords.shape) * self.random_scale ** alpha)
self.coords, self.similarity = self.eval_state(new_coords, input_patches)
def eval_state(self, new_coords, input_patches):
new_similarity = self.patch_similarity(input_patches, new_coords)
delta_similarity = new_similarity - self.similarity
coords = np.where(delta_similarity > 0, new_coords, self.coords)
best_similarity = np.where(delta_similarity > 0, new_similarity, self.similarity)
return coords, best_similarity
def take_best(self, coords_a, similarity_a, coords_b, similarity_b):
delta_similarity = similarity_a - similarity_b
best_coords = np.where(delta_similarity > 0, coords_a, coords_b)
best_similarity = np.where(delta_similarity > 0, similarity_a, similarity_b)
return best_coords, best_similarity
def patch_similarity(self, source, coords):
'''Check the similarity of the patches specified in coords.'''
target_vals = self.lookup_coords(self.target_patches_normed, coords)
err = source * target_vals
return np.sum(err, axis=(2, 3, 4))
def clip_coords(self, coords):
# TODO: should this all be in pixel space?
coords = np.clip(coords, 0.0, 1.0)
return coords
def lookup_coords(self, x, coords):
x_shape = np.expand_dims(np.expand_dims(x.shape, -1), -1)
i_coords = np.round(coords * (x_shape[:2] - 1)).astype('int32')
return x[i_coords[0], i_coords[1]]
def get_reconstruction(self, patches=None, combined=None):
if combined is not None:
patches = make_patch_grid(combined, self.patch_size)
if patches is None:
patches = self.target_patches
patches = self.lookup_coords(patches, self.coords)
recon = combine_patches_grid(patches, self.input_shape)
return recon
def scale(self, new_shape, new_target_img):
'''Create a new matcher of the given shape and replace its
state with a scaled up version of the current matcher's state.
'''
new_matcher = PatchMatcher(new_shape, new_target_img, patch_size=self.patch_size,
patch_stride=self.patch_stride, jump_size=self.jump_size,
num_propagation_steps=self.num_propagation_steps,
num_random_steps=self.num_random_steps,
random_max_radius=self.random_max_radius,
random_scale=self.random_scale)
new_matcher.coords = congrid(self.coords, new_matcher.coords.shape, method='neighbour')
new_matcher.similarity = congrid(self.similarity, new_matcher.coords.shape[1:], method='neighbour')
return new_matcher
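# Minimal usage sketch for PatchMatcher (an assumption about intended use,
# inferred from the class above, not taken from the author's driver code):
# match an input image against a target, refine for a few passes, then pull
# back a reconstruction of the input built from target patches.
def _example_patch_matcher(input_img, target_img):
    matcher = PatchMatcher(input_img.shape, target_img, patch_size=3)
    for _ in range(3):
        matcher.update(input_img)
    return matcher.get_reconstruction()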
def congrid(a, newdims, method='linear', centre=False, minusone=False):
if not a.dtype in [np.float64, np.float32]:
a = np.cast[float](a)
m1 = np.cast[int](minusone)
ofs = np.cast[int](centre) * 0.5
old = np.array( a.shape )
ndims = len( a.shape )
if len( newdims ) != ndims:
print ("[congrid] dimensions error. "
"This routine currently only support "
"rebinning to the same number of dimensions.")
return None
newdims = np.asarray( newdims, dtype=float )
dimlist = []
if method == 'neighbour':
for i in range( ndims ):
base = np.indices(newdims.astype(int))[i]
dimlist.append( (old[i] - m1) / (newdims[i] - m1) \
* (base + ofs) - ofs )
cd = np.array( dimlist ).round().astype(int)
newa = a[tuple(cd)]
return newa
elif method in ['nearest','linear']:
# calculate new dims
for i in range( ndims ):
base = np.arange( newdims[i] )
dimlist.append( (old[i] - m1) / (newdims[i] - m1) \
* (base + ofs) - ofs )
# specify old dims
olddims = [np.arange(i, dtype=float) for i in list( a.shape )]
# first interpolation - for ndims = any
mint = scipy.interpolate.interp1d( olddims[-1], a, kind=method )
newa = mint( dimlist[-1] )
trorder = [ndims - 1] + list(range( ndims - 1 ))
for i in range( ndims - 2, -1, -1 ):
newa = newa.transpose( trorder )
mint = scipy.interpolate.interp1d( olddims[i], newa, kind=method )
newa = mint( dimlist[i] )
if ndims > 1:
# need one more transpose to return to original dimensions
newa = newa.transpose( trorder )
return newa
elif method in ['spline']:
oslices = [ slice(0,j) for j in old ]
oldcoords = np.ogrid[oslices]
nslices = [ slice(0,j) for j in list(newdims) ]
newcoords = np.mgrid[nslices]
newcoords_dims = [i for i in range(newcoords.ndim)]
#make first index last
newcoords_dims.append(newcoords_dims.pop(0))
newcoords_tr = newcoords.transpose(newcoords_dims)
# makes a view that affects newcoords
newcoords_tr += ofs
deltas = (np.asarray(old) - m1) / (newdims - m1)
newcoords_tr *= deltas
newcoords_tr -= ofs
newa = scipy.ndimage.map_coordinates(a, newcoords)
return newa
else:
print("Congrid error: Unrecognized interpolation type.\n",
"Currently only \'neighbour\', \'nearest\',\'linear\',",
"and \'spline\' are supported.")
return None
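# Hedged example of congrid in the role it plays above: nearest-neighbour
# rescaling of the (2, rows, cols) coordinate field when a PatchMatcher is
# scaled up to a finer resolution. The array sizes are made up for illustration.
def _example_congrid_resize():
    coords = np.random.uniform(0.0, 1.0, (2, 8, 8))
    upscaled = congrid(coords, (2, 16, 16), method='neighbour')
    return upscaled.shape  # (2, 16, 16)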
args = parser.parse_args()
base_image_path = args.base_image_path
style_reference_image_paths = args.style_image_paths
style_image_paths = [path for path in args.style_image_paths]
result_prefix = args.result_prefix
content_weight = args.content_weight
total_variation_weight = args.tv_weight
scale_sizes = []
size = args.img_size
while size > 64:
scale_sizes.append(size // 2)
size //= 2
img_width = img_height = 0
img_WIDTH = img_HEIGHT = 0
aspect_ratio = 0
read_mode = "color"
style_weights = []
if len(style_image_paths) != len(args.style_weight):
weight_sum = sum(args.style_weight) * args.style_scale
count = len(style_image_paths)
for i in range(len(style_image_paths)):
style_weights.append(weight_sum / count)
else:
style_weights = [weight*args.style_scale for weight in args.style_weight]
def pooling_func(x):
# return AveragePooling2D((2, 2), strides=(2, 2))(x)
return MaxPooling2D((2, 2), strides=(2, 2))(x)
#start proc_img
def preprocess_image(image_path, sc_size=args.img_size, load_dims=False):
global img_width, img_height, img_WIDTH, img_HEIGHT, aspect_ratio
mode = "RGB"
# mode = "RGB" if read_mode == "color" else "L"
img = imread(image_path, mode=mode) # Prevents crashes due to PNG images (ARGB)
if load_dims:
img_WIDTH = img.shape[0]
img_HEIGHT = img.shape[1]
aspect_ratio = float(img_HEIGHT) / img_WIDTH
img_width = sc_size
img_height = int(img_width * aspect_ratio)
img = imresize(img, (img_width, img_height)).astype('float32')
# RGB -> BGR
img = img[:, :, ::-1]
img[:, :, 0] -= 103.939
img[:, :, 1] -= 116.779
img[:, :, 2] -= 123.68
img = np.expand_dims(img, axis=0)
return img
# util function to convert a tensor into a valid image
def deprocess_image(x):
x = x.reshape((img_width, img_height, 3))
x[:, :, 0] += 103.939
x[:, :, 1] += 116.779
x[:, :, 2] += 123.68
# BGR -> RGB
x = x[:, :, ::-1]
x = np.clip(x, 0, 255).astype('uint8')
return x
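# Quick sanity sketch (illustrative only; `path` is a placeholder argument):
# deprocess_image undoes the BGR flip and VGG mean subtraction applied by
# preprocess_image, so a round trip gives back a uint8 array of shape
# (img_width, img_height, 3).
def _example_pre_deprocess_roundtrip(path):
    arr = preprocess_image(path, 256, load_dims=True)
    return deprocess_image(arr.copy()).shape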
combination_prev = ""
for scale_size in scale_sizes:
base_image = K.variable(preprocess_image(base_image_path, scale_size, True))
style_reference_images = [K.variable(preprocess_image(path)) for path in style_image_paths]
# this will contain our generated image
if combination_prev != "":
combination_image = imresize(combination_prev, (img_width, img_height), interp="bilinear").astype('float32')
else:
combination_image = K.placeholder((1, img_width, img_height, 3)) # tensorflow
image_tensors = [base_image]
for style_image_tensor in style_reference_images:
image_tensors.append(style_image_tensor)
image_tensors.append(combination_image)
nb_tensors = len(image_tensors)
nb_style_images = nb_tensors - 2 # Content and Output image not considered
# combine the various images into a single Keras tensor
input_tensor = K.concatenate(image_tensors, axis=0)
shape = (nb_tensors, img_width, img_height, 3) #tensorflow
#build the model
model_input = Input(tensor=input_tensor, shape=shape)
# build the VGG16 network with our 3 images as input
x = Convolution2D(64, 3, 3, activation='relu', name='conv1_1', border_mode='same')(model_input)
x = Convolution2D(64, 3, 3, activation='relu', name='conv1_2', border_mode='same')(x)
x = pooling_func(x)
x = Convolution2D(128, 3, 3, activation='relu', name='conv2_1', border_mode='same')(x)
x = Convolution2D(128, 3, 3, activation='relu', name='conv2_2', border_mode='same')(x)
x = pooling_func(x)
x = Convolution2D(256, 3, 3, activation='relu', name='conv3_1', border_mode='same')(x)
x = Convolution2D(256, 3, 3, activation='relu', name='conv3_2', border_mode='same')(x)
x = Convolution2D(256, 3, 3, activation='relu', name='conv3_3', border_mode='same')(x)
x = pooling_func(x)
x = Convolution2D(512, 3, 3, activation='relu', name='conv4_1', border_mode='same')(x)
x = Convolution2D(512, 3, 3, activation='relu', name='conv4_2', border_mode='same')(x)
x = Convolution2D(512, 3, 3, activation='relu', name='conv4_3', border_mode='same')(x)
x = pooling_func(x)
x = Convolution2D(512, 3, 3, activation='relu', name='conv5_1', border_mode='same')(x)
x = Convolution2D(512, 3, 3, activation='relu', name='conv5_2', border_mode='same')(x)
x = Convolution2D(512, 3, 3, activation='relu', name='conv5_3', border_mode='same')(x)
x = pooling_func(x)
model = Model(model_input, x)
weights = get_file('vgg16_weights_tf_dim_ordering_tf_kernels_notop.h5', TF_WEIGHTS_PATH_NO_TOP, cache_subdir='models')
print("Weights Path: ", weights)
model.load_weights(weights)
print('Model loaded.')
# get the symbolic outputs of each "key" layer (we gave them unique names).
outputs_dict = dict([(layer.name, layer.output) for layer in model.layers])
shape_dict = dict([(layer.name, layer.output_shape) for layer in model.layers])
# compute the neural style loss
# first we need to define 4 util functions
# the gram matrix of an image tensor (feature-wise outer product)
def gram_matrix(x):
features = K.batch_flatten(K.permute_dimensions(x, (2, 0, 1)))
gram = K.dot(features, K.transpose(features))
return gram
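# For reference, a NumPy sketch of the same Gram-matrix computation (an
# illustration only; the script itself uses the Keras version above): flatten
# each channel of a (height, width, channels) feature map and take the
# feature-wise outer product.
def _gram_matrix_np(feature_map):
    channels_first = np.transpose(feature_map, (2, 0, 1))
    flat = channels_first.reshape(channels_first.shape[0], -1)
    return flat.dot(flat.T)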
# the 3rd loss function, total variation loss,
# designed to keep the generated image locally coherent
def total_variation_loss(x):
assert K.ndim(x) == 4
a = K.square(x[:, :img_width - 1, :img_height - 1, :] - x[:, 1:, :img_height - 1, :])
b = K.square(x[:, :img_width - 1, :img_height - 1, :] - x[:, :img_width - 1, 1:, :])
return K.sum(K.pow(a + b, 1.25))
def mrf_loss(style, combination, patch_size=3, patch_stride=1):
# extract patches from the style and combination feature maps with the
# PatchMatch-based matcher (style_pmatcher is assumed to be a PatchMatcher
# built for this layer's feature-map shape)
style_patches = style_pmatcher.get_patches_for(style)
style_patches_norm = style_pmatcher.normalize_patches(style_patches)
combination_patches = style_pmatcher.get_patches_for(combination)
# style_patches, style_patches_norm = make_patches(style, patch_size, patch_stride)
style_pmatcher.update(style, True)
patch_coords = style_pmatcher.coords
best_style_patches = K.reshape(patch_coords, K.shape(style_patches))
loss = K.sum(K.square(best_style_patches - combination_patches)) / patch_size ** 2
return loss
# an auxiliary loss function
# designed to maintain the "content" of the
# base image in the generated image
def content_loss(base, combination):
channels = K.shape(base)[-1]
size = img_width * img_height
if args.content_loss_type == 1:
multiplier = 1 / (2. * channels ** 0.5 * size ** 0.5)
elif args.content_loss_type == 2:
multiplier = 1 / (channels * size)
else:
multiplier = 1.
return multiplier * K.sum(K.square(combination - base))
# combine these loss functions into a single scalar
loss = K.variable(0.)
layer_features = outputs_dict[args.content_layer] # 'conv5_2' or 'conv4_2'
base_image_features = layer_features[0, :, :, :]
combination_features = layer_features[nb_tensors - 1, :, :, :]
loss += content_weight * content_loss(base_image_features,
combination_features)
channel_index = -1
#Style Loss calculation
mrf_layers = ['conv3_1', 'conv4_1']
# feature_layers = ['conv1_1', 'conv2_1', 'conv3_1', 'conv4_1', 'conv5_1']
for layer_name in mrf_layers:
output_features = outputs_dict[layer_name]
shape = shape_dict[layer_name]
combination_features = output_features[nb_tensors - 1, :, :, :]
style_features = output_features[1:nb_tensors - 1, :, :, :]
sl = []
for j in range(nb_style_images):
sl.append(mrf_loss(style_features[j], combination_features))
for j in range(nb_style_images):
loss += (style_weights[j] / len(mrf_layers)) * sl[j]
loss += total_variation_weight * total_variation_loss(combination_image)
# get the gradients of the generated image wrt the loss
grads = K.gradients(loss, combination_image)
outputs = [loss]
if type(grads) in {list, tuple}:
outputs += grads
else:
outputs.append(grads)
f_outputs = K.function([combination_image], outputs)
def eval_loss_and_grads(x):
x = x.reshape((1, img_width, img_height, 3))
outs = f_outputs([x])
loss_value = outs[0]
if len(outs[1:]) == 1:
grad_values = outs[1].flatten().astype('float64')
else:
grad_values = np.array(outs[1:]).flatten().astype('float64')
return loss_value, grad_values
# # this Evaluator class makes it possible
# # to compute loss and gradients in one pass
# # while retrieving them via two separate functions,
# # "loss" and "grads". This is done because scipy.optimize
# # requires separate functions for loss and gradients,
# # but computing them separately would be inefficient.
class Evaluator(object):
def __init__(self):
self.loss_value = None
self.grads_values = None
def loss(self, x):
assert self.loss_value is None
loss_value, grad_values = eval_loss_and_grads(x)
self.loss_value = loss_value
self.grad_values = grad_values
return self.loss_value
def grads(self, x):
assert self.loss_value is not None
grad_values = np.copy(self.grad_values)
self.loss_value = None
self.grad_values = None
return grad_values
evaluator = Evaluator()
# (L-BFGS)
if "content" in args.init_image or "gray" in args.init_image:
x = preprocess_image(base_image_path, load_dims=True)
elif "noise" in args.init_image:
x = np.random.uniform(0, 255, (1, img_width, img_height, 3)) - 128.
if K.image_dim_ordering() == "th":
x = x.transpose((0, 3, 1, 2))
else:
print("Using initial image : ", args.init_image)
x = preprocess_image(args.init_image)
num_iter = args.num_iter
prev_min_val = -1
for i in range(num_iter):
print("Starting iteration %d of %d" % ((i + 1), num_iter))
start_time = time.time()
x, min_val, info = fmin_l_bfgs_b(evaluator.loss, x.flatten(), fprime=evaluator.grads, maxfun=20)
combination_prev = x
if prev_min_val == -1:
prev_min_val = min_val
improvement = (prev_min_val - min_val) / prev_min_val * 100
print('Current loss value:', min_val, " Improvement : %0.3f" % improvement, "%")
prev_min_val = min_val
# save current generated image
img = deprocess_image(x.copy())
img_ht = int(img_width * aspect_ratio)
print("Rescaling Image to (%d, %d)" % (img_width, img_ht))
img = imresize(img, (img_width, img_ht), interp="bilinear")
fname = result_prefix + '_at_iteration_%d.png' % (i + 1)
imsave(fname, img)
end_time = time.time()
print('Image saved as', fname)
print('Iteration %d completed in %ds' % (i + 1, end_time - start_time))
|
|
###############################################################################
#
# Copyright (c) 2011 Ruslan Spivak
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
###############################################################################
__author__ = 'Ruslan Spivak <[email protected]>'
import unittest
from slimit import minify
def decorator(cls):
def make_test_function(input, expected):
def test_func(self):
self.assertMinified(input, expected)
return test_func
for index, (input, expected) in enumerate(cls.TEST_CASES):
func = make_test_function(input, expected)
setattr(cls, 'test_case_%d' % index, func)
return cls
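# For one entry, the decorator above is roughly equivalent to this hand-written
# version (a sketch for illustration, not used by the tests):
def _equivalent_manual_decorator(cls):
    def test_case_0(self):
        input, expected = cls.TEST_CASES[0]
        self.assertMinified(input, expected)
    setattr(cls, 'test_case_0', test_case_0)
    return cls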
@decorator
class MinifierTestCase(unittest.TestCase):
def assertMinified(self, source, expected):
minified = minify(source)
self.maxDiff = None
self.assertSequenceEqual(minified, expected)
TEST_CASES = [
("""
jQuery.fn = jQuery.prototype = {
// For internal use only.
_data: function( elem, name, data ) {
return jQuery.data( elem, name, data, true );
}
};
""",
'jQuery.fn=jQuery.prototype={_data:function(elem,name,data){return jQuery.data(elem,name,data,true);}};'),
('context = context instanceof jQuery ? context[0] : context;',
'context=context instanceof jQuery?context[0]:context;'
),
("""
/*
* A number of helper functions used for managing events.
* Many of the ideas behind this code originated from
* Dean Edwards' addEvent library.
*/
if ( elem && elem.parentNode ) {
// Handle the case where IE and Opera return items
// by name instead of ID
if ( elem.id !== match[2] ) {
return rootjQuery.find( selector );
}
// Otherwise, we inject the element directly into the jQuery object
this.length = 1;
this[0] = elem;
}
""",
'if(elem&&elem.parentNode){if(elem.id!==match[2])return rootjQuery.find(selector);this.length=1;this[0]=elem;}'
),
("""
var a = function( obj ) {
for ( var name in obj ) {
return false;
}
return true;
};
""",
'var a=function(obj){for(var name in obj)return false;return true;};'
),
("""
x = "string", y = 5;
(x = 5) ? true : false;
for (p in obj)
;
if (true)
val = null;
else
val = false;
""",
'x="string",y=5;(x=5)?true:false;for(p in obj);if(true)val=null;else val=false;'
),
# for loops + empty statement in loop body
("""
for (x = 0; true; x++)
;
for (; true; x++)
;
for (x = 0, y = 5; true; x++)
;
y = (x + 5) * 20;
""",
'for(x=0;true;x++);for(;true;x++);for(x=0,y=5;true;x++);y=(x+5)*20;'),
# unary expressions
("""
delete x;
typeof x;
void x;
x += (!y)++;
""",
'delete x;typeof x;void x;x+=(!y)++;'),
# label + break label + continue label
("""
label:
if ( i == 0 )
continue label;
switch (day) {
case 5:
break ;
default:
break label;
}
""",
'label:if(i==0)continue label;switch(day){case 5:break;default:break label;}'),
# break + continue: no labels
("""
while (i <= 7) {
if ( i == 3 )
continue;
if ( i == 0 )
break;
}
""",
'while(i<=7){if(i==3)continue;if(i==0)break;}'),
# regex + one line statements in if and if .. else
("""
function a(x, y) {
var re = /ab+c/;
if (x == 1)
return x + y;
if (x == 3)
return {x: 1};
else
return;
}
""",
'function a(x,y){var re=/ab+c/;if(x==1)return x+y;if(x==3)return{x:1};else return;}'),
# new
('return new jQuery.fn.init( selector, context, rootjQuery );',
'return new jQuery.fn.init(selector,context,rootjQuery);'
),
# no space after 'else' when the next token is (, {
("""
if (true) {
x = true;
y = 3;
} else {
x = false
y = 5
}
""",
'if(true){x=true;y=3;}else{x=false;y=5;}'),
("""
if (true) {
x = true;
y = 3;
} else
(x + ' qw').split(' ');
""",
"if(true){x=true;y=3;}else(x+' qw').split(' ');"),
##############################################################
# Block braces removal
##############################################################
# do while
('do { x += 1; } while(true);', 'do x+=1;while(true);'),
# do while: multiple statements
('do { x += 1; y += 1;} while(true);', 'do{x+=1;y+=1;}while(true);'),
# elision
('var a = [1, 2, 3, ,,,5];', 'var a=[1,2,3,,,,5];'),
# with
("""
with (obj) {
a = b;
}
""",
'with(obj)a=b;'),
# with: multiple statements
("""
with (obj) {
a = b;
c = d;
}
""",
'with(obj){a=b;c=d;}'),
# if else
("""
if (true) {
x = true;
} else {
x = false
}
""",
'if(true)x=true;else x=false;'),
# if: multiple statements
("""
if (true) {
x = true;
y = false;
} else {
x = false;
y = true;
}
""",
'if(true){x=true;y=false;}else{x=false;y=true;}'),
# try catch finally: one statement
("""
try {
throw "my_exception"; // generates an exception
}
catch (e) {
// statements to handle any exceptions
log(e); // pass exception object to error handler
}
finally {
closefiles(); // always close the resource
}
""",
'try{throw "my_exception";}catch(e){log(e);}finally{closefiles();}'
),
# try catch finally: no statements
("""
try {
}
catch (e) {
}
finally {
}
""",
'try{}catch(e){}finally{}'
),
# try catch finally: multiple statements
("""
try {
x = 3;
y = 5;
}
catch (e) {
log(e);
log('e');
}
finally {
z = 7;
log('z');
}
""",
"try{x=3;y=5;}catch(e){log(e);log('e');}finally{z=7;log('z');}"
),
# tricky case with an 'if' nested in 'if .. else'
# We need to preserve braces in the first 'if' otherwise
# 'else' might get associated with nested 'if' instead
("""
if ( obj ) {
for ( n in obj ) {
if ( v === false) {
break;
}
}
} else {
for ( ; i < l; ) {
if ( nv === false ) {
break;
}
}
}
""",
'if(obj){for(n in obj)if(v===false)break;}else for(;i<l;)if(nv===false)break;'),
# We don't care about nested 'if' when enclosing 'if' block
# contains multiple statements because braces won't be removed
# by visit_Block when there are multiple statements in the block
("""
if ( obj ) {
for ( n in obj ) {
if ( v === false) {
break;
}
}
x = 5;
} else {
for ( ; i < l; ) {
if ( nv === false ) {
break;
}
}
}
""",
'if(obj){for(n in obj)if(v===false)break;x=5;}else for(;i<l;)if(nv===false)break;'),
# No dangling 'else' - remove braces
("""
if ( obj ) {
for ( n in obj ) {
if ( v === false) {
break;
} else {
n = 3;
}
}
} else {
for ( ; i < l; ) {
if ( nv === false ) {
break;
}
}
}
""",
'if(obj)for(n in obj)if(v===false)break;else n=3;else for(;i<l;)if(nv===false)break;'),
# foo["bar"] --> foo.bar
('foo["bar"];', 'foo.bar;'),
("foo['bar'];", 'foo.bar;'),
("""foo['bar"']=42;""", """foo['bar"']=42;"""),
("""foo["bar'"]=42;""", """foo["bar'"]=42;"""),
('foo["bar bar"];', 'foo["bar bar"];'),
('foo["bar"+"bar"];', 'foo["bar"+"bar"];'),
# https://github.com/rspivak/slimit/issues/34
# test some reserved keywords
('foo["for"];', 'foo["for"];'),
('foo["class"];', 'foo["class"];'),
# https://github.com/rspivak/slimit/issues/21
# c||(c=393,a=323,b=2321); --> c||c=393,a=323,b=2321; ERROR
('c||(c=393);', 'c||(c=393);'),
('c||(c=393,a=323,b=2321);', 'c||(c=393,a=323,b=2321);'),
# https://github.com/rspivak/slimit/issues/25
('for(a?b:c;d;)e=1;', 'for(a?b:c;d;)e=1;'),
# https://github.com/rspivak/slimit/issues/26
('"begin"+ ++a+"end";', '"begin"+ ++a+"end";'),
# https://github.com/rspivak/slimit/issues/28
("""
(function($) {
$.hello = 'world';
}(jQuery));
""",
"(function($){$.hello='world';}(jQuery));"),
# function call in FOR init
('for(o(); i < 3; i++) {}', 'for(o();i<3;i++){}'),
# unary increment operator in FOR init
('for(i++; i < 3; i++) {}', 'for(i++;i<3;i++){}'),
# unary decrement operator in FOR init
('for(i--; i < 3; i++) {}', 'for(i--;i<3;i++){}'),
# issue-37, simple identifier in FOR init
('for(i; i < 3; i++) {}', 'for(i;i<3;i++){}'),
# https://github.com/rspivak/slimit/issues/32
("""
Name.prototype = {
getPageProp: function Page_getPageProp(key) {
return this.pageDict.get(key);
},
get fullName() {
return this.first + " " + this.last;
},
set fullName(name) {
var names = name.split(" ");
this.first = names[0];
this.last = names[1];
}
};
""",
('Name.prototype={getPageProp:function Page_getPageProp(key){'
'return this.pageDict.get(key);},'
'get fullName(){return this.first+" "+this.last;},'
'set fullName(name){var names=name.split(" ");this.first=names[0];'
'this.last=names[1];}};')
),
# https://github.com/rspivak/slimit/issues/47 - might be a Python 3
# related issue
('testObj[":"] = undefined; // Breaks', 'testObj[":"]=undefined;'),
('testObj["::"] = undefined; // Breaks', 'testObj["::"]=undefined;'),
('testObj["a:"] = undefined; // Breaks', 'testObj["a:"]=undefined;'),
('testObj["."] = undefined; // OK', 'testObj["."]=undefined;'),
('testObj["{"] = undefined; // OK', 'testObj["{"]=undefined;'),
('testObj["}"] = undefined; // OK', 'testObj["}"]=undefined;'),
('testObj["["] = undefined; // Breaks', 'testObj["["]=undefined;'),
('testObj["]"] = undefined; // Breaks', 'testObj["]"]=undefined;'),
('testObj["("] = undefined; // OK', 'testObj["("]=undefined;'),
('testObj[")"] = undefined; // OK', 'testObj[")"]=undefined;'),
('testObj["="] = undefined; // Breaks', 'testObj["="]=undefined;'),
('testObj["-"] = undefined; // OK', 'testObj["-"]=undefined;'),
('testObj["+"] = undefined; // OK', 'testObj["+"]=undefined;'),
('testObj["*"] = undefined; // OK', 'testObj["*"]=undefined;'),
('testObj["/"] = undefined; // OK', 'testObj["/"]=undefined;'),
(r'testObj["\\"] = undefined; // Breaks', r'testObj["\\"]=undefined;'),
('testObj["%"] = undefined; // OK', 'testObj["%"]=undefined;'),
('testObj["<"] = undefined; // Breaks', 'testObj["<"]=undefined;'),
('testObj[">"] = undefined; // Breaks', 'testObj[">"]=undefined;'),
('testObj["!"] = undefined; // OK', 'testObj["!"]=undefined;'),
('testObj["?"] = undefined; // Breaks', 'testObj["?"]=undefined;'),
('testObj[","] = undefined; // OK', 'testObj[","]=undefined;'),
('testObj["@"] = undefined; // Breaks', 'testObj["@"]=undefined;'),
('testObj["#"] = undefined; // OK', 'testObj["#"]=undefined;'),
('testObj["&"] = undefined; // OK', 'testObj["&"]=undefined;'),
('testObj["|"] = undefined; // OK', 'testObj["|"]=undefined;'),
('testObj["~"] = undefined; // OK', 'testObj["~"]=undefined;'),
('testObj["`"] = undefined; // Breaks', 'testObj["`"]=undefined;'),
('testObj["."] = undefined; // OK', 'testObj["."]=undefined;'),
]
|
|
# -*- coding: utf-8 -*-
from ome.base import NotFoundError
from ome.util import scrub_gene_id, load_tsv
from ome import settings
import re
import cobra
import cobra.io
from cobra.core import Formula
from os.path import join
import hashlib
import logging
def hash_reaction(reaction, string_only=False):
"""Generate a unique hash for the metabolites and coefficients of the
reaction.
"""
def sorted_mets(reaction):
return sorted([(m.id, v) for m, v in reaction.metabolites.iteritems()],
key=lambda x: x[0])
if string_only:
hash_fn = lambda s: s
else:
hash_fn = lambda s: hashlib.md5(s).hexdigest()
return hash_fn(''.join(['%s%.3f' % t for t in sorted_mets(reaction)]))
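# Illustrative sketch with a hypothetical toy reaction (ids and coefficients are
# made up): with string_only=True the function returns the concatenated
# '<met_id><coefficient>' string that would otherwise be md5-hashed.
def _example_hash_reaction():
    met_out = cobra.core.Metabolite('glc__D_e')
    met_in = cobra.core.Metabolite('glc__D_c')
    rxn = cobra.core.Reaction('GLCt')
    rxn.add_metabolites({met_out: -1, met_in: 1})
    return hash_reaction(rxn, string_only=True)  # 'glc__D_c1.000glc__D_e-1.000'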
def load_and_normalize(model_filepath):
"""Load a model, and give it a particular id style"""
# load the model
if model_filepath.endswith('.xml'):
model = cobra.io.read_sbml_model(model_filepath)
elif model_filepath.endswith('.mat'):
model = cobra.io.load_matlab_model(model_filepath)
else:
raise Exception('The %s file is not a valid filetype' % model_filepath)
# convert the ids
model, old_ids = convert_ids(model)
# extract metabolite formulas from names (e.g. for iAF1260)
model = get_formulas_from_names(model)
return model, old_ids
def _get_rule_prefs():
"""Get gene_reaction_rule prefs."""
return load_tsv(settings.gene_reaction_rule_prefs, required_column_num=2)
def _check_rule_prefs(rule_prefs, rule):
"""Check the gene_reaction_rule against the prefs file, and return an existing
rule or the fixed one."""
for row in rule_prefs:
old_rule, new_rule = row
if old_rule == rule:
return new_rule
return rule
def convert_ids(model):
"""Converts metabolite and reaction ids to the new style.
Returns a tuple with the new model and a dictionary of old ids set up like this:
{'reactions': {'new_id': 'old_id'},
'metabolites': {'new_id': 'old_id'},
'genes': {'new_id': 'old_id'}}
"""
# loop through the ids:
metabolite_id_dict = {}
reaction_id_dict = {}
gene_id_dict = {}
# fix metabolites
for metabolite in model.metabolites:
new_id = id_for_new_id_style(fix_legacy_id(metabolite.id, use_hyphens=False),
is_metabolite=True)
metabolite_id_dict[new_id] = metabolite.id
metabolite.id = new_id
model.metabolites._generate_index()
# remove boundary metabolites (end in _b and only present in exchanges). Be
# sure to loop through a static list of ids so the list does not get shorter
# as the metabolites are deleted
for metabolite_id in [str(x) for x in model.metabolites]:
metabolite = model.metabolites.get_by_id(metabolite_id)
if not metabolite.id.endswith("_b"):
continue
for reaction in list(metabolite._reaction):
if reaction.id.startswith("EX_"):
metabolite.remove_from_model()
break
model.metabolites._generate_index()
# load fixes for gene_reaction_rule's
rule_prefs = _get_rule_prefs()
# separate ids and compartments, and convert to the new_id_style
for reaction in model.reactions:
new_id = id_for_new_id_style(fix_legacy_id(reaction.id, use_hyphens=False))
reaction_id_dict[new_id] = reaction.id
reaction.id = new_id
# fix the gene reaction rules
reaction.gene_reaction_rule = _check_rule_prefs(rule_prefs, reaction.gene_reaction_rule)
model.reactions._generate_index()
# update the genes
for gene in list(model.genes):
new_id = scrub_gene_id(gene.id)
gene_id_dict[new_id] = gene.id
for reaction in gene.reactions:
reaction.gene_reaction_rule = re.sub(r'\b'+gene.id+r'\b', new_id,
reaction.gene_reaction_rule)
# remove old genes
for gene in list(model.genes):
if len(gene.reactions) == 0:
gene.remove_from_model()
# fix the model id
bigg_id = re.sub(r'[^a-zA-Z0-9_]', '_', model.id)
model.id = bigg_id
model.description = bigg_id
old_ids = {'metabolites': metabolite_id_dict,
'reactions': reaction_id_dict,
'genes':gene_id_dict}
return model, old_ids
# the regex to separate the base id, the chirality ('_L') and the compartment ('_c')
reg_compartment = re.compile(r'(.*?)[_\(\[]([a-z][a-z0-9]?)[_\)\]]?$')
reg_chirality = re.compile(r'(.*?)_?_([LDSRM])$')
def id_for_new_id_style(old_id, is_metabolite=False):
""" Get the new style id"""
new_id = old_id
def _join_parts(the_id, the_compartment):
if the_compartment:
the_id = the_id + '_' + the_compartment
return the_id
def _remove_d_underscore(s):
"""Removed repeated, leading, and trailing underscores."""
s = re.sub(r'_+', '_', s)
s = re.sub(r'^_+', '', s)
s = re.sub(r'_+$', '', s)
return s
# remove parentheses and brackets, for SBML & BiGG spec compatibility
new_id = re.sub(r'[^a-zA-Z0-9_]', '_', new_id)
# strip leading and trailing underscores
# new_id = re.sub(r'^_+', '', new_id)
# new_id = re.sub(r'_+$', '', new_id)
compartment_match = reg_compartment.match(new_id)
if compartment_match is None:
# still remove double underscores
new_id = _remove_d_underscore(new_id)
else:
base, compartment = compartment_match.groups()
chirality_match = reg_chirality.match(base)
if chirality_match is None:
new_id = _join_parts(_remove_d_underscore(base), compartment)
else:
new_base = '%s__%s' % (_remove_d_underscore(chirality_match.group(1)),
chirality_match.group(2))
new_id = _join_parts(new_base, compartment)
return new_id
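# A few illustrative conversions (expected outputs inferred from the regexes
# above, not taken from the project's test suite):
def _example_new_id_style():
    # 'glc-D[e]'  -> 'glc__D_e'
    # 'EX_glc(e)' -> 'EX_glc_e'
    # 'akg_L_c'   -> 'akg__L_c'
    return [id_for_new_id_style('glc-D[e]', is_metabolite=True),
            id_for_new_id_style('EX_glc(e)'),
            id_for_new_id_style('akg_L_c', is_metabolite=True)]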
def get_formulas_from_names(model):
reg = re.compile(r'.*_([A-Za-z0-9]+)$')
# support cobra 0.3 and 0.4
for metabolite in model.metabolites:
if getattr(metabolite, 'formula', None) is not None and str(metabolite.formula) != '':
continue
m = reg.match(metabolite.name)
if m:
try:
metabolite.formula = Formula(m.group(1))
except TypeError:
metabolite.formula = str(m.group(1))
return model
def setup_model(model, substrate_reactions, aerobic=True, sur=10, max_our=10,
id_style='cobrapy', fix_iJO1366=False):
"""Set up the model with environmntal parameters.
model: a cobra model
substrate_reactions: A single reaction id, list of reaction ids, or dictionary with reaction
ids as keys and max substrate uptakes as keys. If a list or single id is
given, then each substrate will be limited to /sur/
aerobic: True or False
sur: substrate uptake rate. Ignored if substrate_reactions is a dictionary.
max_our: Max oxygen uptake rate.
id_style: 'cobrapy' or 'simpheny'.
"""
if id_style=='cobrapy': o2 = 'EX_o2_e'
elif id_style=='simpheny': o2 = 'EX_o2(e)'
else: raise Exception('Invalid id_style')
if isinstance(substrate_reactions, dict):
for r, v in substrate_reactions.iteritems():
model.reactions.get_by_id(r).lower_bound = -abs(v)
elif isinstance(substrate_reactions, list):
for r in substrate_reactions:
model.reactions.get_by_id(r).lower_bound = -abs(sur)
elif isinstance(substrate_reactions, str):
model.reactions.get_by_id(substrate_reactions).lower_bound = -abs(sur)
else: raise Exception('bad substrate_reactions argument')
if aerobic:
model.reactions.get_by_id(o2).lower_bound = -abs(max_our)
else:
model.reactions.get_by_id(o2).lower_bound = 0
# model specific setup
if str(model)=='iJO1366' and aerobic==False:
for r in ['CAT', 'SPODM', 'SPODMpp']:
model.reactions.get_by_id(r).lower_bound = 0
model.reactions.get_by_id(r).upper_bound = 0
if fix_iJO1366 and str(model)=='iJO1366':
for r in ['ACACT2r']:
model.reactions.get_by_id(r).upper_bound = 0
print 'made ACACT2r irreversible'
# TODO hydrogen reaction for ijo
if str(model)=='iMM904' and aerobic==False:
necessary_ex = ['EX_ergst(e)', 'EX_zymst(e)', 'EX_hdcea(e)',
'EX_ocdca(e)', 'EX_ocdcea(e)', 'EX_ocdcya(e)']
for r in necessary_ex:
rxn = model.reactions.get_by_id(r)
rxn.lower_bound = -1000
rxn.upper_bound = 1000
return model
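# Hedged usage sketch for setup_model (the reaction id 'EX_glc__D_e' is an
# assumption about a typical BiGG-style model, not something this module
# guarantees): anaerobic growth on glucose with an uptake rate of 10.
def _example_setup_model(model):
    return setup_model(model, 'EX_glc__D_e', aerobic=False, sur=10)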
def turn_on_subsystem(model, subsystem):
raise NotImplementedError()
for reaction in model.reactions:
if reaction.subsystem.strip('_') == subsystem.strip('_'):
reaction.lower_bound = -1000 if reaction.reversibility else 0
reaction.upper_bound = 1000
return model
def carbons_for_exchange_reaction(reaction):
if len(reaction._metabolites) > 1:
raise Exception('%s not an exchange reaction' % str(reaction))
metabolite = reaction._metabolites.iterkeys().next()
try:
return metabolite.formula.elements['C']
except KeyError:
return 0
# match = re.match(r'C([0-9]+)', str(metabolite.formula))
# try:
# return int(match.group(1))
# except AttributeError:
# return 0
def fix_legacy_id(id, use_hyphens=False):
id = id.replace('_DASH_', '__')
id = id.replace('_FSLASH_', '/')
id = id.replace('_BSLASH_', "\\")
id = id.replace('_LPAREN_', '(')
id = id.replace('_LSQBKT_', '[')
id = id.replace('_RSQBKT_', ']')
id = id.replace('_RPAREN_', ')')
id = id.replace('_COMMA_', ',')
id = id.replace('_PERIOD_', '.')
id = id.replace('_APOS_', "'")
id = id.replace('&', '&')
id = id.replace('<', '<')
id = id.replace('>', '>')
id = id.replace('"', '"')
if use_hyphens:
id = id.replace('__', '-')
else:
id = id.replace("-", "__")
return id
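# Illustrative calls (expected outputs inferred from the replacements above):
def _example_fix_legacy_id():
    # -> 'glc__D(e)' with the default, 'glc-D(e)' with use_hyphens=True
    return (fix_legacy_id('glc_DASH_D_LPAREN_e_RPAREN_'),
            fix_legacy_id('glc_DASH_D_LPAREN_e_RPAREN_', use_hyphens=True))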
def split_compartment(component_id):
"""Split the metabolite bigg_id into a metabolite and a compartment id.
Arguments
---------
component_id: the bigg_id of the metabolite.
"""
match = re.search(r'_[a-z][a-z0-9]?$', component_id)
if match is None:
raise NotFoundError("No compartment found for %s" % component_id)
met = component_id[0:match.start()]
compartment = component_id[match.start()+1:]
return met, compartment
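# Example of the split (illustrative): 'glc__D_e' -> ('glc__D', 'e'); an id with
# no trailing compartment, such as 'glc__D', raises NotFoundError instead.
def _example_split_compartment():
    return split_compartment('glc__D_e')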
|
|
#!/usr/bin/python
import sys, os
sys.path.append('..')
import socket
import session, config
from common import tool
from server import zygote
import json, copy, base64
sockFile = '/tmp/d2'
def send(module, message):
if message is None:
message = {}
else:
message = copy.deepcopy(message)
message['module'] = module
message = json.dumps(message)
sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
sock.connect(sockFile)
sock.send(zygote.pack(message))
sock.close()
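# Illustrative call (the module path and payload are made-up examples): send()
# forwards a JSON message over the unix socket at sockFile, where the zygote
# server routes it to the named module.
def _example_send():
    send('modules.ipconfig.clean.main', {'desc': '[]'})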
def login(message):
handler = message.get('handler')
if handler is None:
return None
_ = handler.request.arguments
connector = config.MysqlConnector()
if connector is None:
handler.redirect('/login.html', permanent = True)
return None
uname = _.get('username', None)
if uname is None or len(uname) == 0:
handler.redirect('/login.html', permanent = True)
return None
db_result = connector.select('u_user', '`uname` = "%s"' % uname[0])
connector.close()
if len(db_result) == 0:
handler.redirect('/login.html', permanent = True)
return None
raw_password = db_result[0].get('passwd', None)
if raw_password is None:
handler.redirect('/login.html', permanent = True)
return None
passwd = _.get('password', None)
if passwd is None or len(passwd) == 0:
handler.redirect('/login.html', permanent = True)
return None
if raw_password != passwd[0]:
handler.redirect('/login.html', permanent = True)
return None
id_ = db_result[0].get('id', None)
session.add(handler, id_)
handler.redirect('/index.html', permanent = True)
return None
def get_os_options(message):
result = config.GetAllOSType()
return json.dumps(result)
def cfg_dns(message):
handler = message.get('handler')
if handler is None:
return ''
uinfo = session.info(handler)
if uinfo is None:
return ''
connector = config.MysqlConnector()
if connector is None:
return ''
db_result = connector.select('u_machine', '`uid` = %s' % uinfo.get('id', 0))
connector.close()
data = json.dumps(db_result)
msg = {}
msg['desc'] = data
send('modules.ipconfig.clean.main', msg)
send('modules.ipconfig.install.main', msg)
def hosts(message):
handler = message.get('handler')
if handler is None:
return ''
uinfo = session.info(handler)
if uinfo is None:
return ''
connector = config.MysqlConnector()
if connector is None:
return ''
db_result = connector.select('u_machine', '`uid` = %s' % uinfo.get('id', 0))
connector.close()
for _ in db_result:
os_ = _.get('os')
if os_ is not None:
_['os_'] = config.GetOSType(os_)
_['roles_'] = '' # TODO
return json.dumps(db_result)
def host_add(message):
handler = message.get('handler')
if handler is None:
return ''
data = handler.request.body
obj = json.loads(data)
uinfo = session.info(handler)
if uinfo is None:
return ''
do = {'uid': uinfo.get('id', 0),
'name': obj.get('name', ''),
'in_ipaddr': obj.get('in_ipaddr', ''),
'ex_ipaddr': obj.get('ex_ipaddr', ''),
'hostname': obj.get('hostname', ''),
'os': obj.get('os', ''),
'uname': obj.get('username', ''),
'passwd': obj.get('password', '')}
connector = config.MysqlConnector()
if connector is None:
return ''
connector.insert('u_machine', do)
connector.close()
def clusters(message):
handler = message.get('handler')
if handler is None:
return ''
uinfo = session.info(handler)
if uinfo is None:
return ''
connector = config.MysqlConnector()
if connector is None:
return ''
db_result = connector.select('u_cluster', '`uid` = %s' % uinfo.get('id', 0))
connector.close()
for _ in db_result:
type_ = _.get('type')
if type_ is not None:
_['type_'] = config.GetClusterType(type_)
status = _.get('status')
if status is not None:
_['status_'] = config.GetMachineStatus(status)
return json.dumps(db_result)
def hs_install(message):
handler = message.get('handler')
if handler is None:
return ''
body = handler.request.body
cfg = json.loads(body)
uinfo = session.info(handler)
if uinfo is None:
return ''
uid = uinfo.get('id')
connector = config.MysqlConnector()
if connector is None:
return ''
db_result = []
for k, v in cfg.items():
result = connector.select_one('u_machine', '`uid` = %s and `id` = %s' % (uid, k))
if result is not None:
result['keys'] = ','.join(v)
db_result.append(result)
connector.close()
data = {}
data['uid'] = uid
data['name'] = 'Unknown' # fill it
data['desc'] = db_result
send('modules.hs.install.main', data)
def cluster_start(message):
handler = message.get('handler')
if handler is None:
return ''
body = handler.request.body
cfg = json.loads(body)
id_ = cfg.get('id')
cluster_type = cfg.get('type')
uinfo = session.info(handler)
if uinfo is None:
return ''
uid = uinfo.get('id')
connector = config.MysqlConnector()
if connector is None:
return ''
result = connector.select_one('u_cluster', '`uid` = %s and `id` = %s' % (uid, id_))
connector.close()
if result is None:
return ''
desc = result.get('desc')
if desc is None:
return ''
desc = base64.b64decode(desc)
data = {}
data['id'] = id_
data['desc'] = desc
send(config.GetStartModule(cluster_type), data)
def cluster_stop(message):
handler = message.get('handler')
if handler is None:
return ''
body = handler.request.body
cfg = json.loads(body)
id_ = cfg.get('id')
cluster_type = cfg.get('type')
uinfo = session.info(handler)
if uinfo is None:
return ''
uid = uinfo.get('id')
connector = config.MysqlConnector()
if connector is None:
return ''
result = connector.select_one('u_cluster', '`uid` = %s and `id` = %s' % (uid, id_))
connector.close()
if result is None:
return ''
desc = result.get('desc')
if desc is None:
return ''
desc = base64.b64decode(desc)
data = {}
data['id'] = id_
data['desc'] = desc
send(config.GetStopModule(cluster_type), data)
def cluster_clean(message):
handler = message.get('handler')
if handler is None:
return ''
body = handler.request.body
cfg = json.loads(body)
id_ = cfg.get('id')
cluster_type = cfg.get('type')
uinfo = session.info(handler)
if uinfo is None:
return ''
uid = uinfo.get('id')
connector = config.MysqlConnector()
if connector is None:
return ''
result = connector.select_one('u_cluster', '`uid` = %s and `id` = %s' % (uid, id_))
connector.close()
if result is None:
return ''
desc = result.get('desc')
if desc is None:
return ''
desc = base64.b64decode(desc)
data = {}
data['id'] = id_
data['desc'] = desc
send(config.GetCleanModule(cluster_type), data)
|
|
#!/usr/bin/env python
# Copyright (c) 2019 Arm Limited and Contributors. All rights reserved.
#
# SPDX-License-Identifier: Apache-2.0
"""Test the arm toolchain."""
import os
import sys
from unittest import TestCase
import mock
ROOT = os.path.abspath(
os.path.join(os.path.dirname(__file__), "..", "..", "..")
)
sys.path.insert(0, ROOT)
from tools.toolchains.arm import ARM_STD, ARM_MICRO, ARMC6
from tools.toolchains.gcc import GCC_ARM
from tools.toolchains.iar import IAR
from tools.toolchains.mbed_toolchain import UNSUPPORTED_C_LIB_EXCEPTION_STRING
from tools.utils import NotSupportedException
class TestArmToolchain(TestCase):
"""Test Arm classes."""
def test_arm_minimal_printf(self):
"""Test that linker flags are correctly added to an instance of ARM."""
mock_target = mock.MagicMock()
mock_target.core = "Cortex-M4"
mock_target.printf_lib = "minimal-printf"
mock_target.c_lib = "std"
mock_target.supported_c_libs = {"arm": ["std"]}
mock_target.supported_toolchains = ["ARM", "uARM", "ARMC5"]
del mock_target.default_lib
arm_std_obj = ARM_STD(mock_target)
arm_micro_obj = ARM_MICRO(mock_target)
arm_c6_obj = ARMC6(mock_target)
self.assertIn("-DMBED_MINIMAL_PRINTF", arm_std_obj.flags["common"])
self.assertIn("-DMBED_MINIMAL_PRINTF", arm_micro_obj.flags["common"])
self.assertIn("-DMBED_MINIMAL_PRINTF", arm_c6_obj.flags["common"])
def test_arm_c_lib(self):
"""Test that linker flags are correctly added to an instance of ARM."""
mock_target = mock.MagicMock()
mock_target.core = "Cortex-M4"
mock_target.supported_c_libs = {"arm": ["small"]}
mock_target.c_lib = "sMALL"
del mock_target.default_lib
mock_target.default_toolchain = "ARM"
mock_target.supported_toolchains = ["ARM", "uARM", "ARMC5", "ARMC6"]
arm_std_obj = ARM_STD(mock_target)
arm_micro_obj = ARM_MICRO(mock_target)
mock_target.default_toolchain = "ARMC6"
arm_c6_obj = ARMC6(mock_target)
self.assertIn("-D__MICROLIB", arm_std_obj.flags["common"])
self.assertIn("-D__MICROLIB", arm_micro_obj.flags["common"])
self.assertIn("-D__MICROLIB", arm_c6_obj.flags["common"])
self.assertIn("--library_type=microlib", arm_std_obj.flags["ld"])
self.assertIn("--library_type=microlib", arm_micro_obj.flags["ld"])
self.assertIn("--library_type=microlib", arm_c6_obj.flags["ld"])
self.assertIn("--library_type=microlib", arm_c6_obj.flags["asm"])
def test_arm_c_lib_std_exception(self):
"""Test that an exception is raised if the std C library is not supported for a target on the ARM toolchain."""
mock_target = mock.MagicMock()
mock_target.core = "Cortex-M4"
mock_target.supported_toolchains = ["ARM", "uARM", "ARMC5"]
mock_target.default_toolchain = "ARM"
mock_target.c_lib = "std"
del mock_target.default_lib
mock_target.supported_c_libs = {"arm": ["small"]}
with self.assertRaisesRegexp(NotSupportedException, UNSUPPORTED_C_LIB_EXCEPTION_STRING.format(mock_target.c_lib)):
ARM_STD(mock_target)
with self.assertRaisesRegexp(NotSupportedException, UNSUPPORTED_C_LIB_EXCEPTION_STRING.format(mock_target.c_lib)):
ARMC6(mock_target)
def test_arm_c_lib_small_exception(self):
"""Test that an exception is raised if the small and std C library are not supported for a target on the ARM toolchain."""
mock_target = mock.MagicMock()
mock_target.core = "Cortex-M4"
mock_target.c_lib = "small"
del mock_target.default_lib
mock_target.supported_c_libs = {"arm": [""]}
mock_target.default_toolchain = "ARM"
mock_target.supported_toolchains = ["ARM", "uARM", "ARMC5"]
with self.assertRaisesRegexp(NotSupportedException, UNSUPPORTED_C_LIB_EXCEPTION_STRING.format(mock_target.c_lib)):
ARM_STD(mock_target)
mock_target.default_toolchain = "ARMC6"
with self.assertRaisesRegexp(NotSupportedException, UNSUPPORTED_C_LIB_EXCEPTION_STRING.format(mock_target.c_lib)):
ARMC6(mock_target)
def test_arm_small_c_lib_swap_std_lib(self):
"""Test that no exception is raised when small c lib is not supported but std lib is supported."""
mock_target = mock.MagicMock()
mock_target.core = "Cortex-M4"
mock_target.c_lib = "small"
del mock_target.default_lib
mock_target.supported_c_libs = {"arm": ["std"]}
mock_target.supported_toolchains = ["ARM", "uARM", "ARMC5"]
mock_target.default_toolchain = "ARM"
try:
ARM_STD(mock_target)
except NotSupportedException:
self.fail(UNSUPPORTED_C_LIB_EXCEPTION_STRING.format(mock_target.c_lib))
mock_target.default_toolchain = "ARMC6"
try:
ARMC6(mock_target)
except NotSupportedException:
self.fail(UNSUPPORTED_C_LIB_EXCEPTION_STRING.format(mock_target.c_lib))
class TestGccToolchain(TestCase):
"""Test the GCC class."""
def test_gcc_minimal_printf(self):
"""Test that linker flags are correctly added to an instance of GCC_ARM."""
mock_target = mock.MagicMock()
mock_target.core = "Cortex-M4"
mock_target.printf_lib = "minimal-printf"
mock_target.supported_toolchains = ["GCC_ARM"]
mock_target.c_lib = "std"
del mock_target.default_lib
mock_target.supported_c_libs = {"gcc_arm": ["std"]}
gcc_obj = GCC_ARM(mock_target)
self.assertIn("-DMBED_MINIMAL_PRINTF", gcc_obj.flags["common"])
minimal_printf_wraps = [
"-Wl,--wrap,printf",
"-Wl,--wrap,sprintf",
"-Wl,--wrap,snprintf",
"-Wl,--wrap,vprintf",
"-Wl,--wrap,vsprintf",
"-Wl,--wrap,vsnprintf",
"-Wl,--wrap,fprintf",
"-Wl,--wrap,vfprintf",
]
for i in minimal_printf_wraps:
self.assertIn(i, gcc_obj.flags["ld"])
def test_gcc_arm_c_lib(self):
"""Test that linker flags are correctly added to an instance of GCC_ARM."""
mock_target = mock.MagicMock()
mock_target.core = "Cortex-M4"
mock_target.supported_c_libs = {"gcc_arm": ["small"]}
mock_target.c_lib = "sMALL"
del mock_target.default_lib
mock_target.supported_toolchains = ["GCC_ARM"]
gcc_arm_obj = GCC_ARM(mock_target)
self.assertIn("-DMBED_RTOS_SINGLE_THREAD", gcc_arm_obj.flags["common"])
self.assertIn("-D__NEWLIB_NANO", gcc_arm_obj.flags["common"])
self.assertIn("--specs=nano.specs", gcc_arm_obj.flags["ld"])
def test_gcc_arm_c_lib_std_exception(self):
"""Test that an exception is raised if the std C library is not supported for a target on the GCC_ARM toolchain."""
mock_target = mock.MagicMock()
mock_target.core = "Cortex-M4"
mock_target.default_toolchain = "GCC_ARM"
mock_target.c_lib = "std"
del mock_target.default_lib
mock_target.supported_c_libs = {"gcc_arm": ["small"]}
with self.assertRaisesRegexp(NotSupportedException, UNSUPPORTED_C_LIB_EXCEPTION_STRING.format(mock_target.c_lib)):
GCC_ARM(mock_target)
def test_gcc_arm_c_lib_small_exception(self):
"""Test that an exception is raised if the small and std C library are not supported for a target on the GCC_ARM toolchain."""
mock_target = mock.MagicMock()
mock_target.core = "Cortex-M4"
mock_target.c_lib = "small"
del mock_target.default_lib
mock_target.supported_c_libs = {"gcc_arm": [""]}
mock_target.default_toolchain = "GCC_ARM"
mock_target.supported_toolchains = ["GCC_ARM"]
with self.assertRaisesRegexp(NotSupportedException, UNSUPPORTED_C_LIB_EXCEPTION_STRING.format(mock_target.c_lib)):
GCC_ARM(mock_target)
def test_gcc_arm_small_c_lib_swap_std_lib(self):
"""Test that no exception is raised when small c lib is not supported but std lib is supported."""
mock_target = mock.MagicMock()
mock_target.core = "Cortex-M4"
mock_target.supported_c_libs = {"gcc_arm": ["std"]}
mock_target.c_lib = "small"
del mock_target.default_lib
mock_target.supported_toolchains = ["GCC_ARM"]
mock_target.is_TrustZone_secure_target = False
mock_target.default_toolchain = "GCC_ARM"
try:
GCC_ARM(mock_target)
except NotSupportedException:
self.fail(UNSUPPORTED_C_LIB_EXCEPTION_STRING.format(mock_target.c_lib))
class TestIarToolchain(TestCase):
"""Test the IAR class."""
def test_iar_minimal_printf(self):
"""Test that linker flags are correctly added to an instance of IAR."""
mock_target = mock.MagicMock()
mock_target.core = "Cortex-M4"
mock_target.printf_lib = "minimal-printf"
mock_target.supported_toolchains = ["IAR"]
del mock_target.default_lib
mock_target.c_lib = "std"
mock_target.supported_c_libs = {"iar": ["std"]}
iar_obj = IAR(mock_target)
var = "-DMBED_MINIMAL_PRINTF"
self.assertIn("-DMBED_MINIMAL_PRINTF", iar_obj.flags["common"])
def test_iar_c_lib(self):
"""Test that no exception is raised when a supported c library is specified."""
mock_target = mock.MagicMock()
mock_target.core = "Cortex-M4"
mock_target.supported_c_libs = {"iar": ["std"]}
mock_target.c_lib = "sTD"
del mock_target.default_lib
mock_target.supported_toolchains = ["IAR"]
try:
IAR(mock_target)
except NotSupportedException:
self.fail(UNSUPPORTED_C_LIB_EXCEPTION_STRING.format(mock_target.c_lib))
def test_iar_c_lib_std_exception(self):
"""Test that an exception is raised if the std C library is not supported for a target on the IAR toolchain."""
mock_target = mock.MagicMock()
mock_target.core = "Cortex-M4"
mock_target.c_lib = "std"
del mock_target.default_lib
mock_target.supported_c_libs = {"iar": ["small"]}
mock_target.supported_toolchains = ["IAR"]
with self.assertRaisesRegexp(NotSupportedException, UNSUPPORTED_C_LIB_EXCEPTION_STRING.format(mock_target.c_lib)):
IAR(mock_target)
def test_iar_c_lib_small_exception(self):
"""Test that an exception is raised if the small and std C library are not supported for a target on the IAR toolchain."""
mock_target = mock.MagicMock()
mock_target.core = "Cortex-M4"
mock_target.c_lib = "small"
del mock_target.default_lib
mock_target.supported_c_libs = {"iar": [""]}
mock_target.supported_toolchains = ["IAR"]
with self.assertRaisesRegexp(NotSupportedException, UNSUPPORTED_C_LIB_EXCEPTION_STRING.format(mock_target.c_lib)):
IAR(mock_target)
def test_iar_small_c_lib_swap_std_lib(self):
"""Test that no exception is raised when small c lib is not supported but std lib is supported."""
mock_target = mock.MagicMock()
mock_target.core = "Cortex-M4"
mock_target.supported_c_libs = {"iar": ["std"]}
mock_target.c_lib = "small"
del mock_target.default_lib
mock_target.supported_toolchains = ["IAR"]
mock_target.is_TrustZone_secure_target = False
try:
IAR(mock_target)
except NotSupportedException:
self.fail(UNSUPPORTED_C_LIB_EXCEPTION_STRING.format(mock_target.c_lib))
|