repo_name | path | copies | size | content | license
---|---|---|---|---|---|
Aloomaio/googleads-python-lib
|
examples/ad_manager/v201811/premium_rate_service/get_all_premium_rates.py
|
1
|
1967
|
#!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example gets all premium rates.
"""
# Import appropriate modules from the client library.
from googleads import ad_manager
def main(client):
# Initialize appropriate service.
premium_rate_service = client.GetService(
'PremiumRateService', version='v201811')
# Create a statement to select premium rates.
statement = ad_manager.StatementBuilder(version='v201811')
# Retrieve a small amount of premium rates at a time, paging
# through until all premium rates have been retrieved.
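# (StatementBuilder defaults to the client library's suggested page limit,
# 500 rows per fetch at the time of writing; the loop below advances the
# offset by that amount each pass.)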
while True:
response = premium_rate_service.getPremiumRatesByStatement(
statement.ToStatement())
if 'results' in response and len(response['results']):
for premium_rate in response['results']:
# Print out some information for each premium rate.
print('Premium rate with ID "%d", premium feature "%s", and rate card '
'id "%d" was found.\n' % (
premium_rate['id'],
ad_manager.AdManagerClassType(premium_rate),
premium_rate['rateCardId']))
statement.offset += statement.limit
else:
break
print('\nNumber of results found: %s' % response['totalResultSetSize'])
if __name__ == '__main__':
# Initialize client object.
ad_manager_client = ad_manager.AdManagerClient.LoadFromStorage()
main(ad_manager_client)
|
apache-2.0
|
tryolabs/luminoth
|
luminoth/utils/bbox_transform_tf.py
|
1
|
4791
|
import tensorflow as tf
def get_width_upright(bboxes):
with tf.name_scope('BoundingBoxTransform/get_width_upright'):
bboxes = tf.cast(bboxes, tf.float32)
x1, y1, x2, y2 = tf.split(bboxes, 4, axis=1)
width = x2 - x1 + 1.
height = y2 - y1 + 1.
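# The +1 assumes inclusive pixel coordinates: a box spanning columns
# x1..x2 covers x2 - x1 + 1 pixels.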
# Calculate the reference point of the bbox (urx = up right x). Note that
# x1 + .5 * width is the box center under the inclusive convention.
urx = x1 + .5 * width
ury = y1 + .5 * height
return width, height, urx, ury
def encode(bboxes, gt_boxes, variances=None):
with tf.name_scope('BoundingBoxTransform/encode'):
(bboxes_width, bboxes_height,
bboxes_urx, bboxes_ury) = get_width_upright(bboxes)
(gt_boxes_width, gt_boxes_height,
gt_boxes_urx, gt_boxes_ury) = get_width_upright(gt_boxes)
if variances is None:
variances = [1., 1.]
targets_dx = (gt_boxes_urx - bboxes_urx)/(bboxes_width * variances[0])
targets_dy = (gt_boxes_ury - bboxes_ury)/(bboxes_height * variances[0])
targets_dw = tf.log(gt_boxes_width / bboxes_width) / variances[1]
targets_dh = tf.log(gt_boxes_height / bboxes_height) / variances[1]
targets = tf.concat(
[targets_dx, targets_dy, targets_dw, targets_dh], axis=1)
return targets
def decode(roi, deltas, variances=None):
with tf.name_scope('BoundingBoxTransform/decode'):
(roi_width, roi_height,
roi_urx, roi_ury) = get_width_upright(roi)
dx, dy, dw, dh = tf.split(deltas, 4, axis=1)
if variances is None:
variances = [1., 1.]
pred_ur_x = dx * roi_width * variances[0] + roi_urx
pred_ur_y = dy * roi_height * variances[0] + roi_ury
pred_w = tf.exp(dw * variances[1]) * roi_width
pred_h = tf.exp(dh * variances[1]) * roi_height
bbox_x1 = pred_ur_x - 0.5 * pred_w
bbox_y1 = pred_ur_y - 0.5 * pred_h
# This -1. extra is different from reference implementation.
bbox_x2 = pred_ur_x + 0.5 * pred_w - 1.
bbox_y2 = pred_ur_y + 0.5 * pred_h - 1.
bboxes = tf.concat(
[bbox_x1, bbox_y1, bbox_x2, bbox_y2], axis=1)
return bboxes
def clip_boxes(bboxes, imshape):
"""
Clips bounding boxes to image boundaries based on image shape.
Args:
bboxes: Tensor with shape (num_bboxes, 4)
where point order is x1, y1, x2, y2.
imshape: Tensor with shape (2, )
where the first value is height and the next is width.
Returns:
Tensor with same shape as bboxes but making sure that none
of the bboxes are outside the image.
"""
with tf.name_scope('BoundingBoxTransform/clip_bboxes'):
bboxes = tf.cast(bboxes, dtype=tf.float32)
imshape = tf.cast(imshape, dtype=tf.float32)
x1, y1, x2, y2 = tf.split(bboxes, 4, axis=1)
width = imshape[1]
height = imshape[0]
x1 = tf.maximum(tf.minimum(x1, width - 1.0), 0.0)
x2 = tf.maximum(tf.minimum(x2, width - 1.0), 0.0)
y1 = tf.maximum(tf.minimum(y1, height - 1.0), 0.0)
y2 = tf.maximum(tf.minimum(y2, height - 1.0), 0.0)
bboxes = tf.concat([x1, y1, x2, y2], axis=1)
return bboxes
def change_order(bboxes):
"""Change bounding box encoding order.
TensorFlow works with the (y_min, x_min, y_max, x_max) order while we work
with the (x_min, y_min, x_max, y_min).
While both encoding options have its advantages and disadvantages we
decided to use the (x_min, y_min, x_max, y_min), forcing use to switch to
TensorFlow's every time we want to use a std function that handles bounding
boxes.
Args:
bboxes: A Tensor of shape (total_bboxes, 4)
Returns:
bboxes: A Tensor of shape (total_bboxes, 4) with the order swaped.
"""
with tf.name_scope('BoundingBoxTransform/change_order'):
first_min, second_min, first_max, second_max = tf.unstack(
bboxes, axis=1
)
bboxes = tf.stack(
[second_min, first_min, second_max, first_max], axis=1
)
return bboxes
if __name__ == '__main__':
import numpy as np
bboxes = tf.placeholder(tf.float32)
bboxes_val = [[10, 10, 20, 22]]
gt_boxes = tf.placeholder(tf.float32)
gt_boxes_val = [[11, 13, 34, 31]]
imshape = tf.placeholder(tf.int32)
imshape_val = (100, 100)
deltas = encode(bboxes, gt_boxes)
decoded_bboxes = decode(bboxes, deltas)
final_decoded_bboxes = clip_boxes(decoded_bboxes, imshape)
with tf.Session() as sess:
final_decoded_bboxes = sess.run(final_decoded_bboxes, feed_dict={
bboxes: bboxes_val,
gt_boxes: gt_boxes_val,
imshape: imshape_val,
})
assert np.all(gt_boxes_val == final_decoded_bboxes)
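# Note: decode() analytically inverts encode() here, so the round trip can
# be bit-exact, but exact float equality is fragile in general; np.allclose
# would be a more tolerant check.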
|
bsd-3-clause
|
KonradBreitsprecher/espresso
|
testsuite/coulomb_tuning.py
|
1
|
3369
|
#
# Copyright (C) 2017 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Tests tuning of electrostatic methods against stored reference forces
from __future__ import print_function
import os
import pickle
import numpy as np
import unittest as ut
import espressomd
from espressomd.electrostatics import *
from espressomd import scafacos
import tests_common
@ut.skipIf(not espressomd.has_features(["ELECTROSTATICS"]),
"Features not available, skipping test!")
class CoulombCloudWallTune(ut.TestCase):
"""This compares p3m, p3m_gpu electrostatic forces against stored data."""
S = espressomd.System(box_l=[1.0, 1.0, 1.0])
tolerance = 1E-3
def setUp(self):
self.S.box_l = (10, 10, 10)
self.S.time_step = 0.01
self.S.cell_system.skin = 0.4
# Clear actors that might be left from prev tests
if len(self.S.actors):
del self.S.actors[0]
self.S.part.clear()
data = np.load(tests_common.abspath("data/coulomb_tuning_system.npz"))
self.forces = []
# Add particles to the system and store reference forces in a list
# Input format: id pos q f
for id in range(len(data['pos'])):
pos = data['pos'][id]
q = data['charges'][id]
self.forces.append(data['forces'][id])
self.S.part.add(id=id, pos=pos, q=q)
def compare(self, method_name):
# Compare forces now in the system to stored ones
force_abs_diff = 0.
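# Accumulate the Euclidean norm of each particle's deviation from the
# stored reference force, then average over all particles.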
for p in self.S.part:
force_abs_diff += abs(np.sqrt(sum((p.f - self.forces[p.id])**2)))
force_abs_diff /= len(self.S.part)
print(method_name, "force difference", force_abs_diff)
self.assertLessEqual(
force_abs_diff,
self.tolerance,
"Asbolute force difference " +
str(force_abs_diff) +
" too large for method " +
method_name)
# Tests for individual methods
if espressomd.has_features(["P3M"]):
def test_p3m(self):
# We have to add some tolerance here, because the reference
# system is not homogeneous
self.S.actors.add(P3M(prefactor=1., accuracy=5e-4,
tune=True))
self.S.integrator.run(0)
self.compare("p3m")
if espressomd.has_features(["ELECTROSTATICS", "CUDA"]):
def test_p3m_gpu(self):
# We have to add some tolerance here, because the reference
# system is not homogeneous
self.S.actors.add(P3MGPU(prefactor=1., accuracy=5e-4,
tune=True))
self.S.integrator.run(0)
self.compare("p3m_gpu")
if __name__ == "__main__":
ut.main()
|
gpl-3.0
|
saurabh6790/test-med-app
|
accounts/report/item_wise_sales_register/item_wise_sales_register.py
|
14
|
3553
|
# Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.utils import flt
def execute(filters=None):
if not filters: filters = {}
columns = get_columns()
last_col = len(columns)
item_list = get_items(filters)
item_tax, tax_accounts = get_tax_accounts(item_list, columns)
data = []
for d in item_list:
row = [d.item_code, d.item_name, d.item_group, d.parent, d.posting_date,
d.customer_name, d.debit_to, d.territory, d.project_name, d.company, d.sales_order,
d.delivery_note, d.income_account, d.qty, d.basic_rate, d.amount]
for tax in tax_accounts:
row.append(item_tax.get(d.parent, {}).get(d.item_code, {}).get(tax, 0))
total_tax = sum(row[last_col:])
row += [total_tax, d.amount + total_tax]
data.append(row)
return columns, data
def get_columns():
return [
"Item Code:Link/Item:120", "Item Name::120", "Item Group:Link/Item Group:100",
"Invoice:Link/Sales Invoice:120", "Posting Date:Date:80", "Customer:Link/Customer:120",
"Customer Account:Link/Account:120", "Territory:Link/Territory:80",
"Project:Link/Project:80", "Company:Link/Company:100", "Sales Order:Link/Sales Order:100",
"Delivery Note:Link/Delivery Note:100", "Income Account:Link/Account:140",
"Qty:Float:120", "Rate:Currency:120", "Amount:Currency:120"
]
def get_conditions(filters):
conditions = ""
for opts in (("company", " and company=%(company)s"),
("account", " and si.debit_to = %(account)s"),
("item_code", " and si_item.item_code = %(item_code)s"),
("from_date", " and si.posting_date>=%(from_date)s"),
("to_date", " and si.posting_date<=%(to_date)s")):
if filters.get(opts[0]):
conditions += opts[1]
return conditions
def get_items(filters):
conditions = get_conditions(filters)
return webnotes.conn.sql("""select si_item.parent, si.posting_date, si.debit_to, si.project_name,
si.customer, si.remarks, si.territory, si.company, si_item.item_code, si_item.item_name,
si_item.item_group, si_item.sales_order, si_item.delivery_note, si_item.income_account,
si_item.qty, si_item.basic_rate, si_item.amount, si.customer_name
from `tabSales Invoice` si, `tabSales Invoice Item` si_item
where si.name = si_item.parent and si.docstatus = 1 %s
order by si.posting_date desc, si_item.item_code desc""" % conditions, filters, as_dict=1)
def get_tax_accounts(item_list, columns):
import json
item_tax = {}
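# item_tax maps: invoice name -> item code -> tax account head -> tax amount.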
tax_accounts = []
tax_details = webnotes.conn.sql("""select parent, account_head, item_wise_tax_detail
from `tabSales Taxes and Charges` where parenttype = 'Sales Invoice'
and docstatus = 1 and ifnull(account_head, '') != ''
and parent in (%s)""" % ', '.join(['%s']*len(item_list)),
tuple([item.parent for item in item_list]))
for parent, account_head, item_wise_tax_detail in tax_details:
if account_head not in tax_accounts:
tax_accounts.append(account_head)
if item_wise_tax_detail:
try:
item_wise_tax_detail = json.loads(item_wise_tax_detail)
for item, tax_amount in item_wise_tax_detail.items():
item_tax.setdefault(parent, {}).setdefault(item, {})[account_head] = \
flt(tax_amount[1]) if isinstance(tax_amount, list) else flt(tax_amount)
except ValueError:
continue
tax_accounts.sort()
columns += [account_head + ":Currency:80" for account_head in tax_accounts]
columns += ["Total Tax:Currency:80", "Total:Currency:80"]
return item_tax, tax_accounts
|
agpl-3.0
|
Weihonghao/ECM
|
Vpy34/lib/python3.5/site-packages/tensorflow/python/framework/importer.py
|
8
|
18388
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""A utility function for importing TensorFlow graphs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
from tensorflow.core.framework import attr_value_pb2
from tensorflow.core.framework import graph_pb2
from tensorflow.core.framework import types_pb2
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import op_def_registry
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.util import compat
# TODO(josh11b): SWIG the code from node_def_util instead of duplicating
# the logic here.
def _GetNodeAttr(node_def, attr_name):
if attr_name not in node_def.attr:
raise ValueError('Expected one attr with name %r in %s.'
% (attr_name, str(node_def)))
return node_def.attr[attr_name]
def _ArgToTypesNoRef(node_def, arg_def):
if arg_def.number_attr:
repeats = _GetNodeAttr(node_def, arg_def.number_attr).i
if arg_def.type_attr:
dtype = _GetNodeAttr(node_def, arg_def.type_attr).type
else:
assert arg_def.type != types_pb2.DT_INVALID
dtype = arg_def.type
return [dtype] * repeats
elif arg_def.type_attr:
return [_GetNodeAttr(node_def, arg_def.type_attr).type]
elif arg_def.type_list_attr:
return _GetNodeAttr(node_def, arg_def.type_list_attr).list.type
else:
assert arg_def.type != types_pb2.DT_INVALID
return [arg_def.type]
def _SingleArgToTypes(node_def, arg_def):
types = _ArgToTypesNoRef(node_def, arg_def)
if arg_def.is_ref:
return [dtypes.as_dtype(dt)._as_ref.as_datatype_enum for dt in types] # pylint: disable=protected-access
return types
def _ArgsToTypes(node_def, arg_list):
types = []
for arg_def in arg_list:
types.extend(_SingleArgToTypes(node_def, arg_def))
return types
def _InputTypes(node_def, op_dict):
op_def = op_dict[node_def.op]
return _ArgsToTypes(node_def, op_def.input_arg)
def _OutputTypes(node_def, op_dict):
op_def = op_dict[node_def.op]
return _ArgsToTypes(node_def, op_def.output_arg)
def _IsControlInput(input_name):
# Expected format: '^operation_name' (control input).
return input_name.startswith('^')
def _ParseTensorName(tensor_name):
"""Parses a tensor name into an operation name and output index.
This function will canonicalize tensor names as follows:
* "foo:0" -> ("foo", 0)
* "foo:7" -> ("foo", 7)
* "foo" -> ("foo", 0)
* "foo:bar:baz" -> ValueError
Args:
tensor_name: The name of a tensor.
Returns:
A tuple containing the operation name, and the output index.
Raises:
ValueError: If `tensor_name` cannot be interpreted as the name of a tensor.
"""
components = tensor_name.split(':')
if len(components) == 2:
# Expected format: 'operation_name:output_index'.
try:
output_index = int(components[1])
except ValueError:
raise ValueError('Cannot convert %r to a tensor name.' % (tensor_name,))
return components[0], output_index
elif len(components) == 1:
# Expected format: 'operation_name' (implicit 0th output).
return components[0], 0
else:
raise ValueError('Cannot convert %r to a tensor name.' % (tensor_name,))
def _CanonicalInputName(input_name):
input_name = compat.as_str(input_name)
if _IsControlInput(input_name):
return input_name
input_op_name, output_index = _ParseTensorName(input_name)
return '%s:%d' % (input_op_name, output_index)
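# e.g. 'foo' -> 'foo:0', 'foo:2' -> 'foo:2', '^foo' -> '^foo' (control input).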
def _InvalidNodeMessage(node, message):
return 'graph_def is invalid at node %r: %s.' % (node.name, message)
@contextlib.contextmanager
def _MaybeDevice(device):
"""Applies the given device only if device is not None or empty."""
if device:
with ops.device(device):
yield
else:
yield
def _FindAttrInOpDef(attr_name, op_def):
for attr_def in op_def.attr:
if attr_name == attr_def.name:
return attr_def
return None
def import_graph_def(graph_def, input_map=None, return_elements=None,
name=None, op_dict=None, producer_op_list=None):
"""Imports the TensorFlow graph in `graph_def` into the Python `Graph`.
This function provides a way to import a serialized TensorFlow
[`GraphDef`](https://www.tensorflow.org/code/tensorflow/core/framework/graph.proto)
protocol buffer, and extract individual objects in the `GraphDef` as
[`Tensor`](#Tensor) and [`Operation`](#Operation) objects. See
[`Graph.as_graph_def()`](#Graph.as_graph_def) for a way to create a
`GraphDef` proto.
Args:
graph_def: A `GraphDef` proto containing operations to be imported into
the default graph.
input_map: A dictionary mapping input names (as strings) in `graph_def`
to `Tensor` objects. The values of the named input tensors in the
imported graph will be re-mapped to the respective `Tensor` values.
return_elements: A list of strings containing operation names in
`graph_def` that will be returned as `Operation` objects; and/or
tensor names in `graph_def` that will be returned as `Tensor` objects.
name: (Optional.) A prefix that will be prepended to the names in
`graph_def`. Defaults to `"import"`.
op_dict: (Optional.) A dictionary mapping op type names to `OpDef` protos.
Must contain an `OpDef` proto for each op type named in `graph_def`.
If omitted, uses the `OpDef` protos registered in the global registry.
producer_op_list: (Optional.) An `OpList` proto with the (possibly stripped)
list of `OpDef`s used by the producer of the graph. If provided, attrs
for ops in `graph_def` that are not in `op_dict` that have their default
value according to `producer_op_list` will be removed. This will allow
some more `GraphDef`s produced by later binaries to be accepted by
earlier binaries.
Returns:
A list of `Operation` and/or `Tensor` objects from the imported graph,
corresponding to the names in `return_elements`.
Raises:
TypeError: If `graph_def` is not a `GraphDef` proto,
`input_map` is not a dictionary mapping strings to `Tensor` objects,
or `return_elements` is not a list of strings.
ValueError: If `input_map`, or `return_elements` contains names that
do not appear in `graph_def`, or `graph_def` is not well-formed (e.g.
it refers to an unknown tensor).
"""
# Type checks for inputs.
if not isinstance(graph_def, graph_pb2.GraphDef):
# `graph_def` could be a dynamically-created message, so try a duck-typed
# approach
try:
old_graph_def = graph_def
graph_def = graph_pb2.GraphDef()
graph_def.MergeFrom(old_graph_def)
except TypeError:
raise TypeError('graph_def must be a GraphDef proto.')
if input_map is None:
input_map = {}
else:
if not (isinstance(input_map, dict)
and all(isinstance(k, compat.bytes_or_text_types)
for k in input_map.keys())):
raise TypeError('input_map must be a dictionary mapping strings to '
'Tensor objects.')
if return_elements is not None:
return_elements = tuple(return_elements)
if not all(isinstance(x, compat.bytes_or_text_types)
for x in return_elements):
raise TypeError('return_elements must be a list of strings.')
# Use a canonical representation for all tensor names.
input_map = {_CanonicalInputName(k): v for k, v in input_map.items()}
used_input_keys = set()
name_to_op = {}
if op_dict is None:
op_dict = op_def_registry.get_registered_ops()
if producer_op_list is None:
producer_op_dict = None
else:
producer_op_dict = {op.name: op for op in producer_op_list.op}
# LINT.IfChange
with ops.name_scope(name, 'import', input_map.values()) as scope:
g = ops.get_default_graph()
# TODO(ashankar): Should this just copy over or should it do some
# more nuanced merging? For example, the graph may already have some
# marked "bad versions" and we don't want to lose those because of
# what's in graph_def.versions? The C++ ImportGraphDef does something
# more nuanced.
g.graph_def_versions.CopyFrom(graph_def.versions)
if input_map:
if not scope:
# The caller must have passed `name=''`.
raise ValueError('tf.import_graph_def() requires a non-empty `name` '
'if `input_map` is used.')
with ops.name_scope('_inputs'):
input_map = {k: ops.convert_to_tensor(v) for k, v in input_map.items()}
# NOTE(mrry): We do this in two passes, because there may be a cycle in
# `graph_def`.
# 1. Add operations without their inputs.
for node in graph_def.node:
# Set any default attr values that aren't present.
op_def = op_dict[node.op]
for attr_def in op_def.attr:
key = attr_def.name
if attr_def.HasField('default_value'):
value = node.attr[key]
if value is None or value.WhichOneof('value') is None:
node.attr[key].CopyFrom(attr_def.default_value)
if producer_op_dict:
# Remove any default attr values that aren't in op_def.
if node.op in producer_op_dict:
producer_op_def = producer_op_dict[node.op]
# We make a copy of node.attr to iterate through since we
# may modify node.attr inside the loop.
for key in list(node.attr):
if _FindAttrInOpDef(key, op_def) is None:
# No attr_def in consumer, look in producer.
attr_def = _FindAttrInOpDef(key, producer_op_def)
if (attr_def and attr_def.HasField('default_value') and
node.attr[key] == attr_def.default_value):
# Unknown attr had default value in producer, delete it
# so it can be understood by consumer.
del node.attr[key]
output_types = _OutputTypes(node, op_dict)
name_to_op[node.name] = g.create_op(
node.op, [], output_types, name=node.name, attrs=node.attr,
compute_shapes=False, compute_device=False,
op_def=op_def)
# 2. Add inputs to the operations.
for node in graph_def.node:
op = name_to_op[node.name]
input_types = _InputTypes(node, op_dict)
# Rewrite the colocation attributes in the graph, since the
# names of new ops may have changed.
for key, value in op.node_def.attr.items():
if key == '_class':
class_values = value.list
new_class_values = []
for class_value in class_values.s:
if class_value.startswith(b'loc:@'):
op_to_bind_to = class_value[5:].decode()
# Find the op by its original name.
if op_to_bind_to not in name_to_op:
raise ValueError('Specified colocation to an op that '
'does not exist during import: %s in %s' % (
op_to_bind_to, node.name))
original_op = name_to_op[op_to_bind_to]
new_class_values.append(compat.as_bytes(
'loc:@' + original_op.name))
else:
new_class_values.append(class_value)
value.list.CopyFrom(attr_value_pb2.AttrValue.ListValue(
s=new_class_values))
# NOTE(mrry): We cannot use zip here because control inputs do not appear
# in the list of input_types.
for i, input_name in enumerate(
[_CanonicalInputName(x) for x in node.input]):
if _IsControlInput(input_name):
# (a) Input is a control input that should be taken from an op
# in "graph_def".
try:
source_op = name_to_op[input_name[1:]]
except KeyError:
raise ValueError(
_InvalidNodeMessage(
node,
'Control input %r not found in graph_def.' % (input_name,)))
# pylint: disable=protected-access
op._add_control_input(source_op)
# pylint: enable=protected-access
else:
try:
input_type = input_types[i]
except IndexError:
raise ValueError(_InvalidNodeMessage(
node, 'More inputs specified (%r) than the op expects.'
% (input_name,)))
if input_name in input_map:
# (b) Input should be replaced by a tensor from the caller.
source_tensor = input_map[input_name]
used_input_keys.add(input_name)
else:
# (c) Input should be taken from an op in `graph_def`.
operation_name, output_index = _ParseTensorName(input_name)
try:
source_op = name_to_op[operation_name]
source_tensor = list(source_op.values())[output_index]
except (KeyError, IndexError):
raise ValueError(
_InvalidNodeMessage(
node,
'Input tensor %r not found in graph_def.'
% (input_name,)))
try:
# pylint: disable=protected-access
op._add_input(source_tensor, dtype=input_type)
# pylint: enable=protected-access
except TypeError as te:
raise ValueError(_InvalidNodeMessage(
node, 'Input tensor %r %s' % (input_name, te)))
# pylint: disable=protected-access
if op._input_dtypes != input_types:
raise ValueError(
_InvalidNodeMessage(
node,
'Input types mismatch (expected %r but got %r)'
% (', '.join(dtypes.as_dtype(x).name for x in input_types),
', '.join(x.name for x in op._input_dtypes))))
# pylint: enable=protected-access
# Execute shape inference for this op.
# NOTE(mrry): If the graph contains a cycle, the full shape information
# may not be available for this op's inputs.
ops.set_shapes_for_outputs(op)
# For nodes with _output_shapes set, set the output shapes.
if '_output_shapes' in op.node_def.attr:
for i, output in enumerate(op.outputs):
dims = op.node_def.attr['_output_shapes'].list.shape[i]
output_shape = tensor_shape.TensorShape(
None if dims.unknown_rank else
[dim.size if dim.size >= 0 else None for dim in dims.dim])
try:
output.set_shape(output_shape)
except ValueError as e:
# If the output shape is incompatible with what is inferred
# by the graph for a very specific whitelist of ops, then we
# ignore this output shape. This can happen if there is a
# bug in the shape function for some operation, and the
# serialized graph def has the incorrect shape set when
# running on a newer binary with the fixed shape function.
# This is an escape hatch that allows us to correct shape
# functions that are not critical to correct execution but
# would cause graphs to fail if imported after correcting.
#
# This can be removed after 2017/03/08.
if op.type in ['RandomShuffleQueue', 'PaddingFIFOQueue',
'FIFOQueue', 'PriorityQueue', 'QueueSize',
'Stack', 'Barrier', 'BarrierReadySize',
'BarrierIncompleteSize', 'HashTable',
'MutableHashTable',
'MutableHashTableOfTensors', 'Mutex',
'CuckooTable', 'IndexTable',
'WholeFileReader', 'TextLineReader',
'FixedLengthRecordReader',
'TFRecordReader', 'IdentityReader',
'RefSwitch', 'RefEnter', 'RefNextIteration',
'RefMerge', 'RefIdentity']:
pass
elif op.type in [
'ConditionalAccumulator', 'SparseConditionalAccumulator',
'Table'
]:
# This can be removed after 2017/04/24.
pass
else:
raise e
del op.node_def.attr['_output_shapes']
# Apply device functions for this op.
# NOTE(mrry): We do this after configuring the inputs, because
# the result of the device functions may depend on the inputs.
with _MaybeDevice(node.device):
g._apply_device_functions(op) # pylint: disable=protected-access
# Treat unused input mappings as an error, because they are likely to be
# due to a typo.
unused_input_keys = frozenset(input_map.keys()).difference(used_input_keys)
if unused_input_keys:
raise ValueError(
'Attempted to map inputs that were not found in graph_def: [%s]'
% ', '.join(unused_input_keys))
if return_elements is None:
return None
else:
ret = []
for name in return_elements:
name = compat.as_str(name)
if ':' in name:
try:
operation_name, output_index = _ParseTensorName(name)
ret.append(name_to_op[operation_name].outputs[output_index])
except (ValueError, KeyError, IndexError):
raise ValueError(
'Requested return_element %r not found in graph_def.' % name)
else:
try:
ret.append(name_to_op[name])
except KeyError:
raise ValueError(
'Requested return_element %r not found in graph_def.' % name)
return ret
# LINT.ThenChange(//tensorflow/core/graph/graph_constructor.cc)
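# A minimal usage sketch (hypothetical tensor names 'x:0' and 'y:0'),
# assuming the TF 1.x API documented above:
#
#   graph_def = tf.get_default_graph().as_graph_def()
#   y, = import_graph_def(graph_def,
#                         input_map={'x:0': replacement_tensor},
#                         return_elements=['y:0'])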
|
agpl-3.0
|
MiltosD/CEFELRC
|
lib/python2.7/site-packages/django/contrib/localflavor/in_/forms.py
|
309
|
1741
|
"""
India-specific Form helpers.
"""
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field, RegexField, Select
from django.utils.encoding import smart_unicode
from django.utils.translation import gettext
import re
class INZipCodeField(RegexField):
default_error_messages = {
'invalid': gettext(u'Enter a zip code in the format XXXXXX.'),
}
def __init__(self, *args, **kwargs):
super(INZipCodeField, self).__init__(r'^\d{6}$',
max_length=None, min_length=None, *args, **kwargs)
class INStateField(Field):
"""
A form field that validates its input is an Indian state name or
abbreviation. It normalizes the input to the standard two-letter vehicle
registration abbreviation for the given state or union territory.
"""
default_error_messages = {
'invalid': u'Enter an Indian state or territory.',
}
def clean(self, value):
from in_states import STATES_NORMALIZED
super(INStateField, self).clean(value)
if value in EMPTY_VALUES:
return u''
try:
value = value.strip().lower()
except AttributeError:
pass
else:
try:
return smart_unicode(STATES_NORMALIZED[value.strip().lower()])
except KeyError:
pass
raise ValidationError(self.error_messages['invalid'])
class INStateSelect(Select):
"""
A Select widget that uses a list of Indian states/territories as its
choices.
"""
def __init__(self, attrs=None):
from in_states import STATE_CHOICES
super(INStateSelect, self).__init__(attrs, choices=STATE_CHOICES)
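# A minimal usage sketch (hypothetical form), assuming the standard Django
# forms API:
#
#   from django import forms
#
#   class INAddressForm(forms.Form):
#       pincode = INZipCodeField()
#       state = INStateField(widget=INStateSelect())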
|
bsd-3-clause
|
simeonmiteff/junkbox
|
wbx.py
|
1
|
1978
|
#!/usr/bin/python
# wbx.py - dump MikroTik winbox addresses.wbx file format
# Mon Oct 13 10:03:26 SAST 2008
import sys
def munge_pair(pair):
""" Convert integer values to integers """
name, value, offset = pair
if name in ['secure-mode', 'keep-pwd']:
value = ord(value)
return (name, value, offset)
def get_pair(offset,data):
""" Extract a name/value pair from the data at offset. """
if ord(data[offset])==0:
# End of record
# Return (None, None, offset of next record)
return (None, None, offset+2)
rlen = ord(data[offset])-1 # Record length
nlen = ord(data[offset+2]) # Name length
vlen = rlen-nlen # Value length
offset = offset+3 # Skip to the name
name = data[offset:offset+nlen]
offset = offset+nlen # Skip to the value
value = data[offset:offset+vlen]
# Return (name, value, offset of next pair)
return (name, value, offset+vlen)
if __name__ == "__main__":
# Check arguments
if len(sys.argv)<2:
sys.stderr.write("Usage: %s [addresses.wbx]\n" % sys.argv[0])
sys.exit(1)
# Open file
try:
wbxfile = open(sys.argv[1])
except IOError, e:
sys.stderr.write("Failed to open file '%s': %s\n" % (sys.argv[1], str(e)))
sys.exit(1)
data = wbxfile.read() # Slurp the wbx file into a string
headings = ['host','login','note','secure-mode','keep-pwd','pwd']
offset = 4 # Skip the 4-byte file header
for head in headings: # Print headings
sys.stdout.write(head.ljust(16))
print
for head in headings: # Underline them
sys.stdout.write(('-'*len(head)).ljust(16))
print
# Dump data
try:
while True:
n, v, offset = munge_pair(get_pair(offset,data))
if n == "type": continue
if not n:
print
continue
sys.stdout.write(str(v).ljust(16))
except IndexError:
pass
|
apache-2.0
|
llooker/python_sdk
|
test/test_prefetch_access_filter_value.py
|
1
|
2462
|
# coding: utf-8
"""
Looker API 3.0 Reference
### Authorization The Looker API uses Looker **API3** credentials for authorization and access control. Looker admins can create API3 credentials on Looker's **Admin/Users** page. Pass API3 credentials to the **/login** endpoint to obtain a temporary access_token. Include that access_token in the Authorization header of Looker API requests. For details, see [Looker API Authorization](https://looker.com/docs/r/api/authorization) ### Client SDKs The Looker API is a RESTful system that should be usable by any programming language capable of making HTTPS requests. Client SDKs for a variety of programming languages can be generated from the Looker API's Swagger JSON metadata to streamline use of the Looker API in your applications. A client SDK for Ruby is available as an example. For more information, see [Looker API Client SDKs](https://looker.com/docs/r/api/client_sdks) ### Try It Out! The 'api-docs' page served by the Looker instance includes 'Try It Out!' buttons for each API method. After logging in with API3 credentials, you can use the \"Try It Out!\" buttons to call the API directly from the documentation page to interactively explore API features and responses. ### Versioning Future releases of Looker will expand this API release-by-release to securely expose more and more of the core power of Looker to API client applications. API endpoints marked as \"beta\" may receive breaking changes without warning. Stable (non-beta) API endpoints should not receive breaking changes in future releases. For more information, see [Looker API Versioning](https://looker.com/docs/r/api/versioning)
OpenAPI spec version: 3.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import swagger_client
from swagger_client.rest import ApiException
from swagger_client.models.prefetch_access_filter_value import PrefetchAccessFilterValue
class TestPrefetchAccessFilterValue(unittest.TestCase):
""" PrefetchAccessFilterValue unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testPrefetchAccessFilterValue(self):
"""
Test PrefetchAccessFilterValue
"""
model = swagger_client.models.prefetch_access_filter_value.PrefetchAccessFilterValue()
if __name__ == '__main__':
unittest.main()
|
mit
|
bukzor/sympy
|
sympy/galgebra/debug.py
|
52
|
4148
|
# sympy/galgebra/debug.py
from __future__ import print_function
from itertools import islice
def ostr(obj, dict_mode=False):
"""
Recursively convert iterated object (list/tuple/dict/set) to string.
"""
def ostr_rec(obj, dict_mode):
global ostr_s
if isinstance(obj, tuple):
if len(obj) == 0:
ostr_s += '(),'
else:
ostr_s += '('
for obj_i in obj:
ostr_rec(obj_i, dict_mode)
ostr_s = ostr_s[:-1] + '),'
elif isinstance(obj, list):
if len(obj) == 0:
ostr_s += '[],'
else:
ostr_s += '['
for obj_i in obj:
ostr_rec(obj_i, dict_mode)
ostr_s = ostr_s[:-1] + '],'
elif isinstance(obj, dict):
if dict_mode:
ostr_s += '\n'
for key in obj.keys():
ostr_rec(key, dict_mode)
if ostr_s[-1] == ',':
ostr_s = ostr_s[:-1]
ostr_s += ' -> '
ostr_rec(obj[key], dict_mode)
if ostr_s[-1] == ',':
ostr_s = ostr_s[:-1]
ostr_s += '\n'
else:
ostr_s += '{'
for key in obj.keys():
ostr_rec(key, dict_mode)
if ostr_s[-1] == ',':
ostr_s = ostr_s[:-1]
ostr_s += ':'
ostr_rec(obj[key], dict_mode)
ostr_s = ostr_s[:-1] + '} '
elif isinstance(obj, set):
tmp_obj = list(obj)
ostr_s += '{'
for obj_i in tmp_obj:
ostr_rec(obj_i, dict_mode)
ostr_s = ostr_s[:-1] + '},'
else:
ostr_s += str(obj) + ','
return
global ostr_s
ostr_s = ''
if isinstance(obj, (tuple, list, dict, set)):
ostr_rec(obj, dict_mode)
return ostr_s[:-1]
else:
return str(obj)
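# Example of the flattening performed above:
#   ostr([1, (2, 3)])  ->  '[1,(2,3)]'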
def oprint(*args, **kwargs):
"""
Debug printing for iterated (list/tuple/dict/set) objects. args is
of form (title1,object1,title2,object2,...) and prints:
title1 = object1
title2 = object2
...
If you only wish to print a title set object = None.
"""
if 'dict_mode' in kwargs:
dict_mode = kwargs['dict_mode']
else:
dict_mode = False
if isinstance(args[0], str) or args[0] is None:
titles = list(islice(args, None, None, 2))
objs = tuple(islice(args, 1, None, 2))
if len(args) > 2:
if objs[0] is None:
n = 0
else:
n = len(titles[0])
for (title, obj) in zip(titles[1:], objs[1:]):
if obj is not None:
if not (dict_mode and isinstance(obj, dict)):
n = max(n, len(title))
else:
n = len(titles[0])
for (title, obj) in zip(titles, objs):
if obj is None:
print(title)
else:
npad = n - len(title)
if isinstance(obj, dict):
print(title + ':' + ostr(obj, dict_mode))
else:
print(title + npad * ' ' + ' = ' + ostr(obj, dict_mode))
else:
for arg in args:
print(ostr(arg, dict_mode))
return
def print_sub_table(title, keys, sdict, blade_rep=True):
"""
Print the substitution dictionary, sdict, in the order given by keys
"""
if title is not None:
print(title)
for key in keys:
print(str(key) + ' = ' + ostr(sdict[key]))
return
def print_product_table(title, keys, pdict, op='*', blade_rep=True):
"""
Print the product dictionary, pdict, in the order given by keys
"""
if title is not None:
print(title)
pop = ')' + op + '('
for key1 in keys:
for key2 in keys:
print('(' + str(key1) + pop + str(key2) + ') = ' + ostr(pdict[(key1, key2)]))
return
|
bsd-3-clause
|
fujunwei/chromium-crosswalk
|
tools/memory_inspector/memory_inspector/classification/mmap_classifier_unittest.py
|
109
|
3441
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import unittest
from memory_inspector.classification import mmap_classifier
from memory_inspector.core import memory_map
_TEST_RULES = """
[
{
'name': 'anon',
'mmap_file': r'^\[anon',
'children': [
{
'name': 'jit',
'mmap_prot': 'r-x',
},
],
},
{
'name': 'dev',
'mmap_file': r'^/dev',
'children': [
{
'name': 'gpu',
'mmap_file': r'/gpu',
},
],
},
{
'name': 'lib',
'mmap_file': r'.so$',
'children': [
{
'name': 'data',
'mmap_prot': 'rw',
},
{
'name': 'text',
'mmap_prot': 'r-x',
},
],
},
]
"""
_TEST_MMAPS = [
# START END PROT FILE P.Dirt P.Clean S.Dirt S.Clean
(0x00000, 0x03fff, 'rw--', '[anon]', 4096, 0, 4096, 0),
(0x04000, 0x07fff, 'rw--', '/lib/1.so', 8192, 0, 0, 0),
(0x08000, 0x0bfff, 'r-x-', '/lib/1.so', 4096, 8192, 0, 0),
(0x0c000, 0x0ffff, 'rw--', '/lib/2.so', 0, 0, 4096, 8192),
(0x10000, 0x13fff, 'r-x-', '/lib/2.so', 0, 12288, 0, 4096),
(0x14000, 0x17fff, 'rw--', '/dev/gpu/1', 4096, 0, 0, 0),
(0x18000, 0x1bfff, 'rw--', '/dev/gpu/2', 8192, 0, 4096, 0),
(0x1c000, 0x1ffff, 'rw--', '/dev/foo', 0, 4096, 0, 8192),
(0x20000, 0x23fff, 'r-x-', '[anon:jit]', 8192, 0, 4096, 0),
(0x24000, 0x27fff, 'r---', 'OTHER', 0, 0, 8192, 0),
]
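# The expected aggregates below are [priv. dirty, priv. clean, shared dirty,
# shared clean], following the column order of _TEST_MMAPS.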
_EXPECTED_RESULTS = {
'Total': [36864, 24576, 24576, 20480],
'Total::anon': [12288, 0, 8192, 0],
'Total::anon::jit': [8192, 0, 4096, 0],
'Total::anon::anon-other': [4096, 0, 4096, 0],
'Total::dev': [12288, 4096, 4096, 8192],
'Total::dev::gpu': [12288, 0, 4096, 0],
'Total::dev::dev-other': [0, 4096, 0, 8192],
'Total::lib': [12288, 20480, 4096, 12288],
'Total::lib::data': [8192, 0, 4096, 8192],
'Total::lib::text': [4096, 20480, 0, 4096],
'Total::lib::lib-other': [0, 0, 0, 0],
'Total::Total-other': [0, 0, 8192, 0],
}
class MmapClassifierTest(unittest.TestCase):
def runTest(self):
rule_tree = mmap_classifier.LoadRules(_TEST_RULES)
mmap = memory_map.Map()
for m in _TEST_MMAPS:
mmap.Add(memory_map.MapEntry(
m[0], m[1], m[2], m[3], 0, m[4], m[5], m[6], m[7]))
res = mmap_classifier.Classify(mmap, rule_tree)
def CheckResult(node, prefix):
node_name = prefix + node.name
self.assertIn(node_name, _EXPECTED_RESULTS)
subtotal = node.values[0]
values = node.values[1:]
# First check that the subtotal matches clean + dirty + shared + priv.
self.assertEqual(subtotal, values[0] + values[1] + values[2] + values[3])
# Then check that the single values match the expectations.
self.assertEqual(values, _EXPECTED_RESULTS[node_name])
for child in node.children:
CheckResult(child, node_name + '::')
CheckResult(res.total, '')
|
bsd-3-clause
|
natebeacham/saml2
|
src/saml2/__init__.py
|
1
|
32985
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2006 Google Inc.
# Copyright (C) 2009 Umeå University
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains base classes representing SAML elements.
These codes were originally written by Jeffrey Scudder for
representing SAML elements. Takashi Matsuo added some code and
changed some. Roland Hedberg rewrote the whole thing from bottom up so
barely anything but the original structures remained.
Module objective: provide data classes for SAML constructs. These
classes hide the XML-ness of SAML and provide a set of native Python
classes to interact with.
Conversions to and from XML should only be necessary when the SAML classes
"touch the wire" and are sent over HTTP. For this reason this module
provides methods and functions to convert SAML classes to and from strings.
"""
# try:
# # lxml: best performance for XML processing
# import lxml.etree as ET
# except ImportError:
# try:
# # Python 2.5+: batteries included
# import xml.etree.cElementTree as ET
# except ImportError:
# try:
# # Python <2.5: standalone ElementTree install
# import elementtree.cElementTree as ET
# except ImportError:
# raise ImportError, "lxml or ElementTree are not installed, "\
# +"see http://codespeak.net/lxml "\
# +"or http://effbot.org/zone/element-index.htm"
import logging
try:
from xml.etree import cElementTree as ElementTree
if ElementTree.VERSION < '1.3.0':
# cElementTree has no support for register_namespace
# nor _namespace_map, thus we sacrifice performance
# for correctness
from xml.etree import ElementTree
except ImportError:
try:
import cElementTree as ElementTree
except ImportError:
from elementtree import ElementTree
root_logger = logging.getLogger("pySAML2")
root_logger.level = logging.NOTSET
NAMESPACE = 'urn:oasis:names:tc:SAML:2.0:assertion'
#TEMPLATE = '{urn:oasis:names:tc:SAML:2.0:assertion}%s'
#XSI_NAMESPACE = 'http://www.w3.org/2001/XMLSchema-instance'
NAMEID_FORMAT_EMAILADDRESS = (
"urn:oasis:names:tc:SAML:2.0:nameid-format:emailAddress")
# These are defined in saml2.saml
#NAME_FORMAT_UNSPECIFIED = (
# "urn:oasis:names:tc:SAML:2.0:attrname-format:unspecified")
#NAME_FORMAT_URI = "urn:oasis:names:tc:SAML:2.0:attrname-format:uri"
#NAME_FORMAT_BASIC = "urn:oasis:names:tc:SAML:2.0:attrname-format:basic"
SUBJECT_CONFIRMATION_METHOD_BEARER = "urn:oasis:names:tc:SAML:2.0:cm:bearer"
DECISION_TYPE_PERMIT = "Permit"
DECISION_TYPE_DENY = "Deny"
DECISION_TYPE_INDETERMINATE = "Indeterminate"
VERSION = "2.0"
BINDING_SOAP = 'urn:oasis:names:tc:SAML:2.0:bindings:SOAP'
BINDING_PAOS = 'urn:oasis:names:tc:SAML:2.0:bindings:PAOS'
BINDING_HTTP_REDIRECT = 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Redirect'
BINDING_HTTP_POST = 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-POST'
BINDING_HTTP_ARTIFACT = 'urn:oasis:names:tc:SAML:2.0:bindings:HTTP-Artifact'
BINDING_URI = 'urn:oasis:names:tc:SAML:2.0:bindings:URI'
def class_name(instance):
return "%s:%s" % (instance.c_namespace, instance.c_tag)
def create_class_from_xml_string(target_class, xml_string):
"""Creates an instance of the target class from a string.
:param target_class: The class which will be instantiated and populated
with the contents of the XML. This class must have a c_tag and a
c_namespace class variable.
:param xml_string: A string which contains valid XML. The root element
of the XML string should match the tag and namespace of the desired
class.
:return: An instance of the target class with members assigned according to
the contents of the XML - or None if the root XML tag and namespace did
not match those of the target class.
"""
tree = ElementTree.fromstring(xml_string)
return create_class_from_element_tree(target_class, tree)
def create_class_from_element_tree(target_class, tree, namespace=None,
tag=None):
"""Instantiates the class and populates members according to the tree.
Note: Only use this function with classes that have c_namespace and c_tag
class members.
:param target_class: The class which will be instantiated and populated
with the contents of the XML.
:param tree: An element tree whose contents will be converted into
members of the new target_class instance.
:param namespace: The namespace which the XML tree's root node must
match. If omitted, the namespace defaults to the c_namespace of the
target class.
:param tag: The tag which the XML tree's root node must match. If
omitted, the tag defaults to the c_tag class member of the target
class.
:return: An instance of the target class - or None if the tag and namespace
of the XML tree's root node did not match the desired namespace and tag.
"""
if namespace is None:
namespace = target_class.c_namespace
if tag is None:
tag = target_class.c_tag
if tree.tag == '{%s}%s' % (namespace, tag):
target = target_class()
target.harvest_element_tree(tree)
return target
else:
return None
class Error(Exception):
"""Exception class thrown by this module."""
pass
class ExtensionElement(object):
"""XML which is not part of the SAML specification,
these are called extension elements. If a class's parser
encounters an unexpected XML construct, it is translated into an
ExtensionElement instance. ExtensionElement is designed to fully
capture the information in the XML. Child nodes in an XML
extension are turned into ExtensionElements as well.
"""
def __init__(self, tag, namespace=None, attributes=None,
children=None, text=None):
"""Constructor for ExtensionElement
:param namespace: The XML namespace for this element.
:param tag: The tag (without the namespace qualifier) for
this element. To reconstruct the full qualified name of the
element, combine this tag with the namespace.
:param attributes: The attribute value string pairs for the XML
attributes of this element.
:param children: list (optional) A list of ExtensionElements which
represent the XML child nodes of this element.
"""
self.namespace = namespace
self.tag = tag
self.attributes = attributes or {}
self.children = children or []
self.text = text
def to_string(self):
""" Serialize the object into a XML string """
element_tree = self.transfer_to_element_tree()
return ElementTree.tostring(element_tree, encoding="UTF-8")
def transfer_to_element_tree(self):
if self.tag is None:
return None
element_tree = ElementTree.Element('')
if self.namespace is not None:
element_tree.tag = '{%s}%s' % (self.namespace, self.tag)
else:
element_tree.tag = self.tag
for key, value in self.attributes.iteritems():
element_tree.attrib[key] = value
for child in self.children:
child.become_child_element_of(element_tree)
element_tree.text = self.text
return element_tree
def become_child_element_of(self, element_tree):
"""Converts this object into an etree element and adds it as a child
node in an etree element.
Adds self to the ElementTree. This method is required to avoid verbose
XML which constantly redefines the namespace.
:param element_tree: ElementTree._Element The element to which this
object's XML will be added.
"""
new_element = self.transfer_to_element_tree()
element_tree.append(new_element)
def find_children(self, tag=None, namespace=None):
"""Searches child nodes for objects with the desired tag/namespace.
Returns a list of extension elements within this object whose tag
and/or namespace match those passed in. To find all children in
a particular namespace, specify the namespace but not the tag name.
If you specify only the tag, the result list may contain extension
elements in multiple namespaces.
:param tag: str (optional) The desired tag
:param namespace: str (optional) The desired namespace
:return: A list of elements whose tag and/or namespace match the
parameters values
"""
results = []
if tag and namespace:
for element in self.children:
if element.tag == tag and element.namespace == namespace:
results.append(element)
elif tag and not namespace:
for element in self.children:
if element.tag == tag:
results.append(element)
elif namespace and not tag:
for element in self.children:
if element.namespace == namespace:
results.append(element)
else:
for element in self.children:
results.append(element)
return results
def loadd(self, ava):
""" expects a special set of keys """
if "attributes" in ava:
for key, val in ava["attributes"].items():
self.attributes[key] = val
try:
self.tag = ava["tag"]
except KeyError:
if not self.tag:
raise KeyError("ExtensionElement must have a tag")
try:
self.namespace = ava["namespace"]
except KeyError:
if not self.namespace:
raise KeyError("ExtensionElement must belong to a namespace")
try:
self.text = ava["text"]
except KeyError:
pass
if "children" in ava:
for item in ava["children"]:
self.children.append(ExtensionElement(item["tag"]).loadd(item))
return self
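# Example of the dictionary shape loadd() expects (hypothetical values):
#   {"tag": "Foo", "namespace": "urn:example", "text": "bar",
#    "attributes": {"Name": "x"}, "children": [{"tag": "Child"}]}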
def extension_element_from_string(xml_string):
element_tree = ElementTree.fromstring(xml_string)
return _extension_element_from_element_tree(element_tree)
def _extension_element_from_element_tree(element_tree):
elementc_tag = element_tree.tag
if '}' in elementc_tag:
namespace = elementc_tag[1:elementc_tag.index('}')]
tag = elementc_tag[elementc_tag.index('}')+1:]
else:
namespace = None
tag = elementc_tag
extension = ExtensionElement(namespace=namespace, tag=tag)
for key, value in element_tree.attrib.iteritems():
extension.attributes[key] = value
for child in element_tree:
extension.children.append(_extension_element_from_element_tree(child))
extension.text = element_tree.text
return extension
class ExtensionContainer(object):
c_tag = ""
c_namespace = ""
def __init__(self, text=None, extension_elements=None,
extension_attributes=None):
self.text = text
self.extension_elements = extension_elements or []
self.extension_attributes = extension_attributes or {}
# Three methods to create an object from an ElementTree
def harvest_element_tree(self, tree):
# Fill in the instance members from the contents of the XML tree.
for child in tree:
self._convert_element_tree_to_member(child)
for attribute, value in tree.attrib.iteritems():
self._convert_element_attribute_to_member(attribute, value)
self.text = tree.text
def _convert_element_tree_to_member(self, child_tree):
self.extension_elements.append(_extension_element_from_element_tree(
child_tree))
def _convert_element_attribute_to_member(self, attribute, value):
self.extension_attributes[attribute] = value
# One method to create an ElementTree from an object
def _add_members_to_element_tree(self, tree):
for child in self.extension_elements:
child.become_child_element_of(tree)
for attribute, value in self.extension_attributes.iteritems():
tree.attrib[attribute] = value
tree.text = self.text
def find_extensions(self, tag=None, namespace=None):
"""Searches extension elements for child nodes with the desired name.
Returns a list of extension elements within this object whose tag
and/or namespace match those passed in. To find all extensions in
a particular namespace, specify the namespace but not the tag name.
If you specify only the tag, the result list may contain extension
elements in multiple namespaces.
:param tag: str (optional) The desired tag
:param namespace: str (optional) The desired namespace
:return: A list of elements whose tag and/or namespace match the
parameters values
"""
results = []
if tag and namespace:
for element in self.extension_elements:
if element.tag == tag and element.namespace == namespace:
results.append(element)
elif tag and not namespace:
for element in self.extension_elements:
if element.tag == tag:
results.append(element)
elif namespace and not tag:
for element in self.extension_elements:
if element.namespace == namespace:
results.append(element)
else:
for element in self.extension_elements:
results.append(element)
return results
def extensions_as_elements(self, tag, schema):
""" Return extensions that has the given tag and belongs to the
given schema as native elements of that schema.
:param tag: The tag of the element
:param schema: Which schema the element should originate from
:return: a list of native elements
"""
result = []
for ext in self.find_extensions(tag, schema.NAMESPACE):
ets = schema.ELEMENT_FROM_STRING[tag]
result.append(ets(ext.to_string()))
return result
def add_extension_elements(self, items):
for item in items:
self.extension_elements.append(element_to_extension_element(item))
def add_extension_element(self, item):
self.extension_elements.append(element_to_extension_element(item))
def add_extension_attribute(self, name, value):
self.extension_attributes[name] = value
def make_vals(val, klass, klass_inst=None, prop=None, part=False,
base64encode=False):
"""
Creates a class instance with a specified value, the specified
class instance may be a value on a property in a defined class instance.
:param val: The value
:param klass: The value class
:param klass_inst: The class instance that has a property to which
the value returned by this function is assigned.
:param prop: The property which the value should be assigned to.
:param part: If the value is one of a possible list of values it should be
handled slightly differently than if it isn't.
:return: Value class instance
"""
cinst = None
#print "make_vals(%s, %s)" % (val, klass)
if isinstance(val, dict):
cinst = klass().loadd(val, base64encode=base64encode)
else:
try:
cinst = klass().set_text(val)
except ValueError:
if not part:
cis = [make_vals(sval, klass, klass_inst, prop, True,
base64encode) for sval in val]
setattr(klass_inst, prop, cis)
else:
raise
if part:
return cinst
else:
if cinst:
cis = [cinst]
setattr(klass_inst, prop, cis)
def make_instance(klass, spec, base64encode=False):
"""
Constructs a class instance containing the specified information
:param klass: The class
:param spec: Information to be placed in the instance (a dictionary)
:return: The instance
"""
return klass().loadd(spec, base64encode)
class SamlBase(ExtensionContainer):
"""A foundation class on which SAML classes are built. It
handles the parsing of attributes and children which are common to all
SAML classes. By default, the SamlBase class translates all XML child
nodes into ExtensionElements.
"""
c_children = {}
c_attributes = {}
c_attribute_type = {}
#c_attribute_use = {}
#c_attribute_required = {}
c_child_order = []
c_cardinality = {}
def _get_all_c_children_with_order(self):
if len(self.c_child_order) > 0:
for child in self.c_child_order:
yield child
else:
for _, values in self.__class__.c_children.iteritems():
yield values[0]
def _convert_element_tree_to_member(self, child_tree):
# Find the element's tag in this class's list of child members
if self.__class__.c_children.has_key(child_tree.tag):
member_name = self.__class__.c_children[child_tree.tag][0]
member_class = self.__class__.c_children[child_tree.tag][1]
# If the class member is supposed to contain a list, make sure the
# matching member is set to a list, then append the new member
# instance to the list.
if isinstance(member_class, list):
if getattr(self, member_name) is None:
setattr(self, member_name, [])
getattr(self, member_name).append(
create_class_from_element_tree(
member_class[0], child_tree))
else:
setattr(self, member_name,
create_class_from_element_tree(member_class,
child_tree))
else:
ExtensionContainer._convert_element_tree_to_member(self,
child_tree)
def _convert_element_attribute_to_member(self, attribute, value):
# Find the attribute in this class's list of attributes.
if self.__class__.c_attributes.has_key(attribute):
# Find the member of this class which corresponds to the XML
# attribute(lookup in current_class.c_attributes) and set this
# member to the desired value (using self.__dict__).
setattr(self, self.__class__.c_attributes[attribute][0], value)
else:
# If it doesn't appear in the attribute list it's an extension
ExtensionContainer._convert_element_attribute_to_member(self,
attribute, value)
# Three methods to create an ElementTree from an object
def _add_members_to_element_tree(self, tree):
# Convert the members of this class which are XML child nodes.
# This uses the class's c_children dictionary to find the members which
# should become XML child nodes.
for member_name in self._get_all_c_children_with_order():
member = getattr(self, member_name)
if member is None:
pass
elif isinstance(member, list):
for instance in member:
instance.become_child_element_of(tree)
else:
member.become_child_element_of(tree)
# Convert the members of this class which are XML attributes.
for xml_attribute, attribute_info in \
self.__class__.c_attributes.iteritems():
(member_name, member_type, required) = attribute_info
member = getattr(self, member_name)
if member is not None:
tree.attrib[xml_attribute] = member
# Lastly, call the ExtensionContainers's _add_members_to_element_tree
# to convert any extension attributes.
ExtensionContainer._add_members_to_element_tree(self, tree)
def become_child_element_of(self, tree):
"""
Note: Only for use with classes that have a c_tag and c_namespace class
member. It is in SamlBase so that it can be inherited but it should
not be called on instances of SamlBase.
:param tree: The tree to which this instance should be a child
"""
new_child = self._to_element_tree()
tree.append(new_child)
def _to_element_tree(self):
"""
        Note: This method is designed to be used only with classes that have a
        c_tag and c_namespace. It is placed in SamlBase for inheritance but
        should not be called on instances of SamlBase itself.
"""
new_tree = ElementTree.Element('{%s}%s' % (self.__class__.c_namespace,
self.__class__.c_tag))
self._add_members_to_element_tree(new_tree)
return new_tree
def to_string(self, nspair=None):
"""Converts the Saml object to a string containing XML."""
if nspair:
for prefix, uri in nspair.items():
try:
ElementTree.register_namespace(prefix, uri)
except AttributeError:
# Backwards compatibility with ET < 1.3
ElementTree._namespace_map[uri] = prefix
return ElementTree.tostring(self._to_element_tree(), encoding="UTF-8")
def __str__(self):
return self.to_string()
# def _init_attribute(self, extension_attribute_id,
# extension_attribute_name, value=None):
#
# self.c_attributes[extension_attribute_id] = (extension_attribute_name,
# None, False)
# if value:
# self.__dict__[extension_attribute_name] = value
def keyswv(self):
""" Return the keys of attributes or children that has values
:return: list of keys
"""
return [key for key, val in self.__dict__.items() if val]
def keys(self):
""" Return all the keys that represent possible attributes and
children.
:return: list of keys
"""
keys = ['text']
keys.extend([n for (n, t, r) in self.c_attributes.values()])
keys.extend([v[0] for v in self.c_children.values()])
return keys
def children_with_values(self):
""" Returns all children that has values
:return: Possibly empty list of children.
"""
childs = []
for attribute in self._get_all_c_children_with_order():
member = getattr(self, attribute)
if member is None or member == []:
pass
elif isinstance(member, list):
for instance in member:
childs.append(instance)
else:
childs.append(member)
return childs
#noinspection PyUnusedLocal
def set_text(self, val, base64encode=False):
""" Sets the text property of this instance.
:param val: The value of the text property
:param base64encode: Whether the value should be base64encoded
:return: The instance
"""
#print "set_text: %s" % (val,)
if isinstance(val, bool):
if val:
setattr(self, "text", "true")
else:
setattr(self, "text", "false")
elif isinstance(val, int):
setattr(self, "text", "%d" % val)
elif isinstance(val, basestring):
setattr(self, "text", val)
elif val is None:
pass
else:
            raise ValueError("Type shouldn't be '%s'" % (type(val),))
return self
def loadd(self, ava, base64encode=False):
"""
Sets attributes, children, extension elements and extension
attributes of this element instance depending on what is in
        the given dictionary. If there are already values on properties
        those will be overwritten. If the keys in the dictionary do not
        correspond to known attributes/children/.. they are ignored.
        :param ava: The dictionary
        :param base64encode: Whether the values on attributes or texts on
            children should be base64encoded.
:return: The instance
"""
for prop, _typ, _req in self.c_attributes.values():
#print "# %s" % (prop)
if prop in ava:
if isinstance(ava[prop], bool):
setattr(self, prop, "%s" % ava[prop])
elif isinstance(ava[prop], int):
setattr(self, prop, "%d" % ava[prop])
else:
setattr(self, prop, ava[prop])
if "text" in ava:
self.set_text(ava["text"], base64encode)
for prop, klassdef in self.c_children.values():
#print "## %s, %s" % (prop, klassdef)
if prop in ava:
#print "### %s" % ava[prop]
# means there can be a list of values
if isinstance(klassdef, list):
make_vals(ava[prop], klassdef[0], self, prop,
base64encode=base64encode)
else:
cis = make_vals(ava[prop], klassdef, self, prop, True,
base64encode)
setattr(self, prop, cis)
if "extension_elements" in ava:
for item in ava["extension_elements"]:
self.extension_elements.append(ExtensionElement(
item["tag"]).loadd(item))
if "extension_attributes" in ava:
for key, val in ava["extension_attributes"].items():
self.extension_attributes[key] = val
return self
# def complete(self):
# for prop, _typ, req in self.c_attributes.values():
# if req and not getattr(self, prop):
# return False
#
# for prop, klassdef in self.c_children.values():
# try:
# restriction = self.c_cardinality[prop]
# val = getattr(self, prop)
# if val is None:
# num = 0
# elif isinstance(val, list):
# num = len(val)
# else:
# num = 1
#
# try:
# minimum = restriction["min"]
# except KeyError:
# minimum = 1
# if num < minimum:
# return False
# try:
# maximum = restriction["max"]
# except KeyError:
# maximum = 1
# # what if max == 0 ??
# if maximum == "unbounded":
# continue
# elif num > maximum:
# return False
# except KeyError:
# # default cardinality: min=max=1
# if not getattr(self, prop):
# return False
#
# return True
def child_class(self, child):
""" Return the class a child element should be an instance of
:param child: The name of the child element
:return: The class
"""
for prop, klassdef in self.c_children.values():
if child == prop:
if isinstance(klassdef, list):
return klassdef[0]
else:
return klassdef
return None
def child_cardinality(self, child):
""" Return the cardinality of a child element
:param child: The name of the child element
:return: The cardinality as a 2-tuple (min, max).
The max value is either a number or the string "unbounded".
The min value is always a number.
"""
for prop, klassdef in self.c_children.values():
if child == prop:
if isinstance(klassdef, list):
                    try:
                        min = self.c_cardinality[prop]["min"]
                    except KeyError:
                        min = 1
                    try:
                        max = self.c_cardinality[prop]["max"]
                    except KeyError:
                        max = "unbounded"
return min, max
else:
                    return 1, 1
return None
def element_to_extension_element(element):
"""
    Convert an element into an extension element
:param element: The element instance
:return: An extension element instance
"""
exel = ExtensionElement(element.c_tag, element.c_namespace,
text=element.text)
exel.attributes.update(element.extension_attributes)
exel.children.extend(element.extension_elements)
for xml_attribute, (member_name, typ, req) in element.c_attributes.iteritems():
member_value = getattr(element, member_name)
if member_value is not None:
exel.attributes[xml_attribute] = member_value
exel.children.extend([element_to_extension_element(c) \
for c in element.children_with_values()])
return exel
def extension_element_to_element(extension_element, translation_functions,
namespace=None):
""" Convert an extension element to a normal element.
In order to do this you need to have an idea of what type of
element it is. Or rather which module it belongs to.
:param extension_element: The extension element
    :param translation_functions: A dictionary with klass identifiers
        as keys and string-to-element translation functions as values
:param namespace: The namespace of the translation functions.
:return: An element instance or None
"""
try:
element_namespace = extension_element.namespace
except AttributeError:
element_namespace = extension_element.c_namespace
if element_namespace == namespace:
try:
try:
ets = translation_functions[extension_element.tag]
except AttributeError:
ets = translation_functions[extension_element.c_tag]
return ets(extension_element.to_string())
except KeyError:
pass
return None
def extension_elements_to_elements(extension_elements, schemas):
""" Create a list of elements each one matching one of the
given extension elements. This is of course dependent on the access
to schemas that describe the extension elements.
:param extension_elements: The list of extension elements
:param schemas: Imported Python modules that represent the different
known schemas used for the extension elements
:return: A list of elements, representing the set of extension elements
that was possible to match against a Class in the given schemas.
The elements returned are the native representation of the elements
according to the schemas.
"""
res = []
for extension_element in extension_elements:
for schema in schemas:
inst = extension_element_to_element(extension_element,
schema.ELEMENT_FROM_STRING,
schema.NAMESPACE)
if inst:
res.append(inst)
break
return res
def extension_elements_as_dict(extension_elements, onts):
ees_ = extension_elements_to_elements(extension_elements, onts)
res = {}
for elem in ees_:
try:
res[elem.c_tag].append(elem)
except KeyError:
res[elem.c_tag] = [elem]
return res
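# A minimal usage sketch (hypothetical schema modules; each module is assumed
# to expose ELEMENT_FROM_STRING and NAMESPACE, as the functions above expect):
#
#   from saml2 import saml, samlp          # hypothetical imports
#
#   elements = extension_elements_to_elements(inst.extension_elements,
#                                             [saml, samlp])
#   by_tag = extension_elements_as_dict(inst.extension_elements,
#                                       [saml, samlp])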
|
bsd-2-clause
|
crs4/pydoop
|
examples/avro/py/avro_container_dump_results.py
|
2
|
1109
|
# BEGIN_COPYRIGHT
#
# Copyright 2009-2021 CRS4.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# END_COPYRIGHT
import sys
from avro.io import DatumReader
from avro.datafile import DataFileReader
def main(fn, out_fn, avro_mode=''):
with open(out_fn, 'w') as fo:
with open(fn, 'rb') as f:
reader = DataFileReader(f, DatumReader())
for r in reader:
if avro_mode.upper() == 'KV':
r = r['key']
fo.write('%s\t%r\n' % (r['office'], r['counts']))
print('wrote', out_fn)
if __name__ == '__main__':
main(*sys.argv[1:])
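# A minimal invocation sketch (hypothetical file names):
#
#   python avro_container_dump_results.py results.avro results.tsv KV
#
# The optional third argument selects 'KV' mode, in which each record is a
# key/value pair and only the 'key' part is dumped.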
|
apache-2.0
|
google-research/seed_rl
|
football/vtrace_main.py
|
1
|
1875
|
# coding=utf-8
# Copyright 2019 The SEED Authors
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""V-trace (IMPALA) learner for Google Research Football."""
from absl import app
from absl import flags
from seed_rl.agents.vtrace import learner
from seed_rl.common import actor
from seed_rl.common import common_flags
from seed_rl.football import env
from seed_rl.football import networks
import tensorflow as tf
FLAGS = flags.FLAGS
# Optimizer settings.
flags.DEFINE_float('learning_rate', 0.00048, 'Learning rate.')
def create_agent(unused_action_space, unused_env_observation_space,
parametric_action_distribution):
return networks.GFootball(parametric_action_distribution)
def create_optimizer(unused_final_iteration):
learning_rate_fn = lambda iteration: FLAGS.learning_rate
optimizer = tf.keras.optimizers.Adam(FLAGS.learning_rate)
return optimizer, learning_rate_fn
def main(argv):
if len(argv) > 1:
raise app.UsageError('Too many command-line arguments.')
if FLAGS.run_mode == 'actor':
actor.actor_loop(env.create_environment)
elif FLAGS.run_mode == 'learner':
learner.learner_loop(env.create_environment,
create_agent,
create_optimizer)
else:
raise ValueError('Unsupported run mode {}'.format(FLAGS.run_mode))
if __name__ == '__main__':
app.run(main)
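# A minimal launch sketch (hypothetical invocation; --run_mode is assumed to
# come from seed_rl.common.common_flags, as the imports above suggest):
#
#   python vtrace_main.py --run_mode=learner --learning_rate=0.00048
#   python vtrace_main.py --run_mode=actor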
|
apache-2.0
|
Limezero/libreoffice
|
wizards/com/sun/star/wizards/ui/DocumentPreview.py
|
6
|
3783
|
#
# This file is part of the LibreOffice project.
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
#
# This file incorporates work covered by the following license notice:
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed
# with this work for additional information regarding copyright
# ownership. The ASF licenses this file to you under the Apache
# License, Version 2.0 (the "License"); you may not use this file
# except in compliance with the License. You may obtain a copy of
# the License at http://www.apache.org/licenses/LICENSE-2.0 .
#
import traceback
from ..common.Properties import Properties
from com.sun.star.awt import WindowDescriptor
from com.sun.star.awt import Rectangle
from com.sun.star.awt.WindowClass import SIMPLE
from com.sun.star.awt.VclWindowPeerAttribute import CLIPCHILDREN
from com.sun.star.awt.WindowAttribute import SHOW
'''
@author rpiterman
To change the template for this generated type comment go to
Window>Preferences>Java>Code Generation>Code and Comments
'''
class DocumentPreview(object):
PREVIEW_MODE = 1
'''
create new frame with window inside
load a component as preview into this frame
'''
def __init__(self, xmsf, control):
self.xControl = control
self.createPreviewFrame(xmsf, self.xControl)
def setDocument(self, url_, propNames, propValues=None):
if propValues is None:
if propNames == DocumentPreview.PREVIEW_MODE:
self.setDocument(url_, ("Preview", "ReadOnly"), (True, True))
else:
self.loadArgs = propNames
self.xFrame.activate()
self.xComponent = self.xFrame.loadComponentFromURL(url_, "_self", 0, tuple(self.loadArgs))
return self.xComponent
else:
self.url = url_
ps = Properties()
for index,item in enumerate(propNames):
ps[item] = propValues[index]
return self.setDocument(self.url, ps.getProperties1())
def reload(self, xmsf):
self.closeFrame()
self.createPreviewFrame(xmsf, self.xControl)
self.setDocument(self.url, self.loadArgs)
def closeFrame(self):
if self.xFrame is not None:
self.xFrame.close(False)
'''
create a new frame with a new container window inside,
which is not part of the global frame tree.
Attention:
    a) This frame won't be destroyed by the office. It must be closed by you!
    Do so - please call XCloseable::close().
    b) The container window is part of the frame. Don't hold it alive - nor try to kill it.
It will be destroyed inside close().
'''
def createPreviewFrame(self, xmsf, xControl):
controlPeer = xControl.Peer
r = xControl.PosSize
toolkit = xmsf.createInstance("com.sun.star.awt.Toolkit")
aDescriptor = WindowDescriptor()
aDescriptor.Type = SIMPLE
aDescriptor.WindowServiceName = "window"
aDescriptor.ParentIndex = -1
aDescriptor.Parent = controlPeer
#xWindowPeer; #argument !
aDescriptor.Bounds = Rectangle(0, 0, r.Width, r.Height)
aDescriptor.WindowAttributes = CLIPCHILDREN | SHOW
self.xWindow = toolkit.createWindow(aDescriptor)
self.xFrame = xmsf.createInstance("com.sun.star.frame.Frame")
self.xFrame.initialize(self.xWindow)
self.xWindow.setVisible(True)
def dispose(self):
try:
self.closeFrame()
except Exception:
traceback.print_exc()
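# A minimal usage sketch (hypothetical xmsf service manager and dialog
# control, both normally supplied by the wizard framework):
#
#   preview = DocumentPreview(xmsf, control)
#   preview.setDocument("file:///tmp/example.odt",
#                       DocumentPreview.PREVIEW_MODE)
#   ...
#   preview.dispose()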
|
gpl-3.0
|
amorwilliams/chameleon
|
client/tools/buildscript/chameleon_upgrade.py
|
4
|
1737
|
#encoding=utf-8
import os, shutil, sys, zipfile
BASEDIR = os.path.split(os.path.abspath(__file__))[0]
class UpgradeEnv(object):
def __init__(self, basedir):
self.basedir = basedir
def upgradeFromZip(self, filename):
with zipfile.ZipFile(filename) as fs:
try:
with fs.open('filelist.txt', 'r') as filelistFs:
content = filelistFs.read()
addedFiles, deletedFiles, modFiles = parseDiffFile(content)
for f in addedFiles:
self.addFiles(f, fs)
for f in deletedFiles:
self.deleteFiles(f)
for f in modFiles:
self.modFile(f, fs)
except Exception, e:
                print >> sys.stderr, u'Upgrade failed....'
raise e
def addFiles(self, fpath, zipfs):
zipfs.extract(fpath, self.basedir)
print '%s is extracted' %fpath
def deleteFiles(self, fpath):
pp = os.path.join(self.basedir, fpath)
print pp
if os.path.isdir(pp):
shutil.rmtree(pp)
elif os.path.isfile(pp):
os.remove(pp)
def modFile(self, fpath, zipfs):
pp = os.path.join(self.basedir, fpath)
if os.path.isfile(pp):
os.remove(pp)
self.addFiles(fpath, zipfs)
def parseDiffFile(content):
if len(content) == 0:
return [[], [], []]
ls = content.split('\n')
ss = [(l[0],l[2:]) for l in ls if len(l) > 2]
r = {'A':[], 'D':[], 'M':[]}
for s in ss:
        if s[0] in r:
r[s[0]].append(s[1])
else:
r[s[0]] = [s[1]]
return [r['A'], r['D'], r['M']]
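# A minimal sketch of the expected filelist.txt format (hypothetical paths):
# one "<status> <path>" entry per line, where A, D and M mark files to be
# added, deleted and modified, respectively:
#
#   A res/new_asset.png
#   D scripts/obsolete.py
#   M chameleon/config.json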
|
mit
|
gberl001/vlfeat
|
python/vlfeat/misc/colorspaces.py
|
6
|
3124
|
import numpy
def vl_xyz2lab(I,il='E'):
# VL_XYZ2LAB Convert XYZ color space to LAB
# J = VL_XYZ2LAB(I) converts the image from XYZ format to LAB format.
#
# VL_XYZ2LAB(I,IL) uses one of the illuminants A, B, C, E, D50, D55,
# D65, D75, D93. The default illuminant is E.
#
# See also:: VL_XYZ2LUV(), VL_HELP().
# AUTORIGHTS
# Copyright (C) 2007-10 Andrea Vedaldi and Brian Fulkerson
#
# This file is part of VLFeat, available under the terms of the
# GNU GPLv2, or (at your option) any later version.
def f(a):
k = 903.3
b=numpy.zeros(a.shape)
b[a>0.00856] = a[a>0.00856]**(1/3.)
b[a<=0.00856] = (k*a[a<=0.00856] + 16)/116.
return b
il=il.lower()
if il=='a':
xw = 0.4476
yw = 0.4074
elif il=='b':
xw = 0.3324
yw = 0.3474
elif il=='c':
xw = 0.3101
yw = 0.3162
elif il=='e':
xw = 1./3
yw = 1./3
elif il=='d50':
xw = 0.3457
yw = 0.3585
elif il=='d55':
xw = 0.3324
yw = 0.3474
elif il=='d65':
xw = 0.312713
yw = 0.329016
elif il=='d75':
xw = 0.299
yw = 0.3149
    elif il=='d93':
        xw = 0.2848
        yw = 0.2932
    else:
        raise ValueError("unknown illuminant '%s'" % il)
J=numpy.zeros(I.shape)
# Reference white
Yw = 1.0
Xw = xw/yw
Zw = (1-xw-yw)/yw * Yw
# XYZ components
X = I[:,:,0]
Y = I[:,:,1]
Z = I[:,:,2]
x = X/Xw
y = Y/Yw
z = Z/Zw
L = 116 * f(y) - 16
a = 500*(f(x) - f(y))
b = 200*(f(y) - f(z))
J = numpy.rollaxis(numpy.array([L,a,b]),0,3)
return J
def vl_rgb2xyz(I,workspace="CIE"):
#VL_RGB2XYZ Convert RGB color space to XYZ
#J=VL_RGB2XYZ(I) converts the CIE RGB image I to the image J in
#CIE XYZ format. CIE RGB has a white point of R=G=B=1.0
#VL_RGB2XYZ(I,WS) uses the specified RGB working space WS. The
#function supports the following RGB working spaces:
#* `CIE' E illuminant, gamma=2.2
#* `Adobe' D65 illuminant, gamma=2.2
#The default workspace is CIE.
#See also:: VL_XYZ2RGB(), VL_HELP().
#AUTORIGHTS
#Copyright (C) 2007-10 Andrea Vedaldi and Brian Fulkerson
#This file is part of VLFeat, available under the terms of the
#GNU GPLv2, or (at your option) any later version.
M,N,K = I.shape
    if not K==3:
        raise ValueError('I must be a MxNx3 array.')
#I=im2double(I) ;
if workspace=='CIE':
#CIE: E illuminant and 2.2 gamma
A = numpy.array([
[0.488718, 0.176204, 0.000000],
[0.310680, 0.812985, 0.0102048],
[0.200602, 0.0108109, 0.989795 ]]).T
gamma = 2.2
    elif workspace=='Adobe':
        #Adobe 1998: D65 illuminant and 2.2 gamma
        A = numpy.array([
            [0.576700, 0.297361, 0.0270328],
            [0.185556, 0.627355, 0.0706879],
            [0.188212, 0.0752847, 0.99124 ]]).T
        gamma = 2.2
    else:
        raise ValueError("unknown workspace '%s'" % workspace)
    I = (I**gamma).reshape(M*N, K)
J = numpy.dot(A,I.T)
J = J.T.reshape(M, N, K)
return J
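# A minimal usage sketch (hypothetical input; any float RGB image with
# values in [0, 1] and shape MxNx3 should work):
#
#   import numpy
#   rgb = numpy.random.rand(4, 4, 3)
#   xyz = vl_rgb2xyz(rgb)            # CIE working space by default
#   lab = vl_xyz2lab(xyz, il='d65')  # D65 illuminant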
|
gpl-2.0
|
CeltonMcGrath/TACTIC
|
src/pyasm/deprecated/flash/widget/flash_status_filter.py
|
6
|
3018
|
###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ['FlashStatusFilter','FlashStatusViewFilter']
from pyasm.common import SetupException
from pyasm.biz import *
from pyasm.web import *
from pyasm.widget import *
from pyasm.search import Search
class FlashStatusFilter(Widget):
def __init__(my, pipeline_name="dept"):
my.pipeline_name = pipeline_name
my.pipeline = Pipeline.get_by_name(pipeline_name)
super(FlashStatusFilter,my).__init__(my)
def init(my):
        if my.pipeline is None:
raise SetupException("Pipeline [%s] does not exist" % my.pipeline_name)
processes = my.pipeline.get_process_names()
processes.append("all")
# copy for labels and add an "All"
labels = [x.capitalize() for x in processes]
my.status_select = SelectWdg("status_filter")
my.status_select.add_style("font-size: 0.9em")
my.status_select.set_option("values", processes)
my.status_select.set_option("labels", labels)
my.status_select.set_persistence()
my.status_select.set_submit_onchange()
status = my.status_select.get_value()
my.add(HtmlElement.b("Status:"))
my.add( my.status_select )
state = WebState.get()
state.add_state("status_filter", status)
def alter_search(my, search):
status_value = my.get_value()
if status_value == "artist":
where = "(status is NULL or %s)" \
% Search.get_regex_filter('status', status_value)
search.add_where( where )
elif status_value != 'all':
where = Search.get_regex_filter('status', status_value)
search.add_where( where )
def get_value(my):
value = my.status_select.get_value()
return value
class FlashStatusViewFilter(FlashStatusFilter):
'''A filter for the different view of the Serial Status Wdg'''
def init(my):
filter_vals = [SerialStatusWdg.CONNECTION_VIEW, SerialStatusWdg.SIMPLE_VIEW]
labels = [x.capitalize() for x in filter_vals]
my.status_select = SelectWdg("status_view_filter")
my.status_select.add_style("font-size: 0.9em")
my.status_select.set_option("values", filter_vals)
my.status_select.set_option("labels", labels)
my.status_select.set_option("default", SerialStatusWdg.SIMPLE_VIEW)
my.status_select.set_persistence()
my.status_select.set_submit_onchange()
my.status_select.get_value()
span = SpanWdg(HtmlElement.b("View:"), css='small')
span.add(my.status_select)
my.add(span)
def alter_search(my, search):
pass
|
epl-1.0
|
EvenStrangest/tensorflow
|
tensorflow/python/framework/function_test.py
|
8
|
20920
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
import tensorflow as tf
from tensorflow.python.framework import function
from tensorflow.python.ops import functional_ops
def _OptimizerOptions():
for cse in [False, True]:
for inline in [False, True]:
for cfold in [False, True]:
yield tf.ConfigProto(
graph_options=tf.GraphOptions(optimizer_options=tf.OptimizerOptions(
opt_level=tf.OptimizerOptions.L0,
do_common_subexpression_elimination=cse,
do_function_inlining=inline,
do_constant_folding=cfold)))
class FunctionTest(tf.test.TestCase):
def _mat(self, x):
return np.array([x]).astype("float32").reshape([1, 1])
def testBasic(self):
g = tf.Graph()
# Define a function
# foo(a:float, b:float, c:float)->u:float,v:float,w:float
# u = matmul(a, b) + c
# v = u^2
# w = u + v
# TODO(zhifengc): replaces w/ a nicer @decorator sugar.
foo = tf.Graph()
with foo.as_default():
a = tf.placeholder(tf.float32, name="a")
b = tf.placeholder(tf.float32, name="b")
c = tf.placeholder(tf.float32, name="c")
u = tf.add(tf.matmul(a, b), c, name="u")
v = tf.square(u, name="v")
w = tf.add_n([u, v], name="w")
fdef = function.graph_to_function_def(foo, "foo", [a, b, c], [u, v, w])
g._add_function(fdef)
# Compute 2 * 3 + 4 and its square.
with g.as_default(), tf.Session() as sess:
two = tf.constant(self._mat(2.0), name="two")
three = tf.constant(self._mat(3.0), name="three")
four = tf.constant(self._mat(4.0), name="four")
# TODO(zhifengc): w/ @decorator sugar, we will just do:
# y, s, t = foo_func(two, three, four)
# The graph contains two ops each of which calls foo.
u0, v0, w0 = g.create_op("foo",
[two, three, four],
[tf.float32, tf.float32, tf.float32],
compute_shapes=False).outputs
u1, v1, w1 = g.create_op("foo",
[four, two, three],
[tf.float32, tf.float32, tf.float32],
compute_shapes=False).outputs
# Checks some property of the graph def.
gdef = g.as_graph_def()
self.assertEqual(len(gdef.node), 5) # 5 nodes added.
self.assertEqual(len(gdef.library.function), 1) # 1 function is defined.
for _ in xrange(10):
        # Run the graph, which is basically two function calls.
ans_u0, ans_v0, ans_w0, ans_u1, ans_v1, ans_w1 = sess.run([u0, v0, w0,
u1, v1, w1])
self.assertAllEqual(ans_u0, self._mat(10.0)) # 2 * 3 + 4 = 10
self.assertAllEqual(ans_v0, self._mat(100.0)) # 10^2 = 100
self.assertAllEqual(ans_w0, self._mat(110.0)) # 100 + 10 = 110
self.assertAllEqual(ans_u1, self._mat(11.0)) # 4 * 2 + 3 = 11
self.assertAllEqual(ans_v1, self._mat(121.0)) # 11^2 = 121
self.assertAllEqual(ans_w1, self._mat(132.0)) # 11 + 121 = 132
def testDefineFunction2Args(self):
def APlus2B(a, b):
return a + b * 2
with tf.Graph().as_default():
f_def = function.define_function(APlus2B, {"a": tf.float32,
"b": tf.float32})
one = tf.constant([1.0])
two = tf.constant([2.0])
call = function.call_function(f_def, one, two)
self.assertEquals("APlus2B", call.op.name)
with tf.Session() as sess:
self.assertAllEqual([5.0], sess.run(call))
def testGradientFunc(self):
def XSquarePlusOne(x):
return x * x + 1.0
def XSquarePlusOneGrad(x, dy):
dx = functional_ops._symbolic_gradient(
input=[x, dy],
Tout=[tf.float32],
          # This relies on define_function below registering the function
          # above under the name "XSquarePlusOneFn".
f="XSquarePlusOneFn",
name="dx")
return dx
g = tf.Graph()
with g.as_default():
# This line registers the Function "XSquarePlusOneFn"
f = function.define_function(
XSquarePlusOne, {"x": tf.float32}, func_name="XSquarePlusOneFn")
g = function.define_function(XSquarePlusOneGrad, {"x": tf.float32,
"dy": tf.float32})
epsilon = tf.constant([0.1])
two = tf.constant([2.0])
call_f = function.call_function(f, two)
call_g = function.call_function(g, two, epsilon)
with tf.Session() as sess:
self.assertAllClose([5.0], sess.run(call_f))
self.assertAllClose([0.4], sess.run(call_g))
def testTanhSymGrad(self):
g = tf.Graph()
with g.as_default():
@function.Defun(tf.float32)
def Forward(x):
return tf.reduce_sum(tf.tanh(x))
x = tf.placeholder(tf.float32)
y = Forward(x)
dx = tf.gradients([y], [x])
inp = np.array([-1, 1, 2, -2], dtype=np.float32)
feed = {x: inp}
cfg = tf.ConfigProto(
graph_options=tf.GraphOptions(
optimizer_options=tf.OptimizerOptions(
opt_level=tf.OptimizerOptions.L1,
do_function_inlining=True)))
with tf.Session(graph=g, config=cfg) as sess:
out, = sess.run(dx, feed)
self.assertAllClose(1 - np.square(np.tanh(inp)), out)
def testCustomGradient(self):
g = tf.Graph()
dtype = tf.float32
with g.as_default():
@function.Defun(dtype, dtype, dtype)
def XentLossGrad(logits, labels, dloss):
dlogits = tf.reshape(dloss, [-1, 1]) * (tf.nn.softmax(logits) - labels)
dlabels = tf.zeros_like(labels)
# Takes exp(dlogits) to differentiate it from the "correct" gradient.
return tf.exp(dlogits), dlabels
@function.Defun(dtype, dtype, grad_func=XentLossGrad)
def XentLoss(logits, labels):
return tf.reduce_sum(labels * tf.log(tf.nn.softmax(logits)), 1)
logits = tf.placeholder(dtype)
labels = tf.placeholder(dtype)
loss = XentLoss(logits, labels)
dlogits = tf.gradients([loss], [logits])
x = np.random.uniform(-10., 10., size=(4, 9)).astype(np.float32)
prob = np.exp(x) / np.sum(np.exp(x), 1, keepdims=1)
y = np.random.uniform(-10., 10., size=(4, 9)).astype(np.float32)
for cfg in _OptimizerOptions():
print("cfg = ", cfg)
with tf.Session(graph=g, config=cfg) as sess:
out, = sess.run(dlogits, {logits: x, labels: y})
self.assertAllClose(out, np.exp(prob - y))
def testCustomGradientError(self):
g = tf.Graph()
dtype = tf.float32
with g.as_default():
@function.Defun(dtype, dtype, dtype)
def Grad(x, dy, dz):
# Should have returned 1 result.
return x, dy + dz
@function.Defun(dtype, grad_func=Grad)
def Forward(x):
return x, x
inp = tf.placeholder(dtype)
out = tf.add_n(Forward(inp))
dinp = tf.gradients(out, [inp])
x = np.random.uniform(-10., 10., size=(4, 9)).astype(np.float32)
with tf.Session(graph=g) as sess:
with self.assertRaisesRegexp(
tf.errors.InvalidArgumentError,
"SymGrad expects to return 1.*but get 2.*instead"):
_ = sess.run(dinp, {inp: x})
def testSymGradShape(self):
g = tf.Graph()
with g.as_default():
x = tf.placeholder(tf.float32, [25, 4])
y = tf.placeholder(tf.float32, [200, 100])
dz = tf.placeholder(tf.float32, [1])
# We assume Foo is a function of (x, y) -> (z) Then, Foo's
# gradient function is (x, y, dz) -> (dx, dy). dx's shape
# should be the same as x's; and dy's shape should be the same
# as y's.
dx, dy = functional_ops._symbolic_gradient(input=[x, y, dz],
Tout=[tf.float32] * 2,
f="Foo")
self.assertEquals(x.get_shape(), dx.get_shape())
self.assertEquals(y.get_shape(), dy.get_shape())
def testZNoDepOnY(self):
with tf.Graph().as_default():
      # z = Foo(x, y). z does not depend on y.
@function.Defun(tf.float32, tf.float32)
def Foo(x, y):
return x * 2
x = tf.constant(1.0)
y = tf.constant(2.0)
z = Foo(x, y)
dx, dy = tf.gradients([z], [x, y])
with tf.Session() as sess:
dx_val, dy_val = sess.run([dx, dy])
self.assertEquals([2.0], dx_val)
self.assertEquals([0.0], dy_val)
def testDefineFunctionNoArgs(self):
def AConstant():
return tf.constant([42])
with tf.Graph().as_default():
f_def = function.define_function(AConstant, {})
call = function.call_function(f_def)
self.assertEquals("AConstant", call.op.name)
with tf.Session() as sess:
self.assertAllEqual([42], sess.run(call))
def testDefineFunctionNames(self):
def Foo(a):
return a + 1
with tf.Graph().as_default():
f_def = function.define_function(Foo, {"a": tf.float32})
one = tf.constant([1.0])
call1 = function.call_function(f_def, one)
self.assertEquals("Foo", call1.op.name)
call2 = function.call_function(f_def, one)
self.assertEquals("Foo_1", call2.op.name)
call3 = function.call_function(f_def, one, name="mine")
self.assertEquals("mine", call3.op.name)
with tf.name_scope("my"):
call4 = function.call_function(f_def, one, name="precious")
self.assertEquals("my/precious", call4.op.name)
def testDefineErrors(self):
def NoResult():
pass
def DefaultArg(unused_a=12):
return tf.constant([1])
def KwArgs(**unused_kwargs):
return tf.constant([1])
def PlusMinus(a, b):
return a + b, b - a
with tf.Graph().as_default():
with self.assertRaisesRegexp(ValueError, "return at least one tensor"):
function.define_function(NoResult, {})
with self.assertRaisesRegexp(ValueError, "are not supported"):
function.define_function(DefaultArg, {})
with self.assertRaisesRegexp(ValueError, "are not supported"):
function.define_function(KwArgs, {})
with self.assertRaisesRegexp(ValueError, "specified input types"):
function.define_function(PlusMinus, {})
with self.assertRaisesRegexp(ValueError, "specified input types"):
function.define_function(PlusMinus, {"c": tf.float32})
with self.assertRaisesRegexp(ValueError, "type for argument: b"):
function.define_function(PlusMinus, {"a": tf.float32,
"c": tf.float32})
with self.assertRaisesRegexp(ValueError, "specified input types"):
function.define_function(PlusMinus, {"a": tf.float32,
"b": tf.float32,
"c": tf.float32})
def testCallErrors(self):
def Const():
return tf.constant(1)
def PlusOne(a):
return a + 1
def PlusMinus(a, b):
return a + b, b - a
with tf.Graph().as_default():
one = tf.constant([1])
two = tf.constant([2])
const = function.define_function(Const, {})
plus_one = function.define_function(PlusOne, {"a": tf.int32})
plus_minus = function.define_function(PlusMinus, {"a": tf.int32,
"b": tf.int32})
function.call_function(const)
with self.assertRaisesRegexp(ValueError, "arguments: 0"):
function.call_function(const, one)
with self.assertRaisesRegexp(ValueError, "arguments: 0"):
function.call_function(const, one, two)
with self.assertRaisesRegexp(ValueError, "arguments: 1"):
function.call_function(plus_one)
function.call_function(plus_one, one)
with self.assertRaisesRegexp(ValueError, "arguments: 1"):
function.call_function(plus_one, one, two)
with self.assertRaisesRegexp(ValueError, "arguments: 2"):
function.call_function(plus_minus)
with self.assertRaisesRegexp(ValueError, "arguments: 2"):
function.call_function(plus_minus, one)
function.call_function(plus_minus, one, two)
function.call_function(plus_one, one, name="p1")
with self.assertRaisesRegexp(ValueError, "Unknown keyword arguments"):
function.call_function(plus_one, one, device="/gpu:0")
def testFunctionDecorator(self):
with tf.Graph().as_default():
@function.Defun(tf.float32)
def Minus1(b):
return b - 1.0
two = tf.constant([2.])
call1 = Minus1(two)
self.assertTrue(isinstance(Minus1, function._DefinedFunction))
self.assertEqual(Minus1.name, "Minus1")
# pylint: disable=unexpected-keyword-arg
call2 = Minus1(call1, name="next")
# pylint:enable=unexpected-keyword-arg
self.assertEquals("next", call2.op.name)
with tf.Session() as sess:
self.assertAllEqual([1], sess.run(call1))
self.assertAllEqual([0], sess.run(call2))
def testNestedFunction(self):
with tf.Graph().as_default():
@function.Defun(tf.float32)
def Cube(x):
return x * x * x
@function.Defun(tf.float32, tf.float32)
def CubeXPlusY(x, y):
return Cube(x) + y
z = CubeXPlusY(tf.constant(3.0), tf.constant(-2.0))
with self.test_session():
self.assertAllEqual(z.eval(), 25.0)
def testReduction(self):
g = tf.Graph()
# BN0 is computing batch normed matrix along rows.
def BN0(x):
mean = tf.reduce_mean(x, [0])
var = tf.reduce_mean(tf.square(x - mean)) # biased var
rstd = tf.rsqrt(var + 1e-8)
return (x - mean) * rstd
with g.as_default():
# Wraps BatchNorm in a tf function.
@function.Defun(tf.float32)
def BN1(x):
return BN0(x)
x = tf.placeholder(tf.float32)
y0 = BN0(x) # A plain graph
y1 = BN1(x) # A tf function
dx0, = tf.gradients([y0], [x])
dx1, = tf.gradients([y1], [x])
# Both should produce the same result and gradient.
with self.test_session(graph=g) as sess:
vals = sess.run([y0, y1, dx0, dx1], {x: np.random.uniform(size=(3, 7))})
self.assertAllClose(vals[0], vals[1])
self.assertAllClose(vals[2], vals[3])
class UnrollLSTMTest(tf.test.TestCase):
BATCH_SIZE = 16
LSTM_DIMS = 32
NUM_UNROLL = 20
def _Weights(self):
dims = self.LSTM_DIMS
return tf.random_uniform([2 * dims, 4 * dims], -1, 1, seed=123456)
def _Input(self):
return tf.random_uniform(
[self.NUM_UNROLL, self.BATCH_SIZE, self.LSTM_DIMS],
seed=654321)
# Helper to construct a LSTM cell graph.
@classmethod
def LSTMCell(cls, x, mprev, cprev, weights):
xm = tf.concat(1, [x, mprev])
i_i, i_g, f_g, o_g = tf.split(1, 4, tf.matmul(xm, weights))
new_c = tf.sigmoid(f_g) * cprev + tf.sigmoid(i_g) * tf.tanh(i_i)
new_c = tf.clip_by_value(new_c, -50.0, 50.0)
new_m = tf.sigmoid(o_g) * tf.tanh(new_c)
return new_m, new_c
def _BuildForward(self, weights, inp, mode="cell"):
def Loop(cell, w, i):
x = tf.unpack(i, self.NUM_UNROLL)
m = tf.zeros_like(x[0])
c = tf.zeros_like(x[0])
for i in range(self.NUM_UNROLL):
m, c = cell(x[i], m, c, w)
return m
cell = UnrollLSTMTest.LSTMCell
if mode == "complete":
# Constructs the complete graph in python.
return Loop(cell, weights, inp)
cell = function.Defun(x=tf.float32,
mprev=tf.float32,
cprev=tf.float32,
weights=tf.float32)(cell)
if mode == "cell":
# Just represent the LSTM as a function.
return Loop(cell, weights, inp)
if mode == "loop":
# Wraps the whole loop as a function.
@function.Defun(tf.float32, tf.float32)
def LSTMLoop(w, i):
return Loop(cell, w, i)
return LSTMLoop(weights, inp)
if mode == "loop10":
# Wraps 10 lstm steps into one function, and the whole loop
# into another calling the formers.
# Groups 10 steps at a time.
@function.Defun(tf.float32, tf.float32, tf.float32,
*([tf.float32] * 10))
def Loop10(w, m, c, *args):
for x in args:
m, c = cell(x, m, c, w)
return m, c
@function.Defun(tf.float32, tf.float32)
def LSTMLoop10(weights, inp):
x = tf.unpack(inp, self.NUM_UNROLL)
m = tf.zeros_like(x[0])
c = tf.zeros_like(x[0])
assert self.NUM_UNROLL % 10 == 0
for i in range(0, self.NUM_UNROLL, 10):
m, c = Loop10(weights, m, c, *x[i:i + 10])
return m
return LSTMLoop10(weights, inp)
def testUnrollLSTM(self):
# Run one step of the unrolled lstm graph.
def RunForward(mode, cfg=None):
print("mode = ", mode)
g = tf.Graph()
start = time.time()
with g.as_default():
weights = self._Weights()
inp = self._Input()
m = self._BuildForward(weights, inp, mode)
gdef = g.as_graph_def()
finish = time.time()
print("time: ", finish - start, " txt size: ", len(str(gdef)),
"gdef bin size: ", len(gdef.SerializeToString()))
with g.as_default(), tf.Session(config=cfg) as sess:
return sess.run(m)
mv0 = RunForward("complete")
for cfg in _OptimizerOptions():
print("cfg = ", cfg)
mv1 = RunForward("cell", cfg)
mv2 = RunForward("loop", cfg)
mv3 = RunForward("loop10", cfg)
self.assertAllClose(mv0, mv1, rtol=1e-4)
self.assertAllClose(mv0, mv2, rtol=1e-4)
self.assertAllClose(mv0, mv3, rtol=1e-4)
def testUnrollLSTMGrad(self):
# Run one step of the unrolled lstm graph.
def RunForwardBackward(mode, cfg=None):
print("mode = ", mode)
g = tf.Graph()
start = time.time()
with g.as_default():
weights = self._Weights()
inp = self._Input()
m = self._BuildForward(weights, inp, mode)
loss = tf.reduce_sum(tf.square(m))
dw = tf.gradients([loss], [weights])
gdef = g.as_graph_def()
finish = time.time()
print("time: ", finish - start, " txt size: ", len(str(gdef)),
"gdef bin size: ", len(gdef.SerializeToString()))
with g.as_default(), tf.Session(config=cfg) as sess:
return sess.run(dw)
d0 = RunForwardBackward("complete")
for cfg in _OptimizerOptions():
print("cfg = ", cfg)
d1 = RunForwardBackward("cell", cfg)
d2 = RunForwardBackward("loop", cfg)
d3 = RunForwardBackward("loop10", cfg)
self.assertAllClose(d0, d1, rtol=1e-4)
self.assertAllClose(d0, d2, rtol=1e-4)
self.assertAllClose(d0, d3, rtol=1e-4)
class FunctionInlineControlTest(tf.test.TestCase):
def testFoo(self):
dtype = tf.float32
cfg = tf.ConfigProto(
graph_options=tf.GraphOptions(optimizer_options=tf.OptimizerOptions(
opt_level=tf.OptimizerOptions.L0,
do_common_subexpression_elimination=True,
do_function_inlining=True,
do_constant_folding=True)))
for noinline in [False, True]:
g = tf.Graph()
with g.as_default():
@function.Defun(dtype)
def Cell(v):
# If v is a vector [n, 1], x is a big square matrix.
x = tf.tanh(v + tf.transpose(v, [1, 0]))
return tf.reduce_sum(x, 1, keep_dims=True)
@function.Defun(dtype)
def Forward(x):
for _ in range(10):
x = Cell(x, noinline=noinline)
return tf.reduce_sum(x, [0, 1])
x = tf.placeholder(dtype)
y = Forward(x)
dx, = tf.gradients([y], [x])
np.random.seed(321)
inp = np.random.uniform(-1, 1, [16, 1]).astype(np.float32)
with tf.Session(graph=g, config=cfg) as sess:
ans = sess.run([y, dx], {x: inp})
print(ans[0], np.sum(ans[1]))
self.assertAllClose(ans[0], 255.971, rtol=1e-3)
self.assertAllClose(np.sum(ans[1]), 13.0408, rtol=1e-3)
if __name__ == "__main__":
tf.test.main()
|
apache-2.0
|
bregman-arie/ansible
|
lib/ansible/modules/cloud/amazon/aws_ssm_parameter_store.py
|
18
|
7495
|
#!/usr/bin/python
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: aws_ssm_parameter_store
short_description: Manage key-value pairs in aws parameter store.
description:
- Manage key-value pairs in aws parameter store.
version_added: "2.5"
options:
name:
description:
- parameter key name.
required: true
description:
description:
      - parameter key description.
required: false
value:
description:
- Parameter value.
required: false
state:
description:
- Creates or modifies an existing parameter
- Deletes a parameter
required: false
choices: ['present', 'absent']
default: present
string_type:
description:
- Parameter String type
required: false
choices: ['String', 'StringList', 'SecureString']
default: String
decryption:
description:
- Work with SecureString type to get plain text secrets
- Boolean
required: false
default: True
key_id:
description:
- aws KMS key to decrypt the secrets.
required: false
default: aws/ssm (this key is automatically generated at the first parameter created).
overwrite_value:
description:
- Option to overwrite an existing value if it already exists.
- String
required: false
version_added: "2.6"
choices: ['never', 'changed', 'always']
default: changed
region:
description:
- region.
required: false
author:
- Nathan Webster (@nathanwebsterdotme)
- Bill Wang ([email protected])
- Michael De La Rue (@mikedlr)
extends_documentation_fragment: aws
requirements: [ botocore, boto3 ]
'''
EXAMPLES = '''
- name: Create or update key/value pair in aws parameter store
aws_ssm_parameter_store:
name: "Hello"
description: "This is your first key"
value: "World"
- name: Delete the key
aws_ssm_parameter_store:
name: "Hello"
state: absent
- name: Create or update secure key/value pair with default kms key (aws/ssm)
aws_ssm_parameter_store:
name: "Hello"
description: "This is your first key"
string_type: "SecureString"
value: "World"
- name: Create or update secure key/value pair with nominated kms key
aws_ssm_parameter_store:
name: "Hello"
description: "This is your first key"
string_type: "SecureString"
key_id: "alias/demo"
value: "World"
- name: Always update a parameter store value and create a new version
aws_ssm_parameter_store:
name: "overwrite_example"
description: "This example will always overwrite the value"
string_type: "String"
value: "Test1234"
overwrite_value: "always"
- name: recommend to use with aws_ssm lookup plugin
debug: msg="{{ lookup('aws_ssm', 'hello') }}"
'''
RETURN = '''
put_parameter:
    description: Add one or more parameters to the system.
returned: success
type: dictionary
delete_parameter:
description: Delete a parameter from the system.
returned: success
type: dictionary
'''
from ansible.module_utils.aws.core import AnsibleAWSModule
from ansible.module_utils.ec2 import boto3_conn, get_aws_connection_info
try:
from botocore.exceptions import ClientError
except ImportError:
pass # will be captured by imported HAS_BOTO3
def update_parameter(client, module, args):
changed = False
response = {}
try:
response = client.put_parameter(**args)
changed = True
except ClientError as e:
module.fail_json_aws(e, msg="setting parameter")
return changed, response
def create_update_parameter(client, module):
changed = False
existing_parameter = None
response = {}
args = dict(
Name=module.params.get('name'),
Value=module.params.get('value'),
Type=module.params.get('string_type')
)
    if module.params.get('overwrite_value') in ("always", "changed"):
args.update(Overwrite=True)
else:
args.update(Overwrite=False)
if module.params.get('description'):
args.update(Description=module.params.get('description'))
if module.params.get('string_type') == 'SecureString':
args.update(KeyId=module.params.get('key_id'))
try:
existing_parameter = client.get_parameter(Name=args['Name'], WithDecryption=True)
    except ClientError:
pass
if existing_parameter:
if (module.params.get('overwrite_value') == 'always'):
(changed, response) = update_parameter(client, module, args)
elif (module.params.get('overwrite_value') == 'changed'):
if existing_parameter['Parameter']['Type'] != args['Type']:
(changed, response) = update_parameter(client, module, args)
if existing_parameter['Parameter']['Value'] != args['Value']:
(changed, response) = update_parameter(client, module, args)
            if args.get('Description'):
# Description field not available from get_parameter function so get it from describe_parameters
describe_existing_parameter = None
try:
describe_existing_parameter = client.describe_parameters(Filters=[{"Key": "Name", "Values": [args['Name']]}])
except ClientError as e:
module.fail_json_aws(e, msg="getting description value")
if describe_existing_parameter['Parameters'][0]['Description'] != args['Description']:
(changed, response) = update_parameter(client, module, args)
else:
(changed, response) = update_parameter(client, module, args)
return changed, response
def delete_parameter(client, module):
response = {}
try:
response = client.delete_parameter(
Name=module.params.get('name')
)
except ClientError as e:
if e.response['Error']['Code'] == 'ParameterNotFound':
return False, {}
module.fail_json_aws(e, msg="deleting parameter")
return True, response
def setup_client(module):
region, ec2_url, aws_connect_params = get_aws_connection_info(module, boto3=True)
connection = boto3_conn(module, conn_type='client', resource='ssm', region=region, endpoint=ec2_url, **aws_connect_params)
return connection
def setup_module_object():
argument_spec = dict(
name=dict(required=True),
description=dict(),
value=dict(required=False, no_log=True),
state=dict(default='present', choices=['present', 'absent']),
string_type=dict(default='String', choices=['String', 'StringList', 'SecureString']),
decryption=dict(default=True, type='bool'),
key_id=dict(default="alias/aws/ssm"),
overwrite_value=dict(default='changed', choices=['never', 'changed', 'always']),
region=dict(required=False),
)
return AnsibleAWSModule(
argument_spec=argument_spec,
)
def main():
module = setup_module_object()
state = module.params.get('state')
client = setup_client(module)
invocations = {
"present": create_update_parameter,
"absent": delete_parameter,
}
(changed, response) = invocations[state](client, module)
module.exit_json(changed=changed, response=response)
if __name__ == '__main__':
main()
|
gpl-3.0
|
a-doumoulakis/tensorflow
|
tensorflow/python/debug/cli/tensor_format_test.py
|
41
|
37994
|
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Unit tests for tensor formatter."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.core.framework import tensor_pb2
from tensorflow.core.framework import tensor_shape_pb2
from tensorflow.core.framework import types_pb2
from tensorflow.python.debug.cli import tensor_format
from tensorflow.python.debug.lib import debug_data
from tensorflow.python.framework import test_util
from tensorflow.python.platform import googletest
class RichTextLinesTest(test_util.TensorFlowTestCase):
def setUp(self):
np.set_printoptions(
precision=8, threshold=1000, edgeitems=3, linewidth=75)
def _checkTensorMetadata(self, tensor, annotations):
self.assertEqual(
{"dtype": tensor.dtype, "shape": tensor.shape},
annotations["tensor_metadata"])
def _checkBeginIndices(self, expected_indices, annot):
self.assertEqual({tensor_format.BEGIN_INDICES_KEY: expected_indices},
annot)
def _checkOmittedIndices(self, expected_indices, annot):
self.assertEqual({tensor_format.OMITTED_INDICES_KEY: expected_indices},
annot)
def testFormatZeroDimensionTensor(self):
a = np.array(42.0, dtype=np.float32)
out = tensor_format.format_tensor(a, "a")
self.assertEqual(["Tensor \"a\":", "", "array(42.0, dtype=float32)"],
out.lines)
self._checkTensorMetadata(a, out.annotations)
def testFormatTensorHighlightsTensorNameWithoutDebugOp(self):
tensor_name = "a_tensor:0"
a = np.zeros(2)
out = tensor_format.format_tensor(
a, tensor_name, np_printoptions={"linewidth": 40})
self.assertEqual([(8, 8 + len(tensor_name), "bold")], out.font_attr_segs[0])
def testFormatTensorHighlightsTensorNameWithDebugOp(self):
tensor_name = "a_tensor:0"
debug_op = "DebugIdentity"
a = np.zeros(2)
out = tensor_format.format_tensor(
a, "%s:%s" % (tensor_name, debug_op), np_printoptions={"linewidth": 40})
self.assertEqual([(8, 8 + len(tensor_name), "bold"),
(8 + len(tensor_name) + 1,
8 + len(tensor_name) + 1 + len(debug_op), "yellow")],
out.font_attr_segs[0])
def testFormatTensor1DNoEllipsis(self):
a = np.zeros(20)
out = tensor_format.format_tensor(
a, "a", np_printoptions={"linewidth": 40})
self.assertEqual([
"Tensor \"a\":",
"",
"array([ 0., 0., 0., 0., 0., 0.,",
" 0., 0., 0., 0., 0., 0.,",
" 0., 0., 0., 0., 0., 0.,",
" 0., 0.])",
], out.lines)
self._checkTensorMetadata(a, out.annotations)
# Check annotations for beginning indices of the lines.
self._checkBeginIndices([0], out.annotations[2])
self._checkBeginIndices([6], out.annotations[3])
self._checkBeginIndices([12], out.annotations[4])
self._checkBeginIndices([18], out.annotations[5])
def testFormatTensor2DNoEllipsisNoRowBreak(self):
a = np.linspace(0.0, 1.0 - 1.0 / 16.0, 16).reshape([4, 4])
out = tensor_format.format_tensor(a, "a")
self.assertEqual([
"Tensor \"a\":",
"",
"array([[ 0. , 0.0625, 0.125 , 0.1875],",
" [ 0.25 , 0.3125, 0.375 , 0.4375],",
" [ 0.5 , 0.5625, 0.625 , 0.6875],",
" [ 0.75 , 0.8125, 0.875 , 0.9375]])",
], out.lines)
self._checkTensorMetadata(a, out.annotations)
# Check annotations for the beginning indices of the lines.
for i in xrange(2, 6):
self._checkBeginIndices([i - 2, 0], out.annotations[i])
def testFormatTensorSuppressingTensorName(self):
a = np.linspace(0.0, 1.0 - 1.0 / 16.0, 16).reshape([4, 4])
out = tensor_format.format_tensor(a, None)
self.assertEqual([
"array([[ 0. , 0.0625, 0.125 , 0.1875],",
" [ 0.25 , 0.3125, 0.375 , 0.4375],",
" [ 0.5 , 0.5625, 0.625 , 0.6875],",
" [ 0.75 , 0.8125, 0.875 , 0.9375]])",
], out.lines)
self._checkTensorMetadata(a, out.annotations)
# Check annotations for the beginning indices of the lines.
for i in xrange(4):
self._checkBeginIndices([i, 0], out.annotations[i])
def testFormatTensorWithMetadata(self):
a = np.linspace(0.0, 1.0 - 1.0 / 16.0, 16).reshape([4, 4])
out = tensor_format.format_tensor(a, "a", include_metadata=True)
self.assertEqual([
"Tensor \"a\":",
" dtype: float64",
" shape: (4, 4)",
"",
"array([[ 0. , 0.0625, 0.125 , 0.1875],",
" [ 0.25 , 0.3125, 0.375 , 0.4375],",
" [ 0.5 , 0.5625, 0.625 , 0.6875],",
" [ 0.75 , 0.8125, 0.875 , 0.9375]])",
], out.lines)
self._checkTensorMetadata(a, out.annotations)
# Check annotations for the beginning indices of the lines.
for i in xrange(4, 7):
self._checkBeginIndices([i - 4, 0], out.annotations[i])
def testFormatTensor2DNoEllipsisWithRowBreak(self):
a = np.linspace(0.0, 1.0 - 1.0 / 40.0, 40).reshape([2, 20])
out = tensor_format.format_tensor(
a, "a", np_printoptions={"linewidth": 50})
self.assertEqual(
{"dtype": a.dtype, "shape": a.shape},
out.annotations["tensor_metadata"])
self.assertEqual([
"Tensor \"a\":",
"",
"array([[ 0. , 0.025, 0.05 , 0.075, 0.1 ,",
" 0.125, 0.15 , 0.175, 0.2 , 0.225,",
" 0.25 , 0.275, 0.3 , 0.325, 0.35 ,",
" 0.375, 0.4 , 0.425, 0.45 , 0.475],",
" [ 0.5 , 0.525, 0.55 , 0.575, 0.6 ,",
" 0.625, 0.65 , 0.675, 0.7 , 0.725,",
" 0.75 , 0.775, 0.8 , 0.825, 0.85 ,",
" 0.875, 0.9 , 0.925, 0.95 , 0.975]])",
], out.lines)
self._checkTensorMetadata(a, out.annotations)
# Check annotations for the beginning indices of the lines.
self._checkBeginIndices([0, 0], out.annotations[2])
self._checkBeginIndices([0, 5], out.annotations[3])
self._checkBeginIndices([0, 10], out.annotations[4])
self._checkBeginIndices([0, 15], out.annotations[5])
self._checkBeginIndices([1, 0], out.annotations[6])
self._checkBeginIndices([1, 5], out.annotations[7])
self._checkBeginIndices([1, 10], out.annotations[8])
self._checkBeginIndices([1, 15], out.annotations[9])
def testFormatTensor3DNoEllipsis(self): # TODO(cais): Test name.
a = np.linspace(0.0, 1.0 - 1.0 / 24.0, 24).reshape([2, 3, 4])
out = tensor_format.format_tensor(a, "a")
self.assertEqual([
"Tensor \"a\":",
"",
"array([[[ 0. , 0.04166667, 0.08333333, 0.125 ],",
" [ 0.16666667, 0.20833333, 0.25 , 0.29166667],",
" [ 0.33333333, 0.375 , 0.41666667, 0.45833333]],",
"",
" [[ 0.5 , 0.54166667, 0.58333333, 0.625 ],",
" [ 0.66666667, 0.70833333, 0.75 , 0.79166667],",
" [ 0.83333333, 0.875 , 0.91666667, 0.95833333]]])",
], out.lines)
self._checkTensorMetadata(a, out.annotations)
# Check annotations for beginning indices of the lines.
self._checkBeginIndices([0, 0, 0], out.annotations[2])
self._checkBeginIndices([0, 1, 0], out.annotations[3])
self._checkBeginIndices([0, 2, 0], out.annotations[4])
self.assertNotIn(5, out.annotations)
self._checkBeginIndices([1, 0, 0], out.annotations[6])
self._checkBeginIndices([1, 1, 0], out.annotations[7])
self._checkBeginIndices([1, 2, 0], out.annotations[8])
def testFormatTensor3DNoEllipsisWithArgwhereHighlightWithMatches(self):
a = np.linspace(0.0, 1.0 - 1.0 / 24.0, 24).reshape([2, 3, 4])
lower_bound = 0.26
upper_bound = 0.5
def highlight_filter(x):
return np.logical_and(x > lower_bound, x < upper_bound)
highlight_options = tensor_format.HighlightOptions(
highlight_filter, description="between 0.26 and 0.5")
out = tensor_format.format_tensor(
a, "a", highlight_options=highlight_options)
self.assertEqual([
"Tensor \"a\": "
"Highlighted(between 0.26 and 0.5): 5 of 24 element(s) (20.83%)",
"",
"array([[[ 0. , 0.04166667, 0.08333333, 0.125 ],",
" [ 0.16666667, 0.20833333, 0.25 , 0.29166667],",
" [ 0.33333333, 0.375 , 0.41666667, 0.45833333]],",
"",
" [[ 0.5 , 0.54166667, 0.58333333, 0.625 ],",
" [ 0.66666667, 0.70833333, 0.75 , 0.79166667],",
" [ 0.83333333, 0.875 , 0.91666667, 0.95833333]]])",
], out.lines)
self._checkTensorMetadata(a, out.annotations)
# Check annotations for beginning indices of the lines.
self._checkBeginIndices([0, 0, 0], out.annotations[2])
self._checkBeginIndices([0, 1, 0], out.annotations[3])
self._checkBeginIndices([0, 2, 0], out.annotations[4])
self.assertNotIn(5, out.annotations)
self._checkBeginIndices([1, 0, 0], out.annotations[6])
self._checkBeginIndices([1, 1, 0], out.annotations[7])
self._checkBeginIndices([1, 2, 0], out.annotations[8])
# Check font attribute segments for highlighted elements.
self.assertNotIn(2, out.font_attr_segs)
self.assertEqual([(49, 59, "bold")], out.font_attr_segs[3])
self.assertEqual([(10, 20, "bold"), (23, 28, "bold"), (36, 46, "bold"),
(49, 59, "bold")], out.font_attr_segs[4])
self.assertNotIn(5, out.font_attr_segs)
self.assertNotIn(6, out.font_attr_segs)
self.assertNotIn(7, out.font_attr_segs)
self.assertNotIn(8, out.font_attr_segs)
def testFormatTensor3DNoEllipsisWithArgwhereHighlightWithNoMatches(self):
a = np.linspace(0.0, 1.0 - 1.0 / 24.0, 24).reshape([2, 3, 4])
def highlight_filter(x):
return x > 10.0
highlight_options = tensor_format.HighlightOptions(highlight_filter)
out = tensor_format.format_tensor(
a, "a", highlight_options=highlight_options)
self.assertEqual([
"Tensor \"a\": Highlighted: 0 of 24 element(s) (0.00%)", "",
"array([[[ 0. , 0.04166667, 0.08333333, 0.125 ],",
" [ 0.16666667, 0.20833333, 0.25 , 0.29166667],",
" [ 0.33333333, 0.375 , 0.41666667, 0.45833333]],", "",
" [[ 0.5 , 0.54166667, 0.58333333, 0.625 ],",
" [ 0.66666667, 0.70833333, 0.75 , 0.79166667],",
" [ 0.83333333, 0.875 , 0.91666667, 0.95833333]]])"
], out.lines)
self._checkTensorMetadata(a, out.annotations)
# Check annotations for beginning indices of the lines.
self._checkBeginIndices([0, 0, 0], out.annotations[2])
self._checkBeginIndices([0, 1, 0], out.annotations[3])
self._checkBeginIndices([0, 2, 0], out.annotations[4])
self.assertNotIn(5, out.annotations)
self._checkBeginIndices([1, 0, 0], out.annotations[6])
self._checkBeginIndices([1, 1, 0], out.annotations[7])
self._checkBeginIndices([1, 2, 0], out.annotations[8])
# Check font attribute segments for highlighted elements.
self.assertNotIn(2, out.font_attr_segs)
self.assertNotIn(3, out.font_attr_segs)
self.assertNotIn(4, out.font_attr_segs)
self.assertNotIn(5, out.font_attr_segs)
self.assertNotIn(6, out.font_attr_segs)
self.assertNotIn(7, out.font_attr_segs)
self.assertNotIn(8, out.font_attr_segs)
def testFormatTensorWithEllipses(self):
a = np.zeros([11, 11, 11])
out = tensor_format.format_tensor(
a, "a", False, np_printoptions={"threshold": 100, "edgeitems": 2})
self.assertEqual([
"Tensor \"a\":",
"",
"array([[[ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.],",
" ..., ",
" [ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.]],",
"",
" [[ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.],",
" ..., ",
" [ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.]],",
"",
" ..., ",
" [[ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.],",
" ..., ",
" [ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.]],",
"",
" [[ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.],",
" ..., ",
" [ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.]]])",
], out.lines)
self._checkTensorMetadata(a, out.annotations)
# Check annotations for beginning indices of the lines.
for i in xrange(2):
self._checkBeginIndices([i, 0, 0], out.annotations[i * 6 + 2])
self._checkBeginIndices([i, 1, 0], out.annotations[i * 6 + 3])
self._checkOmittedIndices([i, 2, 0], out.annotations[i * 6 + 4])
self._checkBeginIndices([i, 9, 0], out.annotations[i * 6 + 5])
self._checkBeginIndices([i, 10, 0], out.annotations[i * 6 + 6])
self.assertNotIn(i * 6 + 7, out.annotations)
p = 15
for i in xrange(2):
self._checkBeginIndices([9 + i, 0, 0], out.annotations[p + i * 6])
self._checkBeginIndices([9 + i, 1, 0], out.annotations[p + i * 6 + 1])
self._checkOmittedIndices(
[9 + i, 2, 0], out.annotations[p + i * 6 + 2])
self._checkBeginIndices([9 + i, 9, 0], out.annotations[p + i * 6 + 3])
self._checkBeginIndices([9 + i, 10, 0], out.annotations[p + i * 6 + 4])
if i < 1:
self.assertNotIn(p + i * 6 + 5, out.annotations)
def testFormatUninitializedTensor(self):
tensor_proto = tensor_pb2.TensorProto(
dtype=types_pb2.DataType.Value("DT_FLOAT"),
tensor_shape=tensor_shape_pb2.TensorShapeProto(
dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=1)]))
out = tensor_format.format_tensor(
debug_data.InconvertibleTensorProto(tensor_proto, False), "a")
self.assertEqual(["Tensor \"a\":", "", "Uninitialized tensor:"],
out.lines[:3])
self.assertEqual(str(tensor_proto).split("\n"), out.lines[3:])
def testFormatResourceTypeTensor(self):
tensor_proto = tensor_pb2.TensorProto(
dtype=types_pb2.DataType.Value("DT_RESOURCE"),
tensor_shape=tensor_shape_pb2.TensorShapeProto(
dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=1)]))
out = tensor_format.format_tensor(
debug_data.InconvertibleTensorProto(tensor_proto), "a")
self.assertEqual(["Tensor \"a\":", ""], out.lines[:2])
self.assertEqual(str(tensor_proto).split("\n"), out.lines[2:])
def testLocateTensorElement1DNoEllipsis(self):
a = np.zeros(20)
out = tensor_format.format_tensor(
a, "a", np_printoptions={"linewidth": 40})
self.assertEqual([
"Tensor \"a\":",
"",
"array([ 0., 0., 0., 0., 0., 0.,",
" 0., 0., 0., 0., 0., 0.,",
" 0., 0., 0., 0., 0., 0.,",
" 0., 0.])",
], out.lines)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [0])
self.assertFalse(is_omitted)
self.assertEqual(2, row)
self.assertEqual(8, start_col)
self.assertEqual(10, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [5])
self.assertFalse(is_omitted)
self.assertEqual(2, row)
self.assertEqual(33, start_col)
self.assertEqual(35, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [6])
self.assertFalse(is_omitted)
self.assertEqual(3, row)
self.assertEqual(8, start_col)
self.assertEqual(10, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [11])
self.assertFalse(is_omitted)
self.assertEqual(3, row)
self.assertEqual(33, start_col)
self.assertEqual(35, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [12])
self.assertFalse(is_omitted)
self.assertEqual(4, row)
self.assertEqual(8, start_col)
self.assertEqual(10, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [18])
self.assertFalse(is_omitted)
self.assertEqual(5, row)
self.assertEqual(8, start_col)
self.assertEqual(10, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [19])
self.assertFalse(is_omitted)
self.assertEqual(5, row)
self.assertEqual(13, start_col)
self.assertEqual(15, end_col)
with self.assertRaisesRegexp(
ValueError, "Indices exceed tensor dimensions"):
tensor_format.locate_tensor_element(out, [20])
with self.assertRaisesRegexp(
ValueError, "Indices contain negative"):
tensor_format.locate_tensor_element(out, [-1])
with self.assertRaisesRegexp(
ValueError, "Dimensions mismatch"):
tensor_format.locate_tensor_element(out, [0, 0])
def testLocateTensorElement1DNoEllipsisBatchMode(self):
a = np.zeros(20)
out = tensor_format.format_tensor(
a, "a", np_printoptions={"linewidth": 40})
self.assertEqual([
"Tensor \"a\":",
"",
"array([ 0., 0., 0., 0., 0., 0.,",
" 0., 0., 0., 0., 0., 0.,",
" 0., 0., 0., 0., 0., 0.,",
" 0., 0.])",
], out.lines)
(are_omitted, rows, start_cols,
end_cols) = tensor_format.locate_tensor_element(out, [[0]])
self.assertEqual([False], are_omitted)
self.assertEqual([2], rows)
self.assertEqual([8], start_cols)
self.assertEqual([10], end_cols)
(are_omitted, rows, start_cols,
end_cols) = tensor_format.locate_tensor_element(out, [[0], [5]])
self.assertEqual([False, False], are_omitted)
self.assertEqual([2, 2], rows)
self.assertEqual([8, 33], start_cols)
self.assertEqual([10, 35], end_cols)
(are_omitted, rows, start_cols,
end_cols) = tensor_format.locate_tensor_element(out, [[0], [6]])
self.assertEqual([False, False], are_omitted)
self.assertEqual([2, 3], rows)
self.assertEqual([8, 8], start_cols)
self.assertEqual([10, 10], end_cols)
(are_omitted, rows, start_cols,
end_cols) = tensor_format.locate_tensor_element(out, [[0], [5], [6]])
self.assertEqual([False, False, False], are_omitted)
self.assertEqual([2, 2, 3], rows)
self.assertEqual([8, 33, 8], start_cols)
self.assertEqual([10, 35, 10], end_cols)
(are_omitted, rows, start_cols,
end_cols) = tensor_format.locate_tensor_element(out, [[0], [5], [6], [19]])
self.assertEqual([False, False, False, False], are_omitted)
self.assertEqual([2, 2, 3, 5], rows)
self.assertEqual([8, 33, 8, 13], start_cols)
self.assertEqual([10, 35, 10, 15], end_cols)
def testBatchModeWithErrors(self):
a = np.zeros(20)
out = tensor_format.format_tensor(
a, "a", np_printoptions={"linewidth": 40})
self.assertEqual([
"Tensor \"a\":",
"",
"array([ 0., 0., 0., 0., 0., 0.,",
" 0., 0., 0., 0., 0., 0.,",
" 0., 0., 0., 0., 0., 0.,",
" 0., 0.])",
], out.lines)
with self.assertRaisesRegexp(ValueError, "Dimensions mismatch"):
tensor_format.locate_tensor_element(out, [[0, 0], [0]])
with self.assertRaisesRegexp(ValueError,
"Indices exceed tensor dimensions"):
tensor_format.locate_tensor_element(out, [[0], [20]])
with self.assertRaisesRegexp(ValueError,
r"Indices contain negative value\(s\)"):
tensor_format.locate_tensor_element(out, [[0], [-1]])
with self.assertRaisesRegexp(
ValueError, "Input indices sets are not in ascending order"):
tensor_format.locate_tensor_element(out, [[5], [0]])
def testLocateTensorElement1DTinyAndNanValues(self):
a = np.ones([3, 3]) * 1e-8
a[1, 0] = np.nan
a[1, 2] = np.inf
out = tensor_format.format_tensor(
a, "a", np_printoptions={"linewidth": 100})
self.assertEqual([
"Tensor \"a\":",
"",
"array([[ 1.00000000e-08, 1.00000000e-08, 1.00000000e-08],",
" [ nan, 1.00000000e-08, inf],",
" [ 1.00000000e-08, 1.00000000e-08, 1.00000000e-08]])",
], out.lines)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [0, 0])
self.assertFalse(is_omitted)
self.assertEqual(2, row)
self.assertEqual(10, start_col)
self.assertEqual(24, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [0, 2])
self.assertFalse(is_omitted)
self.assertEqual(2, row)
self.assertEqual(46, start_col)
self.assertEqual(60, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [1, 0])
self.assertFalse(is_omitted)
self.assertEqual(3, row)
self.assertEqual(21, start_col)
self.assertEqual(24, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [1, 1])
self.assertFalse(is_omitted)
self.assertEqual(3, row)
self.assertEqual(28, start_col)
self.assertEqual(42, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [1, 2])
self.assertFalse(is_omitted)
self.assertEqual(3, row)
self.assertEqual(57, start_col)
self.assertEqual(60, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [2, 2])
self.assertFalse(is_omitted)
self.assertEqual(4, row)
self.assertEqual(46, start_col)
self.assertEqual(60, end_col)
def testLocateTensorElement2DNoEllipsis(self):
a = np.linspace(0.0, 1.0 - 1.0 / 16.0, 16).reshape([4, 4])
out = tensor_format.format_tensor(a, "a")
self.assertEqual([
"Tensor \"a\":",
"",
"array([[ 0. , 0.0625, 0.125 , 0.1875],",
" [ 0.25 , 0.3125, 0.375 , 0.4375],",
" [ 0.5 , 0.5625, 0.625 , 0.6875],",
" [ 0.75 , 0.8125, 0.875 , 0.9375]])",
], out.lines)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [0, 0])
self.assertFalse(is_omitted)
self.assertEqual(2, row)
self.assertEqual(9, start_col)
self.assertEqual(11, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [0, 3])
self.assertFalse(is_omitted)
self.assertEqual(2, row)
self.assertEqual(36, start_col)
self.assertEqual(42, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [1, 0])
self.assertFalse(is_omitted)
self.assertEqual(3, row)
self.assertEqual(9, start_col)
self.assertEqual(13, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [1, 3])
self.assertFalse(is_omitted)
self.assertEqual(3, row)
self.assertEqual(36, start_col)
self.assertEqual(42, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [3, 3])
self.assertFalse(is_omitted)
self.assertEqual(5, row)
self.assertEqual(36, start_col)
self.assertEqual(42, end_col)
with self.assertRaisesRegexp(
ValueError, "Indices exceed tensor dimensions"):
tensor_format.locate_tensor_element(out, [1, 4])
with self.assertRaisesRegexp(
ValueError, "Indices contain negative"):
tensor_format.locate_tensor_element(out, [-1, 2])
with self.assertRaisesRegexp(
ValueError, "Dimensions mismatch"):
tensor_format.locate_tensor_element(out, [0])
def testLocateTensorElement2DNoEllipsisWithNumericSummary(self):
a = np.linspace(0.0, 1.0 - 1.0 / 16.0, 16).reshape([4, 4])
out = tensor_format.format_tensor(a, "a", include_numeric_summary=True)
self.assertEqual([
"Tensor \"a\":",
"",
"Numeric summary:",
"| 0 + | total |",
"| 1 15 | 16 |",
"| min max mean std |",
"| 0.0 0.9375 0.46875 0.28811076429 |",
"",
"array([[ 0. , 0.0625, 0.125 , 0.1875],",
" [ 0.25 , 0.3125, 0.375 , 0.4375],",
" [ 0.5 , 0.5625, 0.625 , 0.6875],",
" [ 0.75 , 0.8125, 0.875 , 0.9375]])",
], out.lines)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [0, 0])
self.assertFalse(is_omitted)
self.assertEqual(8, row)
self.assertEqual(9, start_col)
self.assertEqual(11, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [0, 3])
self.assertFalse(is_omitted)
self.assertEqual(8, row)
self.assertEqual(36, start_col)
self.assertEqual(42, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [1, 0])
self.assertFalse(is_omitted)
self.assertEqual(9, row)
self.assertEqual(9, start_col)
self.assertEqual(13, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [1, 3])
self.assertFalse(is_omitted)
self.assertEqual(9, row)
self.assertEqual(36, start_col)
self.assertEqual(42, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [3, 3])
self.assertFalse(is_omitted)
self.assertEqual(11, row)
self.assertEqual(36, start_col)
self.assertEqual(42, end_col)
with self.assertRaisesRegexp(
ValueError, "Indices exceed tensor dimensions"):
tensor_format.locate_tensor_element(out, [1, 4])
with self.assertRaisesRegexp(
ValueError, "Indices contain negative"):
tensor_format.locate_tensor_element(out, [-1, 2])
with self.assertRaisesRegexp(
ValueError, "Dimensions mismatch"):
tensor_format.locate_tensor_element(out, [0])
def testLocateTensorElement3DWithEllipses(self):
a = np.zeros([11, 11, 11])
out = tensor_format.format_tensor(
a, "a", False, np_printoptions={"threshold": 100, "edgeitems": 2})
self.assertEqual([
"Tensor \"a\":",
"",
"array([[[ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.],",
" ..., ",
" [ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.]],",
"",
" [[ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.],",
" ..., ",
" [ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.]],",
"",
" ..., ",
" [[ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.],",
" ..., ",
" [ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.]],",
"",
" [[ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.],",
" ..., ",
" [ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.]]])",
], out.lines)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [0, 0, 0])
self.assertFalse(is_omitted)
self.assertEqual(2, row)
self.assertEqual(10, start_col)
self.assertEqual(12, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [0, 0, 10])
self.assertFalse(is_omitted)
self.assertEqual(2, row)
self.assertIsNone(start_col) # Passes ellipsis.
self.assertIsNone(end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [0, 1, 0])
self.assertFalse(is_omitted)
self.assertEqual(3, row)
self.assertEqual(10, start_col)
self.assertEqual(12, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [0, 2, 0])
self.assertTrue(is_omitted) # In omitted line.
self.assertEqual(4, row)
self.assertIsNone(start_col)
self.assertIsNone(end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [0, 2, 10])
self.assertTrue(is_omitted) # In omitted line.
self.assertEqual(4, row)
self.assertIsNone(start_col)
self.assertIsNone(end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [0, 8, 10])
self.assertTrue(is_omitted) # In omitted line.
self.assertEqual(4, row)
self.assertIsNone(start_col)
self.assertIsNone(end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [0, 10, 1])
self.assertFalse(is_omitted)
self.assertEqual(6, row)
self.assertEqual(15, start_col)
self.assertEqual(17, end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [5, 1, 1])
self.assertTrue(is_omitted) # In omitted line.
self.assertEqual(14, row)
self.assertIsNone(start_col)
self.assertIsNone(end_col)
is_omitted, row, start_col, end_col = tensor_format.locate_tensor_element(
out, [10, 10, 10])
self.assertFalse(is_omitted)
self.assertEqual(25, row)
self.assertIsNone(start_col) # Past ellipsis.
self.assertIsNone(end_col)
with self.assertRaisesRegexp(
ValueError, "Indices exceed tensor dimensions"):
tensor_format.locate_tensor_element(out, [11, 5, 5])
with self.assertRaisesRegexp(
ValueError, "Indices contain negative"):
tensor_format.locate_tensor_element(out, [-1, 5, 5])
with self.assertRaisesRegexp(
ValueError, "Dimensions mismatch"):
tensor_format.locate_tensor_element(out, [5, 5])
def testLocateTensorElement3DWithEllipsesBatchMode(self):
a = np.zeros([11, 11, 11])
out = tensor_format.format_tensor(
a, "a", False, np_printoptions={"threshold": 100,
"edgeitems": 2})
self.assertEqual([
"Tensor \"a\":",
"",
"array([[[ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.],",
" ..., ",
" [ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.]],",
"",
" [[ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.],",
" ..., ",
" [ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.]],",
"",
" ..., ",
" [[ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.],",
" ..., ",
" [ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.]],",
"",
" [[ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.],",
" ..., ",
" [ 0., 0., ..., 0., 0.],",
" [ 0., 0., ..., 0., 0.]]])",
], out.lines)
(are_omitted, rows, start_cols,
end_cols) = tensor_format.locate_tensor_element(out, [[0, 0, 0]])
self.assertEqual([False], are_omitted)
self.assertEqual([2], rows)
self.assertEqual([10], start_cols)
self.assertEqual([12], end_cols)
(are_omitted, rows, start_cols,
end_cols) = tensor_format.locate_tensor_element(out,
[[0, 0, 0], [0, 0, 10]])
self.assertEqual([False, False], are_omitted)
self.assertEqual([2, 2], rows)
self.assertEqual([10, None], start_cols)
self.assertEqual([12, None], end_cols)
(are_omitted, rows, start_cols,
end_cols) = tensor_format.locate_tensor_element(out,
[[0, 0, 0], [0, 2, 0]])
self.assertEqual([False, True], are_omitted)
self.assertEqual([2, 4], rows)
self.assertEqual([10, None], start_cols)
self.assertEqual([12, None], end_cols)
(are_omitted, rows, start_cols,
end_cols) = tensor_format.locate_tensor_element(out,
[[0, 0, 0], [10, 10, 10]])
self.assertEqual([False, False], are_omitted)
self.assertEqual([2, 25], rows)
self.assertEqual([10, None], start_cols)
self.assertEqual([12, None], end_cols)
def testLocateTensorElementAnnotationsUnavailable(self):
tensor_proto = tensor_pb2.TensorProto(
dtype=types_pb2.DataType.Value("DT_FLOAT"),
tensor_shape=tensor_shape_pb2.TensorShapeProto(
dim=[tensor_shape_pb2.TensorShapeProto.Dim(size=1)]))
out = tensor_format.format_tensor(
debug_data.InconvertibleTensorProto(tensor_proto, False), "a")
self.assertEqual(["Tensor \"a\":", "", "Uninitialized tensor:"],
out.lines[:3])
with self.assertRaisesRegexp(
AttributeError, "tensor_metadata is not available in annotations"):
tensor_format.locate_tensor_element(out, [0])
class NumericSummaryTest(test_util.TensorFlowTestCase):
def testNumericSummaryOnFloatFullHouse(self):
x = np.array([np.nan, np.nan, -np.inf, np.inf, np.inf, np.inf, -2, -3, -4,
0, 1, 2, 2, 2, 2, 0, 0, 0, np.inf, np.inf, np.inf])
out = tensor_format.numeric_summary(x)
self.assertEqual(
"| nan -inf - 0 + +inf | total |", out.lines[0])
self.assertEqual(
"| 2 1 3 4 5 6 | 21 |", out.lines[1])
self.assertEqual(
"| min max mean std |",
out.lines[2])
self.assertEqual(
"| -4.0 2.0 0.0 1.95789002075 |",
out.lines[3])
def testNumericSummaryOnFloatMissingCategories(self):
x = np.array([np.nan, np.nan])
out = tensor_format.numeric_summary(x)
self.assertEqual(2, len(out.lines))
self.assertEqual("| nan | total |", out.lines[0])
self.assertEqual("| 2 | 2 |", out.lines[1])
x = np.array([-np.inf, np.inf, 0, 0, np.inf, np.inf])
out = tensor_format.numeric_summary(x)
self.assertEqual("| -inf 0 +inf | total |", out.lines[0])
self.assertEqual("| 1 2 3 | 6 |", out.lines[1])
self.assertEqual("| min max mean std |", out.lines[2])
self.assertEqual("| 0.0 0.0 0.0 0.0 |", out.lines[3])
x = np.array([-120, 120, 130])
out = tensor_format.numeric_summary(x)
self.assertEqual("| - + | total |", out.lines[0])
self.assertEqual("| 1 2 | 3 |", out.lines[1])
self.assertEqual(
"| min max mean std |",
out.lines[2])
self.assertEqual(
"| -120 130 43.3333333333 115.566238822 |",
out.lines[3])
def testNumericSummaryOnEmptyFloat(self):
x = np.array([], dtype=np.float32)
out = tensor_format.numeric_summary(x)
self.assertEqual(["No numeric summary available due to empty tensor."],
out.lines)
def testNumericSummaryOnInt(self):
x = np.array([-3] * 50 + [3] * 200 + [0], dtype=np.int32)
out = tensor_format.numeric_summary(x)
self.assertEqual("| - 0 + | total |", out.lines[0])
self.assertEqual("| 50 1 200 | 251 |", out.lines[1])
self.assertEqual(
"| min max mean std |",
out.lines[2])
self.assertEqual(
"| -3 3 1.79282868526 2.39789673081 |",
out.lines[3])
def testNumericSummaryOnBool(self):
x = np.array([False, True, True, False], dtype=np.bool)
out = tensor_format.numeric_summary(x)
self.assertEqual(2, len(out.lines))
self.assertEqual("| False True | total |", out.lines[0])
self.assertEqual("| 2 2 | 4 |", out.lines[1])
x = np.array([True] * 10, dtype=np.bool)
out = tensor_format.numeric_summary(x)
self.assertEqual(2, len(out.lines))
self.assertEqual("| True | total |", out.lines[0])
self.assertEqual("| 10 | 10 |", out.lines[1])
x = np.array([False] * 10, dtype=np.bool)
out = tensor_format.numeric_summary(x)
self.assertEqual(2, len(out.lines))
self.assertEqual("| False | total |", out.lines[0])
self.assertEqual("| 10 | 10 |", out.lines[1])
x = np.array([], dtype=np.bool)
out = tensor_format.numeric_summary(x)
self.assertEqual(["No numeric summary available due to empty tensor."],
out.lines)
def testNumericSummaryOnStrTensor(self):
x = np.array(["spam", "egg"], dtype=np.object)
out = tensor_format.numeric_summary(x)
self.assertEqual(
["No numeric summary available due to tensor dtype: object."],
out.lines)
if __name__ == "__main__":
googletest.main()
|
apache-2.0
|
jonparrott/google-cloud-python
|
api_core/tests/unit/future/test__helpers.py
|
3
|
1334
|
# Copyright 2017, Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import mock
from google.api_core.future import _helpers
@mock.patch('threading.Thread', autospec=True)
def test_start_daemon_thread(unused_thread):
    daemon_thread = _helpers.start_daemon_thread(target=mock.sentinel.target)
    assert daemon_thread.daemon is True
def test_safe_invoke_callback():
callback = mock.Mock(spec=['__call__'], return_value=42)
result = _helpers.safe_invoke_callback(callback, 'a', b='c')
assert result == 42
callback.assert_called_once_with('a', b='c')
def test_safe_invoke_callback_exception():
callback = mock.Mock(spec=['__call__'], side_effect=ValueError())
result = _helpers.safe_invoke_callback(callback, 'a', b='c')
assert result is None
callback.assert_called_once_with('a', b='c')
|
apache-2.0
|
charitychain/Charitychain
|
Simplechaindb/clusterdeploy/write_keypairs_file.py
|
1
|
1603
|
"""A Python 3 script to write a file with a specified number
of keypairs, using bigchaindb.crypto.generate_key_pair()
The written file is always named keypairs.py and it should be
interpreted as a Python 2 script.
Usage:
$ python3 write_keypairs_file.py num_pairs
Using the list in other Python scripts:
# in a Python 2 script:
from keypairs import keypairs_list
# keypairs_list is a list of (sk, pk) tuples
# sk = signing key (private key)
# pk = verifying key (public key)
"""
import argparse
from bigchaindb import crypto
# Parse the command-line arguments
desc = 'Write a set of keypairs to keypairs.py'
parser = argparse.ArgumentParser(description=desc)
parser.add_argument('num_pairs',
help='number of keypairs to write',
type=int)
args = parser.parse_args()
num_pairs = int(args.num_pairs)
# Generate and write the keypairs to keypairs.py
print('Writing {} keypairs to keypairs.py...'.format(num_pairs))
with open('keypairs.py', 'w') as f:
f.write('# -*- coding: utf-8 -*-\n')
f.write('"""A set of keypairs for use in deploying\n')
f.write('BigchainDB servers with a predictable set of keys.\n')
f.write('"""\n')
f.write('\n')
f.write('from __future__ import unicode_literals\n')
f.write('\n')
f.write('keypairs_list = [')
for pair_num in range(num_pairs):
keypair = crypto.generate_key_pair()
spacer = '' if pair_num == 0 else ' '
f.write("{}('{}',\n '{}'),\n".format(
spacer, keypair[0], keypair[1]))
f.write(' ]\n')
print('Done.')
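# A sketch of what the generated keypairs.py looks like (the keys shown
# here are placeholders, not real keys):
#
#     keypairs_list = [('<signing key 0>',
#                       '<verifying key 0>'),
#                      ('<signing key 1>',
#                       '<verifying key 1>'),
#                      ]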
|
apache-2.0
|
jakevdp/supersmoother
|
supersmoother/windowed_sum.py
|
2
|
9106
|
import numpy as np
def windowed_sum_slow(arrays, span, t=None, indices=None, tpowers=0,
period=None, subtract_mid=False):
"""Compute the windowed sum of the given arrays.
This is a slow function, used primarily for testing and validation
of the faster version of ``windowed_sum()``
Parameters
----------
arrays : tuple of arrays
arrays to window
span : int or array of ints
The span to use for the sum at each point. If array is provided,
it must be broadcastable with ``indices``
indices : array
the indices of the center of the desired windows. If ``None``,
the indices are assumed to be ``range(len(arrays[0]))`` though
these are not actually instantiated.
t : array (optional)
Times associated with the arrays
tpowers : list (optional)
Powers of t for each array sum
period : float (optional)
Period to use, if times are periodic. If supplied, input times
must be arranged such that (t % period) is sorted!
subtract_mid : boolean
If true, then subtract the middle value from each sum
Returns
-------
arrays : tuple of ndarrays
arrays containing the windowed sum of each input array
"""
span = np.asarray(span, dtype=int)
if not np.all(span > 0):
raise ValueError("span values must be positive")
arrays = tuple(map(np.asarray, arrays))
N = arrays[0].size
if not all(a.shape == (N,) for a in arrays):
raise ValueError("sizes of provided arrays must match")
t_input = t
if t is not None:
t = np.asarray(t)
if t.shape != (N,):
raise ValueError("shape of t must match shape of arrays")
else:
t = np.ones(N)
tpowers = tpowers + np.zeros(len(arrays))
if len(tpowers) != len(arrays):
raise ValueError("tpowers must be broadcastable with number of arrays")
if period:
if t_input is None:
raise ValueError("periodic requires t to be provided")
t = t % period
if indices is None:
indices = np.arange(N)
spans, indices = np.broadcast_arrays(span, indices)
results = []
for tpower, array in zip(tpowers, arrays):
if period:
result = [sum(array[j % N]
* (t[j % N] + (j // N) * period) ** tpower
for j in range(i - s // 2,
i - s // 2 + s)
if not (subtract_mid and j == i))
for i, s in np.broadcast(indices, spans)]
else:
result = [sum(array[j] * t[j] ** tpower
for j in range(max(0, i - s // 2),
min(N, i - s // 2 + s))
if not (subtract_mid and j == i))
for i, s in np.broadcast(indices, spans)]
results.append(np.asarray(result))
return tuple(results)
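# Example (illustrative): a centered windowed sum with span 3; the window
# is clipped at the array edges, so the first and last sums cover fewer
# elements:
#     >>> windowed_sum_slow([np.arange(5.)], span=3)
#     (array([ 1.,  3.,  6.,  9.,  7.]),)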
def windowed_sum(arrays, span, t=None, indices=None, tpowers=0,
period=None, subtract_mid=False):
"""Compute the windowed sum of the given arrays.
Parameters
----------
arrays : tuple of arrays
arrays to window
span : int or array of ints
The span to use for the sum at each point. If array is provided,
it must be broadcastable with ``indices``
indices : array
the indices of the center of the desired windows. If ``None``,
the indices are assumed to be ``range(len(arrays[0]))`` though
these are not actually instantiated.
t : array (optional)
Times associated with the arrays
tpowers : list (optional)
Powers of t for each array sum
period : float (optional)
Period to use, if times are periodic. If supplied, input times
must be arranged such that (t % period) is sorted!
subtract_mid : boolean
If true, then subtract the middle value from each sum
Returns
-------
arrays : tuple of ndarrays
arrays containing the windowed sum of each input array
"""
span = np.asarray(span, dtype=int)
if not np.all(span > 0):
raise ValueError("span values must be positive")
arrays = tuple(map(np.asarray, arrays))
N = arrays[0].size
if not all(a.shape == (N,) for a in arrays):
raise ValueError("sizes of provided arrays must match")
t_input = t
if t is not None:
t = np.asarray(t)
if t.shape != (N,):
raise ValueError("shape of t must match shape of arrays "
"t -> {0} arr -> {1}".format(t.shape,
arrays[0].shape))
else:
# XXX: special-case no t?
t = np.ones(N)
tpowers = np.asarray(tpowers) + np.zeros(len(arrays))
if indices is not None:
span, indices = np.broadcast_arrays(span, indices)
# For the periodic case, re-call the function with padded arrays
if period:
if t_input is None:
raise ValueError("periodic requires t to be provided")
t = t % period
t, arrays, sl = _pad_arrays(t, arrays, indices, span, period)
if len(t) > N:
# arrays are padded. Recursively call windowed_sum() and return.
if span.ndim == 0 and indices is None:
# fixed-span/no index case is done faster this way
arrs = windowed_sum(arrays, span, t=t, indices=indices,
tpowers=tpowers, period=None,
subtract_mid=subtract_mid)
return tuple([a[sl] for a in arrs])
else:
# this works for variable span and general indices
if indices is None:
indices = np.arange(N)
indices = indices + sl.start
return windowed_sum(arrays, span, t=t, indices=indices,
tpowers=tpowers, period=None,
subtract_mid=subtract_mid)
else:
# No padding needed! We can carry-on as if it's a non-periodic case
period = None
# The rest of the algorithm now proceeds without reference to the period
# just as a sanity check...
assert not period
if span.ndim == 0:
# fixed-span case. Because of the checks & manipulations above
# we know here that indices=None
assert indices is None
window = np.ones(span)
def convolve_same(a, window):
if len(window) <= len(a):
res = np.convolve(a, window, mode='same')
else:
res = np.convolve(a, window, mode='full')
start = (len(window) - 1) // 2
res = res[start:start + len(a)]
return res
results = [convolve_same(a * t ** tp, window)
for a, tp in zip(arrays, tpowers)]
indices = slice(None)
else:
# variable-span case. Use reduceat() in a clever way for speed.
if indices is None:
indices = np.arange(len(span))
# we checked this above, but just as a sanity check assert it here...
assert span.shape == indices.shape
mins = np.asarray(indices) - span // 2
results = []
for a, tp in zip(arrays, tpowers):
ranges = np.vstack([np.maximum(0, mins),
np.minimum(len(a), mins+span)]).ravel('F')
results.append(np.add.reduceat(np.append(a * t ** tp, 0),
ranges)[::2])
# Subtract the midpoint if required: this is used in cross-validation
if subtract_mid:
results = [r - a[indices] * t[indices] ** tp
for r, a, tp in zip(results, arrays, tpowers)]
return tuple(results)
def _pad_arrays(t, arrays, indices, span, period):
"""Internal routine to pad arrays for periodic models."""
N = len(t)
if indices is None:
indices = np.arange(N)
pad_left = max(0, 0 - np.min(indices - span // 2))
pad_right = max(0, np.max(indices + span - span // 2) - (N - 1))
if pad_left + pad_right > 0:
Nright, pad_right = divmod(pad_right, N)
Nleft, pad_left = divmod(pad_left, N)
t = np.concatenate([t[N - pad_left:] - (Nleft + 1) * period]
+ [t + i * period
for i in range(-Nleft, Nright + 1)]
+ [t[:pad_right] + (Nright + 1) * period])
arrays = [np.concatenate([a[N - pad_left:]]
+ (Nleft + Nright + 1) * [a]
+ [a[:pad_right]])
for a in arrays]
return (t, arrays, slice(pad_left + Nleft * N,
pad_left + (Nleft + 1) * N))
else:
return (t, arrays, slice(None))
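if __name__ == '__main__':
    # Quick sanity check (a sketch, not part of the library API): the fast
    # implementation should agree with the slow reference version.
    rng = np.random.RandomState(0)
    data = rng.rand(10)
    fast = windowed_sum([data], span=5)
    slow = windowed_sum_slow([data], span=5)
    assert np.allclose(fast[0], slow[0])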
|
bsd-2-clause
|
mcardillo55/django
|
tests/signals/tests.py
|
311
|
10273
|
from __future__ import unicode_literals
from django.db import models
from django.db.models import signals
from django.dispatch import receiver
from django.test import TestCase
from django.utils import six
from .models import Author, Book, Car, Person
class BaseSignalTest(TestCase):
def setUp(self):
# Save up the number of connected signals so that we can check at the
# end that all the signals we register get properly unregistered (#9989)
self.pre_signals = (
len(signals.pre_save.receivers),
len(signals.post_save.receivers),
len(signals.pre_delete.receivers),
len(signals.post_delete.receivers),
)
def tearDown(self):
# Check that all our signals got disconnected properly.
post_signals = (
len(signals.pre_save.receivers),
len(signals.post_save.receivers),
len(signals.pre_delete.receivers),
len(signals.post_delete.receivers),
)
self.assertEqual(self.pre_signals, post_signals)
class SignalTests(BaseSignalTest):
def test_model_pre_init_and_post_init(self):
data = []
def pre_init_callback(sender, args, **kwargs):
data.append(kwargs['kwargs'])
signals.pre_init.connect(pre_init_callback)
def post_init_callback(sender, instance, **kwargs):
data.append(instance)
signals.post_init.connect(post_init_callback)
p1 = Person(first_name="John", last_name="Doe")
self.assertEqual(data, [{}, p1])
def test_save_signals(self):
data = []
def pre_save_handler(signal, sender, instance, **kwargs):
data.append(
(instance, kwargs.get("raw", False))
)
def post_save_handler(signal, sender, instance, **kwargs):
data.append(
(instance, kwargs.get("created"), kwargs.get("raw", False))
)
signals.pre_save.connect(pre_save_handler, weak=False)
signals.post_save.connect(post_save_handler, weak=False)
try:
p1 = Person.objects.create(first_name="John", last_name="Smith")
self.assertEqual(data, [
(p1, False),
(p1, True, False),
])
data[:] = []
p1.first_name = "Tom"
p1.save()
self.assertEqual(data, [
(p1, False),
(p1, False, False),
])
data[:] = []
# Calling an internal method purely so that we can trigger a "raw" save.
p1.save_base(raw=True)
self.assertEqual(data, [
(p1, True),
(p1, False, True),
])
data[:] = []
p2 = Person(first_name="James", last_name="Jones")
p2.id = 99999
p2.save()
self.assertEqual(data, [
(p2, False),
(p2, True, False),
])
data[:] = []
p2.id = 99998
p2.save()
self.assertEqual(data, [
(p2, False),
(p2, True, False),
])
finally:
signals.pre_save.disconnect(pre_save_handler)
signals.post_save.disconnect(post_save_handler)
def test_delete_signals(self):
data = []
def pre_delete_handler(signal, sender, instance, **kwargs):
data.append(
(instance, instance.id is None)
)
# #8285: signals can be any callable
class PostDeleteHandler(object):
def __init__(self, data):
self.data = data
def __call__(self, signal, sender, instance, **kwargs):
self.data.append(
(instance, instance.id is None)
)
post_delete_handler = PostDeleteHandler(data)
signals.pre_delete.connect(pre_delete_handler, weak=False)
signals.post_delete.connect(post_delete_handler, weak=False)
try:
p1 = Person.objects.create(first_name="John", last_name="Smith")
p1.delete()
self.assertEqual(data, [
(p1, False),
(p1, False),
])
data[:] = []
p2 = Person(first_name="James", last_name="Jones")
p2.id = 99999
p2.save()
p2.id = 99998
p2.save()
p2.delete()
self.assertEqual(data, [
(p2, False),
(p2, False)
])
data[:] = []
self.assertQuerysetEqual(
Person.objects.all(), [
"James Jones",
],
six.text_type
)
finally:
signals.pre_delete.disconnect(pre_delete_handler)
signals.post_delete.disconnect(post_delete_handler)
def test_decorators(self):
data = []
@receiver(signals.pre_save, weak=False)
def decorated_handler(signal, sender, instance, **kwargs):
data.append(instance)
@receiver(signals.pre_save, sender=Car, weak=False)
def decorated_handler_with_sender_arg(signal, sender, instance, **kwargs):
data.append(instance)
try:
c1 = Car.objects.create(make="Volkswagon", model="Passat")
self.assertEqual(data, [c1, c1])
finally:
signals.pre_save.disconnect(decorated_handler)
signals.pre_save.disconnect(decorated_handler_with_sender_arg, sender=Car)
def test_save_and_delete_signals_with_m2m(self):
data = []
def pre_save_handler(signal, sender, instance, **kwargs):
data.append('pre_save signal, %s' % instance)
if kwargs.get('raw'):
data.append('Is raw')
def post_save_handler(signal, sender, instance, **kwargs):
data.append('post_save signal, %s' % instance)
if 'created' in kwargs:
if kwargs['created']:
data.append('Is created')
else:
data.append('Is updated')
if kwargs.get('raw'):
data.append('Is raw')
def pre_delete_handler(signal, sender, instance, **kwargs):
data.append('pre_delete signal, %s' % instance)
data.append('instance.id is not None: %s' % (instance.id is not None))
def post_delete_handler(signal, sender, instance, **kwargs):
data.append('post_delete signal, %s' % instance)
data.append('instance.id is not None: %s' % (instance.id is not None))
signals.pre_save.connect(pre_save_handler, weak=False)
signals.post_save.connect(post_save_handler, weak=False)
signals.pre_delete.connect(pre_delete_handler, weak=False)
signals.post_delete.connect(post_delete_handler, weak=False)
try:
a1 = Author.objects.create(name='Neal Stephenson')
self.assertEqual(data, [
"pre_save signal, Neal Stephenson",
"post_save signal, Neal Stephenson",
"Is created"
])
data[:] = []
b1 = Book.objects.create(name='Snow Crash')
self.assertEqual(data, [
"pre_save signal, Snow Crash",
"post_save signal, Snow Crash",
"Is created"
])
data[:] = []
# Assigning and removing to/from m2m shouldn't generate an m2m signal.
b1.authors = [a1]
self.assertEqual(data, [])
b1.authors = []
self.assertEqual(data, [])
finally:
signals.pre_save.disconnect(pre_save_handler)
signals.post_save.disconnect(post_save_handler)
signals.pre_delete.disconnect(pre_delete_handler)
signals.post_delete.disconnect(post_delete_handler)
def test_disconnect_in_dispatch(self):
"""
        Test that receivers that disconnect themselves when called don't
        mess up future dispatching.
"""
class Handler(object):
def __init__(self, param):
self.param = param
self._run = False
def __call__(self, signal, sender, **kwargs):
self._run = True
signal.disconnect(receiver=self, sender=sender)
a, b = Handler(1), Handler(2)
signals.post_save.connect(a, sender=Person, weak=False)
signals.post_save.connect(b, sender=Person, weak=False)
Person.objects.create(first_name='John', last_name='Smith')
self.assertTrue(a._run)
self.assertTrue(b._run)
self.assertEqual(signals.post_save.receivers, [])
class LazyModelRefTest(BaseSignalTest):
def setUp(self):
super(LazyModelRefTest, self).setUp()
self.received = []
def receiver(self, **kwargs):
self.received.append(kwargs)
def test_invalid_sender_model_name(self):
with self.assertRaisesMessage(ValueError,
"Specified sender must either be a model or a "
"model name of the 'app_label.ModelName' form."):
signals.post_init.connect(self.receiver, sender='invalid')
def test_already_loaded_model(self):
signals.post_init.connect(
self.receiver, sender='signals.Book', weak=False
)
try:
instance = Book()
self.assertEqual(self.received, [{
'signal': signals.post_init,
'sender': Book,
'instance': instance
}])
finally:
signals.post_init.disconnect(self.receiver, sender=Book)
def test_not_loaded_model(self):
signals.post_init.connect(
self.receiver, sender='signals.Created', weak=False
)
try:
class Created(models.Model):
pass
instance = Created()
self.assertEqual(self.received, [{
'signal': signals.post_init, 'sender': Created, 'instance': instance
}])
finally:
signals.post_init.disconnect(self.receiver, sender=Created)
|
bsd-3-clause
|
haoyunfeix/crosswalk-test-suite
|
stability/stability-lowresource-android-tests/lowresource/Webapp_Operations_UnderLowDisk.py
|
7
|
9653
|
#!/usr/bin/env python
# coding=utf-8
#
# Copyright (c) 2015 Intel Corporation.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of works must retain the original copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the original copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of Intel Corporation nor the names of its contributors
# may be used to endorse or promote products derived from this work without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY INTEL CORPORATION "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL INTEL CORPORATION BE LIABLE FOR ANY DIRECT,
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors:
# Li, Hao<[email protected]>
import unittest
import os
import sys
import commands
import shutil
import time
import subprocess
import glob
from TestApp import *
reload(sys)
sys.setdefaultencoding('utf-8')
SCRIPT_PATH = os.path.realpath(__file__)
ConstPath = os.path.dirname(SCRIPT_PATH)
appsrc = ConstPath + "/../testapp/helloworld"
approot = ConstPath + "/helloworld"
app_tools_dir = os.environ.get('CROSSWALK_APP_TOOLS_CACHE_DIR')
installed_app_list = []
def setUp():
global device, apptools, crosswalkzip
#device = 'E6OKCY411012'
device = os.environ.get('DEVICE_ID')
global device_abi
device_abi = getDeviceCpuAbi(device)
if not device:
        print 'Failed to get DEVICE_ID from the environment\n'
sys.exit(1)
if not app_tools_dir:
print ("Not find CROSSWALK_APP_TOOLS_CACHE_DIR\n")
sys.exit(1)
# app tools commend
apptools = "crosswalk-pkg"
if os.system(apptools) != 0:
apptools = app_tools_dir + "/crosswalk-app-tools/src/crosswalk-pkg"
# crosswalk lib
zips = glob.glob(os.path.join(app_tools_dir, "crosswalk-*.zip"))
if len(zips) == 0:
print ("Not find crosswalk zip in CROSSWALK_APP_TOOLS_CACHE_DIR\n")
sys.exit(1)
# latest version
zips.sort(reverse = True)
crosswalkzip = zips[0]
def getFreeDiskSize(device):
# Disk size: M
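    # Assumed "adb shell df" output format: the 4th whitespace-separated
    # field of the /data line is the free size with a "G" or "M" suffix
    # (e.g. "9.2G" or "512.0M"); the suffix character is stripped below.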
cmd = "%s -s %s shell df|grep %s |awk -F \" \" '{print $4}'" % (ADB_CMD, device, "/data")
(return_code, output) = doCMD(cmd)
for line in output:
if line.endswith("G"):
# 1G = 1024M
return int(float(line[0:-1]) * 1024)
else:
return int(float(line[0:-1]))
def getDeviceCpuAbi(device):
cmd = "%s -s %s shell getprop|grep \"\[ro.product.cpu.abi\]\"" % (ADB_CMD, device)
(return_code, output) = doCMD(cmd)
for line in output:
if "x86" in line:
return "x86"
else:
return "arm"
def getFileSize(filepath):
filesize = 0
if os.path.exists(filepath):
filesize = float(os.stat(filepath).st_size)
# size: M
filesize = filesize/1024/1024
else:
print "-->> %s does not exists" % filepath
return filesize
def createAPK(appname):
action_status = True
# Remove existed manifest.json
if os.path.exists(appsrc + "/manifest.json"):
os.remove(appsrc + "/manifest.json")
# build apk
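    # The assembled command looks roughly like this (illustrative values):
    #   crosswalk-pkg --crosswalk=<crosswalk.zip> --platforms=android \
    #     --android=embedded --targets=x86 -m "{...manifest json...}" <appsrc>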
cmd = "%s --crosswalk=%s --platforms=android --android=%s --targets=%s -m " \
"\"{\\\"name\\\": \\\"%s\\\", \\\"start_url\\\": \\\"index.html\\\", \\\"xwalk_package_id\\\": \\\"org.xwalk.%s\\\"}\" %s" % \
(apptools,
crosswalkzip,
"embedded",
device_abi,
appname,
appname,
appsrc)
(return_code, output) = doCMD(cmd)
if return_code == 0:
print "-->> org.xwalk.%s success to build." % appname
cmd = "mv *.apk %s/%s.apk" % (approot, appname)
(return_code, output) = doCMD(cmd)
else:
print "-->> org.xwalk.%s fail to build." % appname
action_status = False
return action_status
def deleteAPK(testapp):
cmd = "rm -rf %s" % (testapp.location)
(return_code, output) = doCMD(cmd)
if return_code == 0:
print "-->> %s success to delete." % testapp.location
return True
else:
print "-->> %s fail to delete." % testapp.location
return False
def cleanWork():
cmd = "rm -rf %s" % (appsrc + "/*.temp.mp4")
(return_code, output) = doCMD(cmd)
cmd = "rm -rf %s" % (approot)
(return_code, output) = doCMD(cmd)
    for i in range(len(installed_app_list)):
        installed_app_list[i].uninstall()
def makeLowDisk():
cleanWork()
action_status = False
if not os.path.exists(approot):
cmd = "mkdir %s" % approot
(return_code, output) = doCMD(cmd)
videofile = appsrc + "/res/w3c/movie_300.mp4"
videosize = getFileSize(videofile)
if videosize <= 0:
print "-->> Lack pre-condition resource files"
return False
tmpreadystate = [False, False, False]
    global installed_app_list
while not action_status:
freesize = getFreeDiskSize(device)
if (freesize >= 1024) and not tmpreadystate[0]:
# make app size: 500M
count = int((500 - videosize)/videosize)
for i in range(count):
cmd = "cp %s %s " % (videofile, appsrc + "/video" + str(i) +".temp.mp4")
(return_code, output) = doCMD(cmd)
tmpreadystate[0] = True
elif (freesize >= 512) and (freesize < 1024) and not tmpreadystate[1]:
# clean appsrc
if tmpreadystate[0]:
cmd = "rm -rf %s/*.temp.mp4" % (appsrc)
(return_code, output) = doCMD(cmd)
# make app size: 100M
count = int((100 - videosize)/videosize)
for i in range(count):
cmd = "cp %s %s " % (videofile, appsrc + "/video" + str(i) +".temp.mp4")
(return_code, output) = doCMD(cmd)
tmpreadystate[1] = True
elif (freesize < 512) and not tmpreadystate[2]:
# clean appsrc
cmd = "rm -rf %s/*.temp.mp4" % (appsrc)
(return_code, output) = doCMD(cmd)
tmpreadystate[2] = True
appname = "helloworld%s" % int(time.time())
if createAPK(appname):
apkname = appname[0].upper() + appname[1:]
apkpath = approot + "/" + appname + ".apk"
testapp = TestApp(device, apkpath,
"org.xwalk." + appname, apkname + "Activity")
            # If the app is already installed, skip it and move on.
            if not testapp.isInstalled():
                # If installation fails, the disk is low enough: record
                # success and delete the locally built package.
if not testapp.install():
action_status = True
deleteAPK(testapp)
                    # tmpreadystate[2] == True means the free disk is too
                    # small to install the test app; uninstall the last
                    # installed app to keep more disk free.
                    if len(installed_app_list) > 0 and tmpreadystate[2]:
                        testapp = installed_app_list.pop(-1)
testapp.uninstall()
deleteAPK(testapp)
else:
                    installed_app_list.append(testapp)
else:
break
return action_status
class TestStabilityInLowDiskFunctions(unittest.TestCase):
def test_app_repeatedly_in_lowdisk(self):
setUp()
if makeLowDisk():
testapp = TestApp(device, ConstPath + "/../testapp/lowresourcetest.apk",
"org.xwalk.lowresourcetest", "LowresourcetestActivity")
if testapp.isInstalled():
testapp.uninstall()
for i in range(20):
if testapp.install() and testapp.launch():
switchresult = False
for i in range(2):
time.sleep(1)
                    # switch app
switchresult = testapp.switch()
if switchresult:
time.sleep(1)
if testapp.stop() and testapp.uninstall():
time.sleep(1)
else:
testapp.uninstall()
cleanWork()
self.assertTrue(False)
else:
testapp.uninstall()
cleanWork()
self.assertTrue(False)
else:
testapp.uninstall()
cleanWork()
self.assertTrue(False)
testapp.uninstall()
cleanWork()
self.assertTrue(True)
else:
print "-->> Test envrionment fail to set up"
cleanWork()
self.assertTrue(False)
if __name__ == '__main__':
unittest.main()
|
bsd-3-clause
|
no-zone6/pimouse_ros_2
|
scripts/motors2.py
|
1
|
2299
|
#!/usr/bin/env python
# encoding: utf8
import sys, rospy, math
from pimouse_ros_2.msg import MotorFreqs
from geometry_msgs.msg import Twist
from std_srvs.srv import Trigger, TriggerResponse
class Motor():
def __init__(self):
if not self.set_power(False): sys.exit(1)
rospy.on_shutdown(self.set_power)
self.sub_raw = rospy.Subscriber('motor_raw', MotorFreqs, self.callback_raw_freq)
self.sub_cmd_vel = rospy.Subscriber('cmd_vel', Twist, self.callback_cmd_vel)
self.srv_on = rospy.Service('motor_on', Trigger, self.callback_on)
self.srv_off = rospy.Service('motor_off', Trigger, self.callback_off)
self.last_time = rospy.Time.now()
self.using_cmd_vel = False
def set_power(self, onoff=False):
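        # The motor driver exposes power control as a device file: writing
        # "1" turns the motors on and "0" turns them off (assumed semantics
        # of the rtmotoren0 driver).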
en = "/dev/rtmotoren0"
try:
with open(en,'w') as f:
f.write("1\n" if onoff else "0\n")
self.is_on = onoff
return True
except:
rospy.logerr("cannot write to " + en)
return False
def set_raw_freq(self, left_hz, right_hz):
if not self.is_on:
rospy.logerr("not enpowered")
return
try:
with open("/dev/rtmotor_raw_l0",'w') as lf,\
open("/dev/rtmotor_raw_r0",'w') as rf:
lf.write(str(int(round(left_hz))) + "\n")
rf.write(str(int(round(right_hz))) + "\n")
except:
rospy.logerr("cannot write to rtmotor_raw_*")
def callback_raw_freq(self, message):
self.set_raw_freq(message.left_hz,message.right_hz)
def callback_cmd_vel(self, message):
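        # Convert the Twist command into per-wheel pulse frequencies.
        # The constants below encode this robot's hardware (steps per
        # revolution, wheel size and tread) and are taken as given here:
        # forward_hz is the frequency for linear.x [m/s], rot_hz the
        # differential term for angular.z [rad/s].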
forward_hz = 80000.0*message.linear.x/(9*math.pi)
rot_hz = 400.0*message.angular.z/math.pi
self.set_raw_freq(forward_hz-rot_hz, forward_hz+rot_hz)
self.using_cmd_vel = True
self.last_time = rospy.Time.now()
def onoff_response(self,onoff):
d = TriggerResponse()
d.success = self.set_power(onoff)
d.message = "ON" if self.is_on else "OFF"
return d
def callback_on(self,message): return self.onoff_response(True)
def callback_off(self,message): return self.onoff_response(False)
if __name__ == '__main__':
rospy.init_node('motors')
m = Motor()
rate = rospy.Rate(10)
while not rospy.is_shutdown():
if m.using_cmd_vel and rospy.Time.now().to_sec() - m.last_time.to_sec() >= 1.0:
m.set_raw_freq(0,0)
m.using_cmd_vel = False
rate.sleep()
|
gpl-3.0
|
jeasoft/odoo
|
addons/portal/wizard/__init__.py
|
447
|
1098
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2011 OpenERP S.A (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import portal_wizard
import share_wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
Omegaice/smartcontainers
|
sc/configmanager.py
|
3
|
3152
|
"""Configuration manager for reading and writing SC configuration files."""
import simplejson as json
import os
import rdflib
__author__ = 'cwilli34'
# noinspection PyBroadException,PyBroadException
class ConfigManager(object):
""" Configuration File Creator """
def __init__(self, filename='sc.config'):
"""Initialize
Parameters
----------
        :param filename: string
            Name of the configuration file (defaults to 'sc.config').
Returns
-------
:returns: none
"""
self.graph = rdflib.Graph()
self.filename = filename
if os.environ.get('SC_HOME'):
self.config_path = os.getenv('SC_HOME')
else:
os.environ['SC_HOME'] = os.environ['HOME'] + '/.sc/'
self.config_path = os.getenv('SC_HOME')
def write_config(self):
"""Write the configuration file
Writes the configuration file to the SC directory, or program home directory
Parameters
----------
:param: None
Returns
-------
:returns: none
"""
if os.path.exists(self.config_path):
# Open existing file, read and write
ctgfile = open(self.config_path + self.filename, 'w+')
else:
# Create config file, write
os.mkdir(self.config_path)
ctgfile = open(self.config_path + self.filename, 'w')
try:
            ctgfile.write(str(self.config_object))
            # ctgfile.write(json.dumps(self.config_object, indent=4, sort_keys=True))
ctgfile.close()
print('The configuration file has been created.')
except:
print('An unexpected error has occurred. The configuration file could not be created.')
ctgfile.close()
def read_config(self):
"""Read the configuration file. The configuration file is in a Turtle syntax format and
is intended for RDF graph creation. The 'result' returned will be parsed as RDF
Parameters
----------
:param: None
Returns
-------
:returns message: string
If the configuration file does not exist, return error string
"""
if not os.path.exists(self.config_path):
# If the directory does not exist, we cannot read it.
return 'Configuration does not exist.'
elif not os.path.exists(self.config_path + self.filename):
# If the file does not exist, we cannot read it.
return 'Configuration does not exist.'
else:
# Open existing file, read and write
ctgfile = open(self.config_path + self.filename, 'r')
try:
contents = ctgfile.read()
self.config_object = contents
ctgfile.close()
self.graph.parse(data=self.config_object, format='n3')
return ''
except:
ctgfile.close()
                return 'Configuration could not be read or parsed correctly'
configmanager = ConfigManager()
configmanager.read_config()
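# Usage sketch (assumes $SC_HOME/sc.config holds Turtle/N3 data):
#     cm = ConfigManager()
#     error = cm.read_config()
#     if not error:
#         for subject, predicate, obj in cm.graph:
#             print(subject, predicate, obj)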
|
apache-2.0
|
mpare002/HackTech_2017
|
env/Lib/site-packages/flask/blueprints.py
|
773
|
16320
|
# -*- coding: utf-8 -*-
"""
flask.blueprints
~~~~~~~~~~~~~~~~
Blueprints are the recommended way to implement larger or more
pluggable applications in Flask 0.7 and later.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
from functools import update_wrapper
from .helpers import _PackageBoundObject, _endpoint_from_view_func
class BlueprintSetupState(object):
"""Temporary holder object for registering a blueprint with the
application. An instance of this class is created by the
:meth:`~flask.Blueprint.make_setup_state` method and later passed
to all register callback functions.
"""
def __init__(self, blueprint, app, options, first_registration):
#: a reference to the current application
self.app = app
#: a reference to the blueprint that created this setup state.
self.blueprint = blueprint
#: a dictionary with all options that were passed to the
#: :meth:`~flask.Flask.register_blueprint` method.
self.options = options
#: as blueprints can be registered multiple times with the
#: application and not everything wants to be registered
#: multiple times on it, this attribute can be used to figure
#: out if the blueprint was registered in the past already.
self.first_registration = first_registration
subdomain = self.options.get('subdomain')
if subdomain is None:
subdomain = self.blueprint.subdomain
#: The subdomain that the blueprint should be active for, `None`
#: otherwise.
self.subdomain = subdomain
url_prefix = self.options.get('url_prefix')
if url_prefix is None:
url_prefix = self.blueprint.url_prefix
#: The prefix that should be used for all URLs defined on the
#: blueprint.
self.url_prefix = url_prefix
#: A dictionary with URL defaults that is added to each and every
#: URL that was defined with the blueprint.
self.url_defaults = dict(self.blueprint.url_values_defaults)
self.url_defaults.update(self.options.get('url_defaults', ()))
def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
"""A helper method to register a rule (and optionally a view function)
to the application. The endpoint is automatically prefixed with the
blueprint's name.
"""
if self.url_prefix:
rule = self.url_prefix + rule
options.setdefault('subdomain', self.subdomain)
if endpoint is None:
endpoint = _endpoint_from_view_func(view_func)
defaults = self.url_defaults
if 'defaults' in options:
defaults = dict(defaults, **options.pop('defaults'))
self.app.add_url_rule(rule, '%s.%s' % (self.blueprint.name, endpoint),
view_func, defaults=defaults, **options)
class Blueprint(_PackageBoundObject):
"""Represents a blueprint. A blueprint is an object that records
functions that will be called with the
:class:`~flask.blueprint.BlueprintSetupState` later to register functions
or other things on the main application. See :ref:`blueprints` for more
information.
.. versionadded:: 0.7
"""
warn_on_modifications = False
_got_registered_once = False
def __init__(self, name, import_name, static_folder=None,
static_url_path=None, template_folder=None,
url_prefix=None, subdomain=None, url_defaults=None):
_PackageBoundObject.__init__(self, import_name, template_folder)
self.name = name
self.url_prefix = url_prefix
self.subdomain = subdomain
self.static_folder = static_folder
self.static_url_path = static_url_path
self.deferred_functions = []
self.view_functions = {}
if url_defaults is None:
url_defaults = {}
self.url_values_defaults = url_defaults
def record(self, func):
"""Registers a function that is called when the blueprint is
registered on the application. This function is called with the
state as argument as returned by the :meth:`make_setup_state`
method.
"""
if self._got_registered_once and self.warn_on_modifications:
from warnings import warn
warn(Warning('The blueprint was already registered once '
'but is getting modified now. These changes '
'will not show up.'))
self.deferred_functions.append(func)
def record_once(self, func):
"""Works like :meth:`record` but wraps the function in another
function that will ensure the function is only called once. If the
blueprint is registered a second time on the application, the
function passed is not called.
"""
def wrapper(state):
if state.first_registration:
func(state)
return self.record(update_wrapper(wrapper, func))
def make_setup_state(self, app, options, first_registration=False):
"""Creates an instance of :meth:`~flask.blueprints.BlueprintSetupState`
object that is later passed to the register callback functions.
Subclasses can override this to return a subclass of the setup state.
"""
return BlueprintSetupState(self, app, options, first_registration)
def register(self, app, options, first_registration=False):
"""Called by :meth:`Flask.register_blueprint` to register a blueprint
on the application. This can be overridden to customize the register
behavior. Keyword arguments from
:func:`~flask.Flask.register_blueprint` are directly forwarded to this
method in the `options` dictionary.
"""
self._got_registered_once = True
state = self.make_setup_state(app, options, first_registration)
if self.has_static_folder:
state.add_url_rule(self.static_url_path + '/<path:filename>',
view_func=self.send_static_file,
endpoint='static')
for deferred in self.deferred_functions:
deferred(state)
def route(self, rule, **options):
"""Like :meth:`Flask.route` but for a blueprint. The endpoint for the
:func:`url_for` function is prefixed with the name of the blueprint.
"""
def decorator(f):
endpoint = options.pop("endpoint", f.__name__)
self.add_url_rule(rule, endpoint, f, **options)
return f
return decorator
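    # Usage sketch (illustrative):
    #     bp = Blueprint('admin', __name__, url_prefix='/admin')
    #
    #     @bp.route('/')
    #     def index():
    #         return 'admin index'
    #
    #     app.register_blueprint(bp)  # url_for('admin.index') -> '/admin/'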
def add_url_rule(self, rule, endpoint=None, view_func=None, **options):
"""Like :meth:`Flask.add_url_rule` but for a blueprint. The endpoint for
the :func:`url_for` function is prefixed with the name of the blueprint.
"""
if endpoint:
            assert '.' not in endpoint, "Blueprint endpoints should not contain dots"
self.record(lambda s:
s.add_url_rule(rule, endpoint, view_func, **options))
def endpoint(self, endpoint):
"""Like :meth:`Flask.endpoint` but for a blueprint. This does not
prefix the endpoint with the blueprint name, this has to be done
explicitly by the user of this method. If the endpoint is prefixed
with a `.` it will be registered to the current blueprint, otherwise
it's an application independent endpoint.
"""
def decorator(f):
def register_endpoint(state):
state.app.view_functions[endpoint] = f
self.record_once(register_endpoint)
return f
return decorator
def app_template_filter(self, name=None):
"""Register a custom template filter, available application wide. Like
:meth:`Flask.template_filter` but for a blueprint.
:param name: the optional name of the filter, otherwise the
function name will be used.
"""
def decorator(f):
self.add_app_template_filter(f, name=name)
return f
return decorator
def add_app_template_filter(self, f, name=None):
"""Register a custom template filter, available application wide. Like
:meth:`Flask.add_template_filter` but for a blueprint. Works exactly
like the :meth:`app_template_filter` decorator.
:param name: the optional name of the filter, otherwise the
function name will be used.
"""
def register_template(state):
state.app.jinja_env.filters[name or f.__name__] = f
self.record_once(register_template)
def app_template_test(self, name=None):
"""Register a custom template test, available application wide. Like
:meth:`Flask.template_test` but for a blueprint.
.. versionadded:: 0.10
:param name: the optional name of the test, otherwise the
function name will be used.
"""
def decorator(f):
self.add_app_template_test(f, name=name)
return f
return decorator
def add_app_template_test(self, f, name=None):
"""Register a custom template test, available application wide. Like
:meth:`Flask.add_template_test` but for a blueprint. Works exactly
like the :meth:`app_template_test` decorator.
.. versionadded:: 0.10
:param name: the optional name of the test, otherwise the
function name will be used.
"""
def register_template(state):
state.app.jinja_env.tests[name or f.__name__] = f
self.record_once(register_template)
def app_template_global(self, name=None):
"""Register a custom template global, available application wide. Like
:meth:`Flask.template_global` but for a blueprint.
.. versionadded:: 0.10
:param name: the optional name of the global, otherwise the
function name will be used.
"""
def decorator(f):
self.add_app_template_global(f, name=name)
return f
return decorator
def add_app_template_global(self, f, name=None):
"""Register a custom template global, available application wide. Like
:meth:`Flask.add_template_global` but for a blueprint. Works exactly
like the :meth:`app_template_global` decorator.
.. versionadded:: 0.10
:param name: the optional name of the global, otherwise the
function name will be used.
"""
def register_template(state):
state.app.jinja_env.globals[name or f.__name__] = f
self.record_once(register_template)
def before_request(self, f):
"""Like :meth:`Flask.before_request` but for a blueprint. This function
is only executed before each request that is handled by a function of
that blueprint.
"""
self.record_once(lambda s: s.app.before_request_funcs
.setdefault(self.name, []).append(f))
return f
def before_app_request(self, f):
"""Like :meth:`Flask.before_request`. Such a function is executed
before each request, even if outside of a blueprint.
"""
self.record_once(lambda s: s.app.before_request_funcs
.setdefault(None, []).append(f))
return f
def before_app_first_request(self, f):
"""Like :meth:`Flask.before_first_request`. Such a function is
executed before the first request to the application.
"""
self.record_once(lambda s: s.app.before_first_request_funcs.append(f))
return f
def after_request(self, f):
"""Like :meth:`Flask.after_request` but for a blueprint. This function
is only executed after each request that is handled by a function of
that blueprint.
"""
self.record_once(lambda s: s.app.after_request_funcs
.setdefault(self.name, []).append(f))
return f
def after_app_request(self, f):
"""Like :meth:`Flask.after_request` but for a blueprint. Such a function
is executed after each request, even if outside of the blueprint.
"""
self.record_once(lambda s: s.app.after_request_funcs
.setdefault(None, []).append(f))
return f
def teardown_request(self, f):
"""Like :meth:`Flask.teardown_request` but for a blueprint. This
function is only executed when tearing down requests handled by a
function of that blueprint. Teardown request functions are executed
when the request context is popped, even when no actual request was
performed.
"""
self.record_once(lambda s: s.app.teardown_request_funcs
.setdefault(self.name, []).append(f))
return f
def teardown_app_request(self, f):
"""Like :meth:`Flask.teardown_request` but for a blueprint. Such a
function is executed when tearing down each request, even if outside of
the blueprint.
"""
self.record_once(lambda s: s.app.teardown_request_funcs
.setdefault(None, []).append(f))
return f
def context_processor(self, f):
"""Like :meth:`Flask.context_processor` but for a blueprint. This
function is only executed for requests handled by a blueprint.
"""
self.record_once(lambda s: s.app.template_context_processors
.setdefault(self.name, []).append(f))
return f
def app_context_processor(self, f):
"""Like :meth:`Flask.context_processor` but for a blueprint. Such a
        function is executed for each request, even if outside of the blueprint.
"""
self.record_once(lambda s: s.app.template_context_processors
.setdefault(None, []).append(f))
return f
def app_errorhandler(self, code):
"""Like :meth:`Flask.errorhandler` but for a blueprint. This
handler is used for all requests, even if outside of the blueprint.
"""
def decorator(f):
self.record_once(lambda s: s.app.errorhandler(code)(f))
return f
return decorator
def url_value_preprocessor(self, f):
"""Registers a function as URL value preprocessor for this
blueprint. It's called before the view functions are called and
can modify the url values provided.
"""
self.record_once(lambda s: s.app.url_value_preprocessors
.setdefault(self.name, []).append(f))
return f
def url_defaults(self, f):
"""Callback function for URL defaults for this blueprint. It's called
with the endpoint and values and should update the values passed
in place.
"""
self.record_once(lambda s: s.app.url_default_functions
.setdefault(self.name, []).append(f))
return f
def app_url_value_preprocessor(self, f):
"""Same as :meth:`url_value_preprocessor` but application wide.
"""
self.record_once(lambda s: s.app.url_value_preprocessors
.setdefault(None, []).append(f))
return f
def app_url_defaults(self, f):
"""Same as :meth:`url_defaults` but application wide.
"""
self.record_once(lambda s: s.app.url_default_functions
.setdefault(None, []).append(f))
return f
def errorhandler(self, code_or_exception):
"""Registers an error handler that becomes active for this blueprint
        only. Please be aware that routing does not happen locally to a
        blueprint, so an error handler for 404 usually is not handled by
        a blueprint unless the error is raised inside a view function.
        Another special case is the 500 internal server error, which is
        always looked up from the application.
Otherwise works as the :meth:`~flask.Flask.errorhandler` decorator
of the :class:`~flask.Flask` object.
"""
def decorator(f):
self.record_once(lambda s: s.app._register_error_handler(
self.name, code_or_exception, f))
return f
return decorator
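# Usage sketch (illustrative; not part of Flask itself).  The names
# ``frontend``, ``about`` and ``app`` are hypothetical; assumes the module
# is importable as a package member, e.g. run via ``python -m flask.blueprints``:
if __name__ == '__main__':
    from flask import Flask

    frontend = Blueprint('frontend', __name__, url_prefix='/pages')

    @frontend.route('/about')
    def about():
        return 'about page'

    app = Flask(__name__)
    app.register_blueprint(frontend)  # deferred functions run here
    assert 'frontend.about' in app.view_functions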
|
mit
|
Krossom/python-for-android
|
python3-alpha/python3-src/Misc/Vim/syntax_test.py
|
82
|
1457
|
"""Test file for syntax highlighting of editors.
Meant to cover a wide range of different types of statements and expressions.
Not necessarily sensical or comprehensive (assume that if one exception is
highlighted that all are, for instance).
Extraneous trailing whitespace can't be tested because the svn pre-commit hook
checks for such things.
"""
# Comment
# OPTIONAL: XXX catch your attention
# Statements
from __future__ import with_statement # Import
from sys import path as thing
assert True # keyword
def foo(): # function definition
return []
class Bar(object): # Class definition
def __enter__(self):
pass
def __exit__(self, *args):
pass
foo() # UNCOLOURED: function call
while False: # 'while'
continue
for x in foo(): # 'for'
break
with Bar() as stuff:
pass
if False: pass # 'if'
elif False: pass
else: pass
# Constants
'single-quote', u'unicode' # Strings of all kinds; prefixes not highlighted
"double-quote"
"""triple double-quote"""
'''triple single-quote'''
r'raw'
ur'unicode raw'
'escape\n'
'\04' # octal
'\xFF' # hex
'\u1111' # unicode character
1 # Integral
1L
1.0 # Float
.1
1+2j # Complex
# Expressions
1 and 2 or 3 # Boolean operators
2 < 3 # UNCOLOURED: comparison operators
spam = 42 # UNCOLOURED: assignment
2 + 3 # UNCOLOURED: number operators
[] # UNCOLOURED: list
{} # UNCOLOURED: dict
(1,) # UNCOLOURED: tuple
all # Built-in functions
GeneratorExit # Exceptions
|
apache-2.0
|
starrybeam/samba
|
source4/dsdb/tests/python/token_group.py
|
26
|
20392
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# test tokengroups attribute against internal token calculation
import optparse
import sys
import os
sys.path.insert(0, "bin/python")
import samba
from samba.tests.subunitrun import SubunitOptions, TestProgram
import samba.getopt as options
from samba.auth import system_session
from samba import ldb, dsdb
from samba.samdb import SamDB
from samba.auth import AuthContext
from samba.ndr import ndr_unpack
from samba import gensec
from samba.credentials import Credentials, DONT_USE_KERBEROS
from samba.dsdb import GTYPE_SECURITY_GLOBAL_GROUP, GTYPE_SECURITY_UNIVERSAL_GROUP
import samba.tests
from samba.tests import delete_force
from samba.auth import AUTH_SESSION_INFO_DEFAULT_GROUPS, AUTH_SESSION_INFO_AUTHENTICATED, AUTH_SESSION_INFO_SIMPLE_PRIVILEGES
parser = optparse.OptionParser("ldap.py [options] <host>")
sambaopts = options.SambaOptions(parser)
parser.add_option_group(sambaopts)
parser.add_option_group(options.VersionOptions(parser))
# use command line creds if available
credopts = options.CredentialsOptions(parser)
parser.add_option_group(credopts)
subunitopts = SubunitOptions(parser)
parser.add_option_group(subunitopts)
opts, args = parser.parse_args()
if len(args) < 1:
parser.print_usage()
sys.exit(1)
url = args[0]
lp = sambaopts.get_loadparm()
creds = credopts.get_credentials(lp)
creds.set_gensec_features(creds.get_gensec_features() | gensec.FEATURE_SEAL)
def closure(vSet, wSet, aSet):
for edge in aSet:
start, end = edge
if start in wSet:
if end not in wSet and end in vSet:
wSet.add(end)
closure(vSet, wSet, aSet)
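# Illustration (hypothetical data, not part of the test): closure() grows
# wSet with every vertex of vSet reachable from the current wSet along the
# directed edges in aSet.
#
#     vSet = set(["a", "b", "c", "d"])
#     aSet = set([("a", "b"), ("b", "c")])
#     wSet = set(["a"])
#     closure(vSet, wSet, aSet)   # wSet is now {"a", "b", "c"}; "d" untouched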
class StaticTokenTest(samba.tests.TestCase):
def setUp(self):
super(StaticTokenTest, self).setUp()
self.ldb = SamDB(url, credentials=creds, session_info=system_session(lp), lp=lp)
self.base_dn = self.ldb.domain_dn()
res = self.ldb.search("", scope=ldb.SCOPE_BASE, attrs=["tokenGroups"])
self.assertEquals(len(res), 1)
self.user_sid_dn = "<SID=%s>" % str(ndr_unpack(samba.dcerpc.security.dom_sid, res[0]["tokenGroups"][0]))
session_info_flags = ( AUTH_SESSION_INFO_DEFAULT_GROUPS |
AUTH_SESSION_INFO_AUTHENTICATED |
AUTH_SESSION_INFO_SIMPLE_PRIVILEGES)
session = samba.auth.user_session(self.ldb, lp_ctx=lp, dn=self.user_sid_dn,
session_info_flags=session_info_flags)
token = session.security_token
self.user_sids = []
for s in token.sids:
self.user_sids.append(str(s))
def test_rootDSE_tokenGroups(self):
"""Testing rootDSE tokengroups against internal calculation"""
if not url.startswith("ldap"):
self.fail(msg="This test is only valid on ldap")
res = self.ldb.search("", scope=ldb.SCOPE_BASE, attrs=["tokenGroups"])
self.assertEquals(len(res), 1)
print("Getting tokenGroups from rootDSE")
tokengroups = []
for sid in res[0]['tokenGroups']:
tokengroups.append(str(ndr_unpack(samba.dcerpc.security.dom_sid, sid)))
sidset1 = set(tokengroups)
sidset2 = set(self.user_sids)
if len(sidset1.difference(sidset2)):
print("token sids don't match")
print("tokengroups: %s" % tokengroups)
print("calculated : %s" % self.user_sids)
print("difference : %s" % sidset1.difference(sidset2))
self.fail(msg="calculated groups don't match against rootDSE tokenGroups")
def test_dn_tokenGroups(self):
print("Getting tokenGroups from user DN")
res = self.ldb.search(self.user_sid_dn, scope=ldb.SCOPE_BASE, attrs=["tokenGroups"])
self.assertEquals(len(res), 1)
dn_tokengroups = []
for sid in res[0]['tokenGroups']:
dn_tokengroups.append(str(ndr_unpack(samba.dcerpc.security.dom_sid, sid)))
sidset1 = set(dn_tokengroups)
sidset2 = set(self.user_sids)
if len(sidset1.difference(sidset2)):
print("token sids don't match")
print("difference : %s" % sidset1.difference(sidset2))
self.fail(msg="calculated groups don't match against user DN tokenGroups")
def test_pac_groups(self):
settings = {}
settings["lp_ctx"] = lp
settings["target_hostname"] = lp.get("netbios name")
gensec_client = gensec.Security.start_client(settings)
gensec_client.set_credentials(creds)
gensec_client.want_feature(gensec.FEATURE_SEAL)
gensec_client.start_mech_by_sasl_name("GSSAPI")
auth_context = AuthContext(lp_ctx=lp, ldb=self.ldb, methods=[])
gensec_server = gensec.Security.start_server(settings, auth_context)
machine_creds = Credentials()
machine_creds.guess(lp)
machine_creds.set_machine_account(lp)
gensec_server.set_credentials(machine_creds)
gensec_server.want_feature(gensec.FEATURE_SEAL)
gensec_server.start_mech_by_sasl_name("GSSAPI")
client_finished = False
server_finished = False
server_to_client = ""
# Run the actual call loop.
        while not client_finished and not server_finished:
            if not client_finished:
                print("running client gensec_update")
                (client_finished, client_to_server) = gensec_client.update(server_to_client)
            if not server_finished:
                print("running server gensec_update")
                (server_finished, server_to_client) = gensec_server.update(client_to_server)
session = gensec_server.session_info()
token = session.security_token
pac_sids = []
for s in token.sids:
pac_sids.append(str(s))
sidset1 = set(pac_sids)
sidset2 = set(self.user_sids)
if len(sidset1.difference(sidset2)):
print("token sids don't match")
print("difference : %s" % sidset1.difference(sidset2))
self.fail(msg="calculated groups don't match against user PAC tokenGroups")
class DynamicTokenTest(samba.tests.TestCase):
def get_creds(self, target_username, target_password):
creds_tmp = Credentials()
creds_tmp.set_username(target_username)
creds_tmp.set_password(target_password)
creds_tmp.set_domain(creds.get_domain())
creds_tmp.set_realm(creds.get_realm())
creds_tmp.set_workstation(creds.get_workstation())
creds_tmp.set_gensec_features(creds_tmp.get_gensec_features()
| gensec.FEATURE_SEAL)
return creds_tmp
def get_ldb_connection(self, target_username, target_password):
creds_tmp = self.get_creds(target_username, target_password)
ldb_target = SamDB(url=url, credentials=creds_tmp, lp=lp)
return ldb_target
def setUp(self):
super(DynamicTokenTest, self).setUp()
self.admin_ldb = SamDB(url, credentials=creds, session_info=system_session(lp), lp=lp)
self.base_dn = self.admin_ldb.domain_dn()
self.test_user = "tokengroups_user1"
self.test_user_pass = "samba123@"
self.admin_ldb.newuser(self.test_user, self.test_user_pass)
self.test_group0 = "tokengroups_group0"
self.admin_ldb.newgroup(self.test_group0, grouptype=dsdb.GTYPE_SECURITY_DOMAIN_LOCAL_GROUP)
res = self.admin_ldb.search(base="cn=%s,cn=users,%s" % (self.test_group0, self.base_dn),
attrs=["objectSid"], scope=ldb.SCOPE_BASE)
self.test_group0_sid = ndr_unpack(samba.dcerpc.security.dom_sid, res[0]["objectSid"][0])
self.admin_ldb.add_remove_group_members(self.test_group0, [self.test_user],
add_members_operation=True)
self.test_group1 = "tokengroups_group1"
self.admin_ldb.newgroup(self.test_group1, grouptype=dsdb.GTYPE_SECURITY_GLOBAL_GROUP)
res = self.admin_ldb.search(base="cn=%s,cn=users,%s" % (self.test_group1, self.base_dn),
attrs=["objectSid"], scope=ldb.SCOPE_BASE)
self.test_group1_sid = ndr_unpack(samba.dcerpc.security.dom_sid, res[0]["objectSid"][0])
self.admin_ldb.add_remove_group_members(self.test_group1, [self.test_user],
add_members_operation=True)
self.test_group2 = "tokengroups_group2"
self.admin_ldb.newgroup(self.test_group2, grouptype=dsdb.GTYPE_SECURITY_UNIVERSAL_GROUP)
res = self.admin_ldb.search(base="cn=%s,cn=users,%s" % (self.test_group2, self.base_dn),
attrs=["objectSid"], scope=ldb.SCOPE_BASE)
self.test_group2_sid = ndr_unpack(samba.dcerpc.security.dom_sid, res[0]["objectSid"][0])
self.admin_ldb.add_remove_group_members(self.test_group2, [self.test_user],
add_members_operation=True)
self.ldb = self.get_ldb_connection(self.test_user, self.test_user_pass)
res = self.ldb.search("", scope=ldb.SCOPE_BASE, attrs=["tokenGroups"])
self.assertEquals(len(res), 1)
self.user_sid_dn = "<SID=%s>" % str(ndr_unpack(samba.dcerpc.security.dom_sid, res[0]["tokenGroups"][0]))
res = self.ldb.search(self.user_sid_dn, scope=ldb.SCOPE_BASE, attrs=[])
self.assertEquals(len(res), 1)
self.test_user_dn = res[0].dn
session_info_flags = ( AUTH_SESSION_INFO_DEFAULT_GROUPS |
AUTH_SESSION_INFO_AUTHENTICATED |
AUTH_SESSION_INFO_SIMPLE_PRIVILEGES)
session = samba.auth.user_session(self.ldb, lp_ctx=lp, dn=self.user_sid_dn,
session_info_flags=session_info_flags)
token = session.security_token
self.user_sids = []
for s in token.sids:
self.user_sids.append(str(s))
def tearDown(self):
super(DynamicTokenTest, self).tearDown()
delete_force(self.admin_ldb, "CN=%s,%s,%s" %
(self.test_user, "cn=users", self.base_dn))
delete_force(self.admin_ldb, "CN=%s,%s,%s" %
(self.test_group0, "cn=users", self.base_dn))
delete_force(self.admin_ldb, "CN=%s,%s,%s" %
(self.test_group1, "cn=users", self.base_dn))
delete_force(self.admin_ldb, "CN=%s,%s,%s" %
(self.test_group2, "cn=users", self.base_dn))
def test_rootDSE_tokenGroups(self):
"""Testing rootDSE tokengroups against internal calculation"""
if not url.startswith("ldap"):
self.fail(msg="This test is only valid on ldap")
res = self.ldb.search("", scope=ldb.SCOPE_BASE, attrs=["tokenGroups"])
self.assertEquals(len(res), 1)
print("Getting tokenGroups from rootDSE")
tokengroups = []
for sid in res[0]['tokenGroups']:
tokengroups.append(str(ndr_unpack(samba.dcerpc.security.dom_sid, sid)))
sidset1 = set(tokengroups)
sidset2 = set(self.user_sids)
if len(sidset1.difference(sidset2)):
print("token sids don't match")
print("tokengroups: %s" % tokengroups)
print("calculated : %s" % self.user_sids)
print("difference : %s" % sidset1.difference(sidset2))
self.fail(msg="calculated groups don't match against rootDSE tokenGroups")
def test_dn_tokenGroups(self):
print("Getting tokenGroups from user DN")
res = self.ldb.search(self.user_sid_dn, scope=ldb.SCOPE_BASE, attrs=["tokenGroups"])
self.assertEquals(len(res), 1)
dn_tokengroups = []
for sid in res[0]['tokenGroups']:
dn_tokengroups.append(str(ndr_unpack(samba.dcerpc.security.dom_sid, sid)))
sidset1 = set(dn_tokengroups)
sidset2 = set(self.user_sids)
if len(sidset1.difference(sidset2)):
print("token sids don't match")
print("difference : %s" % sidset1.difference(sidset2))
self.fail(msg="calculated groups don't match against user DN tokenGroups")
def test_pac_groups(self):
settings = {}
settings["lp_ctx"] = lp
settings["target_hostname"] = lp.get("netbios name")
gensec_client = gensec.Security.start_client(settings)
gensec_client.set_credentials(self.get_creds(self.test_user, self.test_user_pass))
gensec_client.want_feature(gensec.FEATURE_SEAL)
gensec_client.start_mech_by_sasl_name("GSSAPI")
auth_context = AuthContext(lp_ctx=lp, ldb=self.ldb, methods=[])
gensec_server = gensec.Security.start_server(settings, auth_context)
machine_creds = Credentials()
machine_creds.guess(lp)
machine_creds.set_machine_account(lp)
gensec_server.set_credentials(machine_creds)
gensec_server.want_feature(gensec.FEATURE_SEAL)
gensec_server.start_mech_by_sasl_name("GSSAPI")
client_finished = False
server_finished = False
server_to_client = ""
# Run the actual call loop.
        while not client_finished and not server_finished:
            if not client_finished:
                print("running client gensec_update")
                (client_finished, client_to_server) = gensec_client.update(server_to_client)
            if not server_finished:
                print("running server gensec_update")
                (server_finished, server_to_client) = gensec_server.update(client_to_server)
session = gensec_server.session_info()
token = session.security_token
pac_sids = []
for s in token.sids:
pac_sids.append(str(s))
sidset1 = set(pac_sids)
sidset2 = set(self.user_sids)
if len(sidset1.difference(sidset2)):
print("token sids don't match")
print("difference : %s" % sidset1.difference(sidset2))
self.fail(msg="calculated groups don't match against user PAC tokenGroups")
def test_tokenGroups_manual(self):
# Manually run the tokenGroups algorithm from MS-ADTS 3.1.1.4.5.19 and MS-DRSR 4.1.8.3
# and compare the result
res = self.admin_ldb.search(base=self.base_dn, scope=ldb.SCOPE_SUBTREE,
expression="(|(objectclass=user)(objectclass=group))",
attrs=["memberOf"])
aSet = set()
aSetR = set()
vSet = set()
for obj in res:
if "memberOf" in obj:
for dn in obj["memberOf"]:
first = obj.dn.get_casefold()
second = ldb.Dn(self.admin_ldb, dn).get_casefold()
aSet.add((first, second))
aSetR.add((second, first))
vSet.add(first)
vSet.add(second)
res = self.admin_ldb.search(base=self.base_dn, scope=ldb.SCOPE_SUBTREE,
expression="(objectclass=user)",
attrs=["primaryGroupID"])
for obj in res:
if "primaryGroupID" in obj:
sid = "%s-%d" % (self.admin_ldb.get_domain_sid(), int(obj["primaryGroupID"][0]))
res2 = self.admin_ldb.search(base="<SID=%s>" % sid, scope=ldb.SCOPE_BASE,
attrs=[])
first = obj.dn.get_casefold()
second = res2[0].dn.get_casefold()
aSet.add((first, second))
aSetR.add((second, first))
vSet.add(first)
vSet.add(second)
wSet = set()
wSet.add(self.test_user_dn.get_casefold())
closure(vSet, wSet, aSet)
wSet.remove(self.test_user_dn.get_casefold())
tokenGroupsSet = set()
res = self.ldb.search(self.user_sid_dn, scope=ldb.SCOPE_BASE, attrs=["tokenGroups"])
self.assertEquals(len(res), 1)
dn_tokengroups = []
for sid in res[0]['tokenGroups']:
sid = ndr_unpack(samba.dcerpc.security.dom_sid, sid)
res3 = self.admin_ldb.search(base="<SID=%s>" % sid, scope=ldb.SCOPE_BASE,
attrs=[])
tokenGroupsSet.add(res3[0].dn.get_casefold())
if len(wSet.difference(tokenGroupsSet)):
self.fail(msg="additional calculated: %s" % wSet.difference(tokenGroupsSet))
if len(tokenGroupsSet.difference(wSet)):
self.fail(msg="additional tokenGroups: %s" % tokenGroupsSet.difference(wSet))
def filtered_closure(self, wSet, filter_grouptype):
res = self.admin_ldb.search(base=self.base_dn, scope=ldb.SCOPE_SUBTREE,
expression="(|(objectclass=user)(objectclass=group))",
attrs=["memberOf"])
aSet = set()
aSetR = set()
vSet = set()
for obj in res:
vSet.add(obj.dn.get_casefold())
if "memberOf" in obj:
for dn in obj["memberOf"]:
first = obj.dn.get_casefold()
second = ldb.Dn(self.admin_ldb, dn).get_casefold()
aSet.add((first, second))
aSetR.add((second, first))
vSet.add(first)
vSet.add(second)
res = self.admin_ldb.search(base=self.base_dn, scope=ldb.SCOPE_SUBTREE,
expression="(objectclass=user)",
attrs=["primaryGroupID"])
for obj in res:
if "primaryGroupID" in obj:
sid = "%s-%d" % (self.admin_ldb.get_domain_sid(), int(obj["primaryGroupID"][0]))
res2 = self.admin_ldb.search(base="<SID=%s>" % sid, scope=ldb.SCOPE_BASE,
attrs=[])
first = obj.dn.get_casefold()
second = res2[0].dn.get_casefold()
aSet.add((first, second))
aSetR.add((second, first))
vSet.add(first)
vSet.add(second)
uSet = set()
for v in vSet:
res_group = self.admin_ldb.search(base=v, scope=ldb.SCOPE_BASE,
attrs=["groupType"],
expression="objectClass=group")
if len(res_group) == 1:
if hex(int(res_group[0]["groupType"][0]) & 0x00000000FFFFFFFF) == hex(filter_grouptype):
uSet.add(v)
else:
uSet.add(v)
closure(uSet, wSet, aSet)
def test_tokenGroupsGlobalAndUniversal_manual(self):
# Manually run the tokenGroups algorithm from MS-ADTS 3.1.1.4.5.19 and MS-DRSR 4.1.8.3
# and compare the result
# The variable names come from MS-ADTS May 15, 2014
S = set()
S.add(self.test_user_dn.get_casefold())
self.filtered_closure(S, GTYPE_SECURITY_GLOBAL_GROUP)
T = set()
# Not really a SID, we do this on DNs...
for sid in S:
X = set()
X.add(sid)
self.filtered_closure(X, GTYPE_SECURITY_UNIVERSAL_GROUP)
T = T.union(X)
T.remove(self.test_user_dn.get_casefold())
tokenGroupsSet = set()
res = self.ldb.search(self.user_sid_dn, scope=ldb.SCOPE_BASE, attrs=["tokenGroupsGlobalAndUniversal"])
self.assertEquals(len(res), 1)
dn_tokengroups = []
for sid in res[0]['tokenGroupsGlobalAndUniversal']:
sid = ndr_unpack(samba.dcerpc.security.dom_sid, sid)
res3 = self.admin_ldb.search(base="<SID=%s>" % sid, scope=ldb.SCOPE_BASE,
attrs=[])
tokenGroupsSet.add(res3[0].dn.get_casefold())
if len(T.difference(tokenGroupsSet)):
self.fail(msg="additional calculated: %s" % T.difference(tokenGroupsSet))
if len(tokenGroupsSet.difference(T)):
self.fail(msg="additional tokenGroupsGlobalAndUniversal: %s" % tokenGroupsSet.difference(T))
if not "://" in url:
if os.path.isfile(url):
url = "tdb://%s" % url
else:
url = "ldap://%s" % url
TestProgram(module=__name__, opts=subunitopts)
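# Typical invocation (hypothetical host and credentials; requires a running
# Samba AD DC and the samba Python bindings on sys.path):
#
#     python token_group.py ldap://dc1.example.com -U 'EXAMPLE\administrator%Passw0rd'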
|
gpl-3.0
|
weimingtom/python-for-android
|
python3-alpha/python3-src/Lib/test/test_pkgimport.py
|
57
|
2757
|
import os
import sys
import shutil
import string
import random
import tempfile
import unittest
from imp import cache_from_source
from test.support import run_unittest
class TestImport(unittest.TestCase):
def __init__(self, *args, **kw):
self.package_name = 'PACKAGE_'
while self.package_name in sys.modules:
            self.package_name += random.choice(string.ascii_letters)
self.module_name = self.package_name + '.foo'
unittest.TestCase.__init__(self, *args, **kw)
def remove_modules(self):
for module_name in (self.package_name, self.module_name):
if module_name in sys.modules:
del sys.modules[module_name]
def setUp(self):
self.test_dir = tempfile.mkdtemp()
sys.path.append(self.test_dir)
self.package_dir = os.path.join(self.test_dir,
self.package_name)
os.mkdir(self.package_dir)
open(os.path.join(self.package_dir, '__init__.py'), 'w').close()
self.module_path = os.path.join(self.package_dir, 'foo.py')
def tearDown(self):
shutil.rmtree(self.test_dir)
self.assertNotEqual(sys.path.count(self.test_dir), 0)
sys.path.remove(self.test_dir)
self.remove_modules()
def rewrite_file(self, contents):
compiled_path = cache_from_source(self.module_path)
if os.path.exists(compiled_path):
os.remove(compiled_path)
with open(self.module_path, 'w') as f:
f.write(contents)
def test_package_import__semantics(self):
# Generate a couple of broken modules to try importing.
# ...try loading the module when there's a SyntaxError
self.rewrite_file('for')
try: __import__(self.module_name)
except SyntaxError: pass
else: raise RuntimeError('Failed to induce SyntaxError') # self.fail()?
self.assertNotIn(self.module_name, sys.modules)
self.assertFalse(hasattr(sys.modules[self.package_name], 'foo'))
# ...make up a variable name that isn't bound in __builtins__
var = 'a'
while var in dir(__builtins__):
            var += random.choice(string.ascii_letters)
# ...make a module that just contains that
self.rewrite_file(var)
try: __import__(self.module_name)
except NameError: pass
else: raise RuntimeError('Failed to induce NameError.')
# ...now change the module so that the NameError doesn't
# happen
self.rewrite_file('%s = 1' % var)
module = __import__(self.module_name).foo
self.assertEqual(getattr(module, var), 1)
def test_main():
run_unittest(TestImport)
if __name__ == "__main__":
test_main()
|
apache-2.0
|
rcbops/python-django-buildpackage
|
django/test/simple.py
|
150
|
15012
|
import unittest as real_unittest
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.models import get_app, get_apps
from django.test import _doctest as doctest
from django.test.utils import setup_test_environment, teardown_test_environment
from django.test.testcases import OutputChecker, DocTestRunner, TestCase
from django.utils import unittest
try:
all
except NameError:
from django.utils.itercompat import all
__all__ = ('DjangoTestRunner', 'DjangoTestSuiteRunner', 'run_tests')
# The module name for tests outside models.py
TEST_MODULE = 'tests'
doctestOutputChecker = OutputChecker()
class DjangoTestRunner(unittest.TextTestRunner):
def __init__(self, *args, **kwargs):
import warnings
warnings.warn(
"DjangoTestRunner is deprecated; it's functionality is indistinguishable from TextTestRunner",
PendingDeprecationWarning
)
super(DjangoTestRunner, self).__init__(*args, **kwargs)
def get_tests(app_module):
try:
app_path = app_module.__name__.split('.')[:-1]
test_module = __import__('.'.join(app_path + [TEST_MODULE]), {}, {}, TEST_MODULE)
    except ImportError:
# Couldn't import tests.py. Was it due to a missing file, or
# due to an import error in a tests.py that actually exists?
import os.path
from imp import find_module
try:
mod = find_module(TEST_MODULE, [os.path.dirname(app_module.__file__)])
except ImportError:
# 'tests' module doesn't exist. Move on.
test_module = None
else:
# The module exists, so there must be an import error in the
# test module itself. We don't need the module; so if the
# module was a single file module (i.e., tests.py), close the file
# handle returned by find_module. Otherwise, the test module
# is a directory, and there is nothing to close.
if mod[0]:
mod[0].close()
raise
return test_module
def build_suite(app_module):
"Create a complete Django test suite for the provided application module"
suite = unittest.TestSuite()
# Load unit and doctests in the models.py module. If module has
# a suite() method, use it. Otherwise build the test suite ourselves.
if hasattr(app_module, 'suite'):
suite.addTest(app_module.suite())
else:
suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(app_module))
try:
suite.addTest(doctest.DocTestSuite(app_module,
checker=doctestOutputChecker,
runner=DocTestRunner))
except ValueError:
# No doc tests in models.py
pass
# Check to see if a separate 'tests' module exists parallel to the
# models module
test_module = get_tests(app_module)
if test_module:
# Load unit and doctests in the tests.py module. If module has
# a suite() method, use it. Otherwise build the test suite ourselves.
if hasattr(test_module, 'suite'):
suite.addTest(test_module.suite())
else:
suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(test_module))
try:
suite.addTest(doctest.DocTestSuite(test_module,
checker=doctestOutputChecker,
runner=DocTestRunner))
except ValueError:
# No doc tests in tests.py
pass
return suite
def build_test(label):
"""Construct a test case with the specified label. Label should be of the
form model.TestClass or model.TestClass.test_method. Returns an
instantiated test or test suite corresponding to the label provided.
"""
parts = label.split('.')
if len(parts) < 2 or len(parts) > 3:
raise ValueError("Test label '%s' should be of the form app.TestCase or app.TestCase.test_method" % label)
#
# First, look for TestCase instances with a name that matches
#
app_module = get_app(parts[0])
test_module = get_tests(app_module)
TestClass = getattr(app_module, parts[1], None)
# Couldn't find the test class in models.py; look in tests.py
if TestClass is None:
if test_module:
TestClass = getattr(test_module, parts[1], None)
try:
if issubclass(TestClass, (unittest.TestCase, real_unittest.TestCase)):
if len(parts) == 2: # label is app.TestClass
try:
return unittest.TestLoader().loadTestsFromTestCase(TestClass)
except TypeError:
raise ValueError("Test label '%s' does not refer to a test class" % label)
else: # label is app.TestClass.test_method
return TestClass(parts[2])
except TypeError:
        # TestClass isn't a class at all (it may be None or some other
        # attribute), so it can't be a test case; fall through to the
        # doctest lookup below.
pass
#
# If there isn't a TestCase, look for a doctest that matches
#
tests = []
for module in app_module, test_module:
try:
doctests = doctest.DocTestSuite(module,
checker=doctestOutputChecker,
runner=DocTestRunner)
# Now iterate over the suite, looking for doctests whose name
# matches the pattern that was given
for test in doctests:
if test._dt_test.name in (
'%s.%s' % (module.__name__, '.'.join(parts[1:])),
'%s.__test__.%s' % (module.__name__, '.'.join(parts[1:]))):
tests.append(test)
except ValueError:
# No doctests found.
pass
# If no tests were found, then we were given a bad test label.
if not tests:
raise ValueError("Test label '%s' does not refer to a test" % label)
# Construct a suite out of the tests that matched.
return unittest.TestSuite(tests)
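# Illustrative labels (hypothetical app and test names):
#
#     build_test('polls.PollTests')             # whole TestCase class
#     build_test('polls.PollTests.test_index')  # a single test method
#     build_test('polls.my_doctest_func')       # a matching doctest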
def partition_suite(suite, classes, bins):
"""
Partitions a test suite by test type.
classes is a sequence of types
bins is a sequence of TestSuites, one more than classes
    Tests of type classes[i] are added to bins[i];
    tests with no match found in classes are placed in bins[-1].
"""
for test in suite:
if isinstance(test, unittest.TestSuite):
partition_suite(test, classes, bins)
else:
for i in range(len(classes)):
if isinstance(test, classes[i]):
bins[i].addTest(test)
break
else:
bins[-1].addTest(test)
def reorder_suite(suite, classes):
"""
Reorders a test suite by test type.
classes is a sequence of types
    All tests of type classes[0] are placed first, then tests of type classes[1], etc.
Tests with no match in classes are placed last.
"""
class_count = len(classes)
bins = [unittest.TestSuite() for i in range(class_count+1)]
partition_suite(suite, classes, bins)
for i in range(class_count):
bins[0].addTests(bins[i+1])
return bins[0]
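# Illustration (hypothetical suite): with classes=(TestCase,), reorder_suite
# moves all TestCase-based tests ahead of everything else, so a suite of
# [DocTest, TestCase, DocTest] comes back ordered [TestCase, DocTest, DocTest],
# preserving relative order within each group.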
def dependency_ordered(test_databases, dependencies):
"""Reorder test_databases into an order that honors the dependencies
described in TEST_DEPENDENCIES.
"""
ordered_test_databases = []
resolved_databases = set()
while test_databases:
changed = False
deferred = []
while test_databases:
signature, (db_name, aliases) = test_databases.pop()
dependencies_satisfied = True
for alias in aliases:
if alias in dependencies:
if all(a in resolved_databases for a in dependencies[alias]):
# all dependencies for this alias are satisfied
dependencies.pop(alias)
resolved_databases.add(alias)
else:
dependencies_satisfied = False
else:
resolved_databases.add(alias)
if dependencies_satisfied:
ordered_test_databases.append((signature, (db_name, aliases)))
changed = True
else:
deferred.append((signature, (db_name, aliases)))
if not changed:
raise ImproperlyConfigured("Circular dependency in TEST_DEPENDENCIES")
test_databases = deferred
return ordered_test_databases
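# Illustration (hypothetical aliases): given DATABASES entries 'default' and
# 'other' where 'other' has TEST_DEPENDENCIES = ['default'], the 'default'
# signature is ordered first so its test database exists before 'other' is
# created; a dependency cycle between aliases raises ImproperlyConfigured.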
class DjangoTestSuiteRunner(object):
def __init__(self, verbosity=1, interactive=True, failfast=True, **kwargs):
self.verbosity = verbosity
self.interactive = interactive
self.failfast = failfast
def setup_test_environment(self, **kwargs):
setup_test_environment()
settings.DEBUG = False
unittest.installHandler()
def build_suite(self, test_labels, extra_tests=None, **kwargs):
suite = unittest.TestSuite()
if test_labels:
for label in test_labels:
if '.' in label:
suite.addTest(build_test(label))
else:
app = get_app(label)
suite.addTest(build_suite(app))
else:
for app in get_apps():
suite.addTest(build_suite(app))
if extra_tests:
for test in extra_tests:
suite.addTest(test)
return reorder_suite(suite, (TestCase,))
def setup_databases(self, **kwargs):
from django.db import connections, DEFAULT_DB_ALIAS
# First pass -- work out which databases actually need to be created,
# and which ones are test mirrors or duplicate entries in DATABASES
mirrored_aliases = {}
test_databases = {}
dependencies = {}
for alias in connections:
connection = connections[alias]
if connection.settings_dict['TEST_MIRROR']:
# If the database is marked as a test mirror, save
# the alias.
mirrored_aliases[alias] = connection.settings_dict['TEST_MIRROR']
else:
# Store a tuple with DB parameters that uniquely identify it.
# If we have two aliases with the same values for that tuple,
# we only need to create the test database once.
item = test_databases.setdefault(
connection.creation.test_db_signature(),
(connection.settings_dict['NAME'], [])
)
item[1].append(alias)
if 'TEST_DEPENDENCIES' in connection.settings_dict:
dependencies[alias] = connection.settings_dict['TEST_DEPENDENCIES']
else:
if alias != DEFAULT_DB_ALIAS:
dependencies[alias] = connection.settings_dict.get('TEST_DEPENDENCIES', [DEFAULT_DB_ALIAS])
# Second pass -- actually create the databases.
old_names = []
mirrors = []
for signature, (db_name, aliases) in dependency_ordered(test_databases.items(), dependencies):
# Actually create the database for the first connection
connection = connections[aliases[0]]
old_names.append((connection, db_name, True))
test_db_name = connection.creation.create_test_db(self.verbosity, autoclobber=not self.interactive)
for alias in aliases[1:]:
connection = connections[alias]
if db_name:
old_names.append((connection, db_name, False))
connection.settings_dict['NAME'] = test_db_name
else:
# If settings_dict['NAME'] isn't defined, we have a backend where
# the name isn't important -- e.g., SQLite, which uses :memory:.
# Force create the database instead of assuming it's a duplicate.
old_names.append((connection, db_name, True))
connection.creation.create_test_db(self.verbosity, autoclobber=not self.interactive)
for alias, mirror_alias in mirrored_aliases.items():
mirrors.append((alias, connections[alias].settings_dict['NAME']))
connections[alias].settings_dict['NAME'] = connections[mirror_alias].settings_dict['NAME']
return old_names, mirrors
def run_suite(self, suite, **kwargs):
return unittest.TextTestRunner(verbosity=self.verbosity, failfast=self.failfast).run(suite)
def teardown_databases(self, old_config, **kwargs):
from django.db import connections
old_names, mirrors = old_config
# Point all the mirrors back to the originals
for alias, old_name in mirrors:
connections[alias].settings_dict['NAME'] = old_name
# Destroy all the non-mirror databases
for connection, old_name, destroy in old_names:
if destroy:
connection.creation.destroy_test_db(old_name, self.verbosity)
else:
connection.settings_dict['NAME'] = old_name
def teardown_test_environment(self, **kwargs):
unittest.removeHandler()
teardown_test_environment()
def suite_result(self, suite, result, **kwargs):
return len(result.failures) + len(result.errors)
def run_tests(self, test_labels, extra_tests=None, **kwargs):
"""
Run the unit tests for all the test labels in the provided list.
Labels must be of the form:
- app.TestClass.test_method
Run a single specific test method
- app.TestClass
Run all the test methods in a given class
- app
Search for doctests and unittests in the named application.
When looking for tests, the test runner will look in the models and
tests modules for the application.
A list of 'extra' tests may also be provided; these tests
will be added to the test suite.
Returns the number of tests that failed.
"""
self.setup_test_environment()
suite = self.build_suite(test_labels, extra_tests)
old_config = self.setup_databases()
result = self.run_suite(suite)
self.teardown_databases(old_config)
self.teardown_test_environment()
return self.suite_result(suite, result)
def run_tests(test_labels, verbosity=1, interactive=True, failfast=False, extra_tests=None):
import warnings
warnings.warn(
'The run_tests() test runner has been deprecated in favor of DjangoTestSuiteRunner.',
DeprecationWarning
)
test_runner = DjangoTestSuiteRunner(verbosity=verbosity, interactive=interactive, failfast=failfast)
return test_runner.run_tests(test_labels, extra_tests=extra_tests)
|
bsd-3-clause
|
Meee32/Net-kgw
|
share/qt/extract_strings_qt.py
|
2945
|
1844
|
#!/usr/bin/python
'''
Extract _("...") strings for translation and convert to Qt4 stringdefs so that
they can be picked up by Qt linguist.
'''
from subprocess import Popen, PIPE
import glob
import operator
OUT_CPP="src/qt/bitcoinstrings.cpp"
EMPTY=['""']
def parse_po(text):
"""
Parse 'po' format produced by xgettext.
Return a list of (msgid,msgstr) tuples.
"""
messages = []
msgid = []
msgstr = []
in_msgid = False
in_msgstr = False
for line in text.split('\n'):
line = line.rstrip('\r')
if line.startswith('msgid '):
if in_msgstr:
messages.append((msgid, msgstr))
in_msgstr = False
# message start
in_msgid = True
msgid = [line[6:]]
elif line.startswith('msgstr '):
in_msgid = False
in_msgstr = True
msgstr = [line[7:]]
elif line.startswith('"'):
if in_msgid:
msgid.append(line)
if in_msgstr:
msgstr.append(line)
if in_msgstr:
messages.append((msgid, msgstr))
return messages
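# Illustration (hypothetical xgettext output): parse_po() keeps the raw
# quoted lines so they can be re-emitted verbatim below, e.g.
#
#     parse_po('msgid "hello"\nmsgstr ""\n')
#     # -> [(['"hello"'], ['""'])]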
files = glob.glob('src/*.cpp') + glob.glob('src/*.h')
# xgettext -n --keyword=_ $FILES
child = Popen(['xgettext','--output=-','-n','--keyword=_'] + files, stdout=PIPE)
(out, err) = child.communicate()
messages = parse_po(out)
f = open(OUT_CPP, 'w')
f.write("""#include <QtGlobal>
// Automatically generated by extract_strings.py
#ifdef __GNUC__
#define UNUSED __attribute__((unused))
#else
#define UNUSED
#endif
""")
f.write('static const char UNUSED *bitcoin_strings[] = {\n')
messages.sort(key=operator.itemgetter(0))
for (msgid, msgstr) in messages:
if msgid != EMPTY:
f.write('QT_TRANSLATE_NOOP("bitcoin-core", %s),\n' % ('\n'.join(msgid)))
f.write('};')
f.close()
|
mit
|
msmolens/VTK
|
Interaction/Widgets/Testing/Python/TestSphereWidget.py
|
21
|
14176
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
=========================================================================
Program: Visualization Toolkit
Module: TestNamedColorsIntegration.py
Copyright (c) Ken Martin, Will Schroeder, Bill Lorensen
All rights reserved.
See Copyright.txt or http://www.kitware.com/Copyright.htm for details.
This software is distributed WITHOUT ANY WARRANTY; without even
the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the above copyright notice for more information.
=========================================================================
'''
import vtk
import vtk.test.Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
class TestSphereWidget(vtk.test.Testing.vtkTest):
def testSphereWidget(self):
# This example demonstrates how to use the vtkSphereWidget to control the
# position of a light.
# These are the pre-recorded events
Recording = \
"# StreamVersion 1\n\
CharEvent 23 266 0 0 105 1 i\n\
KeyReleaseEvent 23 266 0 0 105 1 i\n\
EnterEvent 69 294 0 0 0 0 i\n\
MouseMoveEvent 69 294 0 0 0 0 i\n\
MouseMoveEvent 68 293 0 0 0 0 i\n\
MouseMoveEvent 67 292 0 0 0 0 i\n\
MouseMoveEvent 66 289 0 0 0 0 i\n\
MouseMoveEvent 66 282 0 0 0 0 i\n\
MouseMoveEvent 66 271 0 0 0 0 i\n\
MouseMoveEvent 69 253 0 0 0 0 i\n\
MouseMoveEvent 71 236 0 0 0 0 i\n\
MouseMoveEvent 74 219 0 0 0 0 i\n\
MouseMoveEvent 76 208 0 0 0 0 i\n\
MouseMoveEvent 78 190 0 0 0 0 i\n\
MouseMoveEvent 78 173 0 0 0 0 i\n\
MouseMoveEvent 77 162 0 0 0 0 i\n\
MouseMoveEvent 77 151 0 0 0 0 i\n\
MouseMoveEvent 77 139 0 0 0 0 i\n\
MouseMoveEvent 76 125 0 0 0 0 i\n\
MouseMoveEvent 73 114 0 0 0 0 i\n\
MouseMoveEvent 73 106 0 0 0 0 i\n\
MouseMoveEvent 73 101 0 0 0 0 i\n\
MouseMoveEvent 72 95 0 0 0 0 i\n\
MouseMoveEvent 72 92 0 0 0 0 i\n\
MouseMoveEvent 70 89 0 0 0 0 i\n\
MouseMoveEvent 69 86 0 0 0 0 i\n\
MouseMoveEvent 67 84 0 0 0 0 i\n\
MouseMoveEvent 65 81 0 0 0 0 i\n\
MouseMoveEvent 60 79 0 0 0 0 i\n\
MouseMoveEvent 59 79 0 0 0 0 i\n\
MouseMoveEvent 58 79 0 0 0 0 i\n\
MouseMoveEvent 57 78 0 0 0 0 i\n\
MouseMoveEvent 55 78 0 0 0 0 i\n\
MouseMoveEvent 54 77 0 0 0 0 i\n\
LeftButtonPressEvent 54 77 0 0 0 0 i\n\
MouseMoveEvent 61 79 0 0 0 0 i\n\
MouseMoveEvent 67 83 0 0 0 0 i\n\
MouseMoveEvent 72 88 0 0 0 0 i\n\
MouseMoveEvent 77 90 0 0 0 0 i\n\
MouseMoveEvent 78 91 0 0 0 0 i\n\
MouseMoveEvent 80 92 0 0 0 0 i\n\
MouseMoveEvent 84 93 0 0 0 0 i\n\
MouseMoveEvent 85 94 0 0 0 0 i\n\
MouseMoveEvent 88 97 0 0 0 0 i\n\
MouseMoveEvent 90 100 0 0 0 0 i\n\
MouseMoveEvent 92 102 0 0 0 0 i\n\
MouseMoveEvent 94 103 0 0 0 0 i\n\
MouseMoveEvent 97 105 0 0 0 0 i\n\
MouseMoveEvent 101 107 0 0 0 0 i\n\
MouseMoveEvent 102 109 0 0 0 0 i\n\
MouseMoveEvent 104 111 0 0 0 0 i\n\
MouseMoveEvent 108 113 0 0 0 0 i\n\
MouseMoveEvent 112 115 0 0 0 0 i\n\
MouseMoveEvent 118 119 0 0 0 0 i\n\
MouseMoveEvent 118 120 0 0 0 0 i\n\
MouseMoveEvent 118 123 0 0 0 0 i\n\
MouseMoveEvent 120 125 0 0 0 0 i\n\
MouseMoveEvent 122 128 0 0 0 0 i\n\
MouseMoveEvent 123 129 0 0 0 0 i\n\
MouseMoveEvent 125 132 0 0 0 0 i\n\
MouseMoveEvent 125 134 0 0 0 0 i\n\
MouseMoveEvent 127 138 0 0 0 0 i\n\
MouseMoveEvent 127 142 0 0 0 0 i\n\
MouseMoveEvent 127 147 0 0 0 0 i\n\
MouseMoveEvent 126 152 0 0 0 0 i\n\
MouseMoveEvent 126 155 0 0 0 0 i\n\
MouseMoveEvent 125 160 0 0 0 0 i\n\
MouseMoveEvent 125 167 0 0 0 0 i\n\
MouseMoveEvent 125 169 0 0 0 0 i\n\
MouseMoveEvent 125 174 0 0 0 0 i\n\
MouseMoveEvent 122 179 0 0 0 0 i\n\
MouseMoveEvent 120 183 0 0 0 0 i\n\
MouseMoveEvent 116 187 0 0 0 0 i\n\
MouseMoveEvent 113 192 0 0 0 0 i\n\
MouseMoveEvent 113 193 0 0 0 0 i\n\
MouseMoveEvent 111 195 0 0 0 0 i\n\
MouseMoveEvent 108 198 0 0 0 0 i\n\
MouseMoveEvent 106 200 0 0 0 0 i\n\
MouseMoveEvent 104 202 0 0 0 0 i\n\
MouseMoveEvent 103 203 0 0 0 0 i\n\
MouseMoveEvent 99 205 0 0 0 0 i\n\
MouseMoveEvent 97 207 0 0 0 0 i\n\
MouseMoveEvent 94 208 0 0 0 0 i\n\
MouseMoveEvent 91 210 0 0 0 0 i\n\
MouseMoveEvent 89 211 0 0 0 0 i\n\
MouseMoveEvent 86 211 0 0 0 0 i\n\
MouseMoveEvent 84 211 0 0 0 0 i\n\
MouseMoveEvent 80 211 0 0 0 0 i\n\
MouseMoveEvent 77 211 0 0 0 0 i\n\
MouseMoveEvent 75 211 0 0 0 0 i\n\
MouseMoveEvent 71 211 0 0 0 0 i\n\
MouseMoveEvent 68 211 0 0 0 0 i\n\
MouseMoveEvent 66 210 0 0 0 0 i\n\
MouseMoveEvent 62 210 0 0 0 0 i\n\
MouseMoveEvent 58 209 0 0 0 0 i\n\
MouseMoveEvent 54 207 0 0 0 0 i\n\
MouseMoveEvent 52 204 0 0 0 0 i\n\
MouseMoveEvent 51 203 0 0 0 0 i\n\
MouseMoveEvent 51 200 0 0 0 0 i\n\
MouseMoveEvent 48 196 0 0 0 0 i\n\
MouseMoveEvent 45 187 0 0 0 0 i\n\
MouseMoveEvent 45 181 0 0 0 0 i\n\
MouseMoveEvent 44 168 0 0 0 0 i\n\
MouseMoveEvent 40 161 0 0 0 0 i\n\
MouseMoveEvent 39 154 0 0 0 0 i\n\
MouseMoveEvent 38 146 0 0 0 0 i\n\
MouseMoveEvent 35 131 0 0 0 0 i\n\
MouseMoveEvent 34 121 0 0 0 0 i\n\
MouseMoveEvent 34 110 0 0 0 0 i\n\
MouseMoveEvent 34 103 0 0 0 0 i\n\
MouseMoveEvent 34 91 0 0 0 0 i\n\
MouseMoveEvent 34 86 0 0 0 0 i\n\
MouseMoveEvent 34 73 0 0 0 0 i\n\
MouseMoveEvent 35 66 0 0 0 0 i\n\
MouseMoveEvent 37 60 0 0 0 0 i\n\
MouseMoveEvent 37 53 0 0 0 0 i\n\
MouseMoveEvent 38 50 0 0 0 0 i\n\
MouseMoveEvent 38 48 0 0 0 0 i\n\
MouseMoveEvent 41 45 0 0 0 0 i\n\
MouseMoveEvent 43 45 0 0 0 0 i\n\
MouseMoveEvent 44 45 0 0 0 0 i\n\
MouseMoveEvent 47 43 0 0 0 0 i\n\
MouseMoveEvent 51 44 0 0 0 0 i\n\
MouseMoveEvent 54 44 0 0 0 0 i\n\
MouseMoveEvent 55 44 0 0 0 0 i\n\
MouseMoveEvent 59 44 0 0 0 0 i\n\
MouseMoveEvent 64 44 0 0 0 0 i\n\
MouseMoveEvent 67 44 0 0 0 0 i\n\
MouseMoveEvent 68 44 0 0 0 0 i\n\
MouseMoveEvent 71 44 0 0 0 0 i\n\
MouseMoveEvent 74 44 0 0 0 0 i\n\
MouseMoveEvent 77 44 0 0 0 0 i\n\
MouseMoveEvent 80 45 0 0 0 0 i\n\
MouseMoveEvent 81 45 0 0 0 0 i\n\
MouseMoveEvent 85 49 0 0 0 0 i\n\
MouseMoveEvent 89 50 0 0 0 0 i\n\
MouseMoveEvent 94 52 0 0 0 0 i\n\
MouseMoveEvent 99 56 0 0 0 0 i\n\
MouseMoveEvent 104 58 0 0 0 0 i\n\
MouseMoveEvent 107 61 0 0 0 0 i\n\
MouseMoveEvent 109 63 0 0 0 0 i\n\
MouseMoveEvent 109 67 0 0 0 0 i\n\
MouseMoveEvent 111 83 0 0 0 0 i\n\
MouseMoveEvent 113 86 0 0 0 0 i\n\
MouseMoveEvent 113 87 0 0 0 0 i\n\
MouseMoveEvent 113 89 0 0 0 0 i\n\
MouseMoveEvent 112 93 0 0 0 0 i\n\
MouseMoveEvent 112 97 0 0 0 0 i\n\
MouseMoveEvent 111 104 0 0 0 0 i\n\
MouseMoveEvent 112 108 0 0 0 0 i\n\
MouseMoveEvent 116 115 0 0 0 0 i\n\
MouseMoveEvent 116 123 0 0 0 0 i\n\
MouseMoveEvent 116 129 0 0 0 0 i\n\
MouseMoveEvent 119 138 0 0 0 0 i\n\
MouseMoveEvent 122 141 0 0 0 0 i\n\
MouseMoveEvent 127 148 0 0 0 0 i\n\
MouseMoveEvent 128 161 0 0 0 0 i\n\
MouseMoveEvent 131 166 0 0 0 0 i\n\
MouseMoveEvent 134 168 0 0 0 0 i\n\
MouseMoveEvent 135 171 0 0 0 0 i\n\
MouseMoveEvent 134 174 0 0 0 0 i\n\
MouseMoveEvent 132 176 0 0 0 0 i\n\
MouseMoveEvent 132 178 0 0 0 0 i\n\
MouseMoveEvent 129 180 0 0 0 0 i\n\
MouseMoveEvent 127 182 0 0 0 0 i\n\
MouseMoveEvent 124 185 0 0 0 0 i\n\
MouseMoveEvent 122 186 0 0 0 0 i\n\
MouseMoveEvent 118 189 0 0 0 0 i\n\
MouseMoveEvent 114 191 0 0 0 0 i\n\
MouseMoveEvent 114 193 0 0 0 0 i\n\
MouseMoveEvent 112 193 0 0 0 0 i\n\
MouseMoveEvent 111 194 0 0 0 0 i\n\
MouseMoveEvent 110 197 0 0 0 0 i\n\
MouseMoveEvent 110 198 0 0 0 0 i\n\
MouseMoveEvent 109 199 0 0 0 0 i\n\
MouseMoveEvent 108 200 0 0 0 0 i\n\
MouseMoveEvent 108 201 0 0 0 0 i\n\
MouseMoveEvent 108 202 0 0 0 0 i\n\
MouseMoveEvent 108 203 0 0 0 0 i\n\
MouseMoveEvent 104 206 0 0 0 0 i\n\
LeftButtonReleaseEvent 104 206 0 0 0 0 i\n\
MouseMoveEvent 104 205 0 0 0 0 i\n\
MouseMoveEvent 104 204 0 0 0 0 i\n\
MouseMoveEvent 105 205 0 0 0 0 i\n\
MouseMoveEvent 105 206 0 0 0 0 i\n\
"
# Start by loading some data.
#
dem = vtk.vtkDEMReader()
dem.SetFileName(VTK_DATA_ROOT + "/Data/SainteHelens.dem")
dem.Update()
Scale = 2
lut = vtk.vtkLookupTable()
lut.SetHueRange(0.6, 0)
lut.SetSaturationRange(1.0, 0)
lut.SetValueRange(0.5, 1.0)
lo = Scale * dem.GetElevationBounds()[0]
hi = Scale * dem.GetElevationBounds()[1]
shrink = vtk.vtkImageShrink3D()
shrink.SetShrinkFactors(4, 4, 1)
shrink.SetInputConnection(dem.GetOutputPort())
shrink.AveragingOn()
geom = vtk.vtkImageDataGeometryFilter()
geom.SetInputConnection(shrink.GetOutputPort())
geom.ReleaseDataFlagOn()
warp = vtk.vtkWarpScalar()
warp.SetInputConnection(geom.GetOutputPort())
warp.SetNormal(0, 0, 1)
warp.UseNormalOn()
warp.SetScaleFactor(Scale)
warp.ReleaseDataFlagOn()
elevation = vtk.vtkElevationFilter()
elevation.SetInputConnection(warp.GetOutputPort())
elevation.SetLowPoint(0, 0, lo)
elevation.SetHighPoint(0, 0, hi)
elevation.SetScalarRange(lo, hi)
elevation.ReleaseDataFlagOn()
normals = vtk.vtkPolyDataNormals()
normals.SetInputConnection(elevation.GetOutputPort())
normals.SetFeatureAngle(60)
normals.ConsistencyOff()
normals.SplittingOff()
normals.ReleaseDataFlagOn()
normals.Update()
demMapper = vtk.vtkPolyDataMapper()
demMapper.SetInputConnection(normals.GetOutputPort())
demMapper.SetScalarRange(lo, hi)
demMapper.SetLookupTable(lut)
demMapper.ImmediateModeRenderingOn()
demActor = vtk.vtkActor()
demActor.SetMapper(demMapper)
# Create the RenderWindow, Renderer and both Actors
#
ren = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.SetMultiSamples(0)
renWin.AddRenderer(ren)
iRen = vtk.vtkRenderWindowInteractor()
iRen.SetRenderWindow(renWin)
iRen.LightFollowCameraOff()
# iRen.SetInteractorStyle("")
        # The callback takes two arguments: the first is the object that
        # generated the event and the second is the event name (a string).
def MoveLight(widget, event_string):
light.SetPosition(rep.GetHandlePosition())
        # Associate the sphere widget with the interactor
rep = vtk.vtkSphereRepresentation()
rep.SetPlaceFactor(4)
rep.PlaceWidget(normals.GetOutput().GetBounds())
rep.HandleVisibilityOn()
rep.SetRepresentationToWireframe()
# rep HandleVisibilityOff
# rep HandleTextOff
sphereWidget = vtk.vtkSphereWidget2()
sphereWidget.SetInteractor(iRen)
sphereWidget.SetRepresentation(rep)
# sphereWidget.TranslationEnabledOff()
# sphereWidget.ScalingEnabledOff()
sphereWidget.AddObserver("InteractionEvent", MoveLight)
recorder = vtk.vtkInteractorEventRecorder()
recorder.SetInteractor(iRen)
# recorder.SetFileName("c:/record.log")
# recorder.Record()
recorder.ReadFromInputStringOn()
recorder.SetInputString(Recording)
# Add the actors to the renderer, set the background and size
#
ren.AddActor(demActor)
ren.SetBackground(1, 1, 1)
renWin.SetSize(300, 300)
ren.SetBackground(0.1, 0.2, 0.4)
cam1 = ren.GetActiveCamera()
cam1.SetViewUp(0, 0, 1)
cam1.SetFocalPoint(dem.GetOutput().GetCenter())
cam1.SetPosition(1, 0, 0)
ren.ResetCamera()
cam1.Elevation(25)
cam1.Azimuth(125)
cam1.Zoom(1.25)
light = vtk.vtkLight()
light.SetFocalPoint(rep.GetCenter())
light.SetPosition(rep.GetHandlePosition())
ren.AddLight(light)
iRen.Initialize()
renWin.Render()
# render the image
renWin.Render()
        # Play back the recorded events
recorder.Play()
img_file = "TestSphereWidget.png"
vtk.test.Testing.compareImage(iRen.GetRenderWindow(), vtk.test.Testing.getAbsImagePath(img_file), threshold=25)
vtk.test.Testing.interact()
if __name__ == "__main__":
vtk.test.Testing.main([(TestSphereWidget, 'test')])
|
bsd-3-clause
|
drexly/openhgsenti
|
lib/django/db/models/fields/reverse_related.py
|
106
|
11166
|
"""
"Rel objects" for related fields.
"Rel objects" (for lack of a better name) carry information about the relation
modeled by a related field and provide some utility functions. They're stored
in the ``remote_field`` attribute of the field.
They also act as reverse fields for the purposes of the Meta API because
they're the closest concept currently available.
"""
from __future__ import unicode_literals
import warnings
from django.core import exceptions
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.encoding import smart_text
from django.utils.functional import cached_property
from . import BLANK_CHOICE_DASH
class ForeignObjectRel(object):
"""
Used by ForeignObject to store information about the relation.
``_meta.get_fields()`` returns this class to provide access to the field
flags for the reverse relation.
"""
# Field flags
auto_created = True
concrete = False
editable = False
is_relation = True
# Reverse relations are always nullable (Django can't enforce that a
# foreign key on the related model points to this model).
null = True
def __init__(self, field, to, related_name=None, related_query_name=None,
limit_choices_to=None, parent_link=False, on_delete=None):
self.field = field
self.model = to
self.related_name = related_name
self.related_query_name = related_query_name
self.limit_choices_to = {} if limit_choices_to is None else limit_choices_to
self.parent_link = parent_link
self.on_delete = on_delete
self.symmetrical = False
self.multiple = True
# Some of the following cached_properties can't be initialized in
# __init__ as the field doesn't have its model yet. Calling these methods
# before field.contribute_to_class() has been called will result in
# AttributeError
@property
def to(self):
warnings.warn(
"Usage of ForeignObjectRel.to attribute has been deprecated. "
"Use the model attribute instead.",
RemovedInDjango20Warning, 2)
return self.model
@cached_property
def hidden(self):
return self.is_hidden()
@cached_property
def name(self):
return self.field.related_query_name()
@property
def remote_field(self):
return self.field
@property
def target_field(self):
"""
When filtering against this relation, returns the field on the remote
model against which the filtering should happen.
"""
target_fields = self.get_path_info()[-1].target_fields
if len(target_fields) > 1:
raise exceptions.FieldError("Can't use target_field for multicolumn relations.")
return target_fields[0]
@cached_property
def related_model(self):
if not self.field.model:
raise AttributeError(
"This property can't be accessed before self.field.contribute_to_class has been called.")
return self.field.model
@cached_property
def many_to_many(self):
return self.field.many_to_many
@cached_property
def many_to_one(self):
return self.field.one_to_many
@cached_property
def one_to_many(self):
return self.field.many_to_one
@cached_property
def one_to_one(self):
return self.field.one_to_one
def get_prep_lookup(self, lookup_name, value):
return self.field.get_prep_lookup(lookup_name, value)
def get_lookup(self, lookup_name):
return self.field.get_lookup(lookup_name)
def get_internal_type(self):
return self.field.get_internal_type()
@property
def db_type(self):
return self.field.db_type
def __repr__(self):
return '<%s: %s.%s>' % (
type(self).__name__,
self.related_model._meta.app_label,
self.related_model._meta.model_name,
)
def get_choices(self, include_blank=True, blank_choice=BLANK_CHOICE_DASH,
limit_to_currently_related=False):
"""
Return choices with a default blank choices included, for use as
SelectField choices for this field.
Analog of django.db.models.fields.Field.get_choices(), provided
initially for utilization by RelatedFieldListFilter.
"""
first_choice = blank_choice if include_blank else []
queryset = self.related_model._default_manager.all()
if limit_to_currently_related:
queryset = queryset.complex_filter(
{'%s__isnull' % self.related_model._meta.model_name: False}
)
lst = [(x._get_pk_val(), smart_text(x)) for x in queryset]
return first_choice + lst
def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
# Defer to the actual field definition for db prep
return self.field.get_db_prep_lookup(lookup_type, value, connection=connection, prepared=prepared)
def is_hidden(self):
"Should the related object be hidden?"
return bool(self.related_name) and self.related_name[-1] == '+'
def get_joining_columns(self):
return self.field.get_reverse_joining_columns()
def get_extra_restriction(self, where_class, alias, related_alias):
return self.field.get_extra_restriction(where_class, related_alias, alias)
def set_field_name(self):
"""
        Set the related field's name; this is not available until later stages
        of app loading, so set_field_name is called from
        set_attributes_from_rel().
"""
# By default foreign object doesn't relate to any remote field (for
# example custom multicolumn joins currently have no remote field).
self.field_name = None
def get_accessor_name(self, model=None):
# This method encapsulates the logic that decides what name to give an
# accessor descriptor that retrieves related many-to-one or
# many-to-many objects. It uses the lower-cased object_name + "_set",
# but this can be overridden with the "related_name" option.
# Due to backwards compatibility ModelForms need to be able to provide
# an alternate model. See BaseInlineFormSet.get_default_prefix().
opts = model._meta if model else self.related_model._meta
model = model or self.related_model
if self.multiple:
# If this is a symmetrical m2m relation on self, there is no reverse accessor.
if self.symmetrical and model == self.model:
return None
if self.related_name:
return self.related_name
if opts.default_related_name:
return opts.default_related_name % {
'model_name': opts.model_name.lower(),
'app_label': opts.app_label.lower(),
}
return opts.model_name + ('_set' if self.multiple else '')
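    # Illustration (hypothetical models, not part of this module): given
    #     class Article(models.Model):
    #         author = models.ForeignKey('Author')
    # Author instances get a reverse accessor named 'article_set'
    # (lowercased model name + '_set') unless related_name overrides it or
    # default_related_name is set in Author's Meta.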
def get_cache_name(self):
return "_%s_cache" % self.get_accessor_name()
def get_path_info(self):
return self.field.get_reverse_path_info()
class ManyToOneRel(ForeignObjectRel):
"""
Used by the ForeignKey field to store information about the relation.
``_meta.get_fields()`` returns this class to provide access to the field
flags for the reverse relation.
Note: Because we somewhat abuse the Rel objects by using them as reverse
    fields, we get the funny situation where
``ManyToOneRel.many_to_one == False`` and
``ManyToOneRel.one_to_many == True``. This is unfortunate but the actual
ManyToOneRel class is a private API and there is work underway to turn
reverse relations into actual fields.
"""
def __init__(self, field, to, field_name, related_name=None, related_query_name=None,
limit_choices_to=None, parent_link=False, on_delete=None):
super(ManyToOneRel, self).__init__(
field, to,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
parent_link=parent_link,
on_delete=on_delete,
)
self.field_name = field_name
def __getstate__(self):
state = self.__dict__.copy()
state.pop('related_model', None)
return state
def get_related_field(self):
"""
Return the Field in the 'to' object to which this relationship is tied.
"""
field = self.model._meta.get_field(self.field_name)
if not field.concrete:
raise exceptions.FieldDoesNotExist("No related field named '%s'" %
self.field_name)
return field
def set_field_name(self):
self.field_name = self.field_name or self.model._meta.pk.name
class OneToOneRel(ManyToOneRel):
"""
Used by OneToOneField to store information about the relation.
``_meta.get_fields()`` returns this class to provide access to the field
flags for the reverse relation.
"""
def __init__(self, field, to, field_name, related_name=None, related_query_name=None,
limit_choices_to=None, parent_link=False, on_delete=None):
super(OneToOneRel, self).__init__(
field, to, field_name,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
parent_link=parent_link,
on_delete=on_delete,
)
self.multiple = False
class ManyToManyRel(ForeignObjectRel):
"""
Used by ManyToManyField to store information about the relation.
``_meta.get_fields()`` returns this class to provide access to the field
flags for the reverse relation.
"""
def __init__(self, field, to, related_name=None, related_query_name=None,
limit_choices_to=None, symmetrical=True, through=None, through_fields=None,
db_constraint=True):
super(ManyToManyRel, self).__init__(
field, to,
related_name=related_name,
related_query_name=related_query_name,
limit_choices_to=limit_choices_to,
)
if through and not db_constraint:
raise ValueError("Can't supply a through model and db_constraint=False")
self.through = through
if through_fields and not through:
raise ValueError("Cannot specify through_fields without a through model")
self.through_fields = through_fields
self.symmetrical = symmetrical
self.db_constraint = db_constraint
def get_related_field(self):
"""
Return the field in the 'to' object to which this relationship is tied.
Provided for symmetry with ManyToOneRel.
"""
opts = self.through._meta
if self.through_fields:
field = opts.get_field(self.through_fields[0])
else:
for field in opts.fields:
rel = getattr(field, 'remote_field', None)
if rel and rel.model == self.model:
break
return field.foreign_related_fields[0]
|
apache-2.0
|
peteralfonso/platform_kernel_tegra
|
scripts/tracing/draw_functrace.py
|
14676
|
3560
|
#!/usr/bin/python
"""
Copyright 2008 (c) Frederic Weisbecker <[email protected]>
Licensed under the terms of the GNU GPL License version 2
This script parses a trace provided by the function tracer in
kernel/trace/trace_functions.c
The resulted trace is processed into a tree to produce a more human
view of the call stack by drawing textual but hierarchical tree of
calls. Only the functions' names and the call times are provided.
Usage:
Be sure that you have CONFIG_FUNCTION_TRACER
# mount -t debugfs nodev /sys/kernel/debug
# echo function > /sys/kernel/debug/tracing/current_tracer
$ cat /sys/kernel/debug/tracing/trace_pipe > ~/raw_trace_func
Wait for a while, but not too long; the script is a bit slow.
Break the pipe (Ctrl + Z)
$ scripts/draw_functrace.py < raw_trace_func > draw_functrace
Then you have your drawn trace in draw_functrace
"""
import sys, re
class CallTree:
""" This class provides a tree representation of the functions
call stack. If a function has no parent in the kernel (interrupt,
syscall, kernel thread...) then it is attached to a virtual parent
called ROOT.
"""
ROOT = None
def __init__(self, func, time = None, parent = None):
self._func = func
self._time = time
if parent is None:
self._parent = CallTree.ROOT
else:
self._parent = parent
self._children = []
def calls(self, func, calltime):
""" If a function calls another one, call this method to insert it
into the tree at the appropriate place.
@return: A reference to the newly created child node.
"""
child = CallTree(func, calltime, self)
self._children.append(child)
return child
def getParent(self, func):
""" Retrieve the last parent of the current node that
has the name given by func. If this function is not
on a parent, then create it as new child of root
@return: A reference to the parent.
"""
tree = self
while tree != CallTree.ROOT and tree._func != func:
tree = tree._parent
if tree == CallTree.ROOT:
child = CallTree.ROOT.calls(func, None)
return child
return tree
def __repr__(self):
return self.__toString("", True)
def __toString(self, branch, lastChild):
if self._time is not None:
s = "%s----%s (%s)\n" % (branch, self._func, self._time)
else:
s = "%s----%s\n" % (branch, self._func)
i = 0
if lastChild:
branch = branch[:-1] + " "
while i < len(self._children):
if i != len(self._children) - 1:
s += "%s" % self._children[i].__toString(branch +\
" |", False)
else:
s += "%s" % self._children[i].__toString(branch +\
" |", True)
i += 1
return s
class BrokenLineException(Exception):
"""If the last line is not complete because of the pipe breakage,
we want to stop the processing and ignore this line.
"""
pass
class CommentLineException(Exception):
""" If the line is a comment (as in the beginning of the trace file),
just ignore it.
"""
pass
def parseLine(line):
line = line.strip()
if line.startswith("#"):
raise CommentLineException
m = re.match("[^]]+?\\] +([0-9.]+): (\\w+) <-(\\w+)", line)
if m is None:
raise BrokenLineException
return (m.group(1), m.group(2), m.group(3))
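# For example, a (hypothetical) trace line such as
#   <idle>-0     [001]   10.302153: hrtimer_interrupt <-smp_apic_timer_interrupt
# is returned as ('10.302153', 'hrtimer_interrupt', 'smp_apic_timer_interrupt').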
def main():
CallTree.ROOT = CallTree("Root (Nowhere)", None, None)
tree = CallTree.ROOT
for line in sys.stdin:
try:
calltime, callee, caller = parseLine(line)
except BrokenLineException:
break
except CommentLineException:
continue
tree = tree.getParent(caller)
tree = tree.calls(callee, calltime)
print CallTree.ROOT
if __name__ == "__main__":
main()
|
gpl-2.0
|
timduru/platform-external-chromium_org
|
chrome/test/install_test/chrome_options.py
|
36
|
1375
|
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Chrome-specific options for configuring a ChromeDriver instance."""
import base64
class ChromeOptions(object):
"""Chrome-specific options for configuring a ChromeDriver instance."""
def __init__(self):
"""Initialize ChromeOptions object."""
self._capabilities = {'chrome.switches': [], 'chrome.extensions': []}
def AddSwitch(self, switch):
"""Add a switch to be passed to Chrome.
Args:
switch: String switch to be passed to Chrome.
"""
self._capabilities['chrome.switches'].append(switch)
def AddExtension(self, extension):
"""Add an extension to be loaded onto Chrome.
Args:
extension: String path to the extension to be loaded onto Chrome.
"""
with open(extension, 'rb') as ext_file:
self._capabilities['chrome.extensions'].append(
base64.b64encode(ext_file.read()))
def SetUserDataDir(self, user_data_dir):
"""Set the Chrome user data dir.
Args:
user_data_dir: String path to the profile directory.
"""
self.AddSwitch('user-data-dir=%s' % user_data_dir)
def GetCapabilities(self):
"""Returns a capabilities object suitable for using with ChromeDriver."""
return self._capabilities
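# Example usage (illustrative only; ChromeDriver wiring not shown):
#   options = ChromeOptions()
#   options.AddSwitch('disable-extensions')
#   options.SetUserDataDir('/tmp/chrome-profile')
#   capabilities = options.GetCapabilities()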
|
bsd-3-clause
|
TeamEOS/external_chromium_org
|
build/android/buildbot/bb_run_bot.py
|
10
|
11374
|
#!/usr/bin/env python
#
# Copyright (c) 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import collections
import copy
import json
import os
import pipes
import re
import subprocess
import sys
import bb_utils
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
from pylib import constants
CHROMIUM_COVERAGE_BUCKET = 'chromium-code-coverage'
_BotConfig = collections.namedtuple(
'BotConfig', ['bot_id', 'host_obj', 'test_obj'])
HostConfig = collections.namedtuple(
'HostConfig',
['script', 'host_steps', 'extra_args', 'extra_gyp_defines', 'target_arch'])
TestConfig = collections.namedtuple('Tests', ['script', 'tests', 'extra_args'])
def BotConfig(bot_id, host_object, test_object=None):
return _BotConfig(bot_id, host_object, test_object)
def DictDiff(d1, d2):
diff = []
for key in sorted(set(d1.keys() + d2.keys())):
if key in d1 and d1[key] != d2.get(key):
diff.append('- %s=%s' % (key, pipes.quote(d1[key])))
if key in d2 and d2[key] != d1.get(key):
diff.append('+ %s=%s' % (key, pipes.quote(d2[key])))
return '\n'.join(diff)
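# For example, DictDiff({'A': '1'}, {'A': '2'}) returns the two lines
#   - A=1
#   + A=2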
def GetEnvironment(host_obj, testing, extra_env_vars=None):
init_env = dict(os.environ)
init_env['GYP_GENERATORS'] = 'ninja'
if extra_env_vars:
init_env.update(extra_env_vars)
envsetup_cmd = '. build/android/envsetup.sh'
if testing:
# Skip envsetup to avoid presubmit dependence on android deps.
print 'Testing mode - skipping "%s"' % envsetup_cmd
envsetup_cmd = ':'
else:
print 'Running %s' % envsetup_cmd
proc = subprocess.Popen(['bash', '-exc',
envsetup_cmd + ' >&2; python build/android/buildbot/env_to_json.py'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE,
cwd=bb_utils.CHROME_SRC, env=init_env)
json_env, envsetup_output = proc.communicate()
if proc.returncode != 0:
print >> sys.stderr, 'FATAL Failure in envsetup.'
print >> sys.stderr, envsetup_output
sys.exit(1)
env = json.loads(json_env)
env['GYP_DEFINES'] = env.get('GYP_DEFINES', '') + \
' OS=android fastbuild=1 use_goma=1 gomadir=%s' % bb_utils.GOMA_DIR
if host_obj.target_arch:
env['GYP_DEFINES'] += ' target_arch=%s' % host_obj.target_arch
extra_gyp = host_obj.extra_gyp_defines
if extra_gyp:
env['GYP_DEFINES'] += ' %s' % extra_gyp
if re.search('(asan|clang)=1', extra_gyp):
env.pop('CXX_target', None)
  # Bots check out chrome in /b/build/slave/<name>/build/src
build_internal_android = os.path.abspath(os.path.join(
bb_utils.CHROME_SRC, '..', '..', '..', '..', '..', 'build_internal',
'scripts', 'slave', 'android'))
if os.path.exists(build_internal_android):
env['PATH'] = os.pathsep.join([build_internal_android, env['PATH']])
return env
def GetCommands(options, bot_config):
"""Get a formatted list of commands.
Args:
options: Options object.
bot_config: A BotConfig named tuple.
Returns:
list of Command objects.
"""
property_args = bb_utils.EncodeProperties(options)
commands = [[bot_config.host_obj.script,
'--steps=%s' % ','.join(bot_config.host_obj.host_steps)] +
property_args + (bot_config.host_obj.extra_args or [])]
test_obj = bot_config.test_obj
if test_obj:
run_test_cmd = [test_obj.script] + property_args
for test in test_obj.tests:
run_test_cmd.extend(['-f', test])
if test_obj.extra_args:
run_test_cmd.extend(test_obj.extra_args)
commands.append(run_test_cmd)
return commands
def GetBotStepMap():
compile_step = ['compile']
std_host_tests = ['check_webview_licenses', 'findbugs']
std_build_steps = ['compile', 'zip_build']
std_test_steps = ['extract_build']
std_tests = ['ui', 'unit', 'mojo']
telemetry_tests = ['telemetry_perf_unittests']
flakiness_server = (
'--flakiness-server=%s' % constants.UPSTREAM_FLAKINESS_SERVER)
experimental = ['--experimental']
bisect_chrome_output_dir = os.path.abspath(
os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, os.pardir,
os.pardir, 'bisect', 'src', 'out'))
B = BotConfig
H = (lambda steps, extra_args=None, extra_gyp=None, target_arch=None :
HostConfig('build/android/buildbot/bb_host_steps.py', steps, extra_args,
extra_gyp, target_arch))
T = (lambda tests, extra_args=None :
TestConfig('build/android/buildbot/bb_device_steps.py', tests,
extra_args))
bot_configs = [
# Main builders
B('main-builder-dbg', H(std_build_steps + std_host_tests)),
B('main-builder-rel', H(std_build_steps)),
B('main-clang-builder',
H(compile_step, extra_gyp='clang=1 component=shared_library')),
B('main-clobber', H(compile_step)),
B('main-tests-rel', H(std_test_steps), T(std_tests + telemetry_tests,
[flakiness_server])),
B('main-tests', H(std_test_steps), T(std_tests, [flakiness_server])),
# Other waterfalls
B('asan-builder-tests', H(compile_step,
extra_gyp='asan=1 component=shared_library'),
T(std_tests, ['--asan', '--asan-symbolize'])),
B('blink-try-builder', H(compile_step)),
B('chromedriver-fyi-tests-dbg', H(std_test_steps),
T(['chromedriver'], ['--install=ChromeShell', '--skip-wipe'])),
B('fyi-x86-builder-dbg',
H(compile_step + std_host_tests, experimental, target_arch='x86')),
B('fyi-builder-dbg',
H(std_build_steps + std_host_tests, experimental,
extra_gyp='emma_coverage=1 android_lint=1')),
B('x86-builder-dbg',
H(compile_step + std_host_tests, target_arch='x86')),
B('fyi-builder-rel', H(std_build_steps, experimental)),
B('fyi-tests', H(std_test_steps),
T(std_tests, ['--experimental', flakiness_server,
'--coverage-bucket', CHROMIUM_COVERAGE_BUCKET])),
B('fyi-component-builder-tests-dbg',
H(compile_step, extra_gyp='component=shared_library'),
T(std_tests, ['--experimental', flakiness_server])),
B('gpu-builder-tests-dbg',
H(compile_step),
T(['gpu'], ['--install=ContentShell'])),
# Pass empty T([]) so that logcat monitor and device status check are run.
B('perf-bisect-builder-tests-dbg',
H(['bisect_perf_regression']),
T([], ['--chrome-output-dir', bisect_chrome_output_dir])),
B('perf-tests-rel', H(std_test_steps),
T([], ['--install=ChromeShell'])),
B('webkit-latest-webkit-tests', H(std_test_steps),
T(['webkit_layout', 'webkit'], ['--auto-reconnect'])),
B('webkit-latest-contentshell', H(compile_step),
T(['webkit_layout'], ['--auto-reconnect'])),
B('builder-unit-tests', H(compile_step), T(['unit'])),
B('webrtc-chromium-builder',
H(std_build_steps,
extra_args=['--build-targets=android_builder_chromium_webrtc'])),
B('webrtc-native-builder',
H(std_build_steps,
extra_args=['--build-targets=android_builder_webrtc'],
extra_gyp='include_tests=1 enable_tracing=1')),
B('webrtc-chromium-tests', H(std_test_steps),
T(['webrtc_chromium'],
[flakiness_server, '--gtest-filter=WebRtc*'])),
B('webrtc-native-tests', H(std_test_steps),
T(['webrtc_native'], [flakiness_server])),
# Generic builder config (for substring match).
B('builder', H(std_build_steps)),
]
bot_map = dict((config.bot_id, config) for config in bot_configs)
# These bots have identical configuration to ones defined earlier.
copy_map = [
('lkgr-clobber', 'main-clobber'),
('try-builder-dbg', 'main-builder-dbg'),
('try-builder-rel', 'main-builder-rel'),
('try-clang-builder', 'main-clang-builder'),
('try-fyi-builder-dbg', 'fyi-builder-dbg'),
('try-x86-builder-dbg', 'x86-builder-dbg'),
('try-tests-rel', 'main-tests-rel'),
('try-tests', 'main-tests'),
('try-fyi-tests', 'fyi-tests'),
('webkit-latest-tests', 'main-tests'),
]
for to_id, from_id in copy_map:
assert to_id not in bot_map
# pylint: disable=W0212
bot_map[to_id] = copy.deepcopy(bot_map[from_id])._replace(bot_id=to_id)
# Trybots do not upload to flakiness dashboard. They should be otherwise
# identical in configuration to their trunk building counterparts.
test_obj = bot_map[to_id].test_obj
if to_id.startswith('try') and test_obj:
extra_args = test_obj.extra_args
if extra_args and flakiness_server in extra_args:
extra_args.remove(flakiness_server)
return bot_map
# Return an object from the map, looking first for an exact id match.
# If this fails, look for an id which is a substring of the specified id.
# Choose the longest of all substring matches.
# pylint: disable=W0622
def GetBestMatch(id_map, id):
config = id_map.get(id)
if not config:
substring_matches = filter(lambda x: x in id, id_map.iterkeys())
if substring_matches:
max_id = max(substring_matches, key=len)
print 'Using config from id="%s" (substring match).' % max_id
config = id_map[max_id]
return config
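# E.g. a (hypothetical) id 'fyi-builder-dbg-lollipop' has no exact entry in
# the map above, but 'fyi-builder-dbg' (the longest substring match, beating
# the generic 'builder') supplies its config.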
def GetRunBotOptParser():
parser = bb_utils.GetParser()
parser.add_option('--bot-id', help='Specify bot id directly.')
parser.add_option('--testing', action='store_true',
help='For testing: print, but do not run commands')
return parser
def GetBotConfig(options, bot_step_map):
bot_id = options.bot_id or options.factory_properties.get('android_bot_id')
if not bot_id:
    print >> sys.stderr, (
        'A bot id must be specified through option or factory_props.')
return
bot_config = GetBestMatch(bot_step_map, bot_id)
if not bot_config:
print 'Error: config for id="%s" cannot be inferred.' % bot_id
return bot_config
def RunBotCommands(options, commands, env):
print 'Environment changes:'
print DictDiff(dict(os.environ), env)
for command in commands:
print bb_utils.CommandToString(command)
sys.stdout.flush()
if options.testing:
env['BUILDBOT_TESTING'] = '1'
return_code = subprocess.call(command, cwd=bb_utils.CHROME_SRC, env=env)
if return_code != 0:
return return_code
def main(argv):
proc = subprocess.Popen(
['/bin/hostname', '-f'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
hostname_stdout, hostname_stderr = proc.communicate()
if proc.returncode == 0:
print 'Running on: ' + hostname_stdout
else:
print >> sys.stderr, 'WARNING: failed to run hostname'
print >> sys.stderr, hostname_stdout
print >> sys.stderr, hostname_stderr
sys.exit(1)
parser = GetRunBotOptParser()
options, args = parser.parse_args(argv[1:])
if args:
parser.error('Unused args: %s' % args)
bot_config = GetBotConfig(options, GetBotStepMap())
if not bot_config:
sys.exit(1)
print 'Using config:', bot_config
commands = GetCommands(options, bot_config)
for command in commands:
print 'Will run: ', bb_utils.CommandToString(command)
print
env = GetEnvironment(bot_config.host_obj, options.testing)
return RunBotCommands(options, commands, env)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
bsd-3-clause
|
tkinz27/ansible
|
lib/ansible/inventory/script.py
|
80
|
6338
|
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#############################################
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
import subprocess
import sys
from collections import Mapping
from ansible import constants as C
from ansible.errors import *
from ansible.inventory.host import Host
from ansible.inventory.group import Group
from ansible.module_utils.basic import json_dict_bytes_to_unicode
class InventoryScript:
''' Host inventory parser for ansible using external inventory scripts. '''
def __init__(self, loader, filename=C.DEFAULT_HOST_LIST):
self._loader = loader
# Support inventory scripts that are not prefixed with some
# path information but happen to be in the current working
# directory when '.' is not in PATH.
self.filename = os.path.abspath(filename)
cmd = [ self.filename, "--list" ]
try:
sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except OSError, e:
raise AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
(stdout, stderr) = sp.communicate()
if sp.returncode != 0:
raise AnsibleError("Inventory script (%s) had an execution error: %s " % (filename,stderr))
self.data = stdout
# see comment about _meta below
self.host_vars_from_top = None
self.groups = self._parse(stderr)
def _parse(self, err):
all_hosts = {}
# not passing from_remote because data from CMDB is trusted
try:
self.raw = self._loader.load(self.data)
except Exception as e:
sys.stderr.write(err + "\n")
raise AnsibleError("failed to parse executable inventory script results from {0}: {1}".format(self.filename, str(e)))
if not isinstance(self.raw, Mapping):
sys.stderr.write(err + "\n")
raise AnsibleError("failed to parse executable inventory script results from {0}: data needs to be formatted as a json dict".format(self.filename))
self.raw = json_dict_bytes_to_unicode(self.raw)
all = Group('all')
groups = dict(all=all)
group = None
for (group_name, data) in self.raw.items():
# in Ansible 1.3 and later, a "_meta" subelement may contain
# a variable "hostvars" which contains a hash for each host
# if this "hostvars" exists at all then do not call --host for each
# host. This is for efficiency and scripts should still return data
# if called with --host for backwards compat with 1.2 and earlier.
if group_name == '_meta':
if 'hostvars' in data:
self.host_vars_from_top = data['hostvars']
continue
if group_name != all.name:
group = groups[group_name] = Group(group_name)
else:
group = all
host = None
if not isinstance(data, dict):
data = {'hosts': data}
            # if it has none of those subkeys, this is the simplified syntax: a host with vars
elif not any(k in data for k in ('hosts','vars')):
data = {'hosts': [group_name], 'vars': data}
if 'hosts' in data:
if not isinstance(data['hosts'], list):
raise AnsibleError("You defined a group \"%s\" with bad "
"data for the host list:\n %s" % (group_name, data))
for hostname in data['hosts']:
if not hostname in all_hosts:
all_hosts[hostname] = Host(hostname)
host = all_hosts[hostname]
group.add_host(host)
if 'vars' in data:
if not isinstance(data['vars'], dict):
raise AnsibleError("You defined a group \"%s\" with bad "
"data for variables:\n %s" % (group_name, data))
for k, v in data['vars'].iteritems():
if group.name == all.name:
all.set_variable(k, v)
else:
group.set_variable(k, v)
# Separate loop to ensure all groups are defined
for (group_name, data) in self.raw.items():
if group_name == '_meta':
continue
if isinstance(data, dict) and 'children' in data:
for child_name in data['children']:
if child_name in groups:
groups[group_name].add_child_group(groups[child_name])
for group in groups.values():
if group.depth == 0 and group.name != 'all':
all.add_child_group(group)
return groups
def get_host_variables(self, host):
""" Runs <script> --host <hostname> to determine additional host variables """
if self.host_vars_from_top is not None:
got = self.host_vars_from_top.get(host.name, {})
return got
cmd = [self.filename, "--host", host.name]
try:
sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
except OSError, e:
raise AnsibleError("problem running %s (%s)" % (' '.join(cmd), e))
(out, err) = sp.communicate()
if out.strip() == '':
return dict()
try:
return json_dict_bytes_to_unicode(self._loader.load(out))
except ValueError:
raise AnsibleError("could not parse post variable response: %s, %s" % (cmd, out))
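# A minimal sketch (hypothetical data) of the JSON shape _parse() expects
# from an inventory script invoked with --list:
#
#   {
#       "webservers": {"hosts": ["web1", "web2"], "vars": {"http_port": 80}},
#       "_meta": {"hostvars": {"web1": {"ansible_ssh_host": "10.0.0.1"}}}
#   }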
|
gpl-3.0
|
chugunovyar/factoryForBuild
|
env/lib/python2.7/site-packages/numpy/lib/tests/test_io.py
|
29
|
77244
|
from __future__ import division, absolute_import, print_function
import sys
import gzip
import os
import threading
from tempfile import NamedTemporaryFile
import time
import warnings
import gc
from io import BytesIO
from datetime import datetime
import numpy as np
import numpy.ma as ma
from numpy.lib._iotools import ConverterError, ConversionWarning
from numpy.compat import asbytes, bytes, unicode, Path
from numpy.ma.testutils import assert_equal
from numpy.testing import (
TestCase, run_module_suite, assert_warns, assert_,
assert_raises_regex, assert_raises, assert_allclose,
assert_array_equal, temppath, dec, IS_PYPY, suppress_warnings
)
class TextIO(BytesIO):
"""Helper IO class.
Writes encode strings to bytes if needed, reads return bytes.
This makes it easier to emulate files opened in binary mode
without needing to explicitly convert strings to bytes in
setting up the test data.
"""
def __init__(self, s=""):
BytesIO.__init__(self, asbytes(s))
def write(self, s):
BytesIO.write(self, asbytes(s))
def writelines(self, lines):
BytesIO.writelines(self, [asbytes(s) for s in lines])
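# For instance, TextIO('1 2\n3 4') behaves like a binary file containing
# b'1 2\n3 4', so tests can feed str literals to code expecting bytes.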
MAJVER, MINVER = sys.version_info[:2]
IS_64BIT = sys.maxsize > 2**32
def strptime(s, fmt=None):
"""
This function is available in the datetime module only from Python >=
2.5.
"""
if sys.version_info[0] >= 3:
return datetime(*time.strptime(s.decode('latin1'), fmt)[:3])
else:
return datetime(*time.strptime(s, fmt)[:3])
class RoundtripTest(object):
def roundtrip(self, save_func, *args, **kwargs):
"""
save_func : callable
Function used to save arrays to file.
file_on_disk : bool
If true, store the file on disk, instead of in a
string buffer.
save_kwds : dict
Parameters passed to `save_func`.
load_kwds : dict
Parameters passed to `numpy.load`.
args : tuple of arrays
Arrays stored to file.
"""
save_kwds = kwargs.get('save_kwds', {})
load_kwds = kwargs.get('load_kwds', {})
file_on_disk = kwargs.get('file_on_disk', False)
if file_on_disk:
target_file = NamedTemporaryFile(delete=False)
load_file = target_file.name
else:
target_file = BytesIO()
load_file = target_file
try:
arr = args
save_func(target_file, *arr, **save_kwds)
target_file.flush()
target_file.seek(0)
if sys.platform == 'win32' and not isinstance(target_file, BytesIO):
target_file.close()
arr_reloaded = np.load(load_file, **load_kwds)
self.arr = arr
self.arr_reloaded = arr_reloaded
finally:
if not isinstance(target_file, BytesIO):
target_file.close()
# holds an open file descriptor so it can't be deleted on win
if not isinstance(arr_reloaded, np.lib.npyio.NpzFile):
os.remove(target_file.name)
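    # For instance, TestSaveLoad.roundtrip(np.arange(3)) below routes through
    # here with save_func=np.save, leaving the original array(s) on self.arr
    # and the reloaded copy on self.arr_reloaded for the subclass to compare.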
def check_roundtrips(self, a):
self.roundtrip(a)
self.roundtrip(a, file_on_disk=True)
self.roundtrip(np.asfortranarray(a))
self.roundtrip(np.asfortranarray(a), file_on_disk=True)
if a.shape[0] > 1:
# neither C nor Fortran contiguous for 2D arrays or more
self.roundtrip(np.asfortranarray(a)[1:])
self.roundtrip(np.asfortranarray(a)[1:], file_on_disk=True)
def test_array(self):
a = np.array([], float)
self.check_roundtrips(a)
a = np.array([[1, 2], [3, 4]], float)
self.check_roundtrips(a)
a = np.array([[1, 2], [3, 4]], int)
self.check_roundtrips(a)
a = np.array([[1 + 5j, 2 + 6j], [3 + 7j, 4 + 8j]], dtype=np.csingle)
self.check_roundtrips(a)
a = np.array([[1 + 5j, 2 + 6j], [3 + 7j, 4 + 8j]], dtype=np.cdouble)
self.check_roundtrips(a)
def test_array_object(self):
if sys.version_info[:2] >= (2, 7):
a = np.array([], object)
self.check_roundtrips(a)
a = np.array([[1, 2], [3, 4]], object)
self.check_roundtrips(a)
# Fails with UnpicklingError: could not find MARK on Python 2.6
def test_1D(self):
a = np.array([1, 2, 3, 4], int)
self.roundtrip(a)
@np.testing.dec.knownfailureif(sys.platform == 'win32', "Fail on Win32")
def test_mmap(self):
a = np.array([[1, 2.5], [4, 7.3]])
self.roundtrip(a, file_on_disk=True, load_kwds={'mmap_mode': 'r'})
a = np.asfortranarray([[1, 2.5], [4, 7.3]])
self.roundtrip(a, file_on_disk=True, load_kwds={'mmap_mode': 'r'})
def test_record(self):
a = np.array([(1, 2), (3, 4)], dtype=[('x', 'i4'), ('y', 'i4')])
self.check_roundtrips(a)
@dec.slow
def test_format_2_0(self):
dt = [(("%d" % i) * 100, float) for i in range(500)]
a = np.ones(1000, dtype=dt)
with warnings.catch_warnings(record=True):
warnings.filterwarnings('always', '', UserWarning)
self.check_roundtrips(a)
class TestSaveLoad(RoundtripTest, TestCase):
def roundtrip(self, *args, **kwargs):
RoundtripTest.roundtrip(self, np.save, *args, **kwargs)
assert_equal(self.arr[0], self.arr_reloaded)
assert_equal(self.arr[0].dtype, self.arr_reloaded.dtype)
assert_equal(self.arr[0].flags.fnc, self.arr_reloaded.flags.fnc)
class TestSavezLoad(RoundtripTest, TestCase):
def roundtrip(self, *args, **kwargs):
RoundtripTest.roundtrip(self, np.savez, *args, **kwargs)
try:
for n, arr in enumerate(self.arr):
reloaded = self.arr_reloaded['arr_%d' % n]
assert_equal(arr, reloaded)
assert_equal(arr.dtype, reloaded.dtype)
assert_equal(arr.flags.fnc, reloaded.flags.fnc)
finally:
# delete tempfile, must be done here on windows
if self.arr_reloaded.fid:
self.arr_reloaded.fid.close()
os.remove(self.arr_reloaded.fid.name)
@np.testing.dec.skipif(not IS_64BIT, "Works only with 64bit systems")
@np.testing.dec.slow
def test_big_arrays(self):
L = (1 << 31) + 100000
a = np.empty(L, dtype=np.uint8)
with temppath(prefix="numpy_test_big_arrays_", suffix=".npz") as tmp:
np.savez(tmp, a=a)
del a
npfile = np.load(tmp)
a = npfile['a'] # Should succeed
npfile.close()
del a # Avoid pyflakes unused variable warning.
def test_multiple_arrays(self):
a = np.array([[1, 2], [3, 4]], float)
b = np.array([[1 + 2j, 2 + 7j], [3 - 6j, 4 + 12j]], complex)
self.roundtrip(a, b)
def test_named_arrays(self):
a = np.array([[1, 2], [3, 4]], float)
b = np.array([[1 + 2j, 2 + 7j], [3 - 6j, 4 + 12j]], complex)
c = BytesIO()
np.savez(c, file_a=a, file_b=b)
c.seek(0)
l = np.load(c)
assert_equal(a, l['file_a'])
assert_equal(b, l['file_b'])
def test_BagObj(self):
a = np.array([[1, 2], [3, 4]], float)
b = np.array([[1 + 2j, 2 + 7j], [3 - 6j, 4 + 12j]], complex)
c = BytesIO()
np.savez(c, file_a=a, file_b=b)
c.seek(0)
l = np.load(c)
assert_equal(sorted(dir(l.f)), ['file_a','file_b'])
assert_equal(a, l.f.file_a)
assert_equal(b, l.f.file_b)
def test_savez_filename_clashes(self):
# Test that issue #852 is fixed
# and savez functions in multithreaded environment
def writer(error_list):
with temppath(suffix='.npz') as tmp:
arr = np.random.randn(500, 500)
try:
np.savez(tmp, arr=arr)
except OSError as err:
error_list.append(err)
errors = []
threads = [threading.Thread(target=writer, args=(errors,))
for j in range(3)]
for t in threads:
t.start()
for t in threads:
t.join()
if errors:
raise AssertionError(errors)
def test_not_closing_opened_fid(self):
# Test that issue #2178 is fixed:
# verify could seek on 'loaded' file
with temppath(suffix='.npz') as tmp:
with open(tmp, 'wb') as fp:
np.savez(fp, data='LOVELY LOAD')
with open(tmp, 'rb', 10000) as fp:
fp.seek(0)
assert_(not fp.closed)
np.load(fp)['data']
# fp must not get closed by .load
assert_(not fp.closed)
fp.seek(0)
assert_(not fp.closed)
@np.testing.dec.skipif(IS_PYPY, "context manager required on PyPy")
def test_closing_fid(self):
# Test that issue #1517 (too many opened files) remains closed
        # It might be a "weak" test since it failed to get triggered on
        # e.g. Debian sid of 2012 Jul 05, but was reported to
        # trigger the failure on Ubuntu 10.04:
# http://projects.scipy.org/numpy/ticket/1517#comment:2
with temppath(suffix='.npz') as tmp:
np.savez(tmp, data='LOVELY LOAD')
# We need to check if the garbage collector can properly close
# numpy npz file returned by np.load when their reference count
# goes to zero. Python 3 running in debug mode raises a
# ResourceWarning when file closing is left to the garbage
# collector, so we catch the warnings. Because ResourceWarning
# is unknown in Python < 3.x, we take the easy way out and
# catch all warnings.
with suppress_warnings() as sup:
sup.filter(Warning) # TODO: specify exact message
for i in range(1, 1025):
try:
np.load(tmp)["data"]
except Exception as e:
msg = "Failed to load data from a file: %s" % e
raise AssertionError(msg)
def test_closing_zipfile_after_load(self):
        # Check that zipfile owns the file and can close it. This needs to
        # pass a file name to load for the test. On windows, failure would
        # cause a second error to be raised when the attempt to remove
        # the open file is made.
prefix = 'numpy_test_closing_zipfile_after_load_'
with temppath(suffix='.npz', prefix=prefix) as tmp:
np.savez(tmp, lab='place holder')
data = np.load(tmp)
fp = data.zip.fp
data.close()
assert_(fp.closed)
class TestSaveTxt(TestCase):
def test_array(self):
a = np.array([[1, 2], [3, 4]], float)
fmt = "%.18e"
c = BytesIO()
np.savetxt(c, a, fmt=fmt)
c.seek(0)
assert_equal(c.readlines(),
[asbytes((fmt + ' ' + fmt + '\n') % (1, 2)),
asbytes((fmt + ' ' + fmt + '\n') % (3, 4))])
a = np.array([[1, 2], [3, 4]], int)
c = BytesIO()
np.savetxt(c, a, fmt='%d')
c.seek(0)
assert_equal(c.readlines(), [b'1 2\n', b'3 4\n'])
def test_1D(self):
a = np.array([1, 2, 3, 4], int)
c = BytesIO()
np.savetxt(c, a, fmt='%d')
c.seek(0)
lines = c.readlines()
assert_equal(lines, [b'1\n', b'2\n', b'3\n', b'4\n'])
def test_record(self):
a = np.array([(1, 2), (3, 4)], dtype=[('x', 'i4'), ('y', 'i4')])
c = BytesIO()
np.savetxt(c, a, fmt='%d')
c.seek(0)
assert_equal(c.readlines(), [b'1 2\n', b'3 4\n'])
def test_delimiter(self):
a = np.array([[1., 2.], [3., 4.]])
c = BytesIO()
np.savetxt(c, a, delimiter=',', fmt='%d')
c.seek(0)
assert_equal(c.readlines(), [b'1,2\n', b'3,4\n'])
def test_format(self):
a = np.array([(1, 2), (3, 4)])
c = BytesIO()
# Sequence of formats
np.savetxt(c, a, fmt=['%02d', '%3.1f'])
c.seek(0)
assert_equal(c.readlines(), [b'01 2.0\n', b'03 4.0\n'])
# A single multiformat string
c = BytesIO()
np.savetxt(c, a, fmt='%02d : %3.1f')
c.seek(0)
lines = c.readlines()
assert_equal(lines, [b'01 : 2.0\n', b'03 : 4.0\n'])
        # Specify delimiter, should be overridden
c = BytesIO()
np.savetxt(c, a, fmt='%02d : %3.1f', delimiter=',')
c.seek(0)
lines = c.readlines()
assert_equal(lines, [b'01 : 2.0\n', b'03 : 4.0\n'])
# Bad fmt, should raise a ValueError
c = BytesIO()
assert_raises(ValueError, np.savetxt, c, a, fmt=99)
def test_header_footer(self):
# Test the functionality of the header and footer keyword argument.
c = BytesIO()
a = np.array([(1, 2), (3, 4)], dtype=np.int)
test_header_footer = 'Test header / footer'
# Test the header keyword argument
np.savetxt(c, a, fmt='%1d', header=test_header_footer)
c.seek(0)
assert_equal(c.read(),
asbytes('# ' + test_header_footer + '\n1 2\n3 4\n'))
# Test the footer keyword argument
c = BytesIO()
np.savetxt(c, a, fmt='%1d', footer=test_header_footer)
c.seek(0)
assert_equal(c.read(),
asbytes('1 2\n3 4\n# ' + test_header_footer + '\n'))
# Test the commentstr keyword argument used on the header
c = BytesIO()
commentstr = '% '
np.savetxt(c, a, fmt='%1d',
header=test_header_footer, comments=commentstr)
c.seek(0)
assert_equal(c.read(),
asbytes(commentstr + test_header_footer + '\n' + '1 2\n3 4\n'))
# Test the commentstr keyword argument used on the footer
c = BytesIO()
commentstr = '% '
np.savetxt(c, a, fmt='%1d',
footer=test_header_footer, comments=commentstr)
c.seek(0)
assert_equal(c.read(),
asbytes('1 2\n3 4\n' + commentstr + test_header_footer + '\n'))
def test_file_roundtrip(self):
with temppath() as name:
a = np.array([(1, 2), (3, 4)])
np.savetxt(name, a)
b = np.loadtxt(name)
assert_array_equal(a, b)
def test_complex_arrays(self):
ncols = 2
nrows = 2
a = np.zeros((ncols, nrows), dtype=np.complex128)
re = np.pi
im = np.e
a[:] = re + 1.0j * im
# One format only
c = BytesIO()
np.savetxt(c, a, fmt=' %+.3e')
c.seek(0)
lines = c.readlines()
assert_equal(
lines,
[b' ( +3.142e+00+ +2.718e+00j) ( +3.142e+00+ +2.718e+00j)\n',
b' ( +3.142e+00+ +2.718e+00j) ( +3.142e+00+ +2.718e+00j)\n'])
# One format for each real and imaginary part
c = BytesIO()
np.savetxt(c, a, fmt=' %+.3e' * 2 * ncols)
c.seek(0)
lines = c.readlines()
assert_equal(
lines,
[b' +3.142e+00 +2.718e+00 +3.142e+00 +2.718e+00\n',
b' +3.142e+00 +2.718e+00 +3.142e+00 +2.718e+00\n'])
# One format for each complex number
c = BytesIO()
np.savetxt(c, a, fmt=['(%.3e%+.3ej)'] * ncols)
c.seek(0)
lines = c.readlines()
assert_equal(
lines,
[b'(3.142e+00+2.718e+00j) (3.142e+00+2.718e+00j)\n',
b'(3.142e+00+2.718e+00j) (3.142e+00+2.718e+00j)\n'])
def test_custom_writer(self):
class CustomWriter(list):
def write(self, text):
self.extend(text.split(b'\n'))
w = CustomWriter()
a = np.array([(1, 2), (3, 4)])
np.savetxt(w, a)
b = np.loadtxt(w)
assert_array_equal(a, b)
class TestLoadTxt(TestCase):
def test_record(self):
c = TextIO()
c.write('1 2\n3 4')
c.seek(0)
x = np.loadtxt(c, dtype=[('x', np.int32), ('y', np.int32)])
a = np.array([(1, 2), (3, 4)], dtype=[('x', 'i4'), ('y', 'i4')])
assert_array_equal(x, a)
d = TextIO()
d.write('M 64.0 75.0\nF 25.0 60.0')
d.seek(0)
mydescriptor = {'names': ('gender', 'age', 'weight'),
'formats': ('S1', 'i4', 'f4')}
b = np.array([('M', 64.0, 75.0),
('F', 25.0, 60.0)], dtype=mydescriptor)
y = np.loadtxt(d, dtype=mydescriptor)
assert_array_equal(y, b)
def test_array(self):
c = TextIO()
c.write('1 2\n3 4')
c.seek(0)
x = np.loadtxt(c, dtype=np.int)
a = np.array([[1, 2], [3, 4]], int)
assert_array_equal(x, a)
c.seek(0)
x = np.loadtxt(c, dtype=float)
a = np.array([[1, 2], [3, 4]], float)
assert_array_equal(x, a)
def test_1D(self):
c = TextIO()
c.write('1\n2\n3\n4\n')
c.seek(0)
x = np.loadtxt(c, dtype=int)
a = np.array([1, 2, 3, 4], int)
assert_array_equal(x, a)
c = TextIO()
c.write('1,2,3,4\n')
c.seek(0)
x = np.loadtxt(c, dtype=int, delimiter=',')
a = np.array([1, 2, 3, 4], int)
assert_array_equal(x, a)
def test_missing(self):
c = TextIO()
c.write('1,2,3,,5\n')
c.seek(0)
x = np.loadtxt(c, dtype=int, delimiter=',',
converters={3: lambda s: int(s or - 999)})
a = np.array([1, 2, 3, -999, 5], int)
assert_array_equal(x, a)
def test_converters_with_usecols(self):
c = TextIO()
c.write('1,2,3,,5\n6,7,8,9,10\n')
c.seek(0)
x = np.loadtxt(c, dtype=int, delimiter=',',
converters={3: lambda s: int(s or - 999)},
usecols=(1, 3,))
a = np.array([[2, -999], [7, 9]], int)
assert_array_equal(x, a)
def test_comments_unicode(self):
c = TextIO()
c.write('# comment\n1,2,3,5\n')
c.seek(0)
x = np.loadtxt(c, dtype=int, delimiter=',',
comments=unicode('#'))
a = np.array([1, 2, 3, 5], int)
assert_array_equal(x, a)
def test_comments_byte(self):
c = TextIO()
c.write('# comment\n1,2,3,5\n')
c.seek(0)
x = np.loadtxt(c, dtype=int, delimiter=',',
comments=b'#')
a = np.array([1, 2, 3, 5], int)
assert_array_equal(x, a)
def test_comments_multiple(self):
c = TextIO()
c.write('# comment\n1,2,3\n@ comment2\n4,5,6 // comment3')
c.seek(0)
x = np.loadtxt(c, dtype=int, delimiter=',',
comments=['#', '@', '//'])
a = np.array([[1, 2, 3], [4, 5, 6]], int)
assert_array_equal(x, a)
def test_comments_multi_chars(self):
c = TextIO()
c.write('/* comment\n1,2,3,5\n')
c.seek(0)
x = np.loadtxt(c, dtype=int, delimiter=',',
comments='/*')
a = np.array([1, 2, 3, 5], int)
assert_array_equal(x, a)
# Check that '/*' is not transformed to ['/', '*']
c = TextIO()
c.write('*/ comment\n1,2,3,5\n')
c.seek(0)
assert_raises(ValueError, np.loadtxt, c, dtype=int, delimiter=',',
comments='/*')
def test_skiprows(self):
c = TextIO()
c.write('comment\n1,2,3,5\n')
c.seek(0)
x = np.loadtxt(c, dtype=int, delimiter=',',
skiprows=1)
a = np.array([1, 2, 3, 5], int)
assert_array_equal(x, a)
c = TextIO()
c.write('# comment\n1,2,3,5\n')
c.seek(0)
x = np.loadtxt(c, dtype=int, delimiter=',',
skiprows=1)
a = np.array([1, 2, 3, 5], int)
assert_array_equal(x, a)
def test_usecols(self):
a = np.array([[1, 2], [3, 4]], float)
c = BytesIO()
np.savetxt(c, a)
c.seek(0)
x = np.loadtxt(c, dtype=float, usecols=(1,))
assert_array_equal(x, a[:, 1])
a = np.array([[1, 2, 3], [3, 4, 5]], float)
c = BytesIO()
np.savetxt(c, a)
c.seek(0)
x = np.loadtxt(c, dtype=float, usecols=(1, 2))
assert_array_equal(x, a[:, 1:])
# Testing with arrays instead of tuples.
c.seek(0)
x = np.loadtxt(c, dtype=float, usecols=np.array([1, 2]))
assert_array_equal(x, a[:, 1:])
# Testing with an integer instead of a sequence
for int_type in [int, np.int8, np.int16,
np.int32, np.int64, np.uint8, np.uint16,
np.uint32, np.uint64]:
to_read = int_type(1)
c.seek(0)
x = np.loadtxt(c, dtype=float, usecols=to_read)
assert_array_equal(x, a[:, 1])
# Testing with some crazy custom integer type
class CrazyInt(object):
def __index__(self):
return 1
crazy_int = CrazyInt()
c.seek(0)
x = np.loadtxt(c, dtype=float, usecols=crazy_int)
assert_array_equal(x, a[:, 1])
c.seek(0)
x = np.loadtxt(c, dtype=float, usecols=(crazy_int,))
assert_array_equal(x, a[:, 1])
# Checking with dtypes defined converters.
data = '''JOE 70.1 25.3
BOB 60.5 27.9
'''
c = TextIO(data)
names = ['stid', 'temp']
dtypes = ['S4', 'f8']
arr = np.loadtxt(c, usecols=(0, 2), dtype=list(zip(names, dtypes)))
assert_equal(arr['stid'], [b"JOE", b"BOB"])
assert_equal(arr['temp'], [25.3, 27.9])
# Testing non-ints in usecols
c.seek(0)
bogus_idx = 1.5
assert_raises_regex(
TypeError,
'^usecols must be.*%s' % type(bogus_idx),
np.loadtxt, c, usecols=bogus_idx
)
assert_raises_regex(
TypeError,
'^usecols must be.*%s' % type(bogus_idx),
np.loadtxt, c, usecols=[0, bogus_idx, 0]
)
def test_fancy_dtype(self):
c = TextIO()
c.write('1,2,3.0\n4,5,6.0\n')
c.seek(0)
dt = np.dtype([('x', int), ('y', [('t', int), ('s', float)])])
x = np.loadtxt(c, dtype=dt, delimiter=',')
a = np.array([(1, (2, 3.0)), (4, (5, 6.0))], dt)
assert_array_equal(x, a)
def test_shaped_dtype(self):
c = TextIO("aaaa 1.0 8.0 1 2 3 4 5 6")
dt = np.dtype([('name', 'S4'), ('x', float), ('y', float),
('block', int, (2, 3))])
x = np.loadtxt(c, dtype=dt)
a = np.array([('aaaa', 1.0, 8.0, [[1, 2, 3], [4, 5, 6]])],
dtype=dt)
assert_array_equal(x, a)
def test_3d_shaped_dtype(self):
c = TextIO("aaaa 1.0 8.0 1 2 3 4 5 6 7 8 9 10 11 12")
dt = np.dtype([('name', 'S4'), ('x', float), ('y', float),
('block', int, (2, 2, 3))])
x = np.loadtxt(c, dtype=dt)
a = np.array([('aaaa', 1.0, 8.0,
[[[1, 2, 3], [4, 5, 6]], [[7, 8, 9], [10, 11, 12]]])],
dtype=dt)
assert_array_equal(x, a)
def test_empty_file(self):
with suppress_warnings() as sup:
sup.filter(message="loadtxt: Empty input file:")
c = TextIO()
x = np.loadtxt(c)
assert_equal(x.shape, (0,))
x = np.loadtxt(c, dtype=np.int64)
assert_equal(x.shape, (0,))
assert_(x.dtype == np.int64)
def test_unused_converter(self):
c = TextIO()
c.writelines(['1 21\n', '3 42\n'])
c.seek(0)
data = np.loadtxt(c, usecols=(1,),
converters={0: lambda s: int(s, 16)})
assert_array_equal(data, [21, 42])
c.seek(0)
data = np.loadtxt(c, usecols=(1,),
converters={1: lambda s: int(s, 16)})
assert_array_equal(data, [33, 66])
def test_dtype_with_object(self):
# Test using an explicit dtype with an object
data = """ 1; 2001-01-01
2; 2002-01-31 """
ndtype = [('idx', int), ('code', np.object)]
func = lambda s: strptime(s.strip(), "%Y-%m-%d")
converters = {1: func}
test = np.loadtxt(TextIO(data), delimiter=";", dtype=ndtype,
converters=converters)
control = np.array(
[(1, datetime(2001, 1, 1)), (2, datetime(2002, 1, 31))],
dtype=ndtype)
assert_equal(test, control)
def test_uint64_type(self):
tgt = (9223372043271415339, 9223372043271415853)
c = TextIO()
c.write("%s %s" % tgt)
c.seek(0)
res = np.loadtxt(c, dtype=np.uint64)
assert_equal(res, tgt)
def test_int64_type(self):
tgt = (-9223372036854775807, 9223372036854775807)
c = TextIO()
c.write("%s %s" % tgt)
c.seek(0)
res = np.loadtxt(c, dtype=np.int64)
assert_equal(res, tgt)
def test_from_float_hex(self):
# IEEE doubles and floats only, otherwise the float32
# conversion may fail.
tgt = np.logspace(-10, 10, 5).astype(np.float32)
tgt = np.hstack((tgt, -tgt)).astype(np.float)
inp = '\n'.join(map(float.hex, tgt))
c = TextIO()
c.write(inp)
for dt in [np.float, np.float32]:
c.seek(0)
res = np.loadtxt(c, dtype=dt)
assert_equal(res, tgt, err_msg="%s" % dt)
def test_from_complex(self):
tgt = (complex(1, 1), complex(1, -1))
c = TextIO()
c.write("%s %s" % tgt)
c.seek(0)
res = np.loadtxt(c, dtype=np.complex)
assert_equal(res, tgt)
def test_universal_newline(self):
with temppath() as name:
with open(name, 'w') as f:
f.write('1 21\r3 42\r')
data = np.loadtxt(name)
assert_array_equal(data, [[1, 21], [3, 42]])
def test_empty_field_after_tab(self):
c = TextIO()
c.write('1 \t2 \t3\tstart \n4\t5\t6\t \n7\t8\t9.5\t')
c.seek(0)
dt = {'names': ('x', 'y', 'z', 'comment'),
'formats': ('<i4', '<i4', '<f4', '|S8')}
x = np.loadtxt(c, dtype=dt, delimiter='\t')
a = np.array([b'start ', b' ', b''])
assert_array_equal(x['comment'], a)
def test_structure_unpack(self):
txt = TextIO("M 21 72\nF 35 58")
dt = {'names': ('a', 'b', 'c'), 'formats': ('|S1', '<i4', '<f4')}
a, b, c = np.loadtxt(txt, dtype=dt, unpack=True)
assert_(a.dtype.str == '|S1')
assert_(b.dtype.str == '<i4')
assert_(c.dtype.str == '<f4')
assert_array_equal(a, np.array([b'M', b'F']))
assert_array_equal(b, np.array([21, 35]))
assert_array_equal(c, np.array([72., 58.]))
def test_ndmin_keyword(self):
c = TextIO()
c.write('1,2,3\n4,5,6')
c.seek(0)
assert_raises(ValueError, np.loadtxt, c, ndmin=3)
c.seek(0)
assert_raises(ValueError, np.loadtxt, c, ndmin=1.5)
c.seek(0)
x = np.loadtxt(c, dtype=int, delimiter=',', ndmin=1)
a = np.array([[1, 2, 3], [4, 5, 6]])
assert_array_equal(x, a)
d = TextIO()
d.write('0,1,2')
d.seek(0)
x = np.loadtxt(d, dtype=int, delimiter=',', ndmin=2)
assert_(x.shape == (1, 3))
d.seek(0)
x = np.loadtxt(d, dtype=int, delimiter=',', ndmin=1)
assert_(x.shape == (3,))
d.seek(0)
x = np.loadtxt(d, dtype=int, delimiter=',', ndmin=0)
assert_(x.shape == (3,))
e = TextIO()
e.write('0\n1\n2')
e.seek(0)
x = np.loadtxt(e, dtype=int, delimiter=',', ndmin=2)
assert_(x.shape == (3, 1))
e.seek(0)
x = np.loadtxt(e, dtype=int, delimiter=',', ndmin=1)
assert_(x.shape == (3,))
e.seek(0)
x = np.loadtxt(e, dtype=int, delimiter=',', ndmin=0)
assert_(x.shape == (3,))
# Test ndmin kw with empty file.
with suppress_warnings() as sup:
sup.filter(message="loadtxt: Empty input file:")
f = TextIO()
assert_(np.loadtxt(f, ndmin=2).shape == (0, 1,))
assert_(np.loadtxt(f, ndmin=1).shape == (0,))
def test_generator_source(self):
def count():
for i in range(10):
yield "%d" % i
res = np.loadtxt(count())
assert_array_equal(res, np.arange(10))
def test_bad_line(self):
c = TextIO()
c.write('1 2 3\n4 5 6\n2 3')
c.seek(0)
# Check for exception and that exception contains line number
assert_raises_regex(ValueError, "3", np.loadtxt, c)
def test_none_as_string(self):
# gh-5155, None should work as string when format demands it
c = TextIO()
c.write('100,foo,200\n300,None,400')
c.seek(0)
dt = np.dtype([('x', int), ('a', 'S10'), ('y', int)])
np.loadtxt(c, delimiter=',', dtype=dt, comments=None) # Should succeed
class Testfromregex(TestCase):
# np.fromregex expects files opened in binary mode.
def test_record(self):
c = TextIO()
c.write('1.312 foo\n1.534 bar\n4.444 qux')
c.seek(0)
dt = [('num', np.float64), ('val', 'S3')]
x = np.fromregex(c, r"([0-9.]+)\s+(...)", dt)
a = np.array([(1.312, 'foo'), (1.534, 'bar'), (4.444, 'qux')],
dtype=dt)
assert_array_equal(x, a)
def test_record_2(self):
c = TextIO()
c.write('1312 foo\n1534 bar\n4444 qux')
c.seek(0)
dt = [('num', np.int32), ('val', 'S3')]
x = np.fromregex(c, r"(\d+)\s+(...)", dt)
a = np.array([(1312, 'foo'), (1534, 'bar'), (4444, 'qux')],
dtype=dt)
assert_array_equal(x, a)
def test_record_3(self):
c = TextIO()
c.write('1312 foo\n1534 bar\n4444 qux')
c.seek(0)
dt = [('num', np.float64)]
x = np.fromregex(c, r"(\d+)\s+...", dt)
a = np.array([(1312,), (1534,), (4444,)], dtype=dt)
assert_array_equal(x, a)
#####--------------------------------------------------------------------------
class TestFromTxt(TestCase):
#
def test_record(self):
# Test w/ explicit dtype
data = TextIO('1 2\n3 4')
test = np.ndfromtxt(data, dtype=[('x', np.int32), ('y', np.int32)])
control = np.array([(1, 2), (3, 4)], dtype=[('x', 'i4'), ('y', 'i4')])
assert_equal(test, control)
#
data = TextIO('M 64.0 75.0\nF 25.0 60.0')
descriptor = {'names': ('gender', 'age', 'weight'),
'formats': ('S1', 'i4', 'f4')}
control = np.array([('M', 64.0, 75.0), ('F', 25.0, 60.0)],
dtype=descriptor)
test = np.ndfromtxt(data, dtype=descriptor)
assert_equal(test, control)
def test_array(self):
        # Test outputting a standard ndarray
data = TextIO('1 2\n3 4')
control = np.array([[1, 2], [3, 4]], dtype=int)
test = np.ndfromtxt(data, dtype=int)
assert_array_equal(test, control)
#
data.seek(0)
control = np.array([[1, 2], [3, 4]], dtype=float)
test = np.loadtxt(data, dtype=float)
assert_array_equal(test, control)
def test_1D(self):
# Test squeezing to 1D
control = np.array([1, 2, 3, 4], int)
#
data = TextIO('1\n2\n3\n4\n')
test = np.ndfromtxt(data, dtype=int)
assert_array_equal(test, control)
#
data = TextIO('1,2,3,4\n')
test = np.ndfromtxt(data, dtype=int, delimiter=',')
assert_array_equal(test, control)
def test_comments(self):
# Test the stripping of comments
control = np.array([1, 2, 3, 5], int)
# Comment on its own line
data = TextIO('# comment\n1,2,3,5\n')
test = np.ndfromtxt(data, dtype=int, delimiter=',', comments='#')
assert_equal(test, control)
# Comment at the end of a line
data = TextIO('1,2,3,5# comment\n')
test = np.ndfromtxt(data, dtype=int, delimiter=',', comments='#')
assert_equal(test, control)
def test_skiprows(self):
# Test row skipping
control = np.array([1, 2, 3, 5], int)
kwargs = dict(dtype=int, delimiter=',')
#
data = TextIO('comment\n1,2,3,5\n')
test = np.ndfromtxt(data, skip_header=1, **kwargs)
assert_equal(test, control)
#
data = TextIO('# comment\n1,2,3,5\n')
test = np.loadtxt(data, skiprows=1, **kwargs)
assert_equal(test, control)
def test_skip_footer(self):
data = ["# %i" % i for i in range(1, 6)]
data.append("A, B, C")
data.extend(["%i,%3.1f,%03s" % (i, i, i) for i in range(51)])
data[-1] = "99,99"
kwargs = dict(delimiter=",", names=True, skip_header=5, skip_footer=10)
test = np.genfromtxt(TextIO("\n".join(data)), **kwargs)
ctrl = np.array([("%f" % i, "%f" % i, "%f" % i) for i in range(41)],
dtype=[(_, float) for _ in "ABC"])
assert_equal(test, ctrl)
def test_skip_footer_with_invalid(self):
with suppress_warnings() as sup:
sup.filter(ConversionWarning)
basestr = '1 1\n2 2\n3 3\n4 4\n5 \n6 \n7 \n'
# Footer too small to get rid of all invalid values
assert_raises(ValueError, np.genfromtxt,
TextIO(basestr), skip_footer=1)
# except ValueError:
# pass
a = np.genfromtxt(
TextIO(basestr), skip_footer=1, invalid_raise=False)
assert_equal(a, np.array([[1., 1.], [2., 2.], [3., 3.], [4., 4.]]))
#
a = np.genfromtxt(TextIO(basestr), skip_footer=3)
assert_equal(a, np.array([[1., 1.], [2., 2.], [3., 3.], [4., 4.]]))
#
basestr = '1 1\n2 \n3 3\n4 4\n5 \n6 6\n7 7\n'
a = np.genfromtxt(
TextIO(basestr), skip_footer=1, invalid_raise=False)
assert_equal(a, np.array([[1., 1.], [3., 3.], [4., 4.], [6., 6.]]))
a = np.genfromtxt(
TextIO(basestr), skip_footer=3, invalid_raise=False)
assert_equal(a, np.array([[1., 1.], [3., 3.], [4., 4.]]))
def test_header(self):
# Test retrieving a header
data = TextIO('gender age weight\nM 64.0 75.0\nF 25.0 60.0')
test = np.ndfromtxt(data, dtype=None, names=True)
control = {'gender': np.array([b'M', b'F']),
'age': np.array([64.0, 25.0]),
'weight': np.array([75.0, 60.0])}
assert_equal(test['gender'], control['gender'])
assert_equal(test['age'], control['age'])
assert_equal(test['weight'], control['weight'])
def test_auto_dtype(self):
# Test the automatic definition of the output dtype
data = TextIO('A 64 75.0 3+4j True\nBCD 25 60.0 5+6j False')
test = np.ndfromtxt(data, dtype=None)
control = [np.array([b'A', b'BCD']),
np.array([64, 25]),
np.array([75.0, 60.0]),
np.array([3 + 4j, 5 + 6j]),
np.array([True, False]), ]
assert_equal(test.dtype.names, ['f0', 'f1', 'f2', 'f3', 'f4'])
for (i, ctrl) in enumerate(control):
assert_equal(test['f%i' % i], ctrl)
def test_auto_dtype_uniform(self):
# Tests whether the output dtype can be uniformized
data = TextIO('1 2 3 4\n5 6 7 8\n')
test = np.ndfromtxt(data, dtype=None)
control = np.array([[1, 2, 3, 4], [5, 6, 7, 8]])
assert_equal(test, control)
def test_fancy_dtype(self):
# Check that a nested dtype isn't MIA
data = TextIO('1,2,3.0\n4,5,6.0\n')
fancydtype = np.dtype([('x', int), ('y', [('t', int), ('s', float)])])
test = np.ndfromtxt(data, dtype=fancydtype, delimiter=',')
control = np.array([(1, (2, 3.0)), (4, (5, 6.0))], dtype=fancydtype)
assert_equal(test, control)
def test_names_overwrite(self):
# Test overwriting the names of the dtype
descriptor = {'names': ('g', 'a', 'w'),
'formats': ('S1', 'i4', 'f4')}
data = TextIO(b'M 64.0 75.0\nF 25.0 60.0')
names = ('gender', 'age', 'weight')
test = np.ndfromtxt(data, dtype=descriptor, names=names)
descriptor['names'] = names
control = np.array([('M', 64.0, 75.0),
('F', 25.0, 60.0)], dtype=descriptor)
assert_equal(test, control)
def test_commented_header(self):
# Check that names can be retrieved even if the line is commented out.
data = TextIO("""
#gender age weight
M 21 72.100000
F 35 58.330000
M 33 21.99
""")
# The # is part of the first name and should be deleted automatically.
test = np.genfromtxt(data, names=True, dtype=None)
ctrl = np.array([('M', 21, 72.1), ('F', 35, 58.33), ('M', 33, 21.99)],
dtype=[('gender', '|S1'), ('age', int), ('weight', float)])
assert_equal(test, ctrl)
# Ditto, but we should get rid of the first element
data = TextIO(b"""
# gender age weight
M 21 72.100000
F 35 58.330000
M 33 21.99
""")
test = np.genfromtxt(data, names=True, dtype=None)
assert_equal(test, ctrl)
def test_autonames_and_usecols(self):
# Tests names and usecols
data = TextIO('A B C D\n aaaa 121 45 9.1')
test = np.ndfromtxt(data, usecols=('A', 'C', 'D'),
names=True, dtype=None)
control = np.array(('aaaa', 45, 9.1),
dtype=[('A', '|S4'), ('C', int), ('D', float)])
assert_equal(test, control)
def test_converters_with_usecols(self):
# Test the combination user-defined converters and usecol
data = TextIO('1,2,3,,5\n6,7,8,9,10\n')
test = np.ndfromtxt(data, dtype=int, delimiter=',',
converters={3: lambda s: int(s or - 999)},
usecols=(1, 3,))
control = np.array([[2, -999], [7, 9]], int)
assert_equal(test, control)
def test_converters_with_usecols_and_names(self):
# Tests names and usecols
data = TextIO('A B C D\n aaaa 121 45 9.1')
test = np.ndfromtxt(data, usecols=('A', 'C', 'D'), names=True,
dtype=None, converters={'C': lambda s: 2 * int(s)})
control = np.array(('aaaa', 90, 9.1),
dtype=[('A', '|S4'), ('C', int), ('D', float)])
assert_equal(test, control)
def test_converters_cornercases(self):
# Test the conversion to datetime.
converter = {
'date': lambda s: strptime(s, '%Y-%m-%d %H:%M:%SZ')}
data = TextIO('2009-02-03 12:00:00Z, 72214.0')
test = np.ndfromtxt(data, delimiter=',', dtype=None,
names=['date', 'stid'], converters=converter)
control = np.array((datetime(2009, 2, 3), 72214.),
dtype=[('date', np.object_), ('stid', float)])
assert_equal(test, control)
def test_converters_cornercases2(self):
# Test the conversion to datetime64.
converter = {
'date': lambda s: np.datetime64(strptime(s, '%Y-%m-%d %H:%M:%SZ'))}
data = TextIO('2009-02-03 12:00:00Z, 72214.0')
test = np.ndfromtxt(data, delimiter=',', dtype=None,
names=['date', 'stid'], converters=converter)
control = np.array((datetime(2009, 2, 3), 72214.),
dtype=[('date', 'datetime64[us]'), ('stid', float)])
assert_equal(test, control)
def test_unused_converter(self):
# Test whether unused converters are forgotten
data = TextIO("1 21\n 3 42\n")
test = np.ndfromtxt(data, usecols=(1,),
converters={0: lambda s: int(s, 16)})
assert_equal(test, [21, 42])
#
data.seek(0)
test = np.ndfromtxt(data, usecols=(1,),
converters={1: lambda s: int(s, 16)})
assert_equal(test, [33, 66])
def test_invalid_converter(self):
strip_rand = lambda x: float((b'r' in x.lower() and x.split()[-1]) or
(b'r' not in x.lower() and x.strip() or 0.0))
strip_per = lambda x: float((b'%' in x.lower() and x.split()[0]) or
(b'%' not in x.lower() and x.strip() or 0.0))
s = TextIO("D01N01,10/1/2003 ,1 %,R 75,400,600\r\n"
"L24U05,12/5/2003, 2 %,1,300, 150.5\r\n"
"D02N03,10/10/2004,R 1,,7,145.55")
kwargs = dict(
converters={2: strip_per, 3: strip_rand}, delimiter=",",
dtype=None)
assert_raises(ConverterError, np.genfromtxt, s, **kwargs)
def test_tricky_converter_bug1666(self):
# Test some corner cases
s = TextIO('q1,2\nq3,4')
cnv = lambda s: float(s[1:])
test = np.genfromtxt(s, delimiter=',', converters={0: cnv})
control = np.array([[1., 2.], [3., 4.]])
assert_equal(test, control)
def test_dtype_with_converters(self):
dstr = "2009; 23; 46"
test = np.ndfromtxt(TextIO(dstr,),
delimiter=";", dtype=float, converters={0: bytes})
control = np.array([('2009', 23., 46)],
dtype=[('f0', '|S4'), ('f1', float), ('f2', float)])
assert_equal(test, control)
test = np.ndfromtxt(TextIO(dstr,),
delimiter=";", dtype=float, converters={0: float})
control = np.array([2009., 23., 46],)
assert_equal(test, control)
def test_dtype_with_converters_and_usecols(self):
dstr = "1,5,-1,1:1\n2,8,-1,1:n\n3,3,-2,m:n\n"
dmap = {'1:1':0, '1:n':1, 'm:1':2, 'm:n':3}
dtyp = [('e1','i4'),('e2','i4'),('e3','i2'),('n', 'i1')]
conv = {0: int, 1: int, 2: int, 3: lambda r: dmap[r.decode()]}
test = np.recfromcsv(TextIO(dstr,), dtype=dtyp, delimiter=',',
names=None, converters=conv)
control = np.rec.array([[1,5,-1,0], [2,8,-1,1], [3,3,-2,3]], dtype=dtyp)
assert_equal(test, control)
dtyp = [('e1','i4'),('e2','i4'),('n', 'i1')]
test = np.recfromcsv(TextIO(dstr,), dtype=dtyp, delimiter=',',
usecols=(0,1,3), names=None, converters=conv)
control = np.rec.array([[1,5,0], [2,8,1], [3,3,3]], dtype=dtyp)
assert_equal(test, control)
def test_dtype_with_object(self):
# Test using an explicit dtype with an object
data = """ 1; 2001-01-01
2; 2002-01-31 """
ndtype = [('idx', int), ('code', np.object)]
func = lambda s: strptime(s.strip(), "%Y-%m-%d")
converters = {1: func}
test = np.genfromtxt(TextIO(data), delimiter=";", dtype=ndtype,
converters=converters)
control = np.array(
[(1, datetime(2001, 1, 1)), (2, datetime(2002, 1, 31))],
dtype=ndtype)
assert_equal(test, control)
ndtype = [('nest', [('idx', int), ('code', np.object)])]
try:
test = np.genfromtxt(TextIO(data), delimiter=";",
dtype=ndtype, converters=converters)
except NotImplementedError:
pass
else:
errmsg = "Nested dtype involving objects should be supported."
raise AssertionError(errmsg)
def test_userconverters_with_explicit_dtype(self):
# Test user_converters w/ explicit (standard) dtype
data = TextIO('skip,skip,2001-01-01,1.0,skip')
test = np.genfromtxt(data, delimiter=",", names=None, dtype=float,
usecols=(2, 3), converters={2: bytes})
control = np.array([('2001-01-01', 1.)],
dtype=[('', '|S10'), ('', float)])
assert_equal(test, control)
def test_spacedelimiter(self):
# Test space delimiter
data = TextIO("1 2 3 4 5\n6 7 8 9 10")
test = np.ndfromtxt(data)
control = np.array([[1., 2., 3., 4., 5.],
[6., 7., 8., 9., 10.]])
assert_equal(test, control)
def test_integer_delimiter(self):
# Test using an integer for delimiter
data = " 1 2 3\n 4 5 67\n890123 4"
test = np.genfromtxt(TextIO(data), delimiter=3)
control = np.array([[1, 2, 3], [4, 5, 67], [890, 123, 4]])
assert_equal(test, control)
def test_missing(self):
data = TextIO('1,2,3,,5\n')
test = np.ndfromtxt(data, dtype=int, delimiter=',',
converters={3: lambda s: int(s or -999)})
control = np.array([1, 2, 3, -999, 5], int)
assert_equal(test, control)
def test_missing_with_tabs(self):
# Test w/ a delimiter tab
txt = "1\t2\t3\n\t2\t\n1\t\t3"
test = np.genfromtxt(TextIO(txt), delimiter="\t",
usemask=True,)
ctrl_d = np.array([(1, 2, 3), (np.nan, 2, np.nan), (1, np.nan, 3)],)
ctrl_m = np.array([(0, 0, 0), (1, 0, 1), (0, 1, 0)], dtype=bool)
assert_equal(test.data, ctrl_d)
assert_equal(test.mask, ctrl_m)
def test_usecols(self):
# Test the selection of columns
# Select 1 column
control = np.array([[1, 2], [3, 4]], float)
data = TextIO()
np.savetxt(data, control)
data.seek(0)
test = np.ndfromtxt(data, dtype=float, usecols=(1,))
assert_equal(test, control[:, 1])
#
control = np.array([[1, 2, 3], [3, 4, 5]], float)
data = TextIO()
np.savetxt(data, control)
data.seek(0)
test = np.ndfromtxt(data, dtype=float, usecols=(1, 2))
assert_equal(test, control[:, 1:])
# Testing with arrays instead of tuples.
data.seek(0)
test = np.ndfromtxt(data, dtype=float, usecols=np.array([1, 2]))
assert_equal(test, control[:, 1:])
def test_usecols_as_css(self):
# Test giving usecols with a comma-separated string
data = "1 2 3\n4 5 6"
test = np.genfromtxt(TextIO(data),
names="a, b, c", usecols="a, c")
ctrl = np.array([(1, 3), (4, 6)], dtype=[(_, float) for _ in "ac"])
assert_equal(test, ctrl)
def test_usecols_with_structured_dtype(self):
# Test usecols with an explicit structured dtype
data = TextIO("JOE 70.1 25.3\nBOB 60.5 27.9")
names = ['stid', 'temp']
dtypes = ['S4', 'f8']
test = np.ndfromtxt(
data, usecols=(0, 2), dtype=list(zip(names, dtypes)))
assert_equal(test['stid'], [b"JOE", b"BOB"])
assert_equal(test['temp'], [25.3, 27.9])
def test_usecols_with_integer(self):
# Test usecols with an integer
test = np.genfromtxt(TextIO(b"1 2 3\n4 5 6"), usecols=0)
assert_equal(test, np.array([1., 4.]))
def test_usecols_with_named_columns(self):
# Test usecols with named columns
ctrl = np.array([(1, 3), (4, 6)], dtype=[('a', float), ('c', float)])
data = "1 2 3\n4 5 6"
kwargs = dict(names="a, b, c")
test = np.genfromtxt(TextIO(data), usecols=(0, -1), **kwargs)
assert_equal(test, ctrl)
test = np.genfromtxt(TextIO(data),
usecols=('a', 'c'), **kwargs)
assert_equal(test, ctrl)
def test_empty_file(self):
# Test that an empty file raises the proper warning.
with suppress_warnings() as sup:
sup.filter(message="genfromtxt: Empty input file:")
data = TextIO()
test = np.genfromtxt(data)
assert_equal(test, np.array([]))
def test_fancy_dtype_alt(self):
# Check that a nested dtype isn't MIA
data = TextIO('1,2,3.0\n4,5,6.0\n')
fancydtype = np.dtype([('x', int), ('y', [('t', int), ('s', float)])])
test = np.mafromtxt(data, dtype=fancydtype, delimiter=',')
control = ma.array([(1, (2, 3.0)), (4, (5, 6.0))], dtype=fancydtype)
assert_equal(test, control)
def test_shaped_dtype(self):
c = TextIO("aaaa 1.0 8.0 1 2 3 4 5 6")
dt = np.dtype([('name', 'S4'), ('x', float), ('y', float),
('block', int, (2, 3))])
x = np.ndfromtxt(c, dtype=dt)
a = np.array([('aaaa', 1.0, 8.0, [[1, 2, 3], [4, 5, 6]])],
dtype=dt)
assert_array_equal(x, a)
def test_withmissing(self):
data = TextIO('A,B\n0,1\n2,N/A')
kwargs = dict(delimiter=",", missing_values="N/A", names=True)
test = np.mafromtxt(data, dtype=None, **kwargs)
control = ma.array([(0, 1), (2, -1)],
mask=[(False, False), (False, True)],
dtype=[('A', np.int), ('B', np.int)])
assert_equal(test, control)
assert_equal(test.mask, control.mask)
#
data.seek(0)
test = np.mafromtxt(data, **kwargs)
control = ma.array([(0, 1), (2, -1)],
mask=[(False, False), (False, True)],
dtype=[('A', np.float), ('B', np.float)])
assert_equal(test, control)
assert_equal(test.mask, control.mask)
def test_user_missing_values(self):
data = "A, B, C\n0, 0., 0j\n1, N/A, 1j\n-9, 2.2, N/A\n3, -99, 3j"
basekwargs = dict(dtype=None, delimiter=",", names=True,)
mdtype = [('A', int), ('B', float), ('C', complex)]
#
test = np.mafromtxt(TextIO(data), missing_values="N/A",
**basekwargs)
control = ma.array([(0, 0.0, 0j), (1, -999, 1j),
(-9, 2.2, -999j), (3, -99, 3j)],
mask=[(0, 0, 0), (0, 1, 0), (0, 0, 1), (0, 0, 0)],
dtype=mdtype)
assert_equal(test, control)
#
basekwargs['dtype'] = mdtype
test = np.mafromtxt(TextIO(data),
missing_values={0: -9, 1: -99, 2: -999j}, **basekwargs)
control = ma.array([(0, 0.0, 0j), (1, -999, 1j),
(-9, 2.2, -999j), (3, -99, 3j)],
mask=[(0, 0, 0), (0, 1, 0), (1, 0, 1), (0, 1, 0)],
dtype=mdtype)
assert_equal(test, control)
#
test = np.mafromtxt(TextIO(data),
missing_values={0: -9, 'B': -99, 'C': -999j},
**basekwargs)
control = ma.array([(0, 0.0, 0j), (1, -999, 1j),
(-9, 2.2, -999j), (3, -99, 3j)],
mask=[(0, 0, 0), (0, 1, 0), (1, 0, 1), (0, 1, 0)],
dtype=mdtype)
assert_equal(test, control)
def test_user_filling_values(self):
# Test with missing and filling values
ctrl = np.array([(0, 3), (4, -999)], dtype=[('a', int), ('b', int)])
data = "N/A, 2, 3\n4, ,???"
kwargs = dict(delimiter=",",
dtype=int,
names="a,b,c",
missing_values={0: "N/A", 'b': " ", 2: "???"},
filling_values={0: 0, 'b': 0, 2: -999})
test = np.genfromtxt(TextIO(data), **kwargs)
ctrl = np.array([(0, 2, 3), (4, 0, -999)],
dtype=[(_, int) for _ in "abc"])
assert_equal(test, ctrl)
#
test = np.genfromtxt(TextIO(data), usecols=(0, -1), **kwargs)
ctrl = np.array([(0, 3), (4, -999)], dtype=[(_, int) for _ in "ac"])
assert_equal(test, ctrl)
data2 = "1,2,*,4\n5,*,7,8\n"
test = np.genfromtxt(TextIO(data2), delimiter=',', dtype=int,
missing_values="*", filling_values=0)
ctrl = np.array([[1, 2, 0, 4], [5, 0, 7, 8]])
assert_equal(test, ctrl)
test = np.genfromtxt(TextIO(data2), delimiter=',', dtype=int,
missing_values="*", filling_values=-1)
ctrl = np.array([[1, 2, -1, 4], [5, -1, 7, 8]])
assert_equal(test, ctrl)
def test_withmissing_float(self):
data = TextIO('A,B\n0,1.5\n2,-999.00')
test = np.mafromtxt(data, dtype=None, delimiter=',',
missing_values='-999.0', names=True,)
control = ma.array([(0, 1.5), (2, -1.)],
mask=[(False, False), (False, True)],
dtype=[('A', np.int), ('B', np.float)])
assert_equal(test, control)
assert_equal(test.mask, control.mask)
def test_with_masked_column_uniform(self):
# Test masked column
data = TextIO('1 2 3\n4 5 6\n')
test = np.genfromtxt(data, dtype=None,
missing_values='2,5', usemask=True)
control = ma.array([[1, 2, 3], [4, 5, 6]], mask=[[0, 1, 0], [0, 1, 0]])
assert_equal(test, control)
def test_with_masked_column_various(self):
# Test masked column
data = TextIO('True 2 3\nFalse 5 6\n')
test = np.genfromtxt(data, dtype=None,
missing_values='2,5', usemask=True)
control = ma.array([(1, 2, 3), (0, 5, 6)],
mask=[(0, 1, 0), (0, 1, 0)],
dtype=[('f0', bool), ('f1', bool), ('f2', int)])
assert_equal(test, control)
def test_invalid_raise(self):
# Test invalid raise
data = ["1, 1, 1, 1, 1"] * 50
for i in range(5):
data[10 * i] = "2, 2, 2, 2 2"
data.insert(0, "a, b, c, d, e")
mdata = TextIO("\n".join(data))
#
kwargs = dict(delimiter=",", dtype=None, names=True)
# XXX: is there a better way to get the return value of the
# callable in assert_warns ?
ret = {}
def f(_ret={}):
_ret['mtest'] = np.ndfromtxt(mdata, invalid_raise=False, **kwargs)
assert_warns(ConversionWarning, f, _ret=ret)
mtest = ret['mtest']
assert_equal(len(mtest), 45)
assert_equal(mtest, np.ones(45, dtype=[(_, int) for _ in 'abcde']))
#
mdata.seek(0)
assert_raises(ValueError, np.ndfromtxt, mdata,
delimiter=",", names=True)
def test_invalid_raise_with_usecols(self):
# Test invalid_raise with usecols
data = ["1, 1, 1, 1, 1"] * 50
for i in range(5):
data[10 * i] = "2, 2, 2, 2 2"
data.insert(0, "a, b, c, d, e")
mdata = TextIO("\n".join(data))
kwargs = dict(delimiter=",", dtype=None, names=True,
invalid_raise=False)
# XXX: is there a better way to get the return value of the
# callable in assert_warns ?
ret = {}
def f(_ret={}):
_ret['mtest'] = np.ndfromtxt(mdata, usecols=(0, 4), **kwargs)
assert_warns(ConversionWarning, f, _ret=ret)
mtest = ret['mtest']
assert_equal(len(mtest), 45)
assert_equal(mtest, np.ones(45, dtype=[(_, int) for _ in 'ae']))
#
mdata.seek(0)
mtest = np.ndfromtxt(mdata, usecols=(0, 1), **kwargs)
assert_equal(len(mtest), 50)
control = np.ones(50, dtype=[(_, int) for _ in 'ab'])
control[[10 * _ for _ in range(5)]] = (2, 2)
assert_equal(mtest, control)
def test_inconsistent_dtype(self):
# Test inconsistent dtype
data = ["1, 1, 1, 1, -1.1"] * 50
mdata = TextIO("\n".join(data))
converters = {4: lambda x: "(%s)" % x}
kwargs = dict(delimiter=",", converters=converters,
dtype=[(_, int) for _ in 'abcde'],)
assert_raises(ValueError, np.genfromtxt, mdata, **kwargs)
def test_default_field_format(self):
# Test default format
data = "0, 1, 2.3\n4, 5, 6.7"
mtest = np.ndfromtxt(TextIO(data),
delimiter=",", dtype=None, defaultfmt="f%02i")
ctrl = np.array([(0, 1, 2.3), (4, 5, 6.7)],
dtype=[("f00", int), ("f01", int), ("f02", float)])
assert_equal(mtest, ctrl)
def test_single_dtype_wo_names(self):
# Test single dtype w/o names
data = "0, 1, 2.3\n4, 5, 6.7"
mtest = np.ndfromtxt(TextIO(data),
delimiter=",", dtype=float, defaultfmt="f%02i")
ctrl = np.array([[0., 1., 2.3], [4., 5., 6.7]], dtype=float)
assert_equal(mtest, ctrl)
def test_single_dtype_w_explicit_names(self):
# Test single dtype w explicit names
data = "0, 1, 2.3\n4, 5, 6.7"
mtest = np.ndfromtxt(TextIO(data),
delimiter=",", dtype=float, names="a, b, c")
ctrl = np.array([(0., 1., 2.3), (4., 5., 6.7)],
dtype=[(_, float) for _ in "abc"])
assert_equal(mtest, ctrl)
def test_single_dtype_w_implicit_names(self):
# Test single dtype w implicit names
data = "a, b, c\n0, 1, 2.3\n4, 5, 6.7"
mtest = np.ndfromtxt(TextIO(data),
delimiter=",", dtype=float, names=True)
ctrl = np.array([(0., 1., 2.3), (4., 5., 6.7)],
dtype=[(_, float) for _ in "abc"])
assert_equal(mtest, ctrl)
def test_easy_structured_dtype(self):
# Test easy structured dtype
data = "0, 1, 2.3\n4, 5, 6.7"
mtest = np.ndfromtxt(TextIO(data), delimiter=",",
dtype=(int, float, float), defaultfmt="f_%02i")
ctrl = np.array([(0, 1., 2.3), (4, 5., 6.7)],
dtype=[("f_00", int), ("f_01", float), ("f_02", float)])
assert_equal(mtest, ctrl)
def test_autostrip(self):
# Test autostrip
data = "01/01/2003 , 1.3, abcde"
kwargs = dict(delimiter=",", dtype=None)
mtest = np.ndfromtxt(TextIO(data), **kwargs)
ctrl = np.array([('01/01/2003 ', 1.3, ' abcde')],
dtype=[('f0', '|S12'), ('f1', float), ('f2', '|S8')])
assert_equal(mtest, ctrl)
mtest = np.ndfromtxt(TextIO(data), autostrip=True, **kwargs)
ctrl = np.array([('01/01/2003', 1.3, 'abcde')],
dtype=[('f0', '|S10'), ('f1', float), ('f2', '|S5')])
assert_equal(mtest, ctrl)
def test_replace_space(self):
# Test the 'replace_space' option
txt = "A.A, B (B), C:C\n1, 2, 3.14"
# Test default: replace ' ' by '_' and delete non-alphanum chars
test = np.genfromtxt(TextIO(txt),
delimiter=",", names=True, dtype=None)
ctrl_dtype = [("AA", int), ("B_B", int), ("CC", float)]
ctrl = np.array((1, 2, 3.14), dtype=ctrl_dtype)
assert_equal(test, ctrl)
# Test: no replace, no delete
test = np.genfromtxt(TextIO(txt),
delimiter=",", names=True, dtype=None,
replace_space='', deletechars='')
ctrl_dtype = [("A.A", int), ("B (B)", int), ("C:C", float)]
ctrl = np.array((1, 2, 3.14), dtype=ctrl_dtype)
assert_equal(test, ctrl)
# Test: no delete (spaces are replaced by _)
test = np.genfromtxt(TextIO(txt),
delimiter=",", names=True, dtype=None,
deletechars='')
ctrl_dtype = [("A.A", int), ("B_(B)", int), ("C:C", float)]
ctrl = np.array((1, 2, 3.14), dtype=ctrl_dtype)
assert_equal(test, ctrl)
def test_replace_space_known_dtype(self):
# Test the 'replace_space' (and related) options when dtype != None
txt = "A.A, B (B), C:C\n1, 2, 3"
# Test default: replace ' ' by '_' and delete non-alphanum chars
test = np.genfromtxt(TextIO(txt),
delimiter=",", names=True, dtype=int)
ctrl_dtype = [("AA", int), ("B_B", int), ("CC", int)]
ctrl = np.array((1, 2, 3), dtype=ctrl_dtype)
assert_equal(test, ctrl)
# Test: no replace, no delete
test = np.genfromtxt(TextIO(txt),
delimiter=",", names=True, dtype=int,
replace_space='', deletechars='')
ctrl_dtype = [("A.A", int), ("B (B)", int), ("C:C", int)]
ctrl = np.array((1, 2, 3), dtype=ctrl_dtype)
assert_equal(test, ctrl)
# Test: no delete (spaces are replaced by _)
test = np.genfromtxt(TextIO(txt),
delimiter=",", names=True, dtype=int,
deletechars='')
ctrl_dtype = [("A.A", int), ("B_(B)", int), ("C:C", int)]
ctrl = np.array((1, 2, 3), dtype=ctrl_dtype)
assert_equal(test, ctrl)
def test_incomplete_names(self):
# Test w/ incomplete names
data = "A,,C\n0,1,2\n3,4,5"
kwargs = dict(delimiter=",", names=True)
# w/ dtype=None
ctrl = np.array([(0, 1, 2), (3, 4, 5)],
dtype=[(_, int) for _ in ('A', 'f0', 'C')])
test = np.ndfromtxt(TextIO(data), dtype=None, **kwargs)
assert_equal(test, ctrl)
# w/ default dtype
ctrl = np.array([(0, 1, 2), (3, 4, 5)],
dtype=[(_, float) for _ in ('A', 'f0', 'C')])
test = np.ndfromtxt(TextIO(data), **kwargs)
assert_equal(test, ctrl)
def test_names_auto_completion(self):
# Make sure that names are properly completed
data = "1 2 3\n 4 5 6"
test = np.genfromtxt(TextIO(data),
dtype=(int, float, int), names="a")
ctrl = np.array([(1, 2, 3), (4, 5, 6)],
dtype=[('a', int), ('f0', float), ('f1', int)])
assert_equal(test, ctrl)
def test_names_with_usecols_bug1636(self):
# Make sure we pick up the right names w/ usecols
data = "A,B,C,D,E\n0,1,2,3,4\n0,1,2,3,4\n0,1,2,3,4"
ctrl_names = ("A", "C", "E")
test = np.genfromtxt(TextIO(data),
dtype=(int, int, int), delimiter=",",
usecols=(0, 2, 4), names=True)
assert_equal(test.dtype.names, ctrl_names)
#
test = np.genfromtxt(TextIO(data),
dtype=(int, int, int), delimiter=",",
usecols=("A", "C", "E"), names=True)
assert_equal(test.dtype.names, ctrl_names)
#
test = np.genfromtxt(TextIO(data),
dtype=int, delimiter=",",
usecols=("A", "C", "E"), names=True)
assert_equal(test.dtype.names, ctrl_names)
def test_fixed_width_names(self):
# Test fixed-width w/ names
data = " A B C\n 0 1 2.3\n 45 67 9."
kwargs = dict(delimiter=(5, 5, 4), names=True, dtype=None)
ctrl = np.array([(0, 1, 2.3), (45, 67, 9.)],
dtype=[('A', int), ('B', int), ('C', float)])
test = np.ndfromtxt(TextIO(data), **kwargs)
assert_equal(test, ctrl)
#
kwargs = dict(delimiter=5, names=True, dtype=None)
ctrl = np.array([(0, 1, 2.3), (45, 67, 9.)],
dtype=[('A', int), ('B', int), ('C', float)])
test = np.ndfromtxt(TextIO(data), **kwargs)
assert_equal(test, ctrl)
def test_filling_values(self):
# Test missing values
data = b"1, 2, 3\n1, , 5\n0, 6, \n"
kwargs = dict(delimiter=",", dtype=None, filling_values=-999)
ctrl = np.array([[1, 2, 3], [1, -999, 5], [0, 6, -999]], dtype=int)
test = np.ndfromtxt(TextIO(data), **kwargs)
assert_equal(test, ctrl)
def test_comments_is_none(self):
# Github issue 329 (None was previously being converted to 'None').
test = np.genfromtxt(TextIO("test1,testNonetherestofthedata"),
dtype=None, comments=None, delimiter=',')
assert_equal(test[1], b'testNonetherestofthedata')
test = np.genfromtxt(TextIO("test1, testNonetherestofthedata"),
dtype=None, comments=None, delimiter=',')
assert_equal(test[1], b' testNonetherestofthedata')
def test_recfromtxt(self):
#
data = TextIO('A,B\n0,1\n2,3')
kwargs = dict(delimiter=",", missing_values="N/A", names=True)
test = np.recfromtxt(data, **kwargs)
control = np.array([(0, 1), (2, 3)],
dtype=[('A', np.int), ('B', np.int)])
self.assertTrue(isinstance(test, np.recarray))
assert_equal(test, control)
#
data = TextIO('A,B\n0,1\n2,N/A')
test = np.recfromtxt(data, dtype=None, usemask=True, **kwargs)
control = ma.array([(0, 1), (2, -1)],
mask=[(False, False), (False, True)],
dtype=[('A', np.int), ('B', np.int)])
assert_equal(test, control)
assert_equal(test.mask, control.mask)
assert_equal(test.A, [0, 2])
def test_recfromcsv(self):
#
data = TextIO('A,B\n0,1\n2,3')
kwargs = dict(missing_values="N/A", names=True, case_sensitive=True)
test = np.recfromcsv(data, dtype=None, **kwargs)
control = np.array([(0, 1), (2, 3)],
dtype=[('A', np.int), ('B', np.int)])
self.assertTrue(isinstance(test, np.recarray))
assert_equal(test, control)
#
data = TextIO('A,B\n0,1\n2,N/A')
test = np.recfromcsv(data, dtype=None, usemask=True, **kwargs)
control = ma.array([(0, 1), (2, -1)],
mask=[(False, False), (False, True)],
dtype=[('A', np.int), ('B', np.int)])
assert_equal(test, control)
assert_equal(test.mask, control.mask)
assert_equal(test.A, [0, 2])
#
data = TextIO('A,B\n0,1\n2,3')
test = np.recfromcsv(data, missing_values='N/A',)
control = np.array([(0, 1), (2, 3)],
dtype=[('a', np.int), ('b', np.int)])
self.assertTrue(isinstance(test, np.recarray))
assert_equal(test, control)
#
data = TextIO('A,B\n0,1\n2,3')
dtype = [('a', np.int), ('b', np.float)]
test = np.recfromcsv(data, missing_values='N/A', dtype=dtype)
control = np.array([(0, 1), (2, 3)],
dtype=dtype)
self.assertTrue(isinstance(test, np.recarray))
assert_equal(test, control)
def test_max_rows(self):
# Test the `max_rows` keyword argument.
data = '1 2\n3 4\n5 6\n7 8\n9 10\n'
txt = TextIO(data)
a1 = np.genfromtxt(txt, max_rows=3)
a2 = np.genfromtxt(txt)
assert_equal(a1, [[1, 2], [3, 4], [5, 6]])
assert_equal(a2, [[7, 8], [9, 10]])
# max_rows must be at least 1.
assert_raises(ValueError, np.genfromtxt, TextIO(data), max_rows=0)
# An input with several invalid rows.
data = '1 1\n2 2\n0 \n3 3\n4 4\n5 \n6 \n7 \n'
test = np.genfromtxt(TextIO(data), max_rows=2)
control = np.array([[1., 1.], [2., 2.]])
assert_equal(test, control)
# Test keywords conflict
assert_raises(ValueError, np.genfromtxt, TextIO(data), skip_footer=1,
max_rows=4)
# Test with invalid value
assert_raises(ValueError, np.genfromtxt, TextIO(data), max_rows=4)
# Test with invalid not raise
with suppress_warnings() as sup:
sup.filter(ConversionWarning)
test = np.genfromtxt(TextIO(data), max_rows=4, invalid_raise=False)
control = np.array([[1., 1.], [2., 2.], [3., 3.], [4., 4.]])
assert_equal(test, control)
test = np.genfromtxt(TextIO(data), max_rows=5, invalid_raise=False)
control = np.array([[1., 1.], [2., 2.], [3., 3.], [4., 4.]])
assert_equal(test, control)
# Structured array with field names.
data = 'a b\n#c d\n1 1\n2 2\n#0 \n3 3\n4 4\n5 5\n'
# Test with header, names and comments
txt = TextIO(data)
test = np.genfromtxt(txt, skip_header=1, max_rows=3, names=True)
control = np.array([(1.0, 1.0), (2.0, 2.0), (3.0, 3.0)],
dtype=[('c', '<f8'), ('d', '<f8')])
assert_equal(test, control)
# To continue reading the same "file", don't use skip_header or
# names, and use the previously determined dtype.
test = np.genfromtxt(txt, max_rows=None, dtype=test.dtype)
control = np.array([(4.0, 4.0), (5.0, 5.0)],
dtype=[('c', '<f8'), ('d', '<f8')])
assert_equal(test, control)
def test_gft_using_filename(self):
# Test that we can load data from a filename as well as a file
# object
tgt = np.arange(6).reshape((2, 3))
if sys.version_info[0] >= 3:
# python 3k is known to fail for '\r'
linesep = ('\n', '\r\n')
else:
linesep = ('\n', '\r\n', '\r')
for sep in linesep:
data = '0 1 2' + sep + '3 4 5'
with temppath() as name:
with open(name, 'w') as f:
f.write(data)
res = np.genfromtxt(name)
assert_array_equal(res, tgt)
def test_gft_using_generator(self):
# gft doesn't work with unicode.
def count():
for i in range(10):
yield asbytes("%d" % i)
res = np.genfromtxt(count())
assert_array_equal(res, np.arange(10))
def test_auto_dtype_largeint(self):
# Regression test for numpy/numpy#5635 whereby large integers could
# cause OverflowErrors.
# Test the automatic definition of the output dtype
#
# 2**66 = 73786976294838206464 => should convert to float
# 2**34 = 17179869184 => should convert to int64
# 2**10 = 1024 => should convert to int (int32 on 32-bit systems,
# int64 on 64-bit systems)
data = TextIO('73786976294838206464 17179869184 1024')
test = np.ndfromtxt(data, dtype=None)
assert_equal(test.dtype.names, ['f0', 'f1', 'f2'])
assert_(test.dtype['f0'] == np.float)
assert_(test.dtype['f1'] == np.int64)
assert_(test.dtype['f2'] == np.integer)
assert_allclose(test['f0'], 73786976294838206464.)
assert_equal(test['f1'], 17179869184)
assert_equal(test['f2'], 1024)
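# A minimal sketch of the promotion rule exercised above (illustrative, not
# part of the original suite): a lone token equal to 2**66 overflows int64,
# so dtype=None is expected to fall back to float64:
#
#   >>> np.ndfromtxt(TextIO(b'73786976294838206464'), dtype=None).dtype
#   dtype('float64')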
class TestPathUsage(TestCase):
# Test that pathlib.Path can be used
@np.testing.dec.skipif(Path is None, "No pathlib.Path")
def test_loadtxt(self):
with temppath(suffix='.txt') as path:
path = Path(path)
a = np.array([[1.1, 2], [3, 4]])
np.savetxt(path, a)
x = np.loadtxt(path)
assert_array_equal(x, a)
@np.testing.dec.skipif(Path is None, "No pathlib.Path")
def test_save_load(self):
# Test that pathlib.Path instances can be used with np.save and np.load.
with temppath(suffix='.npy') as path:
path = Path(path)
a = np.array([[1, 2], [3, 4]], int)
np.save(path, a)
data = np.load(path)
assert_array_equal(data, a)
@np.testing.dec.skipif(Path is None, "No pathlib.Path")
def test_savez_load(self):
# Test that pathlib.Path instances can be used with savez.
with temppath(suffix='.npz') as path:
path = Path(path)
np.savez(path, lab='place holder')
with np.load(path) as data:
assert_array_equal(data['lab'], 'place holder')
@np.testing.dec.skipif(Path is None, "No pathlib.Path")
def test_savez_compressed_load(self):
# Test that pathlib.Path instances can be used with savez_compressed.
with temppath(suffix='.npz') as path:
path = Path(path)
np.savez_compressed(path, lab='place holder')
data = np.load(path)
assert_array_equal(data['lab'], 'place holder')
data.close()
@np.testing.dec.skipif(Path is None, "No pathlib.Path")
def test_genfromtxt(self):
with temppath(suffix='.txt') as path:
path = Path(path)
a = np.array([(1, 2), (3, 4)])
np.savetxt(path, a)
data = np.genfromtxt(path)
assert_array_equal(a, data)
@np.testing.dec.skipif(Path is None, "No pathlib.Path")
def test_ndfromtxt(self):
# Test outputting a standard ndarray
with temppath(suffix='.txt') as path:
path = Path(path)
with path.open('w') as f:
f.write(u'1 2\n3 4')
control = np.array([[1, 2], [3, 4]], dtype=int)
test = np.ndfromtxt(path, dtype=int)
assert_array_equal(test, control)
@np.testing.dec.skipif(Path is None, "No pathlib.Path")
def test_mafromtxt(self):
# From `test_fancy_dtype_alt` above
with temppath(suffix='.txt') as path:
path = Path(path)
with path.open('w') as f:
f.write(u'1,2,3.0\n4,5,6.0\n')
test = np.mafromtxt(path, delimiter=',')
control = ma.array([(1.0, 2.0, 3.0), (4.0, 5.0, 6.0)])
assert_equal(test, control)
@np.testing.dec.skipif(Path is None, "No pathlib.Path")
def test_recfromtxt(self):
with temppath(suffix='.txt') as path:
path = Path(path)
with path.open('w') as f:
f.write(u'A,B\n0,1\n2,3')
kwargs = dict(delimiter=",", missing_values="N/A", names=True)
test = np.recfromtxt(path, **kwargs)
control = np.array([(0, 1), (2, 3)],
dtype=[('A', np.int), ('B', np.int)])
self.assertTrue(isinstance(test, np.recarray))
assert_equal(test, control)
@np.testing.dec.skipif(Path is None, "No pathlib.Path")
def test_recfromcsv(self):
with temppath(suffix='.txt') as path:
path = Path(path)
with path.open('w') as f:
f.write(u'A,B\n0,1\n2,3')
kwargs = dict(missing_values="N/A", names=True, case_sensitive=True)
test = np.recfromcsv(path, dtype=None, **kwargs)
control = np.array([(0, 1), (2, 3)],
dtype=[('A', np.int), ('B', np.int)])
self.assertTrue(isinstance(test, np.recarray))
assert_equal(test, control)
def test_gzip_load():
a = np.random.random((5, 5))
s = BytesIO()
f = gzip.GzipFile(fileobj=s, mode="w")
np.save(f, a)
f.close()
s.seek(0)
f = gzip.GzipFile(fileobj=s, mode="r")
assert_array_equal(np.load(f), a)
def test_gzip_loadtxt():
# Thanks to another Windows brokenness, we can't use
# NamedTemporaryFile: a file created by that function cannot be
# reopened by another open call. So we first build the gzipped string
# of the test reference array in memory, write it to a securely opened
# file, and then read that file back with the loadtxt function.
s = BytesIO()
g = gzip.GzipFile(fileobj=s, mode='w')
g.write(b'1 2 3\n')
g.close()
s.seek(0)
with temppath(suffix='.gz') as name:
with open(name, 'wb') as f:
f.write(s.read())
res = np.loadtxt(name)
s.close()
assert_array_equal(res, [1, 2, 3])
def test_gzip_loadtxt_from_string():
s = BytesIO()
f = gzip.GzipFile(fileobj=s, mode="w")
f.write(b'1 2 3\n')
f.close()
s.seek(0)
f = gzip.GzipFile(fileobj=s, mode="r")
assert_array_equal(np.loadtxt(f), [1, 2, 3])
def test_npzfile_dict():
s = BytesIO()
x = np.zeros((3, 3))
y = np.zeros((3, 3))
np.savez(s, x=x, y=y)
s.seek(0)
z = np.load(s)
assert_('x' in z)
assert_('y' in z)
assert_('x' in z.keys())
assert_('y' in z.keys())
for f, a in z.items():
assert_(f in ['x', 'y'])
assert_equal(a.shape, (3, 3))
assert_(len(z.items()) == 2)
for f in z:
assert_(f in ['x', 'y'])
assert_('x' in z.keys())
def test_load_refcount():
# Check that objects returned by np.load are directly freed based on
# their refcount, rather than needing the gc to collect them.
f = BytesIO()
np.savez(f, [1, 2, 3])
f.seek(0)
assert_(gc.isenabled())
gc.disable()
try:
gc.collect()
np.load(f)
# gc.collect returns the number of unreachable objects in cycles that
# were found -- we are checking that no cycles were created by np.load
n_objects_in_cycles = gc.collect()
finally:
gc.enable()
assert_equal(n_objects_in_cycles, 0)
if __name__ == "__main__":
run_module_suite()
|
gpl-3.0
|
grammarly/browser-extensions
|
firefox/addon-sdk/python-lib/cuddlefish/tests/test_init.py
|
34
|
9626
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os, unittest, shutil
import zipfile
from StringIO import StringIO
from cuddlefish import initializer
from cuddlefish.templates import TEST_MAIN_JS, PACKAGE_JSON
tests_path = os.path.abspath(os.path.dirname(__file__))
class TestInit(unittest.TestCase):
def run_init_in_subdir(self, dirname, f, *args, **kwargs):
top = os.path.abspath(os.getcwd())
basedir = os.path.abspath(os.path.join(".test_tmp",self.id(),dirname))
if os.path.isdir(basedir):
assert basedir.startswith(top)
shutil.rmtree(basedir)
os.makedirs(basedir)
try:
os.chdir(basedir)
return f(basedir, *args, **kwargs)
finally:
os.chdir(top)
def do_test_init(self,basedir):
# Initialize the addon; no errors are expected
f = open(".ignoreme","w")
f.write("stuff")
f.close()
out, err = StringIO(), StringIO()
init_run = initializer(None, ["init"], out, err)
out, err = out.getvalue(), err.getvalue()
self.assertEqual(init_run["result"], 0)
self.assertTrue("* lib directory created" in out)
self.assertTrue("* data directory created" in out)
self.assertTrue("Have fun!" in out)
self.assertEqual(err,"")
self.assertTrue(len(os.listdir(basedir))>0)
main_js = os.path.join(basedir,"lib","main.js")
package_json = os.path.join(basedir,"package.json")
test_main_js = os.path.join(basedir,"test","test-main.js")
self.assertTrue(os.path.exists(main_js))
self.assertTrue(os.path.exists(package_json))
self.assertTrue(os.path.exists(test_main_js))
self.assertEqual(open(main_js,"r").read(),"")
self.assertEqual(open(package_json,"r").read() % {"id":"tmp_addon_id" },
PACKAGE_JSON % {"name":"tmp_addon_sample",
"title": "tmp_addon_SAMPLE",
"id":init_run["jid"] })
self.assertEqual(open(test_main_js,"r").read(),TEST_MAIN_JS)
# Running init again in the now-initialized directory must fail
out, err = StringIO(), StringIO()
init_run = initializer(None, ["init"], out, err)
out, err = out.getvalue(), err.getvalue()
self.failIfEqual(init_run["result"],0)
self.assertTrue("This command must be run in an empty directory." in err)
def test_initializer(self):
self.run_init_in_subdir("tmp_addon_SAMPLE",self.do_test_init)
def do_test_args(self, basedir):
# check that running it with spurious arguments will fail
out,err = StringIO(), StringIO()
init_run = initializer(None, ["init", "specified-dirname", "extra-arg"], out, err)
out, err = out.getvalue(), err.getvalue()
self.failIfEqual(init_run["result"], 0)
self.assertTrue("Too many arguments" in err)
def test_args(self):
self.run_init_in_subdir("tmp_addon_sample", self.do_test_args)
def _test_existing_files(self, basedir):
f = open("pay_attention_to_me","w")
f.write("stuff")
f.close()
out,err = StringIO(), StringIO()
rc = initializer(None, ["init"], out, err)
out, err = out.getvalue(), err.getvalue()
self.assertEqual(rc["result"], 1)
self.failUnless("This command must be run in an empty directory" in err,
err)
self.failIf(os.path.exists("lib"))
def test_existing_files(self):
self.run_init_in_subdir("existing_files", self._test_existing_files)
def test_init_subdir(self):
parent = os.path.abspath(os.path.join(".test_tmp", self.id()))
basedir = os.path.join(parent, "init-basedir")
if os.path.exists(parent):
shutil.rmtree(parent)
os.makedirs(parent)
# if the basedir exists and is not empty, init should refuse
os.makedirs(basedir)
f = open(os.path.join(basedir, "boo"), "w")
f.write("stuff")
f.close()
out, err = StringIO(), StringIO()
rc = initializer(None, ["init", basedir], out, err)
out, err = out.getvalue(), err.getvalue()
self.assertEqual(rc["result"], 1)
self.assertTrue("testing if directory is empty" in out, out)
self.assertTrue("This command must be run in an empty directory." in err,
err)
# a .dotfile should be tolerated
os.rename(os.path.join(basedir, "boo"), os.path.join(basedir, ".phew"))
out, err = StringIO(), StringIO()
rc = initializer(None, ["init", basedir], out, err)
out, err = out.getvalue(), err.getvalue()
self.assertEqual(rc["result"], 0)
self.assertTrue("* data directory created" in out, out)
self.assertTrue("Have fun!" in out)
self.assertEqual(err,"")
self.assertTrue(os.listdir(basedir))
main_js = os.path.join(basedir,"lib","main.js")
package_json = os.path.join(basedir,"package.json")
self.assertTrue(os.path.exists(main_js))
self.assertTrue(os.path.exists(package_json))
shutil.rmtree(basedir)
# init should create directories that don't exist already
out, err = StringIO(), StringIO()
rc = initializer(None, ["init", basedir], out, err)
out, err = out.getvalue(), err.getvalue()
self.assertEqual(rc["result"], 0)
self.assertTrue("* data directory created" in out)
self.assertTrue("Have fun!" in out)
self.assertEqual(err,"")
self.assertTrue(os.listdir(basedir))
main_js = os.path.join(basedir,"lib","main.js")
package_json = os.path.join(basedir,"package.json")
self.assertTrue(os.path.exists(main_js))
self.assertTrue(os.path.exists(package_json))
class TestCfxQuits(unittest.TestCase):
def run_cfx(self, addon_path, command):
old_cwd = os.getcwd()
os.chdir(addon_path)
import sys
old_stdout = sys.stdout
old_stderr = sys.stderr
sys.stdout = out = StringIO()
sys.stderr = err = StringIO()
rc = 0
try:
import cuddlefish
args = list(command)
# Pass arguments given to cfx so that cfx can find firefox path
# if --binary option is given:
args.extend(sys.argv[1:])
cuddlefish.run(arguments=args)
except SystemExit, e:
if "code" in e:
rc = e.code
elif "args" in e and len(e.args)>0:
rc = e.args[0]
else:
rc = 0
finally:
sys.stdout = old_stdout
sys.stderr = old_stderr
os.chdir(old_cwd)
out.flush()
err.flush()
return rc, out.getvalue(), err.getvalue()
# this method doesn't exist in python 2.5,
# so implement our own
def assertIn(self, member, container):
"""Just like self.assertTrue(a in b), but with a nicer default message."""
if member not in container:
standardMsg = '"%s" not found in "%s"' % (member,
container)
self.fail(standardMsg)
def test_cfx_run(self):
addon_path = os.path.join(tests_path,
"addons", "simplest-test")
rc, out, err = self.run_cfx(addon_path, ["run"])
self.assertEqual(rc, 0)
self.assertIn("Program terminated successfully.", err)
def test_cfx_test(self):
addon_path = os.path.join(tests_path,
"addons", "simplest-test")
rc, out, err = self.run_cfx(addon_path, ["test"])
self.assertEqual(rc, 0)
self.assertIn("1 of 1 tests passed.", err)
self.assertIn("Program terminated successfully.", err)
def test_cfx_xpi(self):
addon_path = os.path.join(tests_path,
"addons", "simplest-test")
rc, out, err = self.run_cfx(addon_path, \
["xpi", "--manifest-overload", "manifest-overload.json"])
self.assertEqual(rc, 0)
# Ensure that the addon version from our manifest overload is used
# in install.rdf
xpi_path = os.path.join(addon_path, "simplest-test.xpi")
xpi = zipfile.ZipFile(xpi_path, "r")
manifest = xpi.read("install.rdf")
self.assertIn("<em:version>1.0-nightly</em:version>", manifest)
xpi.close()
os.remove(xpi_path)
def test_cfx_init(self):
# Create an empty test directory
addon_path = os.path.abspath(os.path.join(".test_tmp", "test-cfx-init"))
if os.path.isdir(addon_path):
shutil.rmtree(addon_path)
os.makedirs(addon_path)
# Fake a call to cfx init
old_cwd = os.getcwd()
os.chdir(addon_path)
out, err = StringIO(), StringIO()
rc = initializer(None, ["init"], out, err)
os.chdir(old_cwd)
out, err = out.getvalue(), err.getvalue()
self.assertEqual(rc["result"], 0)
self.assertTrue("Have fun!" in out)
self.assertEqual(err,"")
# run cfx test
rc, out, err = self.run_cfx(addon_path, ["test"])
self.assertEqual(rc, 0)
self.assertIn("2 of 2 tests passed.", err)
self.assertIn("Program terminated successfully.", err)
if __name__ == "__main__":
unittest.main()
|
bsd-3-clause
|
dscho/hg
|
tests/test-minirst.py
|
1
|
5259
|
from __future__ import absolute_import, print_function
import pprint
from mercurial import (
minirst,
)
def debugformat(text, form, **kwargs):
if form == 'html':
print("html format:")
out = minirst.format(text, style=form, **kwargs)
else:
print("%d column format:" % form)
out = minirst.format(text, width=form, **kwargs)
print("-" * 70)
if type(out) == tuple:
print(out[0][:-1])
print("-" * 70)
pprint.pprint(out[1])
else:
print(out[:-1])
print("-" * 70)
print()
def debugformats(title, text, **kwargs):
print("== %s ==" % title)
debugformat(text, 60, **kwargs)
debugformat(text, 30, **kwargs)
debugformat(text, 'html', **kwargs)
paragraphs = """
This is some text in the first paragraph.
A small indented paragraph.
It is followed by some lines
containing random whitespace.
\n \n \nThe third and final paragraph.
"""
debugformats('paragraphs', paragraphs)
definitions = """
A Term
Definition. The indented
lines make up the definition.
Another Term
Another definition. The final line in the
definition determines the indentation, so
this will be indented with four spaces.
A Nested/Indented Term
Definition.
"""
debugformats('definitions', definitions)
literals = r"""
The fully minimized form is the most
convenient form::
Hello
literal
world
In the partially minimized form a paragraph
simply ends with space-double-colon. ::
////////////////////////////////////////
long un-wrapped line in a literal block
\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\
::
This literal block is started with '::',
the so-called expanded form. The paragraph
with '::' disappears in the final output.
"""
debugformats('literals', literals)
lists = """
- This is the first list item.
Second paragraph in the first list item.
- List items need not be separated
by a blank line.
- And will be rendered without
one in any case.
We can have indented lists:
- This is an indented list item
- Another indented list item::
- A literal block in the middle
of an indented list.
(The above is not a list item since we are in the literal block.)
::
Literal block with no indentation (apart from
the two spaces added to all literal blocks).
1. This is an enumerated list (first item).
2. Continuing with the second item.
(1) foo
(2) bar
1) Another
2) List
Line blocks are also a form of list:
| This is the first line.
The line continues here.
| This is the second line.
"""
debugformats('lists', lists)
options = """
There is support for simple option lists,
but only with long options:
-X, --exclude filter an option with a short and long option with an argument
-I, --include an option with both a short option and a long option
--all Output all.
--both Output both (this description is
quite long).
--long Output all day long.
--par This option has two paragraphs in its description.
This is the first.
This is the second. Blank lines may be omitted between
options (as above) or left in (as here).
The next paragraph looks like an option list, but lacks the two-space
marker after the option. It is treated as a normal paragraph:
--foo bar baz
"""
debugformats('options', options)
fields = """
:a: First item.
:ab: Second item. Indentation and wrapping
is handled automatically.
Next list:
:small: The larger key below triggers full indentation here.
:much too large: This key is big enough to get its own line.
"""
debugformats('fields', fields)
containers = """
Normal output.
.. container:: debug
Initial debug output.
.. container:: verbose
Verbose output.
.. container:: debug
Debug output.
"""
debugformats('containers (normal)', containers)
debugformats('containers (verbose)', containers, keep=['verbose'])
debugformats('containers (debug)', containers, keep=['debug'])
debugformats('containers (verbose debug)', containers,
keep=['verbose', 'debug'])
roles = """Please see :hg:`add`."""
debugformats('roles', roles)
sections = """
Title
=====
Section
-------
Subsection
''''''''''
Markup: ``foo`` and :hg:`help`
------------------------------
"""
debugformats('sections', sections)
admonitions = """
.. note::
This is a note
- Bullet 1
- Bullet 2
.. warning:: This is a warning Second
input line of warning
.. danger::
This is danger
"""
debugformats('admonitions', admonitions)
comments = """
Some text.
.. A comment
.. An indented comment
Some indented text.
..
Empty comment above
"""
debugformats('comments', comments)
data = [['a', 'b', 'c'],
['1', '2', '3'],
['foo', 'bar', 'baz this list is very very very long man']]
rst = minirst.maketable(data, 2, True)
table = ''.join(rst)
print(table)
debugformats('table', table)
data = [['s', 'long', 'line\ngoes on here'],
['', 'xy', 'tried to fix here\n by indenting']]
rst = minirst.maketable(data, 1, False)
table = ''.join(rst)
print(table)
debugformats('table+nl', table)
|
gpl-2.0
|
rkq/cxxexp
|
third-party/src/boost_1_56_0/tools/build/src/tools/cast.py
|
45
|
2707
|
# Status: ported
# Base revision: 64432.
# Copyright 2005-2010 Vladimir Prus.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# Defines main target 'cast', used to change the type of a target. For example, in the Qt
# library one wants two kinds of CPP files -- those that are just compiled and those
# that are passed via the MOC tool.
#
# This is done with:
#
# exe main : main.cpp [ cast _ moccable-cpp : widget.cpp ] ;
#
# Boost.Build will assign target type CPP to both main.cpp and widget.cpp. Then,
# the cast rule will change target type of widget.cpp to MOCCABLE-CPP, and Qt
# support will run the MOC tool as part of the build process.
#
# At the moment, the 'cast' rule only works for non-derived (source) targets.
#
# TODO: The following comment is unclear or incorrect. Clean it up.
# > Another solution would be to add a separate main target 'moc-them' that
# > would moc all the passed sources, no matter what their type is, but I prefer
# > cast, as defining a new target type + generator for that type is somewhat
# > simpler than defining a main target rule.
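# A hypothetical Python-side invocation of the rule defined below (names are
# illustrative; in practice 'cast' is invoked from a jamfile as in the example
# above):
#
#   cast("moccables", "MOCCABLE-CPP", ["widget.cpp"], [], [], [])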
import b2.build.targets as targets
import b2.build.virtual_target as virtual_target
from b2.manager import get_manager
from b2.util import bjam_signature
class CastTargetClass(targets.TypedTarget):
def construct(self, name, source_targets, ps):
result = []
for s in source_targets:
if not isinstance(s, virtual_target.FileTarget):
get_manager().errors()("Source to the 'cast' metatarget is not a file")
if s.action():
get_manager().errors()("Only non-derived targets allowed as sources for 'cast'.")
r = s.clone_with_different_type(self.type())
result.append(get_manager().virtual_targets().register(r))
return result
@bjam_signature((["name", "type"], ["sources", "*"], ["requirements", "*"],
["default_build", "*"], ["usage_requirements", "*"]))
def cast(name, type, sources, requirements, default_build, usage_requirements):
from b2.manager import get_manager
t = get_manager().targets()
project = get_manager().projects().current()
return t.main_target_alternative(
CastTargetClass(name, project, type,
t.main_target_sources(sources, name),
t.main_target_requirements(requirements, project),
t.main_target_default_build(default_build, project),
t.main_target_usage_requirements(usage_requirements, project)))
get_manager().projects().add_rule("cast", cast)
|
mit
|
flavour/RedHat
|
modules/geopy/geocoders/yahoo.py
|
46
|
3135
|
"""
Wrapper for Yahoo's PlaceFinder API (documented as API release 1.0, 22 June 2010).
"""
import xml.dom.minidom
from geopy import util
from geopy import Point
from urllib import urlencode
from urllib2 import urlopen
from geopy.geocoders.base import Geocoder
try:
import json
except ImportError:
try:
import simplejson as json
except ImportError:
from django.utils import simplejson as json
class Yahoo(Geocoder):
BASE_URL = "http://where.yahooapis.com/geocode?%s"
def __init__(self, app_id, format_string='%s', output_format=None):
self.app_id = app_id
self.format_string = format_string
if output_format is not None:
from warnings import warn
warn('geopy.geocoders.yahoo.Yahoo: The `output_format` parameter is deprecated '+
'and now ignored. JSON will be used internally.', DeprecationWarning)
def geocode(self, string, exactly_one=True):
if isinstance(string, unicode):
string = string.encode('utf-8')
params = {'location': self.format_string % string,
'appid': self.app_id,
'flags': 'J'
}
url = self.BASE_URL % urlencode(params)
util.logger.debug("Fetching %s..." % url)
return self.geocode_url(url, exactly_one)
def geocode_url(self, url, exactly_one=True):
page = urlopen(url)
return self.parse_json(page, exactly_one)
def parse_json(self, page, exactly_one=True):
if not isinstance(page, basestring):
page = util.decode_page(page)
doc = json.loads(page)
results = doc.get('ResultSet', {}).get('Results', [])
if not results:
raise ValueError("No results found")
elif exactly_one and len(results) != 1:
raise ValueError("Didn't find exactly one placemark! " \
"(Found %d.)" % len(results))
def parse_result(place):
line1, line2, line3, line4 = place.get('line1'), place.get('line2'), place.get('line3'), place.get('line4')
address = util.join_filter(", ", [line1, line2, line3, line4])
city = place.get('city')
state = place.get('state')
country = place.get('country')
location = util.join_filter(", ", [address, city, country])
lat, lng = place.get('latitude'), place.get('longitude')
#if lat and lng:
# point = Point(floatlat, lng)
#else:
# point = None
return (location, (float(lat), float(lng)))
if exactly_one:
return parse_result(results[0])
else:
return [parse_result(result) for result in results]
def reverse(self, coord, exactly_one=True):
(lat, lng) = coord
params = {'location': '%s,%s' % (lat, lng),
'gflags' : 'R',
'appid': self.app_id,
'flags': 'J'
}
url = self.BASE_URL % urlencode(params)
return self.geocode_url(url, exactly_one)
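# Illustrative usage of the geocoder above (the app id is a placeholder, not
# a real key):
#
#   geocoder = Yahoo(app_id='YOUR_APP_ID')
#   place, (lat, lng) = geocoder.geocode('701 First Ave, Sunnyvale, CA')
#   address, (lat, lng) = geocoder.reverse((37.416384, -122.024853))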
|
mit
|
adfernandes/mbed
|
tools/host_tests/stdio_auto.py
|
13
|
2147
|
"""
mbed SDK
Copyright (c) 2011-2013 ARM Limited
SPDX-License-Identifier: Apache-2.0
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import re
import random
from time import time
class StdioTest(object):
PATTERN_INT_VALUE = "Your value was: (-?\d+)"
re_detect_int_value = re.compile(PATTERN_INT_VALUE)
def test(self, selftest):
test_result = True
c = selftest.mbed.serial_readline() # {{start}} preamble
if c is None:
return selftest.RESULT_IO_SERIAL
selftest.notify(c)
for i in range(0, 10):
random_integer = random.randint(-99999, 99999)
selftest.notify("HOST: Generated number: " + str(random_integer))
start = time()
selftest.mbed.serial_write(str(random_integer) + "\n")
serial_stdio_msg = selftest.mbed.serial_readline()
if serial_stdio_msg is None:
return selftest.RESULT_IO_SERIAL
delay_time = time() - start
selftest.notify(serial_stdio_msg.strip())
# Searching for reply with scanned values
m = self.re_detect_int_value.search(serial_stdio_msg)
if m and len(m.groups()):
int_value = m.groups()[0]
int_value_cmp = random_integer == int(int_value)
test_result = test_result and int_value_cmp
selftest.notify("HOST: Number %s read after %.3f sec ... [%s]"% (int_value, delay_time, "OK" if int_value_cmp else "FAIL"))
else:
test_result = False
break
return selftest.RESULT_SUCCESS if test_result else selftest.RESULT_FAILURE
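# The device-side firmware is assumed to echo each number it receives in the
# form "Your value was: <n>", which re_detect_int_value parses; an
# illustrative exchange:
#
#   host   -> "-4242\n"
#   device -> "Your value was: -4242"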
|
apache-2.0
|
shuishoudage/CloudappCLI
|
env/lib/python2.7/site-packages/setuptools/command/rotate.py
|
389
|
2164
|
from distutils.util import convert_path
from distutils import log
from distutils.errors import DistutilsOptionError
import os
import shutil
from setuptools.extern import six
from setuptools import Command
class rotate(Command):
"""Delete older distributions"""
description = "delete older distributions, keeping N newest files"
user_options = [
('match=', 'm', "patterns to match (required)"),
('dist-dir=', 'd', "directory where the distributions are"),
('keep=', 'k', "number of matching distributions to keep"),
]
boolean_options = []
def initialize_options(self):
self.match = None
self.dist_dir = None
self.keep = None
def finalize_options(self):
if self.match is None:
raise DistutilsOptionError(
"Must specify one or more (comma-separated) match patterns "
"(e.g. '.zip' or '.egg')"
)
if self.keep is None:
raise DistutilsOptionError("Must specify number of files to keep")
try:
self.keep = int(self.keep)
except ValueError:
raise DistutilsOptionError("--keep must be an integer")
if isinstance(self.match, six.string_types):
self.match = [
convert_path(p.strip()) for p in self.match.split(',')
]
self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
def run(self):
self.run_command("egg_info")
from glob import glob
for pattern in self.match:
pattern = self.distribution.get_name() + '*' + pattern
files = glob(os.path.join(self.dist_dir, pattern))
files = [(os.path.getmtime(f), f) for f in files]
files.sort()
files.reverse()
log.info("%d file(s) matching %s", len(files), pattern)
files = files[self.keep:]
for (t, f) in files:
log.info("Deleting %s", f)
if not self.dry_run:
if os.path.isdir(f):
shutil.rmtree(f)
else:
os.unlink(f)
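# Typical invocation (illustrative):
#
#   python setup.py rotate --match=.egg --keep=2
#
# keeps the two newest '<name>*.egg' files in the dist directory and deletes
# the rest (honoring --dry-run).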
|
gpl-3.0
|
paypal/keystone
|
tests/test_import_legacy.py
|
4
|
4145
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
try:
import sqlite3 as dbapi
except ImportError:
from pysqlite2 import dbapi2 as dbapi
from keystone.catalog.backends import templated as catalog_templated
from keystone.common.sql import legacy
from keystone.common.sql import util as sql_util
from keystone import config
from keystone.identity.backends import sql as identity_sql
from keystone import test
CONF = config.CONF
class ImportLegacy(test.TestCase):
def setUp(self):
super(ImportLegacy, self).setUp()
self.config([test.etcdir('keystone.conf.sample'),
test.testsdir('test_overrides.conf'),
test.testsdir('backend_sql.conf'),
test.testsdir('backend_sql_disk.conf')])
sql_util.setup_test_database()
self.identity_api = identity_sql.Identity()
def tearDown(self):
sql_util.teardown_test_database()
super(ImportLegacy, self).tearDown()
def setup_old_database(self, sql_dump):
sql_path = test.testsdir(sql_dump)
db_path = test.testsdir('%s.db' % sql_dump)
try:
os.unlink(db_path)
except OSError:
pass
script_str = open(sql_path).read().strip()
conn = dbapi.connect(db_path)
conn.executescript(script_str)
conn.commit()
return db_path
def test_import_d5(self):
db_path = self.setup_old_database('legacy_d5.sqlite')
migration = legacy.LegacyMigration('sqlite:///%s' % db_path)
migration.migrate_all()
admin_id = '1'
user_ref = self.identity_api.get_user(admin_id)
self.assertEquals(user_ref['name'], 'admin')
self.assertEquals(user_ref['enabled'], True)
# check password hashing
user_ref, tenant_ref, metadata_ref = self.identity_api.authenticate(
user_id=admin_id, password='secrete')
# check catalog
self._check_catalog(migration)
def test_import_diablo(self):
db_path = self.setup_old_database('legacy_diablo.sqlite')
migration = legacy.LegacyMigration('sqlite:///%s' % db_path)
migration.migrate_all()
admin_id = '1'
user_ref = self.identity_api.get_user(admin_id)
self.assertEquals(user_ref['name'], 'admin')
self.assertEquals(user_ref['enabled'], True)
# check password hashing
user_ref, tenant_ref, metadata_ref = self.identity_api.authenticate(
user_id=admin_id, password='secrete')
# check catalog
self._check_catalog(migration)
def test_import_essex(self):
db_path = self.setup_old_database('legacy_essex.sqlite')
migration = legacy.LegacyMigration('sqlite:///%s' % db_path)
migration.migrate_all()
admin_id = 'c93b19ea3fa94484824213db8ac0afce'
user_ref = self.identity_api.get_user(admin_id)
self.assertEquals(user_ref['name'], 'admin')
self.assertEquals(user_ref['enabled'], True)
# check password hashing
user_ref, tenant_ref, metadata_ref = self.identity_api.authenticate(
user_id=admin_id, password='secrete')
# check catalog
self._check_catalog(migration)
def _check_catalog(self, migration):
catalog_lines = migration.dump_catalog()
catalog = catalog_templated.parse_templates(catalog_lines)
self.assert_('RegionOne' in catalog)
self.assert_('compute' in catalog['RegionOne'])
self.assert_('adminURL' in catalog['RegionOne']['compute'])
|
apache-2.0
|
cs564/heron
|
heron/tools/tracker/tests/python/topology_helpers_unittest.py
|
8
|
10177
|
''' topology_helpers_unittest.py '''
# pylint: disable=missing-docstring
import unittest2 as unittest
from heron.common.src.python import constants
from heron.tools.tracker.src.python import topology_helpers
from mock_proto import MockProto
class TopologyHelpersTest(unittest.TestCase):
def setUp(self):
self.mock_proto = MockProto()
def test_get_component_parallelism(self):
topology = self.mock_proto.create_mock_medium_topology(1, 2, 3, 4)
cmap = topology_helpers.get_component_parallelism(topology)
self.assertEqual(1, cmap["mock_spout1"])
self.assertEqual(2, cmap["mock_bolt1"])
self.assertEqual(3, cmap["mock_bolt2"])
self.assertEqual(4, cmap["mock_bolt3"])
def test_get_disk_per_container(self):
# We would have 10 instances (1 + 2 + 3 + 4)
topology = self.mock_proto.create_mock_medium_topology(1, 2, 3, 4)
# First try with 1 container, so the disk request should be:
# 10 * GB + Padding_Disk (12GB) = 22GB
default_disk = topology_helpers.get_disk_per_container(topology)
self.assertEqual(22 * constants.GB, default_disk)
# Then try with 4 containers, so the disk request should be:
# 10/4 = 2.5 -> 3 (round to ceiling) + 12 = 15GB
self.mock_proto.add_topology_config(topology, constants.TOPOLOGY_STMGRS, 4)
self.assertEqual(15 * constants.GB, topology_helpers.get_disk_per_container(topology))
# Then let's set the disk_per_container explicitly
self.mock_proto.add_topology_config(
topology, constants.TOPOLOGY_CONTAINER_DISK_REQUESTED, 950109)
# add_topology_config converts the config value into a string
self.assertEqual(str(950109), topology_helpers.get_disk_per_container(topology))
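# The sizing rule assumed by the assertions above (reconstructed for
# illustration, not quoted from topology_helpers):
#
#   disk_per_container = ceil(total_instances / num_containers) * GB
#                        + 12 * GB of padding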
def test_get_total_instances(self):
topology = self.mock_proto.create_mock_medium_topology(3, 4, 5, 6)
num_instances = topology_helpers.get_total_instances(topology)
self.assertEqual(18, num_instances)
def test_sane(self):
# Make wrong topology names
topology = self.mock_proto.create_mock_simple_topology()
topology.name = ""
self.assertFalse(topology_helpers.sane(topology))
topology = self.mock_proto.create_mock_simple_topology()
topology.name = "test.with.a.dot"
self.assertFalse(topology_helpers.sane(topology))
topology = self.mock_proto.create_mock_simple_topology()
topology.name = "test/with/a/slash"
self.assertFalse(topology_helpers.sane(topology))
# Add another spout with the same name
topology = self.mock_proto.create_mock_simple_topology()
topology.spouts.extend([self.mock_proto.create_mock_spout("mock_spout", [], 1)])
self.assertFalse(topology_helpers.sane(topology))
# Add another bolt with the same name
topology = self.mock_proto.create_mock_simple_topology()
topology.bolts.extend([self.mock_proto.create_mock_bolt("mock_bolt", [], [], 1)])
self.assertFalse(topology_helpers.sane(topology))
# If num containers are greater than num instances
topology = self.mock_proto.create_mock_simple_topology(1, 1)
self.mock_proto.add_topology_config(topology, constants.TOPOLOGY_STMGRS, 4)
self.assertFalse(topology_helpers.sane(topology))
# If rammap is partial, with fewer components
topology = self.mock_proto.create_mock_simple_topology()
self.mock_proto.add_topology_config(
topology, constants.TOPOLOGY_COMPONENT_RAMMAP, "mock_spout:1")
self.assertTrue(topology_helpers.sane(topology))
# If rammap is not well formatted
topology = self.mock_proto.create_mock_simple_topology()
self.mock_proto.add_topology_config(
topology, constants.TOPOLOGY_COMPONENT_RAMMAP, "mock_spout:1:2,mock_bolt:2:3")
self.assertFalse(topology_helpers.sane(topology))
# If rammap has wrong component name
topology = self.mock_proto.create_mock_simple_topology()
self.mock_proto.add_topology_config(
topology, constants.TOPOLOGY_COMPONENT_RAMMAP, "wrong_mock_spout:1,mock_bolt:2")
self.assertFalse(topology_helpers.sane(topology))
# If everything is right
topology = self.mock_proto.create_mock_simple_topology()
self.mock_proto.add_topology_config(
topology, constants.TOPOLOGY_COMPONENT_RAMMAP, "mock_spout:1,mock_bolt:2")
self.assertTrue(topology_helpers.sane(topology))
def test_num_cpus_per_container(self):
topology = self.mock_proto.create_mock_simple_topology(2, 2)
self.mock_proto.add_topology_config(topology, constants.TOPOLOGY_STMGRS, 4)
self.assertEqual(2, topology_helpers.get_cpus_per_container(topology))
topology = self.mock_proto.create_mock_simple_topology(2, 2)
self.mock_proto.add_topology_config(topology, constants.TOPOLOGY_CONTAINER_CPU_REQUESTED, 42)
self.assertEqual(42, topology_helpers.get_cpus_per_container(topology))
def test_get_user_rammap(self):
topology = self.mock_proto.create_mock_simple_topology()
self.mock_proto.add_topology_config(
topology, constants.TOPOLOGY_COMPONENT_RAMMAP, "mock_spout:2,mock_bolt:3")
self.assertEqual({"mock_spout":2, "mock_bolt":3}, topology_helpers.get_user_rammap(topology))
def test_get_component_distribution(self):
topology = self.mock_proto.create_mock_simple_topology(4, 8)
self.mock_proto.add_topology_config(topology, constants.TOPOLOGY_STMGRS, 4)
component_distribution = topology_helpers.get_component_distribution(topology)
expected_component_distribution = {
1: [
("mock_bolt", "1", "0"),
("mock_bolt", "5", "4"),
("mock_spout", "9", "0")
],
2: [
("mock_bolt", "2", "1"),
("mock_bolt", "6", "5"),
("mock_spout", "10", "1")
],
3: [
("mock_bolt", "3", "2"),
("mock_bolt", "7", "6"),
("mock_spout", "11", "2")
],
4: [
("mock_bolt", "4", "3"),
("mock_bolt", "8", "7"),
("mock_spout", "12", "3")
]
}
self.assertEqual(expected_component_distribution, component_distribution)
def test_get_component_rammap(self):
# Mock a few methods
    # This is not ideal, since it exposes the internals
    # of the method under test. These methods should be changed so that,
    # for example, ram_per_container could be taken as an argument.
original_ram_for_stmgr = constants.RAM_FOR_STMGR
constants.RAM_FOR_STMGR = 2
# When rammap is specified, it should be used.
topology = self.mock_proto.create_mock_simple_topology(4, 8)
self.mock_proto.add_topology_config(
topology, constants.TOPOLOGY_COMPONENT_RAMMAP, "mock_spout:2,mock_bolt:3")
self.assertEqual(
{"mock_spout":2, "mock_bolt":3}, topology_helpers.get_component_rammap(topology))
# When partial rammap is specified, rest of the components should get default
topology = self.mock_proto.create_mock_simple_topology(4, 8)
self.mock_proto.add_topology_config(
topology, constants.TOPOLOGY_COMPONENT_RAMMAP, "mock_spout:2")
expected_component_rammap = {
"mock_spout": 2,
"mock_bolt": constants.DEFAULT_RAM_FOR_INSTANCE
}
self.assertEqual(expected_component_rammap, topology_helpers.get_component_rammap(topology))
# When container ram is specified.
topology = self.mock_proto.create_mock_simple_topology(4, 8)
self.mock_proto.add_topology_config(topology, constants.TOPOLOGY_STMGRS, 4)
self.mock_proto.add_topology_config(topology, constants.TOPOLOGY_CONTAINER_RAM_REQUESTED, 8)
expected_component_rammap = {
"mock_spout": 2,
"mock_bolt": 2
}
component_rammap = topology_helpers.get_component_rammap(topology)
self.assertEqual(expected_component_rammap, component_rammap)
# When nothing is specified.
topology = self.mock_proto.create_mock_simple_topology(4, 8)
component_rammap = topology_helpers.get_component_rammap(topology)
expected_component_rammap = {
"mock_spout": constants.DEFAULT_RAM_FOR_INSTANCE,
"mock_bolt": constants.DEFAULT_RAM_FOR_INSTANCE
}
self.assertEqual(expected_component_rammap, component_rammap)
# Unmock the things that we mocked.
constants.RAM_FOR_STMGR = original_ram_for_stmgr
def test_get_ram_per_container(self):
# Mock a few things
original_ram_for_stmgr = constants.RAM_FOR_STMGR
constants.RAM_FOR_STMGR = 2
original_default_ram_for_instance = constants.DEFAULT_RAM_FOR_INSTANCE
constants.DEFAULT_RAM_FOR_INSTANCE = 1
# When rammap is specified
topology = self.mock_proto.create_mock_simple_topology(4, 8)
self.mock_proto.add_topology_config(
topology, constants.TOPOLOGY_COMPONENT_RAMMAP, "mock_spout:2,mock_bolt:3")
self.mock_proto.add_topology_config(topology, constants.TOPOLOGY_STMGRS, 4)
self.assertEqual(10, topology_helpers.get_ram_per_container(topology))
# When partial rammap is specified, rest of the components should get default
topology = self.mock_proto.create_mock_simple_topology(4, 8)
self.mock_proto.add_topology_config(
topology, constants.TOPOLOGY_COMPONENT_RAMMAP, "mock_spout:2")
self.mock_proto.add_topology_config(topology, constants.TOPOLOGY_STMGRS, 4)
expected_ram_per_container = 6
self.assertEqual(expected_ram_per_container, topology_helpers.get_ram_per_container(topology))
# If container ram is specified
topology = self.mock_proto.create_mock_simple_topology(4, 8)
requested_ram = 15000
self.mock_proto.add_topology_config(
topology, constants.TOPOLOGY_CONTAINER_RAM_REQUESTED, str(requested_ram))
    # The difference should be less than the total number of instances (12)
self.assertLess(abs(topology_helpers.get_ram_per_container(topology) - requested_ram), 12)
# When nothing is specified
topology = self.mock_proto.create_mock_simple_topology(4, 8)
self.mock_proto.add_topology_config(topology, constants.TOPOLOGY_STMGRS, 4)
expected_ram_per_container = 5
self.assertEqual(expected_ram_per_container, topology_helpers.get_ram_per_container(topology))
# Unmock the things that we mocked.
constants.RAM_FOR_STMGR = original_ram_for_stmgr
constants.DEFAULT_RAM_FOR_INSTANCE = original_default_ram_for_instance
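# --- Illustrative sketch (not part of the original test file) ---
# A minimal model of the RAM arithmetic exercised above. The helper name and
# the formula (sum of per-instance RAM, integer-divided across containers,
# plus the stream manager's share) are assumptions inferred from the expected
# values in these tests, with the mocked constant RAM_FOR_STMGR = 2.
def ram_per_container_sketch(rammap, counts, num_containers, ram_for_stmgr=2):
  # rammap: component name -> RAM per instance; counts: component name -> parallelism.
  total = sum(rammap[name] * counts[name] for name in counts)
  return total // num_containers + ram_for_stmgr

# 4 spouts at 2 units + 8 bolts at 3 units = 32; 32 / 4 + 2 = 10 (matches the test).
assert ram_per_container_sketch({"mock_spout": 2, "mock_bolt": 3},
                                {"mock_spout": 4, "mock_bolt": 8}, 4) == 10
# With a partial rammap the bolts fall back to the mocked default of 1:
# 4 * 2 + 8 * 1 = 16; 16 / 4 + 2 = 6.
assert ram_per_container_sketch({"mock_spout": 2, "mock_bolt": 1},
                                {"mock_spout": 4, "mock_bolt": 8}, 4) == 6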
|
apache-2.0
|
ertugruldeveloper/hesapmak1
|
hesapmak1.py
|
1
|
14146
|
# hesapmak1
print("\nSürüm kontrolü yapıldı, 3.X sürümü kullanılıyor\n")
karşılama = open("karşılama.txt","r")
print("\n",karşılama.read())
karşılama.close()
def kimlik():
    # Identity (ID-card) function
print("1.) Merhabalar Yeni Kullanıcı Detaylı Kimlik Kartını Oluşturmak\n\
ister misin? Evet için 'E', Hayır için 'H' Olarak Cevap veriniz.")
cevap = input("Cevap :")
    # Normalize the answer to upper case so both 'E' and 'EVET' match.
    cevap = cevap.upper()
    if cevap == "EVET" or cevap == "E":
        # identity section starts
try:
            # error handling starts
ad = input("\nAD :")
ad = ad.lower().upper()
#-------------------------------------------------------------
soyad = input("SOYAD :")
soyad = soyad.lower().upper()
#-------------------------------------------------------------
tc = input("TC :")
doğum = int(input("DOĞUM YIL :"))
#-------------------------------------------------------------
ana_ad = input("ANA ADI :")
ana_ad = ana_ad.lower().upper()
#-------------------------------------------------------------
baba_ad = input("BABA ADI :")
baba_ad = baba_ad.lower().upper()
#-------------------------------------------------------------
memleket = input("MEMLEKET :")
memleket = memleket.lower().upper()
#-------------------------------------------------------------
doğum_yer = input("DOĞUM YERİ :")
doğum_yer = doğum_yer.lower().upper()
print(
"""
|-------------------------------------------------|
| | KİMLİK BİLGİSİ | |
|-------------------------------------------------|
| |
| TC = {}
| |
| |
| |
| AD = {}
| SOYAD = {}
| DOĞUM TARİH = {}
| ANA AD = {}
| BABA = {}
| MEMLEKET = {}
| DOĞUM YER = {}
--------------------------------------------------
\n""".format(tc,ad,soyad,doğum,ana_ad,baba_ad,memleket,doğum_yer))
except (ValueError,ZeroDivisionError):
hata1 = "Girdiyi kontrol ediniz. Rakam veya özel kaakter kullanımına dikkat ediniz"
çiz = (len(hata1)*"-")
print(çiz,"\n",hata1,"\n",çiz,sep="")
except:
hata = "Nedeni bilinmeyen bir hata oluştu..."
çiz = (len(hata)*"-")
print(çiz,"\n",hata,"\n",çiz,sep="")
        # identity section ends
else:
print("Kimlik aşaması atlandı...\n",("-"*50),sep="")
exit
#-------------------------------------------------------------------------------------
def kayıtlı_kullanıcı():
#-------------------------------------------------------------------------------------
try:
kaydet = open("kaydet.txt","w")
ad = input("Kullanıcı AD :")
ad = ad.lower().upper()
kaydet.write(ad)
parola = input("Parola :")
kaydet.write(" ")
kaydet.write(parola)
havuz = "ALI","VELI","AHMET","CENGİZ","ŞEKER"
if ad in havuz and len(parola) in range(3,8):
tanım = open("tanım.txt","r")
print("\nGiriş başarılı bir şekilde sağlandı...{}\n".format(ad),tanım.read())
tanım.close()
elif ad not in havuz or len(parola) not in range(3,8):
yeni_kullanıcı()
else:
exit
except(ValueError,ZeroDivisionError):
hata1="Hata oluştu, ya sayı girmediniz yada 2. rakamı '0' girdiniz."
print(("-"*len(hata1)),"\n",hata1,"\n",("-"*len(hata1)),sep="")
except:
hata2="Beklenmeyen bir hata oluştu."
print(("-"*len(hata2)),"\n",hata2,"\n",("-"*len(hata2)),sep="")
#-------------------------------------------------------------------------------------
def yeni_kullanıcı():
try:
kaydet = open("kaydet.txt","w")
print("Yeni Kulanıcı Kayıt Ekranına Hoş Geldin...\n")
ad = input("Kullanıcı AD :")
        ad = ad.upper()
kaydet.write(ad)
e_posta = input("E-posta :")
e_posta = e_posta.lower().upper()
kaydet.write(e_posta)
kaydet.write(" ")
parola = input("Parola :")
kaydet.write(" ")
kaydet.write(parola)
havuz = ad
kaydet.close()
if ad in havuz and len(parola) in range(3,8):
tanım = open("tanım.txt","r")
print("Giriş başarılı bir şekilde sağlandı...{}".format(ad),tanım.read())
tanım.close()
else:
print("Hatallı Giriş")
exit
except(ValueError,ZeroDivisionError):
hata1="Hata oluştu, ya sayı girmediniz yada 2. rakamı '0' girdiniz."
print(("-"*len(hata1)),"\n",hata1,"\n",("-"*len(hata1)),sep="")
except:
hata2="Beklenmeyen bir hata oluştu."
print(("-"*len(hata2)),"\n",hata2,"\n",("-"*len(hata2)),sep="")
#-------------------------------------------------------------------------------------
def dos_uzantı_sıralama():
try:
print("2.) Merhabalar, dosya uzantısına göre sıralanacak ögeleriniz var\n\
bu sıralamayı yapmak için Evet için 'E', Hayır için 'h' basınız...\n")
girdi = input("Cevanınız nedir ? = ")
girdi = girdi.lower().upper()
if girdi == "E":
dosyalar =open("dosyalar.txt","r")
liste = dosyalar.read()
liste = liste.split()
print("\n\n'dosyalar.txt' içerisinde en çok tekrar eden uzantılı\n\
karakter dizilerini istedik...")
for i in liste:
if i.endswith(".com"):
print(("-"*15),i,sep="\n")
print("\n")
dosyalar.close()
else:
print("Dosya uzantısı sıralama uygulamasındna çıkıldı...\n",("-"*30),sep="")
exit
except(ValueError,ZeroDivisionError):
hata ="Hata oluştu, harf veya 2. sayıyı '0' olarak girdiniz , lütfen kontrol ediniz"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
except:
hata ="Bir hata oluştu üzgünüm"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
#-------------------------------------------------------------------------------------
def dos_baslangıc_sıralama():
try:
print("3.) Merhabalar, dosya uzantısına göre sıralanacak ögeleriniz var\n\
bu sıralamayı yapmak için Evet için 'E', Hayır için 'h' basınız...\n")
girdi = input("Cevanınız nedir ? = ")
girdi = girdi.lower().upper()
if girdi =="E":
dosyalar = open("dosyalar.txt","r")
liste = dosyalar.read()
liste = liste.split()
print("\nDosyaları ilk harfine göre sıralama yapıyoruz\n")
for i in liste:
if i.startswith("d"):
print(("-"*15),i,sep="\n")
print("\n")
dosyalar.close()
else:
print("Dosyanın ilk harfine göre sıralama uygulamasıdnan çıkıldı...\n",("-"*30),sep="")
exit
except(ValueError,ZeroDivisionError):
hata ="Hata oluştu, harf veya 2. sayıyı '0' olarak girdiniz , lütfen kontrol ediniz"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
except:
hata ="Bir hata oluştu üzgünüm"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
#-------------------------------------------------------------------------------------
def toplama():
try:
bilgi1 = open("toplama.txt","r")
print("\nAÇIKLAMA METNİ :\n",bilgi1.read(),"\n",sep="")
sayı1 = int(input("1. sayıyı gir :"))
sayı2 = int(input("2. sayıyı gir :"))
print("\n","\t",sayı1,"\n","\t",sayı2,"\n","+","\n",("-"*20),"\n\
","\t",(sayı1+sayı2),"\n",sep="",end="\n\n----FİNİŞ----\n\n")
        bilgi1.close()
except(ZeroDivisionError,ValueError):
hata ="Hata oluştu, harf veya 2. sayıyı '0' olarak girdiniz , lütfen kontrol ediniz"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
except:
hata ="Bir hata oluştu üzgünüm"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
#-------------------------------------------------------------------------------------
def çıkartma():
try:
bilgi2 = open("çıkartma.txt","r")
print("\nÇıkartma işlemine hoşgeldiniz...\n",bilgi2.read(),"\n",sep="")
sayı1 = int(input("1. sayıyı gir :"))
sayı2 = int(input("2. sayıyı gir :"))
print(("-"*50),"\n","\t",sayı1,"\n","\t",sayı2,"\n","-","\n",("-"*20),"\n\
","\t",(sayı1-sayı2),"\n",sep="",end="\n\n----FİNİŞ----\n\n")
bilgi2.close()
except(ValueError,ZeroDivisionError):
hata ="Hata oluştu, harf veya 2. sayıyı '0' olarak girdiniz , lütfen kontrol ediniz"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
except:
hata ="Bir hata oluştu üzgünüm"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
#-------------------------------------------------------------------------------------
def çarpma():
try:
çarpma = open("çarpma.txt","r")
print("\nÇarpma işlemine hoşgeldiniz...\n",çarpma.read(),sep="")
sayı1 = int(input("1. sayıyı gir :"))
sayı2 = int(input("2. sayıyı gir :"))
print(("-"*50),"\n","\t",sayı1,"\n","\t",sayı2,"\n","*","\n",("-"*20),"\n\
","\t",(sayı1*sayı2),"\n",sep="",end="\n\n----FİNİŞ----\n\n")
çarpma.close()
except(ZeroDivisionError,ValueError):
hata ="Hata oluştu, harf veya 2. sayıyı '0' olarak girdiniz , lütfen kontrol ediniz"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
except:
hata ="Bir hata oluştu üzgünüm"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
def bölme():
try:
bölme = open("bölme.txt","r")
print("\nBölme işlemine hoşgeldiniz...\n",bölme.read(),sep="")
sayı1 = int(input("1. sayıyı gir :"))
sayı2 = int(input("2. sayıyı gir :"))
print(("-"*50),"\n","\t",sayı1,"\n","\t",sayı2,"\n","/","\n",("-"*20),"\n\
","\t",(sayı1/sayı2),"\n",sep="",end="\n\n----FİNİŞ----\n\n")
except(ZeroDivisionError,ValueError):
hata ="Hata oluştu, harf veya 2. sayıyı '0' olarak girdiniz , lütfen kontrol ediniz"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
except:
hata ="Bir hata oluştu üzgünüm"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
def ü_alan():
try:
sayı1=int(input("'a' köşesini gir :"))
sayı2=int(input("'b' köşesini gir :"))
print("Sonuç :",(sayı1*sayı2)/2)
except(ValueError,ZeroDivisionError):
hata ="Hata oluştu, harf veya 2. sayıyı '0' olarak girdiniz , lütfen kontrol ediniz"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
except:
hata ="Bir hata oluştu üzgünüm"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
#-------------------------------------------------------------------------------------
def hacim():
    # Volume function (cylinder volume = π·r²·h)
try:
print ("""Hacim formül = π.r.r.h \n(π=3,14 alırız, r taban yarıçapı, h yükseklik)""")
sayı_r =int(input("r taban yarıçapı :"))
sayı_h =int(input("h yükseklik :"))
sayı_π = 3.14
sonuç = (sayı_π*sayı_h*pow(sayı_r,2))
print("'r'taban yarıçap :",str(sayı_r),str(sayı_r),"'h' yükseklik :",sayı_h,sonuç,sep="")
except(ValueError,ZeroDivisionError):
hata ="Hata oluştu, harf veya 2. sayıyı '0' olarak girdiniz , lütfen kontrol ediniz"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
except:
hata ="Bir hata oluştu üzgünüm"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
    # end of volume function
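# Worked example (illustrative) for the volume formula above:
# r = 2, h = 5 -> 3.14 * 5 * 2**2 = 62.8 (approximately, in floating point).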
def üs_al():
    # Squaring function (üs_al only squares its input)
try:
print("Üs alma işlemine hoş geldiniz...")
sayı1 = int(input("Sayı gir :"))
print(sayı1," girilen sayısının üssü ",(sayı1*sayı1))
except(ValueError,ZeroDivisionError):
hata ="Hata oluştu, harf veya 2. sayıyı '0' olarak girdiniz , lütfen kontrol ediniz"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
except:
hata ="Bir hata oluştu üzgünüm"
print((len(hata)*"-"),"\n",hata,"\n",(len(hata)*"-"),sep="")
    # end of squaring function
def yardım():
    # Help function
    print("Yardım menüsü çalıştı...")
    yardım = input("Yardım almak istediğin işlemi gir :")
    print("\n",help(yardım))
    # end of help function
#-------------------------------------------------------------------------------------
while True:
kimlik()
kayıtlı_kullanıcı()
girdi = input ("Yapmak istediğiniz işlemi giriniz :")
# İf bloğu ile girdi değerlendirmesi yapılacak
if girdi == "+":
toplama()
elif girdi == "-":
çıkartma()
elif girdi == "*":
çarpma()
elif girdi == "/":
bölme()
elif girdi == "a":
ü_alan()
elif girdi == "h":
hacim()
elif girdi == "k":
üs_al()
elif girdi == "y":
yardım()
else:
print("\nProgramdan Tamamen Çıkıldı...")
        break
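# --- Illustrative alternative (not part of the original program) ---
# The if/elif chain above could be written as a dict dispatch, which keeps the
# menu keys and their handler functions in one place:
#
#     işlemler = {"+": toplama, "-": çıkartma, "*": çarpma, "/": bölme,
#                 "a": ü_alan, "h": hacim, "k": üs_al, "y": yardım}
#     işlem = işlemler.get(girdi)
#     if işlem is not None:
#         işlem()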
|
gpl-3.0
|
trivoldus28/pulsarch-verilog
|
tools/local/bas-release/bas,3.9/lib/python/lib/python2.3/email/MIMEMessage.py
|
11
|
1244
|
# Copyright (C) 2001,2002 Python Software Foundation
# Author: [email protected] (Barry Warsaw)
"""Class representing message/* MIME documents.
"""
from email import Message
from email.MIMENonMultipart import MIMENonMultipart
class MIMEMessage(MIMENonMultipart):
"""Class representing message/* MIME documents."""
def __init__(self, _msg, _subtype='rfc822'):
"""Create a message/* type MIME document.
_msg is a message object and must be an instance of Message, or a
derived class of Message, otherwise a TypeError is raised.
Optional _subtype defines the subtype of the contained message. The
default is "rfc822" (this is defined by the MIME standard, even though
the term "rfc822" is technically outdated by RFC 2822).
"""
MIMENonMultipart.__init__(self, 'message', _subtype)
if not isinstance(_msg, Message.Message):
raise TypeError, 'Argument is not an instance of Message'
# It's convenient to use this base class method. We need to do it
# this way or we'll get an exception
Message.Message.attach(self, _msg)
# And be sure our default type is set correctly
self.set_default_type('message/rfc822')
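# A minimal usage sketch (illustrative, not part of this module): wrap an
# existing Message instance in a message/rfc822 container.
#
#     from email.MIMEText import MIMEText
#     from email.MIMEMessage import MIMEMessage
#     inner = MIMEText('hello world')
#     outer = MIMEMessage(inner)         # becomes Content-Type: message/rfc822
#     assert outer.get_content_type() == 'message/rfc822'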
|
gpl-2.0
|
SnappleCap/oh-mainline
|
vendor/packages/scrapy/scrapy/tests/test_selector_libxml2.py
|
17
|
3028
|
"""
Selector tests, specific to the libxml2 backend
"""
import unittest
from scrapy.http import TextResponse, HtmlResponse, XmlResponse
from scrapy.selector.libxml2sel import XmlXPathSelector, HtmlXPathSelector, \
XPathSelector
from scrapy.selector.document import Libxml2Document
from scrapy.utils.test import libxml2debug
from scrapy.tests import test_selector
class Libxml2XPathSelectorTestCase(test_selector.XPathSelectorTestCase):
xs_cls = XPathSelector
hxs_cls = HtmlXPathSelector
xxs_cls = XmlXPathSelector
@libxml2debug
def test_null_bytes(self):
hxs = HtmlXPathSelector(text='<root>la\x00la</root>')
self.assertEqual(hxs.extract(),
u'<html><body><root>lala</root></body></html>')
xxs = XmlXPathSelector(text='<root>la\x00la</root>')
self.assertEqual(xxs.extract(),
u'<root>lala</root>')
@libxml2debug
def test_unquote(self):
xmldoc = '\n'.join((
'<root>',
' lala',
' <node>',
' blabla&more<!--comment-->a<b>test</b>oh',
' <![CDATA[lalalal&ppppp<b>PPPP</b>ppp&la]]>',
' </node>',
' pff',
'</root>'))
xxs = XmlXPathSelector(text=xmldoc)
self.assertEqual(xxs.extract_unquoted(), u'')
self.assertEqual(xxs.select('/root').extract_unquoted(), [u''])
self.assertEqual(xxs.select('/root/text()').extract_unquoted(), [
u'\n lala\n ',
u'\n pff\n'])
self.assertEqual(xxs.select('//*').extract_unquoted(), [u'', u'', u''])
self.assertEqual(xxs.select('//text()').extract_unquoted(), [
u'\n lala\n ',
u'\n blabla&more',
u'a',
u'test',
u'oh\n ',
u'lalalal&ppppp<b>PPPP</b>ppp&la',
u'\n ',
u'\n pff\n'])
class Libxml2DocumentTest(unittest.TestCase):
@libxml2debug
def test_response_libxml2_caching(self):
r1 = HtmlResponse('http://www.example.com', body='<html><head></head><body></body></html>')
r2 = r1.copy()
doc1 = Libxml2Document(r1)
doc2 = Libxml2Document(r1)
doc3 = Libxml2Document(r2)
# make sure it's cached
assert doc1 is doc2
assert doc1.xmlDoc is doc2.xmlDoc
assert doc1 is not doc3
assert doc1.xmlDoc is not doc3.xmlDoc
# don't leave libxml2 documents in memory to avoid wrong libxml2 leaks reports
del doc1, doc2, doc3
@libxml2debug
def test_null_char(self):
# make sure bodies with null char ('\x00') don't raise a TypeError exception
self.body_content = 'test problematic \x00 body'
response = TextResponse('http://example.com/catalog/product/blabla-123',
headers={'Content-Type': 'text/plain; charset=utf-8'}, body=self.body_content)
Libxml2Document(response)
if __name__ == "__main__":
unittest.main()
|
agpl-3.0
|
Yen-Chung-En/w16b_test
|
static/Brython3.1.1-20150328-091302/Lib/importlib/_bootstrap.py
|
623
|
63710
|
"""Core implementation of import.
This module is NOT meant to be directly imported! It has been designed such
that it can be bootstrapped into Python as the implementation of import. As
such it requires the injection of specific modules and attributes in order to
work. One should use importlib as the public-facing version of this module.
"""
#
# IMPORTANT: Whenever making changes to this module, be sure to run
# a top-level make in order to get the frozen version of the module
# updated. Not doing so will result in the Makefile failing for
# all others who don't have a ./python around to freeze the module
# in the early stages of compilation.
#
# See importlib._setup() for what is injected into the global namespace.
# When editing this code be aware that code executed at import time CANNOT
# reference any injected objects! This includes not only global code but also
# anything specified at the class level.
# XXX Make sure all public names have no single leading underscore and all
# others do.
# Bootstrap-related code ######################################################
_CASE_INSENSITIVE_PLATFORMS = 'win', 'cygwin', 'darwin'
def _make_relax_case():
if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):
def _relax_case():
"""True if filenames must be checked case-insensitively."""
return b'PYTHONCASEOK' in _os.environ
else:
def _relax_case():
"""True if filenames must be checked case-insensitively."""
return False
return _relax_case
# TODO: Expose from marshal
def _w_long(x):
"""Convert a 32-bit integer to little-endian.
XXX Temporary until marshal's long functions are exposed.
"""
x = int(x)
int_bytes = []
int_bytes.append(x & 0xFF)
int_bytes.append((x >> 8) & 0xFF)
int_bytes.append((x >> 16) & 0xFF)
int_bytes.append((x >> 24) & 0xFF)
return bytearray(int_bytes)
# TODO: Expose from marshal
def _r_long(int_bytes):
"""Convert 4 bytes in little-endian to an integer.
XXX Temporary until marshal's long function are exposed.
"""
x = int_bytes[0]
x |= int_bytes[1] << 8
x |= int_bytes[2] << 16
x |= int_bytes[3] << 24
return x
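# Example (illustrative): _w_long(0x12345678) == bytearray(b'\x78\x56\x34\x12')
# and _r_long(bytearray(b'\x78\x56\x34\x12')) == 0x12345678, i.e. the two
# functions are inverses for 32-bit values.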
def _path_join(*path_parts):
"""Replacement for os.path.join()."""
new_parts = []
for part in path_parts:
if not part:
continue
new_parts.append(part)
if part[-1] not in path_separators:
new_parts.append(path_sep)
return ''.join(new_parts[:-1]) # Drop superfluous path separator.
def _path_split(path):
"""Replacement for os.path.split()."""
for x in reversed(path):
if x in path_separators:
sep = x
break
else:
sep = path_sep
front, _, tail = path.rpartition(sep)
return front, tail
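# Example (illustrative, on POSIX where path_sep is '/'):
# _path_split('/a/b/mod.py') -> ('/a/b', 'mod.py'), and
# _path_join('/a/b', 'mod.py') -> '/a/b/mod.py'.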
def _path_is_mode_type(path, mode):
"""Test whether the path is the specified mode type."""
try:
stat_info = _os.stat(path)
except OSError:
return False
return (stat_info.st_mode & 0o170000) == mode
# XXX Could also expose Modules/getpath.c:isfile()
def _path_isfile(path):
"""Replacement for os.path.isfile."""
return _path_is_mode_type(path, 0o100000)
# XXX Could also expose Modules/getpath.c:isdir()
def _path_isdir(path):
"""Replacement for os.path.isdir."""
if not path:
path = _os.getcwd()
return _path_is_mode_type(path, 0o040000)
def _write_atomic(path, data, mode=0o666):
"""Best-effort function to write data to a path atomically.
Be prepared to handle a FileExistsError if concurrent writing of the
temporary file is attempted."""
# id() is used to generate a pseudo-random filename.
path_tmp = '{}.{}'.format(path, id(path))
fd = _os.open(path_tmp,
_os.O_EXCL | _os.O_CREAT | _os.O_WRONLY, mode & 0o666)
try:
# We first write data to a temporary file, and then use os.replace() to
# perform an atomic rename.
with _io.FileIO(fd, 'wb') as file:
file.write(data)
_os.replace(path_tmp, path)
except OSError:
try:
_os.unlink(path_tmp)
except OSError:
pass
raise
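# Note: the atomicity above comes from writing to a uniquely named temporary
# file first and then swapping it into place with _os.replace(); O_EXCL on the
# open ensures two concurrent writers cannot share the same temporary file.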
def _wrap(new, old):
"""Simple substitute for functools.update_wrapper."""
for replace in ['__module__', '__name__', '__qualname__', '__doc__']:
if hasattr(old, replace):
setattr(new, replace, getattr(old, replace))
new.__dict__.update(old.__dict__)
_code_type = type(_wrap.__code__)
def new_module(name):
"""Create a new module.
The module is not entered into sys.modules.
"""
return type(_io)(name)
# Module-level locking ########################################################
# A dict mapping module names to weakrefs of _ModuleLock instances
_module_locks = {}
# A dict mapping thread ids to _ModuleLock instances
_blocking_on = {}
class _DeadlockError(RuntimeError):
pass
class _ModuleLock:
"""A recursive lock implementation which is able to detect deadlocks
(e.g. thread 1 trying to take locks A then B, and thread 2 trying to
take locks B then A).
"""
def __init__(self, name):
self.lock = _thread.allocate_lock()
self.wakeup = _thread.allocate_lock()
self.name = name
self.owner = None
self.count = 0
self.waiters = 0
def has_deadlock(self):
# Deadlock avoidance for concurrent circular imports.
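        # Walk the chain of lock owners starting from this lock's owner;
        # if the chain leads back to the current thread, the imports form a cycle.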
me = _thread.get_ident()
tid = self.owner
while True:
lock = _blocking_on.get(tid)
if lock is None:
return False
tid = lock.owner
if tid == me:
return True
def acquire(self):
"""
Acquire the module lock. If a potential deadlock is detected,
a _DeadlockError is raised.
Otherwise, the lock is always acquired and True is returned.
"""
tid = _thread.get_ident()
_blocking_on[tid] = self
try:
while True:
with self.lock:
if self.count == 0 or self.owner == tid:
self.owner = tid
self.count += 1
return True
if self.has_deadlock():
raise _DeadlockError("deadlock detected by %r" % self)
if self.wakeup.acquire(False):
self.waiters += 1
# Wait for a release() call
self.wakeup.acquire()
self.wakeup.release()
finally:
del _blocking_on[tid]
def release(self):
tid = _thread.get_ident()
with self.lock:
if self.owner != tid:
raise RuntimeError("cannot release un-acquired lock")
assert self.count > 0
self.count -= 1
if self.count == 0:
self.owner = None
if self.waiters:
self.waiters -= 1
self.wakeup.release()
def __repr__(self):
return "_ModuleLock(%r) at %d" % (self.name, id(self))
class _DummyModuleLock:
"""A simple _ModuleLock equivalent for Python builds without
multi-threading support."""
def __init__(self, name):
self.name = name
self.count = 0
def acquire(self):
self.count += 1
return True
def release(self):
if self.count == 0:
raise RuntimeError("cannot release un-acquired lock")
self.count -= 1
def __repr__(self):
return "_DummyModuleLock(%r) at %d" % (self.name, id(self))
# The following two functions are for consumption by Python/import.c.
def _get_module_lock(name):
"""Get or create the module lock for a given module name.
Should only be called with the import lock taken."""
lock = None
try:
lock = _module_locks[name]()
except KeyError:
pass
if lock is None:
if _thread is None:
lock = _DummyModuleLock(name)
else:
lock = _ModuleLock(name)
def cb(_):
del _module_locks[name]
_module_locks[name] = _weakref.ref(lock, cb)
return lock
def _lock_unlock_module(name):
"""Release the global import lock, and acquires then release the
module lock for a given module name.
This is used to ensure a module is completely initialized, in the
event it is being imported by another thread.
Should only be called with the import lock taken."""
lock = _get_module_lock(name)
_imp.release_lock()
try:
lock.acquire()
except _DeadlockError:
# Concurrent circular import, we'll accept a partially initialized
# module object.
pass
else:
lock.release()
# Frame stripping magic ###############################################
def _call_with_frames_removed(f, *args, **kwds):
"""remove_importlib_frames in import.c will always remove sequences
of importlib frames that end with a call to this function
Use it instead of a normal call in places where including the importlib
frames introduces unwanted noise into the traceback (e.g. when executing
module code)
"""
return f(*args, **kwds)
# Finder/loader utility code ###############################################
"""Magic word to reject .pyc files generated by other Python versions.
It should change for each incompatible change to the bytecode.
The value of CR and LF is incorporated so if you ever read or write
a .pyc file in text mode the magic number will be wrong; also, the
Apple MPW compiler swaps their values, botching string constants.
The magic numbers must be spaced apart at least 2 values, as the
-U interpreter flag will cause MAGIC+1 to be used. They have been
odd numbers for some time now.
There were a variety of old schemes for setting the magic number.
The current working scheme is to increment the previous value by
10.
Starting with the adoption of PEP 3147 in Python 3.2, every bump in magic
number also includes a new "magic tag", i.e. a human readable string used
to represent the magic number in __pycache__ directories. When you change
the magic number, you must also set a new unique magic tag. Generally this
can be named after the Python major version of the magic number bump, but
it can really be anything, as long as it's different than anything else
that's come before. The tags are included in the following table, starting
with Python 3.2a0.
Known values:
Python 1.5: 20121
Python 1.5.1: 20121
Python 1.5.2: 20121
Python 1.6: 50428
Python 2.0: 50823
Python 2.0.1: 50823
Python 2.1: 60202
Python 2.1.1: 60202
Python 2.1.2: 60202
Python 2.2: 60717
Python 2.3a0: 62011
Python 2.3a0: 62021
Python 2.3a0: 62011 (!)
Python 2.4a0: 62041
Python 2.4a3: 62051
Python 2.4b1: 62061
Python 2.5a0: 62071
Python 2.5a0: 62081 (ast-branch)
Python 2.5a0: 62091 (with)
Python 2.5a0: 62092 (changed WITH_CLEANUP opcode)
Python 2.5b3: 62101 (fix wrong code: for x, in ...)
Python 2.5b3: 62111 (fix wrong code: x += yield)
Python 2.5c1: 62121 (fix wrong lnotab with for loops and
storing constants that should have been removed)
Python 2.5c2: 62131 (fix wrong code: for x, in ... in listcomp/genexp)
Python 2.6a0: 62151 (peephole optimizations and STORE_MAP opcode)
Python 2.6a1: 62161 (WITH_CLEANUP optimization)
Python 3000: 3000
3010 (removed UNARY_CONVERT)
3020 (added BUILD_SET)
3030 (added keyword-only parameters)
3040 (added signature annotations)
3050 (print becomes a function)
3060 (PEP 3115 metaclass syntax)
3061 (string literals become unicode)
3071 (PEP 3109 raise changes)
3081 (PEP 3137 make __file__ and __name__ unicode)
3091 (kill str8 interning)
3101 (merge from 2.6a0, see 62151)
3103 (__file__ points to source file)
Python 3.0a4: 3111 (WITH_CLEANUP optimization).
Python 3.0a5: 3131 (lexical exception stacking, including POP_EXCEPT)
Python 3.1a0: 3141 (optimize list, set and dict comprehensions:
change LIST_APPEND and SET_ADD, add MAP_ADD)
Python 3.1a0: 3151 (optimize conditional branches:
introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE)
Python 3.2a0: 3160 (add SETUP_WITH)
tag: cpython-32
Python 3.2a1: 3170 (add DUP_TOP_TWO, remove DUP_TOPX and ROT_FOUR)
tag: cpython-32
Python 3.2a2 3180 (add DELETE_DEREF)
Python 3.3a0 3190 __class__ super closure changed
Python 3.3a0 3200 (__qualname__ added)
3210 (added size modulo 2**32 to the pyc header)
Python 3.3a1 3220 (changed PEP 380 implementation)
Python 3.3a4 3230 (revert changes to implicit __class__ closure)
MAGIC must change whenever the bytecode emitted by the compiler may no
longer be understood by older implementations of the eval loop (usually
due to the addition of new opcodes).
"""
_RAW_MAGIC_NUMBER = 3230 | ord('\r') << 16 | ord('\n') << 24
_MAGIC_BYTES = bytes(_RAW_MAGIC_NUMBER >> n & 0xff for n in range(0, 25, 8))
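# Resulting .pyc layout (as written by SourceLoader.get_code and checked by
# _bytes_from_bytecode below): 4 magic bytes, 4 bytes of little-endian source
# mtime, 4 bytes of little-endian source size, then the marshalled code object.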
_PYCACHE = '__pycache__'
SOURCE_SUFFIXES = ['.py'] # _setup() adds .pyw as needed.
DEBUG_BYTECODE_SUFFIXES = ['.pyc']
OPTIMIZED_BYTECODE_SUFFIXES = ['.pyo']
def cache_from_source(path, debug_override=None):
"""Given the path to a .py file, return the path to its .pyc/.pyo file.
The .py file does not need to exist; this simply returns the path to the
.pyc/.pyo file calculated as if the .py file were imported. The extension
will be .pyc unless sys.flags.optimize is non-zero, then it will be .pyo.
If debug_override is not None, then it must be a boolean and is used in
place of sys.flags.optimize.
If sys.implementation.cache_tag is None then NotImplementedError is raised.
"""
debug = not sys.flags.optimize if debug_override is None else debug_override
if debug:
suffixes = DEBUG_BYTECODE_SUFFIXES
else:
suffixes = OPTIMIZED_BYTECODE_SUFFIXES
head, tail = _path_split(path)
base_filename, sep, _ = tail.partition('.')
tag = sys.implementation.cache_tag
if tag is None:
raise NotImplementedError('sys.implementation.cache_tag is None')
filename = ''.join([base_filename, sep, tag, suffixes[0]])
return _path_join(head, _PYCACHE, filename)
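# Example (illustrative, assuming sys.implementation.cache_tag == 'cpython-32'
# and sys.flags.optimize == 0):
# cache_from_source('/pkg/mod.py') -> '/pkg/__pycache__/mod.cpython-32.pyc'.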
def source_from_cache(path):
"""Given the path to a .pyc./.pyo file, return the path to its .py file.
The .pyc/.pyo file does not need to exist; this simply returns the path to
the .py file calculated to correspond to the .pyc/.pyo file. If path does
not conform to PEP 3147 format, ValueError will be raised. If
sys.implementation.cache_tag is None then NotImplementedError is raised.
"""
if sys.implementation.cache_tag is None:
raise NotImplementedError('sys.implementation.cache_tag is None')
head, pycache_filename = _path_split(path)
head, pycache = _path_split(head)
if pycache != _PYCACHE:
raise ValueError('{} not bottom-level directory in '
'{!r}'.format(_PYCACHE, path))
if pycache_filename.count('.') != 2:
raise ValueError('expected only 2 dots in '
'{!r}'.format(pycache_filename))
base_filename = pycache_filename.partition('.')[0]
return _path_join(head, base_filename + SOURCE_SUFFIXES[0])
def _get_sourcefile(bytecode_path):
"""Convert a bytecode file path to a source path (if possible).
This function exists purely for backwards-compatibility for
PyImport_ExecCodeModuleWithFilenames() in the C API.
"""
if len(bytecode_path) == 0:
return None
rest, _, extension = bytecode_path.rpartition('.')
if not rest or extension.lower()[-3:-1] != 'py':
return bytecode_path
try:
source_path = source_from_cache(bytecode_path)
except (NotImplementedError, ValueError):
source_path = bytecode_path[:-1]
return source_path if _path_isfile(source_path) else bytecode_path
def _verbose_message(message, *args, verbosity=1):
"""Print the message to stderr if -v/PYTHONVERBOSE is turned on."""
if sys.flags.verbose >= verbosity:
if not message.startswith(('#', 'import ')):
message = '# ' + message
print(message.format(*args), file=sys.stderr)
def set_package(fxn):
"""Set __package__ on the returned module."""
def set_package_wrapper(*args, **kwargs):
module = fxn(*args, **kwargs)
if getattr(module, '__package__', None) is None:
module.__package__ = module.__name__
if not hasattr(module, '__path__'):
module.__package__ = module.__package__.rpartition('.')[0]
return module
_wrap(set_package_wrapper, fxn)
return set_package_wrapper
def set_loader(fxn):
"""Set __loader__ on the returned module."""
def set_loader_wrapper(self, *args, **kwargs):
module = fxn(self, *args, **kwargs)
if not hasattr(module, '__loader__'):
module.__loader__ = self
return module
_wrap(set_loader_wrapper, fxn)
return set_loader_wrapper
def module_for_loader(fxn):
"""Decorator to handle selecting the proper module for loaders.
The decorated function is passed the module to use instead of the module
name. The module passed in to the function is either from sys.modules if
it already exists or is a new module. If the module is new, then __name__
    is set to the first argument to the method, __loader__ is set to self, and
    __package__ is set accordingly (if self.is_package() is defined) before
    the module is passed to the decorated function (if self.is_package() does
not work for the module it will be set post-load).
If an exception is raised and the decorator created the module it is
subsequently removed from sys.modules.
The decorator assumes that the decorated function takes the module name as
the second argument.
"""
def module_for_loader_wrapper(self, fullname, *args, **kwargs):
module = sys.modules.get(fullname)
is_reload = module is not None
if not is_reload:
# This must be done before open() is called as the 'io' module
# implicitly imports 'locale' and would otherwise trigger an
# infinite loop.
module = new_module(fullname)
# This must be done before putting the module in sys.modules
# (otherwise an optimization shortcut in import.c becomes wrong)
module.__initializing__ = True
sys.modules[fullname] = module
module.__loader__ = self
try:
is_package = self.is_package(fullname)
except (ImportError, AttributeError):
pass
else:
if is_package:
module.__package__ = fullname
else:
module.__package__ = fullname.rpartition('.')[0]
else:
module.__initializing__ = True
try:
# If __package__ was not set above, __import__() will do it later.
return fxn(self, module, *args, **kwargs)
except:
if not is_reload:
del sys.modules[fullname]
raise
finally:
module.__initializing__ = False
_wrap(module_for_loader_wrapper, fxn)
return module_for_loader_wrapper
def _check_name(method):
"""Decorator to verify that the module being requested matches the one the
loader can handle.
    The first argument (self) must define a 'name' attribute which the second argument is
compared against. If the comparison fails then ImportError is raised.
"""
def _check_name_wrapper(self, name=None, *args, **kwargs):
if name is None:
name = self.name
elif self.name != name:
raise ImportError("loader cannot handle %s" % name, name=name)
return method(self, name, *args, **kwargs)
_wrap(_check_name_wrapper, method)
return _check_name_wrapper
def _requires_builtin(fxn):
"""Decorator to verify the named module is built-in."""
def _requires_builtin_wrapper(self, fullname):
if fullname not in sys.builtin_module_names:
raise ImportError("{} is not a built-in module".format(fullname),
name=fullname)
return fxn(self, fullname)
_wrap(_requires_builtin_wrapper, fxn)
return _requires_builtin_wrapper
def _requires_frozen(fxn):
"""Decorator to verify the named module is frozen."""
def _requires_frozen_wrapper(self, fullname):
if not _imp.is_frozen(fullname):
raise ImportError("{} is not a frozen module".format(fullname),
name=fullname)
return fxn(self, fullname)
_wrap(_requires_frozen_wrapper, fxn)
return _requires_frozen_wrapper
def _find_module_shim(self, fullname):
"""Try to find a loader for the specified module by delegating to
self.find_loader()."""
# Call find_loader(). If it returns a string (indicating this
# is a namespace package portion), generate a warning and
# return None.
loader, portions = self.find_loader(fullname)
if loader is None and len(portions):
msg = "Not importing directory {}: missing __init__"
_warnings.warn(msg.format(portions[0]), ImportWarning)
return loader
# Loaders #####################################################################
class BuiltinImporter:
"""Meta path import for built-in modules.
All methods are either class or static methods to avoid the need to
instantiate the class.
"""
@classmethod
def module_repr(cls, module):
return "<module '{}' (built-in)>".format(module.__name__)
@classmethod
def find_module(cls, fullname, path=None):
"""Find the built-in module.
If 'path' is ever specified then the search is considered a failure.
"""
if path is not None:
return None
return cls if _imp.is_builtin(fullname) else None
@classmethod
@set_package
@set_loader
@_requires_builtin
def load_module(cls, fullname):
"""Load a built-in module."""
is_reload = fullname in sys.modules
try:
return _call_with_frames_removed(_imp.init_builtin, fullname)
except:
if not is_reload and fullname in sys.modules:
del sys.modules[fullname]
raise
@classmethod
@_requires_builtin
def get_code(cls, fullname):
"""Return None as built-in modules do not have code objects."""
return None
@classmethod
@_requires_builtin
def get_source(cls, fullname):
"""Return None as built-in modules do not have source code."""
return None
@classmethod
@_requires_builtin
def is_package(cls, fullname):
"""Return False as built-in modules are never packages."""
return False
class FrozenImporter:
"""Meta path import for frozen modules.
All methods are either class or static methods to avoid the need to
instantiate the class.
"""
@classmethod
def module_repr(cls, m):
return "<module '{}' (frozen)>".format(m.__name__)
@classmethod
def find_module(cls, fullname, path=None):
"""Find a frozen module."""
return cls if _imp.is_frozen(fullname) else None
@classmethod
@set_package
@set_loader
@_requires_frozen
def load_module(cls, fullname):
"""Load a frozen module."""
is_reload = fullname in sys.modules
try:
m = _call_with_frames_removed(_imp.init_frozen, fullname)
# Let our own module_repr() method produce a suitable repr.
del m.__file__
return m
except:
if not is_reload and fullname in sys.modules:
del sys.modules[fullname]
raise
@classmethod
@_requires_frozen
def get_code(cls, fullname):
"""Return the code object for the frozen module."""
return _imp.get_frozen_object(fullname)
@classmethod
@_requires_frozen
def get_source(cls, fullname):
"""Return None as frozen modules do not have source code."""
return None
@classmethod
@_requires_frozen
def is_package(cls, fullname):
"""Return True if the frozen module is a package."""
return _imp.is_frozen_package(fullname)
class WindowsRegistryFinder:
"""Meta path finder for modules declared in the Windows registry.
"""
REGISTRY_KEY = (
"Software\\Python\\PythonCore\\{sys_version}"
"\\Modules\\{fullname}")
REGISTRY_KEY_DEBUG = (
"Software\\Python\\PythonCore\\{sys_version}"
"\\Modules\\{fullname}\\Debug")
DEBUG_BUILD = False # Changed in _setup()
@classmethod
def _open_registry(cls, key):
try:
return _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, key)
except WindowsError:
return _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, key)
@classmethod
def _search_registry(cls, fullname):
if cls.DEBUG_BUILD:
registry_key = cls.REGISTRY_KEY_DEBUG
else:
registry_key = cls.REGISTRY_KEY
key = registry_key.format(fullname=fullname,
sys_version=sys.version[:3])
try:
with cls._open_registry(key) as hkey:
filepath = _winreg.QueryValue(hkey, "")
except WindowsError:
return None
return filepath
@classmethod
def find_module(cls, fullname, path=None):
"""Find module named in the registry."""
filepath = cls._search_registry(fullname)
if filepath is None:
return None
try:
_os.stat(filepath)
except OSError:
return None
for loader, suffixes in _get_supported_file_loaders():
if filepath.endswith(tuple(suffixes)):
return loader(fullname, filepath)
class _LoaderBasics:
"""Base class of common code needed by both SourceLoader and
SourcelessFileLoader."""
def is_package(self, fullname):
"""Concrete implementation of InspectLoader.is_package by checking if
the path returned by get_filename has a filename of '__init__.py'."""
filename = _path_split(self.get_filename(fullname))[1]
filename_base = filename.rsplit('.', 1)[0]
tail_name = fullname.rpartition('.')[2]
return filename_base == '__init__' and tail_name != '__init__'
def _bytes_from_bytecode(self, fullname, data, bytecode_path, source_stats):
"""Return the marshalled bytes from bytecode, verifying the magic
number, timestamp and source size along the way.
If source_stats is None then skip the timestamp check.
"""
magic = data[:4]
raw_timestamp = data[4:8]
raw_size = data[8:12]
if magic != _MAGIC_BYTES:
msg = 'bad magic number in {!r}: {!r}'.format(fullname, magic)
_verbose_message(msg)
raise ImportError(msg, name=fullname, path=bytecode_path)
elif len(raw_timestamp) != 4:
message = 'bad timestamp in {}'.format(fullname)
_verbose_message(message)
raise EOFError(message)
elif len(raw_size) != 4:
message = 'bad size in {}'.format(fullname)
_verbose_message(message)
raise EOFError(message)
if source_stats is not None:
try:
source_mtime = int(source_stats['mtime'])
except KeyError:
pass
else:
if _r_long(raw_timestamp) != source_mtime:
message = 'bytecode is stale for {}'.format(fullname)
_verbose_message(message)
raise ImportError(message, name=fullname,
path=bytecode_path)
try:
source_size = source_stats['size'] & 0xFFFFFFFF
except KeyError:
pass
else:
if _r_long(raw_size) != source_size:
raise ImportError(
"bytecode is stale for {}".format(fullname),
name=fullname, path=bytecode_path)
# Can't return the code object as errors from marshal loading need to
# propagate even when source is available.
return data[12:]
@module_for_loader
def _load_module(self, module, *, sourceless=False):
"""Helper for load_module able to handle either source or sourceless
loading."""
name = module.__name__
code_object = self.get_code(name)
module.__file__ = self.get_filename(name)
if not sourceless:
try:
module.__cached__ = cache_from_source(module.__file__)
except NotImplementedError:
module.__cached__ = module.__file__
else:
module.__cached__ = module.__file__
module.__package__ = name
if self.is_package(name):
module.__path__ = [_path_split(module.__file__)[0]]
else:
module.__package__ = module.__package__.rpartition('.')[0]
module.__loader__ = self
_call_with_frames_removed(exec, code_object, module.__dict__)
return module
class SourceLoader(_LoaderBasics):
def path_mtime(self, path):
"""Optional method that returns the modification time (an int) for the
specified path, where path is a str.
"""
raise NotImplementedError
def path_stats(self, path):
"""Optional method returning a metadata dict for the specified path
to by the path (str).
Possible keys:
- 'mtime' (mandatory) is the numeric timestamp of last source
code modification;
- 'size' (optional) is the size in bytes of the source code.
Implementing this method allows the loader to read bytecode files.
"""
return {'mtime': self.path_mtime(path)}
def _cache_bytecode(self, source_path, cache_path, data):
"""Optional method which writes data (bytes) to a file path (a str).
Implementing this method allows for the writing of bytecode files.
The source path is needed in order to correctly transfer permissions
"""
# For backwards compatibility, we delegate to set_data()
return self.set_data(cache_path, data)
def set_data(self, path, data):
"""Optional method which writes data (bytes) to a file path (a str).
Implementing this method allows for the writing of bytecode files.
"""
raise NotImplementedError
def get_source(self, fullname):
"""Concrete implementation of InspectLoader.get_source."""
import tokenize
path = self.get_filename(fullname)
try:
source_bytes = self.get_data(path)
except IOError as exc:
raise ImportError("source not available through get_data()",
name=fullname) from exc
readsource = _io.BytesIO(source_bytes).readline
try:
encoding = tokenize.detect_encoding(readsource)
except SyntaxError as exc:
raise ImportError("Failed to detect encoding",
name=fullname) from exc
newline_decoder = _io.IncrementalNewlineDecoder(None, True)
try:
return newline_decoder.decode(source_bytes.decode(encoding[0]))
except UnicodeDecodeError as exc:
raise ImportError("Failed to decode source file",
name=fullname) from exc
def get_code(self, fullname):
"""Concrete implementation of InspectLoader.get_code.
Reading of bytecode requires path_stats to be implemented. To write
bytecode, set_data must also be implemented.
"""
source_path = self.get_filename(fullname)
source_mtime = None
try:
bytecode_path = cache_from_source(source_path)
except NotImplementedError:
bytecode_path = None
else:
try:
st = self.path_stats(source_path)
except NotImplementedError:
pass
else:
source_mtime = int(st['mtime'])
try:
data = self.get_data(bytecode_path)
except IOError:
pass
else:
try:
bytes_data = self._bytes_from_bytecode(fullname, data,
bytecode_path,
st)
except (ImportError, EOFError):
pass
else:
_verbose_message('{} matches {}', bytecode_path,
source_path)
found = marshal.loads(bytes_data)
if isinstance(found, _code_type):
_imp._fix_co_filename(found, source_path)
_verbose_message('code object from {}',
bytecode_path)
return found
else:
msg = "Non-code object in {}"
raise ImportError(msg.format(bytecode_path),
name=fullname, path=bytecode_path)
source_bytes = self.get_data(source_path)
code_object = _call_with_frames_removed(compile,
source_bytes, source_path, 'exec',
dont_inherit=True)
_verbose_message('code object from {}', source_path)
if (not sys.dont_write_bytecode and bytecode_path is not None and
source_mtime is not None):
data = bytearray(_MAGIC_BYTES)
data.extend(_w_long(source_mtime))
data.extend(_w_long(len(source_bytes)))
data.extend(marshal.dumps(code_object))
try:
self._cache_bytecode(source_path, bytecode_path, data)
_verbose_message('wrote {!r}', bytecode_path)
except NotImplementedError:
pass
return code_object
def load_module(self, fullname):
"""Concrete implementation of Loader.load_module.
Requires ExecutionLoader.get_filename and ResourceLoader.get_data to be
implemented to load source code. Use of bytecode is dictated by whether
get_code uses/writes bytecode.
"""
return self._load_module(fullname)
class FileLoader:
"""Base file loader class which implements the loader protocol methods that
require file system usage."""
def __init__(self, fullname, path):
"""Cache the module name and the path to the file found by the
finder."""
self.name = fullname
self.path = path
@_check_name
def load_module(self, fullname):
"""Load a module from a file."""
        # Issue #14857: Avoid the zero-argument form of super() so the
        # implementation of that form can be updated without breaking the frozen module.
return super(FileLoader, self).load_module(fullname)
@_check_name
def get_filename(self, fullname):
"""Return the path to the source file as found by the finder."""
return self.path
def get_data(self, path):
"""Return the data from path as raw bytes."""
with _io.FileIO(path, 'r') as file:
return file.read()
class SourceFileLoader(FileLoader, SourceLoader):
"""Concrete implementation of SourceLoader using the file system."""
def path_stats(self, path):
"""Return the metadata for the path."""
st = _os.stat(path)
return {'mtime': st.st_mtime, 'size': st.st_size}
def _cache_bytecode(self, source_path, bytecode_path, data):
# Adapt between the two APIs
try:
mode = _os.stat(source_path).st_mode
except OSError:
mode = 0o666
# We always ensure write access so we can update cached files
# later even when the source files are read-only on Windows (#6074)
mode |= 0o200
return self.set_data(bytecode_path, data, _mode=mode)
def set_data(self, path, data, *, _mode=0o666):
"""Write bytes data to a file."""
parent, filename = _path_split(path)
path_parts = []
# Figure out what directories are missing.
while parent and not _path_isdir(parent):
parent, part = _path_split(parent)
path_parts.append(part)
# Create needed directories.
for part in reversed(path_parts):
parent = _path_join(parent, part)
try:
_os.mkdir(parent)
except FileExistsError:
# Probably another Python process already created the dir.
continue
except OSError as exc:
# Could be a permission error, read-only filesystem: just forget
# about writing the data.
_verbose_message('could not create {!r}: {!r}', parent, exc)
return
try:
_write_atomic(path, data, _mode)
_verbose_message('created {!r}', path)
except OSError as exc:
# Same as above: just don't write the bytecode.
_verbose_message('could not create {!r}: {!r}', path, exc)
class SourcelessFileLoader(FileLoader, _LoaderBasics):
"""Loader which handles sourceless file imports."""
def load_module(self, fullname):
return self._load_module(fullname, sourceless=True)
def get_code(self, fullname):
path = self.get_filename(fullname)
data = self.get_data(path)
bytes_data = self._bytes_from_bytecode(fullname, data, path, None)
found = marshal.loads(bytes_data)
if isinstance(found, _code_type):
_verbose_message('code object from {!r}', path)
return found
else:
raise ImportError("Non-code object in {}".format(path),
name=fullname, path=path)
def get_source(self, fullname):
"""Return None as there is no source code."""
return None
# Filled in by _setup().
EXTENSION_SUFFIXES = []
class ExtensionFileLoader:
"""Loader for extension modules.
The constructor is designed to work with FileFinder.
"""
def __init__(self, name, path):
self.name = name
self.path = path
@_check_name
@set_package
@set_loader
def load_module(self, fullname):
"""Load an extension module."""
is_reload = fullname in sys.modules
try:
module = _call_with_frames_removed(_imp.load_dynamic,
fullname, self.path)
_verbose_message('extension module loaded from {!r}', self.path)
if self.is_package(fullname) and not hasattr(module, '__path__'):
module.__path__ = [_path_split(self.path)[0]]
return module
except:
if not is_reload and fullname in sys.modules:
del sys.modules[fullname]
raise
def is_package(self, fullname):
"""Return True if the extension module is a package."""
file_name = _path_split(self.path)[1]
return any(file_name == '__init__' + suffix
for suffix in EXTENSION_SUFFIXES)
def get_code(self, fullname):
"""Return None as an extension module cannot create a code object."""
return None
def get_source(self, fullname):
"""Return None as extension modules have no source code."""
return None
class _NamespacePath:
"""Represents a namespace package's path. It uses the module name
to find its parent module, and from there it looks up the parent's
__path__. When this changes, the module's own path is recomputed,
using path_finder. For top-level modules, the parent module's path
is sys.path."""
def __init__(self, name, path, path_finder):
self._name = name
self._path = path
self._last_parent_path = tuple(self._get_parent_path())
self._path_finder = path_finder
def _find_parent_path_names(self):
"""Returns a tuple of (parent-module-name, parent-path-attr-name)"""
parent, dot, me = self._name.rpartition('.')
if dot == '':
# This is a top-level module. sys.path contains the parent path.
return 'sys', 'path'
# Not a top-level module. parent-module.__path__ contains the
# parent path.
return parent, '__path__'
def _get_parent_path(self):
parent_module_name, path_attr_name = self._find_parent_path_names()
return getattr(sys.modules[parent_module_name], path_attr_name)
def _recalculate(self):
# If the parent's path has changed, recalculate _path
parent_path = tuple(self._get_parent_path()) # Make a copy
if parent_path != self._last_parent_path:
loader, new_path = self._path_finder(self._name, parent_path)
# Note that no changes are made if a loader is returned, but we
# do remember the new parent path
if loader is None:
self._path = new_path
self._last_parent_path = parent_path # Save the copy
return self._path
def __iter__(self):
return iter(self._recalculate())
def __len__(self):
return len(self._recalculate())
def __repr__(self):
return "_NamespacePath({!r})".format(self._path)
def __contains__(self, item):
return item in self._recalculate()
def append(self, item):
self._path.append(item)
class NamespaceLoader:
def __init__(self, name, path, path_finder):
self._path = _NamespacePath(name, path, path_finder)
@classmethod
def module_repr(cls, module):
return "<module '{}' (namespace)>".format(module.__name__)
@module_for_loader
def load_module(self, module):
"""Load a namespace module."""
_verbose_message('namespace module loaded with path {!r}', self._path)
module.__path__ = self._path
return module
# Finders #####################################################################
class PathFinder:
"""Meta path finder for sys.path and package __path__ attributes."""
@classmethod
def invalidate_caches(cls):
"""Call the invalidate_caches() method on all path entry finders
        stored in sys.path_importer_cache (where implemented)."""
for finder in sys.path_importer_cache.values():
if hasattr(finder, 'invalidate_caches'):
finder.invalidate_caches()
@classmethod
def _path_hooks(cls, path):
"""Search sequence of hooks for a finder for 'path'.
If 'hooks' is false then use sys.path_hooks.
"""
if not sys.path_hooks:
_warnings.warn('sys.path_hooks is empty', ImportWarning)
for hook in sys.path_hooks:
try:
return hook(path)
except ImportError:
continue
else:
return None
@classmethod
def _path_importer_cache(cls, path):
"""Get the finder for the path entry from sys.path_importer_cache.
If the path entry is not in the cache, find the appropriate finder
and cache it. If no finder is available, store None.
"""
if path == '':
path = '.'
try:
finder = sys.path_importer_cache[path]
except KeyError:
finder = cls._path_hooks(path)
sys.path_importer_cache[path] = finder
return finder
@classmethod
def _get_loader(cls, fullname, path):
"""Find the loader or namespace_path for this module/package name."""
# If this ends up being a namespace package, namespace_path is
# the list of paths that will become its __path__
namespace_path = []
for entry in path:
if not isinstance(entry, (str, bytes)):
continue
finder = cls._path_importer_cache(entry)
if finder is not None:
if hasattr(finder, 'find_loader'):
loader, portions = finder.find_loader(fullname)
else:
loader = finder.find_module(fullname)
portions = []
if loader is not None:
# We found a loader: return it immediately.
return loader, namespace_path
# This is possibly part of a namespace package.
# Remember these path entries (if any) for when we
# create a namespace package, and continue iterating
# on path.
namespace_path.extend(portions)
else:
return None, namespace_path
@classmethod
def find_module(cls, fullname, path=None):
"""Find the module on sys.path or 'path' based on sys.path_hooks and
sys.path_importer_cache."""
if path is None:
path = sys.path
loader, namespace_path = cls._get_loader(fullname, path)
if loader is not None:
return loader
else:
if namespace_path:
# We found at least one namespace path. Return a
# loader which can create the namespace package.
return NamespaceLoader(fullname, namespace_path, cls._get_loader)
else:
return None
class FileFinder:
"""File-based finder.
Interactions with the file system are cached for performance, being
refreshed when the directory the finder is handling has been modified.
"""
def __init__(self, path, *loader_details):
"""Initialize with the path to search on and a variable number of
2-tuples containing the loader and the file suffixes the loader
recognizes."""
loaders = []
for loader, suffixes in loader_details:
loaders.extend((suffix, loader) for suffix in suffixes)
self._loaders = loaders
# Base (directory) path
self.path = path or '.'
self._path_mtime = -1
self._path_cache = set()
self._relaxed_path_cache = set()
def invalidate_caches(self):
"""Invalidate the directory mtime."""
self._path_mtime = -1
find_module = _find_module_shim
def find_loader(self, fullname):
"""Try to find a loader for the specified module, or the namespace
package portions. Returns (loader, list-of-portions)."""
is_namespace = False
tail_module = fullname.rpartition('.')[2]
try:
mtime = _os.stat(self.path).st_mtime
except OSError:
mtime = -1
if mtime != self._path_mtime:
self._fill_cache()
self._path_mtime = mtime
# tail_module keeps the original casing, for __file__ and friends
if _relax_case():
cache = self._relaxed_path_cache
cache_module = tail_module.lower()
else:
cache = self._path_cache
cache_module = tail_module
# Check if the module is the name of a directory (and thus a package).
if cache_module in cache:
base_path = _path_join(self.path, tail_module)
if _path_isdir(base_path):
for suffix, loader in self._loaders:
init_filename = '__init__' + suffix
full_path = _path_join(base_path, init_filename)
if _path_isfile(full_path):
return (loader(fullname, full_path), [base_path])
else:
# A namespace package, return the path if we don't also
# find a module in the next section.
is_namespace = True
        # Check whether a file with a proper suffix exists.
for suffix, loader in self._loaders:
full_path = _path_join(self.path, tail_module + suffix)
_verbose_message('trying {}'.format(full_path), verbosity=2)
if cache_module + suffix in cache:
if _path_isfile(full_path):
return (loader(fullname, full_path), [])
if is_namespace:
_verbose_message('possible namespace for {}'.format(base_path))
return (None, [base_path])
return (None, [])
def _fill_cache(self):
"""Fill the cache of potential modules and packages for this directory."""
path = self.path
try:
contents = _os.listdir(path)
except (FileNotFoundError, PermissionError, NotADirectoryError):
# Directory has either been removed, turned into a file, or made
# unreadable.
contents = []
# We store two cached versions, to handle runtime changes of the
# PYTHONCASEOK environment variable.
if not sys.platform.startswith('win'):
self._path_cache = set(contents)
else:
# Windows users can import modules with case-insensitive file
# suffixes (for legacy reasons). Make the suffix lowercase here
# so it's done once instead of for every import. This is safe as
# the specified suffixes to check against are always specified in a
# case-sensitive manner.
lower_suffix_contents = set()
for item in contents:
name, dot, suffix = item.partition('.')
if dot:
new_name = '{}.{}'.format(name, suffix.lower())
else:
new_name = name
lower_suffix_contents.add(new_name)
self._path_cache = lower_suffix_contents
if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):
self._relaxed_path_cache = set(fn.lower() for fn in contents)
@classmethod
def path_hook(cls, *loader_details):
"""A class method which returns a closure to use on sys.path_hook
which will return an instance using the specified loaders and the path
called on the closure.
If the path called on the closure is not a directory, ImportError is
raised.
"""
def path_hook_for_FileFinder(path):
"""Path hook for importlib.machinery.FileFinder."""
if not _path_isdir(path):
raise ImportError("only directories are supported", path=path)
return cls(path, *loader_details)
return path_hook_for_FileFinder
def __repr__(self):
return "FileFinder(%r)" % (self.path,)
# Import itself ###############################################################
class _ImportLockContext:
"""Context manager for the import lock."""
def __enter__(self):
"""Acquire the import lock."""
_imp.acquire_lock()
def __exit__(self, exc_type, exc_value, exc_traceback):
"""Release the import lock regardless of any raised exceptions."""
_imp.release_lock()
def _resolve_name(name, package, level):
"""Resolve a relative module name to an absolute one."""
bits = package.rsplit('.', level - 1)
if len(bits) < level:
raise ValueError('attempted relative import beyond top-level package')
base = bits[0]
return '{}.{}'.format(base, name) if name else base
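# Worked example (illustrative): with __package__ == 'pkg.sub' and level == 2,
# _resolve_name('mod', 'pkg.sub', 2) computes
#   bits = 'pkg.sub'.rsplit('.', 1) -> ['pkg', 'sub']; base = 'pkg'
# and returns 'pkg.mod'.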
def _find_module(name, path):
"""Find a module's loader."""
if not sys.meta_path:
_warnings.warn('sys.meta_path is empty', ImportWarning)
for finder in sys.meta_path:
#with _ImportLockContext():
# loader = finder.find_module(name, path)
loader = finder.find_module(name, path)
if loader is not None:
# The parent import may have already imported this module.
if name not in sys.modules:
return loader
else:
return sys.modules[name].__loader__
else:
return None
def _sanity_check(name, package, level):
"""Verify arguments are "sane"."""
if not isinstance(name, str):
raise TypeError("module name must be str, not {}".format(type(name)))
if level < 0:
raise ValueError('level must be >= 0')
if package:
if not isinstance(package, str):
raise TypeError("__package__ not set to a string")
elif package not in sys.modules:
msg = ("Parent module {!r} not loaded, cannot perform relative "
"import")
raise SystemError(msg.format(package))
if not name and level == 0:
raise ValueError("Empty module name")
_ERR_MSG = 'No module named {!r}'
def _find_and_load_unlocked(name, import_):
path = None
parent = name.rpartition('.')[0]
if parent:
if parent not in sys.modules:
_call_with_frames_removed(import_, parent)
# Crazy side-effects!
if name in sys.modules:
return sys.modules[name]
# Backwards-compatibility; be nicer to skip the dict lookup.
parent_module = sys.modules[parent]
try:
path = parent_module.__path__
except AttributeError:
msg = (_ERR_MSG + '; {} is not a package').format(name, parent)
raise ImportError(msg, name=name)
loader = _find_module(name, path)
if loader is None:
exc = ImportError(_ERR_MSG.format(name), name=name)
# TODO(brett): switch to a proper ModuleNotFound exception in Python
# 3.4.
exc._not_found = True
raise exc
elif name not in sys.modules:
# The parent import may have already imported this module.
loader.load_module(name)
_verbose_message('import {!r} # {!r}', name, loader)
# Backwards-compatibility; be nicer to skip the dict lookup.
module = sys.modules[name]
if parent:
# Set the module as an attribute on its parent.
parent_module = sys.modules[parent]
setattr(parent_module, name.rpartition('.')[2], module)
# Set __package__ if the loader did not.
if getattr(module, '__package__', None) is None:
try:
module.__package__ = module.__name__
if not hasattr(module, '__path__'):
module.__package__ = module.__package__.rpartition('.')[0]
except AttributeError:
pass
# Set loader if need be.
if not hasattr(module, '__loader__'):
try:
module.__loader__ = loader
except AttributeError:
pass
return module
def _find_and_load(name, import_):
"""Find and load the module, and release the import lock."""
try:
lock = _get_module_lock(name)
finally:
_imp.release_lock()
lock.acquire()
try:
return _find_and_load_unlocked(name, import_)
finally:
lock.release()
def _gcd_import(name, package=None, level=0):
"""Import and return the module based on its name, the package the call is
being made from, and the level adjustment.
This function represents the greatest common denominator of functionality
between import_module and __import__. This includes setting __package__ if
the loader did not.
"""
_sanity_check(name, package, level)
if level > 0:
name = _resolve_name(name, package, level)
_imp.acquire_lock()
if name not in sys.modules:
return _find_and_load(name, _gcd_import)
module = sys.modules[name]
if module is None:
_imp.release_lock()
message = ("import of {} halted; "
"None in sys.modules".format(name))
raise ImportError(message, name=name)
_lock_unlock_module(name)
return module
def _handle_fromlist(module, fromlist, import_):
"""Figure out what __import__ should return.
The import_ parameter is a callable which takes the name of module to
import. It is required to decouple the function from assuming importlib's
import implementation is desired.
"""
# The hell that is fromlist ...
# If a package was imported, try to import stuff from fromlist.
if hasattr(module, '__path__'):
if '*' in fromlist:
fromlist = list(fromlist)
fromlist.remove('*')
if hasattr(module, '__all__'):
fromlist.extend(module.__all__)
for x in fromlist:
if not hasattr(module, x):
from_name = '{}.{}'.format(module.__name__, x)
try:
_call_with_frames_removed(import_, from_name)
except ImportError as exc:
# Backwards-compatibility dictates we ignore failed
# imports triggered by fromlist for modules that don't
# exist.
# TODO(brett): In Python 3.4, have import raise
# ModuleNotFound and catch that.
if getattr(exc, '_not_found', False):
if exc.name == from_name:
continue
raise
return module
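# Illustrative behaviour (not in the original source): for a statement like
# 'from pkg import mod', __import__ is called with fromlist=('mod',); if
# 'pkg' is a package and 'mod' is not yet an attribute on it,
# _handle_fromlist imports 'pkg.mod' and then returns the 'pkg' module.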
def _calc___package__(globals):
"""Calculate what __package__ should be.
__package__ is not guaranteed to be defined or could be set to None
to represent that its proper value is unknown.
"""
package = globals.get('__package__')
if package is None:
package = globals['__name__']
if '__path__' not in globals:
package = package.rpartition('.')[0]
return package
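# Worked example (illustrative): for a plain module with
# globals == {'__name__': 'pkg.mod'} and no '__path__', the computed
# __package__ is 'pkg'; for a package ('__path__' present) it stays 'pkg.mod'.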
def _get_supported_file_loaders():
"""Returns a list of file-based module loaders.
Each item is a tuple (loader, suffixes).
"""
extensions = ExtensionFileLoader, _imp.extension_suffixes()
source = SourceFileLoader, SOURCE_SUFFIXES
bytecode = SourcelessFileLoader, BYTECODE_SUFFIXES
return [extensions, source, bytecode]
def __import__(name, globals=None, locals=None, fromlist=(), level=0):
"""Import a module.
    The 'globals' argument is used to infer where the import is occurring from
to handle relative imports. The 'locals' argument is ignored. The
'fromlist' argument specifies what should exist as attributes on the module
being imported (e.g. ``from module import <fromlist>``). The 'level'
argument represents the package location to import from in a relative
import (e.g. ``from ..pkg import mod`` would have a 'level' of 2).
"""
if level == 0:
module = _gcd_import(name)
else:
globals_ = globals if globals is not None else {}
package = _calc___package__(globals_)
module = _gcd_import(name, package, level)
if not fromlist:
# Return up to the first dot in 'name'. This is complicated by the fact
# that 'name' may be relative.
if level == 0:
return _gcd_import(name.partition('.')[0])
elif not name:
return module
else:
# Figure out where to slice the module's name up to the first dot
# in 'name'.
cut_off = len(name) - len(name.partition('.')[0])
# Slice end needs to be positive to alleviate need to special-case
# when ``'.' not in name``.
return sys.modules[module.__name__[:len(module.__name__)-cut_off]]
else:
return _handle_fromlist(module, fromlist, _gcd_import)
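# Illustrative mapping of import statements onto this __import__ (sketch):
#   import pkg.mod        -> __import__('pkg.mod', g, l, (), 0)    returns pkg
#   from pkg import mod   -> __import__('pkg', g, l, ('mod',), 0)  returns pkg
#   from ..pkg import mod -> __import__('pkg', g, l, ('mod',), 2)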
def _setup(sys_module, _imp_module):
"""Setup importlib by importing needed built-in modules and injecting them
into the global namespace.
As sys is needed for sys.modules access and _imp is needed to load built-in
modules, those two modules must be explicitly passed in.
"""
global _imp, sys, BYTECODE_SUFFIXES
_imp = _imp_module
sys = sys_module
if sys.flags.optimize:
BYTECODE_SUFFIXES = OPTIMIZED_BYTECODE_SUFFIXES
else:
BYTECODE_SUFFIXES = DEBUG_BYTECODE_SUFFIXES
module_type = type(sys)
for name, module in sys.modules.items():
if isinstance(module, module_type):
if not hasattr(module, '__loader__'):
if name in sys.builtin_module_names:
module.__loader__ = BuiltinImporter
#fix me brython
#elif _imp.is_frozen(name):
# module.__loader__ = FrozenImporter
self_module = sys.modules[__name__]
for builtin_name in ('_io', '_warnings', 'builtins'): #, 'marshal'):
if builtin_name not in sys.modules:
builtin_module = BuiltinImporter.load_module(builtin_name)
else:
builtin_module = sys.modules[builtin_name]
setattr(self_module, builtin_name, builtin_module)
os_details = ('posix', ['/']), ('nt', ['\\', '/']), ('os2', ['\\', '/'])
for builtin_os, path_separators in os_details:
# Assumption made in _path_join()
assert all(len(sep) == 1 for sep in path_separators)
path_sep = path_separators[0]
if builtin_os in sys.modules:
os_module = sys.modules[builtin_os]
break
else:
try:
os_module = BuiltinImporter.load_module(builtin_os)
# TODO: rip out os2 code after 3.3 is released as per PEP 11
if builtin_os == 'os2' and 'EMX GCC' in sys.version:
path_sep = path_separators[1]
break
except ImportError:
continue
else:
raise ImportError('importlib requires posix or nt')
try:
thread_module = BuiltinImporter.load_module('_thread')
except ImportError:
# Python was built without threads
thread_module = None
weakref_module = BuiltinImporter.load_module('_weakref')
if builtin_os == 'nt':
winreg_module = BuiltinImporter.load_module('winreg')
setattr(self_module, '_winreg', winreg_module)
setattr(self_module, '_os', os_module)
setattr(self_module, '_thread', thread_module)
setattr(self_module, '_weakref', weakref_module)
setattr(self_module, 'path_sep', path_sep)
setattr(self_module, 'path_separators', set(path_separators))
# Constants
setattr(self_module, '_relax_case', _make_relax_case())
EXTENSION_SUFFIXES.extend(_imp.extension_suffixes())
if builtin_os == 'nt':
SOURCE_SUFFIXES.append('.pyw')
if '_d.pyd' in EXTENSION_SUFFIXES:
WindowsRegistryFinder.DEBUG_BUILD = True
def _install(sys_module, _imp_module):
"""Install importlib as the implementation of import."""
_setup(sys_module, _imp_module)
supported_loaders = _get_supported_file_loaders()
sys.path_hooks.extend([FileFinder.path_hook(*supported_loaders)])
sys.meta_path.append(BuiltinImporter)
sys.meta_path.append(FrozenImporter)
if _os.__name__ == 'nt':
sys.meta_path.append(WindowsRegistryFinder)
sys.meta_path.append(PathFinder)
|
agpl-3.0
|
maelg/RaspiNotifier
|
checker/facebook.py
|
1
|
1031
|
import RPi.GPIO as GPIO, subprocess
import ConfigParser
# NOTE: the file as shown never defines 'config'; this initialisation (and
# the config file path) is an assumption -- adjust it to your installation.
config = ConfigParser.ConfigParser()
config.read("/home/pi/RaspiNotifier/config.cfg")
#if( int(time.strftime('%H')) >= 8 and int(time.strftime('%H')) <= 21 ):
def checkFacebook():
nbr_notif = int(open("/home/pi/RaspiNotifier/nbr/nbr_facebook.txt", "r").read())
GPIO_PIN = int(config.get("Facebook", "gpioPin"))
GPIO.setmode(GPIO.BOARD)
GPIO.setup(GPIO_PIN, GPIO.OUT)
proc = subprocess.Popen("php /home/pi/RaspiNotifier/FacebookAPI/FBChecker.php", shell=True, stdout=subprocess.PIPE)
newnotif = proc.stdout.read()
if newnotif.isdigit():
print("Facebook say: " + str(newnotif))
print("Last time: " + str(nbr_notif))
if int(newnotif) > nbr_notif:
GPIO.output(GPIO_PIN, True)
print("Turn on pin " + str(GPIO_PIN))
if int(newnotif) == nbr_notif:
print("Don't change state on GPIO")
if int(newnotif) < nbr_notif:
GPIO.output(GPIO_PIN, False)
print("Turn off pin " + str(GPIO_PIN))
open("/home/pi/RaspiNotifier/nbr/nbr_facebook.txt", "w").write(str(newnotif))
else:
print("Error: " + newnotif)
#else:
# print("Silence !")
|
gpl-2.0
|
openshift/openshift-tools
|
ansible/roles/lib_openshift_3.2/library/oc_process.py
|
6
|
37409
|
#!/usr/bin/env python # pylint: disable=too-many-lines
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
'''
OpenShiftCLI class that wraps the oc commands in a subprocess
'''
# pylint: disable=too-many-lines
import atexit
import json
import os
import re
import shutil
import subprocess
import ruamel.yaml as yaml
#import yaml
#
## This is here because of a bug that causes yaml
## to incorrectly handle timezone info on timestamps
#def timestamp_constructor(_, node):
# '''return timestamps as strings'''
# return str(node.value)
#yaml.add_constructor(u'tag:yaml.org,2002:timestamp', timestamp_constructor)
class OpenShiftCLIError(Exception):
'''Exception class for openshiftcli'''
pass
# pylint: disable=too-few-public-methods
class OpenShiftCLI(object):
''' Class to wrap the command line tools '''
def __init__(self,
namespace,
kubeconfig='/etc/origin/master/admin.kubeconfig',
verbose=False,
all_namespaces=False):
''' Constructor for OpenshiftCLI '''
self.namespace = namespace
self.verbose = verbose
self.kubeconfig = kubeconfig
self.all_namespaces = all_namespaces
# Pylint allows only 5 arguments to be passed.
# pylint: disable=too-many-arguments
def _replace_content(self, resource, rname, content, force=False, sep='.'):
''' replace the current object with the content '''
res = self._get(resource, rname)
if not res['results']:
return res
fname = '/tmp/%s' % rname
yed = Yedit(fname, res['results'][0], separator=sep)
changes = []
for key, value in content.items():
changes.append(yed.put(key, value))
if any([change[0] for change in changes]):
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._replace(fname, force)
return {'returncode': 0, 'updated': False}
def _replace(self, fname, force=False):
        '''replace a resource from a file '''
cmd = ['-n', self.namespace, 'replace', '-f', fname]
if force:
cmd.append('--force')
return self.openshift_cmd(cmd)
def _create_from_content(self, rname, content):
        '''create a resource from content '''
fname = '/tmp/%s' % rname
yed = Yedit(fname, content=content)
yed.write()
atexit.register(Utils.cleanup, [fname])
return self._create(fname)
def _create(self, fname):
        '''create a resource from a file '''
return self.openshift_cmd(['create', '-f', fname, '-n', self.namespace])
def _delete(self, resource, rname, selector=None):
        '''delete a resource '''
cmd = ['delete', resource, rname, '-n', self.namespace]
if selector:
cmd.append('--selector=%s' % selector)
return self.openshift_cmd(cmd)
def _process(self, template_name, create=False, params=None, template_data=None):
        '''process a template '''
cmd = ['process', '-n', self.namespace]
if template_data:
cmd.extend(['-f', '-'])
else:
cmd.append(template_name)
if params:
param_str = ["%s=%s" % (key, value) for key, value in params.items()]
cmd.append('-v')
cmd.extend(param_str)
results = self.openshift_cmd(cmd, output=True, input_data=template_data)
if results['returncode'] != 0 or not create:
return results
fname = '/tmp/%s' % template_name
yed = Yedit(fname, results['results'])
yed.write()
atexit.register(Utils.cleanup, [fname])
return self.openshift_cmd(['-n', self.namespace, 'create', '-f', fname])
def _get(self, resource, rname=None, selector=None):
'''return a resource by name '''
cmd = ['get', resource]
if selector:
cmd.append('--selector=%s' % selector)
if self.all_namespaces:
cmd.extend(['--all-namespaces'])
elif self.namespace:
cmd.extend(['-n', self.namespace])
cmd.extend(['-o', 'json'])
if rname:
cmd.append(rname)
rval = self.openshift_cmd(cmd, output=True)
        # Ensure results are returned in an array
if rval.has_key('items'):
rval['results'] = rval['items']
elif not isinstance(rval['results'], list):
rval['results'] = [rval['results']]
return rval
def _schedulable(self, node=None, selector=None, schedulable=True):
        ''' perform oadm manage-node schedulable '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
cmd.append('--schedulable=%s' % schedulable)
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _list_pods(self, node=None, selector=None, pod_selector=None):
        ''' perform oadm manage-node --list-pods '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
if pod_selector:
cmd.append('--pod-selector=%s' % pod_selector)
cmd.extend(['--list-pods', '-o', 'json'])
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
#pylint: disable=too-many-arguments
def _evacuate(self, node=None, selector=None, pod_selector=None, dry_run=False, grace_period=None, force=False):
''' perform oadm manage-node evacuate '''
cmd = ['manage-node']
if node:
cmd.extend(node)
else:
cmd.append('--selector=%s' % selector)
if dry_run:
cmd.append('--dry-run')
if pod_selector:
cmd.append('--pod-selector=%s' % pod_selector)
if grace_period:
cmd.append('--grace-period=%s' % int(grace_period))
if force:
cmd.append('--force')
cmd.append('--evacuate')
return self.openshift_cmd(cmd, oadm=True, output=True, output_type='raw')
def _import_image(self, url=None, name=None, tag=None):
''' perform image import '''
cmd = ['import-image']
image = '{0}'.format(name)
if tag:
image += ':{0}'.format(tag)
cmd.append(image)
if url:
cmd.append('--from={0}/{1}'.format(url, image))
cmd.append('-n{0}'.format(self.namespace))
cmd.append('--confirm')
return self.openshift_cmd(cmd)
#pylint: disable=too-many-arguments
def openshift_cmd(self, cmd, oadm=False, output=False, output_type='json', input_data=None):
'''Base command for oc '''
cmds = []
if oadm:
cmds = ['/usr/bin/oc', 'adm']
else:
cmds = ['/usr/bin/oc']
cmds.extend(cmd)
rval = {}
results = ''
err = None
if self.verbose:
print ' '.join(cmds)
proc = subprocess.Popen(cmds,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
env={'KUBECONFIG': self.kubeconfig})
stdout, stderr = proc.communicate(input_data)
rval = {"returncode": proc.returncode,
"results": results,
"cmd": ' '.join(cmds),
}
if proc.returncode == 0:
if output:
if output_type == 'json':
try:
rval['results'] = json.loads(stdout)
except ValueError as err:
if "No JSON object could be decoded" in err.message:
err = err.message
elif output_type == 'raw':
rval['results'] = stdout
if self.verbose:
print stdout
print stderr
if err:
rval.update({"err": err,
"stderr": stderr,
"stdout": stdout,
"cmd": cmds
})
else:
rval.update({"stderr": stderr,
"stdout": stdout,
"results": {},
})
return rval
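# Illustrative call (values hypothetical): fetching pods as parsed JSON
# through OpenShiftCLI.openshift_cmd would look like
#   rval = cli.openshift_cmd(['get', 'pods', '-n', 'default', '-o', 'json'],
#                            output=True)
#   pods = rval['results']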
class Utils(object):
''' utilities for openshiftcli modules '''
@staticmethod
def create_file(rname, data, ftype='yaml'):
''' create a file in tmp with name and contents'''
path = os.path.join('/tmp', rname)
with open(path, 'w') as fds:
if ftype == 'yaml':
fds.write(yaml.dump(data, Dumper=yaml.RoundTripDumper))
elif ftype == 'json':
fds.write(json.dumps(data))
else:
fds.write(data)
# Register cleanup when module is done
atexit.register(Utils.cleanup, [path])
return path
@staticmethod
def create_files_from_contents(content, content_type=None):
'''Turn an array of dict: filename, content into a files array'''
if not isinstance(content, list):
content = [content]
files = []
for item in content:
path = Utils.create_file(item['path'], item['data'], ftype=content_type)
files.append({'name': os.path.basename(path), 'path': path})
return files
@staticmethod
def cleanup(files):
'''Clean up on exit '''
for sfile in files:
if os.path.exists(sfile):
if os.path.isdir(sfile):
shutil.rmtree(sfile)
elif os.path.isfile(sfile):
os.remove(sfile)
@staticmethod
def exists(results, _name):
''' Check to see if the results include the name '''
if not results:
return False
if Utils.find_result(results, _name):
return True
return False
@staticmethod
def find_result(results, _name):
''' Find the specified result by name'''
rval = None
for result in results:
if result.has_key('metadata') and result['metadata']['name'] == _name:
rval = result
break
return rval
@staticmethod
def get_resource_file(sfile, sfile_type='yaml'):
''' return the service file '''
contents = None
with open(sfile) as sfd:
contents = sfd.read()
if sfile_type == 'yaml':
contents = yaml.load(contents, yaml.RoundTripLoader)
elif sfile_type == 'json':
contents = json.loads(contents)
return contents
# Disabling too-many-branches. This is a yaml dictionary comparison function
# pylint: disable=too-many-branches,too-many-return-statements,too-many-statements
@staticmethod
def check_def_equal(user_def, result_def, skip_keys=None, debug=False):
''' Given a user defined definition, compare it with the results given back by our query. '''
# Currently these values are autogenerated and we do not need to check them
skip = ['metadata', 'status']
if skip_keys:
skip.extend(skip_keys)
for key, value in result_def.items():
if key in skip:
continue
# Both are lists
if isinstance(value, list):
if not user_def.has_key(key):
if debug:
print 'User data does not have key [%s]' % key
print 'User data: %s' % user_def
return False
if not isinstance(user_def[key], list):
if debug:
print 'user_def[key] is not a list key=[%s] user_def[key]=%s' % (key, user_def[key])
return False
if len(user_def[key]) != len(value):
if debug:
print "List lengths are not equal."
print "key=[%s]: user_def[%s] != value[%s]" % (key, len(user_def[key]), len(value))
print "user_def: %s" % user_def[key]
print "value: %s" % value
return False
for values in zip(user_def[key], value):
if isinstance(values[0], dict) and isinstance(values[1], dict):
if debug:
print 'sending list - list'
print type(values[0])
print type(values[1])
result = Utils.check_def_equal(values[0], values[1], skip_keys=skip_keys, debug=debug)
if not result:
print 'list compare returned false'
return False
elif value != user_def[key]:
if debug:
print 'value should be identical'
print value
print user_def[key]
return False
# recurse on a dictionary
elif isinstance(value, dict):
if not user_def.has_key(key):
if debug:
print "user_def does not have key [%s]" % key
return False
if not isinstance(user_def[key], dict):
if debug:
print "dict returned false: not instance of dict"
return False
# before passing ensure keys match
api_values = set(value.keys()) - set(skip)
user_values = set(user_def[key].keys()) - set(skip)
if api_values != user_values:
if debug:
print "keys are not equal in dict"
print api_values
print user_values
return False
result = Utils.check_def_equal(user_def[key], value, skip_keys=skip_keys, debug=debug)
if not result:
if debug:
print "dict returned false"
print result
return False
# Verify each key, value pair is the same
else:
if not user_def.has_key(key) or value != user_def[key]:
if debug:
print "value not equal; user_def does not have key"
print key
print value
if user_def.has_key(key):
print user_def[key]
return False
if debug:
print 'returning true'
return True
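# Illustrative comparison (not in the original source):
#   Utils.check_def_equal({'kind': 'Service', 'metadata': {'name': 'x'}},
#                         {'kind': 'Service', 'status': {'phase': 'Active'}})
# returns True: 'metadata' and 'status' are in the skip list, and the
# remaining key ('kind') is equal in both definitions.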
class OpenShiftCLIConfig(object):
'''Generic Config'''
def __init__(self, rname, namespace, kubeconfig, options):
self.kubeconfig = kubeconfig
self.name = rname
self.namespace = namespace
self._options = options
@property
def config_options(self):
''' return config options '''
return self._options
def to_option_list(self):
'''return all options as a string'''
return self.stringify()
def stringify(self):
''' return the options hash as cli params in a string '''
rval = []
for key, data in self.config_options.items():
if data['include'] \
and (data['value'] or isinstance(data['value'], int)):
rval.append('--%s=%s' % (key.replace('_', '-'), data['value']))
return rval
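# Illustrative sketch: with options like
#   {'template_name': {'value': 'my-template', 'include': True},
#    'params': {'value': None, 'include': True}}
# stringify() yields ['--template-name=my-template'] -- entries whose value
# is falsy (and not an int) are skipped, and underscores become dashes.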
class YeditException(Exception):
''' Exception class for Yedit '''
pass
class Yedit(object):
''' Class to modify yaml files '''
re_valid_key = r"(((\[-?\d+\])|([0-9a-zA-Z%s/_-]+)).?)+$"
re_key = r"(?:\[(-?\d+)\])|([0-9a-zA-Z%s/_-]+)"
com_sep = set(['.', '#', '|', ':'])
# pylint: disable=too-many-arguments
def __init__(self, filename=None, content=None, content_type='yaml', separator='.', backup=False):
self.content = content
self._separator = separator
self.filename = filename
self.__yaml_dict = content
self.content_type = content_type
self.backup = backup
self.load(content_type=self.content_type)
if self.__yaml_dict == None:
self.__yaml_dict = {}
@property
def separator(self):
        ''' getter method for separator '''
return self._separator
@separator.setter
    def separator(self, value):
        ''' setter method for separator '''
        self._separator = value
@property
def yaml_dict(self):
''' getter method for yaml_dict '''
return self.__yaml_dict
@yaml_dict.setter
def yaml_dict(self, value):
''' setter method for yaml_dict '''
self.__yaml_dict = value
@staticmethod
def parse_key(key, sep='.'):
'''parse the key allowing the appropriate separator'''
common_separators = list(Yedit.com_sep - set([sep]))
return re.findall(Yedit.re_key % ''.join(common_separators), key)
@staticmethod
def valid_key(key, sep='.'):
'''validate the incoming key'''
common_separators = list(Yedit.com_sep - set([sep]))
if not re.match(Yedit.re_valid_key % ''.join(common_separators), key):
return False
return True
@staticmethod
def remove_entry(data, key, sep='.'):
''' remove data at location key '''
if key == '' and isinstance(data, dict):
data.clear()
return True
elif key == '' and isinstance(data, list):
del data[:]
return True
if not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict)):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key and isinstance(data, dict):
data = data.get(dict_key, None)
elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
data = data[int(arr_ind)]
else:
return None
# process last index for remove
# expected list entry
if key_indexes[-1][0]:
if isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
del data[int(key_indexes[-1][0])]
return True
# expected dict entry
elif key_indexes[-1][1]:
if isinstance(data, dict):
del data[key_indexes[-1][1]]
return True
@staticmethod
def add_entry(data, key, item=None, sep='.'):
        ''' Add an item to a dictionary using key notation a.b.c
            d = {'a': {'b': 'c'}}
            add_entry(d, 'a.b', 'x') sets d['a']['b'] = 'x'
'''
if key == '':
pass
elif not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict)):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes[:-1]:
if dict_key:
if isinstance(data, dict) and data.has_key(dict_key) and data[dict_key]:
data = data[dict_key]
continue
elif data and not isinstance(data, dict):
raise YeditException("Unexpected item type found while going through key " +
"path: {} (at key: {})".format(key, dict_key))
data[dict_key] = {}
data = data[dict_key]
elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
data = data[int(arr_ind)]
else:
raise YeditException("Unexpected item type found while going through key path: {}".format(key))
if key == '':
data = item
# process last index for add
# expected list entry
elif key_indexes[-1][0] and isinstance(data, list) and int(key_indexes[-1][0]) <= len(data) - 1:
data[int(key_indexes[-1][0])] = item
# expected dict entry
elif key_indexes[-1][1] and isinstance(data, dict):
data[key_indexes[-1][1]] = item
# didn't add/update to an existing list, nor add/update key to a dict
# so we must have been provided some syntax like a.b.c[<int>] = "data" for a
# non-existent array
else:
raise YeditException("Error adding data to object at path: {}".format(key))
return data
@staticmethod
def get_entry(data, key, sep='.'):
''' Get an item from a dictionary with key notation a.b.c
            d = {'a': {'b': 'c'}}
key = a.b
return c
'''
if key == '':
pass
elif not (key and Yedit.valid_key(key, sep)) and isinstance(data, (list, dict)):
return None
key_indexes = Yedit.parse_key(key, sep)
for arr_ind, dict_key in key_indexes:
if dict_key and isinstance(data, dict):
data = data.get(dict_key, None)
elif arr_ind and isinstance(data, list) and int(arr_ind) <= len(data) - 1:
data = data[int(arr_ind)]
else:
return None
return data
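    # Worked examples (illustrative):
    #   Yedit.get_entry({'a': {'b': ['x', 'y']}}, 'a.b[1]') -> 'y'
    #   Yedit.get_entry({'a': {'b': 'c'}}, 'a#b', sep='#')  -> 'c'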
def write(self):
''' write to file '''
if not self.filename:
raise YeditException('Please specify a filename.')
if self.backup and self.file_exists():
shutil.copy(self.filename, self.filename + '.orig')
tmp_filename = self.filename + '.yedit'
try:
with open(tmp_filename, 'w') as yfd:
# pylint: disable=no-member,maybe-no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
yfd.write(yaml.dump(self.yaml_dict, Dumper=yaml.RoundTripDumper))
except Exception as err:
raise YeditException(err.message)
os.rename(tmp_filename, self.filename)
return (True, self.yaml_dict)
def read(self):
''' read from file '''
# check if it exists
if self.filename == None or not self.file_exists():
return None
contents = None
with open(self.filename) as yfd:
contents = yfd.read()
return contents
def file_exists(self):
''' return whether file exists '''
if os.path.exists(self.filename):
return True
return False
def load(self, content_type='yaml'):
''' return yaml file '''
contents = self.read()
if not contents and not self.content:
return None
if self.content:
if isinstance(self.content, dict):
self.yaml_dict = self.content
return self.yaml_dict
elif isinstance(self.content, str):
contents = self.content
# check if it is yaml
try:
if content_type == 'yaml' and contents:
self.yaml_dict = yaml.load(contents, yaml.RoundTripLoader)
# pylint: disable=no-member,maybe-no-member
if hasattr(self.yaml_dict, 'fa'):
self.yaml_dict.fa.set_block_style()
elif content_type == 'json' and contents:
self.yaml_dict = json.loads(contents)
except yaml.YAMLError as err:
# Error loading yaml or json
raise YeditException('Problem with loading yaml file. %s' % err)
return self.yaml_dict
def get(self, key):
''' get a specified key'''
try:
entry = Yedit.get_entry(self.yaml_dict, key, self.separator)
except KeyError as _:
entry = None
return entry
def pop(self, path, key_or_item):
''' remove a key, value pair from a dict or an item for a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if entry == None:
return (False, self.yaml_dict)
if isinstance(entry, dict):
# pylint: disable=no-member,maybe-no-member
if entry.has_key(key_or_item):
entry.pop(key_or_item)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
elif isinstance(entry, list):
# pylint: disable=no-member,maybe-no-member
ind = None
try:
ind = entry.index(key_or_item)
except ValueError:
return (False, self.yaml_dict)
entry.pop(ind)
return (True, self.yaml_dict)
return (False, self.yaml_dict)
def delete(self, path):
''' remove path from a dict'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if entry == None:
return (False, self.yaml_dict)
result = Yedit.remove_entry(self.yaml_dict, path, self.separator)
if not result:
return (False, self.yaml_dict)
return (True, self.yaml_dict)
def exists(self, path, value):
''' check if value exists at path'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if isinstance(entry, list):
if value in entry:
return True
return False
elif isinstance(entry, dict):
if isinstance(value, dict):
rval = False
for key, val in value.items():
if entry[key] != val:
rval = False
break
else:
rval = True
return rval
return value in entry
return entry == value
def append(self, path, value):
'''append value to a list'''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if entry is None:
self.put(path, [])
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
if not isinstance(entry, list):
return (False, self.yaml_dict)
# pylint: disable=no-member,maybe-no-member
entry.append(value)
return (True, self.yaml_dict)
# pylint: disable=too-many-arguments
def update(self, path, value, index=None, curr_value=None):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if isinstance(entry, dict):
# pylint: disable=no-member,maybe-no-member
if not isinstance(value, dict):
raise YeditException('Cannot replace key, value entry in dict with non-dict type.' \
' value=[%s] [%s]' % (value, type(value)))
entry.update(value)
return (True, self.yaml_dict)
elif isinstance(entry, list):
# pylint: disable=no-member,maybe-no-member
ind = None
if curr_value:
try:
ind = entry.index(curr_value)
except ValueError:
return (False, self.yaml_dict)
elif index != None:
ind = index
if ind != None and entry[ind] != value:
entry[ind] = value
return (True, self.yaml_dict)
# see if it exists in the list
try:
ind = entry.index(value)
except ValueError:
# doesn't exist, append it
entry.append(value)
return (True, self.yaml_dict)
#already exists, return
if ind != None:
return (False, self.yaml_dict)
return (False, self.yaml_dict)
def put(self, path, value):
''' put path, value into a dict '''
try:
entry = Yedit.get_entry(self.yaml_dict, path, self.separator)
except KeyError as _:
entry = None
if entry == value:
return (False, self.yaml_dict)
# deepcopy didn't work
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader)
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
tmp_copy.fa.set_block_style()
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if not result:
return (False, self.yaml_dict)
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
def create(self, path, value):
''' create a yaml file '''
if not self.file_exists():
# deepcopy didn't work
tmp_copy = yaml.load(yaml.round_trip_dump(self.yaml_dict, default_flow_style=False), yaml.RoundTripLoader)
# pylint: disable=no-member
if hasattr(self.yaml_dict, 'fa'):
tmp_copy.fa.set_block_style()
result = Yedit.add_entry(tmp_copy, path, value, self.separator)
if result:
self.yaml_dict = tmp_copy
return (True, self.yaml_dict)
return (False, self.yaml_dict)
# pylint: disable=too-many-instance-attributes
class OCProcess(OpenShiftCLI):
''' Class to wrap the oc command line tools '''
# pylint allows 5. we need 6
# pylint: disable=too-many-arguments
def __init__(self,
namespace,
tname=None,
params=None,
create=False,
kubeconfig='/etc/origin/master/admin.kubeconfig',
tdata=None,
verbose=False):
''' Constructor for OpenshiftOC '''
super(OCProcess, self).__init__(namespace, kubeconfig)
self.namespace = namespace
self.name = tname
self.data = tdata
self.params = params
self.create = create
self.kubeconfig = kubeconfig
self.verbose = verbose
self._template = None
@property
def template(self):
'''template property'''
if self._template == None:
results = self._process(self.name, False, self.params, self.data)
if results['returncode'] != 0:
raise OpenShiftCLIError('Error processing template [%s].' % self.name)
self._template = results['results']['items']
return self._template
def get(self):
'''get the template'''
results = self._get('template', self.name)
if results['returncode'] != 0:
# Does the template exist??
if 'not found' in results['stderr']:
results['returncode'] = 0
results['exists'] = False
results['results'] = []
return results
def delete(self, obj):
'''delete a resource'''
return self._delete(obj['kind'], obj['metadata']['name'])
def create_obj(self, obj):
'''create a resource'''
return self._create_from_content(obj['metadata']['name'], obj)
def process(self, create=None):
'''process a template'''
do_create = False
if create != None:
do_create = create
else:
do_create = self.create
return self._process(self.name, do_create, self.params, self.data)
def exists(self):
'''return whether the template exists'''
# Always return true if we're being passed template data
if self.data:
return True
t_results = self._get('template', self.name)
if t_results['returncode'] != 0:
# Does the template exist??
if 'not found' in t_results['stderr']:
return False
else:
raise OpenShiftCLIError('Something went wrong. %s' % t_results)
return True
def needs_update(self):
'''attempt to process the template and return it for comparison with oc objects'''
obj_results = []
for obj in self.template:
# build a list of types to skip
skip = []
if obj['kind'] == 'ServiceAccount':
skip.extend(['secrets', 'imagePullSecrets'])
if obj['kind'] == 'BuildConfig':
skip.extend(['lastTriggeredImageID'])
if obj['kind'] == 'ImageStream':
skip.extend(['generation'])
if obj['kind'] == 'DeploymentConfig':
skip.extend(['lastTriggeredImage'])
# fetch the current object
curr_obj_results = self._get(obj['kind'], obj['metadata']['name'])
if curr_obj_results['returncode'] != 0:
# Does the template exist??
if 'not found' in curr_obj_results['stderr']:
obj_results.append((obj, True))
continue
# check the generated object against the existing object
if not Utils.check_def_equal(obj, curr_obj_results['results'][0], skip_keys=skip):
obj_results.append((obj, True))
continue
obj_results.append((obj, False))
return obj_results
# pylint: disable=too-many-branches
def main():
'''
ansible oc module for services
'''
module = AnsibleModule(
argument_spec=dict(
kubeconfig=dict(default='/etc/origin/master/admin.kubeconfig', type='str'),
state=dict(default='present', type='str', choices=['present', 'list']),
debug=dict(default=False, type='bool'),
namespace=dict(default='default', type='str'),
template_name=dict(default=None, type='str'),
content=dict(default=None, type='str'),
params=dict(default=None, type='dict'),
create=dict(default=False, type='bool'),
reconcile=dict(default=True, type='bool'),
),
supports_check_mode=True,
)
ocprocess = OCProcess(module.params['namespace'],
module.params['template_name'],
module.params['params'],
module.params['create'],
kubeconfig=module.params['kubeconfig'],
tdata=module.params['content'],
verbose=module.params['debug'])
state = module.params['state']
api_rval = ocprocess.get()
if state == 'list':
if api_rval['returncode'] != 0:
module.fail_json(msg=api_rval)
module.exit_json(changed=False, results=api_rval, state="list")
elif state == 'present':
if not ocprocess.exists() or not module.params['reconcile']:
#FIXME: this code will never get run in a way that succeeds when
# module.params['reconcile'] is true. Because oc_process doesn't
# create the actual template, the check of ocprocess.exists()
# is meaningless. Either it's already here and this code
# won't be run, or this code will fail because there is no
# template available for oc process to use. Have we conflated
# the template's existence with the existence of the objects
# it describes?
# Create it here
api_rval = ocprocess.process()
if api_rval['returncode'] != 0:
module.fail_json(msg=api_rval)
module.exit_json(changed=True, results=api_rval, state="present")
# verify results
update = False
rval = []
all_results = ocprocess.needs_update()
for obj, status in all_results:
if status:
ocprocess.delete(obj)
results = ocprocess.create_obj(obj)
results['kind'] = obj['kind']
rval.append(results)
update = True
if not update:
module.exit_json(changed=update, results=api_rval, state="present")
for cmd in rval:
if cmd['returncode'] != 0:
module.fail_json(changed=update, results=rval, state="present")
module.exit_json(changed=update, results=rval, state="present")
module.exit_json(failed=True,
changed=False,
results='Unknown state passed. %s' % state,
state="unknown")
# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
# import module snippets. These are required
if __name__ == '__main__':
from ansible.module_utils.basic import *
main()
|
apache-2.0
|
boxuk/ansible-boxuk-modules-passwordstate
|
passwordstate_password/passwordstate_password.py
|
1
|
9460
|
#!/usr/bin/python
""" PasswordState Ansible Module """
from ansible.module_utils.basic import *
import urllib
import urllib2
import json
class PasswordIdException(Exception):
msg = 'Either the password id or the match ' \
'field id and value must be configured'
class Password(object):
""" Password """
def __init__(self, api, password_list_id, matcher):
self.api = api
self.password_list_id = password_list_id
if 'id' in matcher and matcher['id'] != None:
self.password_id = matcher['id']
elif 'field' in matcher and 'field_id' in matcher \
and matcher['field'] != None and matcher['field_id'] != None:
self.match_field = matcher['field']
self.match_field_id = matcher['field_id']
else:
raise PasswordIdException()
@property
def password(self):
""" fetch the password from the api """
return self.api.get_password_fields(self)['Password']
@property
def type(self):
""" the method to uniquely identify the password """
if hasattr(self, 'password_id'):
return 'password_id'
elif hasattr(self, 'match_field') and hasattr(self, 'match_field_id'):
return 'match_field'
raise PasswordIdException()
def update(self, fields):
""" Update the password """
self.api.update(self, fields)
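# Illustrative construction (all values hypothetical):
#   api = PasswordState(module, 'https://passwordstate.example', 'APIKEY')
#   pw = Password(api, '42', {'id': None,
#                             'field': 'GenericField1',
#                             'field_id': 'my-service'})
#   pw.type      # -> 'match_field'
#   pw.password  # fetches the 'Password' field via the API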
class PasswordState(object):
""" PasswordState """
def __init__(self, module, url, api_key):
self.module = module
self.url = url
self.api_key = api_key
def update(self, password, fields):
""" update the password in PasswordState """
if self._password_match(password, fields):
self.module.exit_json(changed=False)
return None
if password.type == 'password_id':
params = {
'PasswordID': password.password_id,
'PasswordListID': password.password_list_id
}
params = PasswordState._merge_dicts(fields, params)
self._raw_request('passwords', 'PUT', params)
elif password.type == 'match_field':
if self._has_password(password):
pid = self._get_password_id(password)
params = {
'PasswordID': pid,
'PasswordListID': password.password_list_id
}
params = PasswordState._merge_dicts(fields, params)
self._raw_request('passwords', 'PUT', params)
else:
if not 'Title' in fields:
self.module.fail_json(msg='Title is required when creating passwords')
return None
params = {
'PasswordListID': password.password_list_id,
password.match_field: password.match_field_id
}
params = PasswordState._merge_dicts(fields, params)
self._raw_request('passwords', 'POST', params)
self.module.exit_json(changed=True)
return None
def get_password_fields(self, password):
""" get the password fields """
if password.type == 'password_id':
return self._get_password_by_id(password.password_id)
elif password.type == 'match_field':
return self._get_password_by_field(password)
def _get_password_by_id(self, password_id):
""" get the password by the password id """
passwords = self._request('passwords/' + str(password_id), 'GET')
if len(passwords) == 0:
self.module.fail_json(msg='Password not found')
return None
if len(passwords) > 1:
self.module.fail_json(msg='Multiple matching passwords found')
return None
return passwords[0]
def _get_password_by_field(self, password):
""" get the password by a specific field """
return self._get_password_by_id(self._get_password_id(password))
def _get_password_id(self, password):
""" get the password id by using a specific field """
uri = 'passwords/' + password.password_list_id + '?QueryAll&ExcludePassword=true'
passwords = self._request(uri, 'GET')
passwords = PasswordState._filter_passwords(
passwords,
password.match_field,
password.match_field_id
)
if len(passwords) == 0:
self.module.fail_json(msg='Password not found')
return None
elif len(passwords) > 1:
self.module.fail_json(msg='Multiple matching passwords found')
return None
return passwords[0]['PasswordID']
def _has_password(self, password):
""" checks if the password exists """
if password.type == 'password_id':
uri = 'passwords/' + password.password_id
passwords = self._request(uri, 'GET')
if len(passwords) == 0:
return False
return True
elif password.type == 'match_field':
plid = password.password_list_id
uri = 'passwords/' + plid + '?QueryAll&ExcludePassword=true'
passwords = self._request(uri, 'GET')
passwords = PasswordState._filter_passwords(
passwords,
password.match_field,
password.match_field_id
)
if len(passwords) == 1:
return True
elif len(passwords) > 1:
self.module.fail_json(msg='Multiple matching passwords found')
return None
return False
def _password_match(self, password, fields):
""" checks if the password entity is up to date """
match = True
if self._has_password(password):
current_password = self.get_password_fields(password)
if 'password' in fields and current_password['Password'] != fields['password']:
match = False
if 'Title' in fields and current_password['Title'] != fields['Title']:
match = False
if 'UserName' in fields and current_password['UserName'] != fields['UserName']:
match = False
else:
match = False
return match
def _request(self, uri, method, params=None):
""" send a request to the api and return as json """
response = self._raw_request(uri, method, params)
if response == False:
return []
return json.loads(response)
def _raw_request(self, uri, method, params=None):
""" send a request to the api and return the raw response """
request = self._create_request(uri, method)
try:
if params:
response = urllib2.urlopen(request, urllib.urlencode(params)).read()
else:
response = urllib2.urlopen(request).read()
except urllib2.URLError as inst:
msg = str(inst)
if "No Passwords found in the Password Lists for PasswordListID of" in msg:
return False
else:
self.module.fail_json(msg="Failed: %s" % str(inst))
return None
return response
def _create_request(self, uri, method):
""" creates a request object """
        request = urllib2.Request(self.url + '/api/' + uri)
request.add_header('APIKey', self.api_key)
request.get_method = lambda: method
return request
@staticmethod
def _filter_passwords(passwords, field, value):
        ''' filter out passwords which do not match the specified field value '''
return [obj for i, obj in enumerate(passwords) if obj[field] == value]
@staticmethod
def _merge_dicts(xray, yankee):
""" merge two dicts """
zulu = xray.copy()
zulu.update(yankee)
return zulu
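# Illustrative playbook usage (all values hypothetical):
#
#   - passwordstate_password:
#       url: 'https://passwordstate.example'
#       api_key: '{{ passwordstate_api_key }}'
#       password_list_id: '100'
#       match_field: 'GenericField1'
#       match_field_id: 'my-service'
#       username: 'svc-user'
#       password: '{{ vaulted_password }}'
#       title: 'My Service'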
def main():
""" main """
module = AnsibleModule(
argument_spec=dict(
state=dict(default='present', choices=['present']),
url=dict(required=True),
api_key=dict(required=True),
password_list_id=dict(required=False),
match_field=dict(required=False),
match_field_id=dict(required=False),
password_id=dict(required=False),
username=dict(required=False),
password=dict(required=False),
title=dict(required=False)
),
supports_check_mode=False,
)
state = module.params['state']
url = module.params['url']
api_key = module.params['api_key']
password_list_id = module.params['password_list_id']
match_field = module.params['match_field']
match_field_id = module.params['match_field_id']
password_id = module.params['password_id']
username = module.params['username']
new_password = module.params['password']
title = module.params['title']
api = PasswordState(module, url, api_key)
password = Password(api, password_list_id,
{"id": password_id, "field": match_field, "field_id": match_field_id})
fields = {}
if title != None:
fields['Title'] = title
if username != None:
fields['UserName'] = username
    if new_password != None:
fields['password'] = new_password
if state == "present":
password.update(fields)
if __name__ == '__main__':
main()
|
mit
|
nekohayo/snowy
|
lib/south/management/commands/startmigration.py
|
4
|
34660
|
"""
Startmigration command, version 2.
"""
import sys
import os
import re
import string
import random
import inspect
import parser
from optparse import make_option
from django.core.management.base import BaseCommand
from django.core.management.color import no_style
from django.db import models
from django.db.models.fields.related import RECURSIVE_RELATIONSHIP_CONSTANT
from django.contrib.contenttypes.generic import GenericRelation
from django.db.models.fields import FieldDoesNotExist
from django.conf import settings
try:
set
except NameError:
from sets import Set as set
from south import migration, modelsparser
class Command(BaseCommand):
option_list = BaseCommand.option_list + (
make_option('--model', action='append', dest='added_model_list', type='string',
help='Generate a Create Table migration for the specified model. Add multiple models to this migration with subsequent --model parameters.'),
make_option('--add-field', action='append', dest='added_field_list', type='string',
help='Generate an Add Column migration for the specified modelname.fieldname - you can use this multiple times to add more than one column.'),
make_option('--initial', action='store_true', dest='initial', default=False,
help='Generate the initial schema for the app.'),
make_option('--auto', action='store_true', dest='auto', default=False,
help='Attempt to automatically detect differences from the last migration.'),
make_option('--freeze', action='append', dest='freeze_list', type='string',
help='Freeze the specified model(s). Pass in either an app name (to freeze the whole app) or a single model, as appname.modelname.'),
)
help = "Creates a new template migration for the given app"
def handle(self, app=None, name="", added_model_list=None, added_field_list=None, initial=False, freeze_list=None, auto=False, **options):
# Any supposed lists that are None become empty lists
added_model_list = added_model_list or []
added_field_list = added_field_list or []
        # Make sure options are compatible
if initial and (added_model_list or added_field_list or auto):
print "You cannot use --initial and other options together"
return
if auto and (added_model_list or added_field_list or initial):
print "You cannot use --auto and other options together"
return
# specify the default name 'initial' if a name wasn't specified and we're
# doing a migration for an entire app
if not name and initial:
name = 'initial'
# if not name, there's an error
if not name:
print "You must name this migration"
return
if not app:
print "Please provide an app in which to create the migration."
return
# Make sure the app is short form
app = app.split(".")[-1]
# See if the app exists
app_models_module = models.get_app(app)
if not app_models_module:
print "App '%s' doesn't seem to exist, isn't in INSTALLED_APPS, or has no models." % app
return
# If they've set SOUTH_AUTO_FREEZE_APP = True (or not set it - defaults to True)
if not hasattr(settings, 'SOUTH_AUTO_FREEZE_APP') or settings.SOUTH_AUTO_FREEZE_APP:
if freeze_list and app not in freeze_list:
freeze_list += [app]
else:
freeze_list = [app]
# Make the migrations directory if it's not there
app_module_path = app_models_module.__name__.split('.')[0:-1]
try:
app_module = __import__('.'.join(app_module_path), {}, {}, [''])
except ImportError:
print "Couldn't find path to App '%s'." % app
return
migrations_dir = os.path.join(
os.path.dirname(app_module.__file__),
"migrations",
)
# Make sure there's a migrations directory and __init__.py
if not os.path.isdir(migrations_dir):
print "Creating migrations directory at '%s'..." % migrations_dir
os.mkdir(migrations_dir)
init_path = os.path.join(migrations_dir, "__init__.py")
if not os.path.isfile(init_path):
# Touch the init py file
print "Creating __init__.py in '%s'..." % migrations_dir
open(init_path, "w").close()
# See what filename is next in line. We assume they use numbers.
migrations = migration.get_migration_names(migration.get_app(app))
highest_number = 0
for migration_name in migrations:
try:
number = int(migration_name.split("_")[0])
highest_number = max(highest_number, number)
except ValueError:
pass
# Make the new filename
new_filename = "%04i%s_%s.py" % (
highest_number + 1,
"".join([random.choice(string.letters.lower()) for i in range(0)]), # Possible random stuff insertion
name,
)
# Find the source file encoding, using PEP 0263's method
encoding = None
first_two_lines = inspect.getsourcelines(app_models_module)[0][:2]
for line in first_two_lines:
if re.search("coding[:=]\s*([-\w.]+)", line):
encoding = line
# Initialise forwards, backwards and models to blank things
forwards = ""
backwards = ""
frozen_models = {} # Frozen models, used by the Fake ORM
stub_models = {} # Frozen models, but only enough for relation ends (old mock models)
complete_apps = set() # Apps that are completely frozen - useable for diffing.
# Sets of actions
added_models = set()
deleted_models = [] # Special: contains instances _not_ string keys
added_fields = set()
deleted_fields = [] # Similar to deleted_models
changed_fields = [] # (mkey, fname, old_def, new_def)
added_uniques = set() # (mkey, field_names)
deleted_uniques = set() # (mkey, field_names)
# --initial means 'add all models in this app'.
if initial:
for model in models.get_models(app_models_module):
added_models.add("%s.%s" % (app, model._meta.object_name))
# Added models might be 'model' or 'app.model'.
for modelname in added_model_list:
if "." in modelname:
added_models.add(modelname)
else:
added_models.add("%s.%s" % (app, modelname))
# Fields need translating from "model.field" to (app.model, field)
for fielddef in added_field_list:
try:
modelname, fieldname = fielddef.split(".", 1)
except ValueError:
print "The field specification '%s' is not in modelname.fieldname format." % fielddef
else:
added_fields.add(("%s.%s" % (app, modelname), fieldname))
# Add anything frozen (I almost called the dict Iceland...)
if freeze_list:
for item in freeze_list:
if "." in item:
# It's a specific model
app_name, model_name = item.split(".", 1)
model = models.get_model(app_name, model_name)
if model is None:
print "Cannot find the model '%s' to freeze it." % item
return
frozen_models[model] = None
else:
# Get everything in an app!
frozen_models.update(dict([(x, None) for x in models.get_models(models.get_app(item))]))
complete_apps.add(item.split(".")[-1])
# For every model in the freeze list, add in dependency stubs
for model in frozen_models:
stub_models.update(model_dependencies(model))
### Automatic Detection ###
if auto:
# Get the last migration for this app
last_models = None
app_module = migration.get_app(app)
if app_module is None:
print "You cannot use automatic detection on the first migration of an app. Try --initial instead."
else:
migrations = list(migration.get_migration_classes(app_module))
if not migrations:
print "You cannot use automatic detection on the first migration of an app. Try --initial instead."
else:
if hasattr(migrations[-1], "complete_apps") and \
app in migrations[-1].complete_apps:
last_models = migrations[-1].models
last_orm = migrations[-1].orm
else:
print "You cannot use automatic detection, since the previous migration does not have this whole app frozen.\nEither make migrations using '--freeze %s' or set 'SOUTH_AUTO_FREEZE_APP = True' in your settings.py." % app
# Right, did we manage to get the last set of models?
if last_models is None:
return
# Good! Get new things.
new = dict([
(model_key(model), prep_for_freeze(model))
for model in models.get_models(app_models_module)
])
# And filter other apps out of the old
old = dict([
(key, fields)
for key, fields in last_models.items()
if key.split(".", 1)[0] == app
])
am, dm, cm, af, df, cf = models_diff(old, new)
# For models that were there before and after, do a meta diff
was_meta_change = False
for mkey in cm:
au, du = meta_diff(old[mkey].get("Meta", {}), new[mkey].get("Meta", {}))
for entry in au:
added_uniques.add((mkey, entry))
was_meta_change = True
for entry in du:
deleted_uniques.add((mkey, entry))
was_meta_change = True
if not (am or dm or af or df or cf or was_meta_change):
print "Nothing seems to have changed."
return
# Add items to the todo lists
added_models.update(am)
added_fields.update(af)
changed_fields.extend(cf)
# Deleted models are from the past, and so we use instances instead.
for mkey in dm:
model = last_orm[mkey]
fields = last_models[mkey]
if "Meta" in fields:
del fields['Meta']
deleted_models.append((model, fields, last_models))
# For deleted fields, we tag the instance on the end too
for mkey, fname in df:
deleted_fields.append((
mkey,
fname,
last_orm[mkey]._meta.get_field_by_name(fname)[0],
last_models[mkey][fname],
last_models,
))
### Added model ###
for mkey in added_models:
print " + Added model '%s'" % (mkey,)
model = model_unkey(mkey)
# Add the model's dependencies to the stubs
stub_models.update(model_dependencies(model))
# Get the field definitions
fields = modelsparser.get_model_fields(model)
# Turn the (class, args, kwargs) format into a string
fields = triples_to_defs(app, model, fields)
# Make the code
forwards += CREATE_TABLE_SNIPPET % (
model._meta.object_name,
model._meta.db_table,
"\n ".join(["('%s', %s)," % (fname, fdef) for fname, fdef in fields.items()]),
model._meta.app_label,
model._meta.object_name,
)
# And the backwards code
backwards += DELETE_TABLE_SNIPPET % (
model._meta.object_name,
model._meta.db_table
)
# Now add M2M fields to be done
for field in model._meta.local_many_to_many:
added_fields.add((mkey, field.attname))
# And unique_togethers to be added
for ut in model._meta.unique_together:
added_uniques.add((mkey, tuple(ut)))
### Added fields ###
for mkey, field_name in added_fields:
print " + Added field '%s.%s'" % (mkey, field_name)
# Get the model
model = model_unkey(mkey)
# Get the field
try:
field = model._meta.get_field(field_name)
except FieldDoesNotExist:
print "Model '%s' doesn't have a field '%s'" % (mkey, field_name)
return
# ManyToMany fields need special attention.
if isinstance(field, models.ManyToManyField):
if not field.rel.through: # Bug #120
# Add a stub model for each side
stub_models[model] = None
stub_models[field.rel.to] = None
# And a field defn, that's actually a table creation
forwards += CREATE_M2MFIELD_SNIPPET % (
model._meta.object_name,
field.name,
field.m2m_db_table(),
field.m2m_column_name()[:-3], # strip off the '_id' at the end
model._meta.object_name,
field.m2m_reverse_name()[:-3], # strip off the '_id' at the end
field.rel.to._meta.object_name
)
backwards += DELETE_M2MFIELD_SNIPPET % (
model._meta.object_name,
field.name,
field.m2m_db_table()
)
continue
# GenericRelations need ignoring
if isinstance(field, GenericRelation):
continue
# Add any dependencies
stub_models.update(field_dependencies(field))
# Work out the definition
triple = remove_useless_attributes(
modelsparser.get_model_fields(model)[field_name])
field_definition = make_field_constructor(app, field, triple)
forwards += CREATE_FIELD_SNIPPET % (
model._meta.object_name,
field.name,
model._meta.db_table,
field.name,
field_definition,
)
backwards += DELETE_FIELD_SNIPPET % (
model._meta.object_name,
field.name,
model._meta.db_table,
field.column,
)
### Deleted fields ###
for mkey, field_name, field, triple, last_models in deleted_fields:
print " - Deleted field '%s.%s'" % (mkey, field_name)
# Get the model
model = model_unkey(mkey)
# ManyToMany fields need special attention.
if isinstance(field, models.ManyToManyField):
# Add a stub model for each side, if they're not already there
# (if we just added old versions, we might override new ones)
if model not in stub_models:
stub_models[model] = last_models
if field.rel.to not in last_models:
stub_models[field.rel.to] = last_models
# And a field defn, that's actually a table deletion
forwards += DELETE_M2MFIELD_SNIPPET % (
model._meta.object_name,
field.name,
field.m2m_db_table()
)
backwards += CREATE_M2MFIELD_SNIPPET % (
model._meta.object_name,
field.name,
field.m2m_db_table(),
field.m2m_column_name()[:-3], # strip off the '_id' at the end
model._meta.object_name,
field.m2m_reverse_name()[:-3], # strip off the '_id' at the end
field.rel.to._meta.object_name
)
continue
# Add any dependencies
deps = field_dependencies(field, last_models)
deps.update(stub_models)
stub_models = deps
# Work out the definition
triple = remove_useless_attributes(triple)
field_definition = make_field_constructor(app, field, triple)
forwards += DELETE_FIELD_SNIPPET % (
model._meta.object_name,
field.name,
model._meta.db_table,
field.column,
)
backwards += CREATE_FIELD_SNIPPET % (
model._meta.object_name,
field.name,
model._meta.db_table,
field.name,
field_definition,
)
### Deleted model ###
for model, fields, last_models in deleted_models:
print " - Deleted model '%s.%s'" % (model._meta.app_label,model._meta.object_name)
# Add the model's dependencies to the stubs
deps = model_dependencies(model, last_models)
deps.update(stub_models)
stub_models = deps
# Turn the (class, args, kwargs) format into a string
fields = triples_to_defs(app, model, fields)
# Make the code
forwards += DELETE_TABLE_SNIPPET % (
model._meta.object_name,
model._meta.db_table
)
# And the backwards code
backwards += CREATE_TABLE_SNIPPET % (
model._meta.object_name,
model._meta.db_table,
"\n ".join(["('%s', %s)," % (fname, fdef) for fname, fdef in fields.items()]),
model._meta.app_label,
model._meta.object_name,
)
### Changed fields ###
for mkey, field_name, old_triple, new_triple in changed_fields:
model = model_unkey(mkey)
old_def = triples_to_defs(app, model, {
field_name: old_triple,
})[field_name]
new_def = triples_to_defs(app, model, {
field_name: new_triple,
})[field_name]
# We need to create the field, to see if it needs _id, or if it's an M2M
field = model._meta.get_field_by_name(field_name)[0]
if hasattr(field, "m2m_db_table"):
# See if anything has ACTUALLY changed
if old_triple[1] != new_triple[1]:
print " ! Detected change to the target model of M2M field '%s.%s'. South can't handle this; leaving this change out." % (mkey, field_name)
continue
print " ~ Changed field '%s.%s'." % (mkey, field_name)
forwards += CHANGE_FIELD_SNIPPET % (
model._meta.object_name,
field_name,
model._meta.db_table,
field.get_attname(),
new_def,
)
backwards += CHANGE_FIELD_SNIPPET % (
model._meta.object_name,
field_name,
model._meta.db_table,
field.get_attname(),
old_def,
)
### Added unique_togethers ###
for mkey, ut in added_uniques:
model = model_unkey(mkey)
print " + Added unique_together for [%s] on %s." % (", ".join(ut), model._meta.object_name)
cols = [get_field_column(model, f) for f in ut]
forwards += CREATE_UNIQUE_SNIPPET % (
", ".join(ut),
model._meta.object_name,
model._meta.db_table,
cols,
)
backwards += DELETE_UNIQUE_SNIPPET % (
", ".join(ut),
model._meta.object_name,
model._meta.db_table,
cols,
)
### Deleted unique_togethers ###
for mkey, ut in deleted_uniques:
model = model_unkey(mkey)
print " - Deleted unique_together for [%s] on %s." % (", ".join(ut), model._meta.object_name)
forwards += DELETE_UNIQUE_SNIPPET % (
", ".join(ut),
model._meta.object_name,
model._meta.db_table,
ut,
)
backwards += CREATE_UNIQUE_SNIPPET % (
", ".join(ut),
model._meta.object_name,
model._meta.db_table,
ut,
)
# Default values for forwards/backwards
if (not forwards) and (not backwards):
forwards = '"Write your forwards migration here"'
backwards = '"Write your backwards migration here"'
all_models = {}
# Fill out frozen model definitions
for model, last_models in frozen_models.items():
all_models[model_key(model)] = prep_for_freeze(model, last_models)
# Fill out stub model definitions
for model, last_models in stub_models.items():
key = model_key(model)
if key in all_models:
continue # We'd rather use full models than stubs.
all_models[key] = prep_for_stub(model, last_models)
# Do some model cleanup, and warnings
for modelname, model in all_models.items():
for fieldname, fielddef in model.items():
# Remove empty-after-cleaning Metas.
if fieldname == "Meta" and not fielddef:
del model['Meta']
# Warn about undefined fields
elif fielddef is None:
print "WARNING: Cannot get definition for '%s' on '%s'. Please edit the migration manually." % (
fieldname,
modelname,
)
model[fieldname] = FIELD_NEEDS_DEF_SNIPPET
# Write the migration file
fp = open(os.path.join(migrations_dir, new_filename), "w")
fp.write(MIGRATION_SNIPPET % (
encoding or "", '.'.join(app_module_path),
forwards,
backwards,
pprint_frozen_models(all_models),
complete_apps and "complete_apps = [%s]" % (", ".join(map(repr, complete_apps))) or ""
))
fp.close()
print "Created %s." % new_filename
### Cleaning functions for freezing
def prep_for_freeze(model, last_models=None):
if last_models:
fields = last_models[model_key(model)]
else:
fields = modelsparser.get_model_fields(model, m2m=True)
# Remove useless attributes (like 'choices')
for name, field in fields.items():
fields[name] = remove_useless_attributes(field)
# See if there's a Meta
if last_models:
meta = last_models[model_key(model)].get("Meta", {})
else:
meta = modelsparser.get_model_meta(model)
if meta:
fields['Meta'] = remove_useless_meta(meta)
return fields
def prep_for_stub(model, last_models=None):
if last_models:
fields = last_models[model_key(model)]
else:
fields = modelsparser.get_model_fields(model)
# Now, take only the PK (and a 'we're a stub' field) and freeze 'em
pk = model._meta.pk.name
fields = {
pk: remove_useless_attributes(fields[pk]),
"_stub": True,
}
# Meta is important too.
if last_models:
meta = last_models[model_key(model)].get("Meta", {})
else:
meta = modelsparser.get_model_meta(model)
if meta:
fields['Meta'] = remove_useless_meta(meta)
return fields
### Module handling functions
def model_key(model):
"For a given model, return 'appname.modelname'."
return ("%s.%s" % (model._meta.app_label, model._meta.object_name)).lower()
def model_unkey(key):
"For 'appname.modelname', return the model."
app, modelname = key.split(".", 1)
model = models.get_model(app, modelname)
if not model:
print "Couldn't find model '%s' in app '%s'" % (modelname, app)
sys.exit(1)
return model
### Dependency resolvers
def model_dependencies(model, last_models=None):
"""
Returns a dict keyed by the models this one depends on to be defined;
things like OneToOneFields as ID, ForeignKeys everywhere, etc.
"""
depends = {}
for field in model._meta.fields + model._meta.many_to_many:
depends.update(field_dependencies(field, last_models))
return depends
def stub_model_dependencies(model, last_models=None):
"""
Returns a dict keyed by the models this one depends on to be defined as
a stub model (i.e. deps of the PK).
"""
return field_dependencies(model._meta.pk, last_models)
def field_dependencies(field, last_models=None):
depends = {}
if isinstance(field, (models.OneToOneField, models.ForeignKey, models.ManyToManyField)):
depends[field.rel.to] = last_models
depends.update(stub_model_dependencies(field.rel.to, last_models))
return depends
### Prettyprinters
def pprint_frozen_models(models):
return "{\n %s\n }" % ",\n ".join([
"%r: %s" % (name, pprint_fields(fields))
for name, fields in models.items()
])
def pprint_fields(fields):
return "{\n %s\n }" % ",\n ".join([
"%r: %r" % (name, defn)
for name, defn in sorted(fields.items())
])
### Output sanitisers
USELESS_KEYWORDS = ["choices", "help_text"]
USELESS_DB_KEYWORDS = ["related_name", "upload_to"] # Important for ORM, not for DB.
def remove_useless_attributes(field, db=False):
"Removes useless (for database) attributes from the field's defn."
keywords = db and USELESS_DB_KEYWORDS or USELESS_KEYWORDS
if field:
for name in keywords:
if name in field[2]:
del field[2][name]
return field
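# A minimal sketch of the cleaning above (hypothetical CharField triple;
# triples hold source text, so kwarg values are themselves strings):
#   >>> remove_useless_attributes(
#   ...     ('models.CharField', [], {'max_length': '100', 'help_text': "'x'"}))
#   ('models.CharField', [], {'max_length': '100'})
# With db=True it would instead drop 'related_name'/'upload_to', which matter
# to the ORM but not to the generated schema.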
USELESS_META = ["verbose_name", "verbose_name_plural"]
def remove_useless_meta(meta):
"Removes useless (for database) attributes from the table's meta."
if meta:
for name in USELESS_META:
if name in meta:
del meta[name]
return meta
### Turns (class, args, kwargs) triples into function defs.
def make_field_constructor(default_app, field, triple):
"""
Given the default app, the field class,
and the defn triple (or string), make the definition string.
"""
# It might be a defn string already...
if isinstance(triple, (str, unicode)):
return triple
# OK, do it the hard way
if hasattr(field, "rel") and hasattr(field.rel, "to") and field.rel.to:
rel_to = field.rel.to
else:
rel_to = None
args = [poss_ormise(default_app, rel_to, arg) for arg in triple[1]]
kwds = ["%s=%s" % (k, poss_ormise(default_app, rel_to, v)) for k,v in triple[2].items()]
return "%s(%s)" % (triple[0], ", ".join(args+kwds))
QUOTES = ['"""', "'''", '"', "'"]
def poss_ormise(default_app, rel_to, arg):
"""
Given the name of something that needs orm. stuck on the front and
a python eval-able string, possibly add orm. to it.
"""
orig_arg = arg
# If it's not a relation field, short-circuit out
if not rel_to:
return arg
# Get the name of the other model
rel_name = rel_to._meta.object_name
# Is it in a different app? If so, use proper addressing.
if rel_to._meta.app_label != default_app:
real_name = "orm['%s.%s']" % (rel_to._meta.app_label, rel_name)
else:
real_name = "orm.%s" % rel_name
# If it's surrounded by quotes, get rid of those
for quote_type in QUOTES:
l = len(quote_type)
if arg[:l] == quote_type and arg[-l:] == quote_type:
arg = arg[l:-l]
break
# Now see if we can replace it.
if arg.lower() == rel_name.lower():
return real_name
# Or perhaps it's app.model?
if arg.lower() == rel_to._meta.app_label.lower() + "." + rel_name.lower():
return real_name
# Or perhaps it's 'self'?
if arg == RECURSIVE_RELATIONSHIP_CONSTANT:
return real_name
return orig_arg
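# A rough sketch of the rewriting above (hypothetical 'blog' app whose Post
# model is the rel_to):
#   "'Post'"      -> "orm.Post"       (bare model name, same app)
#   "'blog.Post'" -> "orm.Post"       (app.model form, same app)
#   "'self'"      -> "orm.Post"       (recursive relationship)
# A rel_to living in another app would instead produce the
# "orm['otherapp.Model']" form; anything unrecognised is returned unchanged.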
### Diffing functions between sets of models
def models_diff(old, new):
"""
Returns the difference between the old and new sets of models as a 6-tuple:
added_models, deleted_models, continued_models, added_fields, deleted_fields, changed_fields
"""
added_models = set()
deleted_models = set()
ignored_models = set() # Stubs for backwards
continued_models = set() # Models that existed before and after
added_fields = set()
deleted_fields = set()
changed_fields = []
# See if anything's vanished
for key in old:
if key not in new:
if "_stub" not in old[key]:
deleted_models.add(key)
else:
ignored_models.add(key)
# Or appeared
for key in new:
if key not in old:
added_models.add(key)
# Now, for every model that's stayed the same, check its fields.
for key in old:
if key not in deleted_models and key not in ignored_models:
continued_models.add(key)
still_there = set()
# Find fields that have vanished.
for fieldname in old[key]:
if fieldname != "Meta" and fieldname not in new[key]:
deleted_fields.add((key, fieldname))
else:
still_there.add(fieldname)
# And ones that have appeared
for fieldname in new[key]:
if fieldname != "Meta" and fieldname not in old[key]:
added_fields.add((key, fieldname))
# For the ones that exist in both models, see if they were changed
for fieldname in still_there:
if fieldname != "Meta" and \
remove_useless_attributes(new[key][fieldname], True) != \
remove_useless_attributes(old[key][fieldname], True):
changed_fields.append((key, fieldname, old[key][fieldname], new[key][fieldname]))
return added_models, deleted_models, continued_models, added_fields, deleted_fields, changed_fields
def meta_diff(old, new):
"""
Diffs the two provided Meta definitions (dicts).
"""
# First, diff unique_together
old_unique_together = eval(old.get('unique_together', "[]"))
new_unique_together = eval(new.get('unique_together', "[]"))
added_uniques = set()
removed_uniques = set()
for entry in old_unique_together:
if entry not in new_unique_together:
removed_uniques.add(tuple(entry))
for entry in new_unique_together:
if entry not in old_unique_together:
added_uniques.add(tuple(entry))
return added_uniques, removed_uniques
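# Worked example (hypothetical Meta dicts; unique_together is stored as
# source text, hence the eval() above):
#   old = {'unique_together': "[('a', 'b')]"}
#   new = {'unique_together': "[('a', 'c')]"}
#   meta_diff(old, new) -> ({('a', 'c')}, {('a', 'b')})  # (added, removed)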
### Used to work out what columns any fields affect ###
def get_field_column(model, field_name):
return model._meta.get_field_by_name(field_name)[0].column
### Creates SQL snippets for various common operations
def triples_to_defs(app, model, fields):
# Turn the (class, args, kwargs) format into a string
for field, triple in fields.items():
triple = remove_useless_attributes(triple)
if triple is None:
print "WARNING: Cannot get definition for '%s' on '%s'. Please edit the migration manually." % (
field,
model_key(model),
)
fields[field] = FIELD_NEEDS_DEF_SNIPPET
else:
fields[field] = make_field_constructor(
app,
model._meta.get_field_by_name(field)[0],
triple,
)
return fields
### Various code snippets we need to use
MIGRATION_SNIPPET = """%s
from south.db import db
from django.db import models
from %s.models import *
class Migration:
def forwards(self, orm):
%s
def backwards(self, orm):
%s
models = %s
%s
"""
CREATE_TABLE_SNIPPET = '''
# Adding model '%s'
db.create_table(%r, (
%s
))
db.send_create_signal(%r, [%r])
'''
DELETE_TABLE_SNIPPET = '''
# Deleting model '%s'
db.delete_table(%r)
'''
CREATE_FIELD_SNIPPET = '''
# Adding field '%s.%s'
db.add_column(%r, %r, %s)
'''
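# For reference, a rendered CREATE_FIELD_SNIPPET looks roughly like this
# (hypothetical table and field definition):
#
#     # Adding field 'Post.title'
#     db.add_column('blog_post', 'title', models.CharField(max_length=100))
#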
DELETE_FIELD_SNIPPET = '''
# Deleting field '%s.%s'
db.delete_column(%r, %r)
'''
CHANGE_FIELD_SNIPPET = '''
# Changing field '%s.%s'
db.alter_column(%r, %r, %s)
'''
CREATE_M2MFIELD_SNIPPET = '''
# Adding ManyToManyField '%s.%s'
db.create_table('%s', (
('id', models.AutoField(verbose_name='ID', primary_key=True, auto_created=True)),
('%s', models.ForeignKey(%s, null=False)),
('%s', models.ForeignKey(%s, null=False))
))
'''
DELETE_M2MFIELD_SNIPPET = '''
# Dropping ManyToManyField '%s.%s'
db.delete_table('%s')
'''
CREATE_UNIQUE_SNIPPET = '''
# Creating unique_together for [%s] on %s.
db.create_unique(%r, %r)
'''
DELETE_UNIQUE_SNIPPET = '''
# Deleting unique_together for [%s] on %s.
db.delete_unique(%r, %r)
'''
FIELD_NEEDS_DEF_SNIPPET = "<< PUT FIELD DEFINITION HERE >>"
|
agpl-3.0
|
beni55/edx-platform
|
lms/lib/xblock/test/test_mixin.py
|
37
|
14587
|
"""
Tests of the LMS XBlock Mixin
"""
import ddt
from xblock.validation import ValidationMessage
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.factories import CourseFactory, ItemFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase, TEST_DATA_MIXED_TOY_MODULESTORE
from xmodule.partitions.partitions import Group, UserPartition
class LmsXBlockMixinTestCase(ModuleStoreTestCase):
"""
Base class for XBlock mixin test cases. A simple course with a single user partition is created
in setUp for all subclasses to use.
"""
def build_course(self):
"""
Build up a course tree with a UserPartition.
"""
# pylint: disable=attribute-defined-outside-init
self.user_partition = UserPartition(
0,
'first_partition',
'First Partition',
[
Group(0, 'alpha'),
Group(1, 'beta')
]
)
self.group1 = self.user_partition.groups[0] # pylint: disable=no-member
self.group2 = self.user_partition.groups[1] # pylint: disable=no-member
self.course = CourseFactory.create(user_partitions=[self.user_partition])
section = ItemFactory.create(parent=self.course, category='chapter', display_name='Test Section')
subsection = ItemFactory.create(parent=section, category='sequential', display_name='Test Subsection')
vertical = ItemFactory.create(parent=subsection, category='vertical', display_name='Test Unit')
video = ItemFactory.create(parent=vertical, category='video', display_name='Test Video 1')
self.section_location = section.location
self.subsection_location = subsection.location
self.vertical_location = vertical.location
self.video_location = video.location
def set_group_access(self, block_location, access_dict):
"""
Sets the group_access dict on the block referenced by block_location.
"""
block = self.store.get_item(block_location)
block.group_access = access_dict
self.store.update_item(block, 1)
class XBlockValidationTest(LmsXBlockMixinTestCase):
"""
Unit tests for XBlock validation
"""
def setUp(self):
super(XBlockValidationTest, self).setUp()
self.build_course()
def verify_validation_message(self, message, expected_message, expected_message_type):
"""
Verify that the validation message has the expected validation message and type.
"""
self.assertEqual(message.text, expected_message)
self.assertEqual(message.type, expected_message_type)
def test_validate_full_group_access(self):
"""
Test the validation messages produced for an xblock with full group access.
"""
validation = self.store.get_item(self.video_location).validate()
self.assertEqual(len(validation.messages), 0)
def test_validate_restricted_group_access(self):
"""
Test the validation messages produced for an xblock with a valid group access restriction
"""
self.set_group_access(self.video_location, {self.user_partition.id: [self.group1.id, self.group2.id]})
validation = self.store.get_item(self.video_location).validate()
self.assertEqual(len(validation.messages), 0)
def test_validate_invalid_user_partitions(self):
"""
Test the validation messages produced for an xblock referring to non-existent user partitions.
"""
self.set_group_access(self.video_location, {999: [self.group1.id]})
validation = self.store.get_item(self.video_location).validate()
self.assertEqual(len(validation.messages), 1)
self.verify_validation_message(
validation.messages[0],
u"This component refers to deleted or invalid content group configurations.",
ValidationMessage.ERROR,
)
# Now add a second invalid user partition and validate again.
# Note that even though there are two invalid configurations,
# only a single error message will be returned.
self.set_group_access(self.video_location, {998: [self.group2.id]})
validation = self.store.get_item(self.video_location).validate()
self.assertEqual(len(validation.messages), 1)
self.verify_validation_message(
validation.messages[0],
u"This component refers to deleted or invalid content group configurations.",
ValidationMessage.ERROR,
)
def test_validate_invalid_groups(self):
"""
Test the validation messages produced for an xblock referring to non-existent groups.
"""
self.set_group_access(self.video_location, {self.user_partition.id: [self.group1.id, 999]})
validation = self.store.get_item(self.video_location).validate()
self.assertEqual(len(validation.messages), 1)
self.verify_validation_message(
validation.messages[0],
u"This component refers to deleted or invalid content groups.",
ValidationMessage.ERROR,
)
# Now try again with two invalid group ids
self.set_group_access(self.video_location, {self.user_partition.id: [self.group1.id, 998, 999]})
validation = self.store.get_item(self.video_location).validate()
self.assertEqual(len(validation.messages), 1)
self.verify_validation_message(
validation.messages[0],
u"This component refers to deleted or invalid content groups.",
ValidationMessage.ERROR,
)
class OpenAssessmentBlockMixinTestCase(ModuleStoreTestCase):
"""
Tests for OpenAssessmentBlock mixin.
"""
def setUp(self):
super(OpenAssessmentBlockMixinTestCase, self).setUp()
self.course = CourseFactory.create()
self.section = ItemFactory.create(parent=self.course, category='chapter', display_name='Test Section')
self.open_assessment = ItemFactory.create(
parent=self.section,
category="openassessment",
display_name="untitled",
)
def test_has_score(self):
"""
Test has_score is true for ora2 problems.
"""
self.assertTrue(self.open_assessment.has_score)
@ddt.ddt
class XBlockGetParentTest(LmsXBlockMixinTestCase):
"""
Test that XBlock.get_parent returns correct results with each modulestore
backend.
"""
MODULESTORE = TEST_DATA_MIXED_TOY_MODULESTORE
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split, ModuleStoreEnum.Type.xml)
def test_parents(self, modulestore_type):
with self.store.default_store(modulestore_type):
# setting up our own local course tree here, since it needs to be
# created with the correct modulestore type.
if modulestore_type == 'xml':
course_key = self.store.make_course_key('edX', 'toy', '2012_Fall')
else:
course_key = self.create_toy_course('edX', 'toy', '2012_Fall_copy')
course = self.store.get_course(course_key)
self.assertIsNone(course.get_parent())
def recurse(parent):
"""
Descend the course tree and ensure the result of get_parent()
is the expected one.
"""
visited = []
for child in parent.get_children():
self.assertEqual(parent.location, child.get_parent().location)
visited.append(child)
visited += recurse(child)
return visited
visited = recurse(course)
self.assertEqual(len(visited), 28)
@ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
def test_parents_draft_content(self, modulestore_type):
# move the video to the new vertical
with self.store.default_store(modulestore_type):
self.build_course()
subsection = self.store.get_item(self.subsection_location)
new_vertical = ItemFactory.create(parent=subsection, category='vertical', display_name='New Test Unit')
child_to_move_location = self.video_location.for_branch(None)
new_parent_location = new_vertical.location.for_branch(None)
old_parent_location = self.vertical_location.for_branch(None)
with self.store.branch_setting(ModuleStoreEnum.Branch.draft_preferred):
self.assertIsNone(self.course.get_parent())
with self.store.bulk_operations(self.course.id):
user_id = ModuleStoreEnum.UserID.test
old_parent = self.store.get_item(old_parent_location)
old_parent.children.remove(child_to_move_location)
self.store.update_item(old_parent, user_id)
new_parent = self.store.get_item(new_parent_location)
new_parent.children.append(child_to_move_location)
self.store.update_item(new_parent, user_id)
# re-fetch video from draft store
video = self.store.get_item(child_to_move_location)
self.assertEqual(
new_parent_location,
video.get_parent().location
)
with self.store.branch_setting(ModuleStoreEnum.Branch.published_only):
# re-fetch video from published store
video = self.store.get_item(child_to_move_location)
self.assertEqual(
old_parent_location,
video.get_parent().location.for_branch(None)
)
class RenamedTuple(tuple): # pylint: disable=incomplete-protocol
"""
This class is only used to allow overriding __name__ on the tuples passed
through ddt, in order to have the generated test names make sense.
"""
pass
def ddt_named(parent, child):
"""
Helper to get more readable dynamically-generated test names from ddt.
"""
args = RenamedTuple([parent, child])
setattr(args, '__name__', 'parent_{}_child_{}'.format(parent, child))
return args
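# With this helper, ddt generates readable method names such as
# test_intersecting_groups_..._parent_section_location_child_video_location
# (the exact numeric infix depends on the ddt version) instead of opaque
# _1, _2, ... suffixes.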
@ddt.ddt
class XBlockMergedGroupAccessTest(LmsXBlockMixinTestCase):
"""
Test that XBlock.merged_group_access is computed correctly according to
our access control rules.
"""
PARTITION_1 = 1
PARTITION_1_GROUP_1 = 11
PARTITION_1_GROUP_2 = 12
PARTITION_2 = 2
PARTITION_2_GROUP_1 = 21
PARTITION_2_GROUP_2 = 22
PARENT_CHILD_PAIRS = (
ddt_named('section_location', 'subsection_location'),
ddt_named('section_location', 'vertical_location'),
ddt_named('section_location', 'video_location'),
ddt_named('subsection_location', 'vertical_location'),
ddt_named('subsection_location', 'video_location'),
)
def setUp(self):
super(XBlockMergedGroupAccessTest, self).setUp()
self.build_course()
def verify_group_access(self, block_location, expected_dict):
"""
Verify the expected value for the block's group_access.
"""
block = self.store.get_item(block_location)
self.assertEqual(block.merged_group_access, expected_dict)
@ddt.data(*PARENT_CHILD_PAIRS)
@ddt.unpack
def test_intersecting_groups(self, parent, child):
"""
When merging group_access on a block, the resulting group IDs for each
partition are the intersection of the group IDs defined for that
partition across all ancestor blocks (including this one).
"""
parent_block = getattr(self, parent)
child_block = getattr(self, child)
self.set_group_access(parent_block, {self.PARTITION_1: [self.PARTITION_1_GROUP_1, self.PARTITION_1_GROUP_2]})
self.set_group_access(child_block, {self.PARTITION_1: [self.PARTITION_1_GROUP_2]})
self.verify_group_access(parent_block, {self.PARTITION_1: [self.PARTITION_1_GROUP_1, self.PARTITION_1_GROUP_2]})
self.verify_group_access(child_block, {self.PARTITION_1: [self.PARTITION_1_GROUP_2]})
@ddt.data(*PARENT_CHILD_PAIRS)
@ddt.unpack
def test_disjoint_groups(self, parent, child):
"""
When merging group_access on a block, if the intersection of group IDs
for a partition is empty, the merged value for that partition is False.
"""
parent_block = getattr(self, parent)
child_block = getattr(self, child)
self.set_group_access(parent_block, {self.PARTITION_1: [self.PARTITION_1_GROUP_1]})
self.set_group_access(child_block, {self.PARTITION_1: [self.PARTITION_1_GROUP_2]})
self.verify_group_access(parent_block, {self.PARTITION_1: [self.PARTITION_1_GROUP_1]})
self.verify_group_access(child_block, {self.PARTITION_1: False})
def test_disjoint_groups_no_override(self):
"""
Special case of the above test - ensures that `False` propagates down
to the block being queried even if blocks further down in the hierarchy
try to override it.
"""
self.set_group_access(self.section_location, {self.PARTITION_1: [self.PARTITION_1_GROUP_1]})
self.set_group_access(self.subsection_location, {self.PARTITION_1: [self.PARTITION_1_GROUP_2]})
self.set_group_access(
self.vertical_location, {self.PARTITION_1: [self.PARTITION_1_GROUP_1, self.PARTITION_1_GROUP_2]}
)
self.verify_group_access(self.vertical_location, {self.PARTITION_1: False})
self.verify_group_access(self.video_location, {self.PARTITION_1: False})
@ddt.data(*PARENT_CHILD_PAIRS)
@ddt.unpack
def test_union_partitions(self, parent, child):
"""
When merging group_access on a block, the result's keys (partitions)
are the union of all partitions specified across all ancestor blocks
(including this one).
"""
parent_block = getattr(self, parent)
child_block = getattr(self, child)
self.set_group_access(parent_block, {self.PARTITION_1: [self.PARTITION_1_GROUP_1]})
self.set_group_access(child_block, {self.PARTITION_2: [self.PARTITION_1_GROUP_2]})
self.verify_group_access(parent_block, {self.PARTITION_1: [self.PARTITION_1_GROUP_1]})
self.verify_group_access(
child_block, {self.PARTITION_1: [self.PARTITION_1_GROUP_1], self.PARTITION_2: [self.PARTITION_1_GROUP_2]}
)
|
agpl-3.0
|
Deester4x4jr/team-altitude
|
app/user/plugins/shoppingcart/vendor/guzzle/guzzle/docs/conf.py
|
469
|
3047
|
import sys, os
from sphinx.highlighting import lexers
from pygments.lexers.web import PhpLexer
lexers['php'] = PhpLexer(startinline=True, linenos=1)
lexers['php-annotations'] = PhpLexer(startinline=True, linenos=1)
primary_domain = 'php'
# -- General configuration -----------------------------------------------------
extensions = []
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
project = u'Guzzle'
copyright = u'2012, Michael Dowling'
version = '3.0.0'
release = '3.0.0'
exclude_patterns = ['_build']
# -- Options for HTML output ---------------------------------------------------
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = "Guzzle documentation"
html_short_title = "Guzzle"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Custom sidebar templates, maps document names to template names.
html_sidebars = {
'**': ['localtoc.html', 'leftbar.html', 'searchbox.html']
}
# Output file base name for HTML help builder.
htmlhelp_basename = 'Guzzledoc'
# -- Guzzle Sphinx theme setup ------------------------------------------------
sys.path.insert(0, '/Users/dowling/projects/guzzle_sphinx_theme')
import guzzle_sphinx_theme
html_translator_class = 'guzzle_sphinx_theme.HTMLTranslator'
html_theme_path = guzzle_sphinx_theme.html_theme_path()
html_theme = 'guzzle_sphinx_theme'
# Guzzle theme options (see theme.conf for more information)
html_theme_options = {
"index_template": "index.html",
"project_nav_name": "Guzzle",
"github_user": "guzzle",
"github_repo": "guzzle",
"disqus_comments_shortname": "guzzle",
"google_analytics_account": "UA-22752917-1"
}
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'Guzzle.tex', u'Guzzle Documentation',
u'Michael Dowling', 'manual'),
]
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'guzzle', u'Guzzle Documentation',
[u'Michael Dowling'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Guzzle', u'Guzzle Documentation',
u'Michael Dowling', 'Guzzle', 'One line description of project.',
'Miscellaneous'),
]
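# A typical local build with this configuration (assuming Sphinx and the
# guzzle_sphinx_theme package are installed) would be along the lines of:
#
#     sphinx-build -b html . _build/html
#
# run from this directory, writing HTML output under _build/html.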
|
gpl-3.0
|
klmitch/nova
|
nova/objects/instance_action.py
|
3
|
12223
|
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import timeutils
from oslo_utils import versionutils
from nova.compute import utils as compute_utils
from nova.db import api as db
from nova import exception
from nova import objects
from nova.objects import base
from nova.objects import fields
# TODO(berrange): Remove NovaObjectDictCompat
@base.NovaObjectRegistry.register
class InstanceAction(base.NovaPersistentObject, base.NovaObject,
base.NovaObjectDictCompat):
# Version 1.0: Initial version
# Version 1.1: String attributes updated to support unicode
# Version 1.2: Add create() method.
VERSION = '1.2'
fields = {
'id': fields.IntegerField(),
'action': fields.StringField(nullable=True),
'instance_uuid': fields.UUIDField(nullable=True),
'request_id': fields.StringField(nullable=True),
'user_id': fields.StringField(nullable=True),
'project_id': fields.StringField(nullable=True),
'start_time': fields.DateTimeField(nullable=True),
'finish_time': fields.DateTimeField(nullable=True),
'message': fields.StringField(nullable=True),
}
@staticmethod
def _from_db_object(context, action, db_action):
for field in action.fields:
action[field] = db_action[field]
action._context = context
action.obj_reset_changes()
return action
@staticmethod
def pack_action_start(context, instance_uuid, action_name):
values = {'request_id': context.request_id,
'instance_uuid': instance_uuid,
'user_id': context.user_id,
'project_id': context.project_id,
'action': action_name,
'start_time': context.timestamp,
'updated_at': context.timestamp}
return values
@staticmethod
def pack_action_finish(context, instance_uuid):
utcnow = timeutils.utcnow()
values = {'request_id': context.request_id,
'instance_uuid': instance_uuid,
'finish_time': utcnow,
'updated_at': utcnow}
return values
@base.remotable_classmethod
def get_by_request_id(cls, context, instance_uuid, request_id):
db_action = db.action_get_by_request_id(context, instance_uuid,
request_id)
if db_action:
return cls._from_db_object(context, cls(), db_action)
@base.remotable_classmethod
def action_start(cls, context, instance_uuid, action_name,
want_result=True):
values = cls.pack_action_start(context, instance_uuid, action_name)
db_action = db.action_start(context, values)
if want_result:
return cls._from_db_object(context, cls(), db_action)
@base.remotable_classmethod
def action_finish(cls, context, instance_uuid, want_result=True):
values = cls.pack_action_finish(context, instance_uuid)
db_action = db.action_finish(context, values)
if want_result:
return cls._from_db_object(context, cls(), db_action)
@base.remotable
def finish(self):
values = self.pack_action_finish(self._context, self.instance_uuid)
db_action = db.action_finish(self._context, values)
self._from_db_object(self._context, self, db_action)
# NOTE(mriedem): In most cases, the action_start() method should be used
# to create new InstanceAction records. This method should only be used
# in specific exceptional cases like when cloning actions from one cell
# database to another.
@base.remotable
def create(self):
if 'id' in self:
raise exception.ObjectActionError(action='create',
reason='already created')
updates = self.obj_get_changes()
db_action = db.action_start(self._context, updates)
self._from_db_object(self._context, self, db_action)
@base.NovaObjectRegistry.register
class InstanceActionList(base.ObjectListBase, base.NovaObject):
# Version 1.0: Initial version
# Version 1.1: get_by_instance_uuid added pagination and filters support
VERSION = '1.1'
fields = {
'objects': fields.ListOfObjectsField('InstanceAction'),
}
@base.remotable_classmethod
def get_by_instance_uuid(cls, context, instance_uuid, limit=None,
marker=None, filters=None):
db_actions = db.actions_get(
context, instance_uuid, limit, marker, filters)
return base.obj_make_list(context, cls(), InstanceAction, db_actions)
# TODO(berrange): Remove NovaObjectDictCompat
@base.NovaObjectRegistry.register
class InstanceActionEvent(base.NovaPersistentObject, base.NovaObject,
base.NovaObjectDictCompat):
# Version 1.0: Initial version
# Version 1.1: event_finish_with_failure decorated with serialize_args
# Version 1.2: Add 'host' field
# Version 1.3: Add create() method.
# Version 1.4: Added 'details' field.
VERSION = '1.4'
fields = {
'id': fields.IntegerField(),
'event': fields.StringField(nullable=True),
'action_id': fields.IntegerField(nullable=True),
'start_time': fields.DateTimeField(nullable=True),
'finish_time': fields.DateTimeField(nullable=True),
'result': fields.StringField(nullable=True),
'traceback': fields.StringField(nullable=True),
'host': fields.StringField(nullable=True),
'details': fields.StringField(nullable=True)
}
def obj_make_compatible(self, primitive, target_version):
target_version = versionutils.convert_version_to_tuple(target_version)
if target_version < (1, 4) and 'details' in primitive:
del primitive['details']
if target_version < (1, 2) and 'host' in primitive:
del primitive['host']
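# Sketch of the downgrade above (hypothetical primitive dict): back-levelling
# an event to version '1.1' drops both newer fields in place, e.g.
#   primitive = {'event': 'compute_reboot', 'host': 'cmp1', 'details': 'boom'}
#   event.obj_make_compatible(primitive, '1.1')
#   # primitive is now {'event': 'compute_reboot'}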
@staticmethod
def _from_db_object(context, event, db_event):
for field in event.fields:
event[field] = db_event[field]
event._context = context
event.obj_reset_changes()
return event
@staticmethod
def pack_action_event_start(context, instance_uuid, event_name,
host=None):
values = {'event': event_name,
'instance_uuid': instance_uuid,
'request_id': context.request_id,
'start_time': timeutils.utcnow(),
'host': host}
return values
@staticmethod
def pack_action_event_finish(context, instance_uuid, event_name,
exc_val=None, exc_tb=None):
values = {'event': event_name,
'instance_uuid': instance_uuid,
'request_id': context.request_id,
'finish_time': timeutils.utcnow()}
if exc_tb is None:
values['result'] = 'Success'
else:
values['result'] = 'Error'
# Store the details using the same logic as storing an instance
# fault message.
if exc_val:
# If we got a string for exc_val it's probably because of
# the serialize_args decorator on event_finish_with_failure
# so pass that as the message to exception_to_dict otherwise
# the details will just be the exception class name since it
# cannot format the message as a NovaException.
message = exc_val if isinstance(exc_val, str) else None
values['details'] = compute_utils.exception_to_dict(
exc_val, message=message)['message']
values['traceback'] = exc_tb
return values
@base.remotable_classmethod
def get_by_id(cls, context, action_id, event_id):
db_event = db.action_event_get_by_id(context, action_id, event_id)
return cls._from_db_object(context, cls(), db_event)
@base.remotable_classmethod
def event_start(cls, context, instance_uuid, event_name, want_result=True,
host=None):
values = cls.pack_action_event_start(context, instance_uuid,
event_name, host=host)
db_event = db.action_event_start(context, values)
if want_result:
return cls._from_db_object(context, cls(), db_event)
@base.serialize_args
@base.remotable_classmethod
def event_finish_with_failure(cls, context, instance_uuid, event_name,
exc_val=None, exc_tb=None, want_result=None):
values = cls.pack_action_event_finish(context, instance_uuid,
event_name, exc_val=exc_val,
exc_tb=exc_tb)
db_event = db.action_event_finish(context, values)
if want_result:
return cls._from_db_object(context, cls(), db_event)
@base.remotable_classmethod
def event_finish(cls, context, instance_uuid, event_name,
want_result=True):
return cls.event_finish_with_failure(context, instance_uuid,
event_name, exc_val=None,
exc_tb=None,
want_result=want_result)
@base.remotable
def finish_with_failure(self, exc_val, exc_tb):
values = self.pack_action_event_finish(self._context,
self.instance_uuid,
self.event, exc_val=exc_val,
exc_tb=exc_tb)
db_event = db.action_event_finish(self._context, values)
self._from_db_object(self._context, self, db_event)
@base.remotable
def finish(self):
self.finish_with_failure(self._context, exc_val=None, exc_tb=None)
# NOTE(mriedem): In most cases, the event_start() method should be used
# to create new InstanceActionEvent records. This method should only be
# used in specific exceptional cases like when cloning events from one cell
# database to another.
@base.remotable
def create(self, instance_uuid, request_id):
if 'id' in self:
raise exception.ObjectActionError(action='create',
reason='already created')
updates = self.obj_get_changes()
# The instance_uuid and request_id uniquely identify the "parent"
# InstanceAction for this event and are used in action_event_start().
# TODO(mriedem): This could be optimized if we just didn't use
# db.action_event_start and inserted the record ourselves and passed
# in the action_id.
updates['instance_uuid'] = instance_uuid
updates['request_id'] = request_id
db_event = db.action_event_start(self._context, updates)
self._from_db_object(self._context, self, db_event)
@base.NovaObjectRegistry.register
class InstanceActionEventList(base.ObjectListBase, base.NovaObject):
# Version 1.0: Initial version
# Version 1.1: InstanceActionEvent <= 1.1
VERSION = '1.1'
fields = {
'objects': fields.ListOfObjectsField('InstanceActionEvent'),
}
@base.remotable_classmethod
def get_by_action(cls, context, action_id):
db_events = db.action_events_get(context, action_id)
return base.obj_make_list(context, cls(context),
objects.InstanceActionEvent, db_events)
|
apache-2.0
|
wli/django-allauth
|
allauth/socialaccount/providers/oauth2/views.py
|
1
|
5403
|
from __future__ import absolute_import
from datetime import timedelta
from django.core.exceptions import PermissionDenied
from django.http import HttpResponseRedirect
from django.utils import timezone
from allauth.compat import reverse
from allauth.exceptions import ImmediateHttpResponse
from allauth.utils import build_absolute_uri
from allauth.socialaccount.helpers import render_authentication_error
from allauth.socialaccount import providers
from allauth.socialaccount.providers.oauth2.client import (OAuth2Client,
OAuth2Error)
from allauth.socialaccount.helpers import complete_social_login
from allauth.socialaccount.models import SocialToken, SocialLogin
from allauth.utils import get_request_param
from ..base import AuthAction, AuthError
class OAuth2Adapter(object):
expires_in_key = 'expires_in'
supports_state = True
redirect_uri_protocol = None
access_token_method = 'POST'
login_cancelled_error = 'access_denied'
scope_delimiter = ' '
basic_auth = False
headers = None
def __init__(self, request):
self.request = request
def get_provider(self):
return providers.registry.by_id(self.provider_id, self.request)
def complete_login(self, request, app, access_token, **kwargs):
"""
Returns a SocialLogin instance
"""
raise NotImplementedError
def parse_token(self, data):
token = SocialToken(token=data['access_token'])
token.token_secret = data.get('refresh_token', '')
expires_in = data.get(self.expires_in_key, None)
if expires_in:
token.expires_at = timezone.now() + timedelta(
seconds=int(expires_in))
return token
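# Sketch of what parse_token consumes (hypothetical provider response):
#   data = {'access_token': 'abc', 'refresh_token': 'def', 'expires_in': 3600}
# yields a SocialToken with token='abc', token_secret='def' and expires_at
# one hour from now; providers using a different expiry key override
# expires_in_key on their adapter subclass.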
class OAuth2View(object):
@classmethod
def adapter_view(cls, adapter):
def view(request, *args, **kwargs):
self = cls()
self.request = request
self.adapter = adapter(request)
try:
return self.dispatch(request, *args, **kwargs)
except ImmediateHttpResponse as e:
return e.response
return view
def get_client(self, request, app):
callback_url = reverse(self.adapter.provider_id + "_callback")
callback_url = build_absolute_uri(
request, callback_url,
protocol=self.adapter.redirect_uri_protocol)
provider = self.adapter.get_provider()
scope = provider.get_scope(request)
client = OAuth2Client(self.request, app.client_id, app.secret,
self.adapter.access_token_method,
self.adapter.access_token_url,
callback_url,
scope,
scope_delimiter=self.adapter.scope_delimiter,
headers=self.adapter.headers,
basic_auth=self.adapter.basic_auth)
return client
class OAuth2LoginView(OAuth2View):
def dispatch(self, request):
provider = self.adapter.get_provider()
app = provider.get_app(self.request)
client = self.get_client(request, app)
action = request.GET.get('action', AuthAction.AUTHENTICATE)
auth_url = self.adapter.authorize_url
auth_params = provider.get_auth_params(request, action)
client.state = SocialLogin.stash_state(request)
try:
return HttpResponseRedirect(client.get_redirect_url(
auth_url, auth_params))
except OAuth2Error as e:
return render_authentication_error(
request,
provider.id,
exception=e)
class OAuth2CallbackView(OAuth2View):
def dispatch(self, request):
if 'error' in request.GET or 'code' not in request.GET:
# Distinguish cancel from error
auth_error = request.GET.get('error', None)
if auth_error == self.adapter.login_cancelled_error:
error = AuthError.CANCELLED
else:
error = AuthError.UNKNOWN
return render_authentication_error(
request,
self.adapter.provider_id,
error=error)
app = self.adapter.get_provider().get_app(self.request)
client = self.get_client(request, app)
try:
access_token = client.get_access_token(request.GET['code'])
token = self.adapter.parse_token(access_token)
token.app = app
login = self.adapter.complete_login(request,
app,
token,
response=access_token)
login.token = token
if self.adapter.supports_state:
login.state = SocialLogin \
.verify_and_unstash_state(
request,
get_request_param(request, 'state'))
else:
login.state = SocialLogin.unstash_state(request)
return complete_social_login(request, login)
except (PermissionDenied, OAuth2Error) as e:
return render_authentication_error(
request,
self.adapter.provider_id,
exception=e)
|
mit
|
Work4Labs/lettuce
|
tests/integration/lib/Django-1.3/django/contrib/redirects/middleware.py
|
447
|
1105
|
from django.contrib.redirects.models import Redirect
from django import http
from django.conf import settings
class RedirectFallbackMiddleware(object):
def process_response(self, request, response):
if response.status_code != 404:
return response # No need to check for a redirect for non-404 responses.
path = request.get_full_path()
try:
r = Redirect.objects.get(site__id__exact=settings.SITE_ID, old_path=path)
except Redirect.DoesNotExist:
r = None
if r is None and settings.APPEND_SLASH:
# Try removing the trailing slash.
try:
r = Redirect.objects.get(site__id__exact=settings.SITE_ID,
old_path=path[:path.rfind('/')]+path[path.rfind('/')+1:])
except Redirect.DoesNotExist:
pass
if r is not None:
if r.new_path == '':
return http.HttpResponseGone()
return http.HttpResponsePermanentRedirect(r.new_path)
# No redirect was found. Return the response.
return response
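# Worked example (hypothetical data): with APPEND_SLASH on, a 404 for
# '/about/' first looks up old_path='/about/', then retries with the trailing
# slash stripped ('/about'). A matching Redirect with new_path='' yields
# 410 Gone; anything else, a 301 to new_path.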
|
gpl-3.0
|
MTG/acousticbrainz-client
|
abz/vendor/requests/packages/chardet/jisfreq.py
|
3131
|
47315
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# Sampling from about 20M text materials, including literature and computer technology
#
# Japanese frequency table, applied to both S-JIS and EUC-JP
# Chars are sorted in decreasing frequency order.
# 128 --> 0.77094
# 256 --> 0.85710
# 512 --> 0.92635
# 1024 --> 0.97130
# 2048 --> 0.99431
#
# Ideal Distribution Ratio = 0.92635 / (1-0.92635) = 12.58
# Random Distribution Ratio = 512 / (2965+62+83+86-512) = 0.191
#
# Typical Distribution Ratio, 25% of IDR
JIS_TYPICAL_DISTRIBUTION_RATIO = 3.0
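# (How this is consumed, as a rough sketch: the distribution analyser in
# chardistribution.py, in this same package, compares the proportion of
# high-frequency characters observed against this ratio to score how
# plausible a Japanese reading of the byte stream is.)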
# Char to FreqOrder table
JIS_TABLE_SIZE = 4368
JISCharToFreqOrder = (
40, 1, 6, 182, 152, 180, 295,2127, 285, 381,3295,4304,3068,4606,3165,3510, # 16
3511,1822,2785,4607,1193,2226,5070,4608, 171,2996,1247, 18, 179,5071, 856,1661, # 32
1262,5072, 619, 127,3431,3512,3230,1899,1700, 232, 228,1294,1298, 284, 283,2041, # 48
2042,1061,1062, 48, 49, 44, 45, 433, 434,1040,1041, 996, 787,2997,1255,4305, # 64
2108,4609,1684,1648,5073,5074,5075,5076,5077,5078,3687,5079,4610,5080,3927,3928, # 80
5081,3296,3432, 290,2285,1471,2187,5082,2580,2825,1303,2140,1739,1445,2691,3375, # 96
1691,3297,4306,4307,4611, 452,3376,1182,2713,3688,3069,4308,5083,5084,5085,5086, # 112
5087,5088,5089,5090,5091,5092,5093,5094,5095,5096,5097,5098,5099,5100,5101,5102, # 128
5103,5104,5105,5106,5107,5108,5109,5110,5111,5112,4097,5113,5114,5115,5116,5117, # 144
5118,5119,5120,5121,5122,5123,5124,5125,5126,5127,5128,5129,5130,5131,5132,5133, # 160
5134,5135,5136,5137,5138,5139,5140,5141,5142,5143,5144,5145,5146,5147,5148,5149, # 176
5150,5151,5152,4612,5153,5154,5155,5156,5157,5158,5159,5160,5161,5162,5163,5164, # 192
5165,5166,5167,5168,5169,5170,5171,5172,5173,5174,5175,1472, 598, 618, 820,1205, # 208
1309,1412,1858,1307,1692,5176,5177,5178,5179,5180,5181,5182,1142,1452,1234,1172, # 224
1875,2043,2149,1793,1382,2973, 925,2404,1067,1241, 960,1377,2935,1491, 919,1217, # 240
1865,2030,1406,1499,2749,4098,5183,5184,5185,5186,5187,5188,2561,4099,3117,1804, # 256
2049,3689,4309,3513,1663,5189,3166,3118,3298,1587,1561,3433,5190,3119,1625,2998, # 272
3299,4613,1766,3690,2786,4614,5191,5192,5193,5194,2161, 26,3377, 2,3929, 20, # 288
3691, 47,4100, 50, 17, 16, 35, 268, 27, 243, 42, 155, 24, 154, 29, 184, # 304
4, 91, 14, 92, 53, 396, 33, 289, 9, 37, 64, 620, 21, 39, 321, 5, # 320
12, 11, 52, 13, 3, 208, 138, 0, 7, 60, 526, 141, 151,1069, 181, 275, # 336
1591, 83, 132,1475, 126, 331, 829, 15, 69, 160, 59, 22, 157, 55,1079, 312, # 352
109, 38, 23, 25, 10, 19, 79,5195, 61, 382,1124, 8, 30,5196,5197,5198, # 368
5199,5200,5201,5202,5203,5204,5205,5206, 89, 62, 74, 34,2416, 112, 139, 196, # 384
271, 149, 84, 607, 131, 765, 46, 88, 153, 683, 76, 874, 101, 258, 57, 80, # 400
32, 364, 121,1508, 169,1547, 68, 235, 145,2999, 41, 360,3027, 70, 63, 31, # 416
43, 259, 262,1383, 99, 533, 194, 66, 93, 846, 217, 192, 56, 106, 58, 565, # 432
280, 272, 311, 256, 146, 82, 308, 71, 100, 128, 214, 655, 110, 261, 104,1140, # 448
54, 51, 36, 87, 67,3070, 185,2618,2936,2020, 28,1066,2390,2059,5207,5208, # 464
5209,5210,5211,5212,5213,5214,5215,5216,4615,5217,5218,5219,5220,5221,5222,5223, # 480
5224,5225,5226,5227,5228,5229,5230,5231,5232,5233,5234,5235,5236,3514,5237,5238, # 496
5239,5240,5241,5242,5243,5244,2297,2031,4616,4310,3692,5245,3071,5246,3598,5247, # 512
4617,3231,3515,5248,4101,4311,4618,3808,4312,4102,5249,4103,4104,3599,5250,5251, # 528
5252,5253,5254,5255,5256,5257,5258,5259,5260,5261,5262,5263,5264,5265,5266,5267, # 544
5268,5269,5270,5271,5272,5273,5274,5275,5276,5277,5278,5279,5280,5281,5282,5283, # 560
5284,5285,5286,5287,5288,5289,5290,5291,5292,5293,5294,5295,5296,5297,5298,5299, # 576
5300,5301,5302,5303,5304,5305,5306,5307,5308,5309,5310,5311,5312,5313,5314,5315, # 592
5316,5317,5318,5319,5320,5321,5322,5323,5324,5325,5326,5327,5328,5329,5330,5331, # 608
5332,5333,5334,5335,5336,5337,5338,5339,5340,5341,5342,5343,5344,5345,5346,5347, # 624
5348,5349,5350,5351,5352,5353,5354,5355,5356,5357,5358,5359,5360,5361,5362,5363, # 640
5364,5365,5366,5367,5368,5369,5370,5371,5372,5373,5374,5375,5376,5377,5378,5379, # 656
5380,5381, 363, 642,2787,2878,2788,2789,2316,3232,2317,3434,2011, 165,1942,3930, # 672
3931,3932,3933,5382,4619,5383,4620,5384,5385,5386,5387,5388,5389,5390,5391,5392, # 688
5393,5394,5395,5396,5397,5398,5399,5400,5401,5402,5403,5404,5405,5406,5407,5408, # 704
5409,5410,5411,5412,5413,5414,5415,5416,5417,5418,5419,5420,5421,5422,5423,5424, # 720
5425,5426,5427,5428,5429,5430,5431,5432,5433,5434,5435,5436,5437,5438,5439,5440, # 736
5441,5442,5443,5444,5445,5446,5447,5448,5449,5450,5451,5452,5453,5454,5455,5456, # 752
5457,5458,5459,5460,5461,5462,5463,5464,5465,5466,5467,5468,5469,5470,5471,5472, # 768
5473,5474,5475,5476,5477,5478,5479,5480,5481,5482,5483,5484,5485,5486,5487,5488, # 784
5489,5490,5491,5492,5493,5494,5495,5496,5497,5498,5499,5500,5501,5502,5503,5504, # 800
5505,5506,5507,5508,5509,5510,5511,5512,5513,5514,5515,5516,5517,5518,5519,5520, # 816
5521,5522,5523,5524,5525,5526,5527,5528,5529,5530,5531,5532,5533,5534,5535,5536, # 832
5537,5538,5539,5540,5541,5542,5543,5544,5545,5546,5547,5548,5549,5550,5551,5552, # 848
5553,5554,5555,5556,5557,5558,5559,5560,5561,5562,5563,5564,5565,5566,5567,5568, # 864
5569,5570,5571,5572,5573,5574,5575,5576,5577,5578,5579,5580,5581,5582,5583,5584, # 880
5585,5586,5587,5588,5589,5590,5591,5592,5593,5594,5595,5596,5597,5598,5599,5600, # 896
5601,5602,5603,5604,5605,5606,5607,5608,5609,5610,5611,5612,5613,5614,5615,5616, # 912
5617,5618,5619,5620,5621,5622,5623,5624,5625,5626,5627,5628,5629,5630,5631,5632, # 928
5633,5634,5635,5636,5637,5638,5639,5640,5641,5642,5643,5644,5645,5646,5647,5648, # 944
5649,5650,5651,5652,5653,5654,5655,5656,5657,5658,5659,5660,5661,5662,5663,5664, # 960
5665,5666,5667,5668,5669,5670,5671,5672,5673,5674,5675,5676,5677,5678,5679,5680, # 976
5681,5682,5683,5684,5685,5686,5687,5688,5689,5690,5691,5692,5693,5694,5695,5696, # 992
5697,5698,5699,5700,5701,5702,5703,5704,5705,5706,5707,5708,5709,5710,5711,5712, # 1008
5713,5714,5715,5716,5717,5718,5719,5720,5721,5722,5723,5724,5725,5726,5727,5728, # 1024
5729,5730,5731,5732,5733,5734,5735,5736,5737,5738,5739,5740,5741,5742,5743,5744, # 1040
5745,5746,5747,5748,5749,5750,5751,5752,5753,5754,5755,5756,5757,5758,5759,5760, # 1056
5761,5762,5763,5764,5765,5766,5767,5768,5769,5770,5771,5772,5773,5774,5775,5776, # 1072
5777,5778,5779,5780,5781,5782,5783,5784,5785,5786,5787,5788,5789,5790,5791,5792, # 1088
5793,5794,5795,5796,5797,5798,5799,5800,5801,5802,5803,5804,5805,5806,5807,5808, # 1104
5809,5810,5811,5812,5813,5814,5815,5816,5817,5818,5819,5820,5821,5822,5823,5824, # 1120
5825,5826,5827,5828,5829,5830,5831,5832,5833,5834,5835,5836,5837,5838,5839,5840, # 1136
5841,5842,5843,5844,5845,5846,5847,5848,5849,5850,5851,5852,5853,5854,5855,5856, # 1152
5857,5858,5859,5860,5861,5862,5863,5864,5865,5866,5867,5868,5869,5870,5871,5872, # 1168
5873,5874,5875,5876,5877,5878,5879,5880,5881,5882,5883,5884,5885,5886,5887,5888, # 1184
5889,5890,5891,5892,5893,5894,5895,5896,5897,5898,5899,5900,5901,5902,5903,5904, # 1200
5905,5906,5907,5908,5909,5910,5911,5912,5913,5914,5915,5916,5917,5918,5919,5920, # 1216
5921,5922,5923,5924,5925,5926,5927,5928,5929,5930,5931,5932,5933,5934,5935,5936, # 1232
5937,5938,5939,5940,5941,5942,5943,5944,5945,5946,5947,5948,5949,5950,5951,5952, # 1248
5953,5954,5955,5956,5957,5958,5959,5960,5961,5962,5963,5964,5965,5966,5967,5968, # 1264
5969,5970,5971,5972,5973,5974,5975,5976,5977,5978,5979,5980,5981,5982,5983,5984, # 1280
5985,5986,5987,5988,5989,5990,5991,5992,5993,5994,5995,5996,5997,5998,5999,6000, # 1296
6001,6002,6003,6004,6005,6006,6007,6008,6009,6010,6011,6012,6013,6014,6015,6016, # 1312
6017,6018,6019,6020,6021,6022,6023,6024,6025,6026,6027,6028,6029,6030,6031,6032, # 1328
6033,6034,6035,6036,6037,6038,6039,6040,6041,6042,6043,6044,6045,6046,6047,6048, # 1344
6049,6050,6051,6052,6053,6054,6055,6056,6057,6058,6059,6060,6061,6062,6063,6064, # 1360
6065,6066,6067,6068,6069,6070,6071,6072,6073,6074,6075,6076,6077,6078,6079,6080, # 1376
6081,6082,6083,6084,6085,6086,6087,6088,6089,6090,6091,6092,6093,6094,6095,6096, # 1392
6097,6098,6099,6100,6101,6102,6103,6104,6105,6106,6107,6108,6109,6110,6111,6112, # 1408
6113,6114,2044,2060,4621, 997,1235, 473,1186,4622, 920,3378,6115,6116, 379,1108, # 1424
4313,2657,2735,3934,6117,3809, 636,3233, 573,1026,3693,3435,2974,3300,2298,4105, # 1440
854,2937,2463, 393,2581,2417, 539, 752,1280,2750,2480, 140,1161, 440, 708,1569, # 1456
665,2497,1746,1291,1523,3000, 164,1603, 847,1331, 537,1997, 486, 508,1693,2418, # 1472
1970,2227, 878,1220, 299,1030, 969, 652,2751, 624,1137,3301,2619, 65,3302,2045, # 1488
1761,1859,3120,1930,3694,3516, 663,1767, 852, 835,3695, 269, 767,2826,2339,1305, # 1504
896,1150, 770,1616,6118, 506,1502,2075,1012,2519, 775,2520,2975,2340,2938,4314, # 1520
3028,2086,1224,1943,2286,6119,3072,4315,2240,1273,1987,3935,1557, 175, 597, 985, # 1536
3517,2419,2521,1416,3029, 585, 938,1931,1007,1052,1932,1685,6120,3379,4316,4623, # 1552
804, 599,3121,1333,2128,2539,1159,1554,2032,3810, 687,2033,2904, 952, 675,1467, # 1568
3436,6121,2241,1096,1786,2440,1543,1924, 980,1813,2228, 781,2692,1879, 728,1918, # 1584
3696,4624, 548,1950,4625,1809,1088,1356,3303,2522,1944, 502, 972, 373, 513,2827, # 1600
586,2377,2391,1003,1976,1631,6122,2464,1084, 648,1776,4626,2141, 324, 962,2012, # 1616
2177,2076,1384, 742,2178,1448,1173,1810, 222, 102, 301, 445, 125,2420, 662,2498, # 1632
277, 200,1476,1165,1068, 224,2562,1378,1446, 450,1880, 659, 791, 582,4627,2939, # 1648
3936,1516,1274, 555,2099,3697,1020,1389,1526,3380,1762,1723,1787,2229, 412,2114, # 1664
1900,2392,3518, 512,2597, 427,1925,2341,3122,1653,1686,2465,2499, 697, 330, 273, # 1680
380,2162, 951, 832, 780, 991,1301,3073, 965,2270,3519, 668,2523,2636,1286, 535, # 1696
1407, 518, 671, 957,2658,2378, 267, 611,2197,3030,6123, 248,2299, 967,1799,2356, # 1712
850,1418,3437,1876,1256,1480,2828,1718,6124,6125,1755,1664,2405,6126,4628,2879, # 1728
2829, 499,2179, 676,4629, 557,2329,2214,2090, 325,3234, 464, 811,3001, 992,2342, # 1744
2481,1232,1469, 303,2242, 466,1070,2163, 603,1777,2091,4630,2752,4631,2714, 322, # 1760
2659,1964,1768, 481,2188,1463,2330,2857,3600,2092,3031,2421,4632,2318,2070,1849, # 1776
2598,4633,1302,2254,1668,1701,2422,3811,2905,3032,3123,2046,4106,1763,1694,4634, # 1792
1604, 943,1724,1454, 917, 868,2215,1169,2940, 552,1145,1800,1228,1823,1955, 316, # 1808
1080,2510, 361,1807,2830,4107,2660,3381,1346,1423,1134,4108,6127, 541,1263,1229, # 1824
1148,2540, 545, 465,1833,2880,3438,1901,3074,2482, 816,3937, 713,1788,2500, 122, # 1840
1575, 195,1451,2501,1111,6128, 859, 374,1225,2243,2483,4317, 390,1033,3439,3075, # 1856
2524,1687, 266, 793,1440,2599, 946, 779, 802, 507, 897,1081, 528,2189,1292, 711, # 1872
1866,1725,1167,1640, 753, 398,2661,1053, 246, 348,4318, 137,1024,3440,1600,2077, # 1888
2129, 825,4319, 698, 238, 521, 187,2300,1157,2423,1641,1605,1464,1610,1097,2541, # 1904
1260,1436, 759,2255,1814,2150, 705,3235, 409,2563,3304, 561,3033,2005,2564, 726, # 1920
1956,2343,3698,4109, 949,3812,3813,3520,1669, 653,1379,2525, 881,2198, 632,2256, # 1936
1027, 778,1074, 733,1957, 514,1481,2466, 554,2180, 702,3938,1606,1017,1398,6129, # 1952
1380,3521, 921, 993,1313, 594, 449,1489,1617,1166, 768,1426,1360, 495,1794,3601, # 1968
1177,3602,1170,4320,2344, 476, 425,3167,4635,3168,1424, 401,2662,1171,3382,1998, # 1984
1089,4110, 477,3169, 474,6130,1909, 596,2831,1842, 494, 693,1051,1028,1207,3076, # 2000
606,2115, 727,2790,1473,1115, 743,3522, 630, 805,1532,4321,2021, 366,1057, 838, # 2016
684,1114,2142,4322,2050,1492,1892,1808,2271,3814,2424,1971,1447,1373,3305,1090, # 2032
1536,3939,3523,3306,1455,2199, 336, 369,2331,1035, 584,2393, 902, 718,2600,6131, # 2048
2753, 463,2151,1149,1611,2467, 715,1308,3124,1268, 343,1413,3236,1517,1347,2663, # 2064
2093,3940,2022,1131,1553,2100,2941,1427,3441,2942,1323,2484,6132,1980, 872,2368, # 2080
2441,2943, 320,2369,2116,1082, 679,1933,3941,2791,3815, 625,1143,2023, 422,2200, # 2096
3816,6133, 730,1695, 356,2257,1626,2301,2858,2637,1627,1778, 937, 883,2906,2693, # 2112
3002,1769,1086, 400,1063,1325,3307,2792,4111,3077, 456,2345,1046, 747,6134,1524, # 2128
884,1094,3383,1474,2164,1059, 974,1688,2181,2258,1047, 345,1665,1187, 358, 875, # 2144
3170, 305, 660,3524,2190,1334,1135,3171,1540,1649,2542,1527, 927, 968,2793, 885, # 2160
1972,1850, 482, 500,2638,1218,1109,1085,2543,1654,2034, 876, 78,2287,1482,1277, # 2176
861,1675,1083,1779, 724,2754, 454, 397,1132,1612,2332, 893, 672,1237, 257,2259, # 2192
2370, 135,3384, 337,2244, 547, 352, 340, 709,2485,1400, 788,1138,2511, 540, 772, # 2208
1682,2260,2272,2544,2013,1843,1902,4636,1999,1562,2288,4637,2201,1403,1533, 407, # 2224
576,3308,1254,2071, 978,3385, 170, 136,1201,3125,2664,3172,2394, 213, 912, 873, # 2240
3603,1713,2202, 699,3604,3699, 813,3442, 493, 531,1054, 468,2907,1483, 304, 281, # 2256
4112,1726,1252,2094, 339,2319,2130,2639, 756,1563,2944, 748, 571,2976,1588,2425, # 2272
2715,1851,1460,2426,1528,1392,1973,3237, 288,3309, 685,3386, 296, 892,2716,2216, # 2288
1570,2245, 722,1747,2217, 905,3238,1103,6135,1893,1441,1965, 251,1805,2371,3700, # 2304
2601,1919,1078, 75,2182,1509,1592,1270,2640,4638,2152,6136,3310,3817, 524, 706, # 2320
1075, 292,3818,1756,2602, 317, 98,3173,3605,3525,1844,2218,3819,2502, 814, 567, # 2336
385,2908,1534,6137, 534,1642,3239, 797,6138,1670,1529, 953,4323, 188,1071, 538, # 2352
178, 729,3240,2109,1226,1374,2000,2357,2977, 731,2468,1116,2014,2051,6139,1261, # 2368
1593, 803,2859,2736,3443, 556, 682, 823,1541,6140,1369,2289,1706,2794, 845, 462, # 2384
2603,2665,1361, 387, 162,2358,1740, 739,1770,1720,1304,1401,3241,1049, 627,1571, # 2400
2427,3526,1877,3942,1852,1500, 431,1910,1503, 677, 297,2795, 286,1433,1038,1198, # 2416
2290,1133,1596,4113,4639,2469,1510,1484,3943,6141,2442, 108, 712,4640,2372, 866, # 2432
3701,2755,3242,1348, 834,1945,1408,3527,2395,3243,1811, 824, 994,1179,2110,1548, # 2448
1453, 790,3003, 690,4324,4325,2832,2909,3820,1860,3821, 225,1748, 310, 346,1780, # 2464
2470, 821,1993,2717,2796, 828, 877,3528,2860,2471,1702,2165,2910,2486,1789, 453, # 2480
359,2291,1676, 73,1164,1461,1127,3311, 421, 604, 314,1037, 589, 116,2487, 737, # 2496
837,1180, 111, 244, 735,6142,2261,1861,1362, 986, 523, 418, 581,2666,3822, 103, # 2512
855, 503,1414,1867,2488,1091, 657,1597, 979, 605,1316,4641,1021,2443,2078,2001, # 2528
1209, 96, 587,2166,1032, 260,1072,2153, 173, 94, 226,3244, 819,2006,4642,4114, # 2544
2203, 231,1744, 782, 97,2667, 786,3387, 887, 391, 442,2219,4326,1425,6143,2694, # 2560
633,1544,1202, 483,2015, 592,2052,1958,2472,1655, 419, 129,4327,3444,3312,1714, # 2576
1257,3078,4328,1518,1098, 865,1310,1019,1885,1512,1734, 469,2444, 148, 773, 436, # 2592
1815,1868,1128,1055,4329,1245,2756,3445,2154,1934,1039,4643, 579,1238, 932,2320, # 2608
353, 205, 801, 115,2428, 944,2321,1881, 399,2565,1211, 678, 766,3944, 335,2101, # 2624
1459,1781,1402,3945,2737,2131,1010, 844, 981,1326,1013, 550,1816,1545,2620,1335, # 2640
1008, 371,2881, 936,1419,1613,3529,1456,1395,2273,1834,2604,1317,2738,2503, 416, # 2656
1643,4330, 806,1126, 229, 591,3946,1314,1981,1576,1837,1666, 347,1790, 977,3313, # 2672
764,2861,1853, 688,2429,1920,1462, 77, 595, 415,2002,3034, 798,1192,4115,6144, # 2688
2978,4331,3035,2695,2582,2072,2566, 430,2430,1727, 842,1396,3947,3702, 613, 377, # 2704
278, 236,1417,3388,3314,3174, 757,1869, 107,3530,6145,1194, 623,2262, 207,1253, # 2720
2167,3446,3948, 492,1117,1935, 536,1838,2757,1246,4332, 696,2095,2406,1393,1572, # 2736
3175,1782, 583, 190, 253,1390,2230, 830,3126,3389, 934,3245,1703,1749,2979,1870, # 2752
2545,1656,2204, 869,2346,4116,3176,1817, 496,1764,4644, 942,1504, 404,1903,1122, # 2768
1580,3606,2945,1022, 515, 372,1735, 955,2431,3036,6146,2797,1110,2302,2798, 617, # 2784
6147, 441, 762,1771,3447,3607,3608,1904, 840,3037, 86, 939,1385, 572,1370,2445, # 2800
1336, 114,3703, 898, 294, 203,3315, 703,1583,2274, 429, 961,4333,1854,1951,3390, # 2816
2373,3704,4334,1318,1381, 966,1911,2322,1006,1155, 309, 989, 458,2718,1795,1372, # 2832
1203, 252,1689,1363,3177, 517,1936, 168,1490, 562, 193,3823,1042,4117,1835, 551, # 2848
470,4645, 395, 489,3448,1871,1465,2583,2641, 417,1493, 279,1295, 511,1236,1119, # 2864
72,1231,1982,1812,3004, 871,1564, 984,3449,1667,2696,2096,4646,2347,2833,1673, # 2880
3609, 695,3246,2668, 807,1183,4647, 890, 388,2333,1801,1457,2911,1765,1477,1031, # 2896
3316,3317,1278,3391,2799,2292,2526, 163,3450,4335,2669,1404,1802,6148,2323,2407, # 2912
1584,1728,1494,1824,1269, 298, 909,3318,1034,1632, 375, 776,1683,2061, 291, 210, # 2928
1123, 809,1249,1002,2642,3038, 206,1011,2132, 144, 975, 882,1565, 342, 667, 754, # 2944
1442,2143,1299,2303,2062, 447, 626,2205,1221,2739,2912,1144,1214,2206,2584, 760, # 2960
1715, 614, 950,1281,2670,2621, 810, 577,1287,2546,4648, 242,2168, 250,2643, 691, # 2976
123,2644, 647, 313,1029, 689,1357,2946,1650, 216, 771,1339,1306, 808,2063, 549, # 2992
913,1371,2913,2914,6149,1466,1092,1174,1196,1311,2605,2396,1783,1796,3079, 406, # 3008
2671,2117,3949,4649, 487,1825,2220,6150,2915, 448,2348,1073,6151,2397,1707, 130, # 3024
900,1598, 329, 176,1959,2527,1620,6152,2275,4336,3319,1983,2191,3705,3610,2155, # 3040
3706,1912,1513,1614,6153,1988, 646, 392,2304,1589,3320,3039,1826,1239,1352,1340, # 3056
2916, 505,2567,1709,1437,2408,2547, 906,6154,2672, 384,1458,1594,1100,1329, 710, # 3072
423,3531,2064,2231,2622,1989,2673,1087,1882, 333, 841,3005,1296,2882,2379, 580, # 3088
1937,1827,1293,2585, 601, 574, 249,1772,4118,2079,1120, 645, 901,1176,1690, 795, # 3104
2207, 478,1434, 516,1190,1530, 761,2080, 930,1264, 355, 435,1552, 644,1791, 987, # 3120
220,1364,1163,1121,1538, 306,2169,1327,1222, 546,2645, 218, 241, 610,1704,3321, # 3136
1984,1839,1966,2528, 451,6155,2586,3707,2568, 907,3178, 254,2947, 186,1845,4650, # 3152
745, 432,1757, 428,1633, 888,2246,2221,2489,3611,2118,1258,1265, 956,3127,1784, # 3168
4337,2490, 319, 510, 119, 457,3612, 274,2035,2007,4651,1409,3128, 970,2758, 590, # 3184
2800, 661,2247,4652,2008,3950,1420,1549,3080,3322,3951,1651,1375,2111, 485,2491, # 3200
1429,1156,6156,2548,2183,1495, 831,1840,2529,2446, 501,1657, 307,1894,3247,1341, # 3216
666, 899,2156,1539,2549,1559, 886, 349,2208,3081,2305,1736,3824,2170,2759,1014, # 3232
1913,1386, 542,1397,2948, 490, 368, 716, 362, 159, 282,2569,1129,1658,1288,1750, # 3248
2674, 276, 649,2016, 751,1496, 658,1818,1284,1862,2209,2087,2512,3451, 622,2834, # 3264
376, 117,1060,2053,1208,1721,1101,1443, 247,1250,3179,1792,3952,2760,2398,3953, # 3280
6157,2144,3708, 446,2432,1151,2570,3452,2447,2761,2835,1210,2448,3082, 424,2222, # 3296
1251,2449,2119,2836, 504,1581,4338, 602, 817, 857,3825,2349,2306, 357,3826,1470, # 3312
1883,2883, 255, 958, 929,2917,3248, 302,4653,1050,1271,1751,2307,1952,1430,2697, # 3328
2719,2359, 354,3180, 777, 158,2036,4339,1659,4340,4654,2308,2949,2248,1146,2232, # 3344
3532,2720,1696,2623,3827,6158,3129,1550,2698,1485,1297,1428, 637, 931,2721,2145, # 3360
914,2550,2587, 81,2450, 612, 827,2646,1242,4655,1118,2884, 472,1855,3181,3533, # 3376
3534, 569,1353,2699,1244,1758,2588,4119,2009,2762,2171,3709,1312,1531,6159,1152, # 3392
1938, 134,1830, 471,3710,2276,1112,1535,3323,3453,3535, 982,1337,2950, 488, 826, # 3408
674,1058,1628,4120,2017, 522,2399, 211, 568,1367,3454, 350, 293,1872,1139,3249, # 3424
1399,1946,3006,1300,2360,3324, 588, 736,6160,2606, 744, 669,3536,3828,6161,1358, # 3440
199, 723, 848, 933, 851,1939,1505,1514,1338,1618,1831,4656,1634,3613, 443,2740, # 3456
3829, 717,1947, 491,1914,6162,2551,1542,4121,1025,6163,1099,1223, 198,3040,2722, # 3472
370, 410,1905,2589, 998,1248,3182,2380, 519,1449,4122,1710, 947, 928,1153,4341, # 3488
2277, 344,2624,1511, 615, 105, 161,1212,1076,1960,3130,2054,1926,1175,1906,2473, # 3504
414,1873,2801,6164,2309, 315,1319,3325, 318,2018,2146,2157, 963, 631, 223,4342, # 3520
4343,2675, 479,3711,1197,2625,3712,2676,2361,6165,4344,4123,6166,2451,3183,1886, # 3536
2184,1674,1330,1711,1635,1506, 799, 219,3250,3083,3954,1677,3713,3326,2081,3614, # 3552
1652,2073,4657,1147,3041,1752, 643,1961, 147,1974,3955,6167,1716,2037, 918,3007, # 3568
1994, 120,1537, 118, 609,3184,4345, 740,3455,1219, 332,1615,3830,6168,1621,2980, # 3584
1582, 783, 212, 553,2350,3714,1349,2433,2082,4124, 889,6169,2310,1275,1410, 973, # 3600
166,1320,3456,1797,1215,3185,2885,1846,2590,2763,4658, 629, 822,3008, 763, 940, # 3616
1990,2862, 439,2409,1566,1240,1622, 926,1282,1907,2764, 654,2210,1607, 327,1130, # 3632
3956,1678,1623,6170,2434,2192, 686, 608,3831,3715, 903,3957,3042,6171,2741,1522, # 3648
1915,1105,1555,2552,1359, 323,3251,4346,3457, 738,1354,2553,2311,2334,1828,2003, # 3664
3832,1753,2351,1227,6172,1887,4125,1478,6173,2410,1874,1712,1847, 520,1204,2607, # 3680
264,4659, 836,2677,2102, 600,4660,3833,2278,3084,6174,4347,3615,1342, 640, 532, # 3696
543,2608,1888,2400,2591,1009,4348,1497, 341,1737,3616,2723,1394, 529,3252,1321, # 3712
983,4661,1515,2120, 971,2592, 924, 287,1662,3186,4349,2700,4350,1519, 908,1948, # 3728
2452, 156, 796,1629,1486,2223,2055, 694,4126,1259,1036,3392,1213,2249,2742,1889, # 3744
1230,3958,1015, 910, 408, 559,3617,4662, 746, 725, 935,4663,3959,3009,1289, 563, # 3760
867,4664,3960,1567,2981,2038,2626, 988,2263,2381,4351, 143,2374, 704,1895,6175, # 3776
1188,3716,2088, 673,3085,2362,4352, 484,1608,1921,2765,2918, 215, 904,3618,3537, # 3792
894, 509, 976,3043,2701,3961,4353,2837,2982, 498,6176,6177,1102,3538,1332,3393, # 3808
1487,1636,1637, 233, 245,3962, 383, 650, 995,3044, 460,1520,1206,2352, 749,3327, # 3824
530, 700, 389,1438,1560,1773,3963,2264, 719,2951,2724,3834, 870,1832,1644,1000, # 3840
839,2474,3717, 197,1630,3394, 365,2886,3964,1285,2133, 734, 922, 818,1106, 732, # 3856
480,2083,1774,3458, 923,2279,1350, 221,3086, 85,2233,2234,3835,1585,3010,2147, # 3872
1387,1705,2382,1619,2475, 133, 239,2802,1991,1016,2084,2383, 411,2838,1113, 651, # 3888
1985,1160,3328, 990,1863,3087,1048,1276,2647, 265,2627,1599,3253,2056, 150, 638, # 3904
2019, 656, 853, 326,1479, 680,1439,4354,1001,1759, 413,3459,3395,2492,1431, 459, # 3920
4355,1125,3329,2265,1953,1450,2065,2863, 849, 351,2678,3131,3254,3255,1104,1577, # 3936
227,1351,1645,2453,2193,1421,2887, 812,2121, 634, 95,2435, 201,2312,4665,1646, # 3952
1671,2743,1601,2554,2702,2648,2280,1315,1366,2089,3132,1573,3718,3965,1729,1189, # 3968
328,2679,1077,1940,1136, 558,1283, 964,1195, 621,2074,1199,1743,3460,3619,1896, # 3984
1916,1890,3836,2952,1154,2112,1064, 862, 378,3011,2066,2113,2803,1568,2839,6178, # 4000
3088,2919,1941,1660,2004,1992,2194, 142, 707,1590,1708,1624,1922,1023,1836,1233, # 4016
1004,2313, 789, 741,3620,6179,1609,2411,1200,4127,3719,3720,4666,2057,3721, 593, # 4032
2840, 367,2920,1878,6180,3461,1521, 628,1168, 692,2211,2649, 300, 720,2067,2571, # 4048
2953,3396, 959,2504,3966,3539,3462,1977, 701,6181, 954,1043, 800, 681, 183,3722, # 4064
1803,1730,3540,4128,2103, 815,2314, 174, 467, 230,2454,1093,2134, 755,3541,3397, # 4080
1141,1162,6182,1738,2039, 270,3256,2513,1005,1647,2185,3837, 858,1679,1897,1719, # 4096
2954,2324,1806, 402, 670, 167,4129,1498,2158,2104, 750,6183, 915, 189,1680,1551, # 4112
455,4356,1501,2455, 405,1095,2955, 338,1586,1266,1819, 570, 641,1324, 237,1556, # 4128
2650,1388,3723,6184,1368,2384,1343,1978,3089,2436, 879,3724, 792,1191, 758,3012, # 4144
1411,2135,1322,4357, 240,4667,1848,3725,1574,6185, 420,3045,1546,1391, 714,4358, # 4160
1967, 941,1864, 863, 664, 426, 560,1731,2680,1785,2864,1949,2363, 403,3330,1415, # 4176
1279,2136,1697,2335, 204, 721,2097,3838, 90,6186,2085,2505, 191,3967, 124,2148, # 4192
1376,1798,1178,1107,1898,1405, 860,4359,1243,1272,2375,2983,1558,2456,1638, 113, # 4208
3621, 578,1923,2609, 880, 386,4130, 784,2186,2266,1422,2956,2172,1722, 497, 263, # 4224
2514,1267,2412,2610, 177,2703,3542, 774,1927,1344, 616,1432,1595,1018, 172,4360, # 4240
2325, 911,4361, 438,1468,3622, 794,3968,2024,2173,1681,1829,2957, 945, 895,3090, # 4256
575,2212,2476, 475,2401,2681, 785,2744,1745,2293,2555,1975,3133,2865, 394,4668, # 4272
3839, 635,4131, 639, 202,1507,2195,2766,1345,1435,2572,3726,1908,1184,1181,2457, # 4288
3727,3134,4362, 843,2611, 437, 916,4669, 234, 769,1884,3046,3047,3623, 833,6187, # 4304
1639,2250,2402,1355,1185,2010,2047, 999, 525,1732,1290,1488,2612, 948,1578,3728, # 4320
2413,2477,1216,2725,2159, 334,3840,1328,3624,2921,1525,4132, 564,1056, 891,4363, # 4336
1444,1698,2385,2251,3729,1365,2281,2235,1717,6188, 864,3841,2515, 444, 527,2767, # 4352
2922,3625, 544, 461,6189, 566, 209,2437,3398,2098,1065,2068,3331,3626,3257,2137, # 4368 #last 512
# Everything below is of no interest for detection purposes
2138,2122,3730,2888,1995,1820,1044,6190,6191,6192,6193,6194,6195,6196,6197,6198, # 4384
6199,6200,6201,6202,6203,6204,6205,4670,6206,6207,6208,6209,6210,6211,6212,6213, # 4400
6214,6215,6216,6217,6218,6219,6220,6221,6222,6223,6224,6225,6226,6227,6228,6229, # 4416
6230,6231,6232,6233,6234,6235,6236,6237,3187,6238,6239,3969,6240,6241,6242,6243, # 4432
6244,4671,6245,6246,4672,6247,6248,4133,6249,6250,4364,6251,2923,2556,2613,4673, # 4448
4365,3970,6252,6253,6254,6255,4674,6256,6257,6258,2768,2353,4366,4675,4676,3188, # 4464
4367,3463,6259,4134,4677,4678,6260,2267,6261,3842,3332,4368,3543,6262,6263,6264, # 4480
3013,1954,1928,4135,4679,6265,6266,2478,3091,6267,4680,4369,6268,6269,1699,6270, # 4496
3544,4136,4681,6271,4137,6272,4370,2804,6273,6274,2593,3971,3972,4682,6275,2236, # 4512
4683,6276,6277,4684,6278,6279,4138,3973,4685,6280,6281,3258,6282,6283,6284,6285, # 4528
3974,4686,2841,3975,6286,6287,3545,6288,6289,4139,4687,4140,6290,4141,6291,4142, # 4544
6292,6293,3333,6294,6295,6296,4371,6297,3399,6298,6299,4372,3976,6300,6301,6302, # 4560
4373,6303,6304,3843,3731,6305,4688,4374,6306,6307,3259,2294,6308,3732,2530,4143, # 4576
6309,4689,6310,6311,6312,3048,6313,6314,4690,3733,2237,6315,6316,2282,3334,6317, # 4592
6318,3844,6319,6320,4691,6321,3400,4692,6322,4693,6323,3049,6324,4375,6325,3977, # 4608
6326,6327,6328,3546,6329,4694,3335,6330,4695,4696,6331,6332,6333,6334,4376,3978, # 4624
6335,4697,3979,4144,6336,3980,4698,6337,6338,6339,6340,6341,4699,4700,4701,6342, # 4640
6343,4702,6344,6345,4703,6346,6347,4704,6348,4705,4706,3135,6349,4707,6350,4708, # 4656
6351,4377,6352,4709,3734,4145,6353,2506,4710,3189,6354,3050,4711,3981,6355,3547, # 4672
3014,4146,4378,3735,2651,3845,3260,3136,2224,1986,6356,3401,6357,4712,2594,3627, # 4688
3137,2573,3736,3982,4713,3628,4714,4715,2682,3629,4716,6358,3630,4379,3631,6359, # 4704
6360,6361,3983,6362,6363,6364,6365,4147,3846,4717,6366,6367,3737,2842,6368,4718, # 4720
2628,6369,3261,6370,2386,6371,6372,3738,3984,4719,3464,4720,3402,6373,2924,3336, # 4736
4148,2866,6374,2805,3262,4380,2704,2069,2531,3138,2806,2984,6375,2769,6376,4721, # 4752
4722,3403,6377,6378,3548,6379,6380,2705,3092,1979,4149,2629,3337,2889,6381,3338, # 4768
4150,2557,3339,4381,6382,3190,3263,3739,6383,4151,4723,4152,2558,2574,3404,3191, # 4784
6384,6385,4153,6386,4724,4382,6387,6388,4383,6389,6390,4154,6391,4725,3985,6392, # 4800
3847,4155,6393,6394,6395,6396,6397,3465,6398,4384,6399,6400,6401,6402,6403,6404, # 4816
4156,6405,6406,6407,6408,2123,6409,6410,2326,3192,4726,6411,6412,6413,6414,4385, # 4832
4157,6415,6416,4158,6417,3093,3848,6418,3986,6419,6420,3849,6421,6422,6423,4159, # 4848
6424,6425,4160,6426,3740,6427,6428,6429,6430,3987,6431,4727,6432,2238,6433,6434, # 4864
4386,3988,6435,6436,3632,6437,6438,2843,6439,6440,6441,6442,3633,6443,2958,6444, # 4880
6445,3466,6446,2364,4387,3850,6447,4388,2959,3340,6448,3851,6449,4728,6450,6451, # 4896
3264,4729,6452,3193,6453,4389,4390,2706,3341,4730,6454,3139,6455,3194,6456,3051, # 4912
2124,3852,1602,4391,4161,3853,1158,3854,4162,3989,4392,3990,4731,4732,4393,2040, # 4928
4163,4394,3265,6457,2807,3467,3855,6458,6459,6460,3991,3468,4733,4734,6461,3140, # 4944
2960,6462,4735,6463,6464,6465,6466,4736,4737,4738,4739,6467,6468,4164,2403,3856, # 4960
6469,6470,2770,2844,6471,4740,6472,6473,6474,6475,6476,6477,6478,3195,6479,4741, # 4976
4395,6480,2867,6481,4742,2808,6482,2493,4165,6483,6484,6485,6486,2295,4743,6487, # 4992
6488,6489,3634,6490,6491,6492,6493,6494,6495,6496,2985,4744,6497,6498,4745,6499, # 5008
6500,2925,3141,4166,6501,6502,4746,6503,6504,4747,6505,6506,6507,2890,6508,6509, # 5024
6510,6511,6512,6513,6514,6515,6516,6517,6518,6519,3469,4167,6520,6521,6522,4748, # 5040
4396,3741,4397,4749,4398,3342,2125,4750,6523,4751,4752,4753,3052,6524,2961,4168, # 5056
6525,4754,6526,4755,4399,2926,4169,6527,3857,6528,4400,4170,6529,4171,6530,6531, # 5072
2595,6532,6533,6534,6535,3635,6536,6537,6538,6539,6540,6541,6542,4756,6543,6544, # 5088
6545,6546,6547,6548,4401,6549,6550,6551,6552,4402,3405,4757,4403,6553,6554,6555, # 5104
4172,3742,6556,6557,6558,3992,3636,6559,6560,3053,2726,6561,3549,4173,3054,4404, # 5120
6562,6563,3993,4405,3266,3550,2809,4406,6564,6565,6566,4758,4759,6567,3743,6568, # 5136
4760,3744,4761,3470,6569,6570,6571,4407,6572,3745,4174,6573,4175,2810,4176,3196, # 5152
4762,6574,4177,6575,6576,2494,2891,3551,6577,6578,3471,6579,4408,6580,3015,3197, # 5168
6581,3343,2532,3994,3858,6582,3094,3406,4409,6583,2892,4178,4763,4410,3016,4411, # 5184
6584,3995,3142,3017,2683,6585,4179,6586,6587,4764,4412,6588,6589,4413,6590,2986, # 5200
6591,2962,3552,6592,2963,3472,6593,6594,4180,4765,6595,6596,2225,3267,4414,6597, # 5216
3407,3637,4766,6598,6599,3198,6600,4415,6601,3859,3199,6602,3473,4767,2811,4416, # 5232
1856,3268,3200,2575,3996,3997,3201,4417,6603,3095,2927,6604,3143,6605,2268,6606, # 5248
3998,3860,3096,2771,6607,6608,3638,2495,4768,6609,3861,6610,3269,2745,4769,4181, # 5264
3553,6611,2845,3270,6612,6613,6614,3862,6615,6616,4770,4771,6617,3474,3999,4418, # 5280
4419,6618,3639,3344,6619,4772,4182,6620,2126,6621,6622,6623,4420,4773,6624,3018, # 5296
6625,4774,3554,6626,4183,2025,3746,6627,4184,2707,6628,4421,4422,3097,1775,4185, # 5312
3555,6629,6630,2868,6631,6632,4423,6633,6634,4424,2414,2533,2928,6635,4186,2387, # 5328
6636,4775,6637,4187,6638,1891,4425,3202,3203,6639,6640,4776,6641,3345,6642,6643, # 5344
3640,6644,3475,3346,3641,4000,6645,3144,6646,3098,2812,4188,3642,3204,6647,3863, # 5360
3476,6648,3864,6649,4426,4001,6650,6651,6652,2576,6653,4189,4777,6654,6655,6656, # 5376
2846,6657,3477,3205,4002,6658,4003,6659,3347,2252,6660,6661,6662,4778,6663,6664, # 5392
6665,6666,6667,6668,6669,4779,4780,2048,6670,3478,3099,6671,3556,3747,4004,6672, # 5408
6673,6674,3145,4005,3748,6675,6676,6677,6678,6679,3408,6680,6681,6682,6683,3206, # 5424
3207,6684,6685,4781,4427,6686,4782,4783,4784,6687,6688,6689,4190,6690,6691,3479, # 5440
6692,2746,6693,4428,6694,6695,6696,6697,6698,6699,4785,6700,6701,3208,2727,6702, # 5456
3146,6703,6704,3409,2196,6705,4429,6706,6707,6708,2534,1996,6709,6710,6711,2747, # 5472
6712,6713,6714,4786,3643,6715,4430,4431,6716,3557,6717,4432,4433,6718,6719,6720, # 5488
6721,3749,6722,4006,4787,6723,6724,3644,4788,4434,6725,6726,4789,2772,6727,6728, # 5504
6729,6730,6731,2708,3865,2813,4435,6732,6733,4790,4791,3480,6734,6735,6736,6737, # 5520
4436,3348,6738,3410,4007,6739,6740,4008,6741,6742,4792,3411,4191,6743,6744,6745, # 5536
6746,6747,3866,6748,3750,6749,6750,6751,6752,6753,6754,6755,3867,6756,4009,6757, # 5552
4793,4794,6758,2814,2987,6759,6760,6761,4437,6762,6763,6764,6765,3645,6766,6767, # 5568
3481,4192,6768,3751,6769,6770,2174,6771,3868,3752,6772,6773,6774,4193,4795,4438, # 5584
3558,4796,4439,6775,4797,6776,6777,4798,6778,4799,3559,4800,6779,6780,6781,3482, # 5600
6782,2893,6783,6784,4194,4801,4010,6785,6786,4440,6787,4011,6788,6789,6790,6791, # 5616
6792,6793,4802,6794,6795,6796,4012,6797,6798,6799,6800,3349,4803,3483,6801,4804, # 5632
4195,6802,4013,6803,6804,4196,6805,4014,4015,6806,2847,3271,2848,6807,3484,6808, # 5648
6809,6810,4441,6811,4442,4197,4443,3272,4805,6812,3412,4016,1579,6813,6814,4017, # 5664
6815,3869,6816,2964,6817,4806,6818,6819,4018,3646,6820,6821,4807,4019,4020,6822, # 5680
6823,3560,6824,6825,4021,4444,6826,4198,6827,6828,4445,6829,6830,4199,4808,6831, # 5696
6832,6833,3870,3019,2458,6834,3753,3413,3350,6835,4809,3871,4810,3561,4446,6836, # 5712
6837,4447,4811,4812,6838,2459,4448,6839,4449,6840,6841,4022,3872,6842,4813,4814, # 5728
6843,6844,4815,4200,4201,4202,6845,4023,6846,6847,4450,3562,3873,6848,6849,4816, # 5744
4817,6850,4451,4818,2139,6851,3563,6852,6853,3351,6854,6855,3352,4024,2709,3414, # 5760
4203,4452,6856,4204,6857,6858,3874,3875,6859,6860,4819,6861,6862,6863,6864,4453, # 5776
3647,6865,6866,4820,6867,6868,6869,6870,4454,6871,2869,6872,6873,4821,6874,3754, # 5792
6875,4822,4205,6876,6877,6878,3648,4206,4455,6879,4823,6880,4824,3876,6881,3055, # 5808
4207,6882,3415,6883,6884,6885,4208,4209,6886,4210,3353,6887,3354,3564,3209,3485, # 5824
2652,6888,2728,6889,3210,3755,6890,4025,4456,6891,4825,6892,6893,6894,6895,4211, # 5840
6896,6897,6898,4826,6899,6900,4212,6901,4827,6902,2773,3565,6903,4828,6904,6905, # 5856
6906,6907,3649,3650,6908,2849,3566,6909,3567,3100,6910,6911,6912,6913,6914,6915, # 5872
4026,6916,3355,4829,3056,4457,3756,6917,3651,6918,4213,3652,2870,6919,4458,6920, # 5888
2438,6921,6922,3757,2774,4830,6923,3356,4831,4832,6924,4833,4459,3653,2507,6925, # 5904
4834,2535,6926,6927,3273,4027,3147,6928,3568,6929,6930,6931,4460,6932,3877,4461, # 5920
2729,3654,6933,6934,6935,6936,2175,4835,2630,4214,4028,4462,4836,4215,6937,3148, # 5936
4216,4463,4837,4838,4217,6938,6939,2850,4839,6940,4464,6941,6942,6943,4840,6944, # 5952
4218,3274,4465,6945,6946,2710,6947,4841,4466,6948,6949,2894,6950,6951,4842,6952, # 5968
4219,3057,2871,6953,6954,6955,6956,4467,6957,2711,6958,6959,6960,3275,3101,4843, # 5984
6961,3357,3569,6962,4844,6963,6964,4468,4845,3570,6965,3102,4846,3758,6966,4847, # 6000
3878,4848,4849,4029,6967,2929,3879,4850,4851,6968,6969,1733,6970,4220,6971,6972, # 6016
6973,6974,6975,6976,4852,6977,6978,6979,6980,6981,6982,3759,6983,6984,6985,3486, # 6032
3487,6986,3488,3416,6987,6988,6989,6990,6991,6992,6993,6994,6995,6996,6997,4853, # 6048
6998,6999,4030,7000,7001,3211,7002,7003,4221,7004,7005,3571,4031,7006,3572,7007, # 6064
2614,4854,2577,7008,7009,2965,3655,3656,4855,2775,3489,3880,4222,4856,3881,4032, # 6080
3882,3657,2730,3490,4857,7010,3149,7011,4469,4858,2496,3491,4859,2283,7012,7013, # 6096
7014,2365,4860,4470,7015,7016,3760,7017,7018,4223,1917,7019,7020,7021,4471,7022, # 6112
2776,4472,7023,7024,7025,7026,4033,7027,3573,4224,4861,4034,4862,7028,7029,1929, # 6128
3883,4035,7030,4473,3058,7031,2536,3761,3884,7032,4036,7033,2966,2895,1968,4474, # 6144
3276,4225,3417,3492,4226,2105,7034,7035,1754,2596,3762,4227,4863,4475,3763,4864, # 6160
3764,2615,2777,3103,3765,3658,3418,4865,2296,3766,2815,7036,7037,7038,3574,2872, # 6176
3277,4476,7039,4037,4477,7040,7041,4038,7042,7043,7044,7045,7046,7047,2537,7048, # 6192
7049,7050,7051,7052,7053,7054,4478,7055,7056,3767,3659,4228,3575,7057,7058,4229, # 6208
7059,7060,7061,3660,7062,3212,7063,3885,4039,2460,7064,7065,7066,7067,7068,7069, # 6224
7070,7071,7072,7073,7074,4866,3768,4867,7075,7076,7077,7078,4868,3358,3278,2653, # 6240
7079,7080,4479,3886,7081,7082,4869,7083,7084,7085,7086,7087,7088,2538,7089,7090, # 6256
7091,4040,3150,3769,4870,4041,2896,3359,4230,2930,7092,3279,7093,2967,4480,3213, # 6272
4481,3661,7094,7095,7096,7097,7098,7099,7100,7101,7102,2461,3770,7103,7104,4231, # 6288
3151,7105,7106,7107,4042,3662,7108,7109,4871,3663,4872,4043,3059,7110,7111,7112, # 6304
3493,2988,7113,4873,7114,7115,7116,3771,4874,7117,7118,4232,4875,7119,3576,2336, # 6320
4876,7120,4233,3419,4044,4877,4878,4482,4483,4879,4484,4234,7121,3772,4880,1045, # 6336
3280,3664,4881,4882,7122,7123,7124,7125,4883,7126,2778,7127,4485,4486,7128,4884, # 6352
3214,3887,7129,7130,3215,7131,4885,4045,7132,7133,4046,7134,7135,7136,7137,7138, # 6368
7139,7140,7141,7142,7143,4235,7144,4886,7145,7146,7147,4887,7148,7149,7150,4487, # 6384
4047,4488,7151,7152,4888,4048,2989,3888,7153,3665,7154,4049,7155,7156,7157,7158, # 6400
7159,7160,2931,4889,4890,4489,7161,2631,3889,4236,2779,7162,7163,4891,7164,3060, # 6416
7165,1672,4892,7166,4893,4237,3281,4894,7167,7168,3666,7169,3494,7170,7171,4050, # 6432
7172,7173,3104,3360,3420,4490,4051,2684,4052,7174,4053,7175,7176,7177,2253,4054, # 6448
7178,7179,4895,7180,3152,3890,3153,4491,3216,7181,7182,7183,2968,4238,4492,4055, # 6464
7184,2990,7185,2479,7186,7187,4493,7188,7189,7190,7191,7192,4896,7193,4897,2969, # 6480
4494,4898,7194,3495,7195,7196,4899,4495,7197,3105,2731,7198,4900,7199,7200,7201, # 6496
4056,7202,3361,7203,7204,4496,4901,4902,7205,4497,7206,7207,2315,4903,7208,4904, # 6512
7209,4905,2851,7210,7211,3577,7212,3578,4906,7213,4057,3667,4907,7214,4058,2354, # 6528
3891,2376,3217,3773,7215,7216,7217,7218,7219,4498,7220,4908,3282,2685,7221,3496, # 6544
4909,2632,3154,4910,7222,2337,7223,4911,7224,7225,7226,4912,4913,3283,4239,4499, # 6560
7227,2816,7228,7229,7230,7231,7232,7233,7234,4914,4500,4501,7235,7236,7237,2686, # 6576
7238,4915,7239,2897,4502,7240,4503,7241,2516,7242,4504,3362,3218,7243,7244,7245, # 6592
4916,7246,7247,4505,3363,7248,7249,7250,7251,3774,4506,7252,7253,4917,7254,7255, # 6608
3284,2991,4918,4919,3219,3892,4920,3106,3497,4921,7256,7257,7258,4922,7259,4923, # 6624
3364,4507,4508,4059,7260,4240,3498,7261,7262,4924,7263,2992,3893,4060,3220,7264, # 6640
7265,7266,7267,7268,7269,4509,3775,7270,2817,7271,4061,4925,4510,3776,7272,4241, # 6656
4511,3285,7273,7274,3499,7275,7276,7277,4062,4512,4926,7278,3107,3894,7279,7280, # 6672
4927,7281,4513,7282,7283,3668,7284,7285,4242,4514,4243,7286,2058,4515,4928,4929, # 6688
4516,7287,3286,4244,7288,4517,7289,7290,7291,3669,7292,7293,4930,4931,4932,2355, # 6704
4933,7294,2633,4518,7295,4245,7296,7297,4519,7298,7299,4520,4521,4934,7300,4246, # 6720
4522,7301,7302,7303,3579,7304,4247,4935,7305,4936,7306,7307,7308,7309,3777,7310, # 6736
4523,7311,7312,7313,4248,3580,7314,4524,3778,4249,7315,3581,7316,3287,7317,3221, # 6752
7318,4937,7319,7320,7321,7322,7323,7324,4938,4939,7325,4525,7326,7327,7328,4063, # 6768
7329,7330,4940,7331,7332,4941,7333,4526,7334,3500,2780,1741,4942,2026,1742,7335, # 6784
7336,3582,4527,2388,7337,7338,7339,4528,7340,4250,4943,7341,7342,7343,4944,7344, # 6800
7345,7346,3020,7347,4945,7348,7349,7350,7351,3895,7352,3896,4064,3897,7353,7354, # 6816
7355,4251,7356,7357,3898,7358,3779,7359,3780,3288,7360,7361,4529,7362,4946,4530, # 6832
2027,7363,3899,4531,4947,3222,3583,7364,4948,7365,7366,7367,7368,4949,3501,4950, # 6848
3781,4951,4532,7369,2517,4952,4252,4953,3155,7370,4954,4955,4253,2518,4533,7371, # 6864
7372,2712,4254,7373,7374,7375,3670,4956,3671,7376,2389,3502,4065,7377,2338,7378, # 6880
7379,7380,7381,3061,7382,4957,7383,7384,7385,7386,4958,4534,7387,7388,2993,7389, # 6896
3062,7390,4959,7391,7392,7393,4960,3108,4961,7394,4535,7395,4962,3421,4536,7396, # 6912
4963,7397,4964,1857,7398,4965,7399,7400,2176,3584,4966,7401,7402,3422,4537,3900, # 6928
3585,7403,3782,7404,2852,7405,7406,7407,4538,3783,2654,3423,4967,4539,7408,3784, # 6944
3586,2853,4540,4541,7409,3901,7410,3902,7411,7412,3785,3109,2327,3903,7413,7414, # 6960
2970,4066,2932,7415,7416,7417,3904,3672,3424,7418,4542,4543,4544,7419,4968,7420, # 6976
7421,4255,7422,7423,7424,7425,7426,4067,7427,3673,3365,4545,7428,3110,2559,3674, # 6992
7429,7430,3156,7431,7432,3503,7433,3425,4546,7434,3063,2873,7435,3223,4969,4547, # 7008
4548,2898,4256,4068,7436,4069,3587,3786,2933,3787,4257,4970,4971,3788,7437,4972, # 7024
3064,7438,4549,7439,7440,7441,7442,7443,4973,3905,7444,2874,7445,7446,7447,7448, # 7040
3021,7449,4550,3906,3588,4974,7450,7451,3789,3675,7452,2578,7453,4070,7454,7455, # 7056
7456,4258,3676,7457,4975,7458,4976,4259,3790,3504,2634,4977,3677,4551,4260,7459, # 7072
7460,7461,7462,3907,4261,4978,7463,7464,7465,7466,4979,4980,7467,7468,2213,4262, # 7088
7469,7470,7471,3678,4981,7472,2439,7473,4263,3224,3289,7474,3908,2415,4982,7475, # 7104
4264,7476,4983,2655,7477,7478,2732,4552,2854,2875,7479,7480,4265,7481,4553,4984, # 7120
7482,7483,4266,7484,3679,3366,3680,2818,2781,2782,3367,3589,4554,3065,7485,4071, # 7136
2899,7486,7487,3157,2462,4072,4555,4073,4985,4986,3111,4267,2687,3368,4556,4074, # 7152
3791,4268,7488,3909,2783,7489,2656,1962,3158,4557,4987,1963,3159,3160,7490,3112, # 7168
4988,4989,3022,4990,4991,3792,2855,7491,7492,2971,4558,7493,7494,4992,7495,7496, # 7184
7497,7498,4993,7499,3426,4559,4994,7500,3681,4560,4269,4270,3910,7501,4075,4995, # 7200
4271,7502,7503,4076,7504,4996,7505,3225,4997,4272,4077,2819,3023,7506,7507,2733, # 7216
4561,7508,4562,7509,3369,3793,7510,3590,2508,7511,7512,4273,3113,2994,2616,7513, # 7232
7514,7515,7516,7517,7518,2820,3911,4078,2748,7519,7520,4563,4998,7521,7522,7523, # 7248
7524,4999,4274,7525,4564,3682,2239,4079,4565,7526,7527,7528,7529,5000,7530,7531, # 7264
5001,4275,3794,7532,7533,7534,3066,5002,4566,3161,7535,7536,4080,7537,3162,7538, # 7280
7539,4567,7540,7541,7542,7543,7544,7545,5003,7546,4568,7547,7548,7549,7550,7551, # 7296
7552,7553,7554,7555,7556,5004,7557,7558,7559,5005,7560,3795,7561,4569,7562,7563, # 7312
7564,2821,3796,4276,4277,4081,7565,2876,7566,5006,7567,7568,2900,7569,3797,3912, # 7328
7570,7571,7572,4278,7573,7574,7575,5007,7576,7577,5008,7578,7579,4279,2934,7580, # 7344
7581,5009,7582,4570,7583,4280,7584,7585,7586,4571,4572,3913,7587,4573,3505,7588, # 7360
5010,7589,7590,7591,7592,3798,4574,7593,7594,5011,7595,4281,7596,7597,7598,4282, # 7376
5012,7599,7600,5013,3163,7601,5014,7602,3914,7603,7604,2734,4575,4576,4577,7605, # 7392
7606,7607,7608,7609,3506,5015,4578,7610,4082,7611,2822,2901,2579,3683,3024,4579, # 7408
3507,7612,4580,7613,3226,3799,5016,7614,7615,7616,7617,7618,7619,7620,2995,3290, # 7424
7621,4083,7622,5017,7623,7624,7625,7626,7627,4581,3915,7628,3291,7629,5018,7630, # 7440
7631,7632,7633,4084,7634,7635,3427,3800,7636,7637,4582,7638,5019,4583,5020,7639, # 7456
3916,7640,3801,5021,4584,4283,7641,7642,3428,3591,2269,7643,2617,7644,4585,3592, # 7472
7645,4586,2902,7646,7647,3227,5022,7648,4587,7649,4284,7650,7651,7652,4588,2284, # 7488
7653,5023,7654,7655,7656,4589,5024,3802,7657,7658,5025,3508,4590,7659,7660,7661, # 7504
1969,5026,7662,7663,3684,1821,2688,7664,2028,2509,4285,7665,2823,1841,7666,2689, # 7520
3114,7667,3917,4085,2160,5027,5028,2972,7668,5029,7669,7670,7671,3593,4086,7672, # 7536
4591,4087,5030,3803,7673,7674,7675,7676,7677,7678,7679,4286,2366,4592,4593,3067, # 7552
2328,7680,7681,4594,3594,3918,2029,4287,7682,5031,3919,3370,4288,4595,2856,7683, # 7568
3509,7684,7685,5032,5033,7686,7687,3804,2784,7688,7689,7690,7691,3371,7692,7693, # 7584
2877,5034,7694,7695,3920,4289,4088,7696,7697,7698,5035,7699,5036,4290,5037,5038, # 7600
5039,7700,7701,7702,5040,5041,3228,7703,1760,7704,5042,3229,4596,2106,4089,7705, # 7616
4597,2824,5043,2107,3372,7706,4291,4090,5044,7707,4091,7708,5045,3025,3805,4598, # 7632
4292,4293,4294,3373,7709,4599,7710,5046,7711,7712,5047,5048,3806,7713,7714,7715, # 7648
5049,7716,7717,7718,7719,4600,5050,7720,7721,7722,5051,7723,4295,3429,7724,7725, # 7664
7726,7727,3921,7728,3292,5052,4092,7729,7730,7731,7732,7733,7734,7735,5053,5054, # 7680
7736,7737,7738,7739,3922,3685,7740,7741,7742,7743,2635,5055,7744,5056,4601,7745, # 7696
7746,2560,7747,7748,7749,7750,3923,7751,7752,7753,7754,7755,4296,2903,7756,7757, # 7712
7758,7759,7760,3924,7761,5057,4297,7762,7763,5058,4298,7764,4093,7765,7766,5059, # 7728
3925,7767,7768,7769,7770,7771,7772,7773,7774,7775,7776,3595,7777,4299,5060,4094, # 7744
7778,3293,5061,7779,7780,4300,7781,7782,4602,7783,3596,7784,7785,3430,2367,7786, # 7760
3164,5062,5063,4301,7787,7788,4095,5064,5065,7789,3374,3115,7790,7791,7792,7793, # 7776
7794,7795,7796,3597,4603,7797,7798,3686,3116,3807,5066,7799,7800,5067,7801,7802, # 7792
4604,4302,5068,4303,4096,7803,7804,3294,7805,7806,5069,4605,2690,7807,3026,7808, # 7808
7809,7810,7811,7812,7813,7814,7815,7816,7817,7818,7819,7820,7821,7822,7823,7824, # 7824
7825,7826,7827,7828,7829,7830,7831,7832,7833,7834,7835,7836,7837,7838,7839,7840, # 7840
7841,7842,7843,7844,7845,7846,7847,7848,7849,7850,7851,7852,7853,7854,7855,7856, # 7856
7857,7858,7859,7860,7861,7862,7863,7864,7865,7866,7867,7868,7869,7870,7871,7872, # 7872
7873,7874,7875,7876,7877,7878,7879,7880,7881,7882,7883,7884,7885,7886,7887,7888, # 7888
7889,7890,7891,7892,7893,7894,7895,7896,7897,7898,7899,7900,7901,7902,7903,7904, # 7904
7905,7906,7907,7908,7909,7910,7911,7912,7913,7914,7915,7916,7917,7918,7919,7920, # 7920
7921,7922,7923,7924,3926,7925,7926,7927,7928,7929,7930,7931,7932,7933,7934,7935, # 7936
7936,7937,7938,7939,7940,7941,7942,7943,7944,7945,7946,7947,7948,7949,7950,7951, # 7952
7952,7953,7954,7955,7956,7957,7958,7959,7960,7961,7962,7963,7964,7965,7966,7967, # 7968
7968,7969,7970,7971,7972,7973,7974,7975,7976,7977,7978,7979,7980,7981,7982,7983, # 7984
7984,7985,7986,7987,7988,7989,7990,7991,7992,7993,7994,7995,7996,7997,7998,7999, # 8000
8000,8001,8002,8003,8004,8005,8006,8007,8008,8009,8010,8011,8012,8013,8014,8015, # 8016
8016,8017,8018,8019,8020,8021,8022,8023,8024,8025,8026,8027,8028,8029,8030,8031, # 8032
8032,8033,8034,8035,8036,8037,8038,8039,8040,8041,8042,8043,8044,8045,8046,8047, # 8048
8048,8049,8050,8051,8052,8053,8054,8055,8056,8057,8058,8059,8060,8061,8062,8063, # 8064
8064,8065,8066,8067,8068,8069,8070,8071,8072,8073,8074,8075,8076,8077,8078,8079, # 8080
8080,8081,8082,8083,8084,8085,8086,8087,8088,8089,8090,8091,8092,8093,8094,8095, # 8096
8096,8097,8098,8099,8100,8101,8102,8103,8104,8105,8106,8107,8108,8109,8110,8111, # 8112
8112,8113,8114,8115,8116,8117,8118,8119,8120,8121,8122,8123,8124,8125,8126,8127, # 8128
8128,8129,8130,8131,8132,8133,8134,8135,8136,8137,8138,8139,8140,8141,8142,8143, # 8144
8144,8145,8146,8147,8148,8149,8150,8151,8152,8153,8154,8155,8156,8157,8158,8159, # 8160
8160,8161,8162,8163,8164,8165,8166,8167,8168,8169,8170,8171,8172,8173,8174,8175, # 8176
8176,8177,8178,8179,8180,8181,8182,8183,8184,8185,8186,8187,8188,8189,8190,8191, # 8192
8192,8193,8194,8195,8196,8197,8198,8199,8200,8201,8202,8203,8204,8205,8206,8207, # 8208
8208,8209,8210,8211,8212,8213,8214,8215,8216,8217,8218,8219,8220,8221,8222,8223, # 8224
8224,8225,8226,8227,8228,8229,8230,8231,8232,8233,8234,8235,8236,8237,8238,8239, # 8240
8240,8241,8242,8243,8244,8245,8246,8247,8248,8249,8250,8251,8252,8253,8254,8255, # 8256
8256,8257,8258,8259,8260,8261,8262,8263,8264,8265,8266,8267,8268,8269,8270,8271) # 8272
# flake8: noqa
|
gpl-3.0
|
birm/Elemental
|
python/blas_like/level3.py
|
2
|
29133
|
#
# Copyright (c) 2009-2015, Jack Poulson
# All rights reserved.
#
# This file is part of Elemental and is under the BSD 2-Clause License,
# which can be found in the LICENSE file in the root directory, or at
# http://opensource.org/licenses/BSD-2-Clause
#
from ..core import *
# BLAS 3
# ======
# Gemm
# ----
# Emulate an enum for the Gemm algorithm
(GEMM_DEFAULT,GEMM_SUMMA_A,GEMM_SUMMA_B,GEMM_SUMMA_C,GEMM_SUMMA_DOT,
GEMM_CANNON)=(0,1,2,3,4,5)
lib.ElGemm_i.argtypes = [c_uint,c_uint,iType,c_void_p,c_void_p,iType,c_void_p]
lib.ElGemm_s.argtypes = [c_uint,c_uint,sType,c_void_p,c_void_p,sType,c_void_p]
lib.ElGemm_d.argtypes = [c_uint,c_uint,dType,c_void_p,c_void_p,dType,c_void_p]
lib.ElGemm_c.argtypes = [c_uint,c_uint,cType,c_void_p,c_void_p,cType,c_void_p]
lib.ElGemm_z.argtypes = [c_uint,c_uint,zType,c_void_p,c_void_p,zType,c_void_p]
lib.ElGemmXDist_i.argtypes = \
[c_uint,c_uint,iType,c_void_p,c_void_p,iType,c_void_p,c_uint]
lib.ElGemmXDist_s.argtypes = \
[c_uint,c_uint,sType,c_void_p,c_void_p,sType,c_void_p,c_uint]
lib.ElGemmXDist_d.argtypes = \
[c_uint,c_uint,dType,c_void_p,c_void_p,dType,c_void_p,c_uint]
lib.ElGemmXDist_c.argtypes = \
[c_uint,c_uint,cType,c_void_p,c_void_p,cType,c_void_p,c_uint]
lib.ElGemmXDist_z.argtypes = \
[c_uint,c_uint,zType,c_void_p,c_void_p,zType,c_void_p,c_uint]
def Gemm(orientA,orientB,alphaPre,A,B,betaPre,C,alg=GEMM_DEFAULT):
if A.tag != B.tag or B.tag != C.tag:
raise Exception('Datatypes of {A,B,C} must match')
if type(A) is not type(B) or type(B) is not type(C):
raise Exception('Matrix types of {A,B,C} must match')
alpha = TagToType(A.tag)(alphaPre)
beta = TagToType(A.tag)(betaPre)
args = [orientA,orientB,alpha,A.obj,B.obj,beta,C.obj]
argsAlg = [orientA,orientB,alpha,A.obj,B.obj,beta,C.obj,alg]
if type(A) is Matrix:
if B.tag == iTag: lib.ElGemm_i(*args)
elif B.tag == sTag: lib.ElGemm_s(*args)
elif B.tag == dTag: lib.ElGemm_d(*args)
elif B.tag == cTag: lib.ElGemm_c(*args)
elif B.tag == zTag: lib.ElGemm_z(*args)
else: DataExcept()
elif type(A) is DistMatrix:
    if B.tag == iTag: lib.ElGemmXDist_i(*argsAlg)
    elif B.tag == sTag: lib.ElGemmXDist_s(*argsAlg)
    elif B.tag == dTag: lib.ElGemmXDist_d(*argsAlg)
    elif B.tag == cTag: lib.ElGemmXDist_c(*argsAlg)
    elif B.tag == zTag: lib.ElGemmXDist_z(*argsAlg)
else: DataExcept()
else: TypeExcept()
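# A brief usage sketch (illustrative, not part of the original module):
# form C := 2 A B + C for distributed double-precision matrices, requesting
# the SUMMA-C algorithm via the trailing `alg` argument. NORMAL and the
# Uniform/Zeros fill helpers are assumed to be exported by ..core; they are
# not defined in this file.
#
#   A = DistMatrix(dTag); Uniform(A,100,100)
#   B = DistMatrix(dTag); Uniform(B,100,100)
#   C = DistMatrix(dTag); Zeros(C,100,100)
#   Gemm(NORMAL,NORMAL,2.,A,B,1.,C,GEMM_SUMMA_C)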
# Symm/Hemm
# ---------
lib.ElSymm_s.argtypes = [c_uint,c_uint,sType,c_void_p,c_void_p,sType,c_void_p]
lib.ElSymm_d.argtypes = [c_uint,c_uint,dType,c_void_p,c_void_p,dType,c_void_p]
lib.ElSymm_c.argtypes = [c_uint,c_uint,cType,c_void_p,c_void_p,cType,c_void_p]
lib.ElSymm_z.argtypes = [c_uint,c_uint,zType,c_void_p,c_void_p,zType,c_void_p]
lib.ElSymmDist_s.argtypes = \
[c_uint,c_uint,sType,c_void_p,c_void_p,sType,c_void_p]
lib.ElSymmDist_d.argtypes = \
[c_uint,c_uint,dType,c_void_p,c_void_p,dType,c_void_p]
lib.ElSymmDist_c.argtypes = \
[c_uint,c_uint,cType,c_void_p,c_void_p,cType,c_void_p]
lib.ElSymmDist_z.argtypes = \
[c_uint,c_uint,zType,c_void_p,c_void_p,zType,c_void_p]
lib.ElHemm_c.argtypes = [c_uint,c_uint,cType,c_void_p,c_void_p,cType,c_void_p]
lib.ElHemm_z.argtypes = [c_uint,c_uint,zType,c_void_p,c_void_p,zType,c_void_p]
lib.ElHemmDist_c.argtypes = \
[c_uint,c_uint,cType,c_void_p,c_void_p,cType,c_void_p]
lib.ElHemmDist_z.argtypes = \
[c_uint,c_uint,zType,c_void_p,c_void_p,zType,c_void_p]
def Symm(side,uplo,alphaPre,A,B,betaPre,C,conj=False):
if A.tag != B.tag or B.tag != C.tag:
raise Exception('Datatypes of {A,B,C} must match')
if type(A) is not type(B) or type(B) is not type(C):
raise Exception('Matrix types must match')
alpha = TagToType(A.tag)(alphaPre)
beta = TagToType(A.tag)(betaPre)
args = [side,uplo,alpha,A.obj,B.obj,beta,C.obj]
if type(A) is Matrix:
if A.tag == sTag: lib.ElSymm_s(*args)
elif A.tag == dTag: lib.ElSymm_d(*args)
elif A.tag == cTag:
if conj: lib.ElHemm_c(*args)
else: lib.ElSymm_c(*args)
elif A.tag == zTag:
if conj: lib.ElHemm_z(*args)
else: lib.ElSymm_z(*args)
else: DataExcept()
elif type(A) is DistMatrix:
if A.tag == sTag: lib.ElSymmDist_s(*args)
elif A.tag == dTag: lib.ElSymmDist_d(*args)
elif A.tag == cTag:
if conj: lib.ElHemmDist_c(*args)
else: lib.ElSymmDist_c(*args)
elif A.tag == zTag:
if conj: lib.ElHemmDist_z(*args)
else: lib.ElSymmDist_z(*args)
else: DataExcept()
else: TypeExcept()
def Hemm(side,uplo,alpha,A,B,beta,C):
  Symm(side,uplo,alpha,A,B,beta,C,True)
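# Usage sketch (illustrative): with side=LEFT this computes
# C := alpha A B + beta C, reading A as symmetric (Symm) or Hermitian
# (Hemm) from the triangle selected by `uplo`. LEFT and LOWER are assumed
# to be enums exported by ..core.
#
#   Symm(LEFT,LOWER,1.,A,B,0.,C)
#   Hemm(LEFT,LOWER,1.,A,B,0.,C)   # complex A treated as Hermitian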
# Syrk/Herk
# ---------
lib.ElSyrk_s.argtypes = \
lib.ElSyrkDist_s.argtypes = \
lib.ElSyrkSparse_s.argtypes = \
lib.ElSyrkDistSparse_s.argtypes = \
[c_uint,c_uint,sType,c_void_p,sType,c_void_p]
lib.ElSyrk_d.argtypes = \
lib.ElSyrkDist_d.argtypes = \
lib.ElSyrkSparse_d.argtypes = \
lib.ElSyrkDistSparse_d.argtypes = \
[c_uint,c_uint,dType,c_void_p,dType,c_void_p]
lib.ElSyrk_c.argtypes = \
lib.ElSyrkDist_c.argtypes = \
lib.ElSyrkSparse_c.argtypes = \
lib.ElSyrkDistSparse_c.argtypes = \
[c_uint,c_uint,cType,c_void_p,cType,c_void_p]
lib.ElSyrk_z.argtypes = \
lib.ElSyrkDist_z.argtypes = \
lib.ElSyrkSparse_z.argtypes = \
lib.ElSyrkDistSparse_z.argtypes = \
[c_uint,c_uint,zType,c_void_p,zType,c_void_p]
lib.ElHerk_c.argtypes = \
lib.ElHerkDist_c.argtypes = \
lib.ElHerkSparse_c.argtypes = \
lib.ElHerkDistSparse_c.argtypes = \
[c_uint,c_uint,sType,c_void_p,sType,c_void_p]
lib.ElHerk_z.argtypes = \
lib.ElHerkDist_z.argtypes = \
lib.ElHerkSparse_z.argtypes = \
lib.ElHerkDistSparse_z.argtypes = \
[c_uint,c_uint,dType,c_void_p,dType,c_void_p]
def Syrk(uplo,orient,alphaPre,A,betaPre,C,conj=False):
if A.tag != C.tag: raise Exception('Datatypes of A and C must match')
if type(A) is not type(C): raise Exception('Matrix types must match')
alpha = TagToType(A.tag)(alphaPre)
beta = TagToType(A.tag)(betaPre)
args = [uplo,orient,alpha,A.obj,beta,C.obj]
if type(A) is Matrix:
if A.tag == sTag: lib.ElSyrk_s(*args)
elif A.tag == dTag: lib.ElSyrk_d(*args)
elif A.tag == cTag:
if conj: lib.ElHerk_c(uplo,orient,alpha.real,A.obj,beta.real,C.obj)
else: lib.ElSyrk_c(*args)
elif A.tag == zTag:
if conj: lib.ElHerk_z(uplo,orient,alpha.real,A.obj,beta.real,C.obj)
else: lib.ElSyrk_z(*args)
else: DataExcept()
elif type(A) is DistMatrix:
if A.tag == sTag: lib.ElSyrkDist_s(*args)
elif A.tag == dTag: lib.ElSyrkDist_d(*args)
elif A.tag == cTag:
if conj: lib.ElHerkDist_c(uplo,orient,alpha.real,A.obj,beta.real,C.obj)
else: lib.ElSyrkDist_c(*args)
elif A.tag == zTag:
if conj: lib.ElHerkDist_z(uplo,orient,alpha.real,A.obj,beta.real,C.obj)
else: lib.ElSyrkDist_z(*args)
else: DataExcept()
elif type(A) is SparseMatrix:
if A.tag == sTag: lib.ElSyrkSparse_s(*args)
elif A.tag == dTag: lib.ElSyrkSparse_d(*args)
    elif A.tag == cTag:
      if conj: lib.ElHerkSparse_c(uplo,orient,alpha.real,A.obj,beta.real,C.obj)
      else: lib.ElSyrkSparse_c(*args)
    elif A.tag == zTag:
      if conj: lib.ElHerkSparse_z(uplo,orient,alpha.real,A.obj,beta.real,C.obj)
      else: lib.ElSyrkSparse_z(*args)
else: DataExcept()
elif type(A) is DistSparseMatrix:
if A.tag == sTag: lib.ElSyrkDistSparse_s(*args)
elif A.tag == dTag: lib.ElSyrkDistSparse_d(*args)
    elif A.tag == cTag:
      if conj: lib.ElHerkDistSparse_c(uplo,orient,alpha.real,A.obj,beta.real,C.obj)
      else: lib.ElSyrkDistSparse_c(*args)
    elif A.tag == zTag:
      if conj: lib.ElHerkDistSparse_z(uplo,orient,alpha.real,A.obj,beta.real,C.obj)
      else: lib.ElSyrkDistSparse_z(*args)
else: DataExcept()
else: TypeExcept()
def Herk(uplo,orient,alpha,A,beta,C):
Syrk(uplo,orient,alpha,A,beta,C,True)
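# Usage sketch (illustrative): rank-k updates of the `uplo` triangle of C.
# For the Hermitian variant only the real parts of the scalars are
# meaningful, which is why the wrappers above pass alpha.real/beta.real.
# LOWER and NORMAL are assumed ..core enums.
#
#   Syrk(LOWER,NORMAL,1.,A,0.,C)   # C := A A^T in the lower triangle
#   Herk(LOWER,NORMAL,1.,A,0.,C)   # C := A A^H in the lower triangle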
# Syr2k/Her2k
# -----------
lib.ElSyr2k_s.argtypes = [c_uint,c_uint,sType,c_void_p,c_void_p,sType,c_void_p]
lib.ElSyr2k_d.argtypes = [c_uint,c_uint,dType,c_void_p,c_void_p,dType,c_void_p]
lib.ElSyr2k_c.argtypes = [c_uint,c_uint,cType,c_void_p,c_void_p,cType,c_void_p]
lib.ElSyr2k_z.argtypes = [c_uint,c_uint,zType,c_void_p,c_void_p,zType,c_void_p]
lib.ElSyr2kDist_s.argtypes = \
[c_uint,c_uint,sType,c_void_p,c_void_p,sType,c_void_p]
lib.ElSyr2kDist_d.argtypes = \
[c_uint,c_uint,dType,c_void_p,c_void_p,dType,c_void_p]
lib.ElSyr2kDist_c.argtypes = \
[c_uint,c_uint,cType,c_void_p,c_void_p,cType,c_void_p]
lib.ElSyr2kDist_z.argtypes = \
[c_uint,c_uint,zType,c_void_p,c_void_p,zType,c_void_p]
lib.ElHer2k_c.argtypes = [c_uint,c_uint,cType,c_void_p,c_void_p,sType,c_void_p]
lib.ElHer2k_z.argtypes = [c_uint,c_uint,zType,c_void_p,c_void_p,dType,c_void_p]
lib.ElHer2kDist_c.argtypes = \
[c_uint,c_uint,cType,c_void_p,c_void_p,sType,c_void_p]
lib.ElHer2kDist_z.argtypes = \
[c_uint,c_uint,zType,c_void_p,c_void_p,dType,c_void_p]
def Syr2k(uplo,orient,alphaPre,A,B,betaPre,C,conj=False):
  if A.tag != B.tag or B.tag != C.tag:
    raise Exception('Datatypes of {A,B,C} must match')
  if type(A) is not type(B) or type(B) is not type(C):
    raise Exception('Matrix types must match')
  alpha = TagToType(A.tag)(alphaPre)
  beta = TagToType(A.tag)(betaPre)
  args = [uplo,orient,alpha,A.obj,B.obj,beta,C.obj]
if type(A) is Matrix:
if A.tag == sTag: lib.ElSyr2k_s(*args)
elif A.tag == dTag: lib.ElSyr2k_d(*args)
elif A.tag == cTag:
if conj: lib.ElHer2k_c(uplo,orient,alpha,A.obj,B.obj,beta.real,C.obj)
else: lib.ElSyr2k_c(*args)
elif A.tag == zTag:
if conj: lib.ElHer2k_z(uplo,orient,alpha,A.obj,B.obj,beta.real,C.obj)
else: lib.ElSyr2k_z(*args)
else: DataExcept()
elif type(A) is DistMatrix:
if A.tag == sTag: lib.ElSyr2kDist_s(*args)
elif A.tag == dTag: lib.ElSyr2kDist_d(*args)
elif A.tag == cTag:
if conj:
lib.ElHer2kDist_c(uplo,orient,alpha,A.obj,B.obj,beta.real,C.obj)
else: lib.ElSyr2kDist_c(*args)
elif A.tag == zTag:
if conj:
lib.ElHer2kDist_z(uplo,orient,alpha,A.obj,B.obj,beta.real,C.obj)
else: lib.ElSyr2kDist_z(*args)
else: DataExcept()
else: TypeExcept()
def Her2k(uplo,orient,alpha,A,B,beta,C):
Syr2k(uplo,orient,alpha,A,B,beta,C,True)
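# Usage sketch (illustrative): rank-2k updates following the usual BLAS
# convention, e.g. C := alpha(A B^T + B A^T) + beta C for Syr2k with
# orient=NORMAL, restricted to the `uplo` triangle of C.
#
#   Syr2k(LOWER,NORMAL,1.,A,B,0.,C)
#   Her2k(LOWER,NORMAL,1.,A,B,0.,C)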
# Multiply
# --------
lib.ElMultiply_i.argtypes = \
[c_uint,iType,c_void_p,c_void_p,iType,c_void_p]
lib.ElMultiply_s.argtypes = \
[c_uint,sType,c_void_p,c_void_p,sType,c_void_p]
lib.ElMultiply_d.argtypes = \
[c_uint,dType,c_void_p,c_void_p,dType,c_void_p]
lib.ElMultiply_c.argtypes = \
[c_uint,cType,c_void_p,c_void_p,cType,c_void_p]
lib.ElMultiply_z.argtypes = \
[c_uint,zType,c_void_p,c_void_p,zType,c_void_p]
lib.ElMultiplyDist_i.argtypes = \
[c_uint,iType,c_void_p,c_void_p,iType,c_void_p]
lib.ElMultiplyDist_s.argtypes = \
[c_uint,sType,c_void_p,c_void_p,sType,c_void_p]
lib.ElMultiplyDist_d.argtypes = \
[c_uint,dType,c_void_p,c_void_p,dType,c_void_p]
lib.ElMultiplyDist_c.argtypes = \
[c_uint,cType,c_void_p,c_void_p,cType,c_void_p]
lib.ElMultiplyDist_z.argtypes = \
[c_uint,zType,c_void_p,c_void_p,zType,c_void_p]
def Multiply(orient,alphaPre,A,X,betaPre,Y):
  if type(A) is SparseMatrix:
    if type(X) is not Matrix or type(Y) is not Matrix:
      raise Exception("X and Y must be (dense) Matrices")
    if A.tag != X.tag or X.tag != Y.tag:
      raise Exception("Datatypes of {A,X,Y} must match")
    alpha = TagToType(A.tag)(alphaPre)
    beta = TagToType(A.tag)(betaPre)
    args = [orient,alpha,A.obj,X.obj,beta,Y.obj]
    if A.tag == iTag: lib.ElMultiply_i(*args)
    elif A.tag == sTag: lib.ElMultiply_s(*args)
    elif A.tag == dTag: lib.ElMultiply_d(*args)
    elif A.tag == cTag: lib.ElMultiply_c(*args)
    elif A.tag == zTag: lib.ElMultiply_z(*args)
    else: DataExcept()
  elif type(A) is DistSparseMatrix:
    if type(X) is not DistMultiVec or type(Y) is not DistMultiVec:
      raise Exception("X and Y must be DistMultiVecs")
    if A.tag != X.tag or X.tag != Y.tag:
      raise Exception("Datatypes of {A,X,Y} must match")
    alpha = TagToType(A.tag)(alphaPre)
    beta = TagToType(A.tag)(betaPre)
    args = [orient,alpha,A.obj,X.obj,beta,Y.obj]
    if A.tag == iTag: lib.ElMultiplyDist_i(*args)
    elif A.tag == sTag: lib.ElMultiplyDist_s(*args)
    elif A.tag == dTag: lib.ElMultiplyDist_d(*args)
    elif A.tag == cTag: lib.ElMultiplyDist_c(*args)
    elif A.tag == zTag: lib.ElMultiplyDist_z(*args)
    else: DataExcept()
  else: TypeExcept()
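# Usage sketch (illustrative): sparse matrix times dense vector(s),
# Y := alpha op(A) X + beta Y. As enforced above, X and Y are Matrix
# objects when A is a SparseMatrix and DistMultiVec objects when A is a
# DistSparseMatrix. NORMAL is an assumed ..core enum.
#
#   Multiply(NORMAL,1.,A,x,0.,y)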
# MultiShiftQuasiTrsm
# -------------------
lib.ElMultiShiftQuasiTrsm_s.argtypes = \
lib.ElMultiShiftQuasiTrsmDist_s.argtypes = \
[c_uint,c_uint,c_uint,sType,c_void_p,c_void_p,c_void_p]
lib.ElMultiShiftQuasiTrsm_d.argtypes = \
lib.ElMultiShiftQuasiTrsmDist_d.argtypes = \
[c_uint,c_uint,c_uint,dType,c_void_p,c_void_p,c_void_p]
lib.ElMultiShiftQuasiTrsm_c.argtypes = \
lib.ElMultiShiftQuasiTrsmDist_c.argtypes = \
[c_uint,c_uint,c_uint,cType,c_void_p,c_void_p,c_void_p]
lib.ElMultiShiftQuasiTrsm_z.argtypes = \
lib.ElMultiShiftQuasiTrsmDist_z.argtypes = \
[c_uint,c_uint,c_uint,zType,c_void_p,c_void_p,c_void_p]
def MultiShiftQuasiTrsm(side,uplo,orient,alphaPre,A,shifts,B):
  if type(A) is not type(shifts) or type(shifts) is not type(B):
    raise Exception('Types of {A,shifts,B} must match')
  if A.tag != shifts.tag or shifts.tag != B.tag:
    raise Exception('Datatypes of {A,shifts,B} must match')
alpha = TagToType(A.tag)(alphaPre)
args = [side,uplo,orient,alpha,A.obj,shifts.obj,B.obj]
if type(A) is Matrix:
if A.tag == sTag: lib.ElMultiShiftQuasiTrsm_s(*args)
elif A.tag == dTag: lib.ElMultiShiftQuasiTrsm_d(*args)
elif A.tag == cTag: lib.ElMultiShiftQuasiTrsm_c(*args)
elif A.tag == zTag: lib.ElMultiShiftQuasiTrsm_z(*args)
else: DataExcept()
elif type(A) is DistMatrix:
if A.tag == sTag: lib.ElMultiShiftQuasiTrsmDist_s(*args)
elif A.tag == dTag: lib.ElMultiShiftQuasiTrsmDist_d(*args)
elif A.tag == cTag: lib.ElMultiShiftQuasiTrsmDist_c(*args)
elif A.tag == zTag: lib.ElMultiShiftQuasiTrsmDist_z(*args)
else: DataExcept()
else: TypeExcept()
# MultiShiftTrsm
# --------------
lib.ElMultiShiftTrsm_s.argtypes = \
lib.ElMultiShiftTrsmDist_s.argtypes = \
[c_uint,c_uint,c_uint,sType,c_void_p,c_void_p,c_void_p]
lib.ElMultiShiftTrsm_d.argtypes = \
lib.ElMultiShiftTrsmDist_d.argtypes = \
[c_uint,c_uint,c_uint,dType,c_void_p,c_void_p,c_void_p]
lib.ElMultiShiftTrsm_c.argtypes = \
lib.ElMultiShiftTrsmDist_c.argtypes = \
[c_uint,c_uint,c_uint,cType,c_void_p,c_void_p,c_void_p]
lib.ElMultiShiftTrsm_z.argtypes = \
lib.ElMultiShiftTrsmDist_z.argtypes = \
[c_uint,c_uint,c_uint,zType,c_void_p,c_void_p,c_void_p]
def MultiShiftTrsm(side,uplo,orient,alphaPre,A,shifts,B):
  if type(A) is not type(shifts) or type(shifts) is not type(B):
    raise Exception('Types of {A,shifts,B} must match')
  if A.tag != shifts.tag or shifts.tag != B.tag:
    raise Exception('Datatypes of {A,shifts,B} must match')
alpha = TagToType(A.tag)(alphaPre)
args = [side,uplo,orient,alpha,A.obj,shifts.obj,B.obj]
if type(A) is Matrix:
if A.tag == sTag: lib.ElMultiShiftTrsm_s(*args)
elif A.tag == dTag: lib.ElMultiShiftTrsm_d(*args)
elif A.tag == cTag: lib.ElMultiShiftTrsm_c(*args)
elif A.tag == zTag: lib.ElMultiShiftTrsm_z(*args)
else: DataExcept()
elif type(A) is DistMatrix:
if A.tag == sTag: lib.ElMultiShiftTrsmDist_s(*args)
elif A.tag == dTag: lib.ElMultiShiftTrsmDist_d(*args)
elif A.tag == cTag: lib.ElMultiShiftTrsmDist_c(*args)
elif A.tag == zTag: lib.ElMultiShiftTrsmDist_z(*args)
else: DataExcept()
else: TypeExcept()
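# Usage sketch (illustrative): for each column j, B[:,j] is overwritten
# with the solution of a triangular system shifted by shifts[j] (roughly,
# (op(A) - shifts[j] I) X[:,j] = alpha B[:,j]); `shifts` must have the
# same matrix type and datatype as A and B. LEFT/UPPER/NORMAL are assumed
# ..core enums.
#
#   MultiShiftTrsm(LEFT,UPPER,NORMAL,1.,A,shifts,B)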
# QuasiTrsm
# ---------
lib.ElQuasiTrsm_s.argtypes = \
lib.ElQuasiTrsmDist_s.argtypes = \
[c_uint,c_uint,c_uint,sType,c_void_p,c_void_p]
lib.ElQuasiTrsm_d.argtypes = \
lib.ElQuasiTrsmDist_d.argtypes = \
[c_uint,c_uint,c_uint,dType,c_void_p,c_void_p]
lib.ElQuasiTrsm_c.argtypes = \
lib.ElQuasiTrsmDist_c.argtypes = \
[c_uint,c_uint,c_uint,cType,c_void_p,c_void_p]
lib.ElQuasiTrsm_z.argtypes = \
lib.ElQuasiTrsmDist_z.argtypes = \
[c_uint,c_uint,c_uint,zType,c_void_p,c_void_p]
def QuasiTrsm(side,uplo,orient,alphaPre,A,B):
if type(A) is not type(B): raise Exception('Types of A and B must match')
if A.tag != B.tag: raise Exception('Datatypes of A and B must match')
alpha = TagToType(A.tag)(alphaPre)
args = [side,uplo,orient,alpha,A.obj,B.obj]
if type(A) is Matrix:
if A.tag == sTag: lib.ElQuasiTrsm_s(*args)
elif A.tag == dTag: lib.ElQuasiTrsm_d(*args)
elif A.tag == cTag: lib.ElQuasiTrsm_c(*args)
elif A.tag == zTag: lib.ElQuasiTrsm_z(*args)
else: DataExcept()
elif type(A) is DistMatrix:
if A.tag == sTag: lib.ElQuasiTrsmDist_s(*args)
elif A.tag == dTag: lib.ElQuasiTrsmDist_d(*args)
elif A.tag == cTag: lib.ElQuasiTrsmDist_c(*args)
elif A.tag == zTag: lib.ElQuasiTrsmDist_z(*args)
else: DataExcept()
else: TypeExcept()
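# Usage sketch (illustrative): like a triangular solve, but A may be
# quasi-triangular (2x2 diagonal blocks, as produced by a real Schur
# decomposition). LEFT/UPPER/NORMAL are assumed ..core enums.
#
#   QuasiTrsm(LEFT,UPPER,NORMAL,1.,T,B)   # overwrites B with the solution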
# Trdtrmm
# -------
lib.ElTrdtrmm_s.argtypes = \
lib.ElTrdtrmm_d.argtypes = \
lib.ElTrdtrmmDist_s.argtypes = \
lib.ElTrdtrmmDist_d.argtypes = \
[c_uint,c_void_p]
lib.ElTrdtrmm_c.argtypes = \
lib.ElTrdtrmm_z.argtypes = \
lib.ElTrdtrmmDist_c.argtypes = \
lib.ElTrdtrmmDist_z.argtypes = \
[c_uint,c_void_p,bType]
def Trdtrmm(uplo,A,conjugate=False):
args = [uplo,A.obj]
argsCpx = [uplo,A.obj,conjugate]
if type(A) is Matrix:
if A.tag == sTag: lib.ElTrdtrmm_s(*args)
elif A.tag == dTag: lib.ElTrdtrmm_d(*args)
elif A.tag == cTag: lib.ElTrdtrmm_c(*argsCpx)
elif A.tag == zTag: lib.ElTrdtrmm_z(*argsCpx)
else: DataExcept()
elif type(A) is DistMatrix:
if A.tag == sTag: lib.ElTrdtrmmDist_s(*args)
elif A.tag == dTag: lib.ElTrdtrmmDist_d(*args)
elif A.tag == cTag: lib.ElTrdtrmmDist_c(*argsCpx)
elif A.tag == zTag: lib.ElTrdtrmmDist_z(*argsCpx)
else: DataExcept()
else: TypeExcept()
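# Usage sketch (illustrative): in-place multiplication of diagonal-scaled
# triangular factors. Only the complex bindings accept the `conjugate`
# flag, which is why separate argument lists are built above.
#
#   Trdtrmm(LOWER,A)                  # real data
#   Trdtrmm(LOWER,A,conjugate=True)   # complex data, Hermitian variant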
# TrdtrmmQuasi
# ------------
lib.ElTrdtrmmQuasi_s.argtypes = \
lib.ElTrdtrmmQuasi_d.argtypes = \
lib.ElTrdtrmmQuasiDist_s.argtypes = \
lib.ElTrdtrmmQuasiDist_d.argtypes = \
[c_uint,c_void_p,c_void_p]
lib.ElTrdtrmmQuasi_c.argtypes = \
lib.ElTrdtrmmQuasi_z.argtypes = \
lib.ElTrdtrmmQuasiDist_c.argtypes = \
lib.ElTrdtrmmQuasiDist_z.argtypes = \
[c_uint,c_void_p,c_void_p,bType]
def TrdtrmmQuasi(uplo,A,dOff,conjugate=False):
if type(A) is not type(dOff):
raise Exception('Types of A and dOff must match')
if A.tag != dOff.tag:
raise Exception('Datatypes of A and dOff must match')
  args = [uplo,A.obj,dOff.obj]
  argsCpx = [uplo,A.obj,dOff.obj,conjugate]
if type(A) is Matrix:
if A.tag == sTag: lib.ElTrdtrmmQuasi_s(*args)
elif A.tag == dTag: lib.ElTrdtrmmQuasi_d(*args)
elif A.tag == cTag: lib.ElTrdtrmmQuasi_c(*argsCpx)
elif A.tag == zTag: lib.ElTrdtrmmQuasi_z(*argsCpx)
else: DataExcept()
elif type(A) is DistMatrix:
if A.tag == sTag: lib.ElTrdtrmmQuasiDist_s(*args)
elif A.tag == dTag: lib.ElTrdtrmmQuasiDist_d(*args)
elif A.tag == cTag: lib.ElTrdtrmmQuasiDist_c(*argsCpx)
elif A.tag == zTag: lib.ElTrdtrmmQuasiDist_z(*argsCpx)
else: DataExcept()
else: TypeExcept()
# Trmm
# ----
lib.ElTrmm_s.argtypes = \
lib.ElTrmmDist_s.argtypes = \
[c_uint,c_uint,c_uint,c_uint,sType,c_void_p,c_void_p]
lib.ElTrmm_d.argtypes = \
lib.ElTrmmDist_d.argtypes = \
[c_uint,c_uint,c_uint,c_uint,dType,c_void_p,c_void_p]
lib.ElTrmm_c.argtypes = \
lib.ElTrmmDist_c.argtypes = \
[c_uint,c_uint,c_uint,c_uint,cType,c_void_p,c_void_p]
lib.ElTrmm_z.argtypes = \
lib.ElTrmmDist_z.argtypes = \
[c_uint,c_uint,c_uint,c_uint,zType,c_void_p,c_void_p]
def Trmm(side,uplo,orient,diag,alphaPre,A,B):
if type(A) is not type(B): raise Exception('Types of A and B must match')
if A.tag != B.tag: raise Exception('Datatypes of A and B must match')
alpha = TagToType(A.tag)(alphaPre)
args = [side,uplo,orient,diag,alpha,A.obj,B.obj]
if type(A) is Matrix:
if A.tag == sTag: lib.ElTrmm_s(*args)
elif A.tag == dTag: lib.ElTrmm_d(*args)
elif A.tag == cTag: lib.ElTrmm_c(*args)
elif A.tag == zTag: lib.ElTrmm_z(*args)
else: DataExcept()
elif type(A) is DistMatrix:
if A.tag == sTag: lib.ElTrmmDist_s(*args)
elif A.tag == dTag: lib.ElTrmmDist_d(*args)
elif A.tag == cTag: lib.ElTrmmDist_c(*args)
elif A.tag == zTag: lib.ElTrmmDist_z(*args)
else: DataExcept()
else: TypeExcept()
# Trrk
# ----
lib.ElTrrk_s.argtypes = \
lib.ElTrrkDist_s.argtypes = \
[c_uint,c_uint,c_uint,sType,c_void_p,c_void_p,sType,c_void_p]
lib.ElTrrk_d.argtypes = \
lib.ElTrrkDist_d.argtypes = \
[c_uint,c_uint,c_uint,dType,c_void_p,c_void_p,dType,c_void_p]
lib.ElTrrk_c.argtypes = \
lib.ElTrrkDist_c.argtypes = \
[c_uint,c_uint,c_uint,cType,c_void_p,c_void_p,cType,c_void_p]
lib.ElTrrk_z.argtypes = \
lib.ElTrrkDist_z.argtypes = \
[c_uint,c_uint,c_uint,zType,c_void_p,c_void_p,zType,c_void_p]
def Trrk(uplo,orientA,orientB,alphaPre,A,B,betaPre,C):
if type(A) is not type(B) or type(B) is not type(C):
raise Exception('Types of {A,B,C} must match')
if A.tag != B.tag or B.tag != C.tag:
raise Exception('Datatypes of {A,B,C} must match')
alpha = TagToType(A.tag)(alphaPre)
beta = TagToType(A.tag)(betaPre)
args = [uplo,orientA,orientB,alpha,A.obj,B.obj,beta,C.obj]
if type(A) is Matrix:
if A.tag == sTag: lib.ElTrrk_s(*args)
elif A.tag == dTag: lib.ElTrrk_d(*args)
elif A.tag == cTag: lib.ElTrrk_c(*args)
elif A.tag == zTag: lib.ElTrrk_z(*args)
else: DataExcept()
elif type(A) is DistMatrix:
if A.tag == sTag: lib.ElTrrkDist_s(*args)
elif A.tag == dTag: lib.ElTrrkDist_d(*args)
elif A.tag == cTag: lib.ElTrrkDist_c(*args)
elif A.tag == zTag: lib.ElTrrkDist_z(*args)
else: DataExcept()
else: TypeExcept()
# Trr2k
# -----
#lib.ElTrr2k_s.argtypes = \
# [c_uint,c_uint,c_uint,c_uint,c_uint,
# sType,c_void_p,c_void_p,sType,c_void_p,c_void_p,sType,c_void_p]
#lib.ElTrr2k_d.argtypes = \
# [c_uint,c_uint,c_uint,c_uint,c_uint,
# dType,c_void_p,c_void_p,dType,c_void_p,c_void_p,dType,c_void_p]
#lib.ElTrr2k_c.argtypes = \
# [c_uint,c_uint,c_uint,c_uint,c_uint,
# cType,c_void_p,c_void_p,cType,c_void_p,c_void_p,cType,c_void_p]
#lib.ElTrr2k_z.argtypes = \
# [c_uint,c_uint,c_uint,c_uint,c_uint,
# zType,c_void_p,c_void_p,zType,c_void_p,c_void_p,zType,c_void_p]
lib.ElTrr2kDist_s.argtypes = \
[c_uint,c_uint,c_uint,c_uint,c_uint,
sType,c_void_p,c_void_p,sType,c_void_p,c_void_p,sType,c_void_p]
lib.ElTrr2kDist_d.argtypes = \
[c_uint,c_uint,c_uint,c_uint,c_uint,
dType,c_void_p,c_void_p,dType,c_void_p,c_void_p,dType,c_void_p]
lib.ElTrr2kDist_c.argtypes = \
[c_uint,c_uint,c_uint,c_uint,c_uint,
cType,c_void_p,c_void_p,cType,c_void_p,c_void_p,cType,c_void_p]
lib.ElTrr2kDist_z.argtypes = \
[c_uint,c_uint,c_uint,c_uint,c_uint,
zType,c_void_p,c_void_p,zType,c_void_p,c_void_p,zType,c_void_p]
def Trr2k(uplo,orientA,orientB,orientC,orientD,
alphaPre,A,B,betaPre,C,D,gammaPre,E):
if type(A) is not type(B) or type(B) is not type(C) or \
type(C) is not type(D) or type(D) is not type(E):
raise Exception('Types of {A,B,C,D,E} must match')
if A.tag != B.tag or B.tag != C.tag or C.tag != D.tag or D.tag != E.tag:
raise Exception('Datatypes of {A,B,C,D,E} must match')
alpha = TagToType(A.tag)(alphaPre)
beta = TagToType(A.tag)(betaPre)
gamma = TagToType(A.tag)(gammaPre)
args = [uplo,orientA,orientB,orientC,orientD,
alpha,A.obj,B.obj,beta,C.obj,D.obj,gamma,E.obj]
if type(A) is Matrix:
raise Exception('Sequential implementation does not yet exist')
elif type(A) is DistMatrix:
if A.tag == sTag: lib.ElTrr2kDist_s(*args)
elif A.tag == dTag: lib.ElTrr2kDist_d(*args)
elif A.tag == cTag: lib.ElTrr2kDist_c(*args)
elif A.tag == zTag: lib.ElTrr2kDist_z(*args)
else: DataExcept()
else: TypeExcept()
# Trsm
# ----
lib.ElTrsm_s.argtypes = \
lib.ElTrsmDist_s.argtypes = \
[c_uint,c_uint,c_uint,c_uint,sType,c_void_p,c_void_p]
lib.ElTrsm_d.argtypes = \
lib.ElTrsmDist_d.argtypes = \
[c_uint,c_uint,c_uint,c_uint,dType,c_void_p,c_void_p]
lib.ElTrsm_c.argtypes = \
lib.ElTrsmDist_c.argtypes = \
[c_uint,c_uint,c_uint,c_uint,cType,c_void_p,c_void_p]
lib.ElTrsm_z.argtypes = \
lib.ElTrsmDist_z.argtypes = \
[c_uint,c_uint,c_uint,c_uint,zType,c_void_p,c_void_p]
def Trsm(side,uplo,orient,diag,alphaPre,A,B):
if type(A) is not type(B): raise Exception('Types of A and B must match')
if A.tag != B.tag: raise Exception('Datatypes of A and B must match')
alpha = TagToType(A.tag)(alphaPre)
args = [side,uplo,orient,diag,alpha,A.obj,B.obj]
if type(A) is Matrix:
if A.tag == sTag: lib.ElTrsm_s(*args)
elif A.tag == dTag: lib.ElTrsm_d(*args)
elif A.tag == cTag: lib.ElTrsm_c(*args)
elif A.tag == zTag: lib.ElTrsm_z(*args)
else: DataExcept()
elif type(A) is DistMatrix:
if A.tag == sTag: lib.ElTrsmDist_s(*args)
elif A.tag == dTag: lib.ElTrsmDist_d(*args)
elif A.tag == cTag: lib.ElTrsmDist_c(*args)
elif A.tag == zTag: lib.ElTrsmDist_z(*args)
else: DataExcept()
else: TypeExcept()
# Trstrm
# ------
lib.ElTrstrm_s.argtypes = \
lib.ElTrstrmDist_s.argtypes = \
[c_uint,c_uint,c_uint,c_uint,sType,c_void_p,c_void_p]
lib.ElTrstrm_d.argtypes = \
lib.ElTrstrmDist_d.argtypes = \
[c_uint,c_uint,c_uint,c_uint,dType,c_void_p,c_void_p]
lib.ElTrstrm_c.argtypes = \
lib.ElTrstrmDist_c.argtypes = \
[c_uint,c_uint,c_uint,c_uint,cType,c_void_p,c_void_p]
lib.ElTrstrm_z.argtypes = \
lib.ElTrstrmDist_z.argtypes = \
[c_uint,c_uint,c_uint,c_uint,zType,c_void_p,c_void_p]
def Trstrm(side,uplo,orient,diag,alphaPre,A,B):
if type(A) is not type(B): raise Exception('Types of A and B must match')
if A.tag != B.tag: raise Exception('Datatypes of A and B must match')
alpha = TagToType(A.tag)(alphaPre)
args = [side,uplo,orient,diag,alpha,A.obj,B.obj]
if type(A) is Matrix:
if A.tag == sTag: lib.ElTrstrm_s(*args)
elif A.tag == dTag: lib.ElTrstrm_d(*args)
elif A.tag == cTag: lib.ElTrstrm_c(*args)
elif A.tag == zTag: lib.ElTrstrm_z(*args)
else: DataExcept()
elif type(A) is DistMatrix:
if A.tag == sTag: lib.ElTrstrmDist_s(*args)
elif A.tag == dTag: lib.ElTrstrmDist_d(*args)
elif A.tag == cTag: lib.ElTrstrmDist_c(*args)
elif A.tag == zTag: lib.ElTrstrmDist_z(*args)
else: DataExcept()
else: TypeExcept()
# Trtrmm
# ------
lib.ElTrtrmm_s.argtypes = \
lib.ElTrtrmm_d.argtypes = \
lib.ElTrtrmmDist_s.argtypes = \
lib.ElTrtrmmDist_d.argtypes = \
[c_uint,c_void_p]
lib.ElTrtrmm_c.argtypes = \
lib.ElTrtrmm_z.argtypes = \
lib.ElTrtrmmDist_c.argtypes = \
lib.ElTrtrmmDist_z.argtypes = \
[c_uint,c_void_p,bType]
def Trtrmm(uplo,A,conjugate=False):
args = [uplo,A.obj]
argsCpx = [uplo,A.obj,conjugate]
if type(A) is Matrix:
if A.tag == sTag: lib.ElTrtrmm_s(*args)
elif A.tag == dTag: lib.ElTrtrmm_d(*args)
elif A.tag == cTag: lib.ElTrtrmm_c(*argsCpx)
elif A.tag == zTag: lib.ElTrtrmm_z(*argsCpx)
else: DataExcept()
elif type(A) is DistMatrix:
if A.tag == sTag: lib.ElTrtrmmDist_s(*args)
elif A.tag == dTag: lib.ElTrtrmmDist_d(*args)
elif A.tag == cTag: lib.ElTrtrmmDist_c(*argsCpx)
elif A.tag == zTag: lib.ElTrtrmmDist_z(*argsCpx)
else: DataExcept()
else: TypeExcept()
# Two-sided Trmm
# --------------
lib.ElTwoSidedTrmm_s.argtypes = \
lib.ElTwoSidedTrmm_d.argtypes = \
lib.ElTwoSidedTrmm_c.argtypes = \
lib.ElTwoSidedTrmm_z.argtypes = \
lib.ElTwoSidedTrmmDist_s.argtypes = \
lib.ElTwoSidedTrmmDist_d.argtypes = \
lib.ElTwoSidedTrmmDist_c.argtypes = \
lib.ElTwoSidedTrmmDist_z.argtypes = \
[c_uint,c_uint,c_void_p,c_void_p]
def TwoSidedTrmm(uplo,diag,A,B):
if type(A) is not type(B): raise Exception('Types of A and B must match')
if A.tag != B.tag: raise Exception('Datatypes of A and B must match')
args = [uplo,diag,A.obj,B.obj]
if type(A) is Matrix:
if A.tag == sTag: lib.ElTwoSidedTrmm_s(*args)
elif A.tag == dTag: lib.ElTwoSidedTrmm_d(*args)
elif A.tag == cTag: lib.ElTwoSidedTrmm_c(*args)
elif A.tag == zTag: lib.ElTwoSidedTrmm_z(*args)
else: DataExcept()
elif type(A) is DistMatrix:
if A.tag == sTag: lib.ElTwoSidedTrmmDist_s(*args)
elif A.tag == dTag: lib.ElTwoSidedTrmmDist_d(*args)
elif A.tag == cTag: lib.ElTwoSidedTrmmDist_c(*args)
elif A.tag == zTag: lib.ElTwoSidedTrmmDist_z(*args)
else: DataExcept()
else: TypeExcept()
# Two-sided Trsm
# --------------
lib.ElTwoSidedTrsm_s.argtypes = \
lib.ElTwoSidedTrsm_d.argtypes = \
lib.ElTwoSidedTrsm_c.argtypes = \
lib.ElTwoSidedTrsm_z.argtypes = \
lib.ElTwoSidedTrsmDist_s.argtypes = \
lib.ElTwoSidedTrsmDist_d.argtypes = \
lib.ElTwoSidedTrsmDist_c.argtypes = \
lib.ElTwoSidedTrsmDist_z.argtypes = \
[c_uint,c_uint,c_void_p,c_void_p]
def TwoSidedTrsm(uplo,diag,A,B):
if type(A) is not type(B): raise Exception('Types of A and B must match')
if A.tag != B.tag: raise Exception('Datatypes of A and B must match')
args = [uplo,diag,A.obj,B.obj]
if type(A) is Matrix:
if A.tag == sTag: lib.ElTwoSidedTrsm_s(*args)
elif A.tag == dTag: lib.ElTwoSidedTrsm_d(*args)
elif A.tag == cTag: lib.ElTwoSidedTrsm_c(*args)
elif A.tag == zTag: lib.ElTwoSidedTrsm_z(*args)
else: DataExcept()
elif type(A) is DistMatrix:
if A.tag == sTag: lib.ElTwoSidedTrsmDist_s(*args)
elif A.tag == dTag: lib.ElTwoSidedTrsmDist_d(*args)
elif A.tag == cTag: lib.ElTwoSidedTrsmDist_c(*args)
elif A.tag == zTag: lib.ElTwoSidedTrsmDist_z(*args)
else: DataExcept()
else: TypeExcept()
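# Hedged usage sketch (added for illustration; not part of the original
# wrappers). It assumes the enum constants LEFT, LOWER, NORMAL and NON_UNIT
# plus the Matrix class and Uniform generator exported elsewhere in this
# package; treat the exact names as assumptions.
#
#   A = Matrix(dTag)
#   B = Matrix(dTag)
#   Uniform(A,100,100)
#   Uniform(B,100,20)
#   # Overwrite B with inv(tril(A))*B, reading only A's lower triangle:
#   Trsm(LEFT,LOWER,NORMAL,NON_UNIT,1.0,A,B)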
|
bsd-3-clause
|
pyjs/pyjs
|
pyjswidgets/pyjamas/ui/StackPanel.py
|
9
|
5080
|
# Copyright 2006 James Tauber and contributors
# Copyright (C) 2009, 2010 Luke Kenneth Casson Leighton <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from pyjamas import DOM
from pyjamas import Factory
from pyjamas.ui.CellPanel import CellPanel
from pyjamas.ui import Event
class StackPanel(CellPanel):
def __init__(self, **kwargs):
self.visibleStack = -1
self.indices = {}
self.stackListeners = []
kwargs['StyleName'] = kwargs.get('StyleName', "gwt-StackPanel")
CellPanel.__init__(self, **kwargs)
DOM.sinkEvents(self.getElement(), Event.ONCLICK)
def addStackChangeListener(self, listener):
self.stackListeners.append(listener)
def add(self, widget, stackText="", asHTML=False):
widget.removeFromParent()
index = self.getWidgetCount()
tr = DOM.createTR()
td = DOM.createTD()
DOM.appendChild(self.body, tr)
DOM.appendChild(tr, td)
self.setStyleName(td, "gwt-StackPanelItem", True)
self._setIndex(td, index)
DOM.setAttribute(td, "height", "1px")
tr = DOM.createTR()
td = DOM.createTD()
DOM.appendChild(self.body, tr)
DOM.appendChild(tr, td)
DOM.setAttribute(td, "height", "100%")
DOM.setAttribute(td, "vAlign", "top")
CellPanel.add(self, widget, td)
self.setStackVisible(index, False)
if self.visibleStack == -1:
self.showStack(0)
if stackText != "":
self.setStackText(self.getWidgetCount() - 1, stackText, asHTML)
def onBrowserEvent(self, event):
if DOM.eventGetType(event) == "click":
index = self.getDividerIndex(DOM.eventGetTarget(event))
if index != -1:
self.showStack(index)
# also callable as remove(child) and remove(index)
def remove(self, child, index=None):
if index is None:
if isinstance(child, int):
index = child
child = self.getWidget(child)
else:
index = self.getWidgetIndex(child)
if child.getParent() != self:
return False
if self.visibleStack == index:
self.visibleStack = -1
elif self.visibleStack > index:
self.visibleStack -= 1
rowIndex = 2 * index
tr = DOM.getChild(self.body, rowIndex)
DOM.removeChild(self.body, tr)
tr = DOM.getChild(self.body, rowIndex)
DOM.removeChild(self.body, tr)
CellPanel.remove(self, child)
rows = self.getWidgetCount() * 2
        # Renumber the stack headers that follow the removed rows
        # (original GWT loop: for (int i = rowIndex; i < rows; i += 2)).
        for i in range(rowIndex, rows, 2):
            childTR = DOM.getChild(self.body, i)
            td = DOM.getFirstChild(childTR)
            self._setIndex(td, index)
            index += 1
return True
def _setIndex(self, td, index):
self.indices[td] = index
def _getIndex(self, td):
return self.indices.get(td)
def setStackText(self, index, text, asHTML=False):
if index >= self.getWidgetCount():
return
td = DOM.getChild(DOM.getChild(self.body, index * 2), 0)
if asHTML:
DOM.setInnerHTML(td, text)
else:
DOM.setInnerText(td, text)
def showStack(self, index):
if (index >= self.getWidgetCount()) or (index == self.visibleStack):
return
if self.visibleStack >= 0:
self.setStackVisible(self.visibleStack, False)
self.visibleStack = index
self.setStackVisible(self.visibleStack, True)
for listener in self.stackListeners:
listener.onStackChanged(self, index)
def getDividerIndex(self, elem):
while (elem is not None) and not DOM.compare(elem, self.getElement()):
expando = self._getIndex(elem)
if expando is not None:
return int(expando)
elem = DOM.getParent(elem)
return -1
def setStackVisible(self, index, visible):
tr = DOM.getChild(self.body, (index * 2))
if tr is None:
return
td = DOM.getFirstChild(tr)
self.setStyleName(td, "gwt-StackPanelItem-selected", visible)
tr = DOM.getChild(self.body, (index * 2) + 1)
self.setVisible(tr, visible)
self.getWidget(index).setVisible(visible)
def getSelectedIndex(self):
return self.visibleStack
Factory.registerClass('pyjamas.ui.StackPanel', 'StackPanel', StackPanel)
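# Hedged usage sketch (added for illustration; not part of the original
# module). Label and RootPanel are assumed from the wider pyjamas.ui
# package, as in other pyjamas examples.
#
#   from pyjamas.ui.Label import Label
#   from pyjamas.ui.RootPanel import RootPanel
#
#   stack = StackPanel()
#   stack.add(Label("First pane"), "Section 1")
#   stack.add(Label("Second pane"), "Section 2")
#   stack.showStack(1)  # reveal the second pane and fire stack listeners
#   RootPanel().add(stack)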
|
apache-2.0
|
jbedorf/tensorflow
|
tensorflow/contrib/image/python/ops/sparse_image_warp.py
|
19
|
8517
|
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Image warping using sparse flow defined at control points."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.image.python.ops import dense_image_warp
from tensorflow.contrib.image.python.ops import interpolate_spline
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
def _get_grid_locations(image_height, image_width):
"""Wrapper for np.meshgrid."""
y_range = np.linspace(0, image_height - 1, image_height)
x_range = np.linspace(0, image_width - 1, image_width)
y_grid, x_grid = np.meshgrid(y_range, x_range, indexing='ij')
return np.stack((y_grid, x_grid), -1)
def _expand_to_minibatch(np_array, batch_size):
"""Tile arbitrarily-sized np_array to include new batch dimension."""
tiles = [batch_size] + [1] * np_array.ndim
return np.tile(np.expand_dims(np_array, 0), tiles)
def _get_boundary_locations(image_height, image_width, num_points_per_edge):
"""Compute evenly-spaced indices along edge of image."""
y_range = np.linspace(0, image_height - 1, num_points_per_edge + 2)
x_range = np.linspace(0, image_width - 1, num_points_per_edge + 2)
ys, xs = np.meshgrid(y_range, x_range, indexing='ij')
is_boundary = np.logical_or(
np.logical_or(xs == 0, xs == image_width - 1),
np.logical_or(ys == 0, ys == image_height - 1))
return np.stack([ys[is_boundary], xs[is_boundary]], axis=-1)
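# For example (an assumed illustration of the construction above):
# _get_boundary_locations(64, 64, 1) builds a 3x3 linspace grid and keeps
# the 8 boundary points -- the 4 corners plus one midpoint per edge,
# matching the 4 + 4*num_points_per_edge count quoted below.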
def _add_zero_flow_controls_at_boundary(control_point_locations,
control_point_flows, image_height,
image_width, boundary_points_per_edge):
"""Add control points for zero-flow boundary conditions.
Augment the set of control points with extra points on the
boundary of the image that have zero flow.
Args:
control_point_locations: input control points
control_point_flows: their flows
image_height: image height
image_width: image width
boundary_points_per_edge: number of points to add in the middle of each
edge (not including the corners).
The total number of points added is
4 + 4*(boundary_points_per_edge).
Returns:
merged_control_point_locations: augmented set of control point locations
merged_control_point_flows: augmented set of control point flows
"""
batch_size = tensor_shape.dimension_value(control_point_locations.shape[0])
boundary_point_locations = _get_boundary_locations(image_height, image_width,
boundary_points_per_edge)
boundary_point_flows = np.zeros([boundary_point_locations.shape[0], 2])
type_to_use = control_point_locations.dtype
boundary_point_locations = constant_op.constant(
_expand_to_minibatch(boundary_point_locations, batch_size),
dtype=type_to_use)
boundary_point_flows = constant_op.constant(
_expand_to_minibatch(boundary_point_flows, batch_size), dtype=type_to_use)
merged_control_point_locations = array_ops.concat(
[control_point_locations, boundary_point_locations], 1)
merged_control_point_flows = array_ops.concat(
[control_point_flows, boundary_point_flows], 1)
return merged_control_point_locations, merged_control_point_flows
def sparse_image_warp(image,
source_control_point_locations,
dest_control_point_locations,
interpolation_order=2,
regularization_weight=0.0,
num_boundary_points=0,
name='sparse_image_warp'):
"""Image warping using correspondences between sparse control points.
Apply a non-linear warp to the image, where the warp is specified by
the source and destination locations of a (potentially small) number of
control points. First, we use a polyharmonic spline
(`tf.contrib.image.interpolate_spline`) to interpolate the displacements
between the corresponding control points to a dense flow field.
Then, we warp the image using this dense flow field
(`tf.contrib.image.dense_image_warp`).
Let t index our control points. For regularization_weight=0, we have:
warped_image[b, dest_control_point_locations[b, t, 0],
dest_control_point_locations[b, t, 1], :] =
image[b, source_control_point_locations[b, t, 0],
source_control_point_locations[b, t, 1], :].
For regularization_weight > 0, this condition is met approximately, since
regularized interpolation trades off smoothness of the interpolant vs.
reconstruction of the interpolant at the control points.
See `tf.contrib.image.interpolate_spline` for further documentation of the
interpolation_order and regularization_weight arguments.
Args:
image: `[batch, height, width, channels]` float `Tensor`
source_control_point_locations: `[batch, num_control_points, 2]` float
`Tensor`
dest_control_point_locations: `[batch, num_control_points, 2]` float
`Tensor`
interpolation_order: polynomial order used by the spline interpolation
regularization_weight: weight on smoothness regularizer in interpolation
num_boundary_points: How many zero-flow boundary points to include at
      each image edge. Usage:
num_boundary_points=0: don't add zero-flow points
num_boundary_points=1: 4 corners of the image
num_boundary_points=2: 4 corners and one in the middle of each edge
(8 points total)
num_boundary_points=n: 4 corners and n-1 along each edge
name: A name for the operation (optional).
Note that image and offsets can be of type tf.half, tf.float32, or
tf.float64, and do not necessarily have to be the same type.
Returns:
warped_image: `[batch, height, width, channels]` float `Tensor` with same
type as input image.
flow_field: `[batch, height, width, 2]` float `Tensor` containing the dense
flow field produced by the interpolation.
"""
image = ops.convert_to_tensor(image)
source_control_point_locations = ops.convert_to_tensor(
source_control_point_locations)
dest_control_point_locations = ops.convert_to_tensor(
dest_control_point_locations)
control_point_flows = (
dest_control_point_locations - source_control_point_locations)
clamp_boundaries = num_boundary_points > 0
boundary_points_per_edge = num_boundary_points - 1
with ops.name_scope(name):
batch_size, image_height, image_width, _ = image.get_shape().as_list()
# This generates the dense locations where the interpolant
# will be evaluated.
grid_locations = _get_grid_locations(image_height, image_width)
flattened_grid_locations = np.reshape(grid_locations,
[image_height * image_width, 2])
flattened_grid_locations = constant_op.constant(
_expand_to_minibatch(flattened_grid_locations, batch_size), image.dtype)
if clamp_boundaries:
(dest_control_point_locations,
control_point_flows) = _add_zero_flow_controls_at_boundary(
dest_control_point_locations, control_point_flows, image_height,
image_width, boundary_points_per_edge)
flattened_flows = interpolate_spline.interpolate_spline(
dest_control_point_locations, control_point_flows,
flattened_grid_locations, interpolation_order, regularization_weight)
dense_flows = array_ops.reshape(flattened_flows,
[batch_size, image_height, image_width, 2])
warped_image = dense_image_warp.dense_image_warp(image, dense_flows)
return warped_image, dense_flows
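# Hedged usage sketch (added for illustration; not part of the original
# module): warp a one-image batch by dragging a single control point.
# Shapes follow the docstring above; the values are arbitrary.
#
#   import tensorflow as tf
#   image = tf.zeros([1, 64, 64, 3], dtype=tf.float32)
#   src = tf.constant([[[16.0, 16.0]]])  # [batch, num_control_points, (y, x)]
#   dst = tf.constant([[[24.0, 24.0]]])
#   warped, flow = sparse_image_warp(image, src, dst, num_boundary_points=2)
#   # warped: [1, 64, 64, 3]; flow: [1, 64, 64, 2]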
|
apache-2.0
|
britcey/ansible
|
lib/ansible/modules/notification/mattermost.py
|
49
|
5291
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Benjamin Jolivot <[email protected]>
# Inspired by slack module :
# # (c) 2017, Steve Pletcher <[email protected]>
# # (c) 2016, René Moser <[email protected]>
# # (c) 2015, Stefan Berggren <[email protected]>
# # (c) 2014, Ramon de la Fuente <[email protected]>)
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
module: mattermost
short_description: Send Mattermost notifications
description:
- Sends notifications to U(http://your.mattermost.url) via the Incoming WebHook integration.
version_added: "2.3"
author: "Benjamin Jolivot (@bjolivot)"
options:
url:
description:
- Mattermost url (i.e. http://mattermost.yourcompany.com).
required: true
api_key:
description:
      - Mattermost webhook API key. Log into your Mattermost site and go to
        Menu -> Integrations -> Incoming Webhook -> Add Incoming Webhook.
        This will give you the full URL; api_key is the last part, e.g.
        http://mattermost.example.com/hooks/C(API_KEY)
required: true
text:
description:
- Text to send. Note that the module does not handle escaping characters.
required: true
channel:
description:
- Channel to send the message to. If absent, the message goes to the channel selected for the I(api_key).
username:
description:
      - This is the sender of the message (Username Override needs to be enabled by the Mattermost admin; see the Mattermost documentation).
default: Ansible
icon_url:
description:
- Url for the message sender's icon.
default: https://www.ansible.com/favicon.ico
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
default: yes
choices:
- 'yes'
- 'no'
"""
EXAMPLES = """
- name: Send notification message via Mattermost
mattermost:
url: http://mattermost.example.com
api_key: my_api_key
text: '{{ inventory_hostname }} completed'
- name: Send notification message via Mattermost all options
mattermost:
url: http://mattermost.example.com
api_key: my_api_key
text: '{{ inventory_hostname }} completed'
channel: notifications
username: 'Ansible on {{ inventory_hostname }}'
icon_url: http://www.example.com/some-image-file.png
"""
RETURN = '''
payload:
description: Mattermost payload
returned: success
type: string
webhook_url:
description: URL the webhook is sent to
returned: success
type: string
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import fetch_url
def main():
module = AnsibleModule(
supports_check_mode=True,
argument_spec = dict(
url = dict(type='str', required=True),
api_key = dict(type='str', required=True, no_log=True),
text = dict(type='str', required=True),
channel = dict(type='str', default=None),
username = dict(type='str', default='Ansible'),
icon_url = dict(type='str', default='https://www.ansible.com/favicon.ico'),
validate_certs = dict(default='yes', type='bool'),
)
)
#init return dict
result = dict(changed=False, msg="OK")
#define webhook
webhook_url = "{0}/hooks/{1}".format(module.params['url'],module.params['api_key'])
result['webhook_url'] = webhook_url
#define payload
payload = { }
for param in ['text', 'channel', 'username', 'icon_url']:
if module.params[param] is not None:
payload[param] = module.params[param]
payload=module.jsonify(payload)
result['payload'] = payload
#http headers
headers = {
'Content-Type': 'application/json',
'Accept': 'application/json',
}
    # Notes:
    # Nothing is done in check mode; the task will pass even if the server
    # is down and/or the token is invalid. If someone finds a good way to
    # check, please contribute it.
    # Send the request if not in check mode
if module.check_mode is False:
response, info = fetch_url(module=module, url=webhook_url, headers=headers, method='POST', data=payload)
        # Something went wrong
        if info['status'] != 200:
            result['msg'] = "Failed to send mattermost message, the error was: {0}".format(info['msg'])
module.fail_json(**result)
#Looks good
module.exit_json(**result)
if __name__ == '__main__':
main()
|
gpl-3.0
|
zuphilip/ocropy
|
ocrolib/exceptions.py
|
5
|
2139
|
import inspect
import numpy
def summary(x):
"""Summarize a datatype as a string (for display and debugging)."""
if type(x)==numpy.ndarray:
return "<ndarray %s %s>"%(x.shape,x.dtype)
    if type(x)==str and len(x)>10:
        return '"%s..."'%x[:10]
    if type(x)==list and len(x)>10:
        return '%s...'%x[:10]
return str(x)
################################################################
### Ocropy exceptions
################################################################
class OcropusException(Exception):
trace = 1
def __init__(self,*args,**kw):
Exception.__init__(self,*args,**kw)
class Unimplemented(OcropusException):
    """Exception raised when a feature is unimplemented."""
    trace = 1
    def __init__(self,s):
        Exception.__init__(self,inspect.stack()[1][3])
class Internal(OcropusException):
    """Exception raised when an internal (implementation) error occurs."""
    trace = 1
    def __init__(self,s):
        Exception.__init__(self,inspect.stack()[1][3])
class RecognitionError(OcropusException):
    """Some kind of error during recognition."""
    trace = 1
def __init__(self,explanation,**kw):
self.context = kw
s = [explanation]
s += ["%s=%s"%(k,summary(kw[k])) for k in kw]
message = " ".join(s)
Exception.__init__(self,message)
class Warning(OcropusException):
trace = 0
def __init__(self,*args,**kw):
OcropusException.__init__(self,*args,**kw)
class BadClassLabel(OcropusException):
    """Exception for bad class labels in a dataset or input."""
    trace = 0
def __init__(self,s):
Exception.__init__(self,s)
class BadImage(OcropusException):
trace = 0
def __init__(self,*args,**kw):
OcropusException.__init__(self,*args)
class BadInput(OcropusException):
trace = 0
def __init__(self,*args,**kw):
OcropusException.__init__(self,*args,**kw)
class FileNotFound(OcropusException):
    """Some file-not-found error during OCRopus processing."""
    trace = 0
def __init__(self,fname):
self.fname = fname
def __str__(self):
return "file not found %s"%(self.fname,)
|
apache-2.0
|
ksrajkumar/openerp-6.1
|
openerp/addons/sale/report/sale_order.py
|
10
|
1447
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from report import report_sxw
class order(report_sxw.rml_parse):
def __init__(self, cr, uid, name, context=None):
super(order, self).__init__(cr, uid, name, context=context)
self.localcontext.update({
'time': time,
})
report_sxw.report_sxw('report.sale.order', 'sale.order', 'addons/sale/report/sale_order.rml', parser=order, header="external")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
mu-editor/mu
|
tests/test_logic.py
|
2
|
115644
|
# -*- coding: utf-8 -*-
"""
Tests for the Editor and REPL logic.
"""
import sys
import os
import atexit
import codecs
import contextlib
import json
import locale
import random
import re
import shutil
import subprocess
import tempfile
from unittest import mock
import uuid
import pytest
import mu.config
import mu.logic
import mu.settings
from mu.virtual_environment import venv
from PyQt5.QtWidgets import QMessageBox
from PyQt5.QtCore import pyqtSignal, QObject, Qt
from mu import __version__
SESSION = json.dumps(
{
"theme": "night",
"mode": "python",
"paths": ["path/foo.py", "path/bar.py"],
"envars": [["name", "value"]],
}
)
ENCODING_COOKIE = "# -*- coding: {} -*-{}".format(
mu.logic.ENCODING, mu.logic.NEWLINE
)
#
# Testing support functions
# These functions generate testing scenarios or mocks, making the tests
# more readable and the element under test easier to spot among the
# boilerplate setup code
#
def rstring(length=10, characters="abcdefghijklmnopqrstuvwxyz"):
letters = list(characters)
random.shuffle(letters)
return "".join(letters[:length])
def _generate_python_files(contents, dirpath):
"""Generate a series of .py files, one for each element in an iterable
contents should be an iterable (typically a list) containing one
string for each of a the number of files to be created. The files
will be created in the dirpath directory passed in which will neither
be created nor destroyed by this function.
"""
for i, c in enumerate(contents):
name = uuid.uuid1().hex
filepath = os.path.join(dirpath, "%03d-%s.py" % (1 + i, name))
#
# Write using newline="" so line-ending tests can work!
# If a binary write is needed (eg for an encoding test) pass
# a list of empty strings as contents and then write the bytes
# as part of the test.
#
with open(filepath, "w", encoding=mu.logic.ENCODING, newline="") as f:
f.write(c)
yield filepath
@contextlib.contextmanager
def generate_python_files(contents, dirpath=None):
"""
Create a temp directory and populate it with .py files, then remove it.
"""
dirpath = dirpath or tempfile.mkdtemp(prefix="mu-")
yield list(_generate_python_files(contents, dirpath))
shutil.rmtree(dirpath)
@contextlib.contextmanager
def generate_python_file(text="", dirpath=None):
"""
    Create a temp directory and populate it with one .py file, then remove it.
"""
dirpath = dirpath or tempfile.mkdtemp(prefix="mu-")
for filepath in _generate_python_files([text], dirpath):
yield filepath
break
shutil.rmtree(dirpath)
@contextlib.contextmanager
def generate_session(
theme="day",
mode="python",
file_contents=None,
envars=[["name", "value"]],
minify=False,
microbit_runtime=None,
zoom_level=2,
window=None,
venv_path=None,
**kwargs
):
"""Generate a temporary session file for one test
By default, the session file will be created inside a temporary directory
which will be removed afterwards. If filepath is specified the session
file will be created with that fully-specified path and filename.
If an iterable of file contents is specified (referring to text files to
    be reloaded from a previous session) then files will be created in a
    directory with the contents provided.
    If None is passed to any of the parameters, that item will not be included
    in the session data. Once all parameters have been considered, if no
    session data is present, the file will *not* be created.
Any additional kwargs are created as items in the data (eg to generate
invalid file contents)
The mu.logic.get_session_path function is mocked to return the
temporary filepath from this session.
The session is yielded to the contextmanager so the typical usage is:
    with generate_session(theme="night") as session:
        # do some test
        assert <whatever>.theme == session['theme']
"""
dirpath = tempfile.mkdtemp(prefix="mu-")
session_data = {}
if theme:
session_data["theme"] = theme
if mode:
session_data["mode"] = mode
if file_contents:
paths = _generate_python_files(file_contents, dirpath)
session_data["paths"] = list(paths)
if envars:
session_data["envars"] = envars
if minify is not None:
session_data["minify"] = minify
if microbit_runtime:
session_data["microbit_runtime"] = microbit_runtime
if zoom_level:
session_data["zoom_level"] = zoom_level
if window:
session_data["window"] = window
if venv_path:
session_data["venv_path"] = venv_path
session_data.update(**kwargs)
session = mu.settings.SessionSettings()
session.reset()
session.update(session_data)
with mock.patch("mu.settings.session", session):
yield session
shutil.rmtree(dirpath)
def mocked_view(text, path, newline):
"""Create a mocked view with path, newline and text"""
view = mock.MagicMock()
view.current_tab = mock.MagicMock()
view.current_tab.path = path
view.current_tab.newline = newline
view.current_tab.text = mock.MagicMock(return_value=text)
view.add_tab = mock.MagicMock()
view.get_save_path = mock.MagicMock(return_value=path)
view.get_load_path = mock.MagicMock()
view.add_tab = mock.MagicMock()
return view
def mocked_editor(mode="python", text=None, path=None, newline=None):
"""Return a mocked editor with a mocked view
This is intended to assist the several tests where a mocked editor
is needed but where the length of setup code to get there tends to
obscure the intent of the test
"""
view = mocked_view(text, path, newline)
ed = mu.logic.Editor(view)
ed.select_mode = mock.MagicMock()
mock_mode = mock.MagicMock()
mock_mode.save_timeout = 5
mock_mode.workspace_dir.return_value = "/fake/path"
mock_mode.api.return_value = ["API Specification"]
ed.modes = {mode: mock_mode}
return ed
@pytest.fixture(scope="module")
def prevent_settings_autosave():
"""Prevent the settings from auto-saving"""
atexit._clear()
@pytest.fixture
def mocked_session():
"""Mock the save-session functionality"""
with mock.patch.object(mu.settings, "session") as mocked_session:
yield mocked_session
def test_CONSTANTS():
"""
Ensure the expected constants exist.
"""
assert mu.config.HOME_DIRECTORY
assert mu.config.DATA_DIR
assert mu.config.WORKSPACE_NAME
@pytest.fixture
def microbit_com1():
microbit = mu.logic.Device(
0x0D28,
0x0204,
"COM1",
123456,
"ARM",
"BBC micro:bit",
"microbit",
"BBC micro:bit",
)
return microbit
@pytest.fixture
def microbit_com2():
microbit = mu.logic.Device(
0x0D28,
0x0204,
"COM2",
123456,
"ARM",
"BBC micro:bit",
"microbit",
"BBC micro:bit",
)
return microbit
@pytest.fixture
def adafruit_feather():
adafruit_feather = mu.logic.Device(
0x239A,
0x800B,
"COM1",
123456,
"ARM",
"CircuitPython",
"circuitpython",
"Adafruit Feather",
)
return adafruit_feather
@pytest.fixture
def esp_device():
esp_device = mu.logic.Device(
0x0403,
0x6015,
"COM1",
123456,
"Sparkfun",
"ESP MicroPython",
"esp",
# No board_name specified
)
return esp_device
def test_write_and_flush():
"""
Ensure the write and flush function tries to write to the filesystem and
flush so the write happens immediately.
"""
mock_fd = mock.MagicMock()
mock_content = mock.MagicMock()
with mock.patch("mu.logic.os.fsync") as fsync:
mu.logic.write_and_flush(mock_fd, mock_content)
fsync.assert_called_once_with(mock_fd)
mock_fd.write.assert_called_once_with(mock_content)
mock_fd.flush.assert_called_once_with()
def test_save_and_encode():
"""
When saving, ensure that encoding cookies are honoured, otherwise fall back
to the default encoding (UTF-8 -- as per Python standard practice).
"""
encoding_cookie = "# -*- coding: latin-1 -*-"
text = encoding_cookie + '\n\nprint("Hello")'
mock_open = mock.MagicMock()
mock_wandf = mock.MagicMock()
# Valid cookie
with mock.patch("mu.logic.open", mock_open), mock.patch(
"mu.logic.write_and_flush", mock_wandf
):
mu.logic.save_and_encode(text, "foo.py")
mock_open.assert_called_once_with(
"foo.py", "w", encoding="latin-1", newline=""
)
assert mock_wandf.call_count == 1
mock_open.reset_mock()
mock_wandf.reset_mock()
# Invalid cookie
encoding_cookie = "# -*- coding: utf-42 -*-"
text = encoding_cookie + '\n\nprint("Hello")'
with mock.patch("mu.logic.open", mock_open), mock.patch(
"mu.logic.write_and_flush", mock_wandf
):
mu.logic.save_and_encode(text, "foo.py")
mock_open.assert_called_once_with(
"foo.py", "w", encoding=mu.logic.ENCODING, newline=""
)
assert mock_wandf.call_count == 1
mock_open.reset_mock()
mock_wandf.reset_mock()
# No cookie
text = 'print("Hello")'
with mock.patch("mu.logic.open", mock_open), mock.patch(
"mu.logic.write_and_flush", mock_wandf
):
mu.logic.save_and_encode(text, "foo.py")
mock_open.assert_called_once_with(
"foo.py", "w", encoding=mu.logic.ENCODING, newline=""
)
assert mock_wandf.call_count == 1
def test_sniff_encoding_from_BOM():
"""
Ensure an expected BOM detected at the start of the referenced file is
used to set the expected encoding.
"""
with mock.patch(
"mu.logic.open", mock.mock_open(read_data=codecs.BOM_UTF8 + b"# hello")
):
assert mu.logic.sniff_encoding("foo.py") == "utf-8-sig"
def test_sniff_encoding_from_cookie():
"""
If there's a cookie present, then use that to work out the expected
encoding.
"""
encoding_cookie = b"# -*- coding: latin-1 -*-"
mock_locale = mock.MagicMock()
mock_locale.getpreferredencoding.return_value = "UTF-8"
with mock.patch(
"mu.logic.open", mock.mock_open(read_data=encoding_cookie)
), mock.patch("mu.logic.locale", mock_locale):
assert mu.logic.sniff_encoding("foo.py") == "latin-1"
def test_sniff_encoding_from_bad_cookie():
"""
If there's a cookie present but we can't even read it, then return None.
"""
encoding_cookie = "# -*- coding: silly-你好 -*-".encode("utf-8")
mock_locale = mock.MagicMock()
mock_locale.getpreferredencoding.return_value = "ascii"
with mock.patch(
"mu.logic.open", mock.mock_open(read_data=encoding_cookie)
), mock.patch("mu.logic.locale", mock_locale):
assert mu.logic.sniff_encoding("foo.py") is None
def test_sniff_encoding_fallback_to_locale():
"""
If there's no encoding information in the file, just return None.
"""
mock_locale = mock.MagicMock()
mock_locale.getpreferredencoding.return_value = "ascii"
with mock.patch(
"mu.logic.open", mock.mock_open(read_data=b"# hello")
), mock.patch("mu.logic.locale", mock_locale):
assert mu.logic.sniff_encoding("foo.py") is None
def test_sniff_newline_convention():
"""
Ensure sniff_newline_convention returns the expected newline convention.
"""
text = "the\r\ncat\nsat\non\nthe\r\nmat"
assert mu.logic.sniff_newline_convention(text) == "\n"
def test_sniff_newline_convention_local():
"""
Ensure sniff_newline_convention returns the local newline convention if it
cannot determine it from the text.
"""
text = "There are no new lines here"
assert mu.logic.sniff_newline_convention(text) == os.linesep
@pytest.mark.skip("No longer needed post PR #1200")
def test_get_session_path():
"""
Ensure the result of calling get_admin_file_path with session.json returns
the expected result.
"""
mock_func = mock.MagicMock(return_value="foo")
with mock.patch("mu.logic.get_admin_file_path", mock_func):
assert mu.logic.get_session_path() == "foo"
mock_func.assert_called_once_with("session.json")
@pytest.mark.skip("No longer needed post PR #1200")
def test_get_settings_path():
"""
Ensure the result of calling get_admin_file_path with settings.json returns
the expected result.
"""
mock_func = mock.MagicMock(return_value="foo")
with mock.patch("mu.logic.get_admin_file_path", mock_func):
assert mu.logic.get_settings_path() == "foo"
mock_func.assert_called_once_with("settings.json")
def test_extract_envars():
"""
Given a correct textual representation, get the expected list
representation of user defined environment variables.
"""
raw = "FOO=BAR\n BAZ = Q=X \n\n\n"
expected = mu.logic.extract_envars(raw)
assert expected == [["FOO", "BAR"], ["BAZ", "Q=X"]]
def test_check_flake():
"""
Ensure the check_flake method calls PyFlakes with the expected code
reporter.
"""
mock_r = mock.MagicMock()
mock_r.log = [{"line_no": 2, "column": 0, "message": "b"}]
with mock.patch(
"mu.logic.MuFlakeCodeReporter", return_value=mock_r
), mock.patch("mu.logic.check", return_value=None) as mock_check:
result = mu.logic.check_flake("foo.py", "some code")
assert result == {2: mock_r.log}
mock_check.assert_called_once_with("some code", "foo.py", mock_r)
def test_check_flake_needing_expansion():
"""
Ensure the check_flake method calls PyFlakes with the expected code
reporter.
"""
mock_r = mock.MagicMock()
msg = "'microbit.foo' imported but unused"
mock_r.log = [{"line_no": 2, "column": 0, "message": msg}]
with mock.patch(
"mu.logic.MuFlakeCodeReporter", return_value=mock_r
), mock.patch("mu.logic.check", return_value=None) as mock_check:
code = "from microbit import *"
result = mu.logic.check_flake("foo.py", code)
assert result == {}
mock_check.assert_called_once_with(
mu.logic.EXPANDED_IMPORT, "foo.py", mock_r
)
def test_check_flake_with_builtins():
"""
If a list of assumed builtin symbols is passed, any "undefined name"
messages for them are ignored.
"""
mock_r = mock.MagicMock()
mock_r.log = [
{"line_no": 2, "column": 0, "message": "undefined name 'foo'"}
]
with mock.patch(
"mu.logic.MuFlakeCodeReporter", return_value=mock_r
), mock.patch("mu.logic.check", return_value=None) as mock_check:
result = mu.logic.check_flake("foo.py", "some code", builtins=["foo"])
assert result == {}
mock_check.assert_called_once_with("some code", "foo.py", mock_r)
def test_check_real_flake_output_with_builtins():
"""
Check that passing builtins correctly suppresses undefined name errors
using real .check_flake() output.
"""
ok_result = mu.logic.check_flake("foo.py", "print(foo)", builtins=["foo"])
assert ok_result == {}
bad_result = mu.logic.check_flake("foo.py", "print(bar)", builtins=["foo"])
assert len(bad_result) == 1
def test_check_pycodestyle_E121():
"""
Ensure the expected result is generated from the PEP8 style validator.
    Should ensure we honour Mu's internal override of the E123 error.
"""
code = "mylist = [\n 1, 2,\n 3, 4,\n ]" # would have Generated E123
result = mu.logic.check_pycodestyle(code)
assert len(result) == 0
def test_check_pycodestyle_custom_override():
"""
    Ensure the expected result is generated from the PEP8 style validator.
    For this test we have overridden the E265 error check via a custom
    override "pycodestyle" file in a directory pointed to by the content of
    scripts/codecheck.ini. We should *not* get an E265 error despite the
    lack of space after the #
"""
code = "# OK\n#this is ok if we override the E265 check\n"
result = mu.logic.check_pycodestyle(code, "tests/scripts/pycodestyle")
assert len(result) == 0
def test_check_pycodestyle():
"""
    Ensure the expected result is generated from the PEP8 style validator.
"""
code = "import foo\n\n\n\n\n\ndef bar():\n pass\n" # Generate E303
result = mu.logic.check_pycodestyle(code)
assert len(result) == 1
assert result[6][0]["line_no"] == 6
assert result[6][0]["column"] == 0
assert " above this line" in result[6][0]["message"]
assert result[6][0]["code"] == "E303"
def test_check_pycodestyle_with_non_ascii():
"""
Ensure pycodestyle can at least see a file with non-ASCII characters
"""
code = "x='\u2005'\n"
try:
mu.logic.check_pycodestyle(code)
except Exception as exc:
assert False, "Exception was raised: %s" % exc
#
# Doesn't actually matter what pycodestyle returns; we just want to make
# sure it didn't error out
#
def test_MuFlakeCodeReporter_init():
"""
Check state is set up as expected.
"""
r = mu.logic.MuFlakeCodeReporter()
assert r.log == []
def test_MuFlakeCodeReporter_unexpected_error():
"""
Check the reporter handles unexpected errors.
"""
r = mu.logic.MuFlakeCodeReporter()
r.unexpectedError("foo.py", "Nobody expects the Spanish Inquisition!")
assert len(r.log) == 1
assert r.log[0]["line_no"] == 0
assert r.log[0]["filename"] == "foo.py"
assert r.log[0]["message"] == "Nobody expects the Spanish Inquisition!"
def test_MuFlakeCodeReporter_syntax_error():
"""
Check the reporter handles syntax errors in a humane and kid friendly
manner.
"""
msg = (
"Syntax error. Python cannot understand this line. Check for "
"missing characters!"
)
r = mu.logic.MuFlakeCodeReporter()
r.syntaxError(
"foo.py", "something incomprehensible to kids", "2", 3, "source"
)
assert len(r.log) == 1
assert r.log[0]["line_no"] == 1
assert r.log[0]["message"] == msg
assert r.log[0]["column"] == 2
assert r.log[0]["source"] == "source"
def test_MuFlakeCodeReporter_flake_matched():
"""
Check the reporter handles flake (regular) errors that match the expected
message structure.
"""
r = mu.logic.MuFlakeCodeReporter()
err = "foo.py:4:0 something went wrong"
r.flake(err)
assert len(r.log) == 1
assert r.log[0]["line_no"] == 3
assert r.log[0]["column"] == 0
assert r.log[0]["message"] == "something went wrong"
def test_MuFlakeCodeReporter_flake_real_output():
"""
Check the reporter handles real output from flake, to catch format
change regressions.
"""
check = mu.logic.check
reporter = mu.logic.MuFlakeCodeReporter()
code = "a = 1\nb = 2\nc\n"
check(code, "filename", reporter)
assert reporter.log[0]["line_no"] == 2
assert reporter.log[0]["message"] == "undefined name 'c'"
assert reporter.log[0]["column"] == 1
def test_MuFlakeCodeReporter_flake_un_matched():
"""
Check the reporter handles flake errors that do not conform to the expected
message structure.
"""
r = mu.logic.MuFlakeCodeReporter()
err = "something went wrong"
r.flake(err)
assert len(r.log) == 1
assert r.log[0]["line_no"] == 0
assert r.log[0]["column"] == 0
assert r.log[0]["message"] == "something went wrong"
def test_device__init(adafruit_feather):
assert adafruit_feather.vid == 0x239A
assert adafruit_feather.pid == 0x800B
assert adafruit_feather.port == "COM1"
assert adafruit_feather.serial_number == 123456
assert adafruit_feather.manufacturer == "ARM"
assert adafruit_feather.long_mode_name == "CircuitPython"
assert adafruit_feather.short_mode_name == "circuitpython"
assert adafruit_feather.board_name == "Adafruit Feather"
def test_device_name(esp_device, adafruit_feather):
"""
Test that devices without a boardname (such as the esp_device),
are the long mode name with " device" appended
"""
assert esp_device.name == "ESP MicroPython device"
assert adafruit_feather.name == "Adafruit Feather"
def test_device_equality(microbit_com1):
assert microbit_com1 == microbit_com1
def test_device_inequality(microbit_com1, microbit_com2):
assert microbit_com1 != microbit_com2
def test_device_ordering_lt(microbit_com1, adafruit_feather):
assert adafruit_feather < microbit_com1
def test_device_ordering_gt(microbit_com1, adafruit_feather):
assert microbit_com1 > adafruit_feather
def test_device_ordering_le(microbit_com1, adafruit_feather):
assert adafruit_feather <= microbit_com1
def test_device_ordering_ge(microbit_com1, adafruit_feather):
assert microbit_com1 >= adafruit_feather
def test_device_to_string(adafruit_feather):
assert (
str(adafruit_feather)
== "Adafruit Feather on COM1 (VID: 0x239A, PID: 0x800B)"
)
def test_device_hash(microbit_com1, microbit_com2):
assert hash(microbit_com1) == hash(microbit_com1)
assert hash(microbit_com1) != hash(microbit_com2)
def test_devicelist_index(microbit_com1):
modes = {}
dl = mu.logic.DeviceList(modes)
dl.add_device(microbit_com1)
assert dl[0] == microbit_com1
def test_devicelist_length(microbit_com1, microbit_com2):
modes = {}
dl = mu.logic.DeviceList(modes)
assert len(dl) == 0
dl.add_device(microbit_com1)
assert len(dl) == 1
dl.add_device(microbit_com2)
assert len(dl) == 2
def test_devicelist_rowCount(microbit_com1, microbit_com2):
modes = {}
dl = mu.logic.DeviceList(modes)
assert dl.rowCount(None) == 0
dl.add_device(microbit_com1)
assert dl.rowCount(None) == 1
dl.add_device(microbit_com2)
assert dl.rowCount(None) == 2
def test_devicelist_data(microbit_com1, adafruit_feather):
modes = {}
dl = mu.logic.DeviceList(modes)
dl.add_device(microbit_com1)
dl.add_device(adafruit_feather)
tooltip = dl.data(dl.index(0), Qt.ToolTipRole)
display = dl.data(dl.index(0), Qt.DisplayRole)
assert display == adafruit_feather.name
assert tooltip == str(adafruit_feather)
tooltip = dl.data(dl.index(1), Qt.ToolTipRole)
display = dl.data(dl.index(1), Qt.DisplayRole)
assert display == microbit_com1.name
assert tooltip == str(microbit_com1)
def test_devicelist_add_device_in_sorted_order(
microbit_com1, adafruit_feather
):
modes = {}
dl = mu.logic.DeviceList(modes)
dl.add_device(microbit_com1)
assert dl[0] == microbit_com1
dl.add_device(adafruit_feather)
assert dl[0] == adafruit_feather
assert dl[1] == microbit_com1
xyz_device = mu.logic.Device(
0x123B, 0x333A, "COM1", 123456, "ARM", "ESP Mode", "esp", "xyz"
)
dl.add_device(xyz_device)
assert dl[2] == xyz_device
def test_devicelist_remove_device(microbit_com1, adafruit_feather):
modes = {}
dl = mu.logic.DeviceList(modes)
dl.add_device(microbit_com1)
assert len(dl) == 1
dl.remove_device(microbit_com1)
assert len(dl) == 0
dl.add_device(microbit_com1)
dl.add_device(adafruit_feather)
assert len(dl) == 2
dl.remove_device(adafruit_feather)
assert len(dl) == 1
dl.remove_device(microbit_com1)
assert len(dl) == 0
def test_editor_init():
"""
Ensure a new instance is set-up correctly and creates the required folders
upon first start.
"""
view = mock.MagicMock()
# Check the editor attempts to create required directories if they don't
# already exist.
with mock.patch("os.path.exists", return_value=False), mock.patch(
"os.makedirs", return_value=None
) as mkd:
e = mu.logic.Editor(view)
assert e._view == view
assert e.theme == "day"
assert e.mode == "python"
assert e.modes == {}
assert e.envars == []
assert e.minify is False
assert e.microbit_runtime == ""
# assert e.connected_devices == set()
assert e.find == ""
assert e.replace == ""
assert e.global_replace is False
assert e.selecting_mode is False
assert mkd.call_count == 1
assert mkd.call_args_list[0][0][0] == mu.logic.DATA_DIR
def test_editor_setup():
"""
An editor should have a modes attribute.
"""
view = mock.MagicMock()
e = mu.logic.Editor(view)
mock_mode = mock.MagicMock()
mock_mode.workspace_dir.return_value = "foo"
mock_modes = {"python": mock_mode}
with mock.patch("os.path.exists", return_value=False), mock.patch(
"os.makedirs", return_value=None
) as mkd, mock.patch("shutil.copy") as mock_shutil_copy, mock.patch(
"shutil.copytree"
) as mock_shutil_copytree:
e.setup(mock_modes)
assert mkd.call_count == 5
assert mkd.call_args_list[0][0][0] == "foo"
asset_len = len(mu.logic.DEFAULT_IMAGES) + len(mu.logic.DEFAULT_SOUNDS)
assert mock_shutil_copy.call_count == asset_len
assert mock_shutil_copytree.call_count == 2
assert e.modes == mock_modes
view.set_usb_checker.assert_called_once_with(
1, e.connected_devices.check_usb
)
def test_editor_connect_to_status_bar():
"""
Check that the Window status bar is connected appropriately
to Editor-pane and to modes
"""
view = mock.MagicMock()
e = mu.logic.Editor(view)
mock_python_mode = mock.MagicMock()
mock_esp_mode = mock.MagicMock()
mock_python_mode.workspace_dir.return_value = "foo"
mock_modes = {"python": mock_python_mode, "esp": mock_esp_mode}
mock_device_selector = mock.MagicMock()
with mock.patch("os.path.exists", return_value=False), mock.patch(
"os.makedirs", return_value=None
), mock.patch("shutil.copy"), mock.patch("shutil.copytree"):
e.setup(mock_modes)
sb = mock.MagicMock()
sb.device_selector = mock_device_selector
e.connect_to_status_bar(sb)
# Check device_changed signal is connected to both editor and modes
assert sb.device_selector.device_changed.connect.call_count == 3
def test_editor_restore_session_existing_runtime():
"""
A correctly specified session is restored properly.
"""
mode, theme = "python", "night"
file_contents = ["", ""]
ed = mocked_editor(mode)
with mock.patch("os.path.isfile", return_value=True):
with mock.patch.object(venv, "relocate") as venv_relocate:
with mock.patch.object(venv, "ensure"), mock.patch.object(
venv, "create"
):
with generate_session(
theme,
mode,
file_contents,
microbit_runtime="/foo",
zoom_level=5,
venv_path="foo",
):
ed.restore_session()
assert ed.theme == theme
assert ed._view.add_tab.call_count == len(file_contents)
ed._view.set_theme.assert_called_once_with(theme)
assert ed.envars == [["name", "value"]]
assert ed.minify is False
assert ed.microbit_runtime == "/foo"
assert ed._view.zoom_position == 5
    venv_relocate.assert_called_with("foo")
def test_editor_restore_session_missing_runtime():
"""
If the referenced microbit_runtime file doesn't exist, reset to '' so Mu
uses the built-in runtime.
"""
mode, theme = "python", "night"
file_contents = ["", ""]
ed = mocked_editor(mode)
with generate_session(theme, mode, file_contents, microbit_runtime="/foo"):
ed.restore_session()
assert ed.theme == theme
assert ed._view.add_tab.call_count == len(file_contents)
ed._view.set_theme.assert_called_once_with(theme)
assert ed.envars == [["name", "value"]]
assert ed.minify is False
assert ed.microbit_runtime == "" # File does not exist so set to ''
def test_editor_restore_session_missing_files():
"""
Missing files that were opened tabs in the previous session are safely
ignored when attempting to restore them.
"""
mode, theme = "python", "night"
ed = mocked_editor(mode)
with generate_session(theme, mode) as session:
session["paths"] = ["*does not exist*"]
ed.restore_session()
assert ed._view.add_tab.call_count == 0
def test_editor_restore_session_invalid_mode():
"""
As Mu's modes are added and/or renamed, invalid mode names may need to be
ignored (this happens regularly when changing versions when developing
Mu itself).
"""
valid_mode, invalid_mode = "python", uuid.uuid1().hex
ed = mocked_editor(valid_mode)
with generate_session(mode=invalid_mode):
ed.restore_session()
ed.select_mode.assert_called_once_with(None)
def test_editor_restore_session_no_session_file():
"""
If there's no prior session file (such as upon first start) then simply
start up the editor with an empty untitled tab.
Strictly, this isn't now a check for no session file but for an
empty session object (which might have arisen from no file)
"""
ed = mocked_editor()
ed._view.tab_count = 0
ed._view.add_tab = mock.MagicMock()
session = mu.settings.SessionSettings()
filepath = os.path.abspath(rstring())
assert not os.path.exists(filepath)
session.load(filepath)
with mock.patch.object(mu.settings, "session", session):
ed.restore_session()
    assert ed._view.add_tab.call_count == 1
ed.select_mode.assert_called_once_with(None)
def test_editor_restore_session_invalid_file(tmp_path):
"""
A malformed JSON file is correctly detected and app behaves the same as if
there was no session file.
"""
ed = mocked_editor()
ed._view.tab_count = 0
ed._view.add_tab = mock.MagicMock()
session = mu.settings.SessionSettings()
filepath = os.path.join(str(tmp_path), rstring())
with open(filepath, "w") as f:
f.write(rstring())
session.load(filepath)
with mock.patch.object(mu.settings, "session", session):
ed.restore_session()
    assert ed._view.add_tab.call_count == 1
ed.select_mode.assert_called_once_with(None)
def test_restore_session_open_tabs_in_the_same_order():
"""
Editor.restore_session() loads editor tabs in the same order as the 'paths'
array in the session.json file.
"""
ed = mocked_editor()
ed.direct_load = mock.MagicMock()
settings_paths = ["a.py", "b.py", "c.py", "d.py"]
with generate_session(paths=settings_paths):
ed.restore_session()
direct_load_calls_args = [
os.path.basename(args[0])
for args, _kwargs in ed.direct_load.call_args_list
]
assert direct_load_calls_args == settings_paths
def test_editor_restore_saved_window_geometry():
"""
Window geometry specified in the session file is restored properly.
"""
ed = mocked_editor()
window = {"x": 10, "y": 20, "w": 1000, "h": 600}
with mock.patch("os.path.isfile", return_value=True):
with generate_session(window=window):
ed.restore_session()
ed._view.size_window.assert_called_once_with(**window)
def test_editor_restore_default_window_geometry():
"""
Window is sized by default if no geometry exists in the session file.
"""
ed = mocked_editor()
with mock.patch("os.path.isfile", return_value=True):
with generate_session():
ed.restore_session()
ed._view.size_window.assert_called_once_with()
def test_editor_open_focus_passed_file():
"""
    A file passed in by the OS is opened.
"""
ed = mocked_editor()
ed.direct_load = mock.MagicMock()
filepath = uuid.uuid1().hex
with generate_session():
ed.restore_session(paths=[filepath])
    # The editor may normalise the passed-in path, so check the recorded
    # call's suffix rather than rely on Mock's non-existent `called_with`.
    assert ed.direct_load.call_args[0][0].endswith(filepath)
def test_editor_session_and_open_focus_passed_file():
"""
    A passed-in file is merged with the session paths and opened last so
    that it receives focus.
"""
ed = mocked_editor()
ed.direct_load = mock.MagicMock()
filepath = os.path.abspath(uuid.uuid1().hex)
with generate_session(file_contents=[""]) as session:
ed.restore_session(paths=[filepath])
args_only = [args for (args, _) in ed.direct_load.call_args_list]
call_args = [a[0] for a in args_only]
expected_call_args = session["paths"] + [filepath]
assert call_args == expected_call_args
def test_toggle_theme_to_night():
"""
The current theme is 'day' so toggle to night. Expect the state to be
updated and the appropriate call to the UI layer is made.
"""
view = mock.MagicMock()
view.set_theme = mock.MagicMock()
ed = mu.logic.Editor(view)
ed.theme = "day"
ed.toggle_theme()
assert ed.theme == "night"
view.set_theme.assert_called_once_with(ed.theme)
def test_toggle_theme_to_day():
"""
The current theme is 'contrast' so toggle to day. Expect the state to be
updated and the appropriate call to the UI layer is made.
"""
view = mock.MagicMock()
view.set_theme = mock.MagicMock()
ed = mu.logic.Editor(view)
ed.theme = "contrast"
ed.toggle_theme()
assert ed.theme == "day"
view.set_theme.assert_called_once_with(ed.theme)
def test_toggle_theme_to_contrast():
"""
The current theme is 'night' so toggle to contrast. Expect the state to be
updated and the appropriate call to the UI layer is made.
"""
view = mock.MagicMock()
view.set_theme = mock.MagicMock()
ed = mu.logic.Editor(view)
ed.theme = "night"
ed.toggle_theme()
assert ed.theme == "contrast"
view.set_theme.assert_called_once_with(ed.theme)
def test_new():
"""
Ensure an untitled tab is added to the UI.
"""
view = mock.MagicMock()
view.add_tab = mock.MagicMock()
mock_mode = mock.MagicMock()
api = ["API specification"]
mock_mode.api.return_value = api
mock_mode.code_template = "new code template" + mu.logic.NEWLINE
ed = mu.logic.Editor(view)
ed.modes = {"python": mock_mode}
ed.new()
py = mock_mode.code_template + mu.logic.NEWLINE
view.add_tab.assert_called_once_with(None, py, api, mu.logic.NEWLINE)
def test_load_checks_file_exists():
"""
If the passed in path does not exist, this is logged and no other side
effect happens.
"""
view = mock.MagicMock()
ed = mu.logic.Editor(view)
with mock.patch("os.path.isfile", return_value=False), mock.patch(
"mu.logic.logger.info"
) as mock_info:
ed._load("not_a_file")
msg1 = "Loading script from: not_a_file"
msg2 = "The file not_a_file does not exist."
assert mock_info.call_args_list[0][0][0] == msg1
assert mock_info.call_args_list[1][0][0] == msg2
def test_load_python_file():
"""
If the user specifies a Python file (*.py) then ensure it's loaded and
added as a tab.
"""
text, newline = "python", "\n"
ed = mocked_editor()
with generate_python_file(text) as filepath:
ed._view.get_load_path.return_value = filepath
with mock.patch("mu.logic.read_and_decode") as mock_read:
mock_read.return_value = text, newline
ed.load()
mock_read.assert_called_once_with(filepath)
ed._view.add_tab.assert_called_once_with(
filepath, text, ed.modes[ed.mode].api(), newline
)
def test_load_python_file_case_insensitive_file_type():
"""
If the user specifies a Python file (*.PY) then ensure it's loaded and
added as a tab.
"""
text, newline = "python", "\n"
ed = mocked_editor()
with generate_python_file(text) as filepath:
ed._view.get_load_path.return_value = filepath.upper()
with mock.patch("mu.logic.read_and_decode") as mock_read, mock.patch(
"os.path.isfile", return_value=True
):
mock_read.return_value = text, newline
ed.load()
mock_read.assert_called_once_with(filepath.upper())
ed._view.add_tab.assert_called_once_with(
filepath.upper(), text, ed.modes[ed.mode].api(), newline
)
def test_load_python_unicode_error():
"""
If Mu encounters a UnicodeDecodeError when trying to read and decode the
file, it should display a helpful message explaining the problem.
"""
text = "not utf encoded content"
ed = mocked_editor()
with generate_python_file(text) as filepath:
ed._view.get_load_path.return_value = filepath
with mock.patch("mu.logic.read_and_decode") as mock_read:
mock_read.side_effect = UnicodeDecodeError(
"funnycodec", b"\x00\x00", 1, 2, "A fake reason!"
)
ed.load()
assert ed._view.show_message.call_count == 1
def test_no_duplicate_load_python_file():
"""
If the user specifies a file already loaded, ensure this is detected.
"""
brown_script = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"scripts",
"contains_brown.py",
)
editor_window = mock.MagicMock()
editor_window.show_message = mock.MagicMock()
editor_window.focus_tab = mock.MagicMock()
editor_window.add_tab = mock.MagicMock()
brown_tab = mock.MagicMock()
brown_tab.path = brown_script
unsaved_tab = mock.MagicMock()
unsaved_tab.path = None
editor_window.widgets = [unsaved_tab, brown_tab]
editor_window.get_load_path = mock.MagicMock(return_value=brown_script)
editor_window.current_tab.path = "path"
# Create the "editor" that'll control the "window".
editor = mu.logic.Editor(view=editor_window)
mock_mode = mock.MagicMock()
mock_mode.workspace_dir.return_value = "/fake/path"
editor.modes = {"python": mock_mode}
editor.load()
message = 'The file "{}" is already open.'.format(
os.path.basename(brown_script)
)
editor_window.show_message.assert_called_once_with(message)
editor_window.add_tab.assert_not_called()
def test_no_duplicate_load_python_file_widget_file_no_longer_exists():
"""
If the user specifies a file already loaded (but which no longer exists),
    ensure this is detected, logged, and Mu doesn't crash! See:
https://github.com/mu-editor/mu/issues/774
for context.
"""
brown_script = os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"scripts",
"contains_brown.py",
)
editor_window = mock.MagicMock()
editor_window.show_message = mock.MagicMock()
editor_window.focus_tab = mock.MagicMock()
editor_window.add_tab = mock.MagicMock()
missing_tab = mock.MagicMock()
missing_tab.path = "not_a_file.py"
editor_window.widgets = [missing_tab]
editor_window.current_tab.path = "path"
# Create the "editor" that'll control the "window".
editor = mu.logic.Editor(view=editor_window)
mock_mode = mock.MagicMock()
mock_mode.workspace_dir.return_value = "/fake/path"
editor.modes = {"python": mock_mode}
with mock.patch("mu.logic.logger") as mock_logger:
editor._load(brown_script)
assert mock_logger.info.call_count == 3
log = mock_logger.info.call_args_list[1][0][0]
assert log == "The file not_a_file.py no longer exists."
def test_load_other_file():
"""
If the user specifies a file supported by a Mu mode (like a .hex file) then
ensure it's loaded and added as a tab.
"""
view = mock.MagicMock()
view.get_load_path = mock.MagicMock(return_value="foo.hex")
view.add_tab = mock.MagicMock()
view.show_confirmation = mock.MagicMock()
view.current_tab.path = "path"
ed = mu.logic.Editor(view)
ed.change_mode = mock.MagicMock()
api = ["API specification"]
file_content = "PYTHON CODE"
mock_py = mock.MagicMock()
mock_py.file_extensions = None
mock_py.open_file.return_value = None
mock_mb = mock.MagicMock()
mock_mb.api.return_value = api
mock_mb.workspace_dir.return_value = "/fake/path"
mock_mb.open_file.return_value = (file_content, os.linesep)
mock_mb.file_extensions = ["hex"]
ed.modes = {"python": mock_py, "microbit": mock_mb}
ed.mode = "microbit"
with mock.patch("builtins.open", mock.mock_open()), mock.patch(
"os.path.isfile", return_value=True
):
ed.load()
assert view.get_load_path.call_count == 1
assert view.show_confirmation.call_count == 0
assert ed.change_mode.call_count == 0
view.add_tab.assert_called_once_with(None, file_content, api, os.linesep)
def test_load_other_file_change_mode():
"""
If the user specifies a file supported by a Mu mode (like a .html file)
    that is not currently active, then ensure it's loaded, added as a tab, and
it asks the user to change mode.
"""
view = mock.MagicMock()
view.get_load_path = mock.MagicMock(return_value="foo.html")
view.add_tab = mock.MagicMock()
view.show_confirmation = mock.MagicMock(return_value=QMessageBox.Ok)
view.current_tab.path = "path"
ed = mu.logic.Editor(view)
ed.change_mode = mock.MagicMock()
api = ["API specification"]
file_content = "<html></html>"
mock_py = mock.MagicMock()
mock_py.open_file.return_value = None
mock_py.api.return_value = api
mock_py.workspace_dir.return_value = "/fake/path"
mock_mb = mock.MagicMock()
mock_mb.api.return_value = api
mock_mb.workspace_dir.return_value = "/fake/path"
mock_mb.open_file.return_value = (file_content, os.linesep)
mock_mb.file_extensions = ["hex"]
ed.modes = {"python": mock_py, "microbit": mock_mb}
ed.mode = "python"
with mock.patch("builtins.open", mock.mock_open()), mock.patch(
"os.path.isfile", return_value=True
):
ed.load()
assert view.get_load_path.call_count == 1
assert view.show_confirmation.call_count == 1
assert ed.change_mode.call_count == 1
view.add_tab.assert_called_once_with(
"foo.html", file_content, api, os.linesep
)
def test_load_other_file_with_exception():
"""
    If the user specifies a file supported by a Mu mode (like a .hex file),
    ensure the file is ignored if opening it raises an unexpected exception.
"""
view = mock.MagicMock()
view.get_load_path = mock.MagicMock(return_value="foo.hex")
view.add_tab = mock.MagicMock()
view.show_confirmation = mock.MagicMock()
view.current_tab.path = "path"
ed = mu.logic.Editor(view)
ed.change_mode = mock.MagicMock()
mock_mb = mock.MagicMock()
mock_mb.workspace_dir.return_value = "/fake/path"
mock_mb.open_file = mock.MagicMock(side_effect=Exception(":("))
mock_mb.file_extensions = ["hex"]
ed.modes = {"microbit": mock_mb}
ed.mode = "microbit"
mock_open = mock.mock_open()
with mock.patch("builtins.open", mock_open), mock.patch(
"os.path.isfile", return_value=True
):
ed.load()
assert view.get_load_path.call_count == 1
assert view.show_message.call_count == 1
assert view.show_confirmation.call_count == 0
assert ed.change_mode.call_count == 0
assert view.add_tab.call_count == 0
def test_load_not_python_or_hex():
"""
If the user tries to open a file that isn't .py or .hex then Mu should
report a helpful message.
"""
view = mock.MagicMock()
ed = mu.logic.Editor(view)
with mock.patch("os.path.isfile", return_value=True):
ed._load("unknown_filetype.foo")
assert view.show_message.call_count == 1
def test_load_recovers_from_oserror():
"""
If loading the file results in an OSError (for example, the user doesn't
have permission to read the file), then a helpful message is displayed.
"""
text = "python"
ed = mocked_editor()
with generate_python_file(text) as filepath, mock.patch(
"mu.logic.read_and_decode", side_effect=OSError("boom")
):
ed._view.get_load_path.return_value = filepath
ed.load()
assert ed._view.show_message.call_count == 1
#
# When loading files Mu makes a note of the majority line-ending convention
# in use in the file. When it is saved, that convention is used.
#
def test_load_stores_newline():
"""
When a file is loaded, its newline convention should be held on the tab
for use when saving.
"""
newline = "r\n"
text = newline.join("the cat sat on the mat".split())
editor = mocked_editor()
with generate_python_file("abc\r\ndef") as filepath:
editor._view.get_load_path.return_value = filepath
editor.load()
assert editor._view.add_tab.called_with(
filepath, text, editor.modes[editor.mode].api(), "\r\n"
)
def test_save_restores_newline():
"""
When a file is saved the newline convention noted originally should
be used.
"""
newline = "\r\n"
test_text = mu.logic.NEWLINE.join("the cat sat on the mat".split())
with generate_python_file(test_text) as filepath:
with mock.patch("mu.logic.save_and_encode") as mock_save:
ed = mocked_editor(text=test_text, newline=newline, path=filepath)
ed.save()
        mock_save.assert_called_with(test_text, filepath, newline)
def test_save_strips_trailing_spaces():
"""
    When a file is saved, any trailing spaces should be removed from each
    line, leaving newlines intact. NB: we inadvertently strip trailing
    newlines in any case via save_and_encode.
"""
words = "the cat sat on the mat".split()
test_text = mu.logic.NEWLINE.join("%s " % w for w in words)
stripped_text = mu.logic.NEWLINE.join(words)
with generate_python_file(test_text) as filepath:
mu.logic.save_and_encode(test_text, filepath)
with open(filepath) as f:
assert f.read() == stripped_text + "\n"
def test_load_error():
"""
Ensure that anything else is just ignored.
"""
view = mock.MagicMock()
view.get_load_path = mock.MagicMock(return_value="foo.py")
view.add_tab = mock.MagicMock()
view.current_tab.path = "path"
ed = mu.logic.Editor(view)
mock_open = mock.MagicMock(side_effect=FileNotFoundError())
mock_mode = mock.MagicMock()
mock_mode.workspace_dir.return_value = "/fake/path"
ed.modes = {"python": mock_mode}
with mock.patch("builtins.open", mock_open):
ed.load()
assert view.get_load_path.call_count == 1
assert view.add_tab.call_count == 0
def test_load_sets_current_path():
"""
When a path has been selected for loading by the OS's file selector,
ensure that the directory containing the selected file is set as the
self.current_path for re-use later on.
"""
view = mock.MagicMock()
view.get_load_path = mock.MagicMock(
return_value=os.path.join("path", "foo.py")
)
view.current_tab.path = os.path.join("old_path", "foo.py")
ed = mu.logic.Editor(view)
ed._load = mock.MagicMock()
mock_mode = mock.MagicMock()
mock_mode.workspace_dir.return_value = "/fake/path"
mock_mode.file_extensions = ["html", "css"]
ed.modes = {"python": mock_mode}
ed.load()
assert ed.current_path == os.path.abspath("path")
def test_load_no_current_path():
"""
If there is no self.current_path the default location to look for a file
to load is the directory containing the file currently being edited.
"""
view = mock.MagicMock()
view.get_load_path = mock.MagicMock(
return_value=os.path.join("path", "foo.py")
)
view.current_tab.path = os.path.join("old_path", "foo.py")
ed = mu.logic.Editor(view)
ed._load = mock.MagicMock()
mock_mode = mock.MagicMock()
mock_mode.workspace_dir.return_value = "/fake/path"
mock_mode.file_extensions = []
ed.modes = {"python": mock_mode}
ed.load()
expected = os.path.abspath("old_path")
view.get_load_path.assert_called_once_with(
expected, "*.py *.pyw *.PY *.PYW", allow_previous=True
)
def test_load_no_current_path_no_current_tab():
"""
If there is no self.current_path nor is there a current file being edited
then the default location to look for a file to load is the current
mode's workspace directory. This used to be the default behaviour, but now
acts as a sensible fall-back.
"""
view = mock.MagicMock()
view.get_load_path = mock.MagicMock(
return_value=os.path.join("path", "foo.py")
)
view.current_tab = None
ed = mu.logic.Editor(view)
ed._load = mock.MagicMock()
mock_mode = mock.MagicMock()
mock_mode.workspace_dir.return_value = os.path.join("fake", "path")
mock_mode.file_extensions = []
ed.modes = {"python": mock_mode}
ed.load()
expected = mock_mode.workspace_dir()
view.get_load_path.assert_called_once_with(
expected, "*.py *.pyw *.PY *.PYW", allow_previous=True
)
def test_load_has_current_path_does_not_exist():
"""
If there is a self.current_path but it doesn't exist, then use the expected
fallback as the location to look for a file to load.
"""
view = mock.MagicMock()
view.get_load_path = mock.MagicMock(
return_value=os.path.join("path", "foo.py")
)
view.current_tab = None
ed = mu.logic.Editor(view)
ed._load = mock.MagicMock()
ed.current_path = "foo"
mock_mode = mock.MagicMock()
mock_mode.workspace_dir.return_value = os.path.join("fake", "path")
mock_mode.file_extensions = []
ed.modes = {"python": mock_mode}
ed.load()
expected = mock_mode.workspace_dir()
view.get_load_path.assert_called_once_with(
expected, "*.py *.pyw *.PY *.PYW", allow_previous=True
)
def test_load_has_current_path():
"""
If there is a self.current_path then use this as the location to look for
a file to load.
"""
view = mock.MagicMock()
view.get_load_path = mock.MagicMock(
return_value=os.path.join("path", "foo.py")
)
view.current_tab = None
ed = mu.logic.Editor(view)
ed._load = mock.MagicMock()
ed.current_path = "foo"
mock_mode = mock.MagicMock()
mock_mode.workspace_dir.return_value = os.path.join("fake", "path")
mock_mode.file_extensions = []
ed.modes = {"python": mock_mode}
with mock.patch("os.path.isdir", return_value=True):
ed.load()
view.get_load_path.assert_called_once_with(
"foo", "*.py *.pyw *.PY *.PYW", allow_previous=True
)
def test_load_has_default_path():
"""
If there is a default_path argument then use this as the location to look
for a file to load.
"""
view = mock.MagicMock()
view.get_load_path = mock.MagicMock(
return_value=os.path.join("path", "foo.py")
)
view.current_tab = None
ed = mu.logic.Editor(view)
ed._load = mock.MagicMock()
mock_mode = mock.MagicMock()
mock_mode.workspace_dir.return_value = os.path.join("fake", "path")
mock_mode.file_extensions = []
ed.modes = {"python": mock_mode}
with mock.patch("os.path.isdir", return_value=True):
ed.load(default_path="foo")
view.get_load_path.assert_called_once_with(
"foo", "*.py *.pyw *.PY *.PYW", allow_previous=False
)
def test_check_for_shadow_module_with_match():
"""
If the name of the file in the path passed into check_for_shadow_module
(without the .py file extension) is found in module_names then return
True since the filename shadows that of a module found on the Python path.
"""
view = mock.MagicMock()
ed = mu.logic.Editor(view)
mock_mode = mock.MagicMock()
mock_mode.module_names = set(["foo", "bar", "baz"])
ed.modes = {"python": mock_mode}
ed.mode = "python"
assert ed.check_for_shadow_module("/a/long/path/with/foo.py")
def test_save_no_tab():
"""
If there's no active tab then do nothing.
"""
view = mock.MagicMock()
view.current_tab = None
ed = mu.logic.Editor(view)
ed.save()
# If the code fell through then the tab state would be modified.
assert view.current_tab is None
def test_save_no_path():
"""
If there's no path associated with the tab then request the user provide
one.
"""
text, path, newline = "foo", "foo.py", "\n"
ed = mocked_editor(text=text, path=None, newline=newline)
ed._view.get_save_path.return_value = path
ed.check_for_shadow_module = mock.MagicMock(return_value=False)
with mock.patch("mu.logic.save_and_encode") as mock_save:
ed.save()
mock_save.assert_called_with(text, path, newline)
def test_save_no_path_no_path_given():
"""
If there's no path associated with the tab and the user cancels providing
one, ensure the path is correctly re-set.
"""
text, newline = "foo", "\n"
ed = mocked_editor(text=text, path=None, newline=newline)
ed._view.get_save_path.return_value = ""
ed.save()
# The path isn't the empty string returned from get_save_path.
assert ed._view.current_tab.path is None
def test_save_path_shadows_module():
"""
If the filename in the path shadows a module then display a warning message
and abort.
"""
text, newline = "foo", "\n"
ed = mocked_editor(text=text, path=None, newline=newline)
ed._view.get_save_path.return_value = "/a/long/path/foo.py"
mock_mode = mock.MagicMock()
mock_mode.module_names = set(["foo", "bar", "baz"])
ed.modes = {"python": mock_mode}
ed.mode = "python"
ed.save()
    # A warning message is shown and the tab's path remains unset.
assert ed._view.show_message.call_count == 1
assert ed._view.current_tab.path is None
def test_save_file_with_exception():
"""
If the file cannot be written, return an error message.
"""
view = mock.MagicMock()
view.current_tab = mock.MagicMock()
view.current_tab.path = "foo.py"
view.current_tab.text = mock.MagicMock(return_value="foo")
view.current_tab.setModified = mock.MagicMock(return_value=None)
view.show_message = mock.MagicMock()
mock_open = mock.MagicMock(side_effect=OSError())
ed = mu.logic.Editor(view)
with mock.patch("builtins.open", mock_open):
ed.save()
assert view.current_tab.setModified.call_count == 0
assert view.show_message.call_count == 1
def test_save_file_with_encoding_error():
"""
If Mu encounters a UnicodeEncodeError when trying to write the file,
it should display a helpful message explaining the problem.
"""
text, path, newline = "foo", "foo", "\n"
ed = mocked_editor(text=text, path=path, newline=newline)
with mock.patch("mu.logic.save_and_encode") as mock_save:
mock_save.side_effect = UnicodeEncodeError(
mu.logic.ENCODING, "", 0, 0, "Unable to encode"
)
ed.save()
assert ed._view.current_tab.setModified.call_count == 0
def test_save_python_file():
"""
If the path is a Python file (ending in *.py) then save it and reset the
modified flag.
"""
path, contents, newline = "foo.py", "foo", "\n"
view = mock.MagicMock()
view.current_tab = mock.MagicMock()
view.current_tab.path = path
view.current_tab.text = mock.MagicMock(return_value=contents)
view.current_tab.newline = "\n"
view.get_save_path = mock.MagicMock(return_value=path)
view.current_tab.setModified = mock.MagicMock(return_value=None)
ed = mu.logic.Editor(view)
with mock.patch("mu.logic.save_and_encode") as mock_save:
ed.save()
mock_save.assert_called_once_with(contents, path, newline)
assert view.get_save_path.call_count == 0
view.current_tab.setModified.assert_called_once_with(False)
def test_save_with_non_py_file_extension():
"""
If the path ends in an extension, save it using the extension
"""
text, path, newline = "foo", "foo.txt", "\n"
ed = mocked_editor(text=text, path=path, newline=newline)
ed._view.get_save_path.return_value = path
with mock.patch("mu.logic.save_and_encode") as mock_save:
ed.save()
mock_save.assert_called_once_with(text, path, newline)
    assert ed._view.get_save_path.call_count == 0
def test_get_tab_existing_tab():
"""
Ensure that an existing tab is returned if its path matches.
"""
view = mock.MagicMock()
mock_tab = mock.MagicMock()
mock_tab.path = "foo"
view.widgets = [mock_tab]
ed = mu.logic.Editor(view)
view.focus_tab.reset_mock()
tab = ed.get_tab("foo")
assert tab == mock_tab
view.focus_tab.assert_called_once_with(mock_tab)
def test_get_tab_new_tab():
"""
If the path is not represented by an existing tab, ensure it is loaded and
the new tab is returned.
"""
view = mock.MagicMock()
mock_tab = mock.MagicMock()
mock_tab.path = "foo"
view.widgets = [mock_tab]
ed = mu.logic.Editor(view)
ed.direct_load = mock.MagicMock()
tab = ed.get_tab("bar")
ed.direct_load.assert_called_once_with("bar")
assert tab == view.current_tab
def test_get_tab_no_path():
"""
Any tabs with no associated path are ignored (i.e. tabs that have been
newly created but remain unsaved).
"""
view = mock.MagicMock()
mock_tab = mock.MagicMock()
mock_tab.path = None
view.widgets = [mock_tab]
ed = mu.logic.Editor(view)
ed.direct_load = mock.MagicMock()
tab = ed.get_tab("bar")
ed.direct_load.assert_called_once_with("bar")
assert tab == view.current_tab
def test_zoom_in():
"""
Ensure the UI layer is zoomed in.
"""
view = mock.MagicMock()
view.zoom_in = mock.MagicMock(return_value=None)
ed = mu.logic.Editor(view)
ed.zoom_in()
assert view.zoom_in.call_count == 1
def test_zoom_out():
"""
Ensure the UI layer is zoomed out.
"""
view = mock.MagicMock()
view.zoom_out = mock.MagicMock(return_value=None)
ed = mu.logic.Editor(view)
ed.zoom_out()
assert view.zoom_out.call_count == 1
def test_check_code_on():
"""
Checking code correctly results in something the UI layer can parse.
"""
view = mock.MagicMock()
tab = mock.MagicMock()
tab.has_annotations = False
tab.path = "foo.py"
tab.text.return_value = "import this\n"
view.current_tab = tab
flake = {2: {"line_no": 2, "message": "a message"}}
pep8 = {
2: [{"line_no": 2, "message": "another message"}],
3: [{"line_no": 3, "message": "yet another message"}],
}
mock_mode = mock.MagicMock()
mock_mode.builtins = None
with mock.patch("mu.logic.check_flake", return_value=flake), mock.patch(
"mu.logic.check_pycodestyle", return_value=pep8
):
ed = mu.logic.Editor(view)
ed.modes = {"python": mock_mode}
ed.check_code()
assert tab.has_annotations is True
view.reset_annotations.assert_called_once_with()
view.annotate_code.assert_has_calls(
[mock.call(flake, "error"), mock.call(pep8, "style")],
any_order=True,
)
def test_check_code_no_problems():
"""
If no problems are found in the code, ensure a status message is shown to
the user to confirm the fact. See #337
"""
view = mock.MagicMock()
tab = mock.MagicMock()
tab.has_annotations = False
tab.path = "foo.py"
tab.text.return_value = "import this\n"
view.current_tab = tab
flake = {}
pep8 = {}
mock_mode = mock.MagicMock()
mock_mode.builtins = None
with mock.patch("mu.logic.check_flake", return_value=flake), mock.patch(
"mu.logic.check_pycodestyle", return_value=pep8
):
ed = mu.logic.Editor(view)
ed.show_status_message = mock.MagicMock()
ed.modes = {"python": mock_mode}
ed.check_code()
assert ed.show_status_message.call_count == 1
def test_check_code_off():
"""
If the tab already has annotations, toggle them off.
"""
view = mock.MagicMock()
tab = mock.MagicMock()
tab.has_annotations = True
view.current_tab = tab
ed = mu.logic.Editor(view)
ed.check_code()
assert tab.has_annotations is False
view.reset_annotations.assert_called_once_with()
def test_check_code_no_tab():
"""
Checking code when there is no tab containing code aborts the process.
"""
view = mock.MagicMock()
view.current_tab = None
ed = mu.logic.Editor(view)
ed.check_code()
assert view.annotate_code.call_count == 0
def test_check_code_not_python():
"""
Checking code when the tab does not contain Python code aborts the process.
"""
view = mock.MagicMock()
view.current_tab = mock.MagicMock()
view.current_tab.path = "foo.html"
ed = mu.logic.Editor(view)
ed.check_code()
assert view.annotate_code.call_count == 0
def test_show_help():
"""
Help should attempt to open up the user's browser and point it to the
expected help documentation.
"""
view = mock.MagicMock()
ed = mu.logic.Editor(view)
with mock.patch(
"mu.logic.webbrowser.open_new", return_value=None
) as wb, mock.patch("mu.i18n.language_code", "en_GB"):
ed.show_help()
version = ".".join(__version__.split(".")[:2])
url = "https://codewith.mu/en/help/{}".format(version)
wb.assert_called_once_with(url)
def test_quit_modified_cancelled_from_button():
"""
If the user quits and there's unsaved work, and they cancel the "quit" then
do nothing.
"""
view = mock.MagicMock()
view.modified = True
view.show_confirmation = mock.MagicMock(return_value=QMessageBox.Cancel)
ed = mu.logic.Editor(view)
mock_open = mock.MagicMock()
mock_open.return_value.__enter__ = lambda s: s
mock_open.return_value.__exit__ = mock.Mock()
mock_open.return_value.write = mock.MagicMock()
with mock.patch("sys.exit", return_value=None), mock.patch(
"builtins.open", mock_open
):
ed.quit()
assert view.show_confirmation.call_count == 1
assert mock_open.call_count == 0
def test_quit_modified_cancelled_from_event():
"""
If the user quits and there's unsaved work, and they cancel the "quit" then
do nothing.
"""
view = mock.MagicMock()
view.modified = True
view.show_confirmation = mock.MagicMock(return_value=QMessageBox.Cancel)
ed = mu.logic.Editor(view)
mock_open = mock.MagicMock()
mock_open.return_value.__enter__ = lambda s: s
mock_open.return_value.__exit__ = mock.Mock()
mock_open.return_value.write = mock.MagicMock()
mock_event = mock.MagicMock()
mock_event.ignore = mock.MagicMock(return_value=None)
with mock.patch("sys.exit", return_value=None), mock.patch(
"builtins.open", mock_open
):
ed.quit(mock_event)
assert view.show_confirmation.call_count == 1
assert mock_event.ignore.call_count == 1
assert mock_open.call_count == 0
def test_quit_modified_ok():
"""
    If the user quits and confirms despite unsaved work, proceed to save the
    session.
"""
view = mock.MagicMock()
view.modified = True
view.show_confirmation = mock.MagicMock(return_value=True)
ed = mu.logic.Editor(view)
mock_mode = mock.MagicMock()
mock_mode.workspace_dir.return_value = "foo/bar"
mock_mode.get_hex_path.return_value = "foo/bar"
mock_debug_mode = mock.MagicMock()
mock_debug_mode.is_debugger = True
ed.modes = {
"python": mock_mode,
"microbit": mock_mode,
"debugger": mock_debug_mode,
}
ed.mode = "debugger"
mock_open = mock.MagicMock()
mock_open.return_value.__enter__ = lambda s: s
mock_open.return_value.__exit__ = mock.Mock()
mock_open.return_value.write = mock.MagicMock()
mock_event = mock.MagicMock()
    #
    # mock_event.ignore is mocked so we can assert below that it is *not*
    # called when the quit goes ahead.
    #
mock_event.ignore = mock.MagicMock(return_value=None)
with mock.patch("sys.exit", return_value=None), mock.patch(
"mu.settings.SessionSettings.save"
) as mocked_save:
ed.quit(mock_event)
mock_debug_mode.stop.assert_called_once_with()
assert view.show_confirmation.call_count == 1
assert mock_event.ignore.call_count == 0
assert mocked_save.called
def _editor_view_mock():
"""
Return a mocked mu.interface.Window to be used as a mu.logic.Editor view
in the test_quit_save* tests.
"""
view = mock.MagicMock()
view.modified = True
view.zoom_position = 2
view.show_confirmation = mock.MagicMock(return_value=True)
view.x.return_value = 100
view.y.return_value = 200
view.width.return_value = 300
view.height.return_value = 400
return view
def test_quit_save_tabs_with_paths():
"""
When saving the session, ensure those tabs with associated paths are
logged in the session file.
"""
view = _editor_view_mock()
w1 = mock.MagicMock()
w1.path = "foo.py"
view.widgets = [w1]
ed = mu.logic.Editor(view)
mock_mode = mock.MagicMock()
mock_mode.workspace_dir.return_value = "foo/bar"
mock_mode.get_hex_path.return_value = "foo/bar"
ed.modes = {"python": mock_mode, "microbit": mock_mode}
with mock.patch.object(sys, "exit"):
with mock.patch.object(mu.logic, "save_session") as mock_save_session:
ed.quit()
[session], _ = mock_save_session.call_args
assert os.path.abspath("foo.py") in session["paths"]
def test_quit_save_theme():
"""
When saving the session, ensure the theme is logged in the session file.
"""
view = _editor_view_mock()
w1 = mock.MagicMock()
w1.path = "foo.py"
view.widgets = [w1]
ed = mu.logic.Editor(view)
ed.theme = "night"
mock_mode = mock.MagicMock()
mock_mode.workspace_dir.return_value = "foo/bar"
mock_mode.get_hex_path.return_value = "foo/bar"
ed.modes = {"python": mock_mode, "microbit": mock_mode}
with mock.patch.object(sys, "exit"):
with mock.patch.object(mu.logic, "save_session") as mock_save_session:
ed.quit()
[session], _ = mock_save_session.call_args
assert session["theme"] == "night"
def test_quit_save_envars():
"""
When saving the session, ensure the user defined envars are logged in the
session file.
"""
view = _editor_view_mock()
w1 = mock.MagicMock()
w1.path = "foo.py"
view.widgets = [w1]
ed = mu.logic.Editor(view)
ed.theme = "night"
mock_mode = mock.MagicMock()
mock_mode.workspace_dir.return_value = "foo/bar"
mock_mode.get_hex_path.return_value = "foo/bar"
ed.modes = {"python": mock_mode, "microbit": mock_mode}
ed.envars = [["name1", "value1"], ["name2", "value2"]]
with mock.patch.object(sys, "exit"):
with mock.patch.object(mu.logic, "save_session") as mock_save_session:
ed.quit()
[session], _ = mock_save_session.call_args
assert session["envars"] == [["name1", "value1"], ["name2", "value2"]]
def test_quit_save_zoom_level():
"""
When saving the session, ensure the zoom level is logged in the session
file.
"""
view = _editor_view_mock()
w1 = mock.MagicMock()
w1.path = "foo.py"
view.widgets = [w1]
ed = mu.logic.Editor(view)
ed.theme = "night"
mock_mode = mock.MagicMock()
mock_mode.workspace_dir.return_value = "foo/bar"
mock_mode.get_hex_path.return_value = "foo/bar"
ed.modes = {"python": mock_mode, "microbit": mock_mode}
with mock.patch.object(sys, "exit"):
with mock.patch.object(mu.logic, "save_session") as mock_save_session:
ed.quit()
[session], _ = mock_save_session.call_args
    #
    # The zoom level comes from view.zoom_position = 2, set in
    # _editor_view_mock.
    #
assert session["zoom_level"] == 2
def test_quit_save_window_geometry():
"""
When saving the session, ensure the window geometry is saved in the session
file.
"""
view = _editor_view_mock()
w1 = mock.MagicMock()
w1.path = "foo.py"
view.widgets = [w1]
ed = mu.logic.Editor(view)
ed.theme = "night"
mock_mode = mock.MagicMock()
mock_mode.workspace_dir.return_value = "foo/bar"
mock_mode.get_hex_path.return_value = "foo/bar"
ed.modes = {"python": mock_mode, "microbit": mock_mode}
with mock.patch.object(sys, "exit"):
with mock.patch.object(mu.logic, "save_session") as mock_save_session:
ed.quit()
[session], _ = mock_save_session.call_args
    #
    # The geometry comes from the x/y/width/height values mocked in
    # _editor_view_mock.
    #
assert session["window"] == {"x": 100, "y": 200, "w": 300, "h": 400}
def test_quit_calls_mode_stop():
"""
Ensure that the current mode's stop method is called.
"""
view = mock.MagicMock()
view.modified = True
view.show_confirmation = mock.MagicMock(return_value=True)
w1 = mock.MagicMock()
w1.path = "foo.py"
view.widgets = [w1]
ed = mu.logic.Editor(view)
ed.theme = "night"
ed.modes = {"python": mock.MagicMock(), "microbit": mock.MagicMock()}
ed.mode = "python"
mock_open = mock.MagicMock()
mock_open.return_value.__enter__ = lambda s: s
mock_open.return_value.__exit__ = mock.Mock()
mock_open.return_value.write = mock.MagicMock()
mock_event = mock.MagicMock()
mock_event.ignore = mock.MagicMock(return_value=None)
with mock.patch("sys.exit", return_value=None), mock.patch(
"builtins.open", mock_open
), mock.patch("mu.settings.session.save"):
ed.quit(mock_event)
ed.modes[ed.mode].stop.assert_called_once_with()
def test_quit_calls_sys_exit(mocked_session):
"""
Ensure that sys.exit(0) is called.
"""
view = mock.MagicMock()
view.modified = True
view.show_confirmation = mock.MagicMock(return_value=True)
w1 = mock.MagicMock()
w1.path = "foo.py"
view.widgets = [w1]
ed = mu.logic.Editor(view)
ed.theme = "night"
ed.modes = {"python": mock.MagicMock(), "microbit": mock.MagicMock()}
mock_open = mock.MagicMock()
mock_open.return_value.__enter__ = lambda s: s
mock_open.return_value.__exit__ = mock.Mock()
mock_open.return_value.write = mock.MagicMock()
mock_event = mock.MagicMock()
mock_event.ignore = mock.MagicMock(return_value=None)
with mock.patch("sys.exit", return_value=None) as ex, mock.patch(
"builtins.open", mock_open
):
ed.quit(mock_event)
ex.assert_called_once_with(0)
def test_show_admin():
"""
Ensure the expected admin dialog is displayed to the end user.
"""
view = mock.MagicMock()
ed = mu.logic.Editor(view)
ed.modes = {"python": mock.MagicMock()}
ed.sync_package_state = mock.MagicMock()
ed.envars = [["name", "value"]]
ed.minify = True
ed.microbit_runtime = "/foo/bar"
settings = {
"envars": "name=value",
"minify": True,
"microbit_runtime": "/foo/bar",
}
new_settings = {
"envars": "name=value",
"minify": True,
"microbit_runtime": "/foo/bar",
"packages": "baz\n",
}
view.show_admin.return_value = new_settings
with mock.patch.object(
venv, "installed_packages", return_value=([], ["Foo", "bar"])
):
mock_open = mock.mock_open()
with mock.patch("builtins.open", mock_open), mock.patch(
"os.path.isfile", return_value=True
):
ed.show_admin()
mock_open.assert_called_once_with(
mu.logic.LOG_FILE, "r", encoding="utf8"
)
assert view.show_admin.call_count == 1
assert view.show_admin.call_args[0][1] == settings
assert ed.envars == [["name", "value"]]
assert ed.minify is True
assert ed.microbit_runtime == "/foo/bar"
# Expect package names to be normalised to lowercase.
ed.sync_package_state.assert_called_once_with(
["foo", "bar"], ["baz"]
)
def test_show_admin_no_change():
"""
If the dialog is cancelled, no changes are made to settings.
"""
view = mock.MagicMock()
ed = mu.logic.Editor(view)
ed.modes = {"python": mock.MagicMock()}
ed.sync_package_state = mock.MagicMock()
ed.envars = [["name", "value"]]
ed.minify = True
ed.microbit_runtime = "/foo/bar"
new_settings = {}
view.show_admin.return_value = new_settings
mock_open = mock.mock_open()
with mock.patch.object(
venv, "installed_packages", return_value=([], ["Foo", "bar"])
):
with mock.patch("builtins.open", mock_open), mock.patch(
"os.path.isfile", return_value=True
):
ed.show_admin(None)
assert ed.sync_package_state.call_count == 0
def test_show_admin_missing_microbit_runtime():
"""
Ensure the microbit_runtime result is '' and a warning message is displayed
if the specified microbit_runtime doesn't actually exist.
"""
view = mock.MagicMock()
ed = mu.logic.Editor(view)
ed.modes = {"python": mock.MagicMock()}
ed.sync_package_state = mock.MagicMock()
ed.envars = [["name", "value"]]
ed.minify = True
ed.microbit_runtime = "/foo/bar"
settings = {
"envars": "name=value",
"minify": True,
"microbit_runtime": "/foo/bar",
}
new_settings = {
"envars": "name=value",
"minify": True,
"microbit_runtime": "/foo/bar",
"packages": "baz\n",
}
view.show_admin.return_value = new_settings
mock_open = mock.mock_open()
with mock.patch.object(
venv, "installed_packages", return_value=([], ["Foo", "bar"])
):
with mock.patch("builtins.open", mock_open), mock.patch(
"os.path.isfile", return_value=False
):
ed.show_admin(None)
mock_open.assert_called_once_with(
mu.logic.LOG_FILE, "r", encoding="utf8"
)
assert view.show_admin.call_count == 1
assert view.show_admin.call_args[0][1] == settings
assert ed.envars == [["name", "value"]]
assert ed.minify is True
assert ed.microbit_runtime == ""
assert view.show_message.call_count == 1
ed.sync_package_state.assert_called_once_with(
["foo", "bar"], ["baz"]
)
def test_sync_package_state():
"""
Ensure that the expected set operations are carried out so that the
view's sync_packages method is called with the correct packages.
"""
view = mock.MagicMock()
ed = mu.logic.Editor(view)
old_packages = ["foo", "bar"]
new_packages = ["bar", "baz"]
ed.sync_package_state(old_packages, new_packages)
args, _ = view.sync_packages.call_args
assert args[:2] == ({"foo"}, {"baz"})
def test_select_mode():
"""
It's possible to select and update to a new mode.
"""
view = mock.MagicMock()
view.select_mode.return_value = "foo"
mode = mock.MagicMock()
mode.is_debugger = False
ed = mu.logic.Editor(view)
ed.modes = {"python": mode}
ed.change_mode = mock.MagicMock()
ed.select_mode(None)
assert view.select_mode.call_count == 1
ed.change_mode.assert_called_once_with("foo")
def test_select_mode_debug_mode():
"""
It's NOT possible to select and update to a new mode if you're in debug
mode.
"""
view = mock.MagicMock()
mode = mock.MagicMock()
    mode.is_debugger = True
ed = mu.logic.Editor(view)
ed.modes = {"debugger": mode}
ed.mode = "debugger"
ed.change_mode = mock.MagicMock()
ed.select_mode(None)
assert ed.mode == "debugger"
assert ed.change_mode.call_count == 0
def test_change_mode():
"""
It should be possible to change modes in the expected fashion (buttons get
correctly connected to event handlers).
"""
view = mock.MagicMock()
mock_button_bar = mock.MagicMock()
view.button_bar = mock_button_bar
view.change_mode = mock.MagicMock()
ed = mu.logic.Editor(view)
old_mode = mock.MagicMock()
old_mode.save_timeout = 5
old_mode.actions.return_value = [
{"name": "name", "handler": "handler", "shortcut": "Ctrl+X"}
]
mode = mock.MagicMock()
mode.save_timeout = 5
mode.name = "Python"
mode.actions.return_value = [
{"name": "name", "handler": "handler", "shortcut": "Ctrl+X"}
]
ed.modes = {"microbit": old_mode, "python": mode}
ed.mode = "microbit"
ed.change_mode("python")
# Check the old mode is closed properly.
old_mode.remove_repl.assert_called_once_with()
old_mode.remove_fs.assert_called_once_with()
old_mode.remove_plotter.assert_called_once_with()
# Check the new mode is set up correctly.
assert ed.mode == "python"
view.change_mode.assert_called_once_with(mode)
if sys.version_info < (3, 6):
assert mock_button_bar.connect.call_count == 11
else:
assert mock_button_bar.connect.call_count == 12
view.status_bar.set_mode.assert_called_once_with("Python")
view.set_timer.assert_called_once_with(5, ed.autosave)
def test_change_mode_no_timer():
"""
    When the new mode's save_timeout is zero, changing mode should stop the
    autosave timer rather than (re)starting it.
"""
view = mock.MagicMock()
mock_button_bar = mock.MagicMock()
view.button_bar = mock_button_bar
view.change_mode = mock.MagicMock()
ed = mu.logic.Editor(view)
mode = mock.MagicMock()
mode.save_timeout = 0
mode.name = "Python"
mode.actions.return_value = [
{"name": "name", "handler": "handler", "shortcut": "Ctrl+X"}
]
ed.modes = {"python": mode}
ed.change_mode("python")
assert ed.mode == "python"
view.change_mode.assert_called_once_with(mode)
if sys.version_info < (3, 6):
assert mock_button_bar.connect.call_count == 11
else:
assert mock_button_bar.connect.call_count == 12
view.status_bar.set_mode.assert_called_once_with("Python")
view.stop_timer.assert_called_once_with()
def test_change_mode_reset_breakpoints():
"""
When changing modes, if the new mode does NOT require a debugger, then
breakpoints should be reset.
"""
view = mock.MagicMock()
mock_tab = mock.MagicMock()
mock_tab.breakpoint_handles = set([1, 2, 3])
view.widgets = [mock_tab]
ed = mu.logic.Editor(view)
mode = mock.MagicMock()
mode.has_debugger = False
mode.is_debugger = False
mode.save_timeout = 5
ed.modes = {"microbit": mode, "debug": mock.MagicMock()}
ed.mode = "debug"
ed.change_mode("microbit")
assert ed.mode == "microbit"
assert mock_tab.breakpoint_handles == set()
mock_tab.reset_annotations.assert_called_once_with()
def test_change_mode_workspace_dir_exception():
"""
Check that any mode.workspace_dir() raising an exception doesn't crash Mu,
but uses Python mode's workspace_dir as a default.
"""
ed = mu.logic.Editor(mock.MagicMock())
mode = mock.MagicMock()
mode.save_timeout = 0
mode.workspace_dir = mock.MagicMock(side_effect=ValueError("Some error."))
python_mode = mock.MagicMock()
ed.modes = {
"circuitpython": mode,
"python": python_mode,
"debug": mock.MagicMock(),
}
ed.mode = "debug"
with mock.patch("mu.logic.logger.error") as mock_error:
ed.change_mode("circuitpython")
assert mock_error.call_count == 1
assert ed.mode == "circuitpython"
    python_mode.workspace_dir.assert_called_once_with()
def test_autosave():
"""
Ensure the autosave callback does the expected things to the tabs.
"""
view = mock.MagicMock()
view.modified = True
mock_tab = mock.MagicMock()
mock_tab.path = "foo"
mock_tab.isModified.return_value = True
view.widgets = [mock_tab]
ed = mu.logic.Editor(view)
ed.save_tab_to_file = mock.MagicMock()
ed.autosave()
ed.save_tab_to_file.assert_called_once_with(
mock_tab, show_error_messages=False
)
def test_check_usb(microbit_com1):
"""
Ensure the check_usb callback actually checks for connected USB devices.
"""
mode_py = mock.MagicMock()
mode_py.name = "Python3"
mode_py.runner = None
mode_py.find_devices.return_value = []
mode_mb = mock.MagicMock()
mode_mb.name = "BBC micro:bit"
mode_mb.find_devices.return_value = [microbit_com1]
modes = {"microbit": mode_mb, "python": mode_py}
device_list = mu.logic.DeviceList(modes)
device_list.device_connected = mock.MagicMock()
device_list.check_usb()
device_list.device_connected.emit.assert_called_with(microbit_com1)
def test_check_usb_remove_disconnected_devices(microbit_com1):
"""
Ensure that if a device is no longer connected, it is removed from
the set of connected devices.
"""
# No modes, so no devices should be detected
modes = {}
device_list = mu.logic.DeviceList(modes)
device_list.add_device(microbit_com1)
device_list.check_usb()
assert len(device_list) == 0
def test_ask_to_change_mode_confirm():
"""
    Ensure ask_to_change_mode calls change_mode if the user confirms.
"""
view = mock.MagicMock()
view.show_confirmation = mock.MagicMock(return_value=QMessageBox.Ok)
ed = mu.logic.Editor(view)
ed.change_mode = mock.MagicMock()
mode_py = mock.MagicMock()
mode_py.name = "Python3"
mode_py.runner = None
mode_mb = mock.MagicMock()
mode_mb.name = "BBC micro:bit"
ed.modes = {"microbit": mode_mb, "python": mode_py}
ed.ask_to_change_mode("microbit", "python", "New device detected")
assert view.show_confirmation.called
ed.change_mode.assert_called_once_with("microbit")
def test_ask_to_change_mode_cancel(adafruit_feather):
"""
    Ensure ask_to_change_mode doesn't change mode if the user cancels the
    confirmation.
"""
view = mock.MagicMock()
view.show_confirmation = mock.MagicMock(return_value=QMessageBox.Cancel)
ed = mu.logic.Editor(view)
ed.change_mode = mock.MagicMock()
mode_py = mock.MagicMock()
mode_py.name = "Python3"
mode_py.runner = None
mode_cp = mock.MagicMock()
mode_cp.name = "CircuitPlayground"
ed.modes = {"circuitplayground": mode_cp, "python": mode_py}
ed.ask_to_change_mode(mode_cp, mode_py, "New device detected")
assert view.show_confirmation.called
ed.change_mode.assert_not_called()
def test_ask_to_change_mode_already_in_mode(microbit_com1):
"""
Ensure the ask_to_change_mode doesn't ask to change mode if already
selected.
"""
view = mock.MagicMock()
view.show_confirmation = mock.MagicMock(return_value=QMessageBox.Ok)
ed = mu.logic.Editor(view)
ed.change_mode = mock.MagicMock()
mode_mb = mock.MagicMock()
mode_mb.name = "BBC micro:bit"
mode_mb.find_devices.return_value = [microbit_com1]
mode_cp = mock.MagicMock()
mode_cp.find_devices.return_value = []
ed.modes = {"microbit": mode_mb, "circuitplayground": mode_cp}
ed.mode = "microbit"
ed.show_status_message = mock.MagicMock()
ed.ask_to_change_mode(mode_mb, mode_mb, "New device detected")
view.show_confirmation.assert_not_called()
ed.change_mode.assert_not_called()
def test_ask_to_change_mode_currently_running_code(microbit_com1):
"""
    Ensure ask_to_change_mode doesn't ask to change mode if the current mode
    is running code.
"""
view = mock.MagicMock()
view.show_confirmation = mock.MagicMock(return_value=QMessageBox.Ok)
ed = mu.logic.Editor(view)
ed.change_mode = mock.MagicMock()
mode_py = mock.MagicMock()
mode_py.name = "Python3"
mode_py.runner = True
    mode_py.find_devices.return_value = []
mode_mb = mock.MagicMock()
mode_mb.name = "BBC micro:bit"
mode_mb.find_devices.return_value = [microbit_com1]
ed.modes = {"microbit": mode_mb, "python": mode_py}
ed.show_status_message = mock.MagicMock()
ed.ask_to_change_mode(mode_mb, mode_py, "New device detected")
view.show_confirmation.assert_not_called()
ed.change_mode.assert_not_called()
def test_ask_to_change_mode_when_selecting_mode_is_silent(adafruit_feather):
"""
Ensure ask_to_change_mode doesn't ask to change mode if the user has
the mode selection dialog active (indicated by the selecting_mode flag).
"""
view = mock.MagicMock()
view.show_confirmation = mock.MagicMock(return_value=QMessageBox.Cancel)
ed = mu.logic.Editor(view)
ed.change_mode = mock.MagicMock()
mode_py = mock.MagicMock()
mode_py.name = "Python3"
mode_py.runner = None
mode_py.find_devices.return_value = []
mode_cp = mock.MagicMock()
mode_cp.name = "CircuitPlayground"
mode_cp.find_devices.return_value = [adafruit_feather]
ed.modes = {"circuitplayground": mode_cp, "python": mode_py}
ed.selecting_mode = True
ed.ask_to_change_mode(mode_cp, mode_py, "New device detected")
assert view.show_confirmation.call_count == 0
ed.change_mode.assert_not_called()
def test_device_changed(microbit_com1, adafruit_feather):
view = mock.MagicMock()
ed = mu.logic.Editor(view)
ed.ask_to_change_mode = mock.MagicMock()
ed.device_changed(adafruit_feather)
ed.ask_to_change_mode.assert_called_once_with(
"circuitpython",
"CircuitPython",
"Detected new Adafruit Feather device.",
)
assert ed.current_device == adafruit_feather
ed.device_changed(microbit_com1)
assert ed.ask_to_change_mode.call_count == 2
assert ed.current_device == microbit_com1
def test_show_status_message():
"""
Ensure the method calls the status_bar in the view layer.
"""
msg = "Hello, World!"
view = mock.MagicMock()
ed = mu.logic.Editor(view)
ed.show_status_message(msg, 8)
view.status_bar.set_message.assert_called_once_with(msg, 8000)
def test_debug_toggle_breakpoint_as_debugger():
"""
If a breakpoint is toggled in debug mode, pass it to the toggle_breakpoint
method in the debug client.
"""
view = mock.MagicMock()
view.current_tab.text.return_value = 'print("Hello")'
ed = mu.logic.Editor(view)
mock_debugger = mock.MagicMock()
mock_debugger.has_debugger = False
mock_debugger.is_debugger = True
ed.modes = {"debugger": mock_debugger}
ed.mode = "debugger"
ed.debug_toggle_breakpoint(1, 10, False)
mock_debugger.toggle_breakpoint.assert_called_once_with(
10, view.current_tab
)
def test_debug_toggle_breakpoint_on():
"""
Toggle the breakpoint on when not in debug mode by tracking it in the
tab.breakpoint_handles set.
"""
view = mock.MagicMock()
view.current_tab.breakpoint_handles = set()
view.current_tab.markersAtLine.return_value = False
view.current_tab.markerAdd.return_value = 999 # the tracked marker handle.
ed = mu.logic.Editor(view)
mock_debugger = mock.MagicMock()
mock_debugger.has_debugger = True
mock_debugger.is_debugger = False
ed.modes = {"python": mock_debugger}
ed.mode = "python"
with mock.patch("mu.logic.is_breakpoint_line", return_value=True):
ed.debug_toggle_breakpoint(1, 10, False)
view.current_tab.markerAdd.assert_called_once_with(
10, view.current_tab.BREAKPOINT_MARKER
)
assert 999 in view.current_tab.breakpoint_handles
def test_debug_toggle_breakpoint_off():
"""
Toggle the breakpoint off when not in debug mode by tracking it in the
tab.breakpoint_handles set.
"""
view = mock.MagicMock()
view.current_tab.breakpoint_handles = set([10])
ed = mu.logic.Editor(view)
mock_debugger = mock.MagicMock()
mock_debugger.has_debugger = True
mock_debugger.is_debugger = False
ed.modes = {"python": mock_debugger}
ed.mode = "python"
with mock.patch("mu.logic.is_breakpoint_line", return_value=True):
ed.debug_toggle_breakpoint(1, 10, False)
view.current_tab.markerDelete.assert_called_once_with(10, -1)
def test_debug_toggle_breakpoint_on_invalid_breakpoint_line():
"""
If a breakpoint is toggled on, it won't work if the line isn't a valid
breakpoint line.
"""
view = mock.MagicMock()
view.current_tab.text.return_value = '#print("Hello")'
ed = mu.logic.Editor(view)
mock_debugger = mock.MagicMock()
mock_debugger.has_debugger = False
mock_debugger.is_debugger = True
ed.modes = {"debugger": mock_debugger}
ed.mode = "debugger"
ed.debug_toggle_breakpoint(1, 10, False)
assert view.show_message.call_count == 1
def test_debug_toggle_breakpoint_off_invalid_breakpoint_line():
"""
It should be possible to remove breakpoints from *invalid* breakpoint
lines.
"""
view = mock.MagicMock()
view.current_tab.text.return_value = '#print("Hello")'
view.current_tab.markersAtLine.return_value = True
view.current_tab.breakpoint_handles = set([10])
ed = mu.logic.Editor(view)
mock_mode = mock.MagicMock()
mock_mode.has_debugger = True
mock_mode.is_debugger = False
ed.modes = {"python": mock_mode}
ed.mode = "python"
ed.debug_toggle_breakpoint(1, 10, False)
view.current_tab.markerDelete.assert_called_once_with(10, -1)
def test_rename_tab_no_tab_id():
"""
If no tab id is supplied (i.e. this method was triggered by the shortcut
instead of the double-click event), then use the tab currently in focus.
"""
view = mock.MagicMock()
view.get_save_path.return_value = "foo"
mock_tab = mock.MagicMock()
mock_tab.path = "old.py"
view.current_tab = mock_tab
ed = mu.logic.Editor(view)
ed.save = mock.MagicMock()
ed.check_for_shadow_module = mock.MagicMock(return_value=False)
ed.rename_tab()
view.get_save_path.assert_called_once_with("old.py")
assert mock_tab.path == "foo.py"
ed.save.assert_called_once_with()
def test_rename_tab():
"""
    If there's a tab id, the function being tested is reacting to a double-click,
so make sure the expected tab is grabbed from the view.
"""
view = mock.MagicMock()
view.get_save_path.return_value = "foo"
mock_tab = mock.MagicMock()
mock_tab.path = "old.py"
view.tabs.widget.return_value = mock_tab
ed = mu.logic.Editor(view)
ed.save = mock.MagicMock()
ed.check_for_shadow_module = mock.MagicMock(return_value=False)
ed.rename_tab(1)
view.get_save_path.assert_called_once_with("old.py")
view.tabs.widget.assert_called_once_with(1)
assert mock_tab.path == "foo.py"
ed.save.assert_called_once_with()
def test_rename_tab_with_shadow_module():
"""
If the user attempts to rename the tab to a filename which shadows a
Python module, then a warning should appear and the process aborted.
"""
view = mock.MagicMock()
view.get_save_path.return_value = "foo"
mock_tab = mock.MagicMock()
mock_tab.path = "old.py"
view.tabs.widget.return_value = mock_tab
ed = mu.logic.Editor(view)
ed.save = mock.MagicMock()
ed.check_for_shadow_module = mock.MagicMock(return_value=True)
ed.rename_tab(1)
view.get_save_path.assert_called_once_with("old.py")
view.tabs.widget.assert_called_once_with(1)
assert view.show_message.call_count == 1
assert mock_tab.path == "old.py"
assert ed.save.call_count == 0
def test_rename_tab_avoid_duplicating_other_tab_name():
"""
If the user attempts to rename the tab to a filename used by another tab
then show an error message and don't rename anything.
"""
view = mock.MagicMock()
view.get_save_path.return_value = "foo"
mock_other_tab = mock.MagicMock()
mock_other_tab.path = "foo.py"
view.widgets = [mock_other_tab]
mock_tab = mock.MagicMock()
mock_tab.path = "old.py"
view.tabs.widget.return_value = mock_tab
ed = mu.logic.Editor(view)
ed.check_for_shadow_module = mock.MagicMock(return_value=False)
ed.rename_tab(1)
view.show_message.assert_called_once_with(
"Could not rename file.",
"A file of that name is already " "open in Mu.",
)
assert mock_tab.path == "old.py"
def test_logic_independent_import_logic():
"""
It should be possible to import the logic and app
modules from the mu package independently of each
other.
"""
subprocess.run([sys.executable, "-c", "from mu import logic"], check=True)
def test_logic_independent_import_app():
"""
It should be possible to import the logic and app
modules from the mu package independently of each
other.
"""
subprocess.run([sys.executable, "-c", "from mu import app"], check=True)
#
# Tests for newline detection
# Mu should detect the majority newline convention
# in a loaded file and use that convention when writing
# the file out again. Internally all newlines are MU_NEWLINE
#
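

# A minimal sketch (not Mu's actual implementation) of the majority-newline
# heuristic the tests below exercise: count Windows line endings against the
# remaining Unix ones, falling back to the platform default on a tie. The
# helper is purely illustrative and unused by the tests themselves.
def _example_sniff_newline(text):
    crlf = text.count("\r\n")
    lf = text.count("\n") - crlf  # every "\r\n" also contains a "\n"
    if crlf > lf:
        return "\r\n"
    if lf > crlf:
        return "\n"
    return os.linesep  # tie (or no newlines at all): platform default
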
def test_read_newline_no_text():
"""If the file being loaded is empty, use the platform default newline"""
with generate_python_file() as filepath:
text, newline = mu.logic.read_and_decode(filepath)
assert text.count("\r\n") == 0
assert newline == os.linesep
def test_read_newline_all_unix():
"""If the file being loaded has only the Unix convention, use that"""
with generate_python_file("abc\ndef") as filepath:
text, newline = mu.logic.read_and_decode(filepath)
assert text.count("\r\n") == 0
assert newline == "\n"
def test_read_newline_all_windows():
"""If the file being loaded has only the Windows convention, use that"""
with generate_python_file("abc\r\ndef") as filepath:
text, newline = mu.logic.read_and_decode(filepath)
assert text.count("\r\n") == 0
assert newline == "\r\n"
def test_read_newline_most_unix():
"""If the file being loaded has mostly the Unix convention, use that"""
with generate_python_file("\nabc\r\ndef\n") as filepath:
text, newline = mu.logic.read_and_decode(filepath)
assert text.count("\r\n") == 0
assert newline == "\n"
def test_read_newline_most_windows():
"""If the file being loaded has mostly the Windows convention, use that"""
with generate_python_file("\r\nabc\ndef\r\n") as filepath:
text, newline = mu.logic.read_and_decode(filepath)
assert text.count("\r\n") == 0
assert newline == "\r\n"
def test_read_newline_equal_match():
"""If the file being loaded has an equal number of Windows and
Unix newlines, use the platform default
"""
with generate_python_file("\r\nabc\ndef") as filepath:
text, newline = mu.logic.read_and_decode(filepath)
assert text.count("\r\n") == 0
assert newline == os.linesep
#
# When writing Mu should honour the line-ending convention found inbound
#
def test_write_newline_to_unix():
"""If the file had Unix newlines it should be saved with Unix newlines
(In principle this check is unnecessary as Unix newlines are currently
the Mu internal default; but we leave it here in case that situation
changes)
"""
with generate_python_file() as filepath:
test_string = "\r\n".join("the cat sat on the mat".split())
mu.logic.save_and_encode(test_string, filepath, "\n")
with open(filepath, newline="") as f:
text = f.read()
assert text.count("\r\n") == 0
assert text.count("\n") == test_string.count("\r\n") + 1
def test_write_newline_to_windows():
"""If the file had Windows newlines it should be saved with Windows
newlines
"""
with generate_python_file() as filepath:
test_string = "\n".join("the cat sat on the mat".split())
mu.logic.save_and_encode(test_string, filepath, "\r\n")
with open(filepath, newline="") as f:
text = f.read()
assert len(re.findall("[^\r]\n", text)) == 0
assert text.count("\r\n") == test_string.count("\n") + 1
#
# Generate a Unicode test string which includes all the usual
# 7-bit characters but also an 8th-bit range which tends to
# trip things up between encodings
#
BYTES_TEST_STRING = bytes(range(0x20, 0x80)) + bytes(range(0xA0, 0xFF))
UNICODE_TEST_STRING = BYTES_TEST_STRING.decode("iso-8859-1")
#
# Tests for encoding detection
# Mu should detect:
# - BOM (UTF8/16)
# - Encoding cookie, e.g. # -*- coding: utf-8 -*-
# - fallback to the platform default (locale.getpreferredencoding())
#
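# A rough sketch of that detection order (illustrative only -- Mu's
# read_and_decode is the real implementation):
import codecs
import locale
import re


def _example_sniff_encoding(raw_bytes):
    """Guess an encoding: BOM first, then cookie, then platform default."""
    boms = (
        (codecs.BOM_UTF8, "utf-8-sig"),
        (codecs.BOM_UTF16_BE, "utf-16-be"),
        (codecs.BOM_UTF16_LE, "utf-16-le"),
    )
    for bom, name in boms:
        if raw_bytes.startswith(bom):
            return name
    # PEP 263 says an encoding cookie must sit in the first two lines.
    head = b"\n".join(raw_bytes.split(b"\n", 2)[:2])
    cookie = re.search(rb"coding[:=]\s*([-\w.]+)", head)
    if cookie:
        return cookie.group(1).decode("ascii")
    return locale.getpreferredencoding()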
def test_read_utf8bom():
"""Successfully decode from utf-8 encoded with BOM"""
with generate_python_file() as filepath:
with open(filepath, "w", encoding="utf-8-sig") as f:
f.write(UNICODE_TEST_STRING)
text, _ = mu.logic.read_and_decode(filepath)
assert text == UNICODE_TEST_STRING
def test_read_utf16bebom():
"""Successfully decode from utf-16 BE encoded with BOM"""
with generate_python_file() as filepath:
with open(filepath, "wb") as f:
f.write(codecs.BOM_UTF16_BE)
f.write(UNICODE_TEST_STRING.encode("utf-16-be"))
text, _ = mu.logic.read_and_decode(filepath)
assert text == UNICODE_TEST_STRING
def test_read_utf16lebom():
"""Successfully decode from utf-16 LE encoded with BOM"""
with generate_python_file() as filepath:
with open(filepath, "wb") as f:
f.write(codecs.BOM_UTF16_LE)
f.write(UNICODE_TEST_STRING.encode("utf-16-le"))
text, _ = mu.logic.read_and_decode(filepath)
assert text == UNICODE_TEST_STRING
def test_read_encoding_cookie():
"""Successfully decode from iso-8859-1 with an encoding cookie"""
encoding_cookie = ENCODING_COOKIE.replace(mu.logic.ENCODING, "iso-8859-1")
test_string = encoding_cookie + UNICODE_TEST_STRING
with generate_python_file() as filepath:
with open(filepath, "wb") as f:
f.write(test_string.encode("iso-8859-1"))
text, _ = mu.logic.read_and_decode(filepath)
assert text == test_string
def test_read_encoding_mu_default():
"""Successfully decode from the mu default"""
test_string = UNICODE_TEST_STRING.encode(mu.logic.ENCODING)
with generate_python_file() as filepath:
with open(filepath, "wb") as f:
f.write(test_string)
text, _ = mu.logic.read_and_decode(filepath)
assert text == UNICODE_TEST_STRING
def test_read_encoding_default():
"""Successfully decode from the default locale"""
test_string = UNICODE_TEST_STRING.encode(locale.getpreferredencoding())
with generate_python_file() as filepath:
with open(filepath, "wb") as f:
f.write(test_string)
text, _ = mu.logic.read_and_decode(filepath)
assert text == UNICODE_TEST_STRING
def test_read_encoding_unsuccessful():
"""Fail to decode encoded text"""
#
# Have to work quite hard to produce text which will definitely
# fail to decode since UTF-8 and cp1252 (the default on this
# computer) will, between them, decode nearly anything!
#
with generate_python_file() as filepath:
with open(filepath, "wb") as f:
f.write(codecs.BOM_UTF8)
f.write(b"\xd8\x00")
with pytest.raises(UnicodeDecodeError):
text, _ = mu.logic.read_and_decode(filepath)
#
# When writing, if the text has an encoding cookie, then that encoding
# should be used. Otherwise, UTF-8 should be used and no encoding cookie
# added
#
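# A hedged sketch of the save-side rule above (not Mu's actual code): honour
# a valid cookie, otherwise silently fall back to utf-8.
import codecs
import re


def _example_choose_save_encoding(text):
    """Return the codec named by an encoding cookie, or 'utf-8'."""
    cookie = re.search(r"coding[:=]\s*([-\w.]+)", text[:200])
    if cookie:
        try:
            codecs.lookup(cookie.group(1))  # is the named codec known?
            return cookie.group(1)
        except LookupError:
            pass  # unknown codec named in the cookie: use the default
    return "utf-8"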
def test_write_encoding_cookie_no_cookie():
"""If the text has no cookie of its own utf-8 will be used
when saving and no cookie added
"""
test_string = UNICODE_TEST_STRING
with generate_python_file() as filepath:
mu.logic.save_and_encode(test_string, filepath)
with open(filepath, encoding=mu.logic.ENCODING) as f:
for line in f:
assert line == test_string + "\n"
break
def test_write_encoding_cookie_existing_cookie():
"""If the text has a encoding cookie of its own then that encoding will
be used when saving and no change made to the cookie
"""
encoding = "iso-8859-1"
cookie = ENCODING_COOKIE.replace(mu.logic.ENCODING, encoding)
test_string = cookie + UNICODE_TEST_STRING
with generate_python_file() as filepath:
mu.logic.save_and_encode(test_string, filepath)
with open(filepath, encoding=encoding) as f:
assert next(f) == cookie
assert next(f) == UNICODE_TEST_STRING + "\n"
def test_write_invalid_codec():
"""If an encoding cookie is present but specifies an unknown codec,
utf-8 will be used instead
"""
encoding = "INVALID"
cookie = ENCODING_COOKIE.replace(mu.logic.ENCODING, encoding)
test_string = cookie + UNICODE_TEST_STRING
with generate_python_file() as filepath:
mu.logic.save_and_encode(test_string, filepath)
with open(filepath, encoding=mu.logic.ENCODING) as f:
assert next(f) == cookie
assert next(f) == UNICODE_TEST_STRING + "\n"
def test_handle_open_file():
"""
Ensure on_open_file event handler fires as expected with the editor's
direct_load when the view's open_file signal is emitted.
"""
class Dummy(QObject):
open_file = pyqtSignal(str)
venv = None
view = Dummy()
edit = mu.logic.Editor(view)
m = mock.MagicMock()
edit.direct_load = m
view.open_file.emit("/test/path.py")
m.assert_called_once_with("/test/path.py")
def test_load_cli():
"""
Ensure loading paths specified from the command line works as expected.
"""
mock_view = mock.MagicMock()
ed = mu.logic.Editor(mock_view)
m = mock.MagicMock()
ed.direct_load = m
ed.load_cli(["test.py"])
m.assert_called_once_with(os.path.abspath("test.py"))
m = mock.MagicMock()
ed.direct_load = m
ed.load_cli([5])
assert m.call_count == 0
def test_abspath():
"""
Ensure a set of unique absolute paths is returned, given a list of
arbitrary paths.
"""
ed = mu.logic.Editor(mock.MagicMock())
paths = ["foo", "bar", "bar"]
result = ed._abspath(paths)
assert len(result) == 2
assert os.path.abspath("foo") in result
assert os.path.abspath("bar") in result
def test_abspath_fail():
"""
If given a problematic arbitrary path, _abspath will log the problem but
continue to process the "good" paths.
"""
ed = mu.logic.Editor(mock.MagicMock())
paths = ["foo", "bar", 5, "bar"]
with mock.patch("mu.logic.logger.error") as mock_error:
result = ed._abspath(paths)
assert mock_error.call_count == 1
assert len(result) == 2
assert os.path.abspath("foo") in result
assert os.path.abspath("bar") in result
def test_find_replace_cancelled():
"""
If the activated find/replace dialog is cancelled, no status message is
displayed.
"""
mock_view = mock.MagicMock()
mock_view.show_find_replace.return_value = False
ed = mu.logic.Editor(mock_view)
ed.show_status_message = mock.MagicMock()
ed.find_replace()
assert ed.show_status_message.call_count == 0
def test_find_replace_no_find():
"""
If the user fails to supply something to find, display a modal warning
message to explain the problem.
"""
mock_view = mock.MagicMock()
mock_view.show_find_replace.return_value = ("", "", False)
ed = mu.logic.Editor(mock_view)
ed.show_message = mock.MagicMock()
ed.find_replace()
msg = "You must provide something to find."
info = "Please try again, this time with something in the find box."
mock_view.show_message.assert_called_once_with(msg, info)
def test_find_again_no_find():
"""
If the user fails to supply something to find again (forward or backward),
display a modal warning message to explain the problem.
"""
mock_view = mock.MagicMock()
ed = mu.logic.Editor(mock_view)
ed.find = False
ed.show_message = mock.MagicMock()
ed.find_again()
msg = "You must provide something to find."
info = "Please try again, this time with something in the find box."
mock_view.show_message.assert_called_once_with(msg, info)
ed.find_again_backward(forward=False)
assert mock_view.show_message.call_count == 2
def test_find_replace_find_matched():
"""
If the user just supplies a find target and it is matched in the code then
the expected status message should be shown.
"""
mock_view = mock.MagicMock()
mock_view.show_find_replace.return_value = ("foo", "", False)
mock_view.highlight_text.return_value = True
ed = mu.logic.Editor(mock_view)
ed.show_status_message = mock.MagicMock()
ed.find_replace()
mock_view.highlight_text.assert_called_once_with("foo")
assert ed.find == "foo"
assert ed.replace == ""
assert ed.global_replace is False
ed.show_status_message.assert_called_once_with(
'Highlighting matches for "foo".'
)
def test_find_again_find_matched():
"""
If the user supplies a find target to find again (forward or backward) and
it is matched in the code then the expected status message should be
shown.
"""
mock_view = mock.MagicMock()
mock_view.highlight_text.return_value = True
ed = mu.logic.Editor(mock_view)
ed.show_status_message = mock.MagicMock()
ed.find = "foo"
ed.find_again()
mock_view.highlight_text.assert_called_once_with("foo", True)
assert ed.find == "foo"
assert ed.replace == ""
assert ed.global_replace is False
ed.show_status_message.assert_called_once_with(
'Highlighting matches for "foo".'
)
ed.find_again_backward()
assert ed.show_status_message.call_count == 2
def test_find_replace_find_unmatched():
"""
If the user just supplies a find target and it is UN-matched in the code
then the expected status message should be shown.
"""
mock_view = mock.MagicMock()
mock_view.show_find_replace.return_value = ("foo", "", False)
mock_view.highlight_text.return_value = False
ed = mu.logic.Editor(mock_view)
ed.show_status_message = mock.MagicMock()
ed.find_replace()
ed.show_status_message.assert_called_once_with('Could not find "foo".')
def test_find_again_find_unmatched():
"""
If the user supplies a find target to find_again or find_again_backward
and it is UN-matched in the code then the expected status message should
be shown.
"""
mock_view = mock.MagicMock()
mock_view.highlight_text.return_value = False
ed = mu.logic.Editor(mock_view)
ed.find = "foo"
ed.show_status_message = mock.MagicMock()
ed.find_again()
ed.show_status_message.assert_called_once_with('Could not find "foo".')
ed.find_again_backward()
ed.show_status_message.assert_called_with('Could not find "foo".')
assert ed.show_status_message.call_count == 2
def test_find_replace_replace_no_match():
"""
If the user supplies both a find and replace target and the find target is
UN-matched in the code, then the expected status message should be shown.
"""
mock_view = mock.MagicMock()
mock_view.show_find_replace.return_value = ("foo", "bar", False)
mock_view.replace_text.return_value = 0
ed = mu.logic.Editor(mock_view)
ed.show_status_message = mock.MagicMock()
ed.find_replace()
assert ed.find == "foo"
assert ed.replace == "bar"
assert ed.global_replace is False
mock_view.replace_text.assert_called_once_with("foo", "bar", False)
ed.show_status_message.assert_called_once_with('Could not find "foo".')
def test_find_replace_replace_single_match():
"""
If the user supplies both a find and replace target and the find target is
matched once in the code, then the expected status message should be shown.
"""
mock_view = mock.MagicMock()
mock_view.show_find_replace.return_value = ("foo", "bar", False)
mock_view.replace_text.return_value = 1
ed = mu.logic.Editor(mock_view)
ed.show_status_message = mock.MagicMock()
ed.find_replace()
assert ed.find == "foo"
assert ed.replace == "bar"
assert ed.global_replace is False
mock_view.replace_text.assert_called_once_with("foo", "bar", False)
ed.show_status_message.assert_called_once_with(
'Replaced "foo" with "bar".'
)
def test_find_replace_replace_multi_match():
"""
If the user supplies both a find and replace target and the find target is
matched many times in the code, then the expected status message should be
shown.
"""
mock_view = mock.MagicMock()
mock_view.show_find_replace.return_value = ("foo", "bar", True)
mock_view.replace_text.return_value = 4
ed = mu.logic.Editor(mock_view)
ed.show_status_message = mock.MagicMock()
ed.find_replace()
assert ed.find == "foo"
assert ed.replace == "bar"
assert ed.global_replace is True
mock_view.replace_text.assert_called_once_with("foo", "bar", True)
ed.show_status_message.assert_called_once_with(
'Replaced 4 matches of "foo" with "bar".'
)
def test_toggle_comments():
"""
Ensure the method in the view for toggling comments on and off is called.
"""
mock_view = mock.MagicMock()
ed = mu.logic.Editor(mock_view)
ed.toggle_comments()
mock_view.toggle_comments.assert_called_once_with()
@pytest.mark.skipif(sys.version_info < (3, 6), reason="Requires Python3.6")
def test_tidy_code_no_tab():
"""
If there's no current tab ensure black isn't called.
"""
mock_view = mock.MagicMock()
mock_view.current_tab = None
ed = mu.logic.Editor(mock_view)
ed.show_status_message = mock.MagicMock()
ed.tidy_code()
assert ed.show_status_message.call_count == 0
@pytest.mark.skipif(sys.version_info < (3, 6), reason="Requires Python3.6")
def test_tidy_code_not_python():
"""
If the current tab doesn't contain Python, abort.
"""
mock_view = mock.MagicMock()
mock_view.current_tab = mock.MagicMock()
mock_view.current_tab.path = "foo.html"
ed = mu.logic.Editor(mock_view)
ed.show_status_message = mock.MagicMock()
ed.tidy_code()
assert ed.show_status_message.call_count == 0
@pytest.mark.skipif(sys.version_info < (3, 6), reason="Requires Python3.6")
def test_tidy_code_valid_python():
"""
Ensure the "good case" works as expected (the code is reformatted and Mu
shows a status message to confirm so).
"""
mock_view = mock.MagicMock()
mock_view.current_tab.text.return_value = "print('hello')"
ed = mu.logic.Editor(mock_view)
ed.show_status_message = mock.MagicMock()
ed.tidy_code()
tab = mock_view.current_tab
tab.SendScintilla.assert_called_once_with(
tab.SCI_SETTEXT, b'print("hello")\n'
)
assert ed.show_status_message.call_count == 1
@pytest.mark.skipif(sys.version_info < (3, 6), reason="Requires Python3.6")
def test_tidy_code_invalid_python():
"""
If the code is incorrectly formatted so black can't do its thing, ensure
that a message is shown to the user to say so.
"""
mock_view = mock.MagicMock()
mock_view.current_tab.text.return_value = "print('hello'"
ed = mu.logic.Editor(mock_view)
ed.tidy_code()
assert mock_view.show_message.call_count == 1
@pytest.mark.skipif(sys.version_info < (3, 6), reason="Requires Python3.6")
def test_check_tidy_check_line_too_long():
"""
Check we detect, then correct, lines longer than MAX_LINE_LENGTH.
"""
mock_view = mock.MagicMock()
# a simple to format list running 94 characters long plus newline
long_list = "[{}{}]\n".format(*("(1, 2), " * 10, '"0123456789"'))
tab = mock_view.current_tab
tab.text.return_value = long_list
ed = mu.logic.Editor(mock_view)
too_long = mu.logic.check_pycodestyle(tab.text.return_value)
assert len(too_long) == 1 # One issue found: line too long
ed.tidy_code()
called_with = tab.SendScintilla.call_args[0][1].decode()
tab.text.return_value = called_with
ok = mu.logic.check_pycodestyle(tab.text.return_value)
assert len(ok) == 0 # No issues
assert (
tab.text.return_value
== """[
(1, 2),
(1, 2),
(1, 2),
(1, 2),
(1, 2),
(1, 2),
(1, 2),
(1, 2),
(1, 2),
(1, 2),
"0123456789",
]
"""
)
@pytest.mark.skipif(sys.version_info < (3, 6), reason="Requires Python3.6")
def test_check_tidy_check_short_line():
"""
Check that Tidy and Check leave a short line as-is and respect
MAX_LINE_LENGTH.
"""
mock_view = mock.MagicMock()
# a simple to format list running 94 characters long plus newline
long_list = "[{}{}]\n".format(*("(1, 2), " * 10, '"0123456789"'))
tab = mock_view.current_tab
tab.text.return_value = long_list
with mock.patch("mu.logic.MAX_LINE_LENGTH", 94):
ed = mu.logic.Editor(mock_view)
too_long = mu.logic.check_pycodestyle(tab.text.return_value)
assert len(too_long) == 0 # No issues
ed.tidy_code()
called_with = tab.SendScintilla.call_args[0][1].decode()
tab.text.return_value = called_with
ok = mu.logic.check_pycodestyle(tab.text.return_value)
assert len(ok) == 0 # No issues
assert tab.text.return_value == long_list
def test_device_init(microbit_com1):
"""
Test that all properties are set properly and can be read.
"""
assert microbit_com1.vid == 0x0D28
assert microbit_com1.pid == 0x0204
assert microbit_com1.port == "COM1"
assert microbit_com1.serial_number == 123456
assert microbit_com1.long_mode_name == "BBC micro:bit"
assert microbit_com1.short_mode_name == "microbit"
def test_device_with_no_board_name_is_mode_name(esp_device):
"""
Test that when no board name is given, the board name is the same
as the mode name.
"""
assert esp_device.name == "ESP MicroPython device"
def test_com1_equality(microbit_com1):
"""
Test that two separate Device-objects representing the same device
are recognized as equal.
"""
identical_microbit_com1 = mu.logic.Device(
0x0D28,
0x0204,
"COM1",
123456,
"ARM",
"BBC micro:bit",
"microbit",
"BBC micro:bit",
)
assert microbit_com1 == identical_microbit_com1
def test_com1_not_equal_on_different_ports(microbit_com1):
"""
Test that if two otherwise identical devices differ on the port, they
are not recognized as being equal.
"""
microbit_com2 = mu.logic.Device(
0x0D28,
0x0204,
"COM2",
123456,
"ARM",
"BBC micro:bit",
"microbit",
"BBC micro:bit",
)
assert microbit_com1 != microbit_com2
def test_com1_hash_equality(microbit_com1):
"""
Test that hash function returns the same for two identical Device-objects.
"""
identical_microbit_com1 = mu.logic.Device(
0x0D28,
0x0204,
"COM1",
123456,
"ARM",
"BBC micro:bit",
"microbit",
"BBC micro:bit",
)
assert hash(microbit_com1) == hash(identical_microbit_com1)
def test_com1_hash_not_equal_on_different_ports(microbit_com1):
"""
Test that the hash function differs, when two otherwise identical
devices are connected to two different ports.
"""
microbit_com2 = mu.logic.Device(
0x0D28,
0x0204,
"COM2",
123456,
"ARM",
"BBC micro:bit",
"microbit",
"BBC micro:bit",
)
assert hash(microbit_com1) != hash(microbit_com2)
|
gpl-3.0
|
doirisks/dori
|
models/10.1161:CIRCULATIONAHA.108.816694/example.py
|
1
|
5090
|
"""
example.py
by Ted Morin
contains example code for 30-year CVD calculator - all models
10.1161/CIRCULATIONAHA.108.816694
2009 Predicting the Thirty-year Risk of Cardiovascular Disease
The Framingham Heart Study
"""
#ismale,age,sbp,antihyp,smoke,diabet,totchol,hdlchol
#tests
from modela import model as a
from modelb import model as b
from modelc import model as c
from modeld import model as d
ascores = []
ascores.append( a(1,53,125,1,0,1,161,55) ) #m,53yrs,125sbp,treated,nosmokes, diabetic,161tot,55hdl,bmi20
ascores.append( a(0,61,124,0,1,0,180,47) ) #f,61yrs,124sbp,not treated,smokes,not db,180tot,47hdl,bmi20
ascores.append( a(1,55,118,0,1,1,200,45) ) #m, 55yrs,118sbp, not treated, smokes, db, 200tot, 45hdl,bmi20
ascores.append( a(1,45,129,1,0,1,220,56) ) #m, 45, 129sbp, treated, nosmokes, db, 220tot, 56hdl,bmi20
ascores.append( a(1,71,125,0,1,0,205,47) ) #m, 71, 125sbp, notreated, smokes, nodb,205tot, 47hdl,bmi20
ascores.append( a(0,40,117,0,0,1,160,54) ) #f, 40, 117sbp, notreated, nosmokes, db, 160, 54hdl,bmi20
ascores.append( a(0,43,127,1,0,1,300,67) ) #f, 43, 127sbp, treated, nosmokes, db, 300, 67hdl,bmi20
ascores.append( a(0,52,112,1,1,1,287,56) ) #f, 52, 112sbp, treated, smokes,db, 287, 56hdl,bmi20
ascores.append( a(0,61,141,0,1,0,371,80) ) #f, 61, 141sbp,notreated, smokes,no db, 371tot,80hdl,bmi20
ascores.append( a(0,46,131,0,1,0,211,87) ) #f, 46yrs, 131sbp, notreated,smokes,no db, 211tot,87hdl,bmi20
ascores.append( a(1,39,135,0,0,0,170,60) ) #m, 39yrs, 135sbp, notreated, no smoke, no db, 170tot, 60hdl,bmi20
bscores = []
bscores.append( b(1,53,125,1,0,1,161,55) ) #m,53yrs,125sbp,treated,nosmokes, diabetic,161tot,55hdl,bmi20
bscores.append( b(0,61,124,0,1,0,180,47) ) #f,61yrs,124sbp,not treated,smokes,not db,180tot,47hdl,bmi20
bscores.append( b(1,55,118,0,1,1,200,45) ) #m, 55yrs,118sbp, not treated, smokes, db, 200tot, 45hdl,bmi20
bscores.append( b(1,45,129,1,0,1,220,56) ) #m, 45, 129sbp, treated, nosmokes, db, 220tot, 56hdl,bmi20
bscores.append( b(1,71,125,0,1,0,205,47) ) #m, 71, 125sbp, notreated, smokes, nodb,205tot, 47hdl,bmi20
bscores.append( b(0,40,117,0,0,1,160,54) ) #f, 40, 117sbp, notreated, nosmokes, db, 160, 54hdl,bmi20
bscores.append( b(0,43,127,1,0,1,300,67) ) #f, 43, 127sbp, treated, nosmokes, db, 300, 67hdl,bmi20
bscores.append( b(0,52,112,1,1,1,287,56) ) #f, 52, 112sbp, treated, smokes,db, 287, 56hdl,bmi20
bscores.append( b(0,61,141,0,1,0,371,80) ) #f, 61, 141sbp,notreated, smokes,no db, 371tot,80hdl,bmi20
bscores.append( b(0,46,131,0,1,0,211,87) ) #f, 46yrs, 131sbp, notreated,smokes,no db, 211tot,87hdl,bmi20
bscores.append( b(1,39,135,0,0,0,170,60) ) #m, 39yrs, 135sbp, notreated, no smoke, no db, 170tot, 60hdl,bmi20
cscores = []
cscores.append( c(1,53,125,1,0,1,20) ) #m,53yrs,125sbp,treated,nosmokes, diabetic,161tot,55hdl,bmi20
cscores.append( c(0,61,124,0,1,0,20) ) #f,61yrs,124sbp,not treated,smokes,not db,180tot,47hdl,bmi20
cscores.append( c(1,55,118,0,1,1,20) ) #m, 55yrs,118sbp, not treated, smokes, db, 200tot, 45hdl,bmi20
cscores.append( c(1,45,129,1,0,1,20) ) #m, 45, 129sbp, treated, nosmokes, db, 220tot, 56hdl,bmi20
cscores.append( c(1,71,125,0,1,0,20) ) #m, 71, 125sbp, notreated, smokes, nodb,205tot, 47hdl,bmi20
cscores.append( c(0,40,117,0,0,1,20) ) #f, 40, 117sbp, notreated, nosmokes, db, 160, 54hdl,bmi20
cscores.append( c(0,43,127,1,0,1,20) ) #f, 43, 127sbp, treated, nosmokes, db, 300, 67hdl,bmi20
cscores.append( c(0,52,112,1,1,1,20) ) #f, 52, 112sbp, treated, smokes,db, 287, 56hdl,bmi20
cscores.append( c(0,61,141,0,1,0,20) ) #f, 61, 141sbp,notreated, smokes,no db, 371tot,80hdl,bmi20
cscores.append( c(0,46,131,0,1,0,20) ) #f, 46yrs, 131sbp, notreated,smokes,no db, 211tot,87hdl,bmi20
cscores.append( c(1,39,135,0,0,0,20) ) #m, 39yrs, 135sbp, notreated, no smoke, no db, 170tot, 60hdl,bmi20
dscores = []
dscores.append( d(1,53,125,1,0,1,20) ) #m,53yrs,125sbp,treated,nosmokes, diabetic,161tot,55hdl,bmi20
dscores.append( d(0,61,124,0,1,0,20) ) #f,61yrs,124sbp,not treated,smokes,not db,180tot,47hdl,bmi20
dscores.append( d(1,55,118,0,1,1,20) ) #m, 55yrs,118sbp, not treated, smokes, db, 200tot, 45hdl,bmi20
dscores.append( d(1,45,129,1,0,1,20) ) #m, 45, 129sbp, treated, nosmokes, db, 220tot, 56hdl,bmi20
dscores.append( d(1,71,125,0,1,0,20) ) #m, 71, 125sbp, notreated, smokes, nodb,205tot, 47hdl,bmi20
dscores.append( d(0,40,117,0,0,1,20) ) #f, 40, 117sbp, notreated, nosmokes, db, 160, 54hdl,bmi20
dscores.append( d(0,43,127,1,0,1,20) ) #f, 43, 127sbp, treated, nosmokes, db, 300, 67hdl,bmi20
dscores.append( d(0,52,112,1,1,1,20) ) #f, 52, 112sbp, treated, smokes,db, 287, 56hdl,bmi20
dscores.append( d(0,61,141,0,1,0,20) ) #f, 61, 141sbp,notreated, smokes,no db, 371tot,80hdl,bmi20
dscores.append( d(0,46,131,0,1,0,20) ) #f, 46yrs, 131sbp, notreated,smokes,no db, 211tot,87hdl,bmi20
dscores.append( d(1,39,135,0,0,0,20) ) #m, 39yrs, 135sbp, notreated, no smoke, no db, 170tot, 60hdl,bmi20
for i in range(len(ascores)):
print "%.3f" % (float(ascores[i])*100.), "%.3f" % (float(bscores[i])*100.), "%.3f" % (float(cscores[i])*100.), "%.3f" % (float(dscores[i])*100.)
|
gpl-3.0
|
Br1an6/ACS_Netplumber_Implementation
|
hassel-c/net_plumbing/examples/template/run_reachability.py
|
5
|
1972
|
'''
Created on Aug 1, 2012
@author: Peyman Kazemian
'''
from examples.utils.network_loader import load_network
from config_parser.cisco_router_parser import cisco_router
from utils.wildcard import wildcard_create_bit_repeat
from utils.wildcard_utils import set_header_field
from headerspace.hs import headerspace
from time import time
from headerspace.applications import find_reachability,print_paths
from config_parser.graph_xml_parser import graph_xml
g = graph_xml()
g.set_device_types(["Router"])
g.read_graphs_xml("path_to_graphs.xml")
g.read_links_xml("path_to_links.xml")
g.read_nodes_xml("path_to_nodes.xml")
settings = {"rtr_names":g.generate_node_names(),
"num_layers":3,
"fwd_engine_layer":2,
"input_path":"tf_files",
"switch_id_multiplier":cisco_router.SWITCH_ID_MULTIPLIER,
"port_type_multiplier":cisco_router.PORT_TYPE_MULTIPLIER,
"out_port_type_const":cisco_router.OUTPUT_PORT_TYPE_CONST,
"remove_duplicates":True,
}
(ntf,ttf,name_to_id,id_to_name) = load_network(settings)
# create all-x packet as input headerspace.
all_x = wildcard_create_bit_repeat(ntf.length,0x3)
# uncomment to set some field
#set_header_field(cisco_router.HS_FORMAT(), all_x, "field", value, right_mask)
#set_header_field(cisco_router.HS_FORMAT(), all_x, "vlan", 92, 0)
test_pkt = headerspace(ntf.length)
test_pkt.add_hs(all_x)
#set some input/output ports
output_port_addition = cisco_router.PORT_TYPE_MULTIPLIER * \
cisco_router.OUTPUT_PORT_TYPE_CONST
#TODO: CHANGE THIS IF YOU WANT TO RUN IT FROM/TO DIFFERENT PORTS
src_port_id = name_to_id["ROUTER NAME"]["PORT NAME"]
dst_port_ids = [name_to_id["ROUTER NAME"]["PORT NAME"]+output_port_addition]
#start reachability test and print results
st = time()
paths = find_reachability(ntf, ttf, src_port_id, dst_port_ids, test_pkt)
en = time()
print_paths(paths, id_to_name)
print "Found ",len(paths)," paths in ",en-st," seconds."
|
gpl-2.0
|
keiranFTW/android_kernel_sony_msm8660
|
arch/ia64/scripts/unwcheck.py
|
13143
|
1714
|
#!/usr/bin/python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
if len(sys.argv) != 2:
print "Usage: %s FILE" % sys.argv[0]
sys.exit(2)
readelf = os.getenv("READELF", "readelf")
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
if slots != rlen_sum:
global num_errors
num_errors += 1
if not func: func = "[%#x-%#x]" % (start, end)
print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
return
num_funcs = 0
num_errors = 0
func = False
slots = 0
rlen_sum = 0
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
m = start_pattern.match(line)
if m:
check_func(func, slots, rlen_sum)
func = m.group(1)
start = long(m.group(2), 16)
end = long(m.group(3), 16)
slots = 3 * (end - start) / 16
rlen_sum = 0L
num_funcs += 1
else:
m = rlen_pattern.match(line)
if m:
rlen_sum += long(m.group(1))
check_func(func, slots, rlen_sum)
if num_errors == 0:
print "No errors detected in %u functions." % num_funcs
else:
if num_errors > 1:
err="errors"
else:
err="error"
print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
sys.exit(1)
|
gpl-2.0
|
remh/dd-agent
|
transaction.py
|
22
|
7764
|
# stdlib
from datetime import datetime, timedelta
import logging
from operator import attrgetter
import sys
import time
# project
from checks.check_status import ForwarderStatus
from util import get_tornado_ioloop, plural
log = logging.getLogger(__name__)
FLUSH_LOGGING_PERIOD = 20
FLUSH_LOGGING_INITIAL = 5
class Transaction(object):
def __init__(self):
self._id = None
self._error_count = 0
self._next_flush = datetime.now()
self._size = None
def get_id(self):
return self._id
def set_id(self, new_id):
assert self._id is None
self._id = new_id
def inc_error_count(self):
self._error_count = self._error_count + 1
def get_error_count(self):
return self._error_count
def get_size(self):
if self._size is None:
self._size = sys.getsizeof(self)
return self._size
def get_next_flush(self):
return self._next_flush
def compute_next_flush(self,max_delay):
# Transactions are replayed; the delay grows with the error count,
# capped at MAX_WAIT_FOR_REPLAY
td = timedelta(seconds=self._error_count * 20)
if td > max_delay:
td = max_delay
newdate = datetime.now() + td
self._next_flush = newdate.replace(microsecond=0)
def time_to_flush(self, now=None):
    # resolve the default at call time: a datetime.now() default argument
    # would be frozen at import time
    return self._next_flush < (now or datetime.now())
def flush(self):
raise NotImplementedError("To be implemented in a subclass")
class TransactionManager(object):
"""Holds any transaction derived object list and make sure they
are all commited, without exceeding parameters (throttling, memory consumption) """
def __init__(self, max_wait_for_replay, max_queue_size, throttling_delay):
self._MAX_WAIT_FOR_REPLAY = max_wait_for_replay
self._MAX_QUEUE_SIZE = max_queue_size
self._THROTTLING_DELAY = throttling_delay
self._flush_without_ioloop = False # useful for tests
self._transactions = [] # List of all non-committed transactions
self._total_count = 0 # Maintain size/count to avoid recomputing them every time
self._total_size = 0
self._flush_count = 0
self._transactions_received = 0
self._transactions_flushed = 0
# Global counter to assign a number to each transaction: we may have an
# issue if this wraps around
self._counter = 0
self._trs_to_flush = None # Current transactions being flushed
self._last_flush = datetime.now() # Last flush (for throttling)
# Track an initial status message.
ForwarderStatus().persist()
def get_transactions(self):
return self._transactions
def print_queue_stats(self):
log.debug("Queue size: at %s, %s transaction(s), %s KB" %
(time.time(), self._total_count, (self._total_size/1024)))
def get_tr_id(self):
self._counter = self._counter + 1
return self._counter
def append(self,tr):
# Give the transaction an id
tr.set_id(self.get_tr_id())
# Check the size
tr_size = tr.get_size()
log.debug("New transaction to add, total size of queue would be: %s KB" %
((self._total_size + tr_size) / 1024))
if (self._total_size + tr_size) > self._MAX_QUEUE_SIZE:
log.warn("Queue is too big, removing old transactions...")
new_trs = sorted(self._transactions,key=attrgetter('_next_flush'), reverse = True)
for tr2 in new_trs:
if (self._total_size + tr_size) > self._MAX_QUEUE_SIZE:
self._transactions.remove(tr2)
self._total_count = self._total_count - 1
self._total_size = self._total_size - tr2.get_size()
log.warn("Removed transaction %s from queue" % tr2.get_id())
# Done
self._transactions.append(tr)
self._total_count += 1
self._transactions_received += 1
self._total_size = self._total_size + tr_size
log.debug("Transaction %s added" % (tr.get_id()))
self.print_queue_stats()
def flush(self):
if self._trs_to_flush is not None:
log.debug("A flush is already in progress, not doing anything")
return
to_flush = []
# Do we have something to do ?
now = datetime.now()
for tr in self._transactions:
if tr.time_to_flush(now):
to_flush.append(tr)
count = len(to_flush)
should_log = self._flush_count + 1 <= FLUSH_LOGGING_INITIAL or (self._flush_count + 1) % FLUSH_LOGGING_PERIOD == 0
if count > 0:
if should_log:
log.info("Flushing %s transaction%s during flush #%s" % (count,plural(count), str(self._flush_count + 1)))
else:
log.debug("Flushing %s transaction%s during flush #%s" % (count,plural(count), str(self._flush_count + 1)))
self._trs_to_flush = to_flush
self.flush_next()
else:
if should_log:
log.info("No transaction to flush during flush #%s" % str(self._flush_count + 1))
else:
log.debug("No transaction to flush during flush #%s" % str(self._flush_count + 1))
if self._flush_count + 1 == FLUSH_LOGGING_INITIAL:
log.info("First flushes done, next flushes will be logged every %s flushes." % FLUSH_LOGGING_PERIOD)
self._flush_count += 1
ForwarderStatus(
queue_length=self._total_count,
queue_size=self._total_size,
flush_count=self._flush_count,
transactions_received=self._transactions_received,
transactions_flushed=self._transactions_flushed).persist()
def flush_next(self):
if len(self._trs_to_flush) > 0:
td = self._last_flush + self._THROTTLING_DELAY - datetime.now()
# Python 2.7 has this built in; python < 2.7 doesn't...
if hasattr(td,'total_seconds'):
delay = td.total_seconds()
else:
delay = (td.microseconds + (td.seconds + td.days * 24 * 3600) * 10**6) / 10.0**6
if delay <= 0:
tr = self._trs_to_flush.pop()
self._last_flush = datetime.now()
log.debug("Flushing transaction %d" % tr.get_id())
try:
tr.flush()
except Exception, e:
log.exception(e)
self.tr_error(tr)
self.flush_next()
else:
# Wait a little bit more
tornado_ioloop = get_tornado_ioloop()
if tornado_ioloop._running:
tornado_ioloop.add_timeout(time.time() + delay,
lambda: self.flush_next())
elif self._flush_without_ioloop:
# Tornado is not started (i.e. unittests), do it manually: BLOCKING
time.sleep(delay)
self.flush_next()
else:
self._trs_to_flush = None
def tr_error(self,tr):
tr.inc_error_count()
tr.compute_next_flush(self._MAX_WAIT_FOR_REPLAY)
log.warn("Transaction %d in error (%s error%s), it will be replayed after %s" %
(tr.get_id(), tr.get_error_count(), plural(tr.get_error_count()),
tr.get_next_flush()))
def tr_success(self,tr):
log.debug("Transaction %d completed" % tr.get_id())
self._transactions.remove(tr)
self._total_count -= 1
self._total_size -= tr.get_size()
self._transactions_flushed += 1
self.print_queue_stats()
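# Hedged usage sketch (values are illustrative, not dd-agent defaults):
#
#   from datetime import timedelta
#   manager = TransactionManager(max_wait_for_replay=timedelta(seconds=300),
#                                max_queue_size=30 * 1024 * 1024,
#                                throttling_delay=timedelta(milliseconds=500))
#   manager.append(some_transaction)  # any Transaction subclass instance
#   manager.flush()                   # throttled; resumes via the ioloop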
|
bsd-3-clause
|
atosorigin/ansible
|
test/units/module_utils/common/parameters/test_check_arguments.py
|
11
|
2059
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
import pytest
from ansible.module_utils.common.parameters import get_unsupported_parameters
@pytest.fixture
def argument_spec():
return {
'state': {'aliases': ['status']},
'enabled': {},
}
def mock_handle_aliases(*args):
aliases = {}
legal_inputs = [
'_ansible_check_mode',
'_ansible_debug',
'_ansible_diff',
'_ansible_keep_remote_files',
'_ansible_module_name',
'_ansible_no_log',
'_ansible_remote_tmp',
'_ansible_selinux_special_fs',
'_ansible_shell_executable',
'_ansible_socket',
'_ansible_string_conversion_action',
'_ansible_syslog_facility',
'_ansible_tmpdir',
'_ansible_verbosity',
'_ansible_version',
'state',
'status',
'enabled',
]
return aliases, legal_inputs
@pytest.mark.parametrize(
('module_parameters', 'legal_inputs', 'expected'),
(
({'fish': 'food'}, ['state', 'enabled'], set(['fish'])),
({'state': 'enabled', 'path': '/var/lib/path'}, None, set(['path'])),
({'state': 'enabled', 'path': '/var/lib/path'}, ['state', 'path'], set()),
({'state': 'enabled', 'path': '/var/lib/path'}, ['state'], set(['path'])),
({}, None, set()),
({'state': 'enabled'}, None, set()),
({'status': 'enabled', 'enabled': True, 'path': '/var/lib/path'}, None, set(['path'])),
({'status': 'enabled', 'enabled': True}, None, set()),
)
)
def test_check_arguments(argument_spec, module_parameters, legal_inputs, expected, mocker):
mocker.patch('ansible.module_utils.common.parameters.handle_aliases', side_effect=mock_handle_aliases)
result = get_unsupported_parameters(argument_spec, module_parameters, legal_inputs)
assert result == expected
|
gpl-3.0
|
kabuku/blender-python
|
blenderlib/bpy.props.py
|
1
|
10232
|
'''Property Definitions (bpy.props)
This module defines properties to extend Blender's internal data. The result of these functions is used to assign properties to classes registered with Blender and can't be used directly.
'''
import sys  # needed by the numeric property signatures below
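# Hedged usage sketch (assumes running inside Blender, where bpy is
# importable; the property definitions are assigned to registered classes,
# never called for a directly usable value):
#
#   import bpy
#
#   class ExampleSettings(bpy.types.PropertyGroup):
#       enabled = bpy.props.BoolProperty(name="Enabled", default=True)
#       count = bpy.props.IntProperty(name="Count", default=3, min=0)
#
#   bpy.utils.register_class(ExampleSettings)
#   bpy.types.Scene.example_settings = \
#       bpy.props.PointerProperty(type=ExampleSettings)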
def BoolProperty(name="", description="", default=False, options={'ANIMATABLE'}, subtype='NONE', update=None, get=None, set=None):
'''Returns a new boolean property definition.
Arguments:
@name (string): Name used in the user interface.
@description (string): Text used for the tooltip and api documentation.
@options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
@subtype (string): Enumerator in ['UNSIGNED', 'PERCENTAGE', 'FACTOR', 'ANGLE', 'TIME', 'DISTANCE', 'NONE'].
@update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
*Warning* there are no safety checks to avoid infinite recursion.
'''
pass
def BoolVectorProperty(name="", description="", default=(False, False, False), options={'ANIMATABLE'}, subtype='NONE', size=3, update=None, get=None, set=None):
'''Returns a new vector boolean property definition.
Arguments:
@name (string): Name used in the user interface.
@description (string): Text used for the tooltip and api documentation.
@default (sequence): sequence of booleans the length of *size*.
@options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
@subtype (string): Enumerator in ['COLOR', 'TRANSLATION', 'DIRECTION', 'VELOCITY', 'ACCELERATION', 'MATRIX', 'EULER', 'QUATERNION', 'AXISANGLE', 'XYZ', 'COLOR_GAMMA', 'LAYER', 'NONE'].
@size (int): Vector dimensions in [1, and 32].
@update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
*Warning* there are no safety checks to avoid infinite recursion.
'''
pass
def CollectionProperty(items, type="", description="", options={'ANIMATABLE'}):
'''Returns a new collection property definition.
Arguments:
@type (class): A subclass of bpy.types.PropertyGroup.
@name (string): Name used in the user interface.
@description (string): Text used for the tooltip and api documentation.
@options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
'''
pass
def EnumProperty(items, name="", description="", default="", options={'ANIMATABLE'}, update=None, get=None, set=None):
'''Returns a new enumerator property definition.
Arguments:
@name (string): Name used in the user interface.
@description (string): Text used for the tooltip and api documentation.
@default (string or set): The default value for this enum, A string when *ENUM_FLAG*is disabled otherwise a set which may only contain string identifiers
used in *items*.
@options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'ENUM_FLAG', 'LIBRARY_EDITABLE'].
@items (sequence of string triples or a function): sequence of enum items formatted: [(identifier, name, description, icon, number), ...] where the identifier is used
for python access and other values are used for the interface.
Note the item is optional.
For dynamic values a callback can be passed which returns a list in
the same format as the static list.
This function must take 2 arguments (self, context)
WARNING: Do not use generators here (they will work the first time, but will lead to empty values
in some unload/reload scenarios)!
@update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
*Warning* there are no safety checks to avoid infinite recursion.
'''
pass
def FloatProperty(name="", description="", default=0.0, min=sys.float_info.min, max=sys.float_info.max, soft_min=sys.float_info.min, soft_max=sys.float_info.max, step=3, precision=2, options={'ANIMATABLE'}, subtype='NONE', unit='NONE', update=None, get=None, set=None):
'''Returns a new float property definition.
Arguments:
@name (string): Name used in the user interface.
@description (string): Text used for the tooltip and api documentation.
@options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
@subtype (string): Enumerator in ['UNSIGNED', 'PERCENTAGE', 'FACTOR', 'ANGLE', 'TIME', 'DISTANCE', 'NONE'].
@unit (string): Enumerator in ['NONE', 'LENGTH', 'AREA', 'VOLUME', 'ROTATION', 'TIME', 'VELOCITY', 'ACCELERATION'].
@update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
*Warning* there are no safety checks to avoid infinite recursion.
'''
pass
def FloatVectorProperty(name="", description="", default=(0.0, 0.0, 0.0), min=sys.float_info.min, max=sys.float_info.max, soft_min=sys.float_info.min, soft_max=sys.float_info.max, step=3, precision=2, options={'ANIMATABLE'}, subtype='NONE', size=3, update=None, get=None, set=None):
'''Returns a new vector float property definition.
Arguments:
@name (string): Name used in the user interface.
@description (string): Text used for the tooltip and api documentation.
@default (sequence): sequence of floats the length of *size*.
@options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
@subtype (string): Enumerator in ['COLOR', 'TRANSLATION', 'DIRECTION', 'VELOCITY', 'ACCELERATION', 'MATRIX', 'EULER', 'QUATERNION', 'AXISANGLE', 'XYZ', 'COLOR_GAMMA', 'LAYER', 'NONE'].
@unit (string): Enumerator in ['NONE', 'LENGTH', 'AREA', 'VOLUME', 'ROTATION', 'TIME', 'VELOCITY', 'ACCELERATION'].
@size (int): Vector dimensions in [1, and 32].
@update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
*Warning* there are no safety checks to avoid infinite recursion.
'''
pass
def IntProperty(name="", description="", default=0, min=-sys.maxint, max=sys.maxint, soft_min=-sys.maxint, soft_max=sys.maxint, step=1, options={'ANIMATABLE'}, subtype='NONE', update=None, get=None, set=None):
'''Returns a new int property definition.
Arguments:
@name (string): Name used in the user interface.
@description (string): Text used for the tooltip and api documentation.
@options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
@subtype (string): Enumerator in ['UNSIGNED', 'PERCENTAGE', 'FACTOR', 'ANGLE', 'TIME', 'DISTANCE', 'NONE'].
@update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
*Warning* there are no safety checks to avoid infinite recursion.
'''
pass
def IntVectorProperty(name="", description="", default=(0, 0, 0), min=-sys.maxint, max=sys.maxint, soft_min=-sys.maxint, soft_max=sys.maxint, options={'ANIMATABLE'}, subtype='NONE', size=3, update=None, get=None, set=None):
'''Returns a new vector int property definition.
Arguments:
@name (string): Name used in the user interface.
@description (string): Text used for the tooltip and api documentation.
@default (sequence): sequence of ints the length of *size*.
@options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
@subtype (string): Enumerator in ['COLOR', 'TRANSLATION', 'DIRECTION', 'VELOCITY', 'ACCELERATION', 'MATRIX', 'EULER', 'QUATERNION', 'AXISANGLE', 'XYZ', 'COLOR_GAMMA', 'LAYER', 'NONE'].
@size (int): Vector dimensions in [1, and 32].
@update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
*Warning* there are no safety checks to avoid infinite recursion.
'''
pass
def PointerProperty(type="", description="", options={'ANIMATABLE'}, update=None):
'''Returns a new pointer property definition.
Arguments:
@type (class): A subclass of bpy.types.PropertyGroup.
@name (string): Name used in the user interface.
@description (string): Text used for the tooltip and api documentation.
@options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
@update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
*Warning* there are no safety checks to avoid infinite recursion.
'''
pass
def RemoveProperty(cls, attr):
'''Removes a dynamically defined property.
Arguments:
@cls (type): The class containing the property (must be a positional argument).
@attr (string): Property name (must be passed as a keyword).
Note: Typically this function doesn't need to be accessed directly.Instead use del cls.attr
'''
pass
def StringProperty(name="", description="", default="", maxlen=0, options={'ANIMATABLE'}, subtype='NONE', update=None, get=None, set=None):
'''Returns a new string property definition.
Arguments:
@name (string): Name used in the user interface.
@description (string): Text used for the tooltip and api documentation.
@options (set): Enumerator in ['HIDDEN', 'SKIP_SAVE', 'ANIMATABLE', 'LIBRARY_EDITABLE'].
@subtype (string): Enumerator in ['FILE_PATH', 'DIR_PATH', 'FILE_NAME', 'NONE'].
@update (function): function to be called when this value is modified. This function must take 2 values (self, context) and return None.
*Warning* there are no safety checks to avoid infinite recursion.
'''
pass
|
mit
|
kant/inasafe
|
safe/postprocessors/building_type_postprocessor.py
|
4
|
8986
|
# -*- coding: utf-8 -*-
"""**Postprocessors package.**
"""
__author__ = 'Marco Bernasocchi <[email protected]>'
__revision__ = '$Format:%H$'
__date__ = '10/10/2012'
__license__ = "GPL"
__copyright__ = 'Copyright 2012, Australia Indonesia Facility for '
__copyright__ += 'Disaster Reduction'
from collections import OrderedDict
import itertools
from safe.postprocessors.abstract_postprocessor import AbstractPostprocessor
from safe.utilities.i18n import tr
class BuildingTypePostprocessor(AbstractPostprocessor):
"""
Postprocessor that calculates building types related statistics.
See the _calculate_* methods for indicator-specific documentation.
See :mod:`safe.defaults` for default values information.
"""
def __init__(self):
"""
Constructor for the postprocessor class; it takes care of defining
self.impact_total.
"""
AbstractPostprocessor.__init__(self)
self.impact_total = None
self.impact_attrs = None
self.target_field = None
self.no_features = None
self.type_fields = None
self.valid_type_fields = None
self.fields_values = OrderedDict([
('Medical', ['Clinic/Doctor', 'Hospital']),
('Schools', ['School', 'University/College', ]),
('Places of worship', ['Place of Worship - Unitarian',
'Place of Worship - Islam',
'Place of Worship - Buddhist',
'Place of Worship']),
('Residential', ['Residential']),
('Government', ['Government']),
('Public Building', ['Public Building']),
('Fire Station', ['Fire Station']),
('Police Station', ['Police Station']),
('Supermarket', ['Supermarket']),
('Commercial', ['Commercial']),
('Industrial', ['Industrial']),
('Utility', ['Utility']),
('Sports Facility', ['Sports Facility']),
('Other', [])])
self.known_types = []
self._update_known_types()
def description(self):
"""Describe briefly what the post processor does.
:returns: The translated description.
:rtype: str
"""
return tr('Calculates building types related statistics.')
def setup(self, params):
"""concrete implementation it takes care of the needed parameters being
initialized
:param params: dict of parameters to pass to the post processor
"""
AbstractPostprocessor.setup(self, None)
if (self.impact_total is not None or
self.impact_attrs is not None or
self.target_field is not None or
self.valid_type_fields is not None or
self.type_fields is not None):
self._raise_error('clear needs to be called before setup')
self.impact_total = params['impact_total']
self.impact_attrs = params['impact_attrs']
self.target_field = params['target_field']
self.valid_type_fields = params['key_attribute']
# find which attribute field has to be used
self.type_fields = []
try:
for key in self.impact_attrs[0].iterkeys():
if key.lower() in self.valid_type_fields:
self.type_fields.append(key)
except IndexError:
pass
if len(self.type_fields) == 0:
self.type_fields = None
self.no_features = False
# there are no features in this postprocessing polygon
if self.impact_attrs == []:
self.no_features = True
def process(self):
"""Concrete implementation that performs all indicators calculations.
"""
AbstractPostprocessor.process(self)
if (self.impact_total is None or
self.impact_attrs is None or
self.target_field is None):
self._log_message('%s not all params have been correctly '
'initialized, setup needs to be called before '
'process. Skipping this postprocessor'
% self.__class__.__name__)
else:
self._calculate_total()
for title, field_values in self.fields_values.iteritems():
self._calculate_type(title, field_values)
def clear(self):
"""concrete implementation that ensures needed parameters are cleared.
"""
AbstractPostprocessor.clear(self)
self.impact_total = None
self.impact_attrs = None
self.target_field = None
self.type_fields = None
self.valid_type_fields = None
def _calculate_total(self):
"""Indicator that shows total affected buildings.
This indicator reports the total affected buildings in this region.
"""
name = tr('Total Affected')
result = 0
if self.type_fields is not None:
try:
for building in self.impact_attrs:
field_value = building[self.target_field]
if isinstance(field_value, basestring):
if field_value != 'Not Affected':
result += 1
else:
if field_value:
# See issue #2258. Since we are only working with
# one building at a time we should only add 1.
result += 1
result = int(round(result))
except (ValueError, KeyError):
result = self.NO_DATA_TEXT
else:
if self.no_features:
result = 0
else:
result = self.NO_DATA_TEXT
self._append_result(name, result)
def _calculate_type(self, title, fields_values):
"""Indicator that shows total population.
this indicator reports the building by type. the logic is:
- look for the fields that occurs with a name included in
self.valid_type_fields
- look in those fields for any of the values of self.fields_values
- if a record has one of the valid fields with one of the valid
fields_values then it is considered affected
"""
title = tr(title)
result = 0
if self.type_fields is not None:
try:
for building in self.impact_attrs:
for type_field in self.type_fields:
building_type = building[type_field]
if building_type in fields_values:
field_value = building[self.target_field]
if isinstance(field_value, basestring):
if field_value != 'Not Affected':
result += 1
else:
if field_value:
# See issue #2258. Since we are only
# working with one building at a time we
# should only add 1.
result += 1
break
elif self._is_unknown_type(building_type):
self._update_known_types(building_type)
result = int(round(result))
except (ValueError, KeyError):
result = self.NO_DATA_TEXT
else:
if self.no_features:
result = 0
else:
result = self.NO_DATA_TEXT
self._append_result(title, result)
def _is_unknown_type(self, building_type):
"""check if the given type is in any of the known_types dictionary
:param building_type: the name of the type
:type building_type: str
:returns: Flag indicating if the building_type is unknown
:rtype: boolean
"""
is_unknown = building_type not in self.known_types
return is_unknown
def _update_known_types(self, building_type=None):
"""
Adds a building_type (if passed) and updates the known_types list
this is called each time a new unknown type is found and is needed so
that self._is_unknown_type (which is called many times) to perform
only a simple 'in' check
:param building_type: the name of the type to add to the known types
:type building_type: str
"""
if building_type is not None:
self.fields_values['Other'].append(building_type)
# flatten self.fields_values.values()
# using http://stackoverflow.com/questions/5286541/#5286614
self.known_types = list(itertools.chain.from_iterable(
itertools.repeat(x, 1) if isinstance(x, str) else x for x in
self.fields_values.values()))
|
gpl-3.0
|
Eivindbergman/Skrapa
|
libs/chardet/big5prober.py
|
291
|
1757
|
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import Big5DistributionAnalysis
from .mbcssm import BIG5_SM_MODEL
class Big5Prober(MultiByteCharSetProber):
def __init__(self):
super(Big5Prober, self).__init__()
self.coding_sm = CodingStateMachine(BIG5_SM_MODEL)
self.distribution_analyzer = Big5DistributionAnalysis()
self.reset()
@property
def charset_name(self):
return "Big5"
@property
def language(self):
return "Chinese"
|
gpl-3.0
|
titienmiami/mmc.repository
|
plugin.video.tvalacarta/servers/bitshare.py
|
1
|
1991
|
# -*- coding: utf-8 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Connector for bitshare
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def test_video_exists( page_url ):
logger.info("[bitshare.py] test_video_exists(page_url='%s')" % page_url)
# Exists: http://bitshare.com/files/v1ehsvu3/Nikita.S02E15.HDTV.XviD-ASAP.avi.html
# Does not exist:
data = scrapertools.cache_page(page_url)
patron = '<h1>Descargando([^<]+)</h1>'
matches = re.compile(patron,re.DOTALL).findall(data)
if len(matches)>0:
return True,""
else:
patron = '<h1>(Error - Archivo no disponible)</h1>'
matches = re.compile(patron,re.DOTALL).findall(data)
if len(matches)>0:
return False,"File not found"
return True,""
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
logger.info("[bitshare.py] get_video_url(page_url='%s')" % page_url)
video_urls = []
return video_urls
# Finds this server's videos in the given text
def find_videos(data):
encontrados = set()
devuelve = []
# http://bitshare.com/files/tn74w9tm/Rio.2011.DVDRip.LATiNO.XviD.by.Glad31.avi.html
patronvideos = '(http://bitshare.com/files/[^/]+/.*?\.html)'
logger.info("[bitshare.py] find_videos #"+patronvideos+"#")
matches = re.compile(patronvideos,re.DOTALL).findall(data)
for match in matches:
titulo = "[bitshare]"
url = match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append( [ titulo , url , 'bitshare' ] )
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
return devuelve
|
gpl-2.0
|
QinerTech/QinerApps
|
openerp/tools/mail.py
|
1
|
31289
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from lxml import etree
import cgi
import logging
import lxml.html
import lxml.html.clean as clean
import random
import re
import socket
import threading
import time
from email.utils import getaddresses, formataddr
import openerp
from openerp.loglevels import ustr
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
#----------------------------------------------------------
# HTML Sanitizer
#----------------------------------------------------------
tags_to_kill = ["script", "head", "meta", "title", "link", "style", "frame", "iframe", "base", "object", "embed"]
tags_to_remove = ['html', 'body']
whitelist_classes = set(['WordSection1', 'MsoNormal', 'SkyDrivePlaceholder', 'oe_mail_expand', 'stopSpelling'])
# allow new semantic HTML5 tags
allowed_tags = clean.defs.tags | frozenset('article section header footer hgroup nav aside figure main'.split() + [etree.Comment])
safe_attrs = clean.defs.safe_attrs | frozenset(
['style',
'data-oe-model', 'data-oe-id', 'data-oe-field', 'data-oe-type', 'data-oe-expression', 'data-oe-translation-id', 'data-oe-nodeid',
'data-publish', 'data-id', 'data-res_id', 'data-member_id', 'data-view-id'
])
class _Cleaner(clean.Cleaner):
def allow_element(self, el):
if el.tag == 'object' and el.get('type') == "image/svg+xml":
return True
return super(_Cleaner, self).allow_element(el)
def html_sanitize(src, silent=True, strict=False, strip_style=False, strip_classes=False):
if not src:
return src
src = ustr(src, errors='replace')
logger = logging.getLogger(__name__ + '.html_sanitize')
# html encode email tags
part = re.compile(r"(<(([^a<>]|a[^<>\s])[^<>]*)@[^<>]+>)", re.IGNORECASE | re.DOTALL)
src = part.sub(lambda m: cgi.escape(m.group(1)), src)
# html encode mako tags <% ... %> to decode them later and keep them alive, otherwise they are stripped by the cleaner
src = src.replace('<%', cgi.escape('<%'))
src = src.replace('%>', cgi.escape('%>'))
kwargs = {
'page_structure': True,
'style': strip_style, # True = remove style tags/attrs
'forms': True, # remove form tags
'remove_unknown_tags': False,
'allow_tags': allowed_tags,
'comments': False,
'processing_instructions': False
}
if etree.LXML_VERSION >= (2, 3, 1):
# kill_tags attribute has been added in version 2.3.1
kwargs.update({
'kill_tags': tags_to_kill,
'remove_tags': tags_to_remove,
})
else:
kwargs['remove_tags'] = tags_to_kill + tags_to_remove
if strict:
if etree.LXML_VERSION >= (3, 1, 0):
# lxml < 3.1.0 does not allow specifying safe_attrs. We keep all attributes in order to keep "style"
if strip_classes:
current_safe_attrs = safe_attrs - frozenset(['class'])
else:
current_safe_attrs = safe_attrs
kwargs.update({
'safe_attrs_only': True,
'safe_attrs': current_safe_attrs,
})
else:
kwargs['safe_attrs_only'] = False # keep oe-data attributes + style
kwargs['frames'] = False  # do not remove frames (embedded video in CMS blogs)
try:
# some corner cases make the parser crash (such as <SCRIPT/XSS SRC=\"http://ha.ckers.org/xss.js\"></SCRIPT> in test_mail)
cleaner = _Cleaner(**kwargs)
cleaned = cleaner.clean_html(src)
# MAKO compatibility: $, { and } inside quotes are escaped, preventing correct mako execution
cleaned = cleaned.replace('%24', '$')
cleaned = cleaned.replace('%7B', '{')
cleaned = cleaned.replace('%7D', '}')
cleaned = cleaned.replace('%20', ' ')
cleaned = cleaned.replace('%5B', '[')
cleaned = cleaned.replace('%5D', ']')
cleaned = cleaned.replace('&lt;%', '<%')
cleaned = cleaned.replace('%&gt;', '%>')
except etree.ParserError, e:
if 'empty' in str(e):
return ""
if not silent:
raise
logger.warning('ParserError obtained when sanitizing %r', src, exc_info=True)
cleaned = '<p>ParserError when sanitizing</p>'
except Exception:
if not silent:
raise
logger.warning('unknown error obtained when sanitizing %r', src, exc_info=True)
cleaned = '<p>Unknown error when sanitizing</p>'
# this is ugly, but lxml/etree tostring want to put everything in a 'div' that breaks the editor -> remove that
if cleaned.startswith('<div>') and cleaned.endswith('</div>'):
cleaned = cleaned[5:-6]
return cleaned
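# Usage sketch (illustrative, not part of the original module; exact output
# may vary slightly across lxml versions). <script> is in tags_to_kill, so it
# is dropped along with its content, and the wrapping <div> added by lxml is
# stripped again by the code above:
#   html_sanitize('<div>Hello <script>alert(1)</script>world</div>')
#   -> u'Hello world'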
#----------------------------------------------------------
# HTML Cleaner
#----------------------------------------------------------
def html_email_clean(html, remove=False, shorten=False, max_length=300, expand_options=None,
protect_sections=False):
""" html_email_clean: clean the html by doing the following steps:
- try to strip email quotes, by removing blockquotes or having some client-
specific heuristics
- try to strip signatures
- shorten the html to a maximum number of characters if requested
Some specific use case:
- MsOffice: ``div.style = border-top:solid;`` delimits the beginning of
a quote; detected by finding WordSection1 or MsoNormal
- Hotmail: ``hr.stopSpelling`` delimits the beginning of a quote; detect
Hotmail by finding ``SkyDrivePlaceholder``
:param string html: sanitized html; tags like html or head should not
be present in the html string. This method therefore
takes as input html code coming from a sanitized source,
like fields.html.
:param boolean remove: remove the html code that is unwanted; otherwise it
is only flagged and tagged
:param boolean shorten: shorten the html; any excess content will
be flagged for removal
:param int max_length: if shortening, maximum number of characters before
shortening
:param dict expand_options: options for the read more link when shortening
the content.The used keys are the following:
- oe_expand_container_tag: class applied to the
container of the whole read more link
- oe_expand_container_class: class applied to the
link container (default: oe_mail_expand)
- oe_expand_container_content: content of the
container (default: ...)
- oe_expand_separator_node: optional separator, like
adding ... <br /><br /> <a ...>read more</a> (default: void)
- oe_expand_a_href: href of the read more link itself
(default: #)
- oe_expand_a_class: class applied to the <a> containing
the link itself (default: oe_mail_expand)
- oe_expand_a_content: content of the <a> (default: read more)
The formatted read more link is the following:
<cont_tag class="oe_expand_container_class">
oe_expand_container_content
if expand_options.get('oe_expand_separator_node'):
<oe_expand_separator_node/>
<a href="oe_expand_a_href" class="oe_expand_a_class">
oe_expand_a_content
</a>
</cont_tag>
"""
def _replace_matching_regex(regex, source, replace=''):
""" Replace all matching expressions in source by replace """
if not source:
return source
dest = ''
idx = 0
for item in re.finditer(regex, source):
dest += source[idx:item.start()] + replace
idx = item.end()
dest += source[idx:]
return dest
def _create_node(tag, text, tail=None, attrs={}):
new_node = etree.Element(tag)
new_node.text = text
new_node.tail = tail
for key, val in attrs.iteritems():
new_node.set(key, val)
return new_node
def _insert_new_node(node, index, new_node_tag, new_node_text, new_node_tail=None, new_node_attrs={}):
new_node = _create_node(new_node_tag, new_node_text, new_node_tail, new_node_attrs)
node.insert(index, new_node)
return new_node
def _tag_matching_regex_in_text(regex, node, new_node_tag='span', new_node_attrs={}):
text = node.text or ''
if not re.search(regex, text):
return
cur_node = node
node.text = ''
idx, iteration = 0, 0
for item in re.finditer(regex, text):
if iteration == 0:
cur_node.text = text[idx:item.start()]
else:
_insert_new_node(node, (iteration - 1) * 2 + 1, new_node_tag, text[idx:item.start()])
new_node = _insert_new_node(node, iteration * 2, new_node_tag, text[item.start():item.end()], None, new_node_attrs)
cur_node = new_node
idx = item.end()
iteration += 1
new_node = _insert_new_node(node, -1, new_node_tag, text[idx:] + (cur_node.tail or ''), None, {})
def _truncate_node(node, position, simplify_whitespaces=True):
""" Truncate a node text at a given position. This algorithm will shorten
at the end of the word whose ending character exceeds position.
:param bool simplify_whitespaces: whether to try to count all successive
whitespaces as one character. This
option should not be True when trying
to keep 'pre' consistency.
"""
if node.text is None:
node.text = ''
truncate_idx = -1
if simplify_whitespaces:
cur_char_nbr = 0
word = None
node_words = node.text.strip(' \t\r\n').split()
for word in node_words:
cur_char_nbr += len(word)
if cur_char_nbr >= position:
break
if word:
truncate_idx = node.text.find(word) + len(word)
else:
truncate_idx = position
if truncate_idx == -1 or truncate_idx > len(node.text):
truncate_idx = len(node.text)
# compose new text bits
innertext = node.text[0:truncate_idx]
outertext = node.text[truncate_idx:]
node.text = innertext
# create <span> ... <a href="#">read more</a></span> node
read_more_node = _create_node(
expand_options.get('oe_expand_container_tag', 'span'),
expand_options.get('oe_expand_container_content', ' ... '),
None,
{'class': expand_options.get('oe_expand_container_class', 'oe_mail_expand')}
)
if expand_options.get('oe_expand_separator_node'):
read_more_separator_node = _create_node(
expand_options.get('oe_expand_separator_node'),
'',
None,
{}
)
read_more_node.append(read_more_separator_node)
read_more_link_node = _create_node(
'a',
expand_options.get('oe_expand_a_content', _('read more')),
None,
{
'href': expand_options.get('oe_expand_a_href', '#'),
'class': expand_options.get('oe_expand_a_class', 'oe_mail_expand'),
}
)
read_more_node.append(read_more_link_node)
# create outertext node
overtext_node = _create_node('span', outertext)
# tag node
overtext_node.set('in_overlength', '1')
# add newly created nodes in dom
node.append(read_more_node)
node.append(overtext_node)
if expand_options is None:
expand_options = {}
whitelist_classes_local = whitelist_classes.copy()
if expand_options.get('oe_expand_container_class'):
whitelist_classes_local.add(expand_options.get('oe_expand_container_class'))
if expand_options.get('oe_expand_a_class'):
whitelist_classes_local.add(expand_options.get('oe_expand_a_class'))
if not html or not isinstance(html, basestring):
return html
html = ustr(html)
# Pre processing
# ------------------------------------------------------------
# TDE TODO: --- MAIL ORIGINAL ---: '[\-]{4,}([^\-]*)[\-]{4,}'
# html: remove encoding attribute inside tags
doctype = re.compile(r'(<[^>]*\s)(encoding=(["\'][^"\']*?["\']|[^\s\n\r>]+)(\s[^>]*|/)?>)', re.IGNORECASE | re.DOTALL)
html = doctype.sub(r"", html)
# html: ClEditor seems to love using <div><br /><div> -> replace with <br />
br_div_tags = re.compile(r'(<div>\s*<br\s*\/>\s*<\/div>)', re.IGNORECASE)
inner_html = _replace_matching_regex(br_div_tags, html, '<br />')
# form a tree
root = lxml.html.fromstring(inner_html)
if not len(root) and root.text is None and root.tail is None:
inner_html = '<div>%s</div>' % inner_html
root = lxml.html.fromstring(inner_html)
quote_tags = re.compile(r'(\n(>)+[^\n\r]*)')
signature = re.compile(r'(^[-]{2,}[\s]?[\r\n]{1,2}[\s\S]+)', re.M)
for node in root.iter():
# remove all tails and replace them by a span element, because managing text and tails can be a pain in the ass
if node.tail:
tail_node = _create_node('span', node.tail)
node.tail = None
node.addnext(tail_node)
# form node and tag text-based quotes and signature
_tag_matching_regex_in_text(quote_tags, node, 'span', {'text_quote': '1'})
_tag_matching_regex_in_text(signature, node, 'span', {'text_signature': '1'})
# Processing
# ------------------------------------------------------------
# tree: tag nodes
# signature_begin = False # try dynamic signature recognition
quoted = False
quote_begin = False
overlength = False
replace_class = False
overlength_section_id = None
overlength_section_count = 0
cur_char_nbr = 0
for node in root.iter():
# comments do not need processing
# note: bug in node.get(value, default) for HtmlComments, default never returned
if node.tag == etree.Comment:
continue
# do not take into account multiple spaces that are displayed as max 1 space in html
node_text = ' '.join((node.text and node.text.strip(' \t\r\n') or '').split())
# remove unwanted classes from node
if node.get('class'):
sanitize_classes = []
for _class in node.get('class').split(' '):
if _class in whitelist_classes_local:
sanitize_classes.append(_class)
else:
sanitize_classes.append('cleaned_'+_class)
replace_class = True
node.set('class', ' '.join(sanitize_classes))
# root: try to tag the client used to write the html
if 'WordSection1' in node.get('class', '') or 'MsoNormal' in node.get('class', ''):
root.set('msoffice', '1')
if 'SkyDrivePlaceholder' in node.get('class', '') or 'SkyDrivePlaceholder' in node.get('id', ''):
root.set('hotmail', '1')
# protect sections by tagging section limits and blocks contained inside sections, using an increasing id to re-find them later
if node.tag == 'section':
overlength_section_count += 1
node.set('section_closure', str(overlength_section_count))
if node.getparent() is not None and (node.getparent().get('section_closure') or node.getparent().get('section_inner')):
node.set('section_inner', str(overlength_section_count))
# state of the parsing: flag quotes and tails to remove
if quote_begin:
node.set('in_quote', '1')
node.set('tail_remove', '1')
# state of the parsing: flag when being in over-length content, depending on section content if defined (only when having protect_sections)
if overlength:
if not overlength_section_id or int(node.get('section_inner', overlength_section_count + 1)) > overlength_section_count:
node.set('in_overlength', '1')
node.set('tail_remove', '1')
# find quote in msoffice / hotmail / blockquote / text quote and signatures
if root.get('msoffice') and node.tag == 'div' and 'border-top:solid' in node.get('style', ''):
quote_begin = True
node.set('in_quote', '1')
node.set('tail_remove', '1')
if root.get('hotmail') and node.tag == 'hr' and ('stopSpelling' in node.get('class', '') or 'stopSpelling' in node.get('id', '')):
quote_begin = True
node.set('in_quote', '1')
node.set('tail_remove', '1')
if node.tag == 'blockquote' or node.get('text_quote') or node.get('text_signature'):
# here no quote_begin because we want to be able to remove some quoted
# text without removing all the remaining context
quoted = True
node.set('in_quote', '1')
if node.getparent() is not None and node.getparent().get('in_quote'):
# inside a block of removed text but not in quote_begin (see above)
quoted = True
node.set('in_quote', '1')
# shorten:
# if protect section:
# 1/ find the first parent not being inside a section
# 2/ add the read more link
# else:
# 1/ truncate the text at the next available space
# 2/ create a 'read more' node, next to current node
# 3/ add the truncated text in a new node, next to 'read more' node
node_text = (node.text or '').strip().strip('\n').strip()
if shorten and not overlength and cur_char_nbr + len(node_text) > max_length:
node_to_truncate = node
while node_to_truncate.getparent() is not None:
if node_to_truncate.get('in_quote'):
node_to_truncate = node_to_truncate.getparent()
elif protect_sections and (node_to_truncate.getparent().get('section_inner') or node_to_truncate.getparent().get('section_closure')):
node_to_truncate = node_to_truncate.getparent()
overlength_section_id = node_to_truncate.get('section_closure')
else:
break
overlength = True
node_to_truncate.set('truncate', '1')
if node_to_truncate == node:
node_to_truncate.set('truncate_position', str(max_length - cur_char_nbr))
else:
node_to_truncate.set('truncate_position', str(len(node.text or '')))
cur_char_nbr += len(node_text)
# Tree modification
# ------------------------------------------------------------
for node in root.iter():
if node.get('truncate'):
_truncate_node(node, int(node.get('truncate_position', '0')), node.tag != 'pre')
# Post processing
# ------------------------------------------------------------
to_remove = []
for node in root.iter():
if node.get('in_quote') or node.get('in_overlength'):
# copy the node tail into parent text
if node.tail and not node.get('tail_remove'):
parent = node.getparent()
parent.tail = node.tail + (parent.tail or '')
to_remove.append(node)
if node.get('tail_remove'):
node.tail = ''
# clean node
for attribute_name in ['in_quote', 'tail_remove', 'in_overlength', 'msoffice', 'hotmail', 'truncate', 'truncate_position']:
node.attrib.pop(attribute_name, None)
for node in to_remove:
if remove:
node.getparent().remove(node)
else:
if not expand_options.get('oe_expand_a_class', 'oe_mail_expand') in node.get('class', ''): # trick: read more link should be displayed even if it's in overlength
node_class = node.get('class', '') + ' oe_mail_cleaned'
node.set('class', node_class)
if not overlength and not quote_begin and not quoted and not replace_class:
return html
# html: \n that were tail of elements have been encapsulated into <span> -> back to \n
html = etree.tostring(root, pretty_print=False, encoding='UTF-8')
linebreaks = re.compile(r'<span[^>]*>([\s]*[\r\n]+[\s]*)<\/span>', re.IGNORECASE | re.DOTALL)
html = _replace_matching_regex(linebreaks, html, '\n')
return ustr(html)
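# Usage sketch (illustrative, not part of the original module). A plain
# <blockquote> is flagged as quoted text; with remove=True it is dropped,
# otherwise it is only tagged with the 'oe_mail_cleaned' class:
#   html_email_clean('<div>Hi<blockquote>old quoted reply</blockquote></div>', remove=True)
#   -> u'<div>Hi</div>'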
#----------------------------------------------------------
# HTML/Text management
#----------------------------------------------------------
def html_keep_url(text):
""" Transform the url into clickable link with <a/> tag """
idx = 0
final = ''
link_tags = re.compile(r"""(?<!["'])((ftp|http|https):\/\/(\w+:{0,1}\w*@)?([^\s<"']+)(:[0-9]+)?(\/|\/([^\s<"']))?)(?![^\s<"']*["']|[^\s<"']*</a>)""")
for item in re.finditer(link_tags, text):
final += text[idx:item.start()]
final += '<a href="%s" target="_blank">%s</a>' % (item.group(0), item.group(0))
idx = item.end()
final += text[idx:]
return final
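# Usage sketch (illustrative): bare URLs become links; URLs already inside
# quotes or an <a> tag are skipped thanks to the look-around assertions:
#   html_keep_url('see http://example.com for details')
#   -> 'see <a href="http://example.com" target="_blank">http://example.com</a> for details'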
def html2plaintext(html, body_id=None, encoding='utf-8'):
""" From an HTML text, convert the HTML to plain text.
If @param body_id is provided then this is the tag where the
body (not necessarily <body>) starts.
"""
## (c) Fry-IT, www.fry-it.com, 2007
## <[email protected]>
## download here: http://www.peterbe.com/plog/html2plaintext
html = ustr(html)
if not html:
return ''
tree = etree.fromstring(html, parser=etree.HTMLParser())
if body_id is not None:
source = tree.xpath('//*[@id=%s]' % (body_id,))
else:
source = tree.xpath('//body')
if len(source):
tree = source[0]
url_index = []
i = 0
for link in tree.findall('.//a'):
url = link.get('href')
if url:
i += 1
link.tag = 'span'
link.text = '%s [%s]' % (link.text, i)
url_index.append(url)
html = ustr(etree.tostring(tree, encoding=encoding))
# \r char is converted into &#13;, must remove it
html = html.replace('&#13;', '')
html = html.replace('<strong>', '*').replace('</strong>', '*')
html = html.replace('<b>', '*').replace('</b>', '*')
html = html.replace('<h3>', '*').replace('</h3>', '*')
html = html.replace('<h2>', '**').replace('</h2>', '**')
html = html.replace('<h1>', '**').replace('</h1>', '**')
html = html.replace('<em>', '/').replace('</em>', '/')
html = html.replace('<tr>', '\n')
html = html.replace('</p>', '\n')
html = re.sub('<br\s*/?>', '\n', html)
html = re.sub('<.*?>', ' ', html)
html = html.replace(' ' * 2, ' ')
html = html.replace('&gt;', '>')
html = html.replace('&lt;', '<')
html = html.replace('&amp;', '&')
# strip all lines
html = '\n'.join([x.strip() for x in html.splitlines()])
html = html.replace('\n' * 2, '\n')
for i, url in enumerate(url_index):
if i == 0:
html += '\n\n'
html += ustr('[%s] %s\n') % (i + 1, url)
return html
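# Usage sketch (illustrative; exact whitespace may differ slightly):
#   html2plaintext('<div><p>Hello <a href="http://a.b">link</a></p><p>World</p></div>')
#   -> u'Hello link [1]\nWorld\n\n[1] http://a.b\n'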
def plaintext2html(text, container_tag=False):
""" Convert plaintext into html. Content of the text is escaped to manage
html entities, using cgi.escape().
- all \n,\r are replaced by <br />
- enclose content into <p>
- convert url into clickable link
- 2 or more consecutive <br /> are considered as paragraph breaks
:param string container_tag: container of the html; by default the
content is embedded into a <div>
"""
text = cgi.escape(ustr(text))
# 1. replace \n and \r
text = text.replace('\n', '<br/>')
text = text.replace('\r', '<br/>')
# 2. clickable links
text = html_keep_url(text)
# 3-4: form paragraphs
idx = 0
final = '<p>'
br_tags = re.compile(r'(([<]\s*[bB][rR]\s*\/?[>]\s*){2,})')
for item in re.finditer(br_tags, text):
final += text[idx:item.start()] + '</p><p>'
idx = item.end()
final += text[idx:] + '</p>'
# 5. container
if container_tag:
final = '<%s>%s</%s>' % (container_tag, final, container_tag)
return ustr(final)
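# Usage sketch (illustrative): a blank line becomes a paragraph break and the
# result is wrapped in the requested container tag:
#   plaintext2html('hello\n\nworld', container_tag='div')
#   -> u'<div><p>hello</p><p>world</p></div>'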
def append_content_to_html(html, content, plaintext=True, preserve=False, container_tag=False):
""" Append extra content at the end of an HTML snippet, trying
to locate the end of the HTML document (</body>, </html>, or
EOF), and converting the provided content in html unless ``plaintext``
is False.
Content conversion can be done in two ways:
- wrapping it into a pre (preserve=True)
- use plaintext2html (preserve=False, using container_tag to wrap the
whole content)
A side-effect of this method is to coerce all HTML tags to
lowercase in ``html``, and strip enclosing <html> or <body> tags in
content if ``plaintext`` is False.
:param str html: html tagsoup (doesn't have to be XHTML)
:param str content: extra content to append
:param bool plaintext: whether content is plaintext and should
be wrapped in a <pre/> tag.
:param bool preserve: if content is plaintext, wrap it into a <pre>
instead of converting it into html
"""
html = ustr(html)
if plaintext and preserve:
content = u'\n<pre>%s</pre>\n' % ustr(content)
elif plaintext:
content = '\n%s\n' % plaintext2html(content, container_tag)
else:
content = re.sub(r'(?i)(</?(?:html|body|head|!\s*DOCTYPE)[^>]*>)', '', content)
content = u'\n%s\n' % ustr(content)
# Force all tags to lowercase
html = re.sub(r'(</?)\W*(\w+)([ >])',
lambda m: '%s%s%s' % (m.group(1), m.group(2).lower(), m.group(3)), html)
insert_location = html.find('</body>')
if insert_location == -1:
insert_location = html.find('</html>')
if insert_location == -1:
return '%s%s' % (html, content)
return '%s%s%s' % (html[:insert_location], content, html[insert_location:])
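# Usage sketch (illustrative): plaintext content is converted through
# plaintext2html and spliced in just before </body>:
#   append_content_to_html(u'<html><body>A</body></html>', u'B')
#   -> u'<html><body>A\n<p>B</p>\n</body></html>'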
#----------------------------------------------------------
# Emails
#----------------------------------------------------------
# matches any email in a body of text
email_re = re.compile(r"""([a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,63})""", re.VERBOSE)
# matches a string containing only one email
single_email_re = re.compile(r"""^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,63}$""", re.VERBOSE)
res_re = re.compile(r"\[([0-9]+)\]", re.UNICODE)
command_re = re.compile("^Set-([a-z]+) *: *(.+)$", re.I + re.UNICODE)
# Updated in 7.0 to match the model name as well
# Typical form of references is <timestamp-openerp-record_id-model_name@domain>
# group(1) = the record ID ; group(2) = the model (if any) ; group(3) = the domain
reference_re = re.compile("<.*-open(?:object|erp)-(\\d+)(?:-([\w.]+))?[^>]*@([^>]*)>", re.UNICODE)
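# Illustrative match (hedged example values): for the reference
#   '<1467121011.123-openerp-42-res.partner@example.com>'
# group(1) == '42', group(2) == 'res.partner', group(3) == 'example.com'.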
def generate_tracking_message_id(res_id):
"""Returns a string that can be used in the Message-ID RFC822 header field
Used to track the replies related to a given object thanks to the "In-Reply-To"
or "References" fields that Mail User Agents will set.
"""
try:
rnd = random.SystemRandom().random()
except NotImplementedError:
rnd = random.random()
rndstr = ("%.15f" % rnd)[2:]
return "<%.15f.%s-openerp-%s@%s>" % (time.time(), rndstr, res_id, socket.gethostname())
def email_send(email_from, email_to, subject, body, email_cc=None, email_bcc=None, reply_to=False,
attachments=None, message_id=None, references=None, openobject_id=False, debug=False, subtype='plain', headers=None,
smtp_server=None, smtp_port=None, ssl=False, smtp_user=None, smtp_password=None, cr=None, uid=None):
"""Low-level function for sending an email (deprecated).
:deprecate: since OpenERP 6.1, please use ir.mail_server.send_email() instead.
:param email_from: A string used to fill the `From` header, if falsy,
config['email_from'] is used instead. Also used for
the `Reply-To` header if `reply_to` is not provided
:param email_to: a sequence of addresses to send the mail to.
"""
# If not cr, get cr from current thread database
local_cr = None
if not cr:
db_name = getattr(threading.currentThread(), 'dbname', None)
if db_name:
local_cr = cr = openerp.registry(db_name).cursor()
else:
raise Exception("No database cursor found, please pass one explicitly")
# Send Email
try:
mail_server_pool = openerp.registry(cr.dbname)['ir.mail_server']
res = False
# Pack Message into MIME Object
email_msg = mail_server_pool.build_email(email_from, email_to, subject, body, email_cc, email_bcc, reply_to,
attachments, message_id, references, openobject_id, subtype, headers=headers)
res = mail_server_pool.send_email(cr, uid or 1, email_msg, mail_server_id=None,
smtp_server=smtp_server, smtp_port=smtp_port, smtp_user=smtp_user, smtp_password=smtp_password,
smtp_encryption=('ssl' if ssl else None), smtp_debug=debug)
except Exception:
_logger.exception("tools.email_send failed to deliver email")
return False
finally:
if local_cr:
cr.close()
return res
def email_split(text):
""" Return a list of the email addresses found in ``text`` """
if not text:
return []
return [addr[1] for addr in getaddresses([text])
# getaddresses() returns '' when email parsing fails, and
# sometimes returns emails without at least '@'. The '@'
# is strictly required in RFC2822's `addr-spec`.
if addr[1]
if '@' in addr[1]]
def email_split_and_format(text):
""" Return a list of email addresses found in ``text``, formatted using
formataddr. """
if not text:
return []
return [formataddr((addr[0], addr[1])) for addr in getaddresses([text])
# getaddresses() returns '' when email parsing fails, and
# sometimes returns emails without at least '@'. The '@'
# is strictly required in RFC2822's `addr-spec`.
if addr[1]
if '@' in addr[1]]
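# Usage sketch (illustrative): entries that fail parsing or lack an '@' are
# filtered out; email_split_and_format keeps the display name:
#   email_split('"Foo" <foo@example.com>, bar@example.com, bad-address')
#   -> ['foo@example.com', 'bar@example.com']
#   email_split_and_format('"Foo" <foo@example.com>')
#   -> ['Foo <foo@example.com>']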
|
gpl-3.0
|
a-parhom/edx-platform
|
common/lib/capa/capa/capa_problem.py
|
10
|
50215
|
#
# File: capa/capa_problem.py
#
# Nomenclature:
#
# A capa Problem is a collection of text and capa Response questions.
# Each Response may have one or more Input entry fields.
# The capa problem may include a solution.
#
"""
Main module which shows problems (of "capa" type).
This is used by capa_module.
"""
import logging
import os.path
import re
from collections import OrderedDict
from copy import deepcopy
from datetime import datetime
from xml.sax.saxutils import unescape
from lxml import etree
from pytz import UTC
import capa.customrender as customrender
import capa.inputtypes as inputtypes
import capa.responsetypes as responsetypes
import capa.xqueue_interface as xqueue_interface
from capa.correctmap import CorrectMap
from capa.safe_exec import safe_exec
from capa.util import contextualize_text, convert_files_to_filenames
from openedx.core.djangolib.markup import HTML
from xmodule.stringify import stringify_children
# extra things displayed after "show answers" is pressed
solution_tags = ['solution']
# fully accessible capa input types
ACCESSIBLE_CAPA_INPUT_TYPES = [
'checkboxgroup',
'radiogroup',
'choicegroup',
'optioninput',
'textline',
'formulaequationinput',
'textbox',
]
# these get captured as student responses
response_properties = ["codeparam", "responseparam", "answer", "openendedparam"]
# special problem tags which should be turned into innocuous HTML
html_transforms = {
'problem': {'tag': 'div'},
'text': {'tag': 'span'},
'math': {'tag': 'span'},
}
# These should be removed from HTML output, including all subelements
html_problem_semantics = [
"codeparam",
"responseparam",
"answer",
"script",
"hintgroup",
"openendedparam",
"openendedrubric",
]
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# main class for this module
class LoncapaSystem(object):
"""
An encapsulation of resources needed from the outside.
These interfaces are collected here so that a caller of LoncapaProblem
can provide these resources however make sense for their environment, and
this code can remain independent.
Attributes:
i18n: an object implementing the `gettext.Translations` interface so
that we can use `.ugettext` to localize strings.
See :class:`ModuleSystem` for documentation of other attributes.
"""
def __init__( # pylint: disable=invalid-name
self,
ajax_url,
anonymous_student_id,
cache,
can_execute_unsafe_code,
get_python_lib_zip,
DEBUG, # pylint: disable=invalid-name
filestore,
i18n,
node_path,
render_template,
seed, # Why do we do this if we have self.seed?
STATIC_URL, # pylint: disable=invalid-name
xqueue,
matlab_api_key=None
):
self.ajax_url = ajax_url
self.anonymous_student_id = anonymous_student_id
self.cache = cache
self.can_execute_unsafe_code = can_execute_unsafe_code
self.get_python_lib_zip = get_python_lib_zip
self.DEBUG = DEBUG # pylint: disable=invalid-name
self.filestore = filestore
self.i18n = i18n
self.node_path = node_path
self.render_template = render_template
self.seed = seed # Why do we do this if we have self.seed?
self.STATIC_URL = STATIC_URL # pylint: disable=invalid-name
self.xqueue = xqueue
self.matlab_api_key = matlab_api_key
class LoncapaProblem(object):
"""
Main class for capa Problems.
"""
def __init__(self, problem_text, id, capa_system, capa_module, # pylint: disable=redefined-builtin
state=None, seed=None, minimal_init=False, extract_tree=True):
"""
Initializes capa Problem.
Arguments:
problem_text (string): xml defining the problem.
id (string): identifier for this problem, often a filename (no spaces).
capa_system (LoncapaSystem): LoncapaSystem instance which provides OS,
rendering, user context, and other resources.
capa_module: instance needed to access runtime/logging
state (dict): containing the following keys:
- `seed` (int) random number generator seed
- `student_answers` (dict) maps input id to the stored answer for that input
- 'has_saved_answers' (Boolean) True if the answer has been saved since last submit.
- `correct_map` (CorrectMap) a map of each input to their 'correctness'
- `done` (bool) indicates whether or not this problem is considered done
- `input_state` (dict) maps input_id to a dictionary that holds the state for that input
seed (int): random number generator seed.
minimal_init (bool): whether to skip pre-processing student answers
extract_tree (bool): whether to parse the problem XML and store the HTML
"""
## Initialize class variables from state
self.do_reset()
self.problem_id = id
self.capa_system = capa_system
self.capa_module = capa_module
state = state or {}
# Set seed according to the following priority:
# 1. Contained in problem's state
# 2. Passed into capa_problem via constructor
self.seed = state.get('seed', seed)
assert self.seed is not None, "Seed must be provided for LoncapaProblem."
self.student_answers = state.get('student_answers', {})
self.has_saved_answers = state.get('has_saved_answers', False)
if 'correct_map' in state:
self.correct_map.set_dict(state['correct_map'])
self.done = state.get('done', False)
self.input_state = state.get('input_state', {})
# Convert startouttext and endouttext to proper <text></text>
problem_text = re.sub(r"startouttext\s*/", "text", problem_text)
problem_text = re.sub(r"endouttext\s*/", "/text", problem_text)
self.problem_text = problem_text
# parse problem XML file into an element tree
self.tree = etree.XML(problem_text)
self.make_xml_compatible(self.tree)
# handle any <include file="foo"> tags
self._process_includes()
# construct script processor context (eg for customresponse problems)
if minimal_init:
self.context = {}
else:
self.context = self._extract_context(self.tree)
# Pre-parse the XML tree: modifies it to add ID's and perform some in-place
# transformations. This also creates the dict (self.responders) of Response
# instances for each question in the problem. The dict has keys = xml subtree of
# Response, values = Response instance
self.problem_data = self._preprocess_problem(self.tree, minimal_init)
if not minimal_init:
if not self.student_answers: # True when student_answers is an empty dict
self.set_initial_display()
# dictionary of InputType objects associated with this problem
# input_id string -> InputType object
self.inputs = {}
# Run response late_transforms last (see MultipleChoiceResponse)
# Sort the responses to be in *_1 *_2 ... order.
responses = self.responders.values()
responses = sorted(responses, key=lambda resp: int(resp.id[resp.id.rindex('_') + 1:]))
for response in responses:
if hasattr(response, 'late_transforms'):
response.late_transforms(self)
if extract_tree:
self.extracted_tree = self._extract_html(self.tree)
def make_xml_compatible(self, tree):
"""
Adjust tree xml in-place for compatibility before creating
a problem from it.
The idea here is to provide a central point for XML translation,
for example, supporting an old XML format. At present, there are just two translations.
1. <additional_answer> compatibility translation:
old: <additional_answer>ANSWER</additional_answer>
convert to
new: <additional_answer answer="ANSWER">OPTIONAL-HINT</additional_answer>
2. <optioninput> compatibility translation:
optioninput works like this internally:
<optioninput options="('yellow','blue','green')" correct="blue" />
With extended hints there is a new <option> tag, like this
<option correct="True">blue <optionhint>sky color</optionhint> </option>
This translation takes in the new format and synthesizes the old option= attribute
so all downstream logic works unchanged with the new <option> tag format.
"""
additionals = tree.xpath('//stringresponse/additional_answer')
for additional in additionals:
answer = additional.get('answer')
text = additional.text
if not answer and text: # trigger of old->new conversion
additional.set('answer', text)
additional.text = ''
for optioninput in tree.xpath('//optioninput'):
correct_option = None
child_options = []
for option_element in optioninput.findall('./option'):
option_name = option_element.text.strip()
if option_element.get('correct').upper() == 'TRUE':
correct_option = option_name
child_options.append("'" + option_name + "'")
if len(child_options) > 0:
options_string = '(' + ','.join(child_options) + ')'
optioninput.attrib.update({'options': options_string})
if correct_option:
optioninput.attrib.update({'correct': correct_option})
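# Illustrative example of translation 2 above (a sketch, not in the original
# module). Given new-style markup:
#   <optioninput>
#     <option correct="False">yellow</option>
#     <option correct="True">blue</option>
#   </optioninput>
# make_xml_compatible synthesizes the legacy attributes in place, so the
# element behaves as if written as:
#   <optioninput options="('yellow','blue')" correct="blue">...</optioninput>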
def do_reset(self):
"""
Reset internal state to unfinished, with no answers
"""
self.student_answers = dict()
self.has_saved_answers = False
self.correct_map = CorrectMap()
self.done = False
def set_initial_display(self):
"""
Set the student's answers to the responders' initial displays, if specified.
"""
initial_answers = dict()
for responder in self.responders.values():
if hasattr(responder, 'get_initial_display'):
initial_answers.update(responder.get_initial_display())
self.student_answers = initial_answers
def __unicode__(self):
return u"LoncapaProblem ({0})".format(self.problem_id)
def get_state(self):
"""
Stored per-user session data needed to:
1) Recreate the problem
2) Populate any student answers.
"""
return {'seed': self.seed,
'student_answers': self.student_answers,
'has_saved_answers': self.has_saved_answers,
'correct_map': self.correct_map.get_dict(),
'input_state': self.input_state,
'done': self.done}
def get_max_score(self):
"""
Return the maximum score for this problem.
"""
maxscore = 0
for responder in self.responders.values():
maxscore += responder.get_max_score()
return maxscore
def calculate_score(self, correct_map=None):
"""
Compute score for this problem. The score is the number of points awarded.
Returns a dictionary {'score': integer, from 0 to get_max_score(),
'total': get_max_score()}.
Takes an optional correctness map for use in the rescore workflow.
"""
if correct_map is None:
correct_map = self.correct_map
correct = 0
for key in correct_map:
try:
correct += correct_map.get_npoints(key)
except Exception:
log.error('key=%s, correct_map = %s', key, correct_map)
raise
return {'score': correct, 'total': self.get_max_score()}
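# Illustrative (hedged): with two responders worth one point each and only
# the first answered correctly, calculate_score() returns
#   {'score': 1, 'total': 2}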
def update_score(self, score_msg, queuekey):
"""
Deliver grading response (e.g. from async code checking) to
the specific ResponseType that requested grading
Returns an updated CorrectMap
"""
cmap = CorrectMap()
cmap.update(self.correct_map)
for responder in self.responders.values():
if hasattr(responder, 'update_score'):
# Each LoncapaResponse will update its specific entries in cmap
# cmap is passed by reference
responder.update_score(score_msg, cmap, queuekey)
self.correct_map.set_dict(cmap.get_dict())
return cmap
def ungraded_response(self, xqueue_msg, queuekey):
"""
Handle any responses from the xqueue that do not contain grades
Will try to pass the queue message to all inputtypes that can handle ungraded responses
Does not return any value
"""
# check against each inputtype
for the_input in self.inputs.values():
# if the input type has an ungraded function, pass in the values
if hasattr(the_input, 'ungraded_response'):
the_input.ungraded_response(xqueue_msg, queuekey)
def is_queued(self):
"""
Returns True if any part of the problem has been submitted to an external queue
(e.g. for grading.)
"""
return any(self.correct_map.is_queued(answer_id) for answer_id in self.correct_map)
def get_recentmost_queuetime(self):
"""
Returns a DateTime object that represents the timestamp of the most recent
queueing request, or None if not queued
"""
if not self.is_queued():
return None
# Get a list of timestamps of all queueing requests, then convert it to a DateTime object
queuetime_strs = [
self.correct_map.get_queuetime_str(answer_id)
for answer_id in self.correct_map
if self.correct_map.is_queued(answer_id)
]
queuetimes = [
datetime.strptime(qt_str, xqueue_interface.dateformat).replace(tzinfo=UTC)
for qt_str in queuetime_strs
]
return max(queuetimes)
def grade_answers(self, answers):
"""
Grade student responses. Called by capa_module.submit_problem.
`answers` is a dict of all the entries from request.POST, but with the first part
of each key removed (the string before the first "_").
Thus, for example, input_ID123 -> ID123, and input_fromjs_ID123 -> fromjs_ID123
Calls the Response for each question in this problem, to do the actual grading.
"""
# if answers include File objects, convert them to filenames.
self.student_answers = convert_files_to_filenames(answers)
new_cmap = self.get_grade_from_current_answers(answers)
self.correct_map = new_cmap
return self.correct_map
def supports_rescoring(self):
"""
Checks that the current problem definition permits rescoring.
More precisely, it checks that there are no response types in
the current problem that are not fully supported (yet) for rescoring.
This includes responsetypes for which the student's answer
is not properly stored in state, i.e. file submissions. At present,
we have no way to know if an existing response was actually a real
answer or merely the filename of a file submitted as an answer.
It turns out that because rescoring is a background task, limiting
it to responsetypes that don't support file submissions also means
that the responsetypes are synchronous. This is convenient as it
permits rescoring to be complete when the rescoring call returns.
"""
return all('filesubmission' not in responder.allowed_inputfields for responder in self.responders.values())
def get_grade_from_current_answers(self, student_answers):
"""
Gets the grade for the currently-saved problem state, but does not save it
to the block.
For new student_answers being graded, `student_answers` is a dict of all the
entries from request.POST, but with the first part of each key removed
(the string before the first "_"). Thus, for example,
input_ID123 -> ID123, and input_fromjs_ID123 -> fromjs_ID123.
For rescoring, `student_answers` is None.
Calls the Response for each question in this problem, to do the actual grading.
"""
# old CorrectMap
oldcmap = self.correct_map
# start new with empty CorrectMap
newcmap = CorrectMap()
# Call each responsetype instance to do actual grading
for responder in self.responders.values():
# File objects are passed only if responsetype explicitly allows
# for file submissions. But we have no way of knowing if
# student_answers contains a proper answer or the filename of
# an earlier submission, so for now skip these entirely.
# TODO: figure out where to get file submissions when rescoring.
if 'filesubmission' in responder.allowed_inputfields and student_answers is None:
_ = self.capa_system.i18n.ugettext
raise Exception(_(u"Cannot rescore problems with possible file submissions"))
# use 'student_answers' only if it is provided, and if it might contain a file
# submission that would not exist in the persisted "student_answers".
if 'filesubmission' in responder.allowed_inputfields and student_answers is not None:
results = responder.evaluate_answers(student_answers, oldcmap)
else:
results = responder.evaluate_answers(self.student_answers, oldcmap)
newcmap.update(results)
return newcmap
def get_question_answers(self):
"""
Returns a dict of answer_ids to answer values. If we cannot generate
an answer (this sometimes happens in customresponses), that answer_id is
not included. Called by "show answers" button JSON request
(see capa_module)
"""
# dict of (id, correct_answer)
answer_map = dict()
for response in self.responders.keys():
results = self.responder_answers[response]
answer_map.update(results)
# include solutions from <solution>...</solution> stanzas
for entry in self.tree.xpath("//" + "|//".join(solution_tags)):
answer = etree.tostring(entry)
if answer:
answer_map[entry.get('id')] = contextualize_text(answer, self.context)
log.debug('answer_map = %s', answer_map)
return answer_map
def get_answer_ids(self):
"""
Return the IDs of all the responses -- these are the keys used for
the dicts returned by grade_answers and get_question_answers. (Though
get_question_answers may only return a subset of these.)
"""
answer_ids = []
for response in self.responders.keys():
results = self.responder_answers[response]
answer_ids.append(results.keys())
return answer_ids
def find_correct_answer_text(self, answer_id):
"""
Returns the correct answer(s) for the provided answer_id as a single string.
Arguments::
answer_id (str): a string like "98e6a8e915904d5389821a94e48babcf_13_1"
Returns:
str: A string containing the answer or multiple answers separated by commas.
"""
xml_elements = self.tree.xpath('//*[@id="' + answer_id + '"]')
if not xml_elements:
return
xml_element = xml_elements[0]
answer_text = xml_element.xpath('@answer')
if answer_text:
return answer_text[0]  # the @answer attribute value (was `answer_id[0]`, which returned only the first character of the ID)
if xml_element.tag == 'optioninput':
return xml_element.xpath('@correct')[0]
return ', '.join(xml_element.xpath('*[@correct="true"]/text()'))
def find_question_label(self, answer_id):
"""
Obtain the most relevant question text for a particular answer.
E.g. in a problem like "How much is 2+2?" "Two"/"Three"/"More than three",
this function returns the "How much is 2+2?" text.
It uses, in order:
- the question prompt, if the question has one
- the <p> or <label> element which precedes the choices (skipping descriptive elements)
- a text like "Question 5" if no other name could be found
Arguments::
answer_id: a string like "98e6a8e915904d5389821a94e48babcf_13_1"
Returns:
a string with the question text
"""
_ = self.capa_system.i18n.ugettext
# Some questions define a prompt with this format: >>This is a prompt<<
prompt = self.problem_data[answer_id].get('label')
if prompt:
question_text = prompt.striptags()
else:
# If no prompt, then we must look for something resembling a question ourselves
#
# We have a structure like:
#
# <p />
# <optionresponse id="a0effb954cca4759994f1ac9e9434bf4_2">
# <optioninput id="a0effb954cca4759994f1ac9e9434bf4_3_1" />
# <optionresponse>
#
# Starting from answer (the optioninput in this example) we go up and backwards
xml_elems = self.tree.xpath('//*[@id="' + answer_id + '"]')
assert len(xml_elems) == 1
xml_elem = xml_elems[0].getparent()
# Get the element that probably contains the question text
questiontext_elem = xml_elem.getprevious()
# Go backwards looking for a <p> or <label>, but skip <description> because it doesn't
# contain the question text.
#
# E.g if we have this:
# <p /> <description /> <optionresponse /> <optionresponse />
#
# then from the first optionresponse we'll end with the <p>.
# If we start in the second optionresponse, we'll find another response in the way,
# stop early, and instead of a question we'll report "Question 2".
SKIP_ELEMS = ['description']
LABEL_ELEMS = ['p', 'label']
while questiontext_elem is not None and questiontext_elem.tag in SKIP_ELEMS:
questiontext_elem = questiontext_elem.getprevious()
if questiontext_elem is not None and questiontext_elem.tag in LABEL_ELEMS:
question_text = questiontext_elem.text
else:
# For instance 'd2e35c1d294b4ba0b3b1048615605d2a_2_1' contains 2,
# which is used in question number 1 (see example XML in comment above)
# There's no question 0 (question IDs start at 1, answer IDs at 2)
question_nr = int(answer_id.split('_')[-2]) - 1
question_text = _("Question {0}").format(question_nr)
return question_text
def find_answer_text(self, answer_id, current_answer):
"""
Process a raw answer text to make it more meaningful.
E.g. in a choice problem like "How much is 2+2?" "Two"/"Three"/"More than three",
this function will transform "choice_1" (which is the internal response given by
many capa methods) to the human version, e.g. "More than three".
If the answers are multiple (e.g. because they're from a multiple choice problem),
this will join them with a comma.
If passed a normal string which is already the answer, it doesn't change it.
TODO merge with response_a11y_data?
Arguments:
answer_id: a string like "98e6a8e915904d5389821a94e48babcf_13_1"
current_answer: a data structure as found in `LoncapaProblem.student_answers`
which represents the best response we have until now
Returns:
a string with the human version of the response
"""
if isinstance(current_answer, list):
# Multiple answers. This case happens e.g. in multiple choice problems
answer_text = ", ".join(
self.find_answer_text(answer_id, answer) for answer in current_answer
)
elif isinstance(current_answer, basestring) and current_answer.startswith('choice_'):
# Many problem (e.g. checkbox) report "choice_0" "choice_1" etc.
# Here we transform it
elems = self.tree.xpath('//*[@id="{answer_id}"]//*[@name="{choice_number}"]'.format(
answer_id=answer_id,
choice_number=current_answer
))
assert len(elems) == 1
choicegroup = elems[0].getparent()
input_cls = inputtypes.registry.get_class_for_tag(choicegroup.tag)
choices_map = dict(input_cls.extract_choices(choicegroup, self.capa_system.i18n, text_only=True))
answer_text = choices_map[current_answer]
elif isinstance(current_answer, basestring):
# Already a string with the answer
answer_text = current_answer
else:
raise NotImplementedError()
return answer_text
def do_targeted_feedback(self, tree):
"""
Implements targeted-feedback in-place on <multiplechoiceresponse> --
choice-level explanations shown to a student after submission.
Does nothing if there is no targeted-feedback attribute.
"""
_ = self.capa_system.i18n.ugettext
# Record that the modifications have been done, to avoid problems if this is called twice.
if hasattr(self, 'has_targeted'):
return
self.has_targeted = True # pylint: disable=attribute-defined-outside-init
for mult_choice_response in tree.xpath('//multiplechoiceresponse[@targeted-feedback]'):
show_explanation = mult_choice_response.get('targeted-feedback') == 'alwaysShowCorrectChoiceExplanation'
# Grab the first choicegroup (there should only be one within each <multiplechoiceresponse> tag)
choicegroup = mult_choice_response.xpath('./choicegroup[@type="MultipleChoice"]')[0]
choices_list = list(choicegroup.iter('choice'))
# Find the student answer key that matches our <choicegroup> id
student_answer = self.student_answers.get(choicegroup.get('id'))
expl_id_for_student_answer = None
# Keep track of the explanation-id that corresponds to the student's answer
# Also, keep track of the solution-id
solution_id = None
choice_correctness_for_student_answer = _('Incorrect')
for choice in choices_list:
if choice.get('name') == student_answer:
expl_id_for_student_answer = choice.get('explanation-id')
if choice.get('correct') == 'true':
choice_correctness_for_student_answer = _('Correct')
if choice.get('correct') == 'true':
solution_id = choice.get('explanation-id')
# Filter out targetedfeedback that doesn't correspond to the answer the student selected
# Note: following-sibling will grab all following siblings, so we just want the first in the list
targetedfeedbackset = mult_choice_response.xpath('./following-sibling::targetedfeedbackset')
if len(targetedfeedbackset) != 0:
targetedfeedbackset = targetedfeedbackset[0]
targetedfeedbacks = targetedfeedbackset.xpath('./targetedfeedback')
# find the legend by id in choicegroup.html for aria-describedby
problem_legend_id = str(choicegroup.get('id')) + '-legend'
for targetedfeedback in targetedfeedbacks:
screenreadertext = etree.Element("span")
targetedfeedback.insert(0, screenreadertext)
screenreadertext.set('class', 'sr')
screenreadertext.text = choice_correctness_for_student_answer
targetedfeedback.set('role', 'group')
targetedfeedback.set('aria-describedby', problem_legend_id)
# Don't show targeted feedback if the student hasn't answered the problem
# or if the targeted feedback doesn't match the student's (incorrect) answer
if not self.done or targetedfeedback.get('explanation-id') != expl_id_for_student_answer:
targetedfeedbackset.remove(targetedfeedback)
# Do not displace the solution under these circumstances
if not show_explanation or not self.done:
continue
# The next element should either be <solution> or <solutionset>
next_element = targetedfeedbackset.getnext()
parent_element = tree
solution_element = None
if next_element is not None and next_element.tag == 'solution':
solution_element = next_element
elif next_element is not None and next_element.tag == 'solutionset':
solutions = next_element.xpath('./solution')
for solution in solutions:
if solution.get('explanation-id') == solution_id:
parent_element = next_element
solution_element = solution
# If could not find the solution element, then skip the remaining steps below
if solution_element is None:
continue
# Change our correct-choice explanation from a "solution explanation" to within
# the set of targeted feedback, which means the explanation will render on the page
# without the student clicking "Show Answer" or seeing a checkmark next to the correct choice
parent_element.remove(solution_element)
# Add our solution instead to the targetedfeedbackset and change its tag name
solution_element.tag = 'targetedfeedback'
targetedfeedbackset.append(solution_element)
def get_html(self):
"""
Main method called externally to get the HTML to be rendered for this capa Problem.
"""
self.do_targeted_feedback(self.tree)
html = contextualize_text(etree.tostring(self._extract_html(self.tree)), self.context)
return html
def handle_input_ajax(self, data):
"""
InputTypes can support specialized AJAX calls. Find the correct input and pass along the correct data
Also, parse out the dispatch from the get so that it can be passed onto the input type nicely
"""
# pull out the id
input_id = data['input_id']
if self.inputs[input_id]:
dispatch = data['dispatch']
return self.inputs[input_id].handle_ajax(dispatch, data)
else:
log.warning("Could not find matching input for id: %s", input_id)
return {}
# ======= Private Methods Below ========
def _process_includes(self):
"""
Handle any <include file="foo"> tags by reading in the specified file and inserting it
into our XML tree. Fail gracefully if debugging.
"""
includes = self.tree.findall('.//include')
for inc in includes:
filename = inc.get('file') and inc.get('file').decode('utf-8')  # guard the decode: a missing @file attribute used to crash before the None check
if filename is not None:
try:
# open using LoncapaSystem OSFS filestore
ifp = self.capa_system.filestore.open(filename)
except Exception as err:
log.warning(
'Error %s in problem xml include: %s',
err,
etree.tostring(inc, pretty_print=True)
)
log.warning(
'Cannot find file %s in %s', filename, self.capa_system.filestore
)
# if debugging, don't fail - just log error
# TODO (vshnayder): need real error handling, display to users
if not self.capa_system.DEBUG:
raise
else:
continue
try:
# read in and convert to XML
incxml = etree.XML(ifp.read())
except Exception as err:
log.warning(
'Error %s in problem xml include: %s',
err,
etree.tostring(inc, pretty_print=True)
)
log.warning('Cannot parse XML in %s', (filename))
# if debugging, don't fail - just log error
# TODO (vshnayder): same as above
if not self.capa_system.DEBUG:
raise
else:
continue
# insert new XML into tree in place of include
parent = inc.getparent()
parent.insert(parent.index(inc), incxml)
parent.remove(inc)
log.debug('Included %s into %s', filename, self.problem_id)
def _extract_system_path(self, script):
"""
Extracts and normalizes additional paths for code execution.
For now, there's a default path of data/course/code; this may be removed
at some point.
script : ?? (TODO)
"""
DEFAULT_PATH = ['code']
# Separate paths by :, like the system path.
raw_path = script.get('system_path', '').split(":") + DEFAULT_PATH
# find additional comma-separated modules search path
path = []
for dir in raw_path:
if not dir:
continue
# path is an absolute path or a path relative to the data dir
dir = os.path.join(self.capa_system.filestore.root_path, dir)
# Check that we are within the filestore tree.
reldir = os.path.relpath(dir, self.capa_system.filestore.root_path)
if ".." in reldir:
log.warning("Ignoring Python directory outside of course: %r", dir)
continue
abs_dir = os.path.normpath(dir)
path.append(abs_dir)
return path
def _extract_context(self, tree):
"""
Extract content of <script>...</script> from the problem.xml file, and exec it in the
context of this problem. Provides ability to randomize problems, and also set
variables for problem answer checking.
Problem XML goes to Python execution context. Runs everything in script tags.
"""
context = {}
context['seed'] = self.seed
context['anonymous_student_id'] = self.capa_system.anonymous_student_id
all_code = ''
python_path = []
for script in tree.findall('.//script'):
stype = script.get('type')
if stype:
if 'javascript' in stype:
continue # skip javascript
if 'perl' in stype:
continue # skip perl
# TODO: evaluate only python
for d in self._extract_system_path(script):
if d not in python_path and os.path.exists(d):
python_path.append(d)
XMLESC = {"&apos;": "'", "&quot;": '"'}
code = unescape(script.text, XMLESC)
all_code += code
extra_files = []
if all_code:
# An asset named python_lib.zip can be imported by Python code.
zip_lib = self.capa_system.get_python_lib_zip()
if zip_lib is not None:
extra_files.append(("python_lib.zip", zip_lib))
python_path.append("python_lib.zip")
try:
safe_exec(
all_code,
context,
random_seed=self.seed,
python_path=python_path,
extra_files=extra_files,
cache=self.capa_system.cache,
slug=self.problem_id,
unsafely=self.capa_system.can_execute_unsafe_code(),
)
except Exception as err:
log.exception("Error while execing script code: " + all_code)
msg = "Error while executing script code: %s" % str(err).replace('<', '<')
raise responsetypes.LoncapaProblemError(msg)
# Store code source in context, along with the Python path needed to run it correctly.
context['script_code'] = all_code
context['python_path'] = python_path
context['extra_files'] = extra_files or None
return context
def _extract_html(self, problemtree): # private
"""
Main (private) function which converts Problem XML tree to HTML.
Calls itself recursively.
Returns Element tree of XHTML representation of problemtree.
Calls render_html of Response instances to render responses into XHTML.
Used by get_html.
"""
if not isinstance(problemtree.tag, basestring):
# Comment and ProcessingInstruction nodes are not Elements,
# and we're ok leaving those behind.
# BTW: etree gives us no good way to distinguish these things
# other than to examine .tag to see if it's a string. :(
return
if (problemtree.tag == 'script' and problemtree.get('type')
and 'javascript' in problemtree.get('type')):
# leave javascript intact.
return deepcopy(problemtree)
if problemtree.tag in html_problem_semantics:
return
problemid = problemtree.get('id') # my ID
if problemtree.tag in inputtypes.registry.registered_tags():
# If this is an inputtype subtree, let it render itself.
response_data = self.problem_data[problemid]
status = 'unsubmitted'
msg = ''
hint = ''
hintmode = None
input_id = problemtree.get('id')
answervariable = None
if problemid in self.correct_map:
pid = input_id
# If we're withholding correctness, don't show adaptive hints either.
# Note that regular, "demand" hints will be shown, if the course author has added them to the problem.
if not self.capa_module.correctness_available():
status = 'submitted'
else:
# If the the problem has not been saved since the last submit set the status to the
# current correctness value and set the message as expected. Otherwise we do not want to
# display correctness because the answer may have changed since the problem was graded.
if not self.has_saved_answers:
status = self.correct_map.get_correctness(pid)
msg = self.correct_map.get_msg(pid)
hint = self.correct_map.get_hint(pid)
hintmode = self.correct_map.get_hintmode(pid)
answervariable = self.correct_map.get_property(pid, 'answervariable')
value = ''
if self.student_answers and problemid in self.student_answers:
value = self.student_answers[problemid]
if input_id not in self.input_state:
self.input_state[input_id] = {}
# do the rendering
state = {
'value': value,
'status': status,
'id': input_id,
'input_state': self.input_state[input_id],
'answervariable': answervariable,
'response_data': response_data,
'has_saved_answers': self.has_saved_answers,
'feedback': {
'message': msg,
'hint': hint,
'hintmode': hintmode,
}
}
input_type_cls = inputtypes.registry.get_class_for_tag(problemtree.tag)
# save the input type so that we can make ajax calls on it if we need to
self.inputs[input_id] = input_type_cls(self.capa_system, problemtree, state)
return self.inputs[input_id].get_html()
# let each Response render itself
if problemtree in self.responders:
overall_msg = self.correct_map.get_overall_message()
return self.responders[problemtree].render_html(
self._extract_html, response_msg=overall_msg
)
# let each custom renderer render itself:
if problemtree.tag in customrender.registry.registered_tags():
renderer_class = customrender.registry.get_class_for_tag(problemtree.tag)
renderer = renderer_class(self.capa_system, problemtree)
return renderer.get_html()
# otherwise, render children recursively, and copy over attributes
tree = etree.Element(problemtree.tag)
for item in problemtree:
item_xhtml = self._extract_html(item)
if item_xhtml is not None:
tree.append(item_xhtml)
if tree.tag in html_transforms:
tree.tag = html_transforms[problemtree.tag]['tag']
else:
# copy attributes over when the tag is not being replaced with an innocuous one
for (key, value) in problemtree.items():
tree.set(key, value)
tree.text = problemtree.text
tree.tail = problemtree.tail
return tree
def _preprocess_problem(self, tree, minimal_init): # private
"""
Assign IDs to all the responses
Assign sub-IDs to all entries (textline, schematic, etc.)
Annotate correctness and value
In-place transformation
Also create capa Response instances for each responsetype and save as self.responders
Obtain all responder answers and save as self.responder_answers dict (key = response)
"""
response_id = 1
problem_data = {}
self.responders = {}
for response in tree.xpath('//' + "|//".join(responsetypes.registry.registered_tags())):
responsetype_id = self.problem_id + "_" + str(response_id)
# create and save ID for this response
response.set('id', responsetype_id)
response_id += 1
answer_id = 1
input_tags = inputtypes.registry.registered_tags()
inputfields = tree.xpath(
"|".join(['//' + response.tag + '[@id=$id]//' + x for x in input_tags]),
id=responsetype_id
)
# assign one answer_id for each input type
for entry in inputfields:
entry.attrib['response_id'] = str(response_id)
entry.attrib['answer_id'] = str(answer_id)
entry.attrib['id'] = "%s_%i_%i" % (self.problem_id, response_id, answer_id)
answer_id = answer_id + 1
self.response_a11y_data(response, inputfields, responsetype_id, problem_data)
# instantiate capa Response
responsetype_cls = responsetypes.registry.get_class_for_tag(response.tag)
responder = responsetype_cls(
response, inputfields, self.context, self.capa_system, self.capa_module, minimal_init
)
# save in list in self
self.responders[response] = responder
if not minimal_init:
# get responder answers (do this only once, since there may be a performance cost,
# eg with externalresponse)
self.responder_answers = {}
for response in self.responders.keys():
try:
self.responder_answers[response] = self.responders[response].get_answers()
                except Exception:
log.debug('responder %s failed to properly return get_answers()',
self.responders[response]) # FIXME
raise
# <solution>...</solution> may not be associated with any specific response; give
# IDs for those separately
# TODO: We should make the namespaces consistent and unique (e.g. %s_problem_%i).
solution_id = 1
for solution in tree.findall('.//solution'):
solution.attrib['id'] = "%s_solution_%i" % (self.problem_id, solution_id)
solution_id += 1
return problem_data
def response_a11y_data(self, response, inputfields, responsetype_id, problem_data):
"""
Construct data to be used for a11y.
Arguments:
response (object): xml response object
inputfields (list): list of inputfields in a responsetype
responsetype_id (str): responsetype id
problem_data (dict): dict to be filled with response data
"""
# if there are no inputtypes then don't do anything
if not inputfields:
return
element_to_be_deleted = None
label = ''
if len(inputfields) > 1:
response.set('multiple_inputtypes', 'true')
group_label_tag = response.find('label')
group_description_tags = response.findall('description')
group_label_tag_id = u'multiinput-group-label-{}'.format(responsetype_id)
group_label_tag_text = ''
if group_label_tag is not None:
group_label_tag.tag = 'p'
group_label_tag.set('id', group_label_tag_id)
group_label_tag.set('class', 'multi-inputs-group-label')
group_label_tag_text = stringify_children(group_label_tag)
response.set('multiinput-group-label-id', group_label_tag_id)
group_description_ids = []
for index, group_description_tag in enumerate(group_description_tags):
group_description_tag_id = u'multiinput-group-description-{}-{}'.format(responsetype_id, index)
group_description_tag.tag = 'p'
group_description_tag.set('id', group_description_tag_id)
group_description_tag.set('class', 'multi-inputs-group-description question-description')
group_description_ids.append(group_description_tag_id)
if group_description_ids:
response.set('multiinput-group_description_ids', ' '.join(group_description_ids))
for inputfield in inputfields:
problem_data[inputfield.get('id')] = {
'group_label': group_label_tag_text,
'label': inputfield.attrib.get('label', ''),
'descriptions': {}
}
else:
# Extract label value from <label> tag or label attribute from inside the responsetype
responsetype_label_tag = response.find('label')
if responsetype_label_tag is not None:
label = stringify_children(responsetype_label_tag)
                # store the <label> tag containing the question text so we can delete
                # it later; otherwise the question would be rendered twice
element_to_be_deleted = responsetype_label_tag
elif 'label' in inputfields[0].attrib:
                # In this case we have old problems with a label attribute and a <p> tag holding the question.
                # We pick the first preceding sibling of the responsetype if it's a <p> tag and match its text
                # against the label attribute text; if they are equal we use that text as the question.
                # Get the first <p> tag before the responsetype; this <p> may contain the question text.
p_tag = response.xpath('preceding-sibling::*[1][self::p]')
if p_tag and p_tag[0].text == inputfields[0].attrib['label']:
label = stringify_children(p_tag[0])
element_to_be_deleted = p_tag[0]
else:
                # In this case the problem has neither a <label> tag nor a label attribute inside the
                # responsetype, so we take the first preceding <label> tag relative to this responsetype.
                # This takes care of multi-question problems that are not using --- in their markdown.
label_tag = response.xpath('preceding-sibling::*[1][self::label]')
if label_tag:
label = stringify_children(label_tag[0])
element_to_be_deleted = label_tag[0]
# delete label or p element only if inputtype is fully accessible
if inputfields[0].tag in ACCESSIBLE_CAPA_INPUT_TYPES and element_to_be_deleted is not None:
element_to_be_deleted.getparent().remove(element_to_be_deleted)
# Extract descriptions and set unique id on each description tag
description_tags = response.findall('description')
description_id = 1
descriptions = OrderedDict()
for description in description_tags:
descriptions[
"description_%s_%i" % (responsetype_id, description_id)
] = HTML(stringify_children(description))
response.remove(description)
description_id += 1
problem_data[inputfields[0].get('id')] = {
'label': HTML(label.strip()) if label else '',
'descriptions': descriptions
}
|
agpl-3.0
|
lumig242/Hue-Integration-with-CDAP
|
desktop/core/ext-py/Django-1.6.10/django/contrib/auth/tests/test_remote_user.py
|
47
|
8774
|
from datetime import datetime
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth.backends import RemoteUserBackend
from django.contrib.auth.models import User
from django.contrib.auth.tests.utils import skipIfCustomUser
from django.test import TestCase
from django.utils import timezone
@skipIfCustomUser
class RemoteUserTest(TestCase):
urls = 'django.contrib.auth.tests.urls'
middleware = 'django.contrib.auth.middleware.RemoteUserMiddleware'
backend = 'django.contrib.auth.backends.RemoteUserBackend'
# Usernames to be passed in REMOTE_USER for the test_known_user test case.
known_user = 'knownuser'
known_user2 = 'knownuser2'
def setUp(self):
self.curr_middleware = settings.MIDDLEWARE_CLASSES
self.curr_auth = settings.AUTHENTICATION_BACKENDS
settings.MIDDLEWARE_CLASSES += (self.middleware,)
settings.AUTHENTICATION_BACKENDS += (self.backend,)
def test_no_remote_user(self):
"""
        Tests requests where no remote user is specified and ensures that no
        users get created.
"""
num_users = User.objects.count()
response = self.client.get('/remote_user/')
self.assertTrue(response.context['user'].is_anonymous())
self.assertEqual(User.objects.count(), num_users)
response = self.client.get('/remote_user/', REMOTE_USER=None)
self.assertTrue(response.context['user'].is_anonymous())
self.assertEqual(User.objects.count(), num_users)
response = self.client.get('/remote_user/', REMOTE_USER='')
self.assertTrue(response.context['user'].is_anonymous())
self.assertEqual(User.objects.count(), num_users)
def test_unknown_user(self):
"""
Tests the case where the username passed in the header does not exist
as a User.
"""
num_users = User.objects.count()
response = self.client.get('/remote_user/', REMOTE_USER='newuser')
self.assertEqual(response.context['user'].username, 'newuser')
self.assertEqual(User.objects.count(), num_users + 1)
User.objects.get(username='newuser')
# Another request with same user should not create any new users.
response = self.client.get('/remote_user/', REMOTE_USER='newuser')
self.assertEqual(User.objects.count(), num_users + 1)
def test_known_user(self):
"""
Tests the case where the username passed in the header is a valid User.
"""
User.objects.create(username='knownuser')
User.objects.create(username='knownuser2')
num_users = User.objects.count()
response = self.client.get('/remote_user/', REMOTE_USER=self.known_user)
self.assertEqual(response.context['user'].username, 'knownuser')
self.assertEqual(User.objects.count(), num_users)
# Test that a different user passed in the headers causes the new user
# to be logged in.
response = self.client.get('/remote_user/', REMOTE_USER=self.known_user2)
self.assertEqual(response.context['user'].username, 'knownuser2')
self.assertEqual(User.objects.count(), num_users)
def test_last_login(self):
"""
Tests that a user's last_login is set the first time they make a
request but not updated in subsequent requests with the same session.
"""
user = User.objects.create(username='knownuser')
# Set last_login to something so we can determine if it changes.
default_login = datetime(2000, 1, 1)
if settings.USE_TZ:
default_login = default_login.replace(tzinfo=timezone.utc)
user.last_login = default_login
user.save()
response = self.client.get('/remote_user/', REMOTE_USER=self.known_user)
self.assertNotEqual(default_login, response.context['user'].last_login)
user = User.objects.get(username='knownuser')
user.last_login = default_login
user.save()
response = self.client.get('/remote_user/', REMOTE_USER=self.known_user)
self.assertEqual(default_login, response.context['user'].last_login)
def test_header_disappears(self):
"""
Tests that a logged in user is logged out automatically when
the REMOTE_USER header disappears during the same browser session.
"""
User.objects.create(username='knownuser')
# Known user authenticates
response = self.client.get('/remote_user/', REMOTE_USER=self.known_user)
self.assertEqual(response.context['user'].username, 'knownuser')
# During the session, the REMOTE_USER header disappears. Should trigger logout.
response = self.client.get('/remote_user/')
self.assertEqual(response.context['user'].is_anonymous(), True)
# verify the remoteuser middleware will not remove a user
# authenticated via another backend
User.objects.create_user(username='modeluser', password='foo')
self.client.login(username='modeluser', password='foo')
authenticate(username='modeluser', password='foo')
response = self.client.get('/remote_user/')
self.assertEqual(response.context['user'].username, 'modeluser')
def test_user_switch_forces_new_login(self):
"""
Tests that if the username in the header changes between requests
that the original user is logged out
"""
User.objects.create(username='knownuser')
# Known user authenticates
response = self.client.get('/remote_user/',
**{'REMOTE_USER': self.known_user})
self.assertEqual(response.context['user'].username, 'knownuser')
# During the session, the REMOTE_USER changes to a different user.
response = self.client.get('/remote_user/',
**{'REMOTE_USER': "newnewuser"})
# Ensure that the current user is not the prior remote_user
# In backends that create a new user, username is "newnewuser"
# In backends that do not create new users, it is '' (anonymous user)
self.assertNotEqual(response.context['user'].username, 'knownuser')
def tearDown(self):
"""Restores settings to avoid breaking other tests."""
settings.MIDDLEWARE_CLASSES = self.curr_middleware
settings.AUTHENTICATION_BACKENDS = self.curr_auth
class RemoteUserNoCreateBackend(RemoteUserBackend):
"""Backend that doesn't create unknown users."""
create_unknown_user = False
@skipIfCustomUser
class RemoteUserNoCreateTest(RemoteUserTest):
"""
Contains the same tests as RemoteUserTest, but using a custom auth backend
class that doesn't create unknown users.
"""
backend =\
'django.contrib.auth.tests.test_remote_user.RemoteUserNoCreateBackend'
def test_unknown_user(self):
num_users = User.objects.count()
response = self.client.get('/remote_user/', REMOTE_USER='newuser')
self.assertTrue(response.context['user'].is_anonymous())
self.assertEqual(User.objects.count(), num_users)
class CustomRemoteUserBackend(RemoteUserBackend):
"""
Backend that overrides RemoteUserBackend methods.
"""
def clean_username(self, username):
"""
Grabs username before the @ character.
"""
return username.split('@')[0]
def configure_user(self, user):
"""
Sets user's email address.
"""
user.email = '[email protected]'
user.save()
return user
@skipIfCustomUser
class RemoteUserCustomTest(RemoteUserTest):
"""
Tests a custom RemoteUserBackend subclass that overrides the clean_username
and configure_user methods.
"""
backend =\
'django.contrib.auth.tests.test_remote_user.CustomRemoteUserBackend'
# REMOTE_USER strings with email addresses for the custom backend to
# clean.
known_user = '[email protected]'
known_user2 = '[email protected]'
def test_known_user(self):
"""
The strings passed in REMOTE_USER should be cleaned and the known users
should not have been configured with an email address.
"""
super(RemoteUserCustomTest, self).test_known_user()
self.assertEqual(User.objects.get(username='knownuser').email, '')
self.assertEqual(User.objects.get(username='knownuser2').email, '')
def test_unknown_user(self):
"""
The unknown user created should be configured with an email address.
"""
super(RemoteUserCustomTest, self).test_unknown_user()
newuser = User.objects.get(username='newuser')
self.assertEqual(newuser.email, '[email protected]')
|
apache-2.0
|
sergey-shandar/autorest
|
src/generator/AutoRest.Python.Tests/AcceptanceTests/dictionary_tests.py
|
8
|
11966
|
# --------------------------------------------------------------------------
#
# Copyright (c) Microsoft Corporation. All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the ""Software""), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED *AS IS*, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
# --------------------------------------------------------------------------
import unittest
import subprocess
import sys
import isodate
import os
from datetime import date, datetime, timedelta
from os.path import dirname, pardir, join, realpath
cwd = dirname(realpath(__file__))
log_level = int(os.environ.get('PythonLogLevel', 30))
tests = realpath(join(cwd, pardir, "Expected", "AcceptanceTests"))
sys.path.append(join(tests, "BodyDictionary"))
from msrest.exceptions import DeserializationError
from auto_rest_swagger_ba_tdictionary_service import AutoRestSwaggerBATdictionaryService
from auto_rest_swagger_ba_tdictionary_service.models import Widget, ErrorException
class DictionaryTests(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.client = AutoRestSwaggerBATdictionaryService(base_url="http://localhost:3000")
return super(DictionaryTests, cls).setUpClass()
def test_dictionary_primitive_types(self):
tfft = {"0":True, "1":False, "2":False, "3":True}
self.assertEqual(tfft, self.client.dictionary.get_boolean_tfft())
self.client.dictionary.put_boolean_tfft(tfft)
invalid_null_dict = {"0":True, "1":None, "2":False}
self.assertEqual(invalid_null_dict, self.client.dictionary.get_boolean_invalid_null())
with self.assertRaises(DeserializationError):
self.client.dictionary.get_boolean_invalid_string()
int_valid = {"0":1, "1":-1, "2":3, "3":300}
self.assertEqual(int_valid, self.client.dictionary.get_integer_valid())
self.client.dictionary.put_integer_valid(int_valid)
int_null_dict = {"0":1, "1":None, "2":0}
self.assertEqual(int_null_dict, self.client.dictionary.get_int_invalid_null())
with self.assertRaises(DeserializationError):
self.client.dictionary.get_int_invalid_string()
long_valid = {"0":1, "1":-1, "2":3, "3":300}
self.assertEqual(long_valid, self.client.dictionary.get_long_valid())
self.client.dictionary.put_long_valid(long_valid)
long_null_dict = {"0":1, "1":None, "2":0}
self.assertEqual(long_null_dict, self.client.dictionary.get_long_invalid_null())
with self.assertRaises(DeserializationError):
self.client.dictionary.get_long_invalid_string()
float_valid = {"0":0, "1":-0.01, "2":-1.2e20}
self.assertEqual(float_valid, self.client.dictionary.get_float_valid())
self.client.dictionary.put_float_valid(float_valid)
float_null_dict = {"0":0.0, "1":None, "2":-1.2e20}
self.assertEqual(float_null_dict, self.client.dictionary.get_float_invalid_null())
with self.assertRaises(DeserializationError):
self.client.dictionary.get_float_invalid_string()
double_valid = {"0":0, "1":-0.01, "2":-1.2e20}
self.assertEqual(double_valid, self.client.dictionary.get_double_valid())
self.client.dictionary.put_double_valid(double_valid)
double_null_dict = {"0":0.0, "1":None, "2":-1.2e20}
self.assertEqual(double_null_dict, self.client.dictionary.get_double_invalid_null())
with self.assertRaises(DeserializationError):
self.client.dictionary.get_double_invalid_string()
string_valid = {"0":"foo1", "1":"foo2", "2":"foo3"}
self.assertEqual(string_valid, self.client.dictionary.get_string_valid())
self.client.dictionary.put_string_valid(string_valid)
string_null_dict = {"0":"foo", "1":None, "2":"foo2"}
string_invalid_dict = {"0":"foo", "1":"123", "2":"foo2"}
self.assertEqual(string_null_dict, self.client.dictionary.get_string_with_null())
self.assertEqual(string_invalid_dict, self.client.dictionary.get_string_with_invalid())
date1 = isodate.parse_date("2000-12-01T00:00:00Z")
date2 = isodate.parse_date("1980-01-02T00:00:00Z")
date3 = isodate.parse_date("1492-10-12T00:00:00Z")
datetime1 = isodate.parse_datetime("2000-12-01T00:00:01Z")
datetime2 = isodate.parse_datetime("1980-01-02T00:11:35+01:00")
datetime3 = isodate.parse_datetime("1492-10-12T10:15:01-08:00")
rfc_datetime1 = isodate.parse_datetime("2000-12-01T00:00:01Z")
rfc_datetime2 = isodate.parse_datetime("1980-01-02T00:11:35Z")
rfc_datetime3 = isodate.parse_datetime("1492-10-12T10:15:01Z")
duration1 = timedelta(days=123, hours=22, minutes=14, seconds=12, milliseconds=11)
duration2 = timedelta(days=5, hours=1)
valid_date_dict = {"0":date1, "1":date2, "2":date3}
date_dictionary = self.client.dictionary.get_date_valid()
self.assertEqual(date_dictionary, valid_date_dict)
self.client.dictionary.put_date_valid(valid_date_dict)
date_null_dict = {"0":isodate.parse_date("2012-01-01"),
"1":None,
"2":isodate.parse_date("1776-07-04")}
self.assertEqual(date_null_dict, self.client.dictionary.get_date_invalid_null())
with self.assertRaises(DeserializationError):
self.client.dictionary.get_date_invalid_chars()
valid_datetime_dict = {"0":datetime1, "1":datetime2, "2":datetime3}
self.assertEqual(valid_datetime_dict, self.client.dictionary.get_date_time_valid())
self.client.dictionary.put_date_time_valid(valid_datetime_dict)
datetime_null_dict = {"0":isodate.parse_datetime("2000-12-01T00:00:01Z"), "1":None}
self.assertEqual(datetime_null_dict, self.client.dictionary.get_date_time_invalid_null())
with self.assertRaises(DeserializationError):
self.client.dictionary.get_date_time_invalid_chars()
valid_rfc_dict = {"0":rfc_datetime1, "1":rfc_datetime2, "2":rfc_datetime3}
self.assertEqual(valid_rfc_dict, self.client.dictionary.get_date_time_rfc1123_valid())
self.client.dictionary.put_date_time_rfc1123_valid(valid_rfc_dict)
valid_duration_dict = {"0":duration1, "1":duration2}
self.assertEqual(valid_duration_dict, self.client.dictionary.get_duration_valid())
self.client.dictionary.put_duration_valid(valid_duration_dict)
bytes1 = bytearray([0x0FF, 0x0FF, 0x0FF, 0x0FA])
bytes2 = bytearray([0x01, 0x02, 0x03])
bytes3 = bytearray([0x025, 0x029, 0x043])
bytes4 = bytearray([0x0AB, 0x0AC, 0x0AD])
bytes_valid = {"0":bytes1, "1":bytes2, "2":bytes3}
self.client.dictionary.put_byte_valid(bytes_valid)
bytes_result = self.client.dictionary.get_byte_valid()
self.assertEqual(bytes_valid, bytes_result)
bytes_null = {"0":bytes4, "1":None}
bytes_result = self.client.dictionary.get_byte_invalid_null()
self.assertEqual(bytes_null, bytes_result)
test_dict = {'0': 'a string that gets encoded with base64url'.encode(),
'1': 'test string'.encode(),
'2': 'Lorem ipsum'.encode()}
self.assertEqual(self.client.dictionary.get_base64_url(), test_dict)
def test_basic_dictionary_parsing(self):
self.assertEqual({}, self.client.dictionary.get_empty())
self.client.dictionary.put_empty({})
self.assertIsNone(self.client.dictionary.get_null())
with self.assertRaises(DeserializationError):
self.client.dictionary.get_invalid()
        # {null: "val1"} is not standard JSON (JSON requires keys to be strings), so this case is skipped:
#self.assertEqual({"None":"val1"}, self.client.dictionary.get_null_key())
self.assertEqual({"key1":None}, self.client.dictionary.get_null_value())
self.assertEqual({"":"val1"}, self.client.dictionary.get_empty_string_key())
def test_dictionary_composed_types(self):
test_product1 = Widget(integer=1, string="2")
test_product2 = Widget(integer=3, string="4")
test_product3 = Widget(integer=5, string="6")
test_dict = {"0":test_product1, "1":test_product2, "2":test_product3}
self.assertIsNone(self.client.dictionary.get_complex_null())
self.assertEqual({}, self.client.dictionary.get_complex_empty())
self.client.dictionary.put_complex_valid(test_dict)
complex_result = self.client.dictionary.get_complex_valid()
self.assertEqual(test_dict, complex_result)
list_dict = {"0":["1","2","3"], "1":["4","5","6"], "2":["7","8","9"]}
self.client.dictionary.put_array_valid(list_dict)
array_result = self.client.dictionary.get_array_valid()
self.assertEqual(list_dict, array_result)
dict_dict = {"0":{"1":"one","2":"two","3":"three"},
"1":{"4":"four","5":"five","6":"six"},
"2":{"7":"seven","8":"eight","9":"nine"}}
self.client.dictionary.put_dictionary_valid(dict_dict)
dict_result = self.client.dictionary.get_dictionary_valid()
self.assertEqual(dict_dict, dict_result)
self.assertIsNone(self.client.dictionary.get_complex_null())
self.assertEqual({}, self.client.dictionary.get_complex_empty())
test_dict2 = {"0":test_product1, "1":None, "2":test_product3}
complex_result = self.client.dictionary.get_complex_item_null()
self.assertEqual(complex_result, test_dict2)
test_dict3 = {"0":test_product1, "1":Widget(), "2":test_product3}
complex_result = self.client.dictionary.get_complex_item_empty()
self.assertEqual(complex_result, test_dict3)
self.assertIsNone(self.client.dictionary.get_array_null())
self.assertEqual({}, self.client.dictionary.get_array_empty())
list_dict = {"0":["1","2","3"], "1":None, "2":["7","8","9"]}
array_result = self.client.dictionary.get_array_item_null()
self.assertEqual(list_dict, array_result)
list_dict = {"0":["1","2","3"], "1":[], "2":["7","8","9"]}
array_result = self.client.dictionary.get_array_item_empty()
self.assertEqual(list_dict, array_result)
self.assertIsNone(self.client.dictionary.get_dictionary_null())
self.assertEqual({}, self.client.dictionary.get_dictionary_empty())
dict_dict = {"0":{"1":"one","2":"two","3":"three"},
"1":None,
"2":{"7":"seven","8":"eight","9":"nine"}}
dict_result = self.client.dictionary.get_dictionary_item_null()
self.assertEqual(dict_dict, dict_result)
dict_dict = {"0":{"1":"one","2":"two","3":"three"},
"1":{},
"2":{"7":"seven","8":"eight","9":"nine"}}
dict_result = self.client.dictionary.get_dictionary_item_empty()
self.assertEqual(dict_dict, dict_result)
if __name__ == '__main__':
unittest.main()
|
mit
|
Nate-Devv/Tuxemon
|
tuxemon/core/components/eztext.py
|
3
|
12040
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Tuxemon
# Copyright (C) 2014, William Edwards <[email protected]>,
# Benjamin Bean <[email protected]>
#
# This file is part of Tuxemon.
#
# Tuxemon is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Tuxemon is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Tuxemon. If not, see <http://www.gnu.org/licenses/>.
#
# Contributor(s):
#
# Anonymous <http://www.pygame.org/project-EzText-920-.html>
#
#
# core.components.eztext Text input module
#
#
from pygame.locals import *
import pygame, string
class ConfigError(KeyError): pass
class Config:
""" A utility for configuration """
    def __init__(self, options, *look_for):
        assertions = []
        for key in look_for:
            # Each `key` is a (name, default-source) pair; setattr/eval
            # replaces the original string-built exec calls without
            # changing behavior.
            if key[0] in options.keys():
                setattr(self, key[0], options[key[0]])
            else:
                setattr(self, key[0], eval(key[1]))
            assertions.append(key[0])
        for key in options.keys():
            if key not in assertions:
                raise ConfigError(key + ' not expected as option')
class Input:
""" A text input for pygame apps """
def __init__(self, **options):
""" Options: x, y, font, color, restricted, maxlength, prompt """
self.options = Config(options, ['x', '0'], ['y', '0'], ['font', 'pygame.font.Font("resources/font/PressStart2P.ttf", 14)'],
['color', '(0,0,0)'], ['restricted', '\'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!"#$%&\\\'()*+,-./:;<=>?@[\]^_`{|}~\''],
['maxlength', '-1'], ['prompt', '\'\''])
        self.x = self.options.x
        self.y = self.options.y
        self.font = self.options.font
        self.color = self.options.color
        self.restricted = self.options.restricted
        self.maxlength = self.options.maxlength
        self.prompt = self.options.prompt
        self.value = ''
        self.shifted = False
def set_pos(self, x, y):
""" Set the position to x, y """
self.x = x
self.y = y
def set_font(self, font):
""" Set the font for the input """
self.font = font
def draw(self, surface):
""" Draw the text input to a surface """
text = self.font.render(self.prompt+self.value, 1, self.color)
surface.blit(text, (self.x, self.y))
def update(self, events):
""" Update the input based on passed events """
for event in events:
if event.type == KEYUP:
if event.key == K_LSHIFT or event.key == K_RSHIFT: self.shifted = False
if event.type == KEYDOWN:
if event.key == K_BACKSPACE: self.value = self.value[:-1]
elif event.key == K_LSHIFT or event.key == K_RSHIFT: self.shifted = True
elif event.key == K_SPACE: self.value += ' '
                    # The original per-key if/elif chains are collapsed into
                    # lookup tables without changing behavior: each pygame key
                    # maps to its unshifted or shifted character, and a single
                    # lookup appends the character when self.restricted allows it.
                    plain = {K_BACKQUOTE: '`', K_MINUS: '-', K_EQUALS: '=',
                             K_LEFTBRACKET: '[', K_RIGHTBRACKET: ']',
                             K_BACKSLASH: '\\', K_SEMICOLON: ';',
                             K_QUOTE: '\'', K_COMMA: ',', K_PERIOD: '.',
                             K_SLASH: '/'}
                    shift = {K_BACKQUOTE: '~', K_MINUS: '_', K_EQUALS: '+',
                             K_LEFTBRACKET: '{', K_RIGHTBRACKET: '}',
                             K_BACKSLASH: '|', K_SEMICOLON: ':',
                             K_QUOTE: '"', K_COMMA: '<', K_PERIOD: '>',
                             K_SLASH: '?'}
                    for offset, letter in enumerate(string.ascii_lowercase):
                        # K_a..K_z and K_0..K_9 are consecutive key codes.
                        plain[K_a + offset] = letter
                        shift[K_a + offset] = letter.upper()
                    for offset, digit in enumerate('0123456789'):
                        plain[K_0 + offset] = digit
                        shift[K_0 + offset] = ')!@#$%^&*('[offset]
                    char = (shift if self.shifted else plain).get(event.key)
                    if char is not None and char in self.restricted:
                        self.value += char
if len(self.value) > self.maxlength and self.maxlength >= 0: self.value = self.value[:-1]
|
gpl-3.0
|
clemkoa/scikit-learn
|
examples/covariance/plot_outlier_detection.py
|
15
|
5121
|
"""
==========================================
Outlier detection with several methods.
==========================================
When the amount of contamination is known, this example illustrates four
different ways of performing :ref:`outlier_detection`:
- based on a robust estimator of covariance, which assumes that the
data are Gaussian distributed and performs better than the One-Class SVM
in that case.
- using the One-Class SVM and its ability to capture the shape of the
data set, hence performing better when the data is strongly
non-Gaussian, i.e. with two well-separated clusters;
- using the Isolation Forest algorithm, which is based on random forests and
hence more adapted to large-dimensional settings, even if it performs
quite well in the examples below.
- using the Local Outlier Factor to measure the local deviation of a given
data point with respect to its neighbors by comparing their local density.
The ground truth about inliers and outliers is given by the points' colors
while the orange-filled area indicates which points are reported as inliers
by each method.
Here, we assume that we know the fraction of outliers in the datasets.
Thus rather than using the 'predict' method of the objects, we set the
threshold on the decision_function to separate out the corresponding
fraction.
"""
import numpy as np
from scipy import stats
import matplotlib.pyplot as plt
import matplotlib.font_manager
from sklearn import svm
from sklearn.covariance import EllipticEnvelope
from sklearn.ensemble import IsolationForest
from sklearn.neighbors import LocalOutlierFactor
print(__doc__)
rng = np.random.RandomState(42)
# Example settings
n_samples = 200
outliers_fraction = 0.25
clusters_separation = [0, 1, 2]
# define four outlier detection tools to be compared
classifiers = {
"One-Class SVM": svm.OneClassSVM(nu=0.95 * outliers_fraction + 0.05,
kernel="rbf", gamma=0.1),
"Robust covariance": EllipticEnvelope(contamination=outliers_fraction),
"Isolation Forest": IsolationForest(max_samples=n_samples,
contamination=outliers_fraction,
random_state=rng),
"Local Outlier Factor": LocalOutlierFactor(
n_neighbors=35,
contamination=outliers_fraction)}
# Compare given classifiers under given settings
xx, yy = np.meshgrid(np.linspace(-7, 7, 100), np.linspace(-7, 7, 100))
n_inliers = int((1. - outliers_fraction) * n_samples)
n_outliers = int(outliers_fraction * n_samples)
ground_truth = np.ones(n_samples, dtype=int)
ground_truth[-n_outliers:] = -1
# Fit the problem with varying cluster separation
for i, offset in enumerate(clusters_separation):
np.random.seed(42)
# Data generation
X1 = 0.3 * np.random.randn(n_inliers // 2, 2) - offset
X2 = 0.3 * np.random.randn(n_inliers // 2, 2) + offset
X = np.r_[X1, X2]
# Add outliers
X = np.r_[X, np.random.uniform(low=-6, high=6, size=(n_outliers, 2))]
# Fit the model
plt.figure(figsize=(9, 7))
for i, (clf_name, clf) in enumerate(classifiers.items()):
# fit the data and tag outliers
if clf_name == "Local Outlier Factor":
y_pred = clf.fit_predict(X)
scores_pred = clf.negative_outlier_factor_
else:
clf.fit(X)
scores_pred = clf.decision_function(X)
y_pred = clf.predict(X)
threshold = stats.scoreatpercentile(scores_pred,
100 * outliers_fraction)
n_errors = (y_pred != ground_truth).sum()
# plot the levels lines and the points
if clf_name == "Local Outlier Factor":
# decision_function is private for LOF
Z = clf._decision_function(np.c_[xx.ravel(), yy.ravel()])
else:
Z = clf.decision_function(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
subplot = plt.subplot(2, 2, i + 1)
subplot.contourf(xx, yy, Z, levels=np.linspace(Z.min(), threshold, 7),
cmap=plt.cm.Blues_r)
a = subplot.contour(xx, yy, Z, levels=[threshold],
linewidths=2, colors='red')
subplot.contourf(xx, yy, Z, levels=[threshold, Z.max()],
colors='orange')
b = subplot.scatter(X[:-n_outliers, 0], X[:-n_outliers, 1], c='white',
s=20, edgecolor='k')
c = subplot.scatter(X[-n_outliers:, 0], X[-n_outliers:, 1], c='black',
s=20, edgecolor='k')
subplot.axis('tight')
subplot.legend(
[a.collections[0], b, c],
['learned decision function', 'true inliers', 'true outliers'],
prop=matplotlib.font_manager.FontProperties(size=10),
loc='lower right')
subplot.set_xlabel("%d. %s (errors: %d)" % (i + 1, clf_name, n_errors))
subplot.set_xlim((-7, 7))
subplot.set_ylim((-7, 7))
plt.subplots_adjust(0.04, 0.1, 0.96, 0.94, 0.1, 0.26)
plt.suptitle("Outlier detection")
plt.show()
|
bsd-3-clause
|
backtou/longlab
|
gnuradio-core/src/python/gnuradio/gr/qa_copy.py
|
18
|
1851
|
#!/usr/bin/env python
#
# Copyright 2009,2010 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, gr_unittest
class test_copy(gr_unittest.TestCase):
def setUp (self):
self.tb = gr.top_block ()
def tearDown (self):
self.tb = None
def test_copy (self):
src_data = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
expected_result = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
src = gr.vector_source_b(src_data)
op = gr.copy(gr.sizeof_char)
dst = gr.vector_sink_b()
self.tb.connect(src, op, dst)
self.tb.run()
dst_data = dst.data()
self.assertEqual(expected_result, dst_data)
def test_copy_drop (self):
src_data = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
expected_result = ()
src = gr.vector_source_b(src_data)
op = gr.copy(gr.sizeof_char)
op.set_enabled(False)
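        # With the copy block disabled, every sample is dropped, so the
        # sink should receive no data at all.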
dst = gr.vector_sink_b()
self.tb.connect(src, op, dst)
self.tb.run()
dst_data = dst.data()
self.assertEqual(expected_result, dst_data)
if __name__ == '__main__':
gr_unittest.run(test_copy, "test_copy.xml")
|
gpl-3.0
|
dimdung/boto
|
tests/unit/vpc/test_internetgateway.py
|
114
|
6080
|
from tests.unit import unittest
from tests.unit import AWSMockServiceTestCase
from boto.vpc import VPCConnection, InternetGateway
class TestDescribeInternetGateway(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return b"""
<DescribeInternetGatewaysResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<internetGatewaySet>
<item>
<internetGatewayId>igw-eaad4883EXAMPLE</internetGatewayId>
<attachmentSet>
<item>
<vpcId>vpc-11ad4878</vpcId>
<state>available</state>
</item>
</attachmentSet>
<tagSet/>
</item>
</internetGatewaySet>
</DescribeInternetGatewaysResponse>
"""
def test_describe_internet_gateway(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.get_all_internet_gateways(
'igw-eaad4883EXAMPLE', filters=[('attachment.state', ['available', 'pending'])])
self.assert_request_parameters({
'Action': 'DescribeInternetGateways',
'InternetGatewayId.1': 'igw-eaad4883EXAMPLE',
'Filter.1.Name': 'attachment.state',
'Filter.1.Value.1': 'available',
'Filter.1.Value.2': 'pending'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(len(api_response), 1)
self.assertIsInstance(api_response[0], InternetGateway)
self.assertEqual(api_response[0].id, 'igw-eaad4883EXAMPLE')
class TestCreateInternetGateway(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return b"""
<CreateInternetGatewayResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<internetGateway>
<internetGatewayId>igw-eaad4883</internetGatewayId>
<attachmentSet/>
<tagSet/>
</internetGateway>
</CreateInternetGatewayResponse>
"""
def test_create_internet_gateway(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.create_internet_gateway()
self.assert_request_parameters({
'Action': 'CreateInternetGateway'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertIsInstance(api_response, InternetGateway)
self.assertEqual(api_response.id, 'igw-eaad4883')
class TestDeleteInternetGateway(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return b"""
<DeleteInternetGatewayResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</DeleteInternetGatewayResponse>
"""
def test_delete_internet_gateway(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.delete_internet_gateway('igw-eaad4883')
self.assert_request_parameters({
'Action': 'DeleteInternetGateway',
'InternetGatewayId': 'igw-eaad4883'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
class TestAttachInternetGateway(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return b"""
<AttachInternetGatewayResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</AttachInternetGatewayResponse>
"""
def test_attach_internet_gateway(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.attach_internet_gateway(
'igw-eaad4883', 'vpc-11ad4878')
self.assert_request_parameters({
'Action': 'AttachInternetGateway',
'InternetGatewayId': 'igw-eaad4883',
'VpcId': 'vpc-11ad4878'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
class TestDetachInternetGateway(AWSMockServiceTestCase):
connection_class = VPCConnection
def default_body(self):
return b"""
<DetachInternetGatewayResponse xmlns="http://ec2.amazonaws.com/doc/2013-10-01/">
<requestId>59dbff89-35bd-4eac-99ed-be587EXAMPLE</requestId>
<return>true</return>
</DetachInternetGatewayResponse>
"""
def test_detach_internet_gateway(self):
self.set_http_response(status_code=200)
api_response = self.service_connection.detach_internet_gateway(
'igw-eaad4883', 'vpc-11ad4878')
self.assert_request_parameters({
'Action': 'DetachInternetGateway',
'InternetGatewayId': 'igw-eaad4883',
'VpcId': 'vpc-11ad4878'},
ignore_params_values=['AWSAccessKeyId', 'SignatureMethod',
'SignatureVersion', 'Timestamp',
'Version'])
self.assertEquals(api_response, True)
if __name__ == '__main__':
unittest.main()
|
mit
|
tanderegg/ansible-modules-core
|
cloud/amazon/ec2_facts.py
|
110
|
6444
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: ec2_facts
short_description: Gathers facts about remote hosts within ec2 (aws)
version_added: "1.0"
options:
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: '1.5.1'
description:
- This module fetches data from the metadata servers in ec2 (aws) as per
http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html.
The module must be called from within the EC2 instance itself.
notes:
- Parameters to filter on ec2_facts may be added later.
author: "Silviu Dicu (@silviud) <[email protected]>"
'''
EXAMPLES = '''
# Conditional example
- name: Gather facts
action: ec2_facts
- name: Conditional
action: debug msg="This instance is a t1.micro"
when: ansible_ec2_instance_type == "t1.micro"
'''
import socket
import re
socket.setdefaulttimeout(5)
class Ec2Metadata(object):
ec2_metadata_uri = 'http://169.254.169.254/latest/meta-data/'
ec2_sshdata_uri = 'http://169.254.169.254/latest/meta-data/public-keys/0/openssh-key'
ec2_userdata_uri = 'http://169.254.169.254/latest/user-data/'
AWS_REGIONS = ('ap-northeast-1',
'ap-southeast-1',
'ap-southeast-2',
'eu-central-1',
'eu-west-1',
'sa-east-1',
'us-east-1',
'us-west-1',
'us-west-2',
'us-gov-west-1'
)
def __init__(self, module, ec2_metadata_uri=None, ec2_sshdata_uri=None, ec2_userdata_uri=None):
self.module = module
self.uri_meta = ec2_metadata_uri or self.ec2_metadata_uri
self.uri_user = ec2_userdata_uri or self.ec2_userdata_uri
self.uri_ssh = ec2_sshdata_uri or self.ec2_sshdata_uri
self._data = {}
self._prefix = 'ansible_ec2_%s'
def _fetch(self, url):
(response, info) = fetch_url(self.module, url, force=True)
if response:
data = response.read()
else:
data = None
return data
def _mangle_fields(self, fields, uri, filter_patterns=['public-keys-0']):
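        # Flattens fetched metadata keys into fact names: e.g. the key
        # '<uri>placement/availability-zone' becomes
        # 'ansible_ec2_placement-availability-zone' (dashes are rewritten
        # to underscores later by fix_invalid_varnames).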
new_fields = {}
for key, value in fields.iteritems():
split_fields = key[len(uri):].split('/')
if len(split_fields) > 1 and split_fields[1]:
new_key = "-".join(split_fields)
new_fields[self._prefix % new_key] = value
else:
new_key = "".join(split_fields)
new_fields[self._prefix % new_key] = value
for pattern in filter_patterns:
for key in new_fields.keys():
match = re.search(pattern, key)
if match:
new_fields.pop(key)
return new_fields
def fetch(self, uri, recurse=True):
raw_subfields = self._fetch(uri)
if not raw_subfields:
return
subfields = raw_subfields.split('\n')
for field in subfields:
if field.endswith('/') and recurse:
self.fetch(uri + field)
if uri.endswith('/'):
new_uri = uri + field
else:
new_uri = uri + '/' + field
if new_uri not in self._data and not new_uri.endswith('/'):
content = self._fetch(new_uri)
if field == 'security-groups':
sg_fields = ",".join(content.split('\n'))
self._data['%s' % (new_uri)] = sg_fields
else:
self._data['%s' % (new_uri)] = content
def fix_invalid_varnames(self, data):
"""Change ':'' and '-' to '_' to ensure valid template variable names"""
for (key, value) in data.items():
if ':' in key or '-' in key:
newkey = key.replace(':','_').replace('-','_')
del data[key]
data[newkey] = value
def add_ec2_region(self, data):
"""Use the 'ansible_ec2_placement_availability_zone' key/value
pair to add 'ansible_ec2_placement_region' key/value pair with
the EC2 region name.
"""
# Only add a 'ansible_ec2_placement_region' key if the
# 'ansible_ec2_placement_availability_zone' exists.
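        # Example: zone 'us-east-1b' yields region 'us-east-1'.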
zone = data.get('ansible_ec2_placement_availability_zone')
if zone is not None:
# Use the zone name as the region name unless the zone
# name starts with a known AWS region name.
region = zone
for r in self.AWS_REGIONS:
if zone.startswith(r):
region = r
break
data['ansible_ec2_placement_region'] = region
def run(self):
self.fetch(self.uri_meta) # populate _data
data = self._mangle_fields(self._data, self.uri_meta)
data[self._prefix % 'user-data'] = self._fetch(self.uri_user)
data[self._prefix % 'public-key'] = self._fetch(self.uri_ssh)
self.fix_invalid_varnames(data)
self.add_ec2_region(data)
return data
def main():
argument_spec = url_argument_spec()
module = AnsibleModule(
argument_spec = argument_spec,
supports_check_mode = True,
)
ec2_facts = Ec2Metadata(module).run()
ec2_facts_result = dict(changed=False, ansible_facts=ec2_facts)
module.exit_json(**ec2_facts_result)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.urls import *
main()
|
gpl-3.0
|
SantosDevelopers/sborganicos
|
venv/lib/python3.5/site-packages/django/contrib/gis/measure.py
|
100
|
12463
|
# Copyright (c) 2007, Robert Coup <[email protected]>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# 3. Neither the name of Distance nor the names of its contributors may be used
# to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
"""
Distance and Area objects to allow for sensible and convenient calculation
and conversions.
Authors: Robert Coup, Justin Bronn, Riccardo Di Virgilio
Inspired by GeoPy (https://github.com/geopy/geopy)
and Geoff Biggs' PhD work on dimensioned units for robotics.
"""
from decimal import Decimal
from functools import total_ordering
from django.utils import six
__all__ = ['A', 'Area', 'D', 'Distance']
NUMERIC_TYPES = six.integer_types + (float, Decimal)
AREA_PREFIX = "sq_"
def pretty_name(obj):
return obj.__name__ if obj.__class__ == type else obj.__class__.__name__
@total_ordering
class MeasureBase(object):
STANDARD_UNIT = None
ALIAS = {}
UNITS = {}
LALIAS = {}
def __init__(self, default_unit=None, **kwargs):
value, self._default_unit = self.default_units(kwargs)
setattr(self, self.STANDARD_UNIT, value)
if default_unit and isinstance(default_unit, six.string_types):
self._default_unit = default_unit
def _get_standard(self):
return getattr(self, self.STANDARD_UNIT)
def _set_standard(self, value):
setattr(self, self.STANDARD_UNIT, value)
standard = property(_get_standard, _set_standard)
def __getattr__(self, name):
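        # Attribute access converts from the standard unit; e.g. with the
        # Distance subclass below, Distance(m=1000).km == 1.0.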
if name in self.UNITS:
return self.standard / self.UNITS[name]
else:
raise AttributeError('Unknown unit type: %s' % name)
def __repr__(self):
return '%s(%s=%s)' % (pretty_name(self), self._default_unit, getattr(self, self._default_unit))
def __str__(self):
return '%s %s' % (getattr(self, self._default_unit), self._default_unit)
# **** Comparison methods ****
def __eq__(self, other):
if isinstance(other, self.__class__):
return self.standard == other.standard
else:
return NotImplemented
def __lt__(self, other):
if isinstance(other, self.__class__):
return self.standard < other.standard
else:
return NotImplemented
# **** Operators methods ****
def __add__(self, other):
if isinstance(other, self.__class__):
return self.__class__(
default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard + other.standard)}
)
else:
raise TypeError('%(class)s must be added with %(class)s' % {"class": pretty_name(self)})
def __iadd__(self, other):
if isinstance(other, self.__class__):
self.standard += other.standard
return self
else:
raise TypeError('%(class)s must be added with %(class)s' % {"class": pretty_name(self)})
def __sub__(self, other):
if isinstance(other, self.__class__):
return self.__class__(
default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard - other.standard)}
)
else:
raise TypeError('%(class)s must be subtracted from %(class)s' % {"class": pretty_name(self)})
def __isub__(self, other):
if isinstance(other, self.__class__):
self.standard -= other.standard
return self
else:
raise TypeError('%(class)s must be subtracted from %(class)s' % {"class": pretty_name(self)})
def __mul__(self, other):
if isinstance(other, NUMERIC_TYPES):
return self.__class__(
default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard * other)}
)
else:
raise TypeError('%(class)s must be multiplied with number' % {"class": pretty_name(self)})
def __imul__(self, other):
if isinstance(other, NUMERIC_TYPES):
self.standard *= float(other)
return self
else:
raise TypeError('%(class)s must be multiplied with number' % {"class": pretty_name(self)})
def __rmul__(self, other):
return self * other
def __truediv__(self, other):
if isinstance(other, self.__class__):
return self.standard / other.standard
if isinstance(other, NUMERIC_TYPES):
return self.__class__(
default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard / other)}
)
else:
raise TypeError('%(class)s must be divided with number or %(class)s' % {"class": pretty_name(self)})
def __div__(self, other): # Python 2 compatibility
return type(self).__truediv__(self, other)
def __itruediv__(self, other):
if isinstance(other, NUMERIC_TYPES):
self.standard /= float(other)
return self
else:
raise TypeError('%(class)s must be divided with number' % {"class": pretty_name(self)})
def __idiv__(self, other): # Python 2 compatibility
return type(self).__itruediv__(self, other)
def __bool__(self):
return bool(self.standard)
def __nonzero__(self): # Python 2 compatibility
return type(self).__bool__(self)
def default_units(self, kwargs):
"""
Return the unit value and the default units specified
from the given keyword arguments dictionary.
"""
val = 0.0
default_unit = self.STANDARD_UNIT
for unit, value in six.iteritems(kwargs):
if not isinstance(value, float):
value = float(value)
if unit in self.UNITS:
val += self.UNITS[unit] * value
default_unit = unit
elif unit in self.ALIAS:
u = self.ALIAS[unit]
val += self.UNITS[u] * value
default_unit = u
else:
lower = unit.lower()
if lower in self.UNITS:
val += self.UNITS[lower] * value
default_unit = lower
elif lower in self.LALIAS:
u = self.LALIAS[lower]
val += self.UNITS[u] * value
default_unit = u
else:
raise AttributeError('Unknown unit type: %s' % unit)
return val, default_unit
@classmethod
def unit_attname(cls, unit_str):
"""
Retrieves the unit attribute name for the given unit string.
For example, if the given unit string is 'metre', 'm' would be returned.
An exception is raised if an attribute cannot be found.
"""
lower = unit_str.lower()
if unit_str in cls.UNITS:
return unit_str
elif lower in cls.UNITS:
return lower
elif lower in cls.LALIAS:
return cls.LALIAS[lower]
else:
raise Exception('Could not find a unit keyword associated with "%s"' % unit_str)
class Distance(MeasureBase):
STANDARD_UNIT = "m"
UNITS = {
'chain': 20.1168,
'chain_benoit': 20.116782,
'chain_sears': 20.1167645,
'british_chain_benoit': 20.1167824944,
'british_chain_sears': 20.1167651216,
'british_chain_sears_truncated': 20.116756,
'cm': 0.01,
'british_ft': 0.304799471539,
'british_yd': 0.914398414616,
'clarke_ft': 0.3047972654,
'clarke_link': 0.201166195164,
'fathom': 1.8288,
'ft': 0.3048,
'german_m': 1.0000135965,
'gold_coast_ft': 0.304799710181508,
'indian_yd': 0.914398530744,
'inch': 0.0254,
'km': 1000.0,
'link': 0.201168,
'link_benoit': 0.20116782,
'link_sears': 0.20116765,
'm': 1.0,
'mi': 1609.344,
'mm': 0.001,
'nm': 1852.0,
'nm_uk': 1853.184,
'rod': 5.0292,
'sears_yd': 0.91439841,
'survey_ft': 0.304800609601,
'um': 0.000001,
'yd': 0.9144,
}
# Unit aliases for `UNIT` terms encountered in Spatial Reference WKT.
ALIAS = {
'centimeter': 'cm',
'foot': 'ft',
'inches': 'inch',
'kilometer': 'km',
'kilometre': 'km',
'meter': 'm',
'metre': 'm',
'micrometer': 'um',
'micrometre': 'um',
'millimeter': 'mm',
'millimetre': 'mm',
'mile': 'mi',
'yard': 'yd',
'British chain (Benoit 1895 B)': 'british_chain_benoit',
'British chain (Sears 1922)': 'british_chain_sears',
'British chain (Sears 1922 truncated)': 'british_chain_sears_truncated',
'British foot (Sears 1922)': 'british_ft',
'British foot': 'british_ft',
'British yard (Sears 1922)': 'british_yd',
'British yard': 'british_yd',
"Clarke's Foot": 'clarke_ft',
"Clarke's link": 'clarke_link',
'Chain (Benoit)': 'chain_benoit',
'Chain (Sears)': 'chain_sears',
'Foot (International)': 'ft',
'German legal metre': 'german_m',
'Gold Coast foot': 'gold_coast_ft',
'Indian yard': 'indian_yd',
'Link (Benoit)': 'link_benoit',
'Link (Sears)': 'link_sears',
'Nautical Mile': 'nm',
'Nautical Mile (UK)': 'nm_uk',
'US survey foot': 'survey_ft',
'U.S. Foot': 'survey_ft',
'Yard (Indian)': 'indian_yd',
'Yard (Sears)': 'sears_yd'
}
LALIAS = {k.lower(): v for k, v in ALIAS.items()}
def __mul__(self, other):
if isinstance(other, self.__class__):
return Area(
default_unit=AREA_PREFIX + self._default_unit,
**{AREA_PREFIX + self.STANDARD_UNIT: (self.standard * other.standard)}
)
elif isinstance(other, NUMERIC_TYPES):
return self.__class__(
default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard * other)}
)
else:
raise TypeError('%(distance)s must be multiplied with number or %(distance)s' % {
"distance": pretty_name(self.__class__),
})
class Area(MeasureBase):
STANDARD_UNIT = AREA_PREFIX + Distance.STANDARD_UNIT
# Getting the square units values and the alias dictionary.
UNITS = {'%s%s' % (AREA_PREFIX, k): v ** 2 for k, v in Distance.UNITS.items()}
ALIAS = {k: '%s%s' % (AREA_PREFIX, v) for k, v in Distance.ALIAS.items()}
LALIAS = {k.lower(): v for k, v in ALIAS.items()}
def __truediv__(self, other):
if isinstance(other, NUMERIC_TYPES):
return self.__class__(
default_unit=self._default_unit,
**{self.STANDARD_UNIT: (self.standard / other)}
)
else:
raise TypeError('%(class)s must be divided by a number' % {"class": pretty_name(self)})
def __div__(self, other): # Python 2 compatibility
return type(self).__truediv__(self, other)
# Shortcuts
D = Distance
A = Area
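# Illustrative usage sketch (not part of the original module). It assumes
# MeasureBase, defined earlier in this file, provides __add__ and
# __getattr__-based unit access, which the methods above rely on:
#
#   d = D(km=1) + D(m=500)     # 1.5 km, stored internally in metres
#   d.m                        # 1500.0
#   a = D(m=10) * D(m=5)       # Distance * Distance yields an Area
#   a.standard                 # 50.0 (square metres)
#   (D(m=10) / 2).m            # 5.0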
|
mit
|
diagramsoftware/odoo
|
addons/l10n_tr/__openerp__.py
|
259
|
2056
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Turkey - Accounting',
'version': '1.beta',
'category': 'Localization/Account Charts',
'description': """
OpenERP module for the Turkish uniform chart of accounts template.
===================================================================
After this module is installed, the Accounting configuration wizard runs.
* The wizard will ask for details such as the chart of accounts template, the
company the chart will be set up for, your bank account information, and the
related currency.
""",
'author': 'Ahmet Altınışık',
'maintainer': 'https://launchpad.net/~openerp-turkey',
'website': 'https://launchpad.net/openerp-turkey',
'depends': [
'account',
'base_vat',
'account_chart',
],
'data': [
'account_code_template.xml',
'account_tdhp_turkey.xml',
'account_tax_code_template.xml',
'account_chart_template.xml',
'account_tax_template.xml',
'l10n_tr_wizard.xml',
],
'demo': [],
'installable': True,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
agpl-3.0
|
edxnercel/edx-platform
|
lms/djangoapps/course_wiki/tests/test_tab.py
|
158
|
2454
|
"""
Tests for wiki views.
"""
from django.conf import settings
from django.test.client import RequestFactory
from courseware.tabs import get_course_tab_list
from student.tests.factories import AdminFactory, UserFactory
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.tests.factories import CourseFactory
class WikiTabTestCase(ModuleStoreTestCase):
"""Test cases for Wiki Tab."""
def setUp(self):
super(WikiTabTestCase, self).setUp()
self.course = CourseFactory.create()
self.instructor = AdminFactory.create()
self.user = UserFactory()
def get_wiki_tab(self, user, course):
"""Returns true if the "Wiki" tab is shown."""
request = RequestFactory().request()
request.user = user
all_tabs = get_course_tab_list(request, course)
wiki_tabs = [tab for tab in all_tabs if tab.name == 'Wiki']
return wiki_tabs[0] if len(wiki_tabs) == 1 else None
def test_wiki_enabled_and_public(self):
"""
Test wiki tab when Enabled setting is True and the wiki is open to
the public.
"""
settings.WIKI_ENABLED = True
self.course.allow_public_wiki_access = True
self.assertIsNotNone(self.get_wiki_tab(self.user, self.course))
def test_wiki_enabled_and_not_public(self):
"""
Test wiki when it is enabled but not open to the public
"""
settings.WIKI_ENABLED = True
self.course.allow_public_wiki_access = False
self.assertIsNone(self.get_wiki_tab(self.user, self.course))
self.assertIsNotNone(self.get_wiki_tab(self.instructor, self.course))
def test_wiki_enabled_false(self):
"""Test wiki tab when Enabled setting is False"""
settings.WIKI_ENABLED = False
self.assertIsNone(self.get_wiki_tab(self.user, self.course))
self.assertIsNone(self.get_wiki_tab(self.instructor, self.course))
def test_wiki_visibility(self):
"""Test toggling of visibility of wiki tab"""
settings.WIKI_ENABLED = True
self.course.allow_public_wiki_access = True
wiki_tab = self.get_wiki_tab(self.user, self.course)
self.assertIsNotNone(wiki_tab)
self.assertTrue(wiki_tab.is_hideable)
wiki_tab.is_hidden = True
self.assertTrue(wiki_tab['is_hidden'])
wiki_tab['is_hidden'] = False
self.assertFalse(wiki_tab.is_hidden)
|
agpl-3.0
|
edulramirez/nova
|
nova/weights.py
|
70
|
4450
|
# Copyright (c) 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Pluggable Weighing support
"""
import abc
import six
from nova import loadables
def normalize(weight_list, minval=None, maxval=None):
"""Normalize the values in a list between 0 and 1.0.
The normalization is done with respect to the lowest and highest values in
weight_list. If the minval and/or maxval parameters are set, these values
will be used instead of the minimum and maximum from the list.
If all the values are equal, they are normalized to 0.
"""
if not weight_list:
return ()
if maxval is None:
maxval = max(weight_list)
if minval is None:
minval = min(weight_list)
maxval = float(maxval)
minval = float(minval)
if minval == maxval:
return [0] * len(weight_list)
range_ = maxval - minval
return ((i - minval) / range_ for i in weight_list)
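# Worked example: bounds default to the list's own min and max, so
# list(normalize([1.0, 3.0, 5.0])) == [0.0, 0.5, 1.0], while explicit bounds
# rescale accordingly:
# list(normalize([1.0, 3.0, 5.0], minval=0.0, maxval=10.0)) == [0.1, 0.3, 0.5]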
class WeighedObject(object):
"""Object with weight information."""
def __init__(self, obj, weight):
self.obj = obj
self.weight = weight
def __repr__(self):
return "<WeighedObject '%s': %s>" % (self.obj, self.weight)
@six.add_metaclass(abc.ABCMeta)
class BaseWeigher(object):
"""Base class for pluggable weighers.
The attributes maxval and minval can be specified to set up the maximum
and minimum values for the weighed objects. These values will then be
taken into account in the normalization step, instead of taking the values
from the calculated weights.
"""
minval = None
maxval = None
def weight_multiplier(self):
"""How weighted this weigher should be.
Override this method in a subclass, so that the returned value is
read from a configuration option to permit operators to specify a
multiplier for the weigher.
"""
return 1.0
@abc.abstractmethod
def _weigh_object(self, obj, weight_properties):
"""Weigh an specific object."""
def weigh_objects(self, weighed_obj_list, weight_properties):
"""Weigh multiple objects.
Override in a subclass if you need access to all objects in order
to calculate weights. Do not modify the weight of an object here,
just return a list of weights.
"""
# Calculate the weights
weights = []
for obj in weighed_obj_list:
weight = self._weigh_object(obj.obj, weight_properties)
# Record the min and max values if they are None. If they are
# anything but None we assume that the weigher has set them.
if self.minval is None:
self.minval = weight
if self.maxval is None:
self.maxval = weight
if weight < self.minval:
self.minval = weight
elif weight > self.maxval:
self.maxval = weight
weights.append(weight)
return weights
class BaseWeightHandler(loadables.BaseLoader):
object_class = WeighedObject
def get_weighed_objects(self, weighers, obj_list, weighing_properties):
"""Return a sorted (descending), normalized list of WeighedObjects."""
weighed_objs = [self.object_class(obj, 0.0) for obj in obj_list]
if len(weighed_objs) <= 1:
return weighed_objs
for weigher in weighers:
weights = weigher.weigh_objects(weighed_objs, weighing_properties)
# Normalize the weights
weights = normalize(weights,
minval=weigher.minval,
maxval=weigher.maxval)
for i, weight in enumerate(weights):
obj = weighed_objs[i]
obj.weight += weigher.weight_multiplier() * weight
return sorted(weighed_objs, key=lambda x: x.weight, reverse=True)
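# Illustrative sketch of how the pieces above compose (RamWeigher and the
# host objects are hypothetical; it also assumes loadables.BaseLoader takes
# the loadable base class as its constructor argument):
#
#   class RamWeigher(BaseWeigher):
#       def _weigh_object(self, host, weight_properties):
#           return host.free_ram_mb
#
#   handler = BaseWeightHandler(BaseWeigher)
#   ranked = handler.get_weighed_objects([RamWeigher()], hosts, {})
#   # ranked[0] is the WeighedObject with the highest normalized weight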
|
apache-2.0
|
Fizzixnerd/frontdown
|
installer.py
|
1
|
1654
|
import subprocess
class InstallError(Exception):
pass
class AptError(InstallError):
pass
class YumError(InstallError):
pass
class UnsupportedInstallerError(InstallError):
pass
class Installer:
"""Abstract Base Class for an installer. Represents the installation
system for the current platform (i.e. apt, yum, pacman, emerge,
etc.).
"""
def __init__(self):
self.update()
def install(self, apps=[]):
raise InstallError("This is a generic installer. Use a specialized one.")
def update(self):
raise InstallError("This is a generic installer. Use a specialized one.")
class AptInstaller(Installer):
"""Installer for apt-based systems.
"""
def _aptget(self, command, args=[]):
command_list = ["sudo", "apt-get", command]
command_list.extend(args)
exit_code = subprocess.call(command_list)
if not exit_code:
return 0
else:
raise AptError("Apt exited with non-zero exit code {} when called with commands {}".format(exit_code, command_list))
def update(self):
return self._aptget("update")
def install(self, apps=[]):
return self._aptget("install", apps)
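# Illustrative usage sketch (assumes an apt-based system where the current
# user can run sudo):
#
#   installer = AptInstaller()            # __init__ runs `sudo apt-get update`
#   installer.install(['git', 'curl'])    # `sudo apt-get install git curl`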
class UnsupportedInstaller(Installer):
def install(self, apps=[]):
raise UnsupportedInstallerError("This installer isn't supported yet.")
def update(self):
raise UnsupportedInstallerError("This installer isn't supported yet.")
class YumInstaller(UnsupportedInstaller):
pass
class ArchInstaller(UnsupportedInstaller):
pass
class GentooInstaller(UnsupportedInstaller):
pass
|
gpl-3.0
|
darmaa/odoo
|
addons/base_import/test_models.py
|
97
|
2366
|
from openerp.osv import orm, fields
def name(n): return 'base_import.tests.models.%s' % n
class char(orm.Model):
_name = name('char')
_columns = {
'value': fields.char('unknown', size=None)
}
class char_required(orm.Model):
_name = name('char.required')
_columns = {
'value': fields.char('unknown', size=None, required=True)
}
class char_readonly(orm.Model):
_name = name('char.readonly')
_columns = {
'value': fields.char('unknown', size=None, readonly=True)
}
class char_states(orm.Model):
_name = name('char.states')
_columns = {
'value': fields.char('unknown', size=None, readonly=True, states={'draft': [('readonly', False)]})
}
class char_noreadonly(orm.Model):
_name = name('char.noreadonly')
_columns = {
'value': fields.char('unknown', size=None, readonly=True, states={'draft': [('invisible', True)]})
}
class char_stillreadonly(orm.Model):
_name = name('char.stillreadonly')
_columns = {
'value': fields.char('unknown', size=None, readonly=True, states={'draft': [('readonly', True)]})
}
# TODO: complex field (m2m, o2m, m2o)
class m2o(orm.Model):
_name = name('m2o')
_columns = {
'value': fields.many2one(name('m2o.related'))
}
class m2o_related(orm.Model):
_name = name('m2o.related')
_columns = {
'value': fields.integer()
}
_defaults = {
'value': 42
}
class m2o_required(orm.Model):
_name = name('m2o.required')
_columns = {
'value': fields.many2one(name('m2o.required.related'), required=True)
}
class m2o_required_related(orm.Model):
_name = name('m2o.required.related')
_columns = {
'value': fields.integer()
}
_defaults = {
'value': 42
}
class o2m(orm.Model):
_name = name('o2m')
_columns = {
'value': fields.one2many(name('o2m.child'), 'parent_id')
}
class o2m_child(orm.Model):
_name = name('o2m.child')
_columns = {
'parent_id': fields.many2one(name('o2m')),
'value': fields.integer()
}
class preview_model(orm.Model):
_name = name('preview')
_columns = {
'name': fields.char('Name', size=None),
'somevalue': fields.integer('Some Value', required=True),
'othervalue': fields.integer('Other Variable'),
}
|
agpl-3.0
|
agusc/scrapy
|
tests/test_spidermiddleware_offsite.py
|
113
|
2551
|
from unittest import TestCase
from six.moves.urllib.parse import urlparse
from scrapy.http import Response, Request
from scrapy.spiders import Spider
from scrapy.spidermiddlewares.offsite import OffsiteMiddleware
from scrapy.utils.test import get_crawler
class TestOffsiteMiddleware(TestCase):
def setUp(self):
crawler = get_crawler(Spider)
self.spider = crawler._create_spider(**self._get_spiderargs())
self.mw = OffsiteMiddleware.from_crawler(crawler)
self.mw.spider_opened(self.spider)
def _get_spiderargs(self):
return dict(name='foo', allowed_domains=['scrapytest.org', 'scrapy.org'])
def test_process_spider_output(self):
res = Response('http://scrapytest.org')
onsite_reqs = [Request('http://scrapytest.org/1'),
Request('http://scrapy.org/1'),
Request('http://sub.scrapy.org/1'),
Request('http://offsite.tld/letmepass', dont_filter=True)]
offsite_reqs = [Request('http://scrapy2.org'),
Request('http://offsite.tld/'),
Request('http://offsite.tld/scrapytest.org'),
Request('http://offsite.tld/rogue.scrapytest.org'),
Request('http://rogue.scrapytest.org.haha.com'),
Request('http://roguescrapytest.org')]
reqs = onsite_reqs + offsite_reqs
out = list(self.mw.process_spider_output(res, reqs, self.spider))
self.assertEquals(out, onsite_reqs)
class TestOffsiteMiddleware2(TestOffsiteMiddleware):
def _get_spiderargs(self):
return dict(name='foo', allowed_domains=None)
def test_process_spider_output(self):
res = Response('http://scrapytest.org')
reqs = [Request('http://a.com/b.html'), Request('http://b.com/1')]
out = list(self.mw.process_spider_output(res, reqs, self.spider))
self.assertEquals(out, reqs)
class TestOffsiteMiddleware3(TestOffsiteMiddleware2):
def _get_spiderargs(self):
return dict(name='foo')
class TestOffsiteMiddleware4(TestOffsiteMiddleware3):
def _get_spiderargs(self):
bad_hostname = urlparse('http:////scrapytest.org').hostname
return dict(name='foo', allowed_domains=['scrapytest.org', None, bad_hostname])
def test_process_spider_output(self):
res = Response('http://scrapytest.org')
reqs = [Request('http://scrapytest.org/1')]
out = list(self.mw.process_spider_output(res, reqs, self.spider))
self.assertEquals(out, reqs)
|
bsd-3-clause
|
kontais/EFI-MIPS
|
ToolKit/cmds/python/Lib/stringprep.py
|
12
|
13492
|
# This file is generated by mkstringprep.py. DO NOT EDIT.
"""Library that exposes various tables found in the StringPrep RFC 3454.
There are two kinds of tables: sets, for which a member test is provided,
and mappings, for which a mapping function is provided.
"""
import unicodedata
assert unicodedata.unidata_version == '3.2.0'
def in_table_a1(code):
if unicodedata.category(code) != 'Cn': return False
c = ord(code)
if 0xFDD0 <= c < 0xFDF0: return False
return (c & 0xFFFF) not in (0xFFFE, 0xFFFF)
b1_set = set([173, 847, 6150, 6155, 6156, 6157, 8203, 8204, 8205, 8288, 65279] + range(65024,65040))
def in_table_b1(code):
return ord(code) in b1_set
b3_exceptions = {
0xb5:u'\u03bc', 0xdf:u'ss', 0x130:u'i\u0307', 0x149:u'\u02bcn',
0x17f:u's', 0x1f0:u'j\u030c', 0x345:u'\u03b9', 0x37a:u' \u03b9',
0x390:u'\u03b9\u0308\u0301', 0x3b0:u'\u03c5\u0308\u0301', 0x3c2:u'\u03c3', 0x3d0:u'\u03b2',
0x3d1:u'\u03b8', 0x3d2:u'\u03c5', 0x3d3:u'\u03cd', 0x3d4:u'\u03cb',
0x3d5:u'\u03c6', 0x3d6:u'\u03c0', 0x3f0:u'\u03ba', 0x3f1:u'\u03c1',
0x3f2:u'\u03c3', 0x3f5:u'\u03b5', 0x587:u'\u0565\u0582', 0x1e96:u'h\u0331',
0x1e97:u't\u0308', 0x1e98:u'w\u030a', 0x1e99:u'y\u030a', 0x1e9a:u'a\u02be',
0x1e9b:u'\u1e61', 0x1f50:u'\u03c5\u0313', 0x1f52:u'\u03c5\u0313\u0300', 0x1f54:u'\u03c5\u0313\u0301',
0x1f56:u'\u03c5\u0313\u0342', 0x1f80:u'\u1f00\u03b9', 0x1f81:u'\u1f01\u03b9', 0x1f82:u'\u1f02\u03b9',
0x1f83:u'\u1f03\u03b9', 0x1f84:u'\u1f04\u03b9', 0x1f85:u'\u1f05\u03b9', 0x1f86:u'\u1f06\u03b9',
0x1f87:u'\u1f07\u03b9', 0x1f88:u'\u1f00\u03b9', 0x1f89:u'\u1f01\u03b9', 0x1f8a:u'\u1f02\u03b9',
0x1f8b:u'\u1f03\u03b9', 0x1f8c:u'\u1f04\u03b9', 0x1f8d:u'\u1f05\u03b9', 0x1f8e:u'\u1f06\u03b9',
0x1f8f:u'\u1f07\u03b9', 0x1f90:u'\u1f20\u03b9', 0x1f91:u'\u1f21\u03b9', 0x1f92:u'\u1f22\u03b9',
0x1f93:u'\u1f23\u03b9', 0x1f94:u'\u1f24\u03b9', 0x1f95:u'\u1f25\u03b9', 0x1f96:u'\u1f26\u03b9',
0x1f97:u'\u1f27\u03b9', 0x1f98:u'\u1f20\u03b9', 0x1f99:u'\u1f21\u03b9', 0x1f9a:u'\u1f22\u03b9',
0x1f9b:u'\u1f23\u03b9', 0x1f9c:u'\u1f24\u03b9', 0x1f9d:u'\u1f25\u03b9', 0x1f9e:u'\u1f26\u03b9',
0x1f9f:u'\u1f27\u03b9', 0x1fa0:u'\u1f60\u03b9', 0x1fa1:u'\u1f61\u03b9', 0x1fa2:u'\u1f62\u03b9',
0x1fa3:u'\u1f63\u03b9', 0x1fa4:u'\u1f64\u03b9', 0x1fa5:u'\u1f65\u03b9', 0x1fa6:u'\u1f66\u03b9',
0x1fa7:u'\u1f67\u03b9', 0x1fa8:u'\u1f60\u03b9', 0x1fa9:u'\u1f61\u03b9', 0x1faa:u'\u1f62\u03b9',
0x1fab:u'\u1f63\u03b9', 0x1fac:u'\u1f64\u03b9', 0x1fad:u'\u1f65\u03b9', 0x1fae:u'\u1f66\u03b9',
0x1faf:u'\u1f67\u03b9', 0x1fb2:u'\u1f70\u03b9', 0x1fb3:u'\u03b1\u03b9', 0x1fb4:u'\u03ac\u03b9',
0x1fb6:u'\u03b1\u0342', 0x1fb7:u'\u03b1\u0342\u03b9', 0x1fbc:u'\u03b1\u03b9', 0x1fbe:u'\u03b9',
0x1fc2:u'\u1f74\u03b9', 0x1fc3:u'\u03b7\u03b9', 0x1fc4:u'\u03ae\u03b9', 0x1fc6:u'\u03b7\u0342',
0x1fc7:u'\u03b7\u0342\u03b9', 0x1fcc:u'\u03b7\u03b9', 0x1fd2:u'\u03b9\u0308\u0300', 0x1fd3:u'\u03b9\u0308\u0301',
0x1fd6:u'\u03b9\u0342', 0x1fd7:u'\u03b9\u0308\u0342', 0x1fe2:u'\u03c5\u0308\u0300', 0x1fe3:u'\u03c5\u0308\u0301',
0x1fe4:u'\u03c1\u0313', 0x1fe6:u'\u03c5\u0342', 0x1fe7:u'\u03c5\u0308\u0342', 0x1ff2:u'\u1f7c\u03b9',
0x1ff3:u'\u03c9\u03b9', 0x1ff4:u'\u03ce\u03b9', 0x1ff6:u'\u03c9\u0342', 0x1ff7:u'\u03c9\u0342\u03b9',
0x1ffc:u'\u03c9\u03b9', 0x20a8:u'rs', 0x2102:u'c', 0x2103:u'\xb0c',
0x2107:u'\u025b', 0x2109:u'\xb0f', 0x210b:u'h', 0x210c:u'h',
0x210d:u'h', 0x2110:u'i', 0x2111:u'i', 0x2112:u'l',
0x2115:u'n', 0x2116:u'no', 0x2119:u'p', 0x211a:u'q',
0x211b:u'r', 0x211c:u'r', 0x211d:u'r', 0x2120:u'sm',
0x2121:u'tel', 0x2122:u'tm', 0x2124:u'z', 0x2128:u'z',
0x212c:u'b', 0x212d:u'c', 0x2130:u'e', 0x2131:u'f',
0x2133:u'm', 0x213e:u'\u03b3', 0x213f:u'\u03c0', 0x2145:u'd',
0x3371:u'hpa', 0x3373:u'au', 0x3375:u'ov', 0x3380:u'pa',
0x3381:u'na', 0x3382:u'\u03bca', 0x3383:u'ma', 0x3384:u'ka',
0x3385:u'kb', 0x3386:u'mb', 0x3387:u'gb', 0x338a:u'pf',
0x338b:u'nf', 0x338c:u'\u03bcf', 0x3390:u'hz', 0x3391:u'khz',
0x3392:u'mhz', 0x3393:u'ghz', 0x3394:u'thz', 0x33a9:u'pa',
0x33aa:u'kpa', 0x33ab:u'mpa', 0x33ac:u'gpa', 0x33b4:u'pv',
0x33b5:u'nv', 0x33b6:u'\u03bcv', 0x33b7:u'mv', 0x33b8:u'kv',
0x33b9:u'mv', 0x33ba:u'pw', 0x33bb:u'nw', 0x33bc:u'\u03bcw',
0x33bd:u'mw', 0x33be:u'kw', 0x33bf:u'mw', 0x33c0:u'k\u03c9',
0x33c1:u'm\u03c9', 0x33c3:u'bq', 0x33c6:u'c\u2215kg', 0x33c7:u'co.',
0x33c8:u'db', 0x33c9:u'gy', 0x33cb:u'hp', 0x33cd:u'kk',
0x33ce:u'km', 0x33d7:u'ph', 0x33d9:u'ppm', 0x33da:u'pr',
0x33dc:u'sv', 0x33dd:u'wb', 0xfb00:u'ff', 0xfb01:u'fi',
0xfb02:u'fl', 0xfb03:u'ffi', 0xfb04:u'ffl', 0xfb05:u'st',
0xfb06:u'st', 0xfb13:u'\u0574\u0576', 0xfb14:u'\u0574\u0565', 0xfb15:u'\u0574\u056b',
0xfb16:u'\u057e\u0576', 0xfb17:u'\u0574\u056d', 0x1d400:u'a', 0x1d401:u'b',
0x1d402:u'c', 0x1d403:u'd', 0x1d404:u'e', 0x1d405:u'f',
0x1d406:u'g', 0x1d407:u'h', 0x1d408:u'i', 0x1d409:u'j',
0x1d40a:u'k', 0x1d40b:u'l', 0x1d40c:u'm', 0x1d40d:u'n',
0x1d40e:u'o', 0x1d40f:u'p', 0x1d410:u'q', 0x1d411:u'r',
0x1d412:u's', 0x1d413:u't', 0x1d414:u'u', 0x1d415:u'v',
0x1d416:u'w', 0x1d417:u'x', 0x1d418:u'y', 0x1d419:u'z',
0x1d434:u'a', 0x1d435:u'b', 0x1d436:u'c', 0x1d437:u'd',
0x1d438:u'e', 0x1d439:u'f', 0x1d43a:u'g', 0x1d43b:u'h',
0x1d43c:u'i', 0x1d43d:u'j', 0x1d43e:u'k', 0x1d43f:u'l',
0x1d440:u'm', 0x1d441:u'n', 0x1d442:u'o', 0x1d443:u'p',
0x1d444:u'q', 0x1d445:u'r', 0x1d446:u's', 0x1d447:u't',
0x1d448:u'u', 0x1d449:u'v', 0x1d44a:u'w', 0x1d44b:u'x',
0x1d44c:u'y', 0x1d44d:u'z', 0x1d468:u'a', 0x1d469:u'b',
0x1d46a:u'c', 0x1d46b:u'd', 0x1d46c:u'e', 0x1d46d:u'f',
0x1d46e:u'g', 0x1d46f:u'h', 0x1d470:u'i', 0x1d471:u'j',
0x1d472:u'k', 0x1d473:u'l', 0x1d474:u'm', 0x1d475:u'n',
0x1d476:u'o', 0x1d477:u'p', 0x1d478:u'q', 0x1d479:u'r',
0x1d47a:u's', 0x1d47b:u't', 0x1d47c:u'u', 0x1d47d:u'v',
0x1d47e:u'w', 0x1d47f:u'x', 0x1d480:u'y', 0x1d481:u'z',
0x1d49c:u'a', 0x1d49e:u'c', 0x1d49f:u'd', 0x1d4a2:u'g',
0x1d4a5:u'j', 0x1d4a6:u'k', 0x1d4a9:u'n', 0x1d4aa:u'o',
0x1d4ab:u'p', 0x1d4ac:u'q', 0x1d4ae:u's', 0x1d4af:u't',
0x1d4b0:u'u', 0x1d4b1:u'v', 0x1d4b2:u'w', 0x1d4b3:u'x',
0x1d4b4:u'y', 0x1d4b5:u'z', 0x1d4d0:u'a', 0x1d4d1:u'b',
0x1d4d2:u'c', 0x1d4d3:u'd', 0x1d4d4:u'e', 0x1d4d5:u'f',
0x1d4d6:u'g', 0x1d4d7:u'h', 0x1d4d8:u'i', 0x1d4d9:u'j',
0x1d4da:u'k', 0x1d4db:u'l', 0x1d4dc:u'm', 0x1d4dd:u'n',
0x1d4de:u'o', 0x1d4df:u'p', 0x1d4e0:u'q', 0x1d4e1:u'r',
0x1d4e2:u's', 0x1d4e3:u't', 0x1d4e4:u'u', 0x1d4e5:u'v',
0x1d4e6:u'w', 0x1d4e7:u'x', 0x1d4e8:u'y', 0x1d4e9:u'z',
0x1d504:u'a', 0x1d505:u'b', 0x1d507:u'd', 0x1d508:u'e',
0x1d509:u'f', 0x1d50a:u'g', 0x1d50d:u'j', 0x1d50e:u'k',
0x1d50f:u'l', 0x1d510:u'm', 0x1d511:u'n', 0x1d512:u'o',
0x1d513:u'p', 0x1d514:u'q', 0x1d516:u's', 0x1d517:u't',
0x1d518:u'u', 0x1d519:u'v', 0x1d51a:u'w', 0x1d51b:u'x',
0x1d51c:u'y', 0x1d538:u'a', 0x1d539:u'b', 0x1d53b:u'd',
0x1d53c:u'e', 0x1d53d:u'f', 0x1d53e:u'g', 0x1d540:u'i',
0x1d541:u'j', 0x1d542:u'k', 0x1d543:u'l', 0x1d544:u'm',
0x1d546:u'o', 0x1d54a:u's', 0x1d54b:u't', 0x1d54c:u'u',
0x1d54d:u'v', 0x1d54e:u'w', 0x1d54f:u'x', 0x1d550:u'y',
0x1d56c:u'a', 0x1d56d:u'b', 0x1d56e:u'c', 0x1d56f:u'd',
0x1d570:u'e', 0x1d571:u'f', 0x1d572:u'g', 0x1d573:u'h',
0x1d574:u'i', 0x1d575:u'j', 0x1d576:u'k', 0x1d577:u'l',
0x1d578:u'm', 0x1d579:u'n', 0x1d57a:u'o', 0x1d57b:u'p',
0x1d57c:u'q', 0x1d57d:u'r', 0x1d57e:u's', 0x1d57f:u't',
0x1d580:u'u', 0x1d581:u'v', 0x1d582:u'w', 0x1d583:u'x',
0x1d584:u'y', 0x1d585:u'z', 0x1d5a0:u'a', 0x1d5a1:u'b',
0x1d5a2:u'c', 0x1d5a3:u'd', 0x1d5a4:u'e', 0x1d5a5:u'f',
0x1d5a6:u'g', 0x1d5a7:u'h', 0x1d5a8:u'i', 0x1d5a9:u'j',
0x1d5aa:u'k', 0x1d5ab:u'l', 0x1d5ac:u'm', 0x1d5ad:u'n',
0x1d5ae:u'o', 0x1d5af:u'p', 0x1d5b0:u'q', 0x1d5b1:u'r',
0x1d5b2:u's', 0x1d5b3:u't', 0x1d5b4:u'u', 0x1d5b5:u'v',
0x1d5b6:u'w', 0x1d5b7:u'x', 0x1d5b8:u'y', 0x1d5b9:u'z',
0x1d5d4:u'a', 0x1d5d5:u'b', 0x1d5d6:u'c', 0x1d5d7:u'd',
0x1d5d8:u'e', 0x1d5d9:u'f', 0x1d5da:u'g', 0x1d5db:u'h',
0x1d5dc:u'i', 0x1d5dd:u'j', 0x1d5de:u'k', 0x1d5df:u'l',
0x1d5e0:u'm', 0x1d5e1:u'n', 0x1d5e2:u'o', 0x1d5e3:u'p',
0x1d5e4:u'q', 0x1d5e5:u'r', 0x1d5e6:u's', 0x1d5e7:u't',
0x1d5e8:u'u', 0x1d5e9:u'v', 0x1d5ea:u'w', 0x1d5eb:u'x',
0x1d5ec:u'y', 0x1d5ed:u'z', 0x1d608:u'a', 0x1d609:u'b',
0x1d60a:u'c', 0x1d60b:u'd', 0x1d60c:u'e', 0x1d60d:u'f',
0x1d60e:u'g', 0x1d60f:u'h', 0x1d610:u'i', 0x1d611:u'j',
0x1d612:u'k', 0x1d613:u'l', 0x1d614:u'm', 0x1d615:u'n',
0x1d616:u'o', 0x1d617:u'p', 0x1d618:u'q', 0x1d619:u'r',
0x1d61a:u's', 0x1d61b:u't', 0x1d61c:u'u', 0x1d61d:u'v',
0x1d61e:u'w', 0x1d61f:u'x', 0x1d620:u'y', 0x1d621:u'z',
0x1d63c:u'a', 0x1d63d:u'b', 0x1d63e:u'c', 0x1d63f:u'd',
0x1d640:u'e', 0x1d641:u'f', 0x1d642:u'g', 0x1d643:u'h',
0x1d644:u'i', 0x1d645:u'j', 0x1d646:u'k', 0x1d647:u'l',
0x1d648:u'm', 0x1d649:u'n', 0x1d64a:u'o', 0x1d64b:u'p',
0x1d64c:u'q', 0x1d64d:u'r', 0x1d64e:u's', 0x1d64f:u't',
0x1d650:u'u', 0x1d651:u'v', 0x1d652:u'w', 0x1d653:u'x',
0x1d654:u'y', 0x1d655:u'z', 0x1d670:u'a', 0x1d671:u'b',
0x1d672:u'c', 0x1d673:u'd', 0x1d674:u'e', 0x1d675:u'f',
0x1d676:u'g', 0x1d677:u'h', 0x1d678:u'i', 0x1d679:u'j',
0x1d67a:u'k', 0x1d67b:u'l', 0x1d67c:u'm', 0x1d67d:u'n',
0x1d67e:u'o', 0x1d67f:u'p', 0x1d680:u'q', 0x1d681:u'r',
0x1d682:u's', 0x1d683:u't', 0x1d684:u'u', 0x1d685:u'v',
0x1d686:u'w', 0x1d687:u'x', 0x1d688:u'y', 0x1d689:u'z',
0x1d6a8:u'\u03b1', 0x1d6a9:u'\u03b2', 0x1d6aa:u'\u03b3', 0x1d6ab:u'\u03b4',
0x1d6ac:u'\u03b5', 0x1d6ad:u'\u03b6', 0x1d6ae:u'\u03b7', 0x1d6af:u'\u03b8',
0x1d6b0:u'\u03b9', 0x1d6b1:u'\u03ba', 0x1d6b2:u'\u03bb', 0x1d6b3:u'\u03bc',
0x1d6b4:u'\u03bd', 0x1d6b5:u'\u03be', 0x1d6b6:u'\u03bf', 0x1d6b7:u'\u03c0',
0x1d6b8:u'\u03c1', 0x1d6b9:u'\u03b8', 0x1d6ba:u'\u03c3', 0x1d6bb:u'\u03c4',
0x1d6bc:u'\u03c5', 0x1d6bd:u'\u03c6', 0x1d6be:u'\u03c7', 0x1d6bf:u'\u03c8',
0x1d6c0:u'\u03c9', 0x1d6d3:u'\u03c3', 0x1d6e2:u'\u03b1', 0x1d6e3:u'\u03b2',
0x1d6e4:u'\u03b3', 0x1d6e5:u'\u03b4', 0x1d6e6:u'\u03b5', 0x1d6e7:u'\u03b6',
0x1d6e8:u'\u03b7', 0x1d6e9:u'\u03b8', 0x1d6ea:u'\u03b9', 0x1d6eb:u'\u03ba',
0x1d6ec:u'\u03bb', 0x1d6ed:u'\u03bc', 0x1d6ee:u'\u03bd', 0x1d6ef:u'\u03be',
0x1d6f0:u'\u03bf', 0x1d6f1:u'\u03c0', 0x1d6f2:u'\u03c1', 0x1d6f3:u'\u03b8',
0x1d6f4:u'\u03c3', 0x1d6f5:u'\u03c4', 0x1d6f6:u'\u03c5', 0x1d6f7:u'\u03c6',
0x1d6f8:u'\u03c7', 0x1d6f9:u'\u03c8', 0x1d6fa:u'\u03c9', 0x1d70d:u'\u03c3',
0x1d71c:u'\u03b1', 0x1d71d:u'\u03b2', 0x1d71e:u'\u03b3', 0x1d71f:u'\u03b4',
0x1d720:u'\u03b5', 0x1d721:u'\u03b6', 0x1d722:u'\u03b7', 0x1d723:u'\u03b8',
0x1d724:u'\u03b9', 0x1d725:u'\u03ba', 0x1d726:u'\u03bb', 0x1d727:u'\u03bc',
0x1d728:u'\u03bd', 0x1d729:u'\u03be', 0x1d72a:u'\u03bf', 0x1d72b:u'\u03c0',
0x1d72c:u'\u03c1', 0x1d72d:u'\u03b8', 0x1d72e:u'\u03c3', 0x1d72f:u'\u03c4',
0x1d730:u'\u03c5', 0x1d731:u'\u03c6', 0x1d732:u'\u03c7', 0x1d733:u'\u03c8',
0x1d734:u'\u03c9', 0x1d747:u'\u03c3', 0x1d756:u'\u03b1', 0x1d757:u'\u03b2',
0x1d758:u'\u03b3', 0x1d759:u'\u03b4', 0x1d75a:u'\u03b5', 0x1d75b:u'\u03b6',
0x1d75c:u'\u03b7', 0x1d75d:u'\u03b8', 0x1d75e:u'\u03b9', 0x1d75f:u'\u03ba',
0x1d760:u'\u03bb', 0x1d761:u'\u03bc', 0x1d762:u'\u03bd', 0x1d763:u'\u03be',
0x1d764:u'\u03bf', 0x1d765:u'\u03c0', 0x1d766:u'\u03c1', 0x1d767:u'\u03b8',
0x1d768:u'\u03c3', 0x1d769:u'\u03c4', 0x1d76a:u'\u03c5', 0x1d76b:u'\u03c6',
0x1d76c:u'\u03c7', 0x1d76d:u'\u03c8', 0x1d76e:u'\u03c9', 0x1d781:u'\u03c3',
0x1d790:u'\u03b1', 0x1d791:u'\u03b2', 0x1d792:u'\u03b3', 0x1d793:u'\u03b4',
0x1d794:u'\u03b5', 0x1d795:u'\u03b6', 0x1d796:u'\u03b7', 0x1d797:u'\u03b8',
0x1d798:u'\u03b9', 0x1d799:u'\u03ba', 0x1d79a:u'\u03bb', 0x1d79b:u'\u03bc',
0x1d79c:u'\u03bd', 0x1d79d:u'\u03be', 0x1d79e:u'\u03bf', 0x1d79f:u'\u03c0',
0x1d7a0:u'\u03c1', 0x1d7a1:u'\u03b8', 0x1d7a2:u'\u03c3', 0x1d7a3:u'\u03c4',
0x1d7a4:u'\u03c5', 0x1d7a5:u'\u03c6', 0x1d7a6:u'\u03c7', 0x1d7a7:u'\u03c8',
0x1d7a8:u'\u03c9', 0x1d7bb:u'\u03c3', }
def map_table_b3(code):
r = b3_exceptions.get(ord(code))
if r is not None: return r
return code.lower()
def map_table_b2(a):
al = map_table_b3(a)
b = unicodedata.normalize("NFKC", al)
bl = u"".join([map_table_b3(ch) for ch in b])
c = unicodedata.normalize("NFKC", bl)
if b != c:
return c
else:
return al
def in_table_c11(code):
return code == u" "
def in_table_c12(code):
return unicodedata.category(code) == "Zs" and code != u" "
def in_table_c11_c12(code):
return unicodedata.category(code) == "Zs"
def in_table_c21(code):
return ord(code) < 128 and unicodedata.category(code) == "Cc"
c22_specials = set([1757, 1807, 6158, 8204, 8205, 8232, 8233, 65279] + range(8288,8292) + range(8298,8304) + range(65529,65533) + range(119155,119163))
def in_table_c22(code):
c = ord(code)
if c < 128: return False
if unicodedata.category(code) == "Cc": return True
return c in c22_specials
def in_table_c21_c22(code):
return unicodedata.category(code) == "Cc" or \
ord(code) in c22_specials
def in_table_c3(code):
return unicodedata.category(code) == "Co"
def in_table_c4(code):
c = ord(code)
if c < 0xFDD0: return False
if c < 0xFDF0: return True
return (ord(code) & 0xFFFF) in (0xFFFE, 0xFFFF)
def in_table_c5(code):
return unicodedata.category(code) == "Cs"
c6_set = set(range(65529,65534))
def in_table_c6(code):
return ord(code) in c6_set
c7_set = set(range(12272,12284))
def in_table_c7(code):
return ord(code) in c7_set
c8_set = set([832, 833, 8206, 8207] + range(8234,8239) + range(8298,8304))
def in_table_c8(code):
return ord(code) in c8_set
c9_set = set([917505] + range(917536,917632))
def in_table_c9(code):
return ord(code) in c9_set
def in_table_d1(code):
return unicodedata.bidirectional(code) in ("R","AL")
def in_table_d2(code):
return unicodedata.bidirectional(code) == "L"
|
bsd-3-clause
|
bhavin04890/finaldashboard
|
modules/savage/graph/__init__.py
|
24
|
9731
|
from base import BaseGraph, UnifiedGraph
from canvas import ScatterCanvas, DoubleScatterCanvas, BarCanvas, HorizontalBarCanvas, PieCanvas, LineCanvas
from axes import YAxis
from ..utils.struct import Vector as V
from ..graphics.utils import ViewBox, Translate, Rotate, addAttr, blank, boolean
from ..graphics.color import hex_to_color, Color
from ..graphics.shapes import Line, Rectangle, Text
from ..graphics.group import Group
class Graph (BaseGraph):
def __init__ (self, **attr):
BaseGraph.__init__ (self, None, **attr)
class ScatterPlot (UnifiedGraph):
def __init__ (self, regLine = True, settings = None):
UnifiedGraph.__init__ (self,
ScatterCanvas,
regLine = regLine,
settings = settings)
#self.addScript (self.jsLocator ())
def jsLocator (self):
return """
function Locator (root) {
var canvasRoot = root;
}
registerEvent (window, 'load', function () {
var root = document.getElementById ('canvas-root');
var l = new Locator (root);
});
"""
def jsPosition (self):
return """
function showPosition (element) {
}
"""
def formatSettings (self, settings):
UnifiedGraph.formatSettings (self, settings)
addAttr (settings, 'markerSize', float, 2.0)
addAttr (settings, 'markerType', str, 'circle')
addAttr (settings, 'colorScheme', str, 'tripleAxis')
addAttr (settings, 'color1', hex_to_color, hex_to_color ('ff0000'))
addAttr (settings, 'color2', hex_to_color, hex_to_color ('00ff00'))
addAttr (settings, 'color3', hex_to_color, hex_to_color ('0000ff'))
addAttr (settings, 'regLineColor', hex_to_color, hex_to_color('000000'))
addAttr (settings, 'regLineWidth', float, 1.0)
def setColors (self, color1= None, color2 = None, color3 = None):
self.settings.color1 = color1
self.settings.color2 = color2
self.settings.color3 = color3
def setProperties (self):
self.xaxis = True
self.yaxis = True
self.y2axis = False
def addPoint (self, x, y, name = None):
self.canvas.drawPoint (name, x, y)
class DoubleScatterPlot (ScatterPlot):
def __init__ (self, **attr):
UnifiedGraph.__init__ (self, DoubleScatterCanvas, **attr)
def jsPosition (self):
return """
function showPosition (element) {
}
"""
def formatSettings (self, settings):
UnifiedGraph.formatSettings (self, settings)
addAttr (settings, 'g1MarkerType', str, 'circle')
addAttr (settings, 'g1MarkerSize', float, '2.0')
addAttr (settings, 'g1ColorScheme', str, 'solid')
addAttr (settings, 'g1Color1', hex_to_color, Color (255, 0, 0))
addAttr (settings, 'g1Color2', hex_to_color, Color (0, 255, 0))
addAttr (settings, 'g1Color3', hex_to_color, Color (0, 0, 255))
addAttr (settings, 'g1RegLine', boolean, False)
addAttr (settings, 'g1RegLineColor', hex_to_color, Color (0, 0, 0))
addAttr (settings, 'g1RegLineWidth', float, 1.0)
addAttr (settings, 'g2MarkerType', str, 'square')
addAttr (settings, 'g2MarkerSize', float, '4.0')
addAttr (settings, 'g2ColorScheme', str, 'solid')
addAttr (settings, 'g2Color1', hex_to_color, Color (0, 0, 255))
addAttr (settings, 'g2Color2', hex_to_color, Color (0, 255, 0))
addAttr (settings, 'g2Color3', hex_to_color, Color (255, 0, 0))
addAttr (settings, 'g2RegLine', boolean, False)
addAttr (settings, 'g2RegLineColor', hex_to_color, Color (0, 0, 0))
addAttr (settings, 'g2RegLineWidth', float, 1.0)
def setColors (self, color1, color2):
raise NotImplementedError ()
def setProperties (self):
self.xaxis = True
self.yaxis = True
self.y2axis = True
"""def setY2Bounds (self):
return (self.canvas.minY2, self.canvas.maxY2)"""
def addPoint1 (self, x, y, name = None):
self.canvas.drawPoint (name, x, y)
def addPoint2 (self, x, y, name = None):
self.canvas.drawPoint2 (name, x, y)
class BarGraph (UnifiedGraph):
def __init__ (self, **attr):
"""if attr.has_key ('horizontal') and attr['horizontal']:
self.horizontal = True
UnifiedGraph.__init__ (self, HorizontalBarCanvas, **attr)
else:
self.horizontal = False
UnifiedGraph.__init__ (self, BarCanvas, **attr)"""
UnifiedGraph.__init__ (self, None, **attr)
if self.settings.horizontal == True:
self.attachCanvas (HorizontalBarCanvas)
else:
self.attachCanvas (BarCanvas)
#self.addScript ('hs/static/highlight.js')
def formatSettings (self, settings):
UnifiedGraph.formatSettings (self, settings)
addAttr (settings, 'barColor', hex_to_color, Color (210, 10, 10))
addAttr (settings, 'barWidth', float, 1.0)
addAttr (settings, 'barSpacing', float, .1)
addAttr (settings, 'blankSpace', float, .5)
addAttr (settings, 'horizontal', boolean, False)
def jsChangeTooltipPos (self):
if not self.settings.horizontal:
return UnifiedGraph.jsChangeTooltipPos (self)
else:
return """
if (target.getAttribute ('width'))
targetWidth = parseFloat (target.getAttribute ('width'));
else
targetWidth = 0;
v.x += targetWidth"""
def setProperties (self):
if self.settings.horizontal:
self.xaxis = True
self.yaxis = True
self.y2axis = False
else:
self.xaxis = True
self.yaxis = True
self.y2axis = False
def setColors (self, colors):
self.canvas.colors = colors
def addSpace (self):
self.canvas.addSpace ()
def addBar (self, name, data):
self.canvas.addBar (None, name, data)
#if self.horizontal:
# self.ylabels.append (name)
def addGroup (self, name, data):
for key, value in data:
self.canvas.addBar (name, key, value)
self.canvas.addSpace ()
def createXAxisSpace (self):
if self.settings.horizontal:
UnifiedGraph.createXAxisSpace (self)
else:
h = self.settings.xAxisTextHeight
width = []
for child in self.canvas.data:
if child.xml.has_key ('data') and not child.xml['data'] is None :
w = Text.textWidth (child.xml['data'], h)
width.append (w)
if len (width) > 0:
maxWidth = max (width)
else:
maxWidth = 0
delta = self.settings.xAxisSpace + maxWidth
self.canvas.move (0, delta)
self.canvas.changeSize (0, -delta)
def createYAxis (self):
if not self.settings.horizontal:
UnifiedGraph.createYAxis (self)
else:
for child in self.canvas.data:
self.ypositions.append (child.y + (child.height / 2.0))
self.ylabels.append (child.xml['data'])
UnifiedGraph.createYAxis (self)
def createXAxis (self):
ax = Group ()
x = self.canvas.x - self.canvas.height
y = self.canvas.y + self.canvas.height
ax.appendTransform (Rotate (-90, self.canvas.x, y))
if self.settings.horizontal:
UnifiedGraph.createXAxis (self)
else:
textProperties = {'textHeight': self.settings.xAxisTextHeight,
'verticalAnchor': 'middle',
'horizontalAnchor': 'right',
}
xaxis = YAxis (id = 'x-axis',
inf = self.canvas.y + self.canvas.height,
sup = self.canvas.y + self.canvas.height - self.canvas.width,
x = self.canvas.x - self.canvas.height - self.settings.xAxisSpace,
lower = self.xbounds[0],
upper = self.xbounds[1],
textProperties = textProperties)
ticks = []
labels = []
for child in self.canvas.data:
if child.xml.has_key ('name'):
ticks.append (child.x + child.width / 2.0)
labels.append (child.xml['data'])
xaxis.createTicks (ticks)
xaxis.setText (map (str, labels))
xaxis.drawTicks ()
ax.draw (xaxis)
self.dataGroup.drawAt (ax, 0)
def setSVG (self):
attr = UnifiedGraph.setSVG (self)
return attr
class LineChart (UnifiedGraph):
def __init__ (self, **attr):
UnifiedGraph.__init__ (self, LineCanvas, **attr)
def setProperties (self):
self.xaxis = True
self.yaxis = True
self.y2axis = False
def setColors (self, colorDict):
self.canvas.colors.update (colorDict)
def addSeries (self, name, series):
self.canvas.addData (name, *series)
def setSeriesNames (self, seriesNames):
self.xlabels = seriesNames
class PieChart (BaseGraph):
def __init__ (self, **attr):
BaseGraph.__init__ (self, PieCanvas)
self.addScript ('hs/static/pie.js')
def addWedge (self, name, value):
self.canvas.addData (name, float (value))
def finalize (self):
BaseGraph.finalize (self)
self.canvas.finalize ()
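# Illustrative usage sketch (method names taken from this module; rendering
# of the finished SVG is assumed to be handled by BaseGraph elsewhere in the
# package):
#
#   chart = PieChart()
#   chart.addWedge('alpha', 30)
#   chart.addWedge('beta', 70)
#   chart.finalize()              # lays out the wedges on the PieCanvas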
|
mit
|
lianqiw/maos
|
scripts/interface.py
|
1
|
12202
|
#!/usr/bin/env python
#Use ctypes to interface with C libraries
#POINTER is the class type of pointer
#pointer() acts on actual array, while POINTER() works on class type.
#pointer(cell(arr)) #creates cell Structure for np.array type arr and makes a pointer
#pcell=POINTER(cell) ; pcell() #Creates a class for cell Structure and then makes an object pointer with no content.
#c_int is a ctypes type
#a=c_int(42) #creates a ctypes int object
#p=pointer(a) creates C compatible pointers to object a
#p.contents retrieves the contents of pointer p
#addressof(p) retrieves the address of p
#use pd=cast(p, POINTER(c_double)) to convert pointer p to a c_double pointer
#use pd=cast(address, POINTER(c_double)) to convert an address to a c_double pointer
#cast(addressof(a), POINTER(c_double)).contents #reinterprets int a as double
#pointer() creates a real pointer to a ctypes object (Structure)
#byref() is a simplified version of pointer(). It cannot be used as byref(byref())
#can use byref(pointer())
#Set restype to return correct value
#Set argtypes for type checking for input into C code
#be careful regarding memory management.
#TODO: investigate whether CFFI is a better solution.
import os
import sys
from pdb import set_trace as keyboard
from ctypes import *
import json
import numpy as np
import scipy.sparse as sp
from warnings import warn
aolib_so=os.environ.get('MAOS_AOLIB', 'aolib.so')
try:
lib=cdll.LoadLibrary(aolib_so)
except:
raise Exception('aolib.so is not found at '+aolib_so)
id2ctype={
#obtain type information from MAOS id.
#The value is (type, is complex, kind(0:dense, 1: sparse, 2: loc, 10: cell))
25600: (c_double,1,1), #M_CSP64
25601: (c_double,0,1), #M_SP64
25602: (c_double,0,0), #'M_DBL'
25603: (c_long, 0,0), #'M_INT64'
25604: (c_double,1,0), #'M_CMP'
25605: (c_int, 0,0), #'M_INT32',),
25606: (c_float, 1,1), #'M_CSP32',),
25607: (c_float, 0,1), #'M_SP32',),
25608: (c_float, 0,0), #'M_FLT',),
25609: (c_float, 1,0), #'M_ZMP',),
25610: (c_char, 0,0), #'M_INT8',),
25611: (c_short, 0,0), # 'M_INT16',),
25633: (c_void_p,0,10),#MC_ANY
222210: (c_double,0,2),#M_LOC64
}
#convert a C array pointer to a numpy array, freeing the C memory
def pt2py(pointer):
if bool(pointer):
out=pointer.contents.as_array()
pointer.contents.free()
return out
else:
return None
#convert C vector to numpy array. Memory is copied.
def as_array(arr, id, shape):
'''Convert C array arr to numpy based on id'''
(tt, iscomplex, issparse)=id2ctype.get(id)
if tt is None or not bool(arr) or shape[0]==0:
return np.empty((0,))
else:
parr=cast(arr, POINTER(tt))
if iscomplex:
nparr=np.ctypeslib.as_array(parr, shape=(*shape,2))
nparr2=nparr[...,0]+1j*nparr[...,1]
else:
nparr=np.ctypeslib.as_array(parr, shape=shape)
nparr2=np.copy(nparr)
return nparr2
#convert numpy array to any C array adaptively
def py2cell(arr):
if type(arr) is list:
arr=np.asarray(arr)
if sp.isspmatrix_csc(arr):
return csc(arr)
else:
return cell(arr)
#convert numpy array to any C array pointer adaptively
def py2cellref(arr):
if type(arr) is list:
arr = np.asarray(arr)
if type(arr) is np.ndarray:
if arr.size==0:
return None #turn empty ndarray to Null pointer. do not use 0
elif sp.isspmatrix_csc(arr):
return byref(csc(arr))
else:
return byref(cell(arr))
else:
return byref(arr)
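# Illustrative round trip (the C routine `dnew` is hypothetical; real symbol
# names depend on what aolib.so exports):
#
#   lib.dnew.restype = POINTER(cell)      # a C function returning a cell*
#   arr = pt2py(lib.dnew(3, 3))           # copy into numpy, then free C memory
#   ref = py2cellref(np.eye(3))           # numpy -> C cell, memory borrowed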
class cell(Structure):
_fields_ = [ #fields compatible with C type
('id', c_uint32),
('p', c_void_p),
('nx', c_long),
('ny', c_long),
('header', c_char_p),
('mmap', c_void_p),
('nref', c_void_p),
('fft', c_void_p),
]
def __init__(self, arr=None):#convert from numpy to C. Memory is borrowed
dtype2id={#Conversion from numpy type to maos id
np.double:25602,
np.complex128:25604,
np.int64: 25603,
np.object_:25633,
}
if type(arr) is list:
arr=np.asarray(arr)
if arr is not None:
if arr.strides[-1]!=arr.itemsize:
raise(Exception('Non standard indexing is not supported. Please make a copy.'))
self.id=dtype2id.get(arr.dtype.type)
if self.id is None:
print("init: Unknown data" +str( arr.dtype.type))
return None
if arr.ndim>2:
print("init: Only use 2 dimensions\n");
if arr.ndim>0:
self.nx=arr.shape[-1]
if arr.ndim>1:
self.ny=arr.shape[-2]
else:
if self.nx>0:
self.ny=1
else:
self.ny=0
if self.nx==0:
self.p=0
elif arr.dtype.kind != 'O':
self.p=arr.ctypes.data_as(c_void_p)
else:
self.qarr=np.zeros(self.shape(1), dtype=object)
self.parr=np.zeros(self.shape(1), dtype=c_void_p) #store pointers
for iy in range(self.ny):
for ix in range(self.nx):
if arr.ndim==1:
arri=arr[ix]
else:
arri=arr[iy,ix]
if arri is not None:
self.qarr[iy,ix]=py2cell(arri) #keep reference
self.parr[iy,ix]=addressof(self.qarr[iy,ix]) #pointer
else:
self.parr[iy,ix]=0
self.p=self.parr.ctypes.data_as(c_void_p)
else:
self.id=25633
self.p=None
self.nx=0
self.ny=0
self.header=None
self.mmap=None
self.nref=None
self.fft=None
def shape(self, twod):
if self.ny > 1 or twod:
return (self.ny, self.nx)
else:
return (self.nx,) #last , is necessary
def as_array(self): #convert form C to numpy. Memory is copied
try:
(tt, iscomplex, kind)=id2ctype.get(self.id)
except:
kind=-1
if kind==0: #dense matrix
if self.header:
print(self.header)
return as_array(self.p, self.id, self.shape(0))
elif kind==1: #sparse matrix
return cast(addressof(self), POINTER(csc)).contents.as_array()
elif kind==2: #loc
return cast(addressof(self), POINTER(loc)).contents.as_array()
elif kind==10: #cell
res=np.empty(self.shape(1), dtype=object)
parr=cast(self.p, POINTER(c_void_p))
for iy in range(self.ny):
for ix in range(self.nx):
address=parr[ix+self.nx*iy]
if address is not None:
pp=cast(int(address), POINTER(cell))
res[iy, ix]=pp.contents.as_array() #recursive
else:
res[iy, ix]=np.empty(())
if self.ny==1:
res=res[0,]
return res
else:
print('as_array: Unknown data, id='+ str(self.id))
return np.empty((),dtype=object)
def free(self):
lib.cellfree_do(byref(self)) #will fail if memory is not allocated by C
class loc(Structure):
_fields_ = [
('id', c_uint32),
('locx', c_void_p),
('locy', c_void_p),
('nloc', c_long),
('dx', c_double),
('dy', c_double),
('ht', c_double),
('iac', c_double),
('locstat_t', c_void_p),
('map', c_void_p),
('npad', c_int),
('nref', c_void_p),
]
def __init__(self, arr=None): #convert from numpy to C. Memory is borrowed
self.id= 222210 #0x036402 #M_LOC64
if arr is not None:
if len(arr.shape)!=2 or arr.shape[0] !=2 :
raise(Exception('Array has to be of shape 2xn'))
else:
self.nloc=arr.shape[1]
self.locx=arr[0,].ctypes.data_as(c_void_p)
self.locy=arr[1,].ctypes.data_as(c_void_p)
dlocx=arr[0,1:]-arr[0,0:-1]
self.dx=min(dlocx[dlocx>0]);
dlocy=arr[1,1:]-arr[1,0:-1]
self.dy=min(dlocy[dlocy>0]);
#print('loc: dx={0}, dy={1}'.format(self.dx, self.dy))
else:
self.nloc=0
self.locx=None
self.locy=None
self.dx=0
self.dy=0
self.ht=0
self.iac=0
self.locstat_t=0
self.map=0
self.npad=0
self.nref=0
def as_array(self): #convert form C to numpy. Memory is copied
if(self.locx):
if self.id!=222210:
raise(Exception('Wrong type'))
else:
arr=np.empty((2, self.nloc))
arr[0,]=as_array(self.locx, 25602, shape=(self.nloc,))
arr[1,]=as_array(self.locy, 25602, shape=(self.nloc,))
return arr
def free(self):
lib.cellfree_do(byref(self))
class csc(Structure):#CSC sparse matrix
_fields_=[
('id', c_uint32),
('x', c_void_p),
('nx', c_long),
('ny', c_long),
('header', c_char_p),
('nzmax', c_long),
('p', c_void_p),
('i', c_void_p),
('nref', c_void_p),
]
def __init__(self, arr=None): #convert from numpy to C. Memory is borrowed
dtype2id={#Conversion from sparse type to maos id
np.float32: 25607,
np.float64: 25601,
np.complex64: 25606,
np.complex128:25600,
}
if arr is not None and sp.isspmatrix_csc(arr):
self.id=dtype2id.get(arr.dtype.type)
#save subarrays
self.xp=arr.data
self.ip=arr.indices.astype(np.long)
self.pp=arr.indptr.astype(np.long) #p
self.x=self.xp.ctypes.data_as(c_void_p) #data
self.i=self.ip.ctypes.data_as(c_void_p) #row index
self.p=self.pp.ctypes.data_as(c_void_p)
self.nx, self.ny=arr.shape #Fortran order
self.nzmax=self.pp[-1]
else:
self.id=dtype2id.get(np.float64)
self.x=None
self.i=None
self.p=None
self.nx=0
self.ny=0
self.nzmax=0
self.header=None
self.nref=None
def as_array(self): #convert form C to numpy. Memory is copied
if self.nzmax>0:
self.xp=as_array(self.x, self.id, (self.nzmax,))
self.ip=as_array(self.i, 25603, (self.nzmax,))
self.pp=as_array(self.p, 25603, (self.ny+1,))
return sp.csc_matrix((self.xp, self.ip, self.pp), shape=(self.nx, self.ny))
else:
return sp.csc_matrix((self.nx,self.ny))
def free(self):
lib.cellfree_do(byref(self))
def convert_fields(fields):
val2type={
'*':c_void_p,
'double':c_double,
'long':c_long,
'int':c_int,
}
newfields=[]
for key,val in fields.items():
if val[-1]=='*':
val=c_void_p
else:
val=val2type[val]
newfields.append((key,val))
return newfields
#Create a ctypes class with field listed
def make_class(name, fields):
newfields=convert_fields(fields)
class newclass(Structure):
pass
def as_array(self):#convert struct into dictionary
out=dict()
for ff in self._fields_:
#convert C pointers to POINTER then to array
if ff[1] is c_void_p:
exec('out[\''+ff[0]+'\']=cast(self.'+ff[0]+',POINTER(cell)).contents.as_array()')
else:
exec('out[\''+ff[0]+'\']=self.'+ff[0])
return out
def free(self):
print('to implement: free');
newclass._fields_=newfields
return newclass
|
gpl-3.0
|
Chilledheart/gyp
|
test/compiler-override/gyptest-compiler-env.py
|
14
|
3332
|
#!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies that the user can override the compiler and linker using the
CC/CXX/LINK environment variables.
"""
import TestGyp
import os
import copy
import sys
here = os.path.dirname(os.path.abspath(__file__))
if sys.platform == 'win32':
# cross compiling not supported by ninja on windows
# and make not supported on windows at all.
sys.exit(0)
# Clear any existing compiler related env vars.
for key in ['CC', 'CXX', 'LINK', 'CC_host', 'CXX_host', 'LINK_host']:
if key in os.environ:
del os.environ[key]
def CheckCompiler(test, gypfile, check_for, run_gyp):
if run_gyp:
test.run_gyp(gypfile)
test.build(gypfile)
test.must_contain_all_lines(test.stdout(), check_for)
test = TestGyp.TestGyp(formats=['ninja', 'make'])
def TestTargetOverride():
expected = ['my_cc.py', 'my_cxx.py', 'FOO' ]
if test.format != 'ninja': # ninja just uses $CC / $CXX as linker.
expected.append('FOO_LINK')
# Check that CC, CXX and LINK set the target compiler and linker
oldenv = os.environ.copy()
try:
os.environ['CC'] = 'python %s/my_cc.py FOO' % here
os.environ['CXX'] = 'python %s/my_cxx.py FOO' % here
os.environ['LINK'] = 'python %s/my_ld.py FOO_LINK' % here
CheckCompiler(test, 'compiler-exe.gyp', expected, True)
finally:
os.environ.clear()
os.environ.update(oldenv)
# Run the same tests once the environment has been restored. The
# generated project files should have embedded all the settings, so
# the results should be the same.
CheckCompiler(test, 'compiler-exe.gyp', expected, False)
def TestTargetOverrideCompilerOnly():
# Same test again, but with only CC and CXX set (LINK left unset)
oldenv = os.environ.copy()
try:
os.environ['CC'] = 'python %s/my_cc.py FOO' % here
os.environ['CXX'] = 'python %s/my_cxx.py FOO' % here
CheckCompiler(test, 'compiler-exe.gyp',
['my_cc.py', 'my_cxx.py', 'FOO'],
True)
finally:
os.environ.clear()
os.environ.update(oldenv)
# Run the same tests once the environment has been restored. The
# generated project files should have embedded all the settings, so
# the results should be the same.
CheckCompiler(test, 'compiler-exe.gyp',
['my_cc.py', 'my_cxx.py', 'FOO'],
False)
def TestHostOverride():
expected = ['my_cc.py', 'my_cxx.py', 'HOST' ]
if test.format != 'ninja': # ninja just uses $CC / $CXX as linker.
expected.append('HOST_LINK')
# Check that CC_host, CXX_host and LINK_host set the host compiler and linker
oldenv = os.environ.copy()
try:
os.environ['CC_host'] = 'python %s/my_cc.py HOST' % here
os.environ['CXX_host'] = 'python %s/my_cxx.py HOST' % here
os.environ['LINK_host'] = 'python %s/my_ld.py HOST_LINK' % here
CheckCompiler(test, 'compiler-host.gyp', expected, True)
finally:
os.environ.clear()
os.environ.update(oldenv)
# Run the same tests once the environment has been restored. The
# generated project files should have embedded all the settings, so
# the results should be the same.
CheckCompiler(test, 'compiler-host.gyp', expected, False)
TestTargetOverride()
TestTargetOverrideCompilerOnly()
TestHostOverride()
test.pass_test()
|
bsd-3-clause
|
pfnet/chainercv
|
chainercv/links/model/faster_rcnn/region_proposal_network.py
|
3
|
6706
|
import numpy as np
import chainer
from chainer.backends import cuda
import chainer.functions as F
import chainer.links as L
from chainercv.links.model.faster_rcnn.utils.generate_anchor_base import \
generate_anchor_base
from chainercv.links.model.faster_rcnn.utils.proposal_creator import \
ProposalCreator
class RegionProposalNetwork(chainer.Chain):
"""Region Proposal Network introduced in Faster R-CNN.
This is Region Proposal Network introduced in Faster R-CNN [#]_.
This takes features extracted from images and propose
class agnostic bounding boxes around "objects".
.. [#] Shaoqing Ren, Kaiming He, Ross Girshick, Jian Sun. \
Faster R-CNN: Towards Real-Time Object Detection with \
Region Proposal Networks. NIPS 2015.
Args:
in_channels (int): The channel size of input.
mid_channels (int): The channel size of the intermediate tensor.
ratios (list of floats): This is ratios of width to height of
the anchors.
anchor_scales (list of numbers): This is areas of anchors.
Those areas will be the product of the square of an element in
:obj:`anchor_scales` and the original area of the reference
window.
feat_stride (int): Stride size after extracting features from an
image.
initialW (callable): Initial weight value. If :obj:`None` then this
function uses Gaussian distribution scaled by 0.1 to
initialize weight.
May also be a callable that takes an array and edits its values.
proposal_creator_params (dict): Key-value parameters for
:class:`~chainercv.links.model.faster_rcnn.ProposalCreator`.
.. seealso::
:class:`~chainercv.links.model.faster_rcnn.ProposalCreator`
"""
def __init__(
self, in_channels=512, mid_channels=512, ratios=[0.5, 1, 2],
anchor_scales=[8, 16, 32], feat_stride=16,
initialW=None,
proposal_creator_params={},
):
self.anchor_base = generate_anchor_base(
anchor_scales=anchor_scales, ratios=ratios)
self.feat_stride = feat_stride
self.proposal_layer = ProposalCreator(**proposal_creator_params)
n_anchor = self.anchor_base.shape[0]
super(RegionProposalNetwork, self).__init__()
with self.init_scope():
self.conv1 = L.Convolution2D(
in_channels, mid_channels, 3, 1, 1, initialW=initialW)
self.score = L.Convolution2D(
mid_channels, n_anchor * 2, 1, 1, 0, initialW=initialW)
self.loc = L.Convolution2D(
mid_channels, n_anchor * 4, 1, 1, 0, initialW=initialW)
def forward(self, x, img_size, scales=None):
"""Forward Region Proposal Network.
Here are notations.
* :math:`N` is batch size.
* :math:`C` channel size of the input.
* :math:`H` and :math:`W` are height and width of the input feature.
* :math:`A` is number of anchors assigned to each pixel.
Args:
x (~chainer.Variable): The features extracted from images.
Its shape is :math:`(N, C, H, W)`.
img_size (tuple of ints): A tuple :obj:`height, width`,
which contains image size after scaling.
scales (tuple of floats): The amount of scaling done to each input
image during preprocessing.
Returns:
(~chainer.Variable, ~chainer.Variable, array, array, array):
This is a tuple of five following values.
* **rpn_locs**: Predicted bounding box offsets and scales for \
anchors. Its shape is :math:`(N, H W A, 4)`.
* **rpn_scores**: Predicted foreground scores for \
anchors. Its shape is :math:`(N, H W A, 2)`.
* **rois**: A bounding box array containing coordinates of \
proposal boxes. This is a concatenation of bounding box \
arrays from multiple images in the batch. \
Its shape is :math:`(R', 4)`. Given :math:`R_i` predicted \
bounding boxes from the :math:`i` th image, \
:math:`R' = \\sum _{i=1} ^ N R_i`.
* **roi_indices**: An array containing indices of images to \
which RoIs correspond to. Its shape is :math:`(R',)`.
* **anchor**: Coordinates of enumerated shifted anchors. \
Its shape is :math:`(H W A, 4)`.
"""
n, _, hh, ww = x.shape
if scales is None:
scales = [1.0] * n
if not isinstance(scales, chainer.utils.collections_abc.Iterable):
scales = [scales] * n
anchor = _enumerate_shifted_anchor(
self.xp.array(self.anchor_base), self.feat_stride, hh, ww)
n_anchor = anchor.shape[0] // (hh * ww)
h = F.relu(self.conv1(x))
rpn_locs = self.loc(h)
rpn_locs = rpn_locs.transpose((0, 2, 3, 1)).reshape((n, -1, 4))
rpn_scores = self.score(h)
rpn_scores = rpn_scores.transpose((0, 2, 3, 1))
rpn_fg_scores =\
rpn_scores.reshape((n, hh, ww, n_anchor, 2))[:, :, :, :, 1]
rpn_fg_scores = rpn_fg_scores.reshape((n, -1))
rpn_scores = rpn_scores.reshape((n, -1, 2))
rois = []
roi_indices = []
for i in range(n):
roi = self.proposal_layer(
rpn_locs[i].array, rpn_fg_scores[i].array, anchor, img_size,
scale=scales[i])
batch_index = i * self.xp.ones((len(roi),), dtype=np.int32)
rois.append(roi)
roi_indices.append(batch_index)
rois = self.xp.concatenate(rois, axis=0)
roi_indices = self.xp.concatenate(roi_indices, axis=0)
return rpn_locs, rpn_scores, rois, roi_indices, anchor
def _enumerate_shifted_anchor(anchor_base, feat_stride, height, width):
# Enumerate all shifted anchors:
#
# add A anchors (1, A, 4) to
# cell K shifts (K, 1, 4) to get
# shift anchors (K, A, 4)
# reshape to (K*A, 4) shifted anchors
xp = cuda.get_array_module(anchor_base)
shift_y = xp.arange(0, height * feat_stride, feat_stride)
shift_x = xp.arange(0, width * feat_stride, feat_stride)
shift_x, shift_y = xp.meshgrid(shift_x, shift_y)
shift = xp.stack((shift_y.ravel(), shift_x.ravel(),
shift_y.ravel(), shift_x.ravel()), axis=1)
A = anchor_base.shape[0]
K = shift.shape[0]
anchor = anchor_base.reshape((1, A, 4)) + \
shift.reshape((1, K, 4)).transpose((1, 0, 2))
anchor = anchor.reshape((K * A, 4)).astype(np.float32)
return anchor
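# Worked shape example: with the default 3 ratios and 3 anchor_scales,
# generate_anchor_base presumably yields A = 9 base anchors; a feature map
# with height=2, width=3 then gives K = 6 shifts, so
# _enumerate_shifted_anchor returns an array of shape (K * A, 4) = (54, 4).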
|
mit
|