repo_name | path | copies | size | content | license | hash | line_mean | line_max | alpha_frac | autogenerated
---|---|---|---|---|---|---|---|---|---|---|
anentropic/py-mysql2pgsql | tests/test_reader.py | 1 | 4143 | from __future__ import with_statement
import sys
import os
import unittest
from contextlib import closing
import MySQLdb
sys.path.append(os.path.abspath('../'))
from mysql2pgsql.lib.config import Config
from mysql2pgsql.lib.mysql_reader import MysqlReader
from mysql2pgsql.lib.errors import ConfigurationFileNotFound
class TestMysqlReader(unittest.TestCase):
def setUp(self):
try:
self.config_file = os.path.join(os.path.dirname(__file__), 'mysql2pgsql-test.yml')
config = Config(self.config_file, False)
except ConfigurationFileNotFound:
print("In order to run this test you must create the file %s" % config)
sys.exit(-1)
self.options = config.options['mysql']
self.args = {
'user': self.options.get('username', 'root'),
'db': self.options['database'],
'use_unicode': True,
'charset': 'utf8',
}
if self.options.get('password', None):
self.args['passwd'] = self.options.get('password', None)
if self.options.get('socket', None):
self.args['unix_socket'] = self.options['socket']
else:
self.args['host'] = self.options.get('hostname', 'localhost')
self.args['port'] = self.options.get('port', 3306)
self.args['compress'] = self.options.get('compress', True)
with open(os.path.join(os.path.dirname(__file__), 'schema.sql')) as sql:
self.sql = sql.read()
with closing(MySQLdb.connect(**self.args)) as conn:
with closing(conn.cursor()) as cur:
for cmd in self.sql.split('-- SPLIT'):
cur.execute(cmd)
conn.commit()
self.reader = MysqlReader(self.options)
self.type_to_pos = {
'text': (21, 22),
'float': (83, 84, 85, 86, 87, 88, 89, 90),
'numeric': (75, 76, 77, 78),
'datetime': (113, 114, 115, 116, 117, 118),
'char': (9, 10, 11, 12),
'boolean': (49, 50),
"enum('small','medium','large')": (1, 2, 3, 4),
'bit(8)': (37, 38, 39, 40),
'mediumblob': (27, 28),
'mediumtext': (19, 20),
'blob': (29, 30),
"set('a','b','c','d','e')": (5, 6, 7, 8),
'varchar': (13, 14, 15, 16),
'timestamp': (125, 126, 127, 128, 129, 130),
'binary(3)': (33, 34),
'varbinary(50)': (35, 36),
'date': (107, 108, 109, 110, 111, 112),
'integer': (0, 51, 52, 53, 54, 59, 60, 61, 62, 63, 64, 65, 66, 71, 72, 73, 74),
'double precision': (91, 92, 93, 94, 95, 96, 97, 98),
'tinytext': (17, 18),
'decimal': (99, 100, 101, 102, 103, 104, 105, 106, 136, 137, 138, 139, 140, 141, 142, 143),
'longtext': (23, 24),
'tinyint': (41, 42, 43, 44, 45, 46, 47, 48, 55, 56, 57, 58, 131, 132, 133, 134, 135),
'bigint': (67, 68, 69, 70, 79, 80, 81, 82),
'time': (119, 120, 121, 122, 123, 124),
'tinyblob': (25, 26),
'longblob': (31, 32)
}
def tearDown(self):
self.reader.close()
'''
with closing(MySQLdb.connect(**self.args)) as conn:
with closing(conn.cursor()) as cur:
for cmd in self.sql.split('-- SPLIT')[:2]:
cur.execute(cmd)
conn.commit()
'''
def test_tables(self):
table_list = list(self.reader.tables)
assert table_list
assert len(table_list) == 2
def test_columns(self):
for table in self.reader.tables:
columns = table.columns
if table.name == 'type_conversion_test_1':
for k, v in self.type_to_pos.iteritems():
assert all(columns[i]['type'] == k for i in v)
def test_indexes(self):
for table in self.reader.tables:
assert table.indexes
def test_constraints(self):
assert list(self.reader.tables)[1].foreign_keys
| mit | 8,853,520,178,353,498,000 | 36.324324 | 103 | 0.508569 | false |
miguelgaio/parameter-framework | test/functional-tests-legacy/PfwTestCase/Types/tFP32_Q31_0.py | 8 | 11258 | # -*-coding:utf-8 -*
# Copyright (c) 2011-2015, Intel Corporation
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation and/or
# other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
# ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
Fixed-Point parameter type testcases - FP32_Q31.0
List of tested functions :
--------------------------
- [setParameter] function
- [getParameter] function
Initial Settings :
------------------
FP32_Q31.0 :
- size = 32 bits
- 31 integer bits, 0 fractional bits
- range : [-2147483648, 2147483647]
Test cases :
------------
- FP32_Q31.0 parameter min value = -2147483648
- FP32_Q31.0 parameter min value out of bounds = -4147483649
- FP32_Q31.0 parameter max value = 2147483647
- FP32_Q31.0 parameter max value out of bounds = 12147483648
- FP32_Q31.0 parameter in nominal case = 2222
"""
import os
from Util.PfwUnitTestLib import PfwTestCase
from Util import ACTLogging
log=ACTLogging.Logger()
# Test of type FP32_Q31.0 - range [-2147483648,2147483647]
class TestCases(PfwTestCase):
def setUp(self):
self.param_name = "/Test/Test/TEST_DIR/FP32_Q31.0"
self.pfw.sendCmd("setTuningMode", "on")
def tearDown(self):
self.pfw.sendCmd("setTuningMode", "off")
def test_Nominal_Case(self):
"""
Testing FP32_Q31.0 in nominal case = 2222
-----------------------------------------
Test case description :
~~~~~~~~~~~~~~~~~~~~~~~
- set FP32_Q31.0 parameter in nominal case = 2222
Tested commands :
~~~~~~~~~~~~~~~~~
- [setParameter] function
Used commands :
~~~~~~~~~~~~~~~
- [getParameter] function
Expected result :
~~~~~~~~~~~~~~~~~
- FP32_Q31.0 parameter set to 2222
- Blackboard and filesystem values checked
"""
log.D(self.test_Nominal_Case.__doc__)
log.I("FP32_Q31.0 parameter in nominal case = 2222")
value = "2222"
hex_value = "0x8ae"
#Set parameter value
out, err = self.pfw.sendCmd("setParameter", self.param_name, value)
assert err == None, log.E("when setting parameter %s : %s"
% (self.param_name, err))
assert out == "Done", log.F("when setting parameter %s : %s"
% (self.param_name, out))
#Check parameter value on blackboard
out, err = self.pfw.sendCmd("getParameter", self.param_name, "")
assert err == None, log.E("when setting parameter %s : %s"
% (self.param_name, err))
assert float(out) == float(value), log.F("BLACKBOARD : Incorrect value for %s, expected: %s, found: %s"
% (self.param_name, value, out))
#Check parameter value on filesystem
assert open(os.environ["PFW_RESULT"] + "/FP32_Q31.0").read()[:-1] == hex_value, log.F("FILESYSTEM : parameter update error")
log.I("test OK")
def test_TypeMin(self):
"""
Testing FP32_Q31.0 minimal value = -2147483648
----------------------------------------------
Test case description :
~~~~~~~~~~~~~~~~~~~~~~~
- set FP32_Q31.0 parameter min value = -2147483648
Tested commands :
~~~~~~~~~~~~~~~~~
- [setParameter] function
Used commands :
~~~~~~~~~~~~~~~
- [getParameter] function
Expected result :
~~~~~~~~~~~~~~~~~
- FP32_Q31.0 parameter set to -2147483648
- Blackboard and filesystem values checked
"""
log.D(self.test_TypeMin.__doc__)
log.I("FP32_Q31.0 parameter min value = -2147483648")
value = "-2147483648"
hex_value = "0x80000000"
#Set parameter value
out, err = self.pfw.sendCmd("setParameter", self.param_name, value)
assert err == None, log.E("when setting parameter %s : %s"
% (self.param_name, err))
assert out == "Done", log.F("when setting parameter %s : %s"
% (self.param_name, out))
#Check parameter value on blackboard
out, err = self.pfw.sendCmd("getParameter", self.param_name, "")
assert err == None, log.E("when setting parameter %s : %s"
% (self.param_name, err))
assert float(out) == float(value), log.F("BLACKBOARD : Incorrect value for %s, expected: %s, found: %s"
% (self.param_name, value, out))
#Check parameter value on filesystem
assert open(os.environ["PFW_RESULT"] + "/FP32_Q31.0").read()[:-1] == hex_value, log.F("FILESYSTEM : parameter update error")
log.I("test OK")
def test_TypeMin_Overflow(self):
"""
Testing FP32_Q31.0 parameter value out of negative range
--------------------------------------------------------
Test case description :
~~~~~~~~~~~~~~~~~~~~~~~
- set FP32_Q31.0 to -4147483649
Tested commands :
~~~~~~~~~~~~~~~~~
- [setParameter] function
Used commands :
~~~~~~~~~~~~~~~
- [getParameter] function
Expected result :
~~~~~~~~~~~~~~~~~
- error detected
- FP32_Q31.0 parameter not updated
- Blackboard and filesystem values checked
"""
log.D(self.test_TypeMin_Overflow.__doc__)
log.I("FP32_Q31.0 parameter min value out of bounds = -4147483649")
value = "-4147483649"
param_check = open(os.environ["PFW_RESULT"] + "/FP32_Q31.0").read()[:-1]
#Set parameter value
out, err = self.pfw.sendCmd("setParameter", self.param_name, value, expectSuccess=False)
assert err == None, log.E("when setting parameter %s : %s"
% (self.param_name, err))
assert out != "Done", log.F("PFW : Error not detected when setting parameter %s out of bounds"
% (self.param_name))
#Check parameter value on filesystem
assert open(os.environ["PFW_RESULT"] + "/FP32_Q31.0").read()[:-1] == param_check, log.F("FILESYSTEM : Forbidden parameter change")
log.I("test OK")
def test_TypeMax(self):
"""
Testing FP32_Q31.0 parameter maximum value
------------------------------------------
Test case description :
~~~~~~~~~~~~~~~~~~~~~~~
- set FP32_Q31.0 to 2147483647
Tested commands :
~~~~~~~~~~~~~~~~~
- [setParameter] function
Used commands :
~~~~~~~~~~~~~~~
- [getParameter] function
Expected result :
~~~~~~~~~~~~~~~~~
- FP32_Q31.0 parameter set to 2147483647
- Blackboard and filesystem values checked
"""
log.D(self.test_TypeMax.__doc__)
log.I("FP32_Q31.0 parameter max value = 2147483647")
value = "2147483647"
hex_value = "0x7fffffff"
#Set parameter value
out, err = self.pfw.sendCmd("setParameter", self.param_name, value)
assert err == None, log.E("when setting parameter %s : %s"
% (self.param_name, err))
assert out == "Done", log.F("when setting parameter %s : %s"
% (self.param_name, out))
#Check parameter value on blackboard
out, err = self.pfw.sendCmd("getParameter", self.param_name, "")
assert err == None, log.E("when setting parameter %s : %s"
% (self.param_name, err))
assert float(out) == float(value), log.F("BLACKBOARD : Incorrect value for %s, expected: %s, found: %s"
% (self.param_name, value, out))
#Check parameter value on filesystem
assert open(os.environ["PFW_RESULT"] + "/FP32_Q31.0").read()[:-1] == hex_value, log.F("FILESYSTEM : parameter update error")
log.I("test OK")
def test_TypeMax_Overflow(self):
"""
Testing FP32_Q31.0 parameter value out of positive range
--------------------------------------------------------
Test case description :
~~~~~~~~~~~~~~~~~~~~~~~
- set FP32_Q31.0 to 12147483648
Tested commands :
~~~~~~~~~~~~~~~~~
- [setParameter] function
Used commands :
~~~~~~~~~~~~~~~
- [getParameter] function
Expected result :
~~~~~~~~~~~~~~~~~
- error detected
- FP32_Q31.0 parameter not updated
- Blackboard and filesystem values checked
"""
log.D(self.test_TypeMax_Overflow.__doc__)
log.I("FP32_Q31.0 parameter max value out of bounds = 12147483648")
value = "12147483648"
param_check = open(os.environ["PFW_RESULT"] + "/FP32_Q31.0").read()[:-1]
#Set parameter value
out, err = self.pfw.sendCmd("setParameter", self.param_name, value, expectSuccess=False)
assert err == None, log.E("when setting parameter %s : %s"
% (self.param_name, err))
assert out != "Done", log.F("PFW : Error not detected when setting parameter %s out of bounds"
% (self.param_name))
#Check parameter value on filesystem
assert open(os.environ["PFW_RESULT"] + "/FP32_Q31.0").read()[:-1] == param_check, log.F("FILESYSTEM : Forbidden parameter change")
log.I("test OK")
| bsd-3-clause | 3,009,918,813,096,130,000 | 44.764228 | 137 | 0.535797 | false |
nanditav/15712-TensorFlow | tensorflow/python/kernel_tests/pooling_ops_3d_test.py | 10 | 14157 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Functional tests for 3d pooling operations."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
class PoolingTest(tf.test.TestCase):
def _VerifyValues(self, pool_func, input_sizes, window, strides, padding,
expected):
"""Verifies the output values of the pooling function.
Args:
pool_func: Function to be called: co.MaxPool, co.AvgPool.
input_sizes: Input tensor dimensions.
window: Tuple of kernel dims: planes, rows, cols.
strides: Tuple of strides for dims: planes, rows, cols.
padding: Padding type.
expected: An array containing the expected operation outputs.
"""
total_size = 1
for s in input_sizes:
total_size *= s
# Initializes the input tensor with array containing incrementing
# numbers from 1.
x = [f * 1.0 for f in range(1, total_size + 1)]
with self.test_session(use_gpu=True) as sess:
t = tf.constant(x, shape=input_sizes)
t = pool_func(t,
ksize=[1, window[0], window[1], window[2], 1],
strides=[1, strides[0], strides[1], strides[2], 1],
padding=padding)
vals = sess.run(t)
# Verifies values.
actual = vals.flatten()
self.assertAllClose(expected, actual)
def testAvgPool3dValidPadding(self):
expected_output = [20.5, 21.5, 22.5]
self._VerifyValues(tf.nn.avg_pool3d,
input_sizes=[1, 3, 3, 3, 3],
window=(2, 2, 2),
strides=(2, 2, 2),
padding="VALID",
expected=expected_output)
def testAvgPool3dSamePadding(self):
expected_output = [20.5, 21.5, 22.5, 26.5, 27.5, 28.5]
self._VerifyValues(tf.nn.avg_pool3d,
input_sizes=[1, 2, 2, 4, 3],
window=(2, 2, 2),
strides=(2, 2, 2),
padding="SAME",
expected=expected_output)
def testAvgPool3dSamePaddingDifferentStrides(self):
expected_output = [1.5, 4.5, 7.5, 17.5, 20.5, 23.5, 33.5, 36.5, 39.5]
self._VerifyValues(tf.nn.avg_pool3d,
input_sizes=[1, 5, 8, 1, 1],
window=(1, 2, 3),
strides=(2, 3, 1),
padding="SAME",
expected=expected_output)
def testMaxPool3dValidPadding(self):
expected_output = [40.0, 41.0, 42.0]
self._VerifyValues(tf.nn.max_pool3d,
input_sizes=[1, 3, 3, 3, 3],
window=(2, 2, 2),
strides=(2, 2, 2),
padding="VALID",
expected=expected_output)
def testMaxPool3dSamePadding(self):
expected_output = [31., 32., 33., 34., 35., 36.]
self._VerifyValues(tf.nn.max_pool3d,
input_sizes=[1, 2, 2, 3, 3],
window=(2, 2, 2),
strides=(2, 2, 2),
padding="SAME",
expected=expected_output)
def testMaxPool3dSamePaddingDifferentStrides(self):
expected_output = [2., 5., 8., 18., 21., 24., 34., 37., 40.]
self._VerifyValues(tf.nn.max_pool3d,
input_sizes=[1, 5, 8, 1, 1],
window=(1, 2, 3),
strides=(2, 3, 1),
padding="SAME",
expected=expected_output)
# Test pooling on a larger input, with different stride and kernel
# size for the 'z' dimension.
# Simulate max pooling in numpy to get the expected output.
input_data = np.arange(1, 5 * 27 * 27 * 64 + 1).reshape((5, 27, 27, 64))
input_data = np.pad(input_data, [[0, 0], [0, 1], [0, 1], [0, 0]],
mode="constant")
expected_output = input_data[:, 1::2, 1::2, :]
expected_output[:, -1, :, :] = input_data[:, -2, 1::2, :]
expected_output[:, :, -1, :] = input_data[:, 1::2, -2, :]
expected_output[:, -1, -1, :] = input_data[:, -2, -2, :]
self._VerifyValues(tf.nn.max_pool3d,
input_sizes=[1, 5, 27, 27, 64],
window=(1, 2, 2),
strides=(1, 2, 2),
padding="SAME",
expected=expected_output.flatten())
def testKernelSmallerThanStride(self):
self._VerifyValues(tf.nn.max_pool3d, input_sizes=[1, 3, 3, 3, 1],
window=[1, 1, 1], strides=[2, 2, 2],
padding="SAME",
expected=[1, 3, 7, 9, 19, 21, 25, 27])
self._VerifyValues(tf.nn.max_pool3d, input_sizes=[1, 7, 7, 7, 1],
window=[2, 2, 2], strides=[3, 3, 3],
padding="VALID",
expected=[58, 61, 79, 82, 205, 208, 226, 229])
self._VerifyValues(tf.nn.avg_pool3d, input_sizes=[1, 3, 3, 3, 1],
window=[1, 1, 1], strides=[2, 2, 2],
padding="SAME",
expected=[1, 3, 7, 9, 19, 21, 25, 27])
self._VerifyValues(tf.nn.avg_pool3d, input_sizes=[1, 7, 7, 7, 1],
window=[2, 2, 2], strides=[3, 3, 3],
padding="VALID",
expected=[29.5, 32.5, 50.5, 53.5,
176.5, 179.5, 197.5, 200.5])
def _ConstructAndTestGradient(self,
pool_func,
input_sizes,
output_sizes,
window,
strides,
padding,
x_init_value=None):
"""Verifies the gradients of the avg pooling function.
Args:
pool_func: Function to be called, co.MaxPool, co.AvgPool,
or the Lua version.
input_sizes: Input tensor dimensions.
output_sizes: Output tensor dimensions.
window: Tuple of kernel dims: planes, rows, cols.
strides: Tuple of strides for dims: planes, rows, cols.
padding: Padding type.
x_init_value: Values to be passed to the gradient checker.
"""
total_size = 1
for s in input_sizes:
total_size *= s
# Initializes the input tensor with array containing incrementing
# numbers from 1.
x = [f * 1.0 for f in range(1, total_size + 1)]
with self.test_session(use_gpu=True):
input_tensor = tf.constant(x, shape=input_sizes, name="input")
err_margin = 1e-3
if pool_func == tf.nn.avg_pool3d:
func_name = "avg_pool3d"
else:
if x_init_value is None:
x_init_value = np.asfarray(
np.arange(1, total_size + 1),
dtype=np.float32).reshape(input_sizes)
func_name = "max_pool3d"
t = pool_func(input_tensor,
ksize=[1, window[0], window[1], window[2], 1],
strides=[1, strides[0], strides[1], strides[2], 1],
padding=padding,
name=func_name)
err = tf.test.compute_gradient_error(input_tensor,
input_sizes,
t,
output_sizes,
x_init_value=x_init_value,
delta=1e-2)
print("%s gradient error = " % func_name, err)
self.assertLess(err, err_margin)
def testMaxPoolGradValidPadding1_1_3d(self):
self._ConstructAndTestGradient(tf.nn.max_pool3d,
input_sizes=[1, 3, 3, 3, 1],
output_sizes=[1, 3, 3, 3, 1],
window=(1, 1, 1),
strides=(1, 1, 1),
padding="VALID")
def testMaxPoolGradValidPadding2_1_6_3d(self):
self._ConstructAndTestGradient(tf.nn.max_pool3d,
input_sizes=[2, 3, 3, 6, 3],
output_sizes=[2, 2, 2, 5, 3],
window=(2, 2, 2),
strides=(1, 1, 1),
padding="VALID")
def testMaxPoolGradValidPadding2_1_7_3d(self):
self._ConstructAndTestGradient(tf.nn.max_pool3d,
input_sizes=[2, 3, 5, 7, 3],
output_sizes=[2, 2, 4, 6, 3],
window=(2, 2, 2),
strides=(1, 1, 1),
padding="VALID")
def testMaxPoolGradValidPadding2_2_3d(self):
self._ConstructAndTestGradient(tf.nn.max_pool3d,
input_sizes=[2, 2, 2, 2, 3],
output_sizes=[2, 1, 1, 1, 3],
window=(2, 2, 2),
strides=(2, 2, 2),
padding="VALID")
def testMaxPoolGradSamePadding1_1_3d(self):
self._ConstructAndTestGradient(tf.nn.max_pool3d,
input_sizes=[2, 3, 2, 4, 1],
output_sizes=[2, 3, 2, 4, 1],
window=(1, 1, 1),
strides=(1, 1, 1),
padding="SAME")
def testMaxPoolGradSamePadding2_1_3d(self):
self._ConstructAndTestGradient(tf.nn.max_pool3d,
input_sizes=[2, 3, 2, 4, 1],
output_sizes=[2, 3, 2, 4, 1],
window=(2, 2, 2),
strides=(1, 1, 1),
padding="SAME")
def testMaxPoolGradSamePadding2_2_3d(self):
self._ConstructAndTestGradient(tf.nn.max_pool3d,
input_sizes=[2, 5, 2, 4, 3],
output_sizes=[2, 3, 1, 2, 3],
window=(2, 2, 2),
strides=(2, 2, 2),
padding="SAME")
def testMaxPoolGradSamePadding3_1_3d(self):
self._ConstructAndTestGradient(tf.nn.max_pool3d,
input_sizes=[1, 3, 3, 7, 1],
output_sizes=[1, 3, 3, 7, 1],
window=(3, 3, 3),
strides=(1, 1, 1),
padding="SAME")
def testAvgPoolGradValidPadding1_1_3d(self):
self._ConstructAndTestGradient(tf.nn.avg_pool3d,
input_sizes=[2, 3, 3, 3, 3],
output_sizes=[2, 3, 3, 3, 3],
window=(1, 1, 1),
strides=(1, 1, 1),
padding="VALID")
def testAvgPoolGradValidPadding2_1_3d(self):
self._ConstructAndTestGradient(tf.nn.avg_pool3d,
input_sizes=[2, 3, 3, 3, 3],
output_sizes=[2, 2, 2, 2, 3],
window=(2, 2, 2),
strides=(1, 1, 1),
padding="VALID")
def testAvgPoolGradValidPadding2_2_3d(self):
self._ConstructAndTestGradient(tf.nn.avg_pool3d,
input_sizes=[2, 2, 2, 2, 3],
output_sizes=[2, 1, 1, 1, 3],
window=(2, 2, 2),
strides=(2, 2, 2),
padding="VALID")
def testAvgPoolGradSamePadding1_1_3d(self):
self._ConstructAndTestGradient(tf.nn.avg_pool3d,
input_sizes=[2, 3, 2, 4, 3],
output_sizes=[2, 3, 2, 4, 3],
window=(1, 1, 1),
strides=(1, 1, 1),
padding="SAME")
def testAvgPoolGradSamePadding2_1_3d(self):
self._ConstructAndTestGradient(tf.nn.avg_pool3d,
input_sizes=[1, 2, 2, 2, 1],
output_sizes=[1, 2, 2, 2, 1],
window=(2, 2, 2),
strides=(1, 1, 1),
padding="SAME")
def testAvgPoolGradSamePadding2_2_3d(self):
self._ConstructAndTestGradient(tf.nn.avg_pool3d,
input_sizes=[2, 5, 2, 4, 3],
output_sizes=[2, 3, 1, 2, 3],
window=(2, 2, 2),
strides=(2, 2, 2),
padding="SAME")
def testAvgPoolGradSamePadding3_1_3d(self):
self._ConstructAndTestGradient(tf.nn.avg_pool3d,
input_sizes=[1, 3, 6, 7, 1],
output_sizes=[1, 3, 6, 7, 1],
window=(3, 3, 3),
strides=(1, 1, 1),
padding="SAME")
if __name__ == "__main__":
tf.test.main()
| apache-2.0 | -5,481,266,219,832,327,000 | 42.56 | 80 | 0.445575 | false |
awslabs/sockeye | test/unit/test_lr_scheduler.py | 1 | 4115 | # Copyright 2017--2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not
# use this file except in compliance with the License. A copy of the License
# is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "license" file accompanying this file. This file is distributed on
# an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
import pytest
import numpy as np
from sockeye import lr_scheduler
@pytest.mark.parametrize('learning_rate_warmup,learning_rate_t_scale',
[(1, 1), (3, 2), (10, .5), (20, 1)])
def test_inv_sqrt_decay_scheduler(learning_rate_warmup, learning_rate_t_scale):
scheduler = lr_scheduler.get_lr_scheduler('inv-sqrt-decay',
learning_rate_t_scale=learning_rate_t_scale,
learning_rate_reduce_factor=0,
learning_rate_reduce_num_not_improved=0,
learning_rate_warmup=learning_rate_warmup,
max_updates=10)
scheduler.base_lr = 1
# Reference formula from Transformer paper, plus time scaling
alternate_implementation = lambda t: min((t * learning_rate_t_scale)**-0.5,
(t * learning_rate_t_scale) * learning_rate_warmup**-1.5)
expected_schedule = [alternate_implementation(t) for t in range(1, 11)]
actual_schedule = [scheduler(t) for t in range(1, 11)]
assert np.isclose(expected_schedule, actual_schedule).all()
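# Worked example (added for illustration, using the schedule defined above):
# with learning_rate_warmup=3 and learning_rate_t_scale=2, step t=1 gives
# lr = min((1*2)**-0.5, (1*2) * 3**-1.5) = min(0.7071..., 0.3849...) = 0.3849...,
# so the linear warmup term is the smaller one until t*scale reaches the warmup
# value, after which the inverse-square-root decay term takes over.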
def test_linear_decay_scheduler():
scheduler = lr_scheduler.get_lr_scheduler('linear-decay',
learning_rate_t_scale=1,
learning_rate_reduce_factor=0,
learning_rate_reduce_num_not_improved=0,
learning_rate_warmup=3,
max_updates=10)
scheduler.base_lr = 1
# Warmup term * decay term
expected_schedule = [
(1/3) * (9/10),
(2/3) * (8/10),
(3/3) * (7/10),
(3/3) * (6/10),
(3/3) * (5/10),
(3/3) * (4/10),
(3/3) * (3/10),
(3/3) * (2/10),
(3/3) * (1/10),
(3/3) * (0/10),
]
actual_schedule = [scheduler(t) for t in range(1, 11)]
assert np.isclose(expected_schedule, actual_schedule).all()
@pytest.mark.parametrize('scheduler_type, expected_instance',
[('none', None),
('inv-sqrt-decay', lr_scheduler.LearningRateSchedulerInvSqrtDecay),
('linear-decay', lr_scheduler.LearningRateSchedulerLinearDecay),
('plateau-reduce', lr_scheduler.LearningRateSchedulerPlateauReduce)])
def test_get_lr_scheduler(scheduler_type, expected_instance):
scheduler = lr_scheduler.get_lr_scheduler(scheduler_type,
learning_rate_t_scale=1,
learning_rate_reduce_factor=0.5,
learning_rate_reduce_num_not_improved=16,
learning_rate_warmup=1000,
max_updates=10000)
if expected_instance is None:
assert scheduler is None
else:
assert isinstance(scheduler, expected_instance)
def test_get_lr_scheduler_no_reduce():
scheduler = lr_scheduler.get_lr_scheduler('plateau-reduce',
learning_rate_t_scale=1,
learning_rate_reduce_factor=1.0,
learning_rate_reduce_num_not_improved=16)
assert scheduler is None
| apache-2.0 | 1,460,599,644,210,426,000 | 43.247312 | 102 | 0.528554 | false |
maxwell-demon/fbthrift | thrift/tutorial/php/runserver.py | 16 | 1145 | #!/usr/bin/env python
#
# encoding: ascii-8bit
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import os
import BaseHTTPServer
import CGIHTTPServer
# chdir(2) into the tutorial directory.
os.chdir(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
class Handler(CGIHTTPServer.CGIHTTPRequestHandler):
cgi_directories = ['/php']
BaseHTTPServer.HTTPServer(('', 8080), Handler).serve_forever()
| apache-2.0 | -6,504,052,604,616,540,000 | 33.69697 | 70 | 0.765939 | false |
plcode7/rad2py | ide2py/psp.py | 7 | 52257 | #!/usr/bin/env python
# coding:utf-8
"Personal Software Process (TM) Integrated & Automatic Metrics Collection"
__author__ = "Mariano Reingart ([email protected])"
__copyright__ = "Copyright (C) 2011 Mariano Reingart"
__license__ = "GPL 3.0"
# PSP Time Toolbar & Defect Log inspired by PSP Dashboard (java/open source)
# Most GUI classes are based on wxPython demos
import datetime
import os, os.path
import sys
import hashlib, uuid
import cPickle as pickle
import wx
import wx.grid
from wx.lib.mixins.listctrl import CheckListCtrlMixin, ListCtrlAutoWidthMixin
import wx.lib.agw.aui as aui
from wx.lib.agw.pygauge import PyGauge
import images
import simplejsonrpc
from database import DictShelf, ListShelf
try:
from camera import Camera # camera sensor needs OpenCV
except ImportError:
Camera = None
PSP_PHASES = ["planning", "design", "code", "review", "compile", "test", "postmortem"]
PSP_TIMES = ["plan", "actual", "interruption", "off_task", "comments"]
PSP_DEFECT_TYPES = {10: 'Documentation', 20: 'Syntax', 30: 'Build',
40: 'Assignment', 50: 'Interface', 60: 'Checking', 70: 'Data',
80: 'Function', 90: 'System', 100: 'Environment'}
PSP_EVENT_LOG_FORMAT = "%(timestamp)s %(uuid)s %(phase)s %(event)s %(comment)s"
ID_START, ID_PAUSE, ID_STOP, ID_CHECK, ID_METADATA, ID_DIFF, ID_PHASE, \
ID_DEFECT, ID_DEL, ID_DEL_ALL, ID_EDIT, ID_FIXED, ID_WONTFIX, ID_FIX, \
ID_UP, ID_DOWN, ID_WIKI, ID_COMPILE, ID_TEST \
= [wx.NewId() for i in range(19)]
WX_VERSION = tuple([int(v) for v in wx.version().split()[0].split(".")])
def pretty_time(counter):
"return formatted string of a time count in seconds (days/hours/min/seg)"
# find time unit and convert to it
if counter is None:
return ""
counter = int(counter)
for factor, unit in ((1., 's'), (60., 'm'), (3600., 'h')):
if counter < (60 * factor):
break
# only print fraction if it is not an integer result
if counter % factor:
return "%0.2f %s" % (counter/factor, unit)
else:
return "%d %s" % (counter/factor, unit)
def parse_time(user_input):
"analyze user input, return a time count number in seconds"
# sanity checks on user input:
user_input = str(user_input).strip().lower()
if not user_input:
return 0
elif ' ' in user_input:
user_time, user_unit = user_input.split()
elif not user_input[-1].isdigit():
user_time, user_unit = user_input[:-1], user_input[-1]
else:
user_time, user_unit = user_input, ""
# find time unit and convert from it to seconds
user_time = user_time.replace(",", ".")
for factor, unit in ((1, 's'), (60, 'm'), (3600, 'h')):
if unit == user_unit:
break
return float(user_time) * factor
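# Usage sketch (added; outputs assume the two helpers above, which back the
# plan summary grid's display and input parsing):
#   pretty_time(90)      -> '1.50 m'
#   pretty_time(7200)    -> '2 h'
#   parse_time('30 m')   -> 1800.0
#   parse_time('45s')    -> 45.0
#   parse_time('1.5 h')  -> 5400.0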
class PlanSummaryTable(wx.grid.PyGridTableBase):
"PSP Planning tracking summary (actual vs estimated)"
def __init__(self, grid):
wx.grid.PyGridTableBase.__init__(self)
self.rows = PSP_PHASES
self.cols = PSP_TIMES
self.Clear()
self.grid = grid
self.UpdateValues()
def __del__(self):
if self.data is not None:
self.data.close()
def GetNumberRows(self):
return len(self.rows)
def GetNumberCols(self):
return len(self.cols)
def IsEmptyCell(self, row, col):
key_phase = PSP_PHASES[row]
key_time = PSP_TIMES[col]
if not self.data:
return None
return self.data.get(key_phase, {}).get(key_time, {}) and True or False
def GetValue(self, row, col):
key_phase = PSP_PHASES[row]
key_time = PSP_TIMES[col]
if self.data is not None:
val = self.data.get(key_phase, {}).get(key_time, 0)
if key_time != "comments":
return pretty_time(val)
elif val:
return '; '.join(['%s %s' % (msg, pretty_time(delta))
for msg, delta in val])
return ''
def SetValue(self, row, col, value):
if self.data is not None:
value = parse_time(value)
key_phase = PSP_PHASES[row]
key_time = PSP_TIMES[col]
self.data.setdefault(key_phase, {})[key_time] = value
self.data.sync()
def GetColLabelValue(self, col):
return self.cols[col].capitalize()
def GetRowLabelValue(self, row):
return self.rows[row].capitalize()
def count(self, phase, interruption, active=True):
"Increment actual user time according selected phase"
if self.data is not None:
key_phase = phase
key_time = "plan"
plan = self.data.get(key_phase, {}).get(key_time, 0)
if not active:
key_time = "off_task"
elif interruption:
key_time = "interruption"
else:
key_time = "actual"
value = (self.data.get(phase, {}).get(key_time) or 0) + 1
self.data.setdefault(key_phase, {})[key_time] = value
self.data.sync()
row = PSP_PHASES.index(phase)
col = PSP_TIMES.index(key_time)
self.UpdateValues(row, col)
self.grid.SelectRow(-1)
self.grid.SelectRow(row)
return self.data.get(phase, {})
def comment(self, phase, message, delta):
"Record the comment of an interruption in selected phase"
return
key_phase = phase
comments = self.data.get(key_phase, {}).get('comments', [])
comments.append((message, delta))
self.data[key_phase]['comments'] = comments
self.data.sync()
row = PSP_PHASES.index(phase)
self.UpdateValues(row)
self.grid.SelectRow(row)
def UpdateValues(self, row=-1, col=-1):
if not self.grid.IsCellEditControlEnabled():
self.grid.BeginBatch()
msg = wx.grid.GridTableMessage(self,
wx.grid.GRIDTABLE_REQUEST_VIEW_GET_VALUES,
row, col)
self.grid.ProcessTableMessage(msg)
#self.grid.ForceRefresh()
self.grid.EndBatch()
def Clear(self):
self.data = None
def Load(self, data):
self.data = data
self.UpdateValues()
class DefectListCtrl(wx.ListCtrl, CheckListCtrlMixin, ListCtrlAutoWidthMixin):
"Defect recording log facilities"
def __init__(self, parent):
wx.ListCtrl.__init__(self, parent, -1,
style=wx.LC_REPORT | wx.LC_SINGLE_SEL | wx.LC_ALIGN_LEFT)
ListCtrlAutoWidthMixin.__init__(self)
CheckListCtrlMixin.__init__(self)
#TextEditMixin.__init__(self)
self.Bind(wx.EVT_LIST_ITEM_ACTIVATED, self.OnItemActivated)
self.parent = parent
self.col_defs = {
"number": (0, wx.LIST_FORMAT_RIGHT, 50),
"summary": (1, wx.LIST_FORMAT_LEFT, wx.LIST_AUTOSIZE),
"description": (1, wx.LIST_FORMAT_LEFT, 0),
"date": (2, wx.LIST_FORMAT_CENTER, 80),
"type": (3, wx.LIST_FORMAT_LEFT, 50),
"inject_phase": (4, wx.LIST_FORMAT_LEFT, 75),
"remove_phase": (5, wx.LIST_FORMAT_LEFT, 75),
"fix_time": (6, wx.LIST_FORMAT_RIGHT, 75),
"fix_defect": (7, wx.LIST_FORMAT_LEFT, 25),
"filename": (8, wx.LIST_FORMAT_LEFT, 50),
"lineno": (9, wx.LIST_FORMAT_RIGHT, 50),
"offset": (10, wx.LIST_FORMAT_RIGHT, 0),
"uuid": (11, wx.LIST_FORMAT_RIGHT, 0),
}
for col_key, col_def in sorted(self.col_defs.items(), key=lambda k: k[1][0]):
col_name = col_key.replace("_", " ").capitalize()
i = col_def[0]
col_fmt, col_size = col_def[1:3]
self.InsertColumn(i, col_name, col_fmt)
self.SetColumnWidth(i, col_size)
if col_size == wx.LIST_AUTOSIZE:
self.setResizeColumn(i+1)
self.Bind(wx.EVT_LIST_ITEM_SELECTED, self.OnItemSelected, self)
self.Bind(wx.EVT_LIST_ITEM_DESELECTED, self.OnItemDeselected, self)
self.selecteditemindex = None
self.key_map = {} # list item data (defect_id) -> key (uuid)
self.data = None
# make a popup-menu
self.menu = wx.Menu()
self.menu.Append(ID_EDIT, "Edit")
self.menu.Append(ID_FIXED, "Mark Fixed")
self.menu.Append(ID_WONTFIX, "Mark Wontfix")
self.menu.Append(ID_DEL, "Delete")
self.menu.Append(ID_DEL_ALL, "Delete All")
self.Bind(wx.EVT_MENU, self.OnChangeItem, id=ID_FIXED)
self.Bind(wx.EVT_MENU, self.OnChangeItem, id=ID_WONTFIX)
self.Bind(wx.EVT_MENU, self.OnEditItem, id=ID_EDIT)
self.Bind(wx.EVT_MENU, self.OnDeleteItem, id=ID_DEL)
self.Bind(wx.EVT_MENU, self.OnDeleteAllItems, id=ID_DEL_ALL)
self.Bind(wx.EVT_KEY_DOWN, self.OnKeyDown)
# for wxMSW
self.Bind(wx.EVT_COMMAND_RIGHT_CLICK, self.OnRightClick)
# for wxGTK
self.Bind(wx.EVT_RIGHT_UP, self.OnRightClick)
self.selected_index = None
def __del__(self):
if self.data is not None:
self.data.close()
def AddItem(self, item, key=None):
# ignore defect if psp task was not selected
if self.data is None:
return
# check for duplicates (if defect already exists, do not add again!)
if key is None:
for defect in self.data.values():
if (defect["summary"] == item["summary"] and
defect["date"] == item["date"] and
defect["filename"] == item["filename"] and
defect["lineno"] == item["lineno"] and
defect["offset"] == item["offset"]):
key = defect['uuid']
self.parent.psp_log_event("dup_defect", uuid=key, comment=str(item))
return
if "checked" not in item:
item["checked"] = False
index = self.InsertStringItem(sys.maxint, str(item["number"]))
# calculate max number + 1
if item['number'] is None:
if self.data:
numbers = [int(defect['number'] or 0) for defect in self.data.values()]
item['number'] = str(max(numbers) + 1)
else:
item['number'] = 1
# create a unique string key to store it
if key is None:
key = str(uuid.uuid1())
item['uuid'] = key
self.data[key] = item
self.data[key].save()
item = self.data[key]
self.data.sync()
self.parent.psp_log_event("new_defect", uuid=key, comment=str(self.data[key]))
for col_key, col_def in self.col_defs.items():
val = item.get(col_key, "")
if col_key == 'fix_time':
val = pretty_time(val)
elif isinstance(val, str):
val = val.decode("utf8", "replace")
elif isinstance(val, unicode):
val = val
elif val is not None:
val = str(val)
else:
val = ""
self.SetStringItem(index, col_def[0], val)
self.key_map[item['defect_id']] = key
self.SetItemData(index, item['defect_id'])
if item["checked"]:
self.ToggleItem(index)
def OnRightClick(self, event):
self.PopupMenu(self.menu)
def OnItemActivated(self, evt):
#self.ToggleItem(evt.m_itemIndex)
defect_id = long(self.GetItemData(evt.m_itemIndex))
key = self.key_map[defect_id]
item = self.data[key]
event = item["filename"], item["lineno"], item["offset"] or 0
if item["filename"] and item["lineno"]:
self.parent.GotoFileLine(event,running=False)
self.selecteditemindex = evt.m_itemIndex
self.parent.psp_log_event("activate_defect", uuid=key)
def OnChangeItem(self, event):
"Change item status -fixed, wontfix-"
wontfix = event.GetId() == ID_WONTFIX
self.OnCheckItem(self.selected_index, True, wontfix)
self.ToggleItem(self.selected_index)
# this is called by the base class when an item is checked/unchecked
def OnCheckItem(self, index, flag, wontfix=False):
defect_id = long(self.GetItemData(index))
key = self.key_map[defect_id]
item = self.data[key]
title = item["number"]
if item.get("checked") != flag:
if wontfix:
item["fix_time"] = None
col_key = 'fix_time' # clean fix time (wontfix mark)
col_index = self.col_defs[col_key][0]
self.SetStringItem(index, col_index, "")
if flag:
what = "checked"
col_key = 'remove_phase' # update phase when removed
col_index = self.col_defs[col_key][0]
if not item[col_key]:
phase = item[col_key] = self.parent.GetPSPPhase()
self.SetStringItem(index, col_index, str(phase))
else:
what = "unchecked"
self.parent.psp_log_event("%s_defect" % what, uuid=key)
item["checked"] = flag
self.data.sync()
def OnKeyDown(self, event):
key = event.GetKeyCode()
control = event.ControlDown()
#shift=event.ShiftDown()
alt = event.AltDown()
if key == wx.WXK_DELETE:
self.OnDeleteItem(event)
else:
event.Skip()
def OnDeleteItem(self, evt):
if self.selected_index is not None:
defect_id = long(self.GetItemData(self.selected_index))
key = self.key_map[defect_id]
del self.data[key]
self.DeleteItem(self.selected_index)
self.data.sync()
# refresh new selected item
if not self.data:
self.selected_index = None
elif self.selected_index == len(self.data):
self.selected_index = len(self.data) - 1
if self.selected_index is not None:
self.Select(self.selected_index)
def OnDeleteAllItems(self, evt):
dlg = wx.MessageDialog(self, "Delete all defects?", "PSP Defect List",
wx.YES_NO | wx.NO_DEFAULT | wx.ICON_QUESTION)
if dlg.ShowModal() == wx.ID_YES:
self.DeleteAllItems()
dlg.Destroy()
def OnEditItem(self, evt):
defect_id = long(self.GetItemData(self.selected_index))
key = self.key_map[defect_id]
item = self.data[key]
dlg = DefectDialog(None, -1, "Edit Defect No. %s" % item['number'],
size=(350, 200), style=wx.DEFAULT_DIALOG_STYLE, )
dlg.CenterOnScreen()
dlg.SetValue(item)
if dlg.ShowModal() == wx.ID_OK:
item.update(dlg.GetValue())
self.UpdateItem(self.selected_index, item)
self.data.sync()
def UpdateItems(self):
"Refresh all items at once"
# do a reverse map lookup for each item data -> index (position)
for index in range(self.GetItemCount()):
defect_id = long(self.GetItemData(index))
print "index, defect_id", index, defect_id, self.key_map
key = self.key_map[defect_id]
item = self.data[key]
self.UpdateItem(index, item)
def UpdateItem(self, index, item):
"Refresh an item given the index and data"
for col_key, col_def in self.col_defs.items():
val = item.get(col_key, "")
if col_key == 'fix_time':
val = pretty_time(val)
elif val is not None:
val = str(val)
else:
val = ""
self.SetStringItem(index, col_def[0], val)
def DeleteAllItems(self):
self.data = None
self.selected_index = None
wx.ListCtrl.DeleteAllItems(self)
def OnItemSelected(self, evt):
self.selected_index = evt.m_itemIndex
def OnItemDeselected(self, evt):
self.selected_index = None
def count(self, phase):
"Increment actual user time to fix selected defect"
if self.selecteditemindex is not None:
index = self.selecteditemindex
defect_id = long(self.GetItemData(index))
key = self.key_map[defect_id]
col_key = "fix_time"
col_index = self.col_defs[col_key][0]
flag = self.data[key]["checked"]
if not flag:
value = self.data[key][col_key] + 1
self.data[key][col_key] = value
self.data.sync()
self.SetStringItem(index, col_index, pretty_time(value))
def Load(self, data):
self.data = data
# refresh UI
for key, item in data.items():
self.AddItem(item, key)
class DefectDialog(wx.Dialog):
def __init__(self, parent, ID, title, size=wx.DefaultSize,
pos=wx.DefaultPosition, style=wx.DEFAULT_DIALOG_STYLE, ):
wx.Dialog.__init__(self, parent, ID, title, size=size, pos=pos, style=style)
sizer = wx.BoxSizer(wx.VERTICAL)
self.label = wx.StaticText(self, -1, "Defect Nº - date - UUID")
sizer.Add(self.label, 0, wx.ALIGN_CENTRE, 10)
grid1 = wx.FlexGridSizer( 0, 2, 5, 5 )
label = wx.StaticText(self, -1, "Summary:")
grid1.Add(label, 0, wx.ALIGN_LEFT, 5)
self.summary = wx.TextCtrl(self, -1, "", size=(200, -1), )
grid1.Add(self.summary, 1, wx.EXPAND, 5)
label = wx.StaticText(self, -1, "Description:")
grid1.Add(label, 0, wx.ALIGN_LEFT, 5)
self.description = wx.TextCtrl(self, -1, "", size=(200, 100),
style=wx.TE_MULTILINE)
grid1.Add(self.description, 1, wx.EXPAND, 5)
self.types = sorted(PSP_DEFECT_TYPES.keys())
self.phases = phases = [""] + PSP_PHASES
types = ["%s: %s" % (k, PSP_DEFECT_TYPES[k]) for k in self.types]
label = wx.StaticText(self, -1, "Defect Type:")
grid1.Add(label, 0, wx.ALIGN_LEFT, 5)
self.defect_type = wx.Choice(self, -1, choices=types, size=(80,-1))
grid1.Add(self.defect_type, 1, wx.EXPAND, 5)
label = wx.StaticText(self, -1, "Inject Phase:")
grid1.Add(label, 0, wx.ALIGN_LEFT, 5)
self.inject_phase = wx.Choice(self, -1, choices=phases, size=(80,-1))
grid1.Add(self.inject_phase, 1, wx.EXPAND, 5)
label = wx.StaticText(self, -1, "Remove Phase:")
grid1.Add(label, 0, wx.ALIGN_LEFT, 5)
self.remove_phase = wx.Choice(self, -1, choices=phases, size=(80,-1))
grid1.Add(self.remove_phase, 1, wx.EXPAND, 5)
label = wx.StaticText(self, -1, "Fix time:")
grid1.Add(label, 0, wx.ALIGN_LEFT, 5)
self.fix_time = wx.TextCtrl(self, -1, "", size=(80,-1))
grid1.Add(self.fix_time, 1, wx.ALIGN_LEFT, 5)
label = wx.StaticText(self, -1, "Fix defect:")
grid1.Add(label, 0, wx.ALIGN_LEFT, 5)
self.fix_defect = wx.TextCtrl(self, -1, "", size=(80,-1))
grid1.Add(self.fix_defect, 1, wx.ALIGN_LEFT, 5)
sizer.Add(grid1, 0, wx.GROW|wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5)
btnsizer = wx.StdDialogButtonSizer()
btn = wx.Button(self, wx.ID_OK)
btn.SetDefault()
btnsizer.AddButton(btn)
btn = wx.Button(self, wx.ID_CANCEL)
btnsizer.AddButton(btn)
btnsizer.Realize()
sizer.Add(btnsizer, 0, wx.ALIGN_CENTER_VERTICAL|wx.ALL, 5)
self.SetSizer(sizer)
sizer.Fit(self)
def SetValue(self, item):
self.label.SetLabel(str(item.get("date", "")))
self.summary.SetValue(item.get("summary", ""))
self.description.SetValue(item.get("description", ""))
if 'type' in item:
self.defect_type.SetSelection(self.types.index(int(item['type'])))
if 'inject_phase' in item:
self.inject_phase.SetSelection(self.phases.index(item['inject_phase']))
if 'remove_phase' in item:
self.remove_phase.SetSelection(self.phases.index(item['remove_phase']))
if 'fix_time' in item:
self.fix_time.SetValue(pretty_time(item.get("fix_time", 0)))
self.fix_defect.SetValue(item.get("fix_defect", "") or '')
def GetValue(self):
item = {"summary": self.summary.GetValue(),
"description": self.description.GetValue(),
"type": self.types[self.defect_type.GetCurrentSelection()],
"inject_phase": self.phases[self.inject_phase.GetCurrentSelection()],
"remove_phase": self.phases[self.remove_phase.GetCurrentSelection()],
"fix_time": parse_time(self.fix_time.GetValue()),
"fix_defect": self.fix_defect.GetValue(),
}
return item
class PSPMixin(object):
"ide2py extension for integrated PSP support"
def __init__(self):
cfg = wx.GetApp().get_config("PSP")
# create psp tables structure
self.db.create("defect", defect_id=int, task_id=int,
number=int, summary=str, description=str, line_uuid=str,
date=str, type=int, inject_phase=str, remove_phase=str,
fix_time=float, fix_defect=int, checked=bool,
filename=str, lineno=int, offset=int, uuid=str)
self.db.create("time_summary", time_summary_id=int, task_id=int,
phase=str, plan=float, actual=float, off_task=float,
interruption=float, total_time=float)
self.db.create("metadata", metadata_id=int, filename=str, uuid=str,
lineno=int, origin=int, phase=str, text=str)
# text recording logs
psp_event_log_filename = cfg.get("psp_event_log", "psp_event_log.txt")
self.psp_event_log_file = open(psp_event_log_filename, "a")
self._current_psp_phase = None
self.psp_metadata_cache = {}
tb4 = self.CreatePSPToolbar()
grid = self.CreatePSPPlanSummaryGrid()
self._mgr.AddPane(grid, aui.AuiPaneInfo().
Caption("PSP Plan Summary Times").Name("psp_plan").
Bottom().Position(1).Row(2).
FloatingSize(wx.Size(200, 200)).CloseButton(True).MaximizeButton(True))
self.psp_defect_list = self.CreatePSPDefectRecordingLog()
self._mgr.AddPane(self.psp_defect_list, aui.AuiPaneInfo().
Caption("PSP Defect Recording Log").Name("psp_defects").
Bottom().Row(2).
FloatingSize(wx.Size(300, 200)).CloseButton(True).MaximizeButton(True))
self._mgr.Update()
# flags for time not spent on psp task
self.psp_interruption = None
self.psp_off_task = 0
self.psp_automatic_stopwatch = True
self.AppendWindowMenuItem('PSP',
('psp_plan', 'psp_defects', 'psp_toolbar', ), self.OnWindowMenu)
# web2py json rpc client
self.psp_rpc_client = simplejsonrpc.ServiceProxy(cfg.get("server_url"))
self.psp_wiki_url = cfg.get("wiki_url")
self.Bind(wx.EVT_CHOICE, self.OnPSPPhaseChoice, self.psp_phase_choice)
self.SetPSPPhase(cfg.get("current_phase"))
self.CreatePSPMenu()
# start up a browser on psp2py app
url = cfg.get("psp2py_url")
if False and url:
import webbrowser
wx.CallAfter(webbrowser.open, url)
# initialize the camera sensor (after one second to not delay startup)
if Camera:
wx.CallLater(1000., self.CreatePSPCamera)
def CreatePSPPlanSummaryGrid(self):
grid = wx.grid.Grid(self)
self.psptimetable = PlanSummaryTable(grid)
grid.SetTable(self.psptimetable, True)
return grid
def CreatePSPDefectRecordingLog(self):
list = DefectListCtrl(self)
return list
def CreatePSPMenu(self):
# create the menu items
psp_menu = self.menu['task']
psp_menu.Append(ID_PHASE, "Change PSP Phase")
psp_menu.Append(ID_UP, "Upload metrics")
psp_menu.Append(ID_DOWN, "Download metrics")
psp_menu.AppendSeparator()
psp_menu.Append(ID_START, "Start stopwatch")
psp_menu.Append(ID_PAUSE, "Pause stopwatch\tPause")
psp_menu.Append(ID_STOP, "Stop stopwatch")
psp_menu.AppendSeparator()
psp_menu.Append(ID_DEFECT, "Add Defect\tCtrl-D")
psp_menu.Append(ID_CHECK, "Check Completion\tCtrl-F5")
psp_menu.Append(ID_METADATA, "Show Metadata")
psp_menu.Append(ID_DIFF, "Diff && Count LOC")
self.menu['run'].InsertSeparator(2)
self.menu['run'].Insert(3, ID_COMPILE, "Compile && Check\tCtrl-F5",
"Check syntax, PEP8 style and PyFlakes static analysis")
self.menu['run'].Insert(4, ID_TEST, "Test\tAlt-F5",
"Run doctests & unittests")
self.Bind(wx.EVT_MENU, self.OnCheckPSP, id=ID_COMPILE)
self.Bind(wx.EVT_MENU, self.OnCheckPSP, id=ID_TEST)
def CreatePSPToolbar(self):
# old version of wx, dont use text text
tb4 = self.task_toolbar
tsize = wx.Size(16, 16)
GetBmp = lambda id: wx.ArtProvider.GetBitmap(id, wx.ART_TOOLBAR, tsize)
tb4.SetToolBitmapSize(tsize)
if WX_VERSION < (2, 8, 11): # TODO: prevent SEGV!
tb4.AddSpacer(200)
tb4.AddSimpleTool(ID_START, "Start", images.record.GetBitmap(),
short_help_string="Start stopwatch (start phase)")
tb4.AddCheckTool(ID_PAUSE, "Pause", images.pause.GetBitmap(), wx.NullBitmap,
short_help_string="Pause stopwatch (interruption)")
tb4.AddSimpleTool(ID_STOP, "Stop", images.stop.GetBitmap(),
short_help_string="Stop stopwatch (finish phase)")
tb4.EnableTool(ID_START, True)
tb4.EnableTool(ID_PAUSE, False)
tb4.EnableTool(ID_STOP, False)
##tb4.AddLabel(-1, "Phase:", width=50)
self.psp_phase_choice = wx.Choice(tb4, -1, size=(150,-1), choices=PSP_PHASES + [""])
if WX_VERSION > (2, 8, 11): # TODO: prevent SEGV!
tb4.AddControl(self.psp_phase_choice, "PSP Phase")
self.psp_gauge = PyGauge(tb4, -1, size=(100, 22))
if WX_VERSION > (2, 8, 11): # TODO: prevent SEGV!
tb4.AddControl(self.psp_gauge, "Progressbar")
self.psp_gauge.SetValue([0, 0])
self.psp_gauge.SetBarColor([wx.Colour(255,159,176), wx.Colour(162,255,178)])
self.psp_gauge.SetBackgroundColour(wx.WHITE)
self.psp_gauge.SetBorderColor(wx.SystemSettings.GetColour(wx.SYS_COLOUR_BTNSHADOW))
self.psp_gauge.SetBorderPadding(2)
tb4.AddSimpleTool(ID_DEFECT, "Defect", images.GetDebuggingBitmap(),
short_help_string="Add a PSP defect")
tb4.AddSimpleTool(ID_CHECK, "Check", images.ok_16.GetBitmap(),
short_help_string="Check and finish phase")
tb4.AddSimpleTool(ID_WIKI, "Help", images.gnome_help.GetBitmap(),
short_help_string="PSP Wiki")
self.Bind(wx.EVT_TIMER, self.TimerHandler)
self.timer = wx.Timer(self)
self.Bind(wx.EVT_MENU, self.OnPhasePSP, id=ID_PHASE)
self.Bind(wx.EVT_MENU, self.OnStartPSP, id=ID_START)
self.Bind(wx.EVT_MENU, self.OnPausePSP, id=ID_PAUSE)
self.Bind(wx.EVT_MENU, self.OnStopPSP, id=ID_STOP)
self.Bind(wx.EVT_MENU, self.OnDefectPSP, id=ID_DEFECT)
self.Bind(wx.EVT_MENU, self.OnUploadProjectPSP, id=ID_UP)
self.Bind(wx.EVT_MENU, self.OnDownloadProjectPSP, id=ID_DOWN)
self.Bind(wx.EVT_MENU, self.OnCheckPSP, id=ID_CHECK)
self.Bind(wx.EVT_MENU, self.OnMetadataPSP, id=ID_METADATA)
self.Bind(wx.EVT_MENU, self.OnDiffPSP, id=ID_DIFF)
self.Bind(wx.EVT_MENU, self.OnWikiPSP, id=ID_WIKI)
tb4.Realize()
return tb4
def CreatePSPCamera(self):
self.camera = Camera(self)
self._mgr.AddPane(self.camera, aui.AuiPaneInfo().
Name("psp_camera").Caption("PSP Camera").
MinSize(wx.Size(50, 40)).BestSize(wx.Size(96, 72)).
MaxSize(wx.Size(360, 240)).Bottom().Right().
Layer(1).Position(2).
Float().CloseButton(True).MinimizeButton(True))
self._mgr.Update()
# move the camera to the bottom right sector of the screen:
pane = self._mgr.GetPane(self.camera)
dw, dh = wx.DisplaySize()
w, h = pane.floating_size
art_provider = self._mgr.GetArtProvider()
caption_size = art_provider.GetMetric(aui.AUI_DOCKART_CAPTION_SIZE)
border_size = 4*art_provider.GetMetric(aui.AUI_DOCKART_PANE_BORDER_SIZE)
pane.FloatingPosition((dw - w - border_size, dh - h - caption_size))
self._mgr.Update()
def set_current_psp_phase(self, phase):
if self._current_psp_phase:
print "Updating metadata", self._current_psp_phase, "->", phase
self.UpdateMetadataPSP()
self._current_psp_phase = phase
def get_current_psp_phase(self):
return self._current_psp_phase
current_psp_phase = property(get_current_psp_phase, set_current_psp_phase)
def show_psp_plan_pane(self):
self._mgr.GetPane("psp_plan").Show(True)
self._mgr.Update()
def SetPSPPhase(self, phase):
if phase:
self.psp_phase_choice.SetSelection(PSP_PHASES.index(phase))
else:
self.psp_phase_choice.SetSelection(len(PSP_PHASES))
self.current_psp_phase = phase
def GetPSPPhase(self):
phase = self.psp_phase_choice.GetCurrentSelection()
if phase >= 0 and phase < len(PSP_PHASES):
return PSP_PHASES[phase]
else:
return ''
def OnPSPPhaseChoice(self, event):
# store current phase in config file
phase = self.GetPSPPhase()
wx.GetApp().config.set('PSP', 'current_phase', phase)
wx.GetApp().write_config()
self.current_psp_phase = self.GetPSPPhase()
def OnPhasePSP(self, event):
"Event to change the current PSP phase"
dlg = wx.SingleChoiceDialog(self, 'Select next phase', 'PSP Phase',
PSP_PHASES, wx.CHOICEDLG_STYLE)
if dlg.ShowModal() == wx.ID_OK:
self.SetPSPPhase(dlg.GetStringSelection())
dlg.Destroy()
def OnStartPSP(self, event):
# check if the user manually clicked the button (not the camera sensor):
self.psp_automatic_stopwatch = event is None
self.timer.Start(1000)
self.psp_log_event("start")
self.task_toolbar.EnableTool(ID_START, False)
self.task_toolbar.EnableTool(ID_PAUSE, True)
self.task_toolbar.EnableTool(ID_STOP, True)
self.show_psp_plan_pane()
def PSPInterrupt(self, message=""):
"Start the PSP interruption counter"
# Do not track time if user manually turned off the stopwatch:
if not self.psp_automatic_stopwatch or self.task_suspended:
return
# if PSP time tracking is not started, activate it:
if not self.timer.IsRunning():
self.OnStartPSP(None)
# ignore interrupt state change if already being counted (paused):
if self.psp_interruption is None:
self.psp_interruption = 0
self.psp_log_event("pausing!", comment=message)
self.task_toolbar.ToggleTool(ID_PAUSE, True)
self.task_toolbar.Refresh(False)
self.task_toolbar.Update()
def PSPResume(self, message=""):
"Disable the PSP interruption counter"
# Do not track time if user manually turned off the stopwatch:
if not self.psp_automatic_stopwatch or self.task_suspended:
return
# if PSP time tracking is not started, activate it:
if not self.timer.IsRunning():
self.OnStartPSP(None)
# ignore resume state change if already being counted (resumed):
if self.psp_interruption is not None:
self.psp_interruption = None
phase = self.GetPSPPhase()
if message:
self.psptimetable.comment(phase, message, self.psp_interruption)
self.psp_log_event("resuming", comment=message)
self.task_toolbar.ToggleTool(ID_PAUSE, False)
self.task_toolbar.Refresh(False)
self.task_toolbar.Update()
def OnPausePSP(self, event):
# check if the user manually clicked the button (not the camera sensor):
self.psp_automatic_stopwatch = event is None
# check if we are in a interruption delta or not:
if self.psp_interruption is not None:
# don't ask for a message if interruption was detected automatically
if event:
dlg = wx.TextEntryDialog(self,
'Enter a comment for the time recording log:',
'Interruption', 'phone call')
message = dlg.GetValue() if dlg.ShowModal() == wx.ID_OK else ""
dlg.Destroy()
else:
message = ""
self.PSPResume(message)
else:
self.PSPInterrupt()
self.show_psp_plan_pane()
def OnStopPSP(self, event):
# check if the user manually clicked the button (not the camera sensor):
self.psp_automatic_stopwatch = event is None
self.timer.Stop()
self.psp_log_event("stop")
if self.psp_interruption:
self.OnPausePSP(event)
self.task_toolbar.ToggleTool(ID_PAUSE, False)
self.task_toolbar.EnableTool(ID_START, True)
self.task_toolbar.EnableTool(ID_PAUSE, False)
self.task_toolbar.EnableTool(ID_STOP, False)
self.show_psp_plan_pane()
def TimerHandler(self, event):
# increment interruption delta time counter (if any)
if self.psp_interruption is not None:
self.psp_interruption += 1
# ignore actual time or interruptions if the IDE is not "focused"
active = wx.GetApp().IsActive() or self.executing
# update task total time
if self.task_id and active and not self.psp_interruption:
self.tick_task_context()
phase = self.GetPSPPhase()
if phase and self.task_id and not self.task_suspended:
# register variation and calculate total elapsed time
psp_times = self.psptimetable.count(phase, self.psp_interruption,
active)
if not psp_times:
return
actual = psp_times.get('actual') or 0
interruption = psp_times.get('interruption') or 0
plan = float(psp_times.get('plan') or 0)
total = float(max(plan, (actual + interruption)))
# Draw progress bar accordingly
if total and plan:
self.psp_gauge.SetRange(total)
self.psp_gauge.SetValue([interruption, interruption + actual])
# TODO: properly use effects (incremental Update):
self.psp_gauge.Refresh()
# NOTE: percentage could be bigger than 100 % (plan < elapsed)
percentage = int((actual + interruption) / plan * 100.)
if percentage < 75:
colour = wx.BLUE
elif percentage <= 100:
colour = wx.NamedColour("ORANGE")
else:
colour = wx.RED
self.psp_gauge.SetDrawValue(font=wx.SMALL_FONT, colour=colour,
formatString="%d %%" % percentage)
else:
self.psp_gauge.SetRange(100)
self.psp_gauge.SetValue([0, 0])
if not self.psp_interruption:
self.psp_defect_list.count(phase)
def __del__(self):
self.OnStop(None)
self.psp_event_log_file.close()
def OnDefectPSP(self, event):
"Manually create a new PSP defect"
dlg = DefectDialog(None, -1, "New Defect", size=(350, 200),
style=wx.DEFAULT_DIALOG_STYLE,
)
dlg.CenterOnScreen()
phase = self.GetPSPPhase()
filename = lineno = None
# seek current file metadata for inject phase
if self.active_child:
filename = self.active_child.GetFilename()
lineno = self.active_child.GetCurrentLine()
if filename and lineno:
metadata = self.get_metadata(filename)
phase = metadata[lineno-1]['phase']
dlg.SetValue({'inject_phase': phase})
if dlg.ShowModal() == wx.ID_OK:
item = dlg.GetValue()
item["date"] = datetime.date.today()
item["number"] = None
item["filename"] = filename
item["lineno"] = lineno
item["offset"] = None
self.psp_defect_list.AddItem(item)
def NotifyDefect(self, summary="", type="20", filename=None, lineno=0, offset=0, description=""):
no = None
# if filename and line number, get injected psp phase from metadata
if filename and lineno and not filename.startswith("<"):
metadata = self.get_metadata(filename)
phase = metadata[lineno-1]['phase']
line_uuid = metadata[lineno-1]['uuid']
else:
phase = "" #self.GetPSPPhase()
line_uuid = None
item = {'number': no, 'summary': summary, "date": datetime.date.today(),
"type": type, "inject_phase": phase, "remove_phase": "", "fix_time": 0,
"fix_defect": "", "description": description,
"filename": filename, "lineno": lineno, "offset": offset,
"line_uuid": line_uuid, }
self.psp_defect_list.AddItem(item)
self._mgr.GetPane("psp_defects").Show(True)
self._mgr.Update()
def NotifyModification(self, filename=None):
"Update defects line numbers, called by editor on modification events"
if self.psp_defect_list.data is not None:
if filename and not filename.startswith("<"):
metadata = self.get_metadata(filename)
# reverse map uuid and line numbers (position, zero-based)
linenos = [datum['uuid'] for datum in metadata]
for item in self.psp_defect_list.data.values():
try:
# get the new line number (if any)
new = linenos.index(item['line_uuid']) + 1
item['lineno'] = new
except (KeyError, ValueError):
# line has been deleted or uuid is not available, clean it:
item['lineno'] = None
# update the UI
self.psp_defect_list.UpdateItems()
def psp_log_event(self, event, uuid="-", comment=""):
phase = self.GetPSPPhase()
timestamp = str(datetime.datetime.now())
msg = PSP_EVENT_LOG_FORMAT % {'timestamp': timestamp, 'phase': phase,
'event': event, 'comment': comment, 'uuid': uuid}
print msg
self.psp_event_log_file.write("%s\r\n" % msg)
self.psp_event_log_file.flush()
def deactivate_task(self):
# store current data:
self.psp_save_project()
super(PSPMixin, self).deactivate_task()
self.OnStopPSP(None)
# clean up previous data:
self.psp_load_project()
def activate_task(self, *args, **kwargs):
super(PSPMixin, self).activate_task(*args, **kwargs)
if self.task_id:
self.psp_load_project()
self.OnStartPSP(None)
def suspend_task(self):
super(PSPMixin, self).suspend_task()
self.OnStopPSP(None)
self.psp_automatic_stopwatch = False
def resume_task(self):
super(PSPMixin, self).resume_task()
self.OnStartPSP(None)
def OnUploadProjectPSP(self, event):
self.psp_save_project()
def OnDownloadProjectPSP(self, event):
self.psp_load_project()
def psp_save_project(self):
"Send metrics to remote server (times and defects)"
        # convert to plain dictionaries to be serialized and sent to web2py DAL
# remove GUI implementation details, match psp2py database model
if self.task_id:
task = self.db["task"][self.task_id]
self.psp_defect_list.data.sync()
self.psptimetable.data.sync()
if self.psp_rpc_client:
pass ##self.psp_save_project_rpc(task["task_name"]):
return True
def psp_save_project_rpc(self, task_name):
defects = []
for defect in self.psp_defect_list.data.values():
defect = defect.copy()
defect['date'] = str(defect['date'])
del defect['checked']
defects.append(defect)
time_summaries = []
comments = []
for phase, times in self.psptimetable.data.items():
time_summary = {'phase': phase}
time_summary.update(times)
for message, delta in time_summary.pop('comments', []):
comment = {'phase': phase, 'message': message, 'delta': delta}
comments.append(comment)
time_summaries.append(time_summary)
        self.psp_rpc_client.save_project(task_name,
defects,
time_summaries,
comments)
return True
def psp_update_project(self, locs, objects):
"Update metrics to remote server (only size now)"
        # convert to plain dictionaries to be serialized and sent to web2py DAL
# remove GUI implementation details, match psp2py database model
# this function is supposed to be called on postmortem phase (diff)
if self.task_id:
task = self.db["task"][self.task_id]
# data received:
# objects = [[7, 'Test', '__init__', 1, 'filename.py'],
# locs = {'new': 0, 'total': 6, 'modified': 1, 'comments': 1})
            #TODO: split new+modified+reused loc count
actual_loc = locs.get('new', 0) + locs.get('modified', 0)
reuse_library_entries = []
for obj in objects:
entry = {
"filename": obj[4],
"class_name": obj[1],
"function_name": obj[2],
"lineno": obj[0],
"loc": obj[3],
}
reuse_library_entries.append(entry)
self.psp_rpc_client.update_project(task['task_name'],
actual_loc,
reuse_library_entries)
return True
def psp_load_project(self):
"Receive metrics from remote server (times and defects)"
# clean up any previous metrics data:
self.psp_defect_list.DeleteAllItems()
self.psptimetable.Clear()
# fetch and deserialize database internal rows to GUI data structures
if self.task_id:
task = self.db["task"][self.task_id]
data = DictShelf(self.db, "defect", "uuid", task_id=self.task_id)
self.psp_defect_list.Load(data)
data = DictShelf(self.db, "time_summary", "phase", task_id=self.task_id)
self.psptimetable.Load(data)
if self.psp_rpc_client:
pass ##self.psp_load_project_rpc(task['task_name'])
def psp_load_project_rpc(self, task_name):
defects, times, comments = self.psp_rpc_client.load_project(task_name)
defects.sort(key=lambda defect: int(defect['number']))
for defect in defects:
defect["date"] = datetime.datetime.strptime(defect["date"], "%Y-%m-%d")
self.psp_defect_list.AddItem(defect)
        for time_summary in times:
self.psptimetable.data[str(time_summary['phase'])] = time_summary
for comment in comments:
phase, message, delta = comment['phase'], comment['message'], comment['delta']
self.psptimetable.comment(str(phase), message, delta)
self.psptimetable.UpdateValues()
return True
def OnCheckPSP(self, event):
"Find defects and errors, if complete, change to the next phase"
evt_id = event.GetId()
if evt_id == ID_COMPILE:
self.SetPSPPhase('compile')
elif evt_id == ID_TEST:
self.SetPSPPhase('test')
if self.active_child:
phase = self.GetPSPPhase()
defects = [] # static checks and failed tests
errors = [] # sanity checks (planning & postmortem)
if phase == "planning":
# check plan summary completeness
                # use a separate loop variable to avoid clobbering "phase"
                for phase_name, times in self.psptimetable.data.items():
                    if not times['plan']:
                        errors.append("Complete %s estimate time!" % phase_name)
elif phase == "design" or phase == "code":
#TODO: review checklist?
pass
elif phase == "compile":
# run "static" chekers to find coding defects (pep8, pyflakes)
import checker
defects.extend(checker.check(self.active_child.GetFilename()))
elif phase == "test":
# run doctests to find defects
import tester
defects.extend(tester.test(self.active_child.GetFilename()))
elif phase == "postmortem":
# check that all defects are fixed
for defect in self.psp_defect_list.data.values():
if not defect['remove_phase']:
errors.append("Defect %(number)s not fixed!" % defect)
# add found defects (highlight them in the editor window)
line_numbers = set()
for defect in defects:
self.NotifyDefect(**defect)
errors.append("Defect found: %(summary)s" % defect)
if defect['lineno'] is not None:
line_numbers.add(defect['lineno'])
self.active_child.HighlightLines(line_numbers)
# show errors
if errors:
dlg = wx.MessageDialog(self, "\n".join(errors),
"PSP Check Phase Errors", wx.ICON_EXCLAMATION | wx.OK)
dlg.ShowModal()
dlg.Destroy()
self._mgr.GetPane("psp_defects").Show(True)
self._mgr.Update()
# phase completed? project completed?
if not defects and not errors:
i = PSP_PHASES.index(phase) + 1
if i < len(PSP_PHASES):
phase = PSP_PHASES[i]
else:
phase = ""
self.OnStopPSP(event)
self.SetPSPPhase(phase)
else:
dlg = wx.MessageDialog(self, "No active file, cannot check it.\n"
"Change PSP phase manually if desired.",
"PSP Check Phase Errors", wx.ICON_EXCLAMATION)
dlg.ShowModal()
dlg.Destroy()
def OnMetadataPSP(self, event):
"Event to update and show metadata"
self.UpdateMetadataPSP(show=True)
def OnDiffPSP(self, event):
"Event to calc diff and update metadata"
        # this is a temporary and auxiliary function just to rebuild metadata
if self.active_child:
import fileutil
import locutil
filename = self.active_child.GetFilename()
if filename:
with open(filename, "r") as f:
new_text, encoding, bom, eol, new_newlines = fileutil.unicode_file_read(f, "utf8")
dlg = wx.TextEntryDialog(self, 'Compare with:',
'PSP DIFF', filename)
if dlg.ShowModal() == wx.ID_OK:
old_filename = dlg.GetValue()
with open(old_filename, "r") as f:
old_text, encoding, bom, eol, old_newlines = fileutil.unicode_file_read(f, "utf8")
else:
old_filename = ''
old_text = u''
old_newlines = '\n'
dlg.Destroy()
# re-encode unicode to same encoding
#old_text = old_text.encode("utf8")
#new_text = new_text.encode("utf8")
# render the diff
from wxpydiff import PyDiff
PyDiff(None, 'wxPyDiff (PSP)', old_filename, filename, old_text, new_text)
# compare old and new lines:
old_lines = old_text.split(old_newlines)
new_lines = new_text.split(new_newlines)
changes = locutil.analize_line_changes(old_lines, new_lines)
objects, locs = locutil.count_logical_lines_per_object(filename,
changes=changes)
# add filename to each object (TODO: check several files)
for obj in objects:
obj.append(filename)
# send metrics to remote server
self.psp_update_project(locs, objects)
def UpdateMetadataPSP(self, show=False):
if self.active_child:
filename = self.active_child.GetFilename()
if filename:
self.get_metadata(filename, show)
def get_metadata(self, filename, show=False):
"Build a persistent list of dicts with line metadata (uuid, phase, ...)"
# check if it is in cache:
metadata = self.psp_metadata_cache.get(filename)
# if not metadata valid in cache, query the database:
if metadata is None:
# create a new db to isolate the transaction for each file
db = wx.GetApp().get_db(new=True)
metadata = ListShelf(db, "metadata", "lineno", filename=filename,
autocommit=False)
self.psp_metadata_cache[filename] = metadata
if show:
msg = '\n'.join(["%(uuid)s %(phase)10s - %(lineno)5s: %(text)s" %
metadata[key] for key in sorted(metadata.keys())])
dlg = wx.lib.dialogs.ScrolledMessageDialog(self, msg, "PSP METADATA")
dlg.ShowModal()
dlg.Destroy()
return metadata
def save_metadata(self, filename, discard=False):
"Store metadata to the database"
self.get_metadata(filename).sync(commit=True)
def OnWikiPSP(self, event):
# create the HTML "browser" window:
ctrl = wx.html.HtmlWindow(self, -1, wx.DefaultPosition, wx.Size(400, 300))
if "gtk2" in wx.PlatformInfo:
ctrl.SetStandardFonts()
ctrl.LoadPage(self.psp_wiki_url)
self._mgr.AddPane(ctrl, aui.AuiPaneInfo().
Caption("PSP Wiki").
Float().
FloatingSize(wx.Size(300, 200)).MinimizeButton(True))
self._mgr.Update()
if __name__ == "__main__":
app = wx.App()
dlg = DefectDialog(None, -1, "Sample Dialog", size=(350, 200),
#style=wx.CAPTION | wx.SYSTEM_MENU | wx.THICK_FRAME,
style=wx.DEFAULT_DIALOG_STYLE, # & ~wx.CLOSE_BOX,
)
dlg.CenterOnScreen()
# this does not return until the dialog is closed.
val = dlg.ShowModal()
print dlg.GetValue()
#dlg.Destroy()
app.MainLoop()
| gpl-3.0 | -7,311,089,349,396,490,000 | 40.146457 | 102 | 0.562194 | false |
pk-hackerrank/hr-ai | statistics_and_machine_learning/correlation_and_regression_lines_2.py | 1 | 1427 | # Slope of a regression line
# a = r*(Sy/Sx)
# r = Correlation coefficient, Sx = Standard deviation of x data, Sy = Standard deviation of y data.
# r is already calculated in correlation_and_regression_lines_1.py
# Standard Deviation = sqrt(variance)
# variance = sum((x - mu)^2) / len(list)
# mu = mean of list
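# Illustrative check with made-up numbers: for nums = [1, 2, 3] the mean is 2,
# the variance is ((1-2)^2 + (2-2)^2 + (3-2)^2) / 3 = 2/3 ~ 0.667, and the
# standard deviation is sqrt(2/3) ~ 0.816. Likewise, if r = 0.5, Sy = 4 and
# Sx = 2, the regression slope would be a = 0.5 * (4/2) = 1.0.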
import math
def mean(nums):
return sum(nums)/len(nums)
def variance(nums):
m = mean(nums)
variance = mean([abs(n - m)**2 for n in nums])
return variance
def standard_deviation(nums):
stdev = math.sqrt(variance(nums))
return stdev
def correlation(listx, listy):
elmntPrdListXandListY = [a*b for a,b in zip(listx, listy)]
numerator = len(listx) * sum(elmntPrdListXandListY) - sum(listx) * sum(listy)
denominator = math.sqrt((len(listx) * sum([elx**2 for elx in listx]) - (sum(listx))**2) * (len(listy) * sum([ely**2 for ely in listy]) - (sum(listy))**2))
pearson_coefficient = numerator / denominator
return pearson_coefficient
# Why listx as physics, because treating Physics as the independent variable
listx = [15, 12, 8, 8, 7, 7, 7, 6, 5, 3] # Physics
listy = [10, 25, 17, 11, 13, 17, 20, 13, 9, 15] # History
stnd_dvtn_x = standard_deviation(listx)
stnd_dvtn_y = standard_deviation(listy)
correlation_coeff = correlation(listx, listy)
slope_of_regression_line = correlation_coeff * (stnd_dvtn_y/stnd_dvtn_x)
print("{:.3f}".format(slope_of_regression_line)) | mit | 6,662,367,892,761,468,000 | 37.594595 | 158 | 0.683252 | false |
sekikn/incubator-airflow | airflow/contrib/operators/gcp_function_operator.py | 7 | 2343 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""This module is deprecated. Please use `airflow.providers.google.cloud.operators.functions`."""
import warnings
from airflow.providers.google.cloud.operators.functions import (
CloudFunctionDeleteFunctionOperator,
CloudFunctionDeployFunctionOperator,
)
warnings.warn(
"This module is deprecated. Please use `airflow.providers.google.cloud.operators.functions`.",
DeprecationWarning,
stacklevel=2,
)
class GcfFunctionDeleteOperator(CloudFunctionDeleteFunctionOperator):
"""
This class is deprecated.
Please use `airflow.providers.google.cloud.operators.function.CloudFunctionDeleteFunctionOperator`.
"""
def __init__(self, *args, **kwargs):
warnings.warn(
"""This class is deprecated.
Please use
`airflow.providers.google.cloud.operators.function.CloudFunctionDeleteFunctionOperator`.""",
DeprecationWarning,
stacklevel=3,
)
super().__init__(*args, **kwargs)
class GcfFunctionDeployOperator(CloudFunctionDeployFunctionOperator):
"""
This class is deprecated.
Please use `airflow.providers.google.cloud.operators.function.CloudFunctionDeployFunctionOperator`.
"""
def __init__(self, *args, **kwargs):
warnings.warn(
"""This class is deprecated.
Please use
`airflow.providers.google.cloud.operators.function.CloudFunctionDeployFunctionOperator`.""",
DeprecationWarning,
stacklevel=3,
)
super().__init__(*args, **kwargs)
| apache-2.0 | -5,870,233,675,561,367,000 | 35.046154 | 104 | 0.711481 | false |
powersjcb/two_scoops_project | config/wsgi.py | 1 | 1630 | """
WSGI config for two_scoops_project project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
from django.core.wsgi import get_wsgi_application
from whitenoise.django import DjangoWhiteNoise
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
# Use Whitenoise to serve static files
# See: https://whitenoise.readthedocs.org/
application = DjangoWhiteNoise(application)
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| bsd-3-clause | 3,850,920,244,339,554,300 | 39.75 | 79 | 0.796319 | false |
jerryli27/my-neural-style | loss_visualize.py | 1 | 4097 | """
This file will take two images, one...
"""
"""
This file uses the texture nets technique to generate an image by combining the style of one input
with the content of another. The code skeleton mainly comes from https://github.com/anishathalye/neural-style.
"""
from argparse import ArgumentParser
import numpy as np
from matplotlib import pyplot as plt
import loss_visualize_util
from general_util import *
# TODO: Needs reformatting.
# default arguments
VGG_PATH = 'imagenet-vgg-verydeep-19.mat'
CONTENT_IMG = '/home/jerryli27/PycharmProjects/my-neural-style/source_compressed/chicago.jpg'
STYLE_IMG = '/home/jerryli27/PycharmProjects/my-neural-style/output/mirror-nstyle-van_gogh_starry_sky-iter-80000-batchsize-8-lr-0.001000-use_mrf-False-johnson-style-200-content-5-stylenum-0_67500.jpg'
OUTPUT_PATH=''
def build_parser():
parser = ArgumentParser()
parser.add_argument('--content_img', type=str,
dest='content_img', help='test image path',
metavar='CONTENT_IMG', default = CONTENT_IMG)
parser.add_argument('--style_img',
dest='style_img', help='one style image',
metavar='STYLE_IMG', default=STYLE_IMG)
parser.add_argument('--output',
dest='output', help='output path',
metavar='OUTPUT', default=OUTPUT_PATH)
parser.add_argument('--height', type=int,
dest='height', help='output height',
metavar='HEIGHT', default=256)
parser.add_argument('--width', type=int,
dest='width', help='output width',
metavar='WIDTH', default=256)
parser.add_argument('--network',
dest='network', help='path to network parameters (default %(default)s)',
metavar='VGG_PATH', default=VGG_PATH)
parser.add_argument('--use_mrf',
dest='use_mrf', help='If true, we use Markov Random Fields loss instead of Gramian loss.'
' (default %(default)s).', action='store_true')
parser.set_defaults(use_mrf=False)
return parser
def main():
parser = build_parser()
options = parser.parse_args()
if not os.path.isfile(options.network):
parser.error("Network %s does not exist. (Did you forget to download it?)" % options.network)
target_shape = (options.height, options.width)
content_image = imread(options.content_img, shape=target_shape)
style_image = imread(options.style_img, shape=target_shape)
layers = (
'conv1_1', 'relu1_1', 'conv1_2', 'relu1_2', 'pool1',
'conv2_1', 'relu2_1', 'conv2_2', 'relu2_2', 'pool2',
'conv3_1', 'relu3_1', 'conv3_2', 'relu3_2', 'conv3_3',
'relu3_3', 'conv3_4', 'relu3_4', 'pool3',
'conv4_1', 'relu4_1', 'conv4_2', 'relu4_2', 'conv4_3',
'relu4_3', 'conv4_4', 'relu4_4', 'pool4',
'conv5_1', 'relu5_1', 'conv5_2', 'relu5_2', 'conv5_3',
'relu5_3', 'conv5_4', 'relu5_4'
)
plt.ion()
fig, ax = plt.subplots()
plt.subplots_adjust(left=0.25, bottom=0.25)
ax.set_title("Loss visualization")
fig.show()
plt.pause(0.001)
losses = loss_visualize_util.style_synthesis_net(content_image, style_image,layers, loss_visualize_util.per_pixel_gram_loss, options.network)
for layer in layers:
#plt.ion()
# We must do this clip step before we display the image. Otherwise the color will be off.
image = np.clip(losses[layer], 0, 255).astype(np.uint8)
        # Update the data in place instead of creating a new window.
        for feature_i in range(image.shape[3]):
ax.set_title(str('%s, %d'% (layer, feature_i)))
feature = image[0,:,:, feature_i]
feature = np.dstack((feature,feature,feature))
            im = ax.imshow(feature, vmin=0, vmax=255)  # display the current feature map
im.axes.figure.canvas.draw()
plt.show()
plt.pause(0.01)
plt.ioff()
raw_input('Showing layer %s, press enter to continue.' %layer)
plt.ion()
if __name__ == '__main__':
main() | gpl-3.0 | 201,143,636,363,375,550 | 36.59633 | 200 | 0.612399 | false |
Enlik/entropy | lib/entropy/db/sql.py | 5 | 189281 | # -*- coding: utf-8 -*-
"""
@author: Fabio Erculiani <[email protected]>
@contact: [email protected]
@copyright: Fabio Erculiani
@license: GPL-2
I{EntropyRepository} is an abstract class that implements
most of the EntropyRepository methods using standard SQL.
"""
import os
import hashlib
import itertools
import time
import threading
from entropy.const import etpConst, const_debug_write, \
const_debug_enabled, const_isunicode, const_convert_to_unicode, \
const_get_buffer, const_convert_to_rawstring, const_is_python3, \
const_get_stringtype
from entropy.exceptions import SystemDatabaseError, SPMError
from entropy.spm.plugins.factory import get_default_instance as get_spm
from entropy.output import bold, red
from entropy.misc import ParallelTask
from entropy.i18n import _
import entropy.dep
import entropy.tools
from entropy.db.skel import EntropyRepositoryBase
from entropy.db.cache import EntropyRepositoryCacher
from entropy.db.exceptions import Warning, Error, InterfaceError, \
DatabaseError, DataError, OperationalError, IntegrityError, \
InternalError, ProgrammingError, NotSupportedError
class SQLConnectionWrapper(object):
"""
This class wraps an implementation dependent
Connection object, exposing a common API which
resembles the Python DBAPI 2.0.
All the underlying library calls are wrapped
around using a proxy method in order to catch
and then raise entropy.db.exceptions exceptions.
"""
def __init__(self, connection, exceptions):
self._con = connection
self._excs = exceptions
def __hash__(self):
return id(self)
@staticmethod
def _proxy_call(exceptions, method, *args, **kwargs):
"""
This method is tricky because it wraps every
underlying engine call catching all its exceptions
and respawning them as entropy.db.exceptions.
This provides optimum abstraction.
"""
try:
# do not change the exception handling
# order, we need to reverse the current
# hierarchy to avoid catching super
# classes before their subs.
return method(*args, **kwargs)
except exceptions.InterfaceError as err:
raise InterfaceError(err)
except exceptions.DataError as err:
raise DataError(err)
except exceptions.OperationalError as err:
raise OperationalError(err)
except exceptions.IntegrityError as err:
raise IntegrityError(err)
except exceptions.InternalError as err:
raise InternalError(err)
except exceptions.ProgrammingError as err:
raise ProgrammingError(err)
except exceptions.NotSupportedError as err:
raise NotSupportedError(err)
except exceptions.DatabaseError as err:
# this is the parent of all the above
raise DatabaseError(err)
except exceptions.Error as err:
raise Error(err)
except exceptions.Warning as err:
raise Warning(err)
@staticmethod
def connect(module_proxy, module, subclass, *args, **kwargs):
conn_impl = SQLConnectionWrapper._proxy_call(
module_proxy.exceptions(), module.connect,
*args, **kwargs)
return subclass(conn_impl, module_proxy.exceptions())
def commit(self):
return self._proxy_call(self._excs, self._con.commit)
def rollback(self):
return self._proxy_call(self._excs, self._con.rollback)
def close(self):
return self._proxy_call(self._excs, self._con.close)
def cursor(self):
return self._proxy_call(self._excs, self._con.cursor)
def ping(self):
"""
Ping the underlying connection to keep it alive.
"""
raise NotImplementedError()
def unicode(self):
"""
Enforce Unicode strings.
"""
raise NotImplementedError()
def rawstring(self):
"""
Enforce byte strings.
"""
raise NotImplementedError()
def interrupt(self):
"""
Interrupt any pending activity.
"""
raise NotImplementedError()
class SQLCursorWrapper(object):
"""
This class wraps an implementation dependent
Cursor object, exposing a common API which
resembles the Python DBAPI 2.0.
All the underlying library calls are wrapped
around using a proxy method in order to catch
and then raise entropy.db.exceptions exceptions.
"""
def __init__(self, cursor, exceptions):
self._cur = cursor
self._excs = exceptions
def _proxy_call(self, method, *args, **kwargs):
"""
This method is tricky because it wraps every
underlying engine call catching all its exceptions
and respawning them as entropy.db.exceptions.
This provides optimum abstraction.
"""
try:
# do not change the exception handling
# order, we need to reverse the current
# hierarchy to avoid catching super
# classes before their subs.
return method(*args, **kwargs)
except self._excs.InterfaceError as err:
raise InterfaceError(err)
except self._excs.DataError as err:
raise DataError(err)
except self._excs.OperationalError as err:
raise OperationalError(err)
except self._excs.IntegrityError as err:
raise IntegrityError(err)
except self._excs.InternalError as err:
raise InternalError(err)
except self._excs.ProgrammingError as err:
raise ProgrammingError(err)
except self._excs.NotSupportedError as err:
raise NotSupportedError(err)
except self._excs.DatabaseError as err:
# this is the parent of all the above
raise DatabaseError(err)
except self._excs.Error as err:
raise Error(err)
except self._excs.Warning as err:
raise Warning(err)
def wrap(self, method, *args, **kwargs):
return self._proxy_call(method, *args, **kwargs)
def execute(self, *args, **kwargs):
raise NotImplementedError()
def executemany(self, *args, **kwargs):
raise NotImplementedError()
def close(self, *args, **kwargs):
raise NotImplementedError()
def fetchone(self, *args, **kwargs):
raise NotImplementedError()
def fetchall(self, *args, **kwargs):
raise NotImplementedError()
def fetchmany(self, *args, **kwargs):
raise NotImplementedError()
def executescript(self, *args, **kwargs):
raise NotImplementedError()
def callproc(self, *args, **kwargs):
raise NotImplementedError()
def nextset(self, *args, **kwargs):
raise NotImplementedError()
def __iter__(self):
raise NotImplementedError()
def __next__(self):
raise NotImplementedError()
def next(self):
raise NotImplementedError()
@property
def lastrowid(self):
return self._cur.lastrowid
@property
def rowcount(self):
return self._cur.rowcount
@property
def description(self):
return self._cur.description
class EntropySQLRepository(EntropyRepositoryBase):
"""
EntropySQLRepository partially implements a SQL based repository
storage.
"""
class Schema(object):
def get_init(self):
data = """
CREATE TABLE baseinfo (
idpackage INTEGER PRIMARY KEY AUTOINCREMENT,
atom VARCHAR,
category VARCHAR,
name VARCHAR,
version VARCHAR,
versiontag VARCHAR,
revision INTEGER,
branch VARCHAR,
slot VARCHAR,
license VARCHAR,
etpapi INTEGER,
trigger INTEGER
);
CREATE TABLE extrainfo (
idpackage INTEGER PRIMARY KEY,
description VARCHAR,
homepage VARCHAR,
download VARCHAR,
size VARCHAR,
chost VARCHAR,
cflags VARCHAR,
cxxflags VARCHAR,
digest VARCHAR,
datecreation VARCHAR,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE content (
idpackage INTEGER,
file VARCHAR,
type VARCHAR,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE contentsafety (
idpackage INTEGER,
file VARCHAR,
mtime FLOAT,
sha256 VARCHAR,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE provide (
idpackage INTEGER,
atom VARCHAR,
is_default INTEGER DEFAULT 0,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE dependencies (
idpackage INTEGER,
iddependency INTEGER,
type INTEGER,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE dependenciesreference (
iddependency INTEGER PRIMARY KEY AUTOINCREMENT,
dependency VARCHAR
);
CREATE TABLE conflicts (
idpackage INTEGER,
conflict VARCHAR,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE mirrorlinks (
mirrorname VARCHAR,
mirrorlink VARCHAR
);
CREATE TABLE sources (
idpackage INTEGER,
idsource INTEGER,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE sourcesreference (
idsource INTEGER PRIMARY KEY AUTOINCREMENT,
source VARCHAR
);
CREATE TABLE useflags (
idpackage INTEGER,
idflag INTEGER,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE useflagsreference (
idflag INTEGER PRIMARY KEY AUTOINCREMENT,
flagname VARCHAR
);
CREATE TABLE keywords (
idpackage INTEGER,
idkeyword INTEGER,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE keywordsreference (
idkeyword INTEGER PRIMARY KEY AUTOINCREMENT,
keywordname VARCHAR
);
CREATE TABLE configprotect (
idpackage INTEGER PRIMARY KEY,
idprotect INTEGER,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE configprotectmask (
idpackage INTEGER PRIMARY KEY,
idprotect INTEGER,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE configprotectreference (
idprotect INTEGER PRIMARY KEY AUTOINCREMENT,
protect VARCHAR
);
CREATE TABLE systempackages (
idpackage INTEGER PRIMARY KEY,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE injected (
idpackage INTEGER PRIMARY KEY,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE installedtable (
idpackage INTEGER PRIMARY KEY,
repositoryname VARCHAR,
source INTEGER,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE sizes (
idpackage INTEGER PRIMARY KEY,
size INTEGER,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE counters (
counter INTEGER,
idpackage INTEGER,
branch VARCHAR,
PRIMARY KEY(idpackage,branch),
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE trashedcounters (
counter INTEGER
);
CREATE TABLE needed_libs (
idpackage INTEGER,
lib_user_path VARCHAR,
lib_user_soname VARCHAR,
soname VARCHAR,
elfclass INTEGER,
rpath VARCHAR,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE provided_libs (
idpackage INTEGER,
library VARCHAR,
path VARCHAR,
elfclass INTEGER,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE treeupdates (
repository VARCHAR PRIMARY KEY,
digest VARCHAR
);
CREATE TABLE treeupdatesactions (
idupdate INTEGER PRIMARY KEY AUTOINCREMENT,
repository VARCHAR,
command VARCHAR,
branch VARCHAR,
date VARCHAR
);
CREATE TABLE licensedata (
licensename VARCHAR UNIQUE,
text BLOB,
compressed INTEGER
);
CREATE TABLE licenses_accepted (
licensename VARCHAR UNIQUE
);
CREATE TABLE triggers (
idpackage INTEGER PRIMARY KEY,
data BLOB,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE entropy_misc_counters (
idtype INTEGER PRIMARY KEY,
counter INTEGER
);
CREATE TABLE categoriesdescription (
category VARCHAR,
locale VARCHAR,
description VARCHAR
);
CREATE TABLE packagesets (
setname VARCHAR,
dependency VARCHAR
);
CREATE TABLE packagechangelogs (
category VARCHAR,
name VARCHAR,
changelog BLOB,
PRIMARY KEY (category, name)
);
CREATE TABLE automergefiles (
idpackage INTEGER,
configfile VARCHAR,
md5 VARCHAR,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE packagedesktopmime (
idpackage INTEGER,
name VARCHAR,
mimetype VARCHAR,
executable VARCHAR,
icon VARCHAR,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE packagedownloads (
idpackage INTEGER,
download VARCHAR,
type VARCHAR,
size INTEGER,
disksize INTEGER,
md5 VARCHAR,
sha1 VARCHAR,
sha256 VARCHAR,
sha512 VARCHAR,
gpg BLOB,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE provided_mime (
mimetype VARCHAR,
idpackage INTEGER,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE packagesignatures (
idpackage INTEGER PRIMARY KEY,
sha1 VARCHAR,
sha256 VARCHAR,
sha512 VARCHAR,
gpg BLOB,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE packagespmphases (
idpackage INTEGER PRIMARY KEY,
phases VARCHAR,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE packagespmrepository (
idpackage INTEGER PRIMARY KEY,
repository VARCHAR,
FOREIGN KEY(idpackage)
REFERENCES baseinfo(idpackage) ON DELETE CASCADE
);
CREATE TABLE entropy_branch_migration (
repository VARCHAR,
from_branch VARCHAR,
to_branch VARCHAR,
post_migration_md5sum VARCHAR,
post_upgrade_md5sum VARCHAR,
PRIMARY KEY (repository, from_branch, to_branch)
);
CREATE TABLE preserved_libs (
library VARCHAR,
elfclass INTEGER,
path VARCHAR,
atom VARCHAR,
PRIMARY KEY (library, path, elfclass)
);
CREATE TABLE xpakdata (
idpackage INTEGER PRIMARY KEY,
data BLOB
);
CREATE TABLE settings (
setting_name VARCHAR,
setting_value VARCHAR,
PRIMARY KEY(setting_name)
);
"""
return data
# the "INSERT OR REPLACE" dialect
# For SQLite3 it's "INSERT OR REPLACE"
# while for MySQL it's "REPLACE"
_INSERT_OR_REPLACE = None
# the "INSERT OR IGNORE" dialect
_INSERT_OR_IGNORE = None
## Optionals
# If not None, must contain the
# "UPDATE OR REPLACE" dialect
_UPDATE_OR_REPLACE = None
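    # Purely illustrative sketch (these values are not set by this base
    # class): an SQLite-backed subclass would typically use
    #   _INSERT_OR_REPLACE = "INSERT OR REPLACE"
    #   _INSERT_OR_IGNORE = "INSERT OR IGNORE"
    #   _UPDATE_OR_REPLACE = "UPDATE OR REPLACE"
    # while a MySQL-backed one would use "REPLACE" and "INSERT IGNORE",
    # keeping _UPDATE_OR_REPLACE to None (MySQL has no such statement).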
_MAIN_THREAD = threading.current_thread()
@classmethod
def isMainThread(cls, thread_obj):
return thread_obj is cls._MAIN_THREAD
# Generic repository name to use when none is given.
GENERIC_NAME = "__generic__"
def __init__(self, db, read_only, skip_checks, indexing,
xcache, temporary, name, direct=False, cache_policy=None):
# connection and cursor automatic cleanup support
self._cleanup_monitor_cache_mutex = threading.Lock()
self._cleanup_monitor_cache = {}
self._db = db
self._indexing = indexing
self._skip_checks = skip_checks
self._settings_cache = {}
self.__connection_pool = {}
self.__connection_pool_mutex = threading.RLock()
self.__cursor_pool_mutex = threading.RLock()
self.__cursor_pool = {}
if name is None:
name = self.GENERIC_NAME
self._live_cacher = EntropyRepositoryCacher()
EntropyRepositoryBase.__init__(self, read_only, xcache,
temporary, name, direct=direct,
cache_policy=cache_policy)
def _cursor_connection_pool_key(self):
"""
Return the Cursor and Connection Pool key
that can be used inside the current thread
and process (multiprocessing not supported).
"""
current_thread = threading.current_thread()
thread_id = current_thread.ident
pid = os.getpid()
return self._db, thread_id, pid
def _cleanup_all(self, _cleanup_main_thread=True):
"""
Clean all the Cursor and Connection resources
left open.
"""
if const_debug_enabled():
const_debug_write(
__name__,
"called _cleanup_all() for %s" % (self,))
with self._cursor_pool_mutex():
with self._connection_pool_mutex():
th_data = set(self._cursor_pool().keys())
th_data.update(self._connection_pool().keys())
for c_key in th_data:
self._cleanup_killer(
c_key,
_cleanup_main_thread=_cleanup_main_thread)
def _cleanup_monitor(self, target_thread, c_key):
"""
Execute Cursor and Connection resources
termination.
"""
# join on thread
target_thread.join()
if const_debug_enabled():
const_debug_write(
__name__,
"thread '%s' exited [%s], cleaning: %s" % (
target_thread, hex(target_thread.ident),
c_key,))
with self._cleanup_monitor_cache_mutex:
self._cleanup_monitor_cache.pop(c_key, None)
self._cleanup_killer(c_key)
def _start_cleanup_monitor(self, current_thread, c_key):
"""
Allocate a new thread that monitors the thread object
passed as "current_thread". Once this thread terminates,
all its resources are automatically released.
current_thread is actually joined() and live cursor and
connections are checked against thread identity value
clashing (because thread.ident values are recycled).
For the main thread, this method is a NO-OP.
The allocated thread is a daemon thread, so it should
be safe to use join() on daemon threads as well.
"""
if self.isMainThread(current_thread):
const_debug_write(
__name__,
"NOT setting up a cleanup monitor for the MainThread")
# do not install any cleanup monitor then
return
with self._cleanup_monitor_cache_mutex:
            if c_key in self._cleanup_monitor_cache:
                # there is already a monitor for this
                # cache key, quit straight away
                return
            # mark the key while still holding the mutex, so that
            # concurrent callers do not spawn a second monitor
            self._cleanup_monitor_cache[c_key] = None
if const_debug_enabled():
const_debug_write(
__name__,
"setting up a new cleanup monitor")
mon = ParallelTask(self._cleanup_monitor,
current_thread, c_key)
mon.name = "CleanupMonitor"
mon.daemon = True
mon.start()
def _cleanup_killer(self, c_key, _cleanup_main_thread=False):
"""
Cursor and Connection cleanup method.
"""
db, th_ident, pid = c_key
with self._cursor_pool_mutex():
with self._connection_pool_mutex():
cursor_pool = self._cursor_pool()
cur = None
threads = set()
cur_data = cursor_pool.get(c_key)
if cur_data is not None:
cur, threads = cur_data
connection_pool = self._connection_pool()
conn_data = connection_pool.get(c_key)
conn = None
if conn_data is not None:
conn, _threads = conn_data
threads |= _threads
if not threads:
# no threads?
return
# now cleanup threads set() first
# if it's empty, we can kill both
# connection and cursor
_dead_threads = set(
(x for x in threads if not x.is_alive()))
# we need to use the method rather than
# the operator, because we alter its data in
# place.
threads.difference_update(_dead_threads)
# also remove myself from the list
current_thread = threading.current_thread()
threads.discard(current_thread)
# closing the main thread objects (forcibly)
# is VERY dangerous, but it turned out
# that the original version of EntropyRepository.close()
# did that (that is why we have rwsems encapsulating
# entropy calls in RigoDaemon and Rigo).
# Also, one expects that close() really terminates
# all the connections and releases all the resources.
if _cleanup_main_thread and threads:
threads.discard(self._MAIN_THREAD)
if threads:
if const_debug_enabled():
const_debug_write(
__name__,
"_cleanup_killer: "
"there are still alive threads, "
"not cleaning up resources, "
"alive: %s -- dead: %s" % (
threads, _dead_threads,))
return
cursor_pool.pop(c_key, None)
connection_pool.pop(c_key, None)
if const_debug_enabled():
const_debug_write(
__name__,
"_cleanup_killer: "
"all threads sharing the same "
"ident are gone, i canz kill thread "
"ids: %s." % (hex(th_ident),))
# WARNING !! BEHAVIOUR CHANGE
# no more implicit commit()
# caller has to do it!
try:
conn.close()
except OperationalError as err:
if const_debug_enabled():
const_debug_write(
__name__,
"_cleanup_killer_1: %s" % (err,))
try:
conn.interrupt()
conn.close()
except OperationalError as err:
# heh, unable to close due to
# unfinalized statements
# interpreter shutdown?
if const_debug_enabled():
const_debug_write(
__name__,
"_cleanup_killer_2: %s" % (err,))
def _concatOperator(self, fields):
"""
Return the SQL for the CONCAT() function
of the given field elements.
For instance, MySQL has CONCAT(el_1, el_2, ...)
while SQLite3 uses the || n-ary operator.
So, the output of this method is a string, which
for MySQL is something like "CONCAT(el_1, el_2, ...)"
"""
raise NotImplementedError()
def _isBaseinfoExtrainfo2010(self):
"""
This method is mainly for old adapters that were
using a less-optimized more-normalized schema.
        Just return True if you don't know what it means.
"""
return True
def clearCache(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._live_cacher.clear()
super(EntropySQLRepository, self).clearCache()
self._live_cacher.clear()
def _clearLiveCache(self, key):
"""
Remove any in-memory cache pointed by key.
"""
self._live_cacher.clear_key(self._getLiveCacheKey() + key)
def _discardLiveCache(self):
"""
Invalidate all the in-memory cache.
"""
self._live_cacher.discard(self._getLiveCacheKey())
def _setLiveCache(self, key, value):
"""
Save a new key -> value pair to the in-memory cache.
"""
self._live_cacher.set(self._getLiveCacheKey() + key, value)
def _getLiveCache(self, key):
"""
Lookup a key value from the in-memory cache.
"""
return self._live_cacher.get(self._getLiveCacheKey() + key)
def _getLiveCacheKey(self):
"""
Reimplemented from EntropySQLRepository.
"""
return etpConst['systemroot'] + "_" + self._db + "_" + \
self.name + "_"
def _connection(self):
"""
Return a valid Connection object for this thread.
Must be implemented by subclasses and must return
a SQLConnectionWrapper object.
"""
raise NotImplementedError()
def _cursor(self):
"""
Return a valid Cursor object for this thread.
Must be implemented by subclasses and must return
a SQLCursorWrapper object.
"""
raise NotImplementedError()
def _cur2frozenset(self, cur):
"""
Flatten out a cursor content (usually some kind of list of lists)
and transform it into an immutable frozenset object.
"""
content = set()
for x in cur:
content |= set(x)
return frozenset(content)
def _cur2tuple(self, cur):
"""
Flatten out a cursor content (usually some kind of list of lists)
and transform it into an immutable tuple object.
"""
return tuple(itertools.chain.from_iterable(cur))
def _connection_pool(self):
"""
Return the Connection Pool mapping object
"""
return self.__connection_pool
def _connection_pool_mutex(self):
"""
Return the Connection Pool mapping mutex
"""
return self.__connection_pool_mutex
def _cursor_pool(self):
"""
Return the Cursor Pool mapping object
"""
return self.__cursor_pool
def _cursor_pool_mutex(self):
"""
Return the Cursor Pool mapping mutex
"""
return self.__cursor_pool_mutex
def _doesTableExist(self, table, temporary = False):
"""
Return whether a table exists.
"""
raise NotImplementedError()
def _doesColumnInTableExist(self, table, column):
"""
Return whether a column in table exists.
"""
raise NotImplementedError()
@staticmethod
def update(entropy_client, repository_id, force, gpg):
"""
Reimplemented from EntropyRepositoryBase.
"""
return EntropyRepositoryBase.update(
entropy_client, repository_id, force, gpg)
@staticmethod
def revision(repository_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
return EntropyRepositoryBase.revision(repository_id)
@staticmethod
def remote_revision(repository_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
return EntropyRepositoryBase.remote_revision(repository_id)
def setIndexing(self, indexing):
"""
Enable or disable metadata indexing.
@param indexing: True, to enable indexing.
@type indexing: bool
"""
self._indexing = bool(indexing)
def close(self, safe=False):
"""
Reimplemented from EntropyRepositoryBase.
Needs to call superclass method. This is a stub,
please implement the SQL logic.
"""
super(EntropySQLRepository, self).close(safe=safe)
def vacuum(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
raise NotImplementedError()
def commit(self, force = False, no_plugins = False):
"""
Reimplemented from EntropyRepositoryBase.
Needs to call superclass method.
"""
if const_debug_enabled():
const_debug_write(
__name__,
"commit(), "
"force: %s, no_plugins: %s, readonly: %s | %s" % (
force, no_plugins, self.readonly(), self))
if force or not self.readonly():
# NOTE: the actual commit MUST be executed before calling
# the superclass method (that is going to call EntropyRepositoryBase
# plugins). This to avoid that other connection to the same exact
# database file are opened and used before data is actually written
# to disk, causing a tricky race condition hard to exploit.
# So, FIRST commit changes, then call plugins.
try:
self._connection().commit()
except OperationalError as err:
# catch stupid sqlite3 error
# 'cannot commit - no transaction is active'
# and ignore.
if str(err.message).find("no transaction is active") == -1:
raise
super(EntropySQLRepository, self).commit(
force = force, no_plugins = no_plugins)
def rollback(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._connection().rollback()
def initializeRepository(self):
"""
Reimplemented from EntropyRepositoryBase.
Needs to call superclass method. This is a stub,
please implement the SQL logic.
"""
super(EntropySQLRepository, self).initializeRepository()
def handlePackage(self, pkg_data, revision = None,
formattedContent = False):
"""
Reimplemented from EntropyRepositoryBase. Raises NotImplementedError.
Subclasses have to reimplement this.
@raise NotImplementedError: guess what, you need to implement this.
"""
raise NotImplementedError()
def _addCompileFlags(self, chost, cflags, cxxflags):
"""
NOTE: only working with _baseinfo_extrainfo_2010 disabled
        Add the compiler flags used by a package to the repository.
        Return their identifier (idflags).
@param chost: CHOST string
@type chost: string
@param cflags: CFLAGS string
@type cflags: string
@param cxxflags: CXXFLAGS string
@type cxxflags: string
@return: Compiler flags triple identifier (idflags)
@rtype: int
"""
cur = self._cursor().execute("""
INSERT INTO flags VALUES (NULL,?,?,?)
""", (chost, cflags, cxxflags,))
return cur.lastrowid
def _areCompileFlagsAvailable(self, chost, cflags, cxxflags):
"""
NOTE: only working with _baseinfo_extrainfo_2010 disabled
Return whether given Compiler FLAGS are available in repository.
@param chost: CHOST flag
@type chost: string
@param cflags: CFLAGS flag
@type cflags: string
@param cxxflags: CXXFLAGS flag
@type cxxflags: string
        @return: idflags identifier if the given compiler flags are available,
            -1 otherwise
        @rtype: int
"""
cur = self._cursor().execute("""
SELECT idflags FROM flags WHERE chost = (?)
AND cflags = (?) AND cxxflags = (?) LIMIT 1
""",
(chost, cflags, cxxflags,)
)
result = cur.fetchone()
if result:
return result[0]
return -1
def _isLicenseAvailable(self, pkglicense):
"""
NOTE: only working with _baseinfo_extrainfo_2010 disabled
        Return whether license metadatum (NOT license name) is available
in repository.
@param pkglicense: "license" package metadatum (returned by
retrieveLicense)
@type pkglicense: string
@return: "license" metadatum identifier (idlicense)
@rtype: int
"""
if not entropy.tools.is_valid_string(pkglicense):
pkglicense = ' '
cur = self._cursor().execute("""
SELECT idlicense FROM licenses WHERE license = (?) LIMIT 1
""", (pkglicense,))
result = cur.fetchone()
if result:
return result[0]
return -1
def _addLicense(self, pkglicense):
"""
NOTE: only working with _baseinfo_extrainfo_2010 disabled
Add package license name string to repository.
Return its identifier (idlicense).
@param pkglicense: license name string
@type pkglicense: string
@return: license name identifier (idlicense)
@rtype: int
"""
if not entropy.tools.is_valid_string(pkglicense):
pkglicense = ' ' # workaround for broken license entries
cur = self._cursor().execute("""
INSERT INTO licenses VALUES (NULL,?)
""", (pkglicense,))
return cur.lastrowid
def _isCategoryAvailable(self, category):
"""
NOTE: only working with _baseinfo_extrainfo_2010 disabled
Return whether given category is available in repository.
@param category: category name
@type category: string
        @return: idcategory identifier if the category is available,
            -1 otherwise
        @rtype: int
"""
cur = self._cursor().execute("""
SELECT idcategory FROM categories WHERE category = (?) LIMIT 1
""", (category,))
result = cur.fetchone()
if result:
return result[0]
return -1
def _addCategory(self, category):
"""
NOTE: only working with _baseinfo_extrainfo_2010 disabled
Add package category string to repository. Return its identifier
(idcategory).
@param category: name of the category to add
@type category: string
@return: category identifier (idcategory)
@rtype: int
"""
cur = self._cursor().execute("""
INSERT INTO categories VALUES (NULL,?)
""", (category,))
return cur.lastrowid
def _addPackage(self, pkg_data, revision = -1, package_id = None,
formatted_content = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
if revision == -1:
try:
revision = int(pkg_data['revision'])
except (KeyError, ValueError):
pkg_data['revision'] = 0 # revision not specified
revision = 0
elif 'revision' not in pkg_data:
pkg_data['revision'] = revision
_baseinfo_extrainfo_2010 = self._isBaseinfoExtrainfo2010()
catid = None
licid = None
idflags = None
if not _baseinfo_extrainfo_2010:
# create new category if it doesn't exist
catid = self._isCategoryAvailable(pkg_data['category'])
if catid == -1:
catid = self._addCategory(pkg_data['category'])
# create new license if it doesn't exist
licid = self._isLicenseAvailable(pkg_data['license'])
if licid == -1:
licid = self._addLicense(pkg_data['license'])
idflags = self._areCompileFlagsAvailable(pkg_data['chost'],
pkg_data['cflags'], pkg_data['cxxflags'])
if idflags == -1:
idflags = self._addCompileFlags(pkg_data['chost'],
pkg_data['cflags'], pkg_data['cxxflags'])
idprotect = self._isProtectAvailable(pkg_data['config_protect'])
if idprotect == -1:
idprotect = self._addProtect(pkg_data['config_protect'])
idprotect_mask = self._isProtectAvailable(
pkg_data['config_protect_mask'])
if idprotect_mask == -1:
idprotect_mask = self._addProtect(
pkg_data['config_protect_mask'])
trigger = 0
if pkg_data['trigger']:
trigger = 1
# baseinfo
pkgatom = entropy.dep.create_package_atom_string(
pkg_data['category'], pkg_data['name'], pkg_data['version'],
pkg_data['versiontag'])
# add atom metadatum
pkg_data['atom'] = pkgatom
if not _baseinfo_extrainfo_2010:
mybaseinfo_data = (pkgatom, catid, pkg_data['name'],
pkg_data['version'], pkg_data['versiontag'], revision,
pkg_data['branch'], pkg_data['slot'],
licid, pkg_data['etpapi'], trigger,
)
else:
mybaseinfo_data = (pkgatom, pkg_data['category'], pkg_data['name'],
pkg_data['version'], pkg_data['versiontag'], revision,
pkg_data['branch'], pkg_data['slot'],
pkg_data['license'], pkg_data['etpapi'], trigger,
)
mypackage_id_string = 'NULL'
if package_id is not None:
manual_deps = self.retrieveManualDependencies(package_id,
resolve_conditional_deps = False)
# does it exist?
self.removePackage(package_id, from_add_package = True)
mypackage_id_string = '?'
mybaseinfo_data = (package_id,)+mybaseinfo_data
# merge old manual dependencies
m_dep_id = etpConst['dependency_type_ids']['mdepend_id']
for manual_dep in manual_deps:
pkg_data['pkg_dependencies'] += ((manual_dep, m_dep_id),)
cur = self._cursor().execute("""
INSERT INTO baseinfo VALUES
(%s, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)""" % (
mypackage_id_string,), mybaseinfo_data)
if package_id is None:
package_id = cur.lastrowid
# extrainfo
if not _baseinfo_extrainfo_2010:
self._cursor().execute(
'INSERT INTO extrainfo VALUES (?, ?, ?, ?, ?, ?, ?, ?)',
( package_id,
pkg_data['description'],
pkg_data['homepage'],
pkg_data['download'],
pkg_data['size'],
idflags,
pkg_data['digest'],
pkg_data['datecreation'],
)
)
else:
self._cursor().execute(
'INSERT INTO extrainfo VALUES (?,?,?,?,?,?,?,?,?,?)',
( package_id,
pkg_data['description'],
pkg_data['homepage'],
pkg_data['download'],
pkg_data['size'],
pkg_data['chost'],
pkg_data['cflags'],
pkg_data['cxxflags'],
pkg_data['digest'],
pkg_data['datecreation'],
)
)
# baseinfo and extrainfo are tainted
self.clearCache()
        ### other information inserted below is not as
        ### critical as the data above
if "needed_libs" in pkg_data:
needed_libs = pkg_data['needed_libs']
else: # needed, kept for backward compatibility.
needed_libs = [("", "", soname, elfclass, "")
for soname, elfclass in pkg_data['needed']]
self._insertNeededLibs(package_id, needed_libs)
self.insertDependencies(package_id, pkg_data['pkg_dependencies'])
self._insertSources(package_id, pkg_data['sources'])
self._insertUseflags(package_id, pkg_data['useflags'])
self._insertKeywords(package_id, pkg_data['keywords'])
self._insertLicenses(pkg_data['licensedata'])
self._insertMirrors(pkg_data['mirrorlinks'])
# packages and file association metadata
desktop_mime = pkg_data.get('desktop_mime')
if desktop_mime:
self._insertDesktopMime(package_id, desktop_mime)
provided_mime = pkg_data.get('provided_mime')
if provided_mime:
self._insertProvidedMime(package_id, provided_mime)
# package ChangeLog
if pkg_data.get('changelog'):
self._insertChangelog(pkg_data['category'], pkg_data['name'],
pkg_data['changelog'])
# package signatures
if pkg_data.get('signatures'):
signatures = pkg_data['signatures']
sha1, sha256, sha512, gpg = signatures['sha1'], \
signatures['sha256'], signatures['sha512'], \
signatures.get('gpg')
self._insertSignatures(package_id, sha1, sha256, sha512,
gpg = gpg)
# extra package download URLs
if pkg_data.get('extra_download'):
extra_download = pkg_data['extra_download']
self._insertExtraDownload(package_id, extra_download)
if pkg_data.get('provided_libs'):
self._insertProvidedLibraries(
package_id, pkg_data['provided_libs'])
# spm phases
if pkg_data.get('spm_phases') is not None:
self._insertSpmPhases(package_id, pkg_data['spm_phases'])
if pkg_data.get('spm_repository') is not None:
self._insertSpmRepository(
package_id, pkg_data['spm_repository'])
# not depending on other tables == no select done
self.insertContent(package_id, pkg_data['content'],
already_formatted = formatted_content)
# insert content safety metadata (checksum, mtime)
# if metadatum exists
content_safety = pkg_data.get('content_safety')
if content_safety is not None:
self._insertContentSafety(package_id, content_safety)
# handle SPM UID<->package_id binding
pkg_data['counter'] = int(pkg_data['counter'])
if not pkg_data['injected'] and (pkg_data['counter'] != -1):
pkg_data['counter'] = self._bindSpmPackageUid(
package_id, pkg_data['counter'], pkg_data['branch'])
self._insertOnDiskSize(package_id, pkg_data['disksize'])
if pkg_data['trigger']:
self._insertTrigger(package_id, pkg_data['trigger'])
self.insertConflicts(package_id, pkg_data['conflicts'])
self._insertProvide(package_id, pkg_data['provide_extended'])
self._insertConfigProtect(package_id, idprotect)
self._insertConfigProtect(package_id, idprotect_mask, mask = True)
# injected?
if pkg_data.get('injected'):
self.setInjected(package_id)
# is it a system package?
if pkg_data.get('systempackage'):
self._setSystemPackage(package_id)
# this will always be optional !
# (see entropy.client.interfaces.package)
original_repository = pkg_data.get('original_repository')
if original_repository is not None:
self.storeInstalledPackage(package_id, original_repository)
# baseinfo and extrainfo are tainted
# ensure that cache is clear even here
self.clearCache()
return package_id
def addPackage(self, pkg_data, revision = -1, package_id = None,
formatted_content = False):
"""
Reimplemented from EntropyRepositoryBase.
Needs to call superclass method.
"""
try:
package_id = self._addPackage(pkg_data, revision = revision,
package_id = package_id,
formatted_content = formatted_content)
super(EntropySQLRepository, self).addPackage(
pkg_data, revision = revision,
package_id = package_id,
formatted_content = formatted_content)
return package_id
except:
self._connection().rollback()
raise
def removePackage(self, package_id, from_add_package = False):
"""
Reimplemented from EntropyRepositoryBase.
Needs to call superclass method.
"""
try:
self.clearCache()
super(EntropySQLRepository, self).removePackage(
package_id, from_add_package = from_add_package)
self.clearCache()
return self._removePackage(package_id,
from_add_package = from_add_package)
except:
self._connection().rollback()
raise
def _removePackage(self, package_id, from_add_package = False):
"""
Reimplemented from EntropyRepositoryBase.
This method uses "ON DELETE CASCADE" to implement quick
and reliable package removal.
"""
self._cursor().execute(
"DELETE FROM baseinfo WHERE idpackage = ?", (package_id,))
def _removeMirrorEntries(self, mirrorname):
"""
Remove source packages mirror entries from database for the given
mirror name. This is a representation of Portage's "thirdpartymirrors".
@param mirrorname: mirror name
@type mirrorname: string
"""
self._cursor().execute("""
DELETE FROM mirrorlinks WHERE mirrorname = ?
""", (mirrorname,))
def _addMirrors(self, mirrorname, mirrorlist):
"""
Add source package mirror entry to database.
This is a representation of Portage's "thirdpartymirrors".
@param mirrorname: name of the mirror from which "mirrorlist" belongs
@type mirrorname: string
@param mirrorlist: list of URLs belonging to the given mirror name
@type mirrorlist: list
"""
self._cursor().executemany("""
INSERT INTO mirrorlinks VALUES (?, ?)
""", [(mirrorname, x,) for x in mirrorlist])
def _addProtect(self, protect):
"""
Add a single, generic CONFIG_PROTECT (not defined as _MASK/whatever
here) path. Return its identifier (idprotect).
@param protect: CONFIG_PROTECT path to add
@type protect: string
@return: protect identifier (idprotect)
@rtype: int
"""
cur = self._cursor().execute("""
INSERT INTO configprotectreference VALUES (NULL, ?)
""", (protect,))
return cur.lastrowid
def _addSource(self, source):
"""
Add source code package download path to repository. Return its
identifier (idsource).
@param source: source package download path
@type source: string
@return: source identifier (idprotect)
@rtype: int
"""
cur = self._cursor().execute("""
INSERT INTO sourcesreference VALUES (NULL, ?)
""", (source,))
return cur.lastrowid
def _addDependency(self, dependency):
"""
Add dependency string to repository. Return its identifier
(iddependency).
@param dependency: dependency string
@type dependency: string
@return: dependency identifier (iddependency)
@rtype: int
"""
cur = self._cursor().execute("""
INSERT INTO dependenciesreference VALUES (NULL, ?)
""", (dependency,))
return cur.lastrowid
def _addKeyword(self, keyword):
"""
Add package SPM keyword string to repository.
Return its identifier (idkeyword).
@param keyword: keyword string
@type keyword: string
@return: keyword identifier (idkeyword)
@rtype: int
"""
cur = self._cursor().execute("""
INSERT INTO keywordsreference VALUES (NULL, ?)
""", (keyword,))
return cur.lastrowid
def _addUseflag(self, useflag):
"""
Add package USE flag string to repository.
Return its identifier (iduseflag).
@param useflag: useflag string
@type useflag: string
@return: useflag identifier (iduseflag)
@rtype: int
"""
cur = self._cursor().execute("""
INSERT INTO useflagsreference VALUES (NULL, ?)
""", (useflag,))
return cur.lastrowid
def _setSystemPackage(self, package_id):
"""
Mark a package as system package, which means that entropy.client
will deny its removal.
@param package_id: package identifier
@type package_id: int
"""
self._cursor().execute("""
INSERT INTO systempackages VALUES (?)
""", (package_id,))
def setInjected(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
if not self.isInjected(package_id):
self._cursor().execute("""
INSERT INTO injected VALUES (?)
""", (package_id,))
def setCreationDate(self, package_id, date):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
UPDATE extrainfo SET datecreation = ? WHERE idpackage = ?
""", (str(date), package_id,))
def setDigest(self, package_id, digest):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
UPDATE extrainfo SET digest = ? WHERE idpackage = ?
""", (digest, package_id,))
def setSignatures(self, package_id, sha1, sha256, sha512, gpg = None):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
UPDATE packagesignatures SET sha1 = ?, sha256 = ?, sha512 = ?,
gpg = ? WHERE idpackage = ?
""", (sha1, sha256, sha512, gpg, package_id))
def setDownloadURL(self, package_id, url):
"""
Set download URL prefix for package.
        @param package_id: package identifier
@type package_id: int
@param url: URL prefix to set
@type url: string
"""
self._cursor().execute("""
UPDATE extrainfo SET download = ? WHERE idpackage = ?
""", (url, package_id,))
def setCategory(self, package_id, category):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
UPDATE baseinfo SET category = ? WHERE idpackage = ?
""", (category, package_id,))
def setCategoryDescription(self, category, description_data):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
DELETE FROM categoriesdescription WHERE category = ?
""", (category,))
for locale in description_data:
mydesc = description_data[locale]
self._cursor().execute("""
INSERT INTO categoriesdescription VALUES (?, ?, ?)
""", (category, locale, mydesc,))
def setName(self, package_id, name):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
UPDATE baseinfo SET name = ? WHERE idpackage = ?
""", (name, package_id,))
def setDependency(self, iddependency, dependency):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
UPDATE dependenciesreference SET dependency = ?
WHERE iddependency = ?
""", (dependency, iddependency,))
def setAtom(self, package_id, atom):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
UPDATE baseinfo SET atom = ? WHERE idpackage = ?
""", (atom, package_id,))
def setSlot(self, package_id, slot):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
UPDATE baseinfo SET slot = ? WHERE idpackage = ?
""", (slot, package_id,))
def setRevision(self, package_id, revision):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
UPDATE baseinfo SET revision = ? WHERE idpackage = ?
""", (revision, package_id,))
def removeDependencies(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
DELETE FROM dependencies WHERE idpackage = ?
""", (package_id,))
def insertDependencies(self, package_id, depdata):
"""
Reimplemented from EntropyRepositoryBase.
"""
def insert_list():
deps = []
for dep in depdata:
deptype = 0
if isinstance(depdata, dict):
deptype = depdata[dep]
elif not isinstance(dep, const_get_stringtype()):
dep, deptype = dep
iddep = self._isDependencyAvailable(dep)
if iddep == -1:
iddep = self._addDependency(dep)
deps.append((package_id, iddep, deptype,))
return deps
self._cursor().executemany("""
INSERT INTO dependencies VALUES (?, ?, ?)
""", insert_list())
def removeConflicts(self, package_id):
"""
Remove all the conflicts of package.
        @param package_id: package identifier
@type package_id: int
"""
self._cursor().execute("""
DELETE FROM conflicts WHERE idpackage = ?
""", (package_id,))
def insertConflicts(self, package_id, conflicts):
"""
Insert dependency conflicts for package.
        @param package_id: package identifier
@type package_id: int
@param conflicts: list of dep. conflicts
@type conflicts: list
"""
self._cursor().executemany("""
INSERT INTO conflicts VALUES (?, ?)
""", [(package_id, x,) for x in conflicts])
def insertContent(self, package_id, content, already_formatted = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
# respect iterators, so that if they're true iterators
# we save a lot of memory.
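        # content is either a {path: ftype} dict or, when already_formatted
        # is True, an iterable of (ignored_id, path, ftype) tuples; MyIter
        # yields (package_id, path, ftype) in both cases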
class MyIter:
def __init__(self, _package_id, _content, _already_fmt):
self._package_id = _package_id
self._content = _content
self._already_fmt = _already_fmt
self._iter = iter(self._content)
def __iter__(self):
# reinit iter
self._iter = iter(self._content)
return self
def __next__(self):
if self._already_fmt:
a, x, y = next(self._iter)
return self._package_id, x, y
else:
x = next(self._iter)
return self._package_id, x, self._content[x]
def next(self):
if self._already_fmt:
a, x, y = self._iter.next()
return self._package_id, x, y
else:
x = self._iter.next()
return self._package_id, x, self._content[x]
        # MyIter normalizes both content layouts, so a single INSERT covers
        # the already_formatted and plain dict cases
        self._cursor().executemany("""
        INSERT INTO content VALUES (?, ?, ?)
        """, MyIter(package_id, content, already_formatted))
def _insertContentSafety(self, package_id, content_safety):
"""
        Insert package files' sha256 checksums and mtimes into the
        contentsafety table. Currently supported fields: sha256, mtime.
"""
if isinstance(content_safety, dict):
self._cursor().executemany("""
INSERT INTO contentsafety VALUES (?, ?, ?, ?)
""", [(package_id, k, v['mtime'], v['sha256']) \
for k, v in content_safety.items()])
else:
# support for iterators containing tuples like this:
# (path, sha256, mtime)
class MyIterWrapper:
def __init__(self, _iter):
self._iter = iter(_iter)
def __iter__(self):
# reinit iter
self._iter = iter(self._iter)
return self
def __next__(self):
path, sha256, mtime = next(self._iter)
# this is the insert order, with mtime
# and sha256 swapped.
return package_id, path, mtime, sha256
def next(self):
path, sha256, mtime = self._iter.next()
# this is the insert order, with mtime
# and sha256 swapped.
return package_id, path, mtime, sha256
self._cursor().executemany("""
INSERT INTO contentsafety VALUES (?, ?, ?, ?)
""", MyIterWrapper(content_safety))
def _insertProvidedLibraries(self, package_id, libs_metadata):
"""
Insert library metadata owned by package.
        @param package_id: package identifier
@type package_id: int
        @param libs_metadata: provided library metadata, a list of tuples of
            length 3 containing library name, path and ELF class.
@type libs_metadata: list
"""
self._cursor().executemany("""
INSERT INTO provided_libs VALUES (?, ?, ?, ?)
""", [(package_id, x, y, z,) for x, y, z in libs_metadata])
def insertAutomergefiles(self, package_id, automerge_data):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().executemany("""
INSERT INTO automergefiles VALUES (?, ?, ?)""",
[(package_id, x, y,) for x, y in automerge_data])
def _insertChangelog(self, category, name, changelog_txt):
"""
Insert package changelog for package (in this case using category +
name as key).
@param category: package category
@type category: string
@param name: package name
@type name: string
@param changelog_txt: changelog text
@type changelog_txt: string
"""
mytxt = changelog_txt.encode('raw_unicode_escape')
self._cursor().execute("""
DELETE FROM packagechangelogs WHERE category = ? AND name = ?
""", (category, name,))
self._cursor().execute("""
INSERT INTO packagechangelogs VALUES (?, ?, ?)
""", (category, name, const_get_buffer()(mytxt),))
def _insertLicenses(self, licenses_data):
"""
        Insert license data (license names and text) into the repository.
@param licenses_data: dictionary containing license names as keys and
text as values
@type licenses_data: dict
"""
mylicenses = list(licenses_data.keys())
def my_mf(mylicense):
return not self.isLicenseDataKeyAvailable(mylicense)
def my_mm(mylicense):
lic_data = licenses_data.get(mylicense, '')
# support both utf8 and str input
if const_isunicode(lic_data): # encode to str
try:
lic_data = lic_data.encode('raw_unicode_escape')
except (UnicodeDecodeError,):
lic_data = lic_data.encode('utf-8')
return (mylicense, const_get_buffer()(lic_data), 0,)
# set() used after filter to remove duplicates
self._cursor().executemany("""
%s INTO licensedata VALUES (?, ?, ?)
""" % (self._INSERT_OR_REPLACE,),
list(map(my_mm, set(filter(my_mf, mylicenses)))))
def _insertConfigProtect(self, package_id, idprotect, mask = False):
"""
Insert CONFIG_PROTECT (configuration files protection) entry identifier
        for package. This entry is usually a space-separated string of
        directories and files used to handle user-protected configuration
        files or directories, i.e. those that are going to be stashed in
        separate paths awaiting user merge decisions.
        @param package_id: package identifier
@type package_id: int
@param idprotect: configuration files protection identifier
@type idprotect: int
        @keyword mask: if True, idprotect will be considered a "mask" entry,
meaning that configuration files starting with paths referenced
by idprotect will be forcefully merged.
@type mask: bool
"""
mytable = 'configprotect'
if mask:
mytable += 'mask'
self._cursor().execute("""
INSERT INTO %s VALUES (?, ?)
""" % (mytable,), (package_id, idprotect,))
def _insertMirrors(self, mirrors):
"""
Insert list of "mirror name" and "mirror list" into repository.
        The term "mirror" in this case refers to Source Package Manager
package download mirrors.
        The argument format is like this for historical reasons and may change
        in the future.
@todo: change argument format
@param mirrors: list of tuples of length 2 containing string as first
item and list as second.
        [('openoffice', ['http://openoffice1', 'http://...'],), ...]
@type mirrors: list
"""
for mirrorname, mirrorlist in mirrors:
# remove old
self._removeMirrorEntries(mirrorname)
# add new
self._addMirrors(mirrorname, mirrorlist)
def _insertKeywords(self, package_id, keywords):
"""
        Insert keywords for package. Keywords are strings contained in package
        metadata stating which architectures or subarchitectures are supported
        by the package. Historically they are also used for masking packages
        (making them unavailable).
        @param package_id: package identifier
@type package_id: int
@param keywords: list of keywords
@type keywords: list
"""
def mymf(key):
idkeyword = self._isKeywordAvailable(key)
if idkeyword == -1:
                # create keyword
idkeyword = self._addKeyword(key)
return (package_id, idkeyword,)
self._cursor().executemany("""
INSERT INTO keywords VALUES (?, ?)
""", list(map(mymf, keywords)))
def _insertUseflags(self, package_id, useflags):
"""
        Insert Source Package Manager USE (component build) flags for package.
        @param package_id: package identifier
@type package_id: int
@param useflags: list of use flags strings
@type useflags: list
"""
def mymf(flag):
iduseflag = self._isUseflagAvailable(flag)
if iduseflag == -1:
                # create useflag
iduseflag = self._addUseflag(flag)
return (package_id, iduseflag,)
self._cursor().executemany("""
INSERT INTO useflags VALUES (?, ?)
""", list(map(mymf, useflags)))
def _insertSignatures(self, package_id, sha1, sha256, sha512, gpg = None):
"""
Insert package file extra hashes (sha1, sha256, sha512) for package.
        @param package_id: package identifier
@type package_id: int
@param sha1: SHA1 hash for package file
@type sha1: string
@param sha256: SHA256 hash for package file
@type sha256: string
@param sha512: SHA512 hash for package file
@type sha512: string
@keyword gpg: GPG signature file content
@type gpg: string
"""
self._cursor().execute("""
INSERT INTO packagesignatures VALUES (?, ?, ?, ?, ?)
""", (package_id, sha1, sha256, sha512, gpg))
def _insertExtraDownload(self, package_id, package_downloads_data):
"""
Insert extra package files download objects to repository.
        @param package_id: package identifier
@type package_id: int
        @param package_downloads_data: list of dicts with (download, type,
            size, disksize, md5, sha1, sha256, sha512, gpg) as keys
@type package_downloads_data: list
"""
self._cursor().executemany("""
INSERT INTO packagedownloads VALUES
(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
""", [(package_id, edw['download'], edw['type'], edw['size'],
edw['disksize'], edw['md5'], edw['sha1'], edw['sha256'],
edw['sha512'], edw['gpg']) for edw in \
package_downloads_data])
def _insertDesktopMime(self, package_id, metadata):
"""
Insert file association information for package.
        @param package_id: package identifier
@type package_id: int
@param metadata: list of dict() containing file association metadata
@type metadata: list
"""
mime_data = [(package_id, x['name'],
x['mimetype'], x['executable'],
x['icon']) for x in metadata]
self._cursor().executemany("""
INSERT INTO packagedesktopmime VALUES (?, ?, ?, ?, ?)
""", mime_data)
def _insertProvidedMime(self, package_id, mimetypes):
"""
Insert file association information for package in a way useful for
making direct and inverse queries (having a mimetype or having a
        package identifier).
        @param package_id: package identifier
@type package_id: int
@param mimetypes: list of mimetypes supported by package
@type mimetypes: list
"""
self._cursor().executemany("""
INSERT INTO provided_mime VALUES (?, ?)""",
[(x, package_id) for x in mimetypes])
def _insertSpmPhases(self, package_id, phases):
"""
Insert Source Package Manager phases for package.
Entropy can call several Source Package Manager (the PM which Entropy
relies on) package installation/removal phases.
Such phase names are listed here.
        @param package_id: package identifier
@type package_id: int
@param phases: list of available Source Package Manager phases
@type phases: list
"""
self._cursor().execute("""
INSERT INTO packagespmphases VALUES (?, ?)
""", (package_id, phases,))
def _insertSpmRepository(self, package_id, repository):
"""
Insert Source Package Manager repository for package.
        This metadatum describes the source repository the package has been
        compiled from.
        @param package_id: package identifier
@type package_id: int
@param repository: Source Package Manager repository
@type repository: string
"""
self._cursor().execute("""
INSERT INTO packagespmrepository VALUES (?, ?)
""", (package_id, repository,))
def _insertSources(self, package_id, sources):
"""
Insert source code package download URLs for package_id.
        @param package_id: package identifier
@type package_id: int
@param sources: list of source URLs
@type sources: list
"""
def mymf(source):
if (not source) or \
(not entropy.tools.is_valid_string(source)):
return 0
idsource = self._isSourceAvailable(source)
if idsource == -1:
idsource = self._addSource(source)
return (package_id, idsource,)
self._cursor().executemany("""
INSERT INTO sources VALUES (?, ?)
""", [x for x in map(mymf, sources) if x != 0])
def _insertProvide(self, package_id, provides):
"""
Insert PROVIDE metadata for package_id.
        This has been added to support the old-style meta-package (PROVIDE)
        feature of the Portage Source Package Manager.
        Packages can provide extra atoms, which act like aliases that may be
        supplied by multiple packages. This makes it possible to ship several
        applications providing the same functionality, which depending
        packages can reference without being bound to a single package.
        @param package_id: package identifier
@type package_id: int
@param provides: list of atom strings
@type provides: list
"""
default_provides = [x for x in provides if x[1]]
self._cursor().executemany("""
INSERT INTO provide VALUES (?, ?, ?)
""", [(package_id, x, y,) for x, y in provides])
if default_provides:
# reset previously set default provides
self._cursor().executemany("""
UPDATE provide SET is_default=0 WHERE atom = ? AND
idpackage != ?
""", default_provides)
def _insertNeededLibs(self, package_id, needed_libs):
"""
        Insert the ELF NEEDED metadata (required shared libraries) for package.
        @param package_id: package identifier
@type package_id: int
@param needed_libs: list of tuples composed of:
(library user path, library user soname, soname, elfclass, rpath)
@type needed_libs: list
"""
self._cursor().executemany("""
INSERT INTO needed_libs VALUES (?, ?, ?, ?, ?, ?)
""", [(package_id,) + tuple(x) for x in needed_libs])
def _insertOnDiskSize(self, package_id, mysize):
"""
Insert on-disk size (bytes) for package.
        @param package_id: package identifier
@type package_id: int
@param mysize: package size (bytes)
@type mysize: int
"""
self._cursor().execute("""
INSERT INTO sizes VALUES (?, ?)
""", (package_id, mysize,))
def _insertTrigger(self, package_id, trigger):
"""
Insert built-in trigger script for package, containing
pre-install, post-install, pre-remove, post-remove hooks.
This feature should be considered DEPRECATED, and kept for convenience.
Please use Source Package Manager features if possible.
        @param package_id: package identifier
@type package_id: int
@param trigger: trigger file dump
@type trigger: string
"""
self._cursor().execute("""
INSERT INTO triggers VALUES (?, ?)
""", (package_id, const_get_buffer()(trigger),))
def insertPreservedLibrary(self, library, elfclass, path, atom):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
%s INTO preserved_libs VALUES (?, ?, ?, ?)
""" % (self._INSERT_OR_REPLACE,), (library, elfclass, path, atom))
def removePreservedLibrary(self, library, elfclass, path):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
DELETE FROM preserved_libs
WHERE library = ? AND elfclass = ? AND path = ?
""", (library, elfclass, path))
def listAllPreservedLibraries(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT library, elfclass, path, atom FROM preserved_libs
""")
return tuple(cur)
def retrievePreservedLibraries(self, library, elfclass):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT path FROM preserved_libs WHERE library = ? AND elfclass = ?
""", (library, elfclass))
return self._cur2tuple(cur)
def insertBranchMigration(self, repository, from_branch, to_branch,
post_migration_md5sum, post_upgrade_md5sum):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
%s INTO entropy_branch_migration VALUES (?,?,?,?,?)
""" % (self._INSERT_OR_REPLACE,), (
repository, from_branch,
to_branch, post_migration_md5sum,
post_upgrade_md5sum,
)
)
def setBranchMigrationPostUpgradeMd5sum(self, repository, from_branch,
to_branch, post_upgrade_md5sum):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
UPDATE entropy_branch_migration SET post_upgrade_md5sum = ? WHERE
repository = ? AND from_branch = ? AND to_branch = ?
""", (post_upgrade_md5sum, repository, from_branch, to_branch,))
def _bindSpmPackageUid(self, package_id, spm_package_uid, branch):
"""
Bind Source Package Manager package identifier ("COUNTER" metadata
for Portage) to Entropy package.
If uid <= -2, a new negative UID will be allocated and returned.
Negative UIDs are considered auto-allocated by Entropy.
This is mainly used for binary packages not belonging to any SPM
packages which are just "injected" inside the repository.
        @param package_id: package identifier
        @type package_id: int
        @param spm_package_uid: Source Package Manager unique package identifier
@type spm_package_uid: int
@param branch: current running Entropy branch
@type branch: string
@return: uid set
@rtype: int
"""
my_uid = spm_package_uid
if my_uid <= -2:
# special cases
my_uid = self.getFakeSpmUid()
self._cursor().execute("""
INSERT INTO counters VALUES (?, ?, ?)
""", (my_uid, package_id, branch,))
return my_uid
def insertSpmUid(self, package_id, spm_package_uid):
"""
Reimplemented from EntropyRepositoryBase.
"""
branch = self._settings['repositories']['branch']
self._cursor().execute("""
DELETE FROM counters WHERE counter = ?
AND branch = ?
""", (spm_package_uid, branch,))
# the "OR REPLACE" clause handles the UPDATE
# of the counter value in case of clashing
self._cursor().execute("""
%s INTO counters VALUES (?, ?, ?);
""" % (self._INSERT_OR_REPLACE,),
(spm_package_uid, package_id, branch,))
def removeTrashedUids(self, spm_package_uids):
"""
Remove given Source Package Manager unique package identifiers from
the "trashed" list. This is only used by Entropy Server.
"""
self._cursor().executemany("""
DELETE FROM trashedcounters WHERE counter = ?
""", [(x,) for x in spm_package_uids])
def setTrashedUid(self, spm_package_uid):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
%s INTO trashedcounters VALUES (?)
""" % (self._INSERT_OR_REPLACE,), (spm_package_uid,))
def setSpmUid(self, package_id, spm_package_uid, branch = None):
"""
Reimplemented from EntropyRepositoryBase.
"""
branchstring = ''
insertdata = (spm_package_uid, package_id)
if branch:
branchstring = ', branch = (?)'
insertdata += (branch,)
if self._UPDATE_OR_REPLACE is not None:
self._cursor().execute("""
%s counters SET counter = (?) %s
WHERE idpackage = (?)""" % (
self._UPDATE_OR_REPLACE,
branchstring,), insertdata)
else:
try:
cur = self._cursor().execute("""
UPDATE counters SET counter = ? %s
WHERE idpackage = ?""" % (branchstring,), insertdata)
except IntegrityError as err:
# this was used by MySQL
# errno = self.ModuleProxy.errno()
# if err.args[0].errno != errno['ER_DUP_ENTRY']:
# raise
# fallback to replace
cur = self._cursor().execute("""
%s INTO counters SET counter = ? %s
WHERE idpackage = ?""" % (
self._INSERT_OR_REPLACE,
branchstring,), insertdata)
def setContentSafety(self, package_id, content_safety):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
DELETE FROM contentsafety where idpackage = ?
""", (package_id,))
self._insertContentSafety(package_id, content_safety)
def contentDiff(self, package_id, dbconn, dbconn_package_id,
extended = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
# setup random table name
random_str = "%svs%s_%s" % (package_id, id(dbconn),
dbconn_package_id)
if const_is_python3():
random_str = const_convert_to_rawstring(random_str)
randomtable = "cdiff%s" % (hashlib.md5(random_str).hexdigest(),)
# create random table
self._cursor().executescript("""
DROP TABLE IF EXISTS `%s`;
CREATE TEMPORARY TABLE `%s` (
file VARCHAR(75), ftype VARCHAR(3) );
""" % (randomtable, randomtable,)
)
try:
content_iter = dbconn.retrieveContentIter(dbconn_package_id)
self._cursor().executemany("""
INSERT INTO `%s` VALUES (?, ?)""" % (randomtable,),
content_iter)
# remove this when the one in retrieveContent will be removed
self._connection().unicode()
# now compare
ftype_str = ""
if extended:
ftype_str = ", type"
cur = self._cursor().execute("""
SELECT file%s FROM content
WHERE content.idpackage = ? AND
content.file NOT IN (SELECT file from `%s`)""" % (
ftype_str, randomtable,), (package_id,))
# suck back
if extended:
return tuple(cur)
return self._cur2frozenset(cur)
finally:
self._cursor().execute('DROP TABLE IF EXISTS `%s`' % (
randomtable,))
def clean(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cleanupUseflags()
self._cleanupSources()
self._cleanupDependencies()
self._cleanupChangelogs()
def _cleanupChangelogs(self):
"""
Cleanup "changelog" metadata unused references to save space.
"""
concat = self._concatOperator(("category", "'/'", "name"))
concat_sub = self._concatOperator(
("baseinfo.category", "'/'", "baseinfo.name"))
self._cursor().execute("""
DELETE FROM packagechangelogs
WHERE %s NOT IN
( SELECT %s FROM baseinfo)
""" % (concat, concat_sub,))
def _cleanupUseflags(self):
"""
Cleanup "USE flags" metadata unused references to save space.
"""
self._cursor().execute("""
DELETE FROM useflagsreference
WHERE idflag NOT IN (SELECT idflag FROM useflags)""")
def _cleanupSources(self):
"""
Cleanup "sources" metadata unused references to save space.
"""
self._cursor().execute("""
DELETE FROM sourcesreference
WHERE idsource NOT IN (SELECT idsource FROM sources)""")
def _cleanupDependencies(self):
"""
Cleanup "dependencies" metadata unused references to save space.
"""
self._cursor().execute("""
DELETE FROM dependenciesreference
WHERE iddependency NOT IN (SELECT iddependency FROM dependencies)
""")
def getFakeSpmUid(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
try:
cur = self._cursor().execute("""
SELECT min(counter) FROM counters LIMIT 1
""")
dbcounter = cur.fetchone()
except Error:
# first available counter
return -2
counter = 0
if dbcounter:
counter = dbcounter[0]
        # test for None first: min() yields NULL when the counters table
        # is empty
        if (counter is None) or (counter >= -1):
counter = -2
else:
counter -= 1
return counter
def getApi(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT max(etpapi) FROM baseinfo LIMIT 1
""")
api = cur.fetchone()
if api:
return api[0]
return -1
def getDependency(self, iddependency):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT dependency FROM dependenciesreference
WHERE iddependency = ? LIMIT 1
""", (iddependency,))
dep = cur.fetchone()
if dep:
return dep[0]
def getPackageIds(self, atom):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT idpackage FROM baseinfo WHERE atom = ?
""", (atom,))
return self._cur2frozenset(cur)
def getPackageIdFromDownload(self, download_relative_path,
endswith = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
if endswith:
cur = self._cursor().execute("""
SELECT baseinfo.idpackage FROM baseinfo,extrainfo
WHERE extrainfo.download LIKE ? AND
baseinfo.idpackage = extrainfo.idpackage
LIMIT 1
""", ("%"+download_relative_path,))
else:
cur = self._cursor().execute("""
SELECT baseinfo.idpackage FROM baseinfo,extrainfo
WHERE extrainfo.download = ? AND
baseinfo.idpackage = extrainfo.idpackage
LIMIT 1
""", (download_relative_path,))
package_id = cur.fetchone()
if package_id:
return package_id[0]
return -1
def getVersioningData(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT version, versiontag, revision FROM baseinfo
WHERE idpackage = ? LIMIT 1
""", (package_id,))
return cur.fetchone()
def getStrictData(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
concat = self._concatOperator(("category", "'/'", "name"))
cur = self._cursor().execute("""
SELECT %s, slot, version, versiontag, revision, atom
FROM baseinfo
WHERE idpackage = ? LIMIT 1
""" % (concat,), (package_id,))
return cur.fetchone()
def getStrictScopeData(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT atom, slot, revision FROM baseinfo
WHERE idpackage = ? LIMIT 1
""", (package_id,))
return cur.fetchone()
def getScopeData(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT atom, category, name, version, slot, versiontag,
revision, branch, etpapi FROM baseinfo
WHERE baseinfo.idpackage = ? LIMIT 1
""", (package_id,))
return cur.fetchone()
def getBaseData(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
sql = """
SELECT
baseinfo.atom,
baseinfo.name,
baseinfo.version,
baseinfo.versiontag,
extrainfo.description,
baseinfo.category,
extrainfo.chost,
extrainfo.cflags,
extrainfo.cxxflags,
extrainfo.homepage,
baseinfo.license,
baseinfo.branch,
extrainfo.download,
extrainfo.digest,
baseinfo.slot,
baseinfo.etpapi,
extrainfo.datecreation,
extrainfo.size,
baseinfo.revision
FROM
baseinfo,
extrainfo
WHERE
baseinfo.idpackage = ?
AND baseinfo.idpackage = extrainfo.idpackage
LIMIT 1
"""
cur = self._cursor().execute(sql, (package_id,))
return cur.fetchone()
def retrieveRepositoryUpdatesDigest(self, repository):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT digest FROM treeupdates WHERE repository = ? LIMIT 1
""", (repository,))
mydigest = cur.fetchone()
if mydigest:
return mydigest[0]
return -1
def listAllTreeUpdatesActions(self, no_ids_repos = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
if no_ids_repos:
cur = self._cursor().execute("""
SELECT command, branch, date FROM treeupdatesactions
ORDER BY CAST(date AS FLOAT)
""")
else:
cur = self._cursor().execute("""
SELECT idupdate, repository, command, branch, date
FROM treeupdatesactions ORDER BY CAST(date AS FLOAT)
""")
return tuple(cur)
def retrieveTreeUpdatesActions(self, repository):
"""
Reimplemented from EntropyRepositoryBase.
"""
params = (repository,)
cur = self._cursor().execute("""
SELECT command FROM treeupdatesactions WHERE
repository = ? ORDER BY CAST(date AS FLOAT)""", params)
return self._cur2tuple(cur)
def bumpTreeUpdatesActions(self, updates):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute('DELETE FROM treeupdatesactions')
self._cursor().executemany("""
INSERT INTO treeupdatesactions VALUES (?, ?, ?, ?, ?)
""", updates)
def removeTreeUpdatesActions(self, repository):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
DELETE FROM treeupdatesactions WHERE repository = ?
""", (repository,))
def insertTreeUpdatesActions(self, updates, repository):
"""
Reimplemented from EntropyRepositoryBase.
"""
myupdates = [[repository]+list(x) for x in updates]
self._cursor().executemany("""
INSERT INTO treeupdatesactions VALUES (NULL, ?, ?, ?, ?)
""", myupdates)
def setRepositoryUpdatesDigest(self, repository, digest):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
DELETE FROM treeupdates where repository = ?
""", (repository,))
self._cursor().execute("""
INSERT INTO treeupdates VALUES (?, ?)
""", (repository, digest,))
def addRepositoryUpdatesActions(self, repository, actions, branch):
"""
Reimplemented from EntropyRepositoryBase.
"""
mytime = str(time.time())
myupdates = [
(repository, x, branch, mytime,) for x in actions \
if not self._doesTreeupdatesActionExist(repository, x, branch)
]
self._cursor().executemany("""
INSERT INTO treeupdatesactions VALUES (NULL, ?, ?, ?, ?)
""", myupdates)
def _doesTreeupdatesActionExist(self, repository, command, branch):
"""
        This method should be considered internal and not suited for a general
        audience.
Return whether provided "treeupdates" action in repository with
provided branch exists.
@param repository: repository identifier
@type repository: string
@param command: treeupdates command
@type command: string
@param branch: branch metadata bound to command argument value given
@type branch: string
@return: if True, provided treeupdates action already exists
@rtype: bool
"""
cur = self._cursor().execute("""
SELECT idupdate FROM treeupdatesactions
WHERE repository = ? and command = ?
and branch = ? LIMIT 1
""", (repository, command, branch,))
result = cur.fetchone()
if result:
return True
return False
def clearPackageSets(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute('DELETE FROM packagesets')
def insertPackageSets(self, sets_data):
"""
Reimplemented from EntropyRepositoryBase.
"""
mysets = []
for setname in sorted(sets_data):
for dependency in sorted(sets_data[setname]):
try:
mysets.append((const_convert_to_unicode(setname),
const_convert_to_unicode(dependency),))
except (UnicodeDecodeError, UnicodeEncodeError,):
continue
self._cursor().executemany("""
INSERT INTO packagesets VALUES (?, ?)
""", mysets)
def retrievePackageSets(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT setname, dependency FROM packagesets
""")
sets = {}
for setname, dependency in cur:
obj = sets.setdefault(setname, set())
obj.add(dependency)
return sets
def retrievePackageSet(self, setname):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT dependency FROM packagesets WHERE setname = ?""",
(setname,))
return self._cur2frozenset(cur)
def retrieveAtom(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT atom FROM baseinfo WHERE idpackage = ? LIMIT 1
""", (package_id,))
atom = cur.fetchone()
if atom:
return atom[0]
def retrieveBranch(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT branch FROM baseinfo WHERE idpackage = ? LIMIT 1
""", (package_id,))
branch = cur.fetchone()
if branch:
return branch[0]
def retrieveTrigger(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT data FROM triggers WHERE idpackage = ? LIMIT 1
""", (package_id,))
trigger = cur.fetchone()
if not trigger:
# backward compatibility with <=0.52.x
return const_convert_to_rawstring('')
return const_convert_to_rawstring(trigger[0])
def retrieveDownloadURL(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT download FROM extrainfo WHERE idpackage = ? LIMIT 1
""", (package_id,))
download = cur.fetchone()
if download:
return download[0]
def retrieveDescription(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT description FROM extrainfo WHERE idpackage = ? LIMIT 1
""", (package_id,))
description = cur.fetchone()
if description:
return description[0]
def retrieveHomepage(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT homepage FROM extrainfo WHERE idpackage = ? LIMIT 1
""", (package_id,))
home = cur.fetchone()
if home:
return home[0]
def retrieveSpmUid(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT counters.counter FROM counters,baseinfo
WHERE counters.idpackage = ? AND
baseinfo.idpackage = counters.idpackage AND
baseinfo.branch = counters.branch LIMIT 1
""", (package_id,))
mycounter = cur.fetchone()
if mycounter:
return mycounter[0]
return -1
def retrieveSize(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT size FROM extrainfo WHERE idpackage = ? LIMIT 1
""", (package_id,))
size = cur.fetchone()
if size:
try:
return int(size[0])
            except ValueError:  # non-integer size stored, treat as zero
return 0
def retrieveOnDiskSize(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT size FROM sizes WHERE idpackage = ? LIMIT 1
""", (package_id,))
size = cur.fetchone()
if size:
return size[0]
return 0
def retrieveDigest(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT digest FROM extrainfo WHERE idpackage = ? LIMIT 1
""", (package_id,))
digest = cur.fetchone()
if digest:
return digest[0]
return None
def retrieveSignatures(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT sha1, sha256, sha512, gpg FROM packagesignatures
WHERE idpackage = ? LIMIT 1
""", (package_id,))
data = cur.fetchone()
if data:
return data
return None, None, None, None
def retrieveExtraDownload(self, package_id, down_type = None):
"""
Reimplemented from EntropyRepositoryBase.
"""
down_type_str = ""
params = [package_id]
if down_type is not None:
down_type_str = " AND down_type = ?"
params.append(down_type)
cur = self._cursor().execute("""
SELECT download, type, size, disksize, md5, sha1,
sha256, sha512, gpg
FROM packagedownloads WHERE idpackage = ?
""" + down_type_str, params)
result = []
for download, d_type, size, d_size, md5, sha1, sha256, sha512, gpg in \
cur:
result.append({
"download": download,
"type": d_type,
"size": size,
"disksize": d_size,
"md5": md5,
"sha1": sha1,
"sha256": sha256,
"sha512": sha512,
"gpg": gpg,
})
return tuple(result)
def retrieveName(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT name FROM baseinfo WHERE idpackage = ? LIMIT 1
""", (package_id,))
name = cur.fetchone()
if name:
return name[0]
def retrieveKeySplit(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT category, name FROM baseinfo
WHERE idpackage = ? LIMIT 1
""", (package_id,))
return cur.fetchone()
def retrieveKeySlot(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
concat = self._concatOperator(("category", "'/'", "name"))
cur = self._cursor().execute("""
SELECT %s, slot FROM baseinfo
WHERE idpackage = ? LIMIT 1
""" % (concat,), (package_id,))
return cur.fetchone()
def retrieveKeySlotAggregated(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
concat = self._concatOperator(
("category",
"'/'",
"name",
"'%s'" % (etpConst['entropyslotprefix'],),
"slot"))
cur = self._cursor().execute("""
SELECT %s FROM baseinfo
WHERE idpackage = ? LIMIT 1
""" % (concat,), (package_id,))
keyslot = cur.fetchone()
if keyslot:
return keyslot[0]
return None
def retrieveKeySlotTag(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
concat = self._concatOperator(("category", "'/'", "name"))
cur = self._cursor().execute("""
SELECT %s, slot, versiontag
FROM baseinfo WHERE
idpackage = ? LIMIT 1
""" % (concat,), (package_id,))
return cur.fetchone()
def retrieveVersion(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT version FROM baseinfo
WHERE idpackage = ? LIMIT 1
""", (package_id,))
version = cur.fetchone()
if version:
return version[0]
return None
def retrieveRevision(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT revision FROM baseinfo
WHERE idpackage = ? LIMIT 1
""", (package_id,))
rev = cur.fetchone()
if rev:
return rev[0]
return None
def retrieveCreationDate(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT datecreation FROM extrainfo WHERE idpackage = ? LIMIT 1
""", (package_id,))
date = cur.fetchone()
if date:
return date[0]
def retrieveApi(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT etpapi FROM baseinfo WHERE idpackage = ? LIMIT 1
""", (package_id,))
api = cur.fetchone()
if api:
return api[0]
def retrieveUseflags(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT useflagsreference.flagname
FROM useflags, useflagsreference
WHERE useflags.idpackage = ?
AND useflags.idflag = useflagsreference.idflag
""", (package_id,))
return self._cur2frozenset(cur)
def retrieveSpmPhases(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT phases FROM packagespmphases WHERE idpackage = ? LIMIT 1
""", (package_id,))
spm_phases = cur.fetchone()
if spm_phases:
return spm_phases[0]
def retrieveSpmRepository(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT repository FROM packagespmrepository
WHERE idpackage = ? LIMIT 1
""", (package_id,))
spm_repo = cur.fetchone()
if spm_repo:
return spm_repo[0]
def retrieveDesktopMime(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT name, mimetype, executable, icon FROM packagedesktopmime
WHERE idpackage = ?""", (package_id,))
data = []
for row in cur:
item = {}
item['name'], item['mimetype'], item['executable'], \
item['icon'] = row
data.append(item)
return data
def retrieveProvidedMime(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT mimetype FROM provided_mime WHERE idpackage = ?
""", (package_id,))
return self._cur2frozenset(cur)
def retrieveNeeded(self, package_id, extended = False, formatted = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
if not self._doesTableExist("needed_libs"):
# TODO: remove in 2016.
return self._compatRetrieveNeeded(
package_id, extended=extended, formatted=formatted)
if extended:
cur = self._cursor().execute("""
SELECT soname, elfclass FROM needed_libs
WHERE idpackage = ? ORDER BY soname
""", (package_id,))
needed = tuple(cur)
else:
cur = self._cursor().execute("""
SELECT soname FROM needed_libs
WHERE idpackage = ? ORDER BY soname
""", (package_id,))
needed = self._cur2tuple(cur)
if extended and formatted:
return dict((lib, elfclass,) for lib, elfclass in needed)
return needed
def _compatRetrieveNeeded(self, package_id, extended = False,
formatted = False):
"""
Backward compatibility schema support for retrieveNeeded().
"""
if extended:
cur = self._cursor().execute("""
SELECT library,elfclass FROM needed,neededreference
WHERE needed.idpackage = ? AND
needed.idneeded = neededreference.idneeded ORDER BY library
""", (package_id,))
needed = tuple(cur)
else:
cur = self._cursor().execute("""
SELECT library FROM needed,neededreference
WHERE needed.idpackage = ? AND
needed.idneeded = neededreference.idneeded ORDER BY library
""", (package_id,))
needed = self._cur2tuple(cur)
if extended and formatted:
return dict((lib, elfclass,) for lib, elfclass in needed)
return needed
def retrieveNeededLibraries(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT lib_user_path, lib_user_soname, soname, elfclass, rpath
FROM needed_libs WHERE idpackage = ?
""", (package_id,))
return frozenset(cur)
def retrieveProvidedLibraries(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT library, path, elfclass FROM provided_libs
WHERE idpackage = ?
""", (package_id,))
return frozenset(cur)
def retrieveConflicts(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT conflict FROM conflicts WHERE idpackage = ?
""", (package_id,))
return self._cur2frozenset(cur)
def retrieveProvide(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT atom, is_default FROM provide WHERE idpackage = ?
""", (package_id,))
return frozenset(cur)
def retrieveDependenciesList(self, package_id, exclude_deptypes = None,
resolve_conditional_deps = True):
"""
Reimplemented from EntropyRepositoryBase.
"""
excluded_deptypes_query = ""
if exclude_deptypes is not None:
for dep_type in exclude_deptypes:
excluded_deptypes_query += \
" AND dependencies.type != %d" % (dep_type,)
concat = self._concatOperator(
("'!'", "conflict"))
cur = self._cursor().execute("""
SELECT dependenciesreference.dependency
FROM dependencies, dependenciesreference
WHERE dependencies.idpackage = (?) AND
dependencies.iddependency = dependenciesreference.iddependency %s
UNION SELECT %s FROM conflicts
WHERE idpackage = (?)""" % (excluded_deptypes_query, concat,),
(package_id, package_id,))
if resolve_conditional_deps:
return frozenset(entropy.dep.expand_dependencies(cur, [self]))
else:
return self._cur2frozenset(cur)
def retrieveBuildDependencies(self, package_id, extended = False,
resolve_conditional_deps = True):
"""
Reimplemented from EntropyRepositoryBase.
"""
return self.retrieveDependencies(package_id, extended = extended,
deptype = etpConst['dependency_type_ids']['bdepend_id'],
resolve_conditional_deps = resolve_conditional_deps)
def retrieveRuntimeDependencies(self, package_id, extended = False,
resolve_conditional_deps = True):
"""
Reimplemented from EntropyRepositoryBase.
"""
return self.retrieveDependencies(package_id, extended = extended,
deptype = etpConst['dependency_type_ids']['rdepend_id'],
resolve_conditional_deps = resolve_conditional_deps)
def retrievePostDependencies(self, package_id, extended = False,
resolve_conditional_deps = True):
"""
Reimplemented from EntropyRepositoryBase.
"""
return self.retrieveDependencies(package_id, extended = extended,
deptype = etpConst['dependency_type_ids']['pdepend_id'],
resolve_conditional_deps = resolve_conditional_deps)
def retrieveManualDependencies(self, package_id, extended = False,
resolve_conditional_deps = True):
"""
Reimplemented from EntropyRepositoryBase.
"""
return self.retrieveDependencies(package_id, extended = extended,
deptype = etpConst['dependency_type_ids']['mdepend_id'],
resolve_conditional_deps = resolve_conditional_deps)
def retrieveDependencies(self, package_id, extended = False,
deptype = None, exclude_deptypes = None,
resolve_conditional_deps = True):
"""
Reimplemented from EntropyRepositoryBase.
"""
searchdata = (package_id,)
depstring = ''
if deptype is not None:
depstring = 'and dependencies.type = ?'
searchdata += (deptype,)
excluded_deptypes_query = ""
if exclude_deptypes is not None:
for dep_type in exclude_deptypes:
excluded_deptypes_query += " AND dependencies.type != %d" % (
dep_type,)
cur = None
iter_obj = None
if extended:
cur = self._cursor().execute("""
SELECT dependenciesreference.dependency,dependencies.type
FROM dependencies,dependenciesreference
WHERE dependencies.idpackage = ? AND
dependencies.iddependency =
dependenciesreference.iddependency %s %s""" % (
depstring, excluded_deptypes_query,), searchdata)
iter_obj = tuple
else:
cur = self._cursor().execute("""
SELECT dependenciesreference.dependency
FROM dependencies,dependenciesreference
WHERE dependencies.idpackage = ? AND
dependencies.iddependency =
dependenciesreference.iddependency %s %s""" % (
depstring, excluded_deptypes_query,), searchdata)
iter_obj = frozenset
if resolve_conditional_deps:
return iter_obj(entropy.dep.expand_dependencies(
cur, [self]))
return iter_obj(cur)
def retrieveKeywords(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT keywordname FROM keywords,keywordsreference
WHERE keywords.idpackage = ? AND
keywords.idkeyword = keywordsreference.idkeyword""", (package_id,))
return self._cur2frozenset(cur)
def retrieveProtect(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT protect FROM configprotect,configprotectreference
WHERE configprotect.idpackage = ? AND
configprotect.idprotect = configprotectreference.idprotect
LIMIT 1
""", (package_id,))
protect = cur.fetchone()
if protect:
return protect[0]
return ''
def retrieveProtectMask(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT protect FROM configprotectmask,configprotectreference
WHERE idpackage = ? AND
configprotectmask.idprotect = configprotectreference.idprotect
LIMIT 1
""", (package_id,))
protect = cur.fetchone()
if protect:
return protect[0]
return ''
def retrieveSources(self, package_id, extended = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT sourcesreference.source FROM sources, sourcesreference
WHERE idpackage = ? AND
sources.idsource = sourcesreference.idsource
""", (package_id,))
sources = self._cur2frozenset(cur)
if not extended:
return sources
source_data = {}
mirror_str = "mirror://"
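        # mirror:// sources are expanded using the stored mirror list,
        # e.g. (hypothetical) "mirror://openoffice/foo.tar.gz" becomes one
        # URL per entry returned by retrieveMirrorData("openoffice")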
for source in sources:
source_data[source] = set()
if source.startswith(mirror_str):
mirrorname = source.split("/")[2]
# avoid leading "/"
mirror_url = source.split("/", 3)[3:][0].lstrip("/")
source_data[source] |= set(
[url.rstrip("/") + "/" + mirror_url for url in \
self.retrieveMirrorData(mirrorname)])
else:
source_data[source].add(source)
return source_data
def retrieveAutomergefiles(self, package_id, get_dict = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
# like portage does
self._connection().unicode()
cur = self._cursor().execute("""
SELECT configfile, md5 FROM automergefiles WHERE idpackage = ?
""", (package_id,))
data = frozenset(cur)
if get_dict:
data = dict(((x, y,) for x, y in data))
return data
def retrieveContent(self, package_id, extended = False,
formatted = False, insert_formatted = False, order_by = None):
"""
Reimplemented from EntropyRepositoryBase.
"""
extstring = ''
if extended:
extstring = ",type"
extstring_package_id = ''
if insert_formatted:
extstring_package_id = 'idpackage,'
searchkeywords = (package_id,)
order_by_string = ''
if order_by is not None:
if order_by not in ("package_id", "idpackage", "file", "type",):
raise AttributeError("invalid order_by argument")
if order_by == "package_id":
order_by = "idpackage"
order_by_string = ' order by %s' % (order_by,)
cur = self._cursor().execute("""
SELECT %s file%s FROM content WHERE idpackage = ? %s""" % (
extstring_package_id, extstring, order_by_string,),
searchkeywords)
if extended and insert_formatted:
fl = tuple(cur)
elif extended and formatted:
fl = {}
items = cur.fetchone()
while items:
fl[items[0]] = items[1]
items = cur.fetchone()
elif extended:
fl = tuple(cur)
else:
if order_by:
fl = self._cur2tuple(cur)
else:
fl = self._cur2frozenset(cur)
return fl
def retrieveContentIter(self, package_id, order_by = None,
reverse = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
class MyIter:
def __init__(self, db, query, keywords):
self._cur = None
self._db = db
self._query = query
self._keywords = keywords
self._init_cur()
def _init_cur(self):
self._cur = self._db._cursor().execute(
self._query, self._keywords)
def __iter__(self):
self._init_cur()
return self
def __next__(self):
return next(self._cur)
def next(self):
return self._cur.next()
searchkeywords = (package_id,)
order_by_string = ''
if order_by is not None:
if order_by not in ("package_id", "idpackage", "file", "type",):
raise AttributeError("invalid order_by argument")
if order_by == "package_id":
order_by = "idpackage"
ordering_term = "ASC"
if reverse:
ordering_term = "DESC"
order_by_string = " order by %s %s" % (
order_by, ordering_term)
query = """
SELECT file, type FROM content WHERE idpackage = ? %s""" % (
order_by_string,)
return MyIter(self, query, searchkeywords)
def retrieveContentSafety(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT file, sha256, mtime from contentsafety WHERE idpackage = ?
""", (package_id,))
return dict((path, {'sha256': sha256, 'mtime': mtime}) for path, \
sha256, mtime in cur)
def retrieveContentSafetyIter(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
class MyIter:
def __init__(self, db, query, keywords):
self._cur = None
self._db = db
self._query = query
self._keywords = keywords
self._init_cur()
def _init_cur(self):
self._cur = self._db._cursor().execute(
self._query, self._keywords)
def __iter__(self):
self._init_cur()
return self
def __next__(self):
return next(self._cur)
def next(self):
return self._cur.next()
query = """
SELECT file, sha256, mtime from contentsafety WHERE idpackage = ?
"""
return MyIter(self, query, (package_id,))
def retrieveChangelog(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT packagechangelogs.changelog
FROM packagechangelogs, baseinfo
WHERE baseinfo.idpackage = ? AND
packagechangelogs.category = baseinfo.category AND
packagechangelogs.name = baseinfo.name
LIMIT 1
""", (package_id,))
changelog = cur.fetchone()
if changelog:
changelog = changelog[0]
try:
return const_convert_to_unicode(changelog)
except UnicodeDecodeError:
return const_convert_to_unicode(
changelog, enctype = 'utf-8')
def retrieveChangelogByKey(self, category, name):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._connection().unicode()
cur = self._cursor().execute("""
SELECT changelog FROM packagechangelogs WHERE category = ? AND
name = ? LIMIT 1
""", (category, name,))
changelog = cur.fetchone()
if changelog:
return const_convert_to_unicode(changelog[0])
def retrieveSlot(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT slot FROM baseinfo
WHERE idpackage = ? LIMIT 1
""", (package_id,))
slot = cur.fetchone()
if slot:
return slot[0]
return None
def retrieveTag(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT versiontag FROM baseinfo
WHERE idpackage = ? LIMIT 1
""", (package_id,))
tag = cur.fetchone()
if tag:
return tag[0]
return None
def retrieveMirrorData(self, mirrorname):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT mirrorlink FROM mirrorlinks WHERE mirrorname = ?
""", (mirrorname,))
return self._cur2frozenset(cur)
def retrieveCategory(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT category FROM baseinfo
WHERE idpackage = ? LIMIT 1
""", (package_id,))
category = cur.fetchone()
if category:
return category[0]
return None
def retrieveCategoryDescription(self, category):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT description, locale FROM categoriesdescription
WHERE category = ?
""", (category,))
return dict((locale, desc,) for desc, locale in cur)
def retrieveLicenseData(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
licenses = self.retrieveLicense(package_id)
if licenses is None:
return {}
licdata = {}
for licname in licenses.split():
if not licname.strip():
continue
if not entropy.tools.is_valid_string(licname):
continue
cur = self._cursor().execute("""
SELECT text FROM licensedata WHERE licensename = ? LIMIT 1
""", (licname,))
lictext = cur.fetchone()
if lictext is not None:
lictext = lictext[0]
try:
licdata[licname] = const_convert_to_unicode(lictext)
except UnicodeDecodeError:
licdata[licname] = \
const_convert_to_unicode(
lictext, enctype = 'utf-8')
return licdata
def retrieveLicenseDataKeys(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
licenses = self.retrieveLicense(package_id)
if licenses is None:
return frozenset()
licdata = set()
for licname in licenses.split():
if not licname.strip():
continue
if not entropy.tools.is_valid_string(licname):
continue
cur = self._cursor().execute("""
SELECT licensename FROM licensedata WHERE licensename = ?
LIMIT 1
""", (licname,))
lic_id = cur.fetchone()
if lic_id:
licdata.add(lic_id[0])
return frozenset(licdata)
def retrieveLicenseText(self, license_name):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._connection().unicode()
cur = self._cursor().execute("""
SELECT text FROM licensedata WHERE licensename = ? LIMIT 1
""", (license_name,))
text = cur.fetchone()
if text:
return const_convert_to_rawstring(text[0])
def retrieveLicense(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT license FROM baseinfo
WHERE idpackage = ? LIMIT 1
""", (package_id,))
licname = cur.fetchone()
if licname:
return licname[0]
def retrieveCompileFlags(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT chost, cflags, cxxflags FROM extrainfo
WHERE extrainfo.idpackage = ? LIMIT 1""", (package_id,))
flags = cur.fetchone()
if not flags:
flags = ("N/A", "N/A", "N/A",)
return flags
def retrieveReverseDependencies(self, package_id, atoms = False,
key_slot = False, exclude_deptypes = None, extended = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
cached = self._getLiveCache("reverseDependenciesMetadata")
if cached is None:
cached = self._generateReverseDependenciesMetadata()
dep_ids = set((k for k, v in cached.items() if package_id in v))
if not dep_ids:
# avoid python3.x memleak
del cached
if key_slot:
return tuple()
return frozenset()
dep_ids_str = ', '.join((str(x) for x in dep_ids))
excluded_deptypes_query = ""
if exclude_deptypes is not None:
for dep_type in exclude_deptypes:
excluded_deptypes_query += " AND dependencies.type != %d" % (
dep_type,)
if atoms:
if extended:
cur = self._cursor().execute("""
SELECT baseinfo.atom, dependenciesreference.dependency
FROM dependencies, baseinfo, dependenciesreference
WHERE baseinfo.idpackage = dependencies.idpackage %s AND
dependencies.iddependency =
dependenciesreference.iddependency AND
dependencies.iddependency IN ( %s )""" % (
excluded_deptypes_query, dep_ids_str,))
result = tuple(cur)
else:
cur = self._cursor().execute("""
SELECT baseinfo.atom FROM dependencies, baseinfo
WHERE baseinfo.idpackage = dependencies.idpackage %s AND
dependencies.iddependency IN ( %s )""" % (
excluded_deptypes_query, dep_ids_str,))
result = self._cur2frozenset(cur)
elif key_slot:
if self._isBaseinfoExtrainfo2010():
concat = self._concatOperator(
("baseinfo.category", "'/'", "baseinfo.name"))
if extended:
cur = self._cursor().execute("""
SELECT %s,
baseinfo.slot, dependenciesreference.dependency
FROM baseinfo, dependencies, dependenciesreference
WHERE baseinfo.idpackage = dependencies.idpackage %s AND
dependencies.iddependency =
dependenciesreference.iddependency AND
dependencies.iddependency IN ( %s )""" % (
concat, excluded_deptypes_query, dep_ids_str,))
else:
cur = self._cursor().execute("""
SELECT %s, baseinfo.slot
FROM baseinfo, dependencies
WHERE baseinfo.idpackage = dependencies.idpackage %s AND
dependencies.iddependency IN ( %s )""" % (
concat, excluded_deptypes_query, dep_ids_str,))
else:
concat = self._concatOperator(
("categories.category", "'/'", "baseinfo.name"))
if extended:
cur = self._cursor().execute("""
SELECT %s,
baseinfo.slot, dependenciesreference.dependency
FROM baseinfo, categories,
dependencies, dependenciesreference
WHERE baseinfo.idpackage = dependencies.idpackage AND
dependencies.iddependency =
dependenciesreference.iddependency AND
categories.idcategory = baseinfo.idcategory %s AND
dependencies.iddependency IN ( %s )""" % (
concat, excluded_deptypes_query, dep_ids_str,))
else:
cur = self._cursor().execute("""
SELECT %s, baseinfo.slot
FROM baseinfo, categories, dependencies
WHERE baseinfo.idpackage = dependencies.idpackage AND
categories.idcategory = baseinfo.idcategory %s AND
dependencies.iddependency IN ( %s )""" % (
concat, excluded_deptypes_query, dep_ids_str,))
result = tuple(cur)
elif excluded_deptypes_query:
if extended:
cur = self._cursor().execute("""
SELECT dependencies.idpackage, dependenciesreference.dependency
FROM dependencies, dependenciesreference
WHERE %s AND
dependencies.iddependency =
dependenciesreference.iddependency AND
dependencies.iddependency IN ( %s )""" % (
excluded_deptypes_query.lstrip("AND "), dep_ids_str,))
result = tuple(cur)
else:
cur = self._cursor().execute("""
SELECT dependencies.idpackage FROM dependencies
WHERE %s AND dependencies.iddependency IN ( %s )""" % (
excluded_deptypes_query.lstrip("AND "), dep_ids_str,))
result = self._cur2frozenset(cur)
else:
if extended:
cur = self._cursor().execute("""
SELECT dependencies.idpackage, dependenciesreference.dependency
FROM dependencies, dependenciesreference
WHERE
dependencies.iddependency =
dependenciesreference.iddependency AND
dependencies.iddependency IN ( %s )""" % (dep_ids_str,))
result = tuple(cur)
else:
cur = self._cursor().execute("""
SELECT dependencies.idpackage FROM dependencies
WHERE dependencies.iddependency IN ( %s )""" % (dep_ids_str,))
result = self._cur2frozenset(cur)
# avoid python3.x memleak
del cached
return result
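    # Usage sketch for retrieveReverseDependencies() above; "repo" stands for
    # an assumed open repository instance and the package id and result shapes
    # are illustrative assumptions, not real repository values:
    #
    #   rev_ids = repo.retrieveReverseDependencies(1234)
    #   # -> frozenset of package ids depending on package 1234
    #   rev_atoms = repo.retrieveReverseDependencies(1234, atoms=True)
    #   # -> frozenset of atoms (tuples of (atom, dependency) if extended=True)
    #   rev_keys = repo.retrieveReverseDependencies(1234, key_slot=True)
    #   # -> tuple of ("category/name", slot) pairs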
def retrieveUnusedPackageIds(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
cached = self._getLiveCache("reverseDependenciesMetadata")
if cached is None:
cached = self._generateReverseDependenciesMetadata()
pkg_ids = set()
for v in cached.values():
pkg_ids |= v
if not pkg_ids:
# avoid python3.x memleak
del cached
return tuple()
pkg_ids_str = ', '.join((str(x) for x in pkg_ids))
cur = self._cursor().execute("""
SELECT idpackage FROM baseinfo
WHERE idpackage NOT IN ( %s )
ORDER BY atom
""" % (pkg_ids_str,))
# avoid python3.x memleak
del cached
return self._cur2tuple(cur)
def arePackageIdsAvailable(self, package_ids):
"""
Reimplemented from EntropyRepositoryBase.
"""
sql = """SELECT count(idpackage) FROM baseinfo
WHERE idpackage IN (%s) LIMIT 1""" % (','.join(
[str(x) for x in set(package_ids)]),
)
cur = self._cursor().execute(sql)
count = cur.fetchone()[0]
if count != len(package_ids):
return False
return True
def isPackageIdAvailable(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT idpackage FROM baseinfo WHERE idpackage = ? LIMIT 1
""", (package_id,))
result = cur.fetchone()
if not result:
return False
return True
def _isProtectAvailable(self, protect):
"""
Return whether given CONFIG_PROTECT* entry is available in repository.
@param protect: CONFIG_PROTECT* entry (path to a protected directory
or file that won't be overwritten by Entropy Client during
package merge)
@type protect: string
@return: availability (True if available)
@rtype: bool
"""
cur = self._cursor().execute("""
SELECT idprotect FROM configprotectreference WHERE protect = ?
LIMIT 1
""", (protect,))
result = cur.fetchone()
if result:
return result[0]
return -1
def isFileAvailable(self, path, get_id = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT idpackage FROM content WHERE file = ?""", (path,))
result = self._cur2frozenset(cur)
if get_id:
return result
elif result:
return True
return False
def resolveNeeded(self, needed, elfclass = -1, extended = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
args = (needed,)
elfclass_txt = ''
if elfclass != -1:
elfclass_txt = ' AND provided_libs.elfclass = ?'
args = (needed, elfclass,)
if extended:
cur = self._cursor().execute("""
SELECT idpackage, path FROM provided_libs
WHERE library = ?""" + elfclass_txt, args)
return frozenset(cur)
cur = self._cursor().execute("""
SELECT idpackage FROM provided_libs
WHERE library = ?""" + elfclass_txt, args)
return self._cur2frozenset(cur)
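    # Usage sketch for resolveNeeded() above (library soname and ELF class are
    # illustrative assumptions):
    #
    #   providers = repo.resolveNeeded("libz.so.1", elfclass=2)
    #   # -> frozenset of package ids providing that library
    #   providers_ext = repo.resolveNeeded("libz.so.1", elfclass=2, extended=True)
    #   # -> frozenset of (package_id, path) tuples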
def _isSourceAvailable(self, source):
"""
Return whether given source package URL is available in repository.
Returns source package URL identifier (idsource).
@param source: source package URL
@type source: string
@return: source package URL identifier (idsource) or -1 if not found
@rtype: int
"""
cur = self._cursor().execute("""
SELECT idsource FROM sourcesreference WHERE source = ? LIMIT 1
""", (source,))
result = cur.fetchone()
if result:
return result[0]
return -1
def _isDependencyAvailable(self, dependency):
"""
Return whether given dependency string is available in repository.
Returns dependency identifier (iddependency).
@param dependency: dependency string
@type dependency: string
@return: dependency identifier (iddependency) or -1 if not found
@rtype: int
"""
cur = self._cursor().execute("""
SELECT iddependency FROM dependenciesreference WHERE dependency = ?
LIMIT 1
""", (dependency,))
result = cur.fetchone()
if result:
return result[0]
return -1
def _isKeywordAvailable(self, keyword):
"""
Return whether keyword string is available in repository.
Returns keyword identifier (idkeyword)
@param keyword: keyword string
@type keyword: string
@return: keyword identifier (idkeyword) or -1 if not found
@rtype: int
"""
cur = self._cursor().execute("""
SELECT idkeyword FROM keywordsreference WHERE keywordname = ? LIMIT 1
""", (keyword,))
result = cur.fetchone()
if result:
return result[0]
return -1
def _isUseflagAvailable(self, useflag):
"""
Return whether USE flag name is available in repository.
Returns USE flag identifier (idflag).
@param useflag: USE flag name
@type useflag: string
@return: USE flag identifier or -1 if not found
@rtype: int
"""
cur = self._cursor().execute("""
SELECT idflag FROM useflagsreference WHERE flagname = ? LIMIT 1
""", (useflag,))
result = cur.fetchone()
if result:
return result[0]
return -1
def isSpmUidAvailable(self, spm_uid):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT counter FROM counters WHERE counter = ? LIMIT 1
""", (spm_uid,))
result = cur.fetchone()
if result:
return True
return False
def isSpmUidTrashed(self, spm_uid):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT counter FROM trashedcounters WHERE counter = ? LIMIT 1
""", (spm_uid,))
result = cur.fetchone()
if result:
return True
return False
def isLicenseDataKeyAvailable(self, license_name):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT licensename FROM licensedata WHERE licensename = ? LIMIT 1
""", (license_name,))
result = cur.fetchone()
if not result:
return False
return True
def isLicenseAccepted(self, license_name):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT licensename FROM licenses_accepted WHERE licensename = ?
LIMIT 1
""", (license_name,))
result = cur.fetchone()
if not result:
return False
return True
def isSystemPackage(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT idpackage FROM systempackages WHERE idpackage = ? LIMIT 1
""", (package_id,))
result = cur.fetchone()
if result:
return True
return False
def isInjected(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT idpackage FROM injected WHERE idpackage = ? LIMIT 1
""", (package_id,))
result = cur.fetchone()
if result:
return True
return False
def searchBelongs(self, bfile, like = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
if like:
cur = self._cursor().execute("""
SELECT content.idpackage FROM content,baseinfo
WHERE file LIKE ? AND
content.idpackage = baseinfo.idpackage""", (bfile,))
else:
cur = self._cursor().execute("""
SELECT content.idpackage
FROM content, baseinfo WHERE file = ?
AND content.idpackage = baseinfo.idpackage""", (bfile,))
return self._cur2frozenset(cur)
def searchContentSafety(self, sfile):
"""
Search content safety metadata (usually, sha256 and mtime) related to
        given file path. A tuple of dictionaries is returned; each dictionary
        contains at least the following fields: "path", "sha256", "mtime".
@param sfile: file path to search
@type sfile: string
@return: content safety metadata list
@rtype: tuple
"""
cur = self._cursor().execute("""
SELECT idpackage, file, sha256, mtime
FROM contentsafety WHERE file = ?""", (sfile,))
return tuple(({'package_id': x, 'path': y, 'sha256': z, 'mtime': m} for
x, y, z, m in cur))
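    # The tuple returned by searchContentSafety() above looks, for instance,
    # like this (values are made up for illustration):
    #
    #   ({'package_id': 1234, 'path': '/etc/foo.conf',
    #     'sha256': 'deadbeef...', 'mtime': 1318557779.0},)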
def searchTaggedPackages(self, tag, atoms = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
if atoms:
cur = self._cursor().execute("""
SELECT atom, idpackage FROM baseinfo WHERE versiontag = ?
""", (tag,))
return frozenset(cur)
cur = self._cursor().execute("""
SELECT idpackage FROM baseinfo WHERE versiontag = ?
""", (tag,))
return self._cur2frozenset(cur)
def searchRevisionedPackages(self, revision):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT idpackage FROM baseinfo WHERE revision = ?
""", (revision,))
return self._cur2frozenset(cur)
def acceptLicense(self, license_name):
"""
Reimplemented from EntropyRepositoryBase.
Needs to call superclass method.
"""
super(EntropySQLRepository, self).acceptLicense(license_name)
self._cursor().execute("""
%s INTO licenses_accepted VALUES (?)
""" % (self._INSERT_OR_IGNORE,), (license_name,))
def searchLicense(self, keyword, just_id = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
if not entropy.tools.is_valid_string(keyword):
return frozenset()
if just_id:
cur = self._cursor().execute("""
SELECT baseinfo.idpackage FROM
baseinfo WHERE LOWER(baseinfo.license) LIKE ?
""", ("%"+keyword+"%".lower(),))
return self._cur2frozenset(cur)
else:
cur = self._cursor().execute("""
SELECT baseinfo.atom, baseinfo.idpackage FROM
baseinfo WHERE LOWER(baseinfo.license) LIKE ?
""", ("%"+keyword+"%".lower(),))
return frozenset(cur)
def searchSlotted(self, keyword, just_id = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
if just_id:
cur = self._cursor().execute("""
SELECT idpackage FROM baseinfo WHERE slot = ?""", (keyword,))
return self._cur2frozenset(cur)
else:
cur = self._cursor().execute("""
SELECT atom, idpackage FROM baseinfo WHERE slot = ?
""", (keyword,))
return frozenset(cur)
def searchKeySlot(self, key, slot):
"""
Reimplemented from EntropyRepositoryBase.
"""
concat = self._concatOperator(("category", "'/'", "name"))
cur = self._cursor().execute("""
SELECT idpackage FROM baseinfo
WHERE %s = ? AND slot = ?
""" % (concat,), (key, slot,))
return self._cur2frozenset(cur)
def searchKeySlotTag(self, key, slot, tag):
"""
Reimplemented from EntropyRepositoryBase.
"""
concat = self._concatOperator(("category", "'/'", "name"))
cur = self._cursor().execute("""
SELECT idpackage FROM baseinfo
WHERE %s = ? AND slot = ?
AND versiontag = ?
""" % (concat,), (key, slot, tag))
return self._cur2frozenset(cur)
def searchNeeded(self, needed, elfclass = -1, like = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
if not self._doesTableExist("needed_libs"):
# kept for backward compatibility.
return self._compatSearchNeeded(
needed, elfclass = elfclass, like = like)
likestr = '='
if like:
needed = needed.replace("*", "%")
likestr = 'LIKE'
elfsearch = ''
search_args = (needed,)
if elfclass != -1:
elfsearch = ' AND elfclass = ?'
search_args = (needed, elfclass,)
cur = self._cursor().execute("""
SELECT idpackage FROM needed_libs
WHERE soname %s ? %s
""" % (likestr, elfsearch,), search_args)
return self._cur2frozenset(cur)
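    # Usage sketch for searchNeeded() above; "*" wildcards are turned into SQL
    # "%" when like=True (the soname below is an illustrative assumption):
    #
    #   consumers = repo.searchNeeded("libssl.so*", like=True)
    #   # -> frozenset of package ids requiring a matching soname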
def _compatSearchNeeded(self, needed, elfclass = -1, like = False):
"""
searchNeeded() implementation compatible with the old needed schema.
"""
if like:
needed = needed.replace("*", "%")
elfsearch = ''
search_args = (needed,)
if elfclass != -1:
elfsearch = ' AND needed.elfclass = ?'
search_args = (needed, elfclass,)
if like:
cur = self._cursor().execute("""
SELECT needed.idpackage FROM needed,neededreference
WHERE library LIKE ? %s AND
needed.idneeded = neededreference.idneeded
""" % (elfsearch,), search_args)
else:
cur = self._cursor().execute("""
SELECT needed.idpackage FROM needed,neededreference
WHERE library = ? %s AND
needed.idneeded = neededreference.idneeded
""" % (elfsearch,), search_args)
return self._cur2frozenset(cur)
def searchConflict(self, conflict, strings = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
keyword = "%"+conflict+"%"
if strings:
cur = self._cursor().execute("""
SELECT conflict FROM conflicts WHERE conflict LIKE ?
""", (keyword,))
return self._cur2tuple(cur)
cur = self._cursor().execute("""
SELECT idpackage, conflict FROM conflicts WHERE conflict LIKE ?
""", (keyword,))
return tuple(cur)
def searchDependency(self, dep, like = False, multi = False,
strings = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
sign = "="
limit = ""
if like:
sign = "LIKE"
dep = "%"+dep+"%"
item = 'iddependency'
if strings:
item = 'dependency'
if not multi:
limit = "LIMIT 1"
cur = self._cursor().execute("""
SELECT %s FROM dependenciesreference WHERE dependency %s ? %s
""" % (item, sign, limit), (dep,))
if multi:
return self._cur2frozenset(cur)
iddep = cur.fetchone()
if iddep:
return iddep[0]
return -1
def searchPackageIdFromDependencyId(self, dependency_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT idpackage FROM dependencies WHERE iddependency = ?
""", (dependency_id,))
return self._cur2frozenset(cur)
def searchSets(self, keyword):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT DISTINCT(setname) FROM packagesets WHERE setname LIKE ?
""", ("%"+keyword+"%",))
return self._cur2frozenset(cur)
def searchProvidedMime(self, mimetype):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT provided_mime.idpackage FROM provided_mime, baseinfo
WHERE provided_mime.mimetype = ?
AND baseinfo.idpackage = provided_mime.idpackage
ORDER BY baseinfo.atom""",
(mimetype,))
return self._cur2tuple(cur)
def searchSimilarPackages(self, keyword, atom = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
s_item = 'name'
if atom:
s_item = 'atom'
cur = self._cursor().execute("""
SELECT idpackage FROM baseinfo
WHERE soundex(%s) = soundex(?) ORDER BY %s
""" % (s_item, s_item,), (keyword,))
return self._cur2tuple(cur)
def searchPackages(self, keyword, sensitive = False, slot = None,
tag = None, order_by = None, just_id = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
like_keyword = "%"+keyword+"%"
if not sensitive:
like_keyword = like_keyword.lower()
searchkeywords = (like_keyword, like_keyword)
slotstring = ''
if slot:
searchkeywords += (slot,)
slotstring = ' AND slot = ?'
tagstring = ''
if tag:
searchkeywords += (tag,)
tagstring = ' AND versiontag = ?'
order_by_string = ''
if order_by is not None:
valid_order_by = ("atom", "idpackage", "package_id", "branch",
"name", "version", "versiontag", "revision", "slot")
if order_by not in valid_order_by:
raise AttributeError("invalid order_by argument")
if order_by == "package_id":
order_by = "idpackage"
order_by_string = ' ORDER BY %s' % (order_by,)
        # result columns: atom, idpackage, branch
        # (reduced to just idpackage when just_id is True)
search_elements_all = """\
t.atom AS atom, t.idpackage AS idpackage, t.branch AS branch,
t.name AS name, t.version AS version, t.versiontag AS versiontag,
t.revision AS revision, t.slot AS slot"""
search_elements_provide_all = """\
d.atom AS atom, d.idpackage AS idpackage, d.branch AS branch,
d.name AS name, d.version AS version, d.versiontag AS versiontag,
d.revision AS revision, d.slot AS slot"""
search_elements = 'atom, idpackage, branch'
if just_id:
search_elements = 'idpackage'
if sensitive:
cur = self._cursor().execute("""
SELECT DISTINCT %s FROM (
SELECT %s FROM baseinfo t
WHERE t.atom LIKE ?
UNION ALL
SELECT %s FROM baseinfo d, provide as p
WHERE d.idpackage = p.idpackage
AND p.atom LIKE ?
) WHERE 1=1 %s %s %s
""" % (search_elements, search_elements_all,
search_elements_provide_all, slotstring, tagstring,
order_by_string), searchkeywords)
else:
cur = self._cursor().execute("""
SELECT DISTINCT %s FROM (
SELECT %s FROM baseinfo t
WHERE LOWER(t.atom) LIKE ?
UNION ALL
SELECT %s FROM baseinfo d, provide as p
WHERE d.idpackage = p.idpackage
AND LOWER(p.atom) LIKE ?
) WHERE 1=1 %s %s %s
""" % (search_elements, search_elements_all,
search_elements_provide_all, slotstring, tagstring,
order_by_string), searchkeywords)
if just_id:
return self._cur2tuple(cur)
return tuple(cur)
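    # Usage sketch for searchPackages() above; keyword, slot and ordering are
    # illustrative assumptions (the keyword gets wrapped in SQL "%" wildcards
    # automatically):
    #
    #   pkgs = repo.searchPackages("gcc", slot="4.7", order_by="atom")
    #   # -> tuple of (atom, idpackage, branch) tuples
    #   pkg_ids = repo.searchPackages("gcc", just_id=True)
    #   # -> tuple of package ids only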
def searchProvidedVirtualPackage(self, keyword):
"""
Search in old-style Portage PROVIDE metadata.
@todo: rewrite docstring :-)
@param keyword: search term
@type keyword: string
@return: found PROVIDE metadata
@rtype: list
"""
cur = self._cursor().execute("""
SELECT baseinfo.idpackage, provide.is_default
FROM baseinfo, provide
WHERE provide.atom = ? AND
provide.idpackage = baseinfo.idpackage
""", (keyword,))
return tuple(cur)
def searchDescription(self, keyword, just_id = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
keyword_split = keyword.split()
query_str_list = []
query_args = []
for sub_keyword in keyword_split:
query_str_list.append("LOWER(extrainfo.description) LIKE ?")
query_args.append("%" + sub_keyword + "%")
query_str = " AND ".join(query_str_list)
if just_id:
cur = self._cursor().execute("""
SELECT baseinfo.idpackage FROM extrainfo, baseinfo
WHERE %s AND
baseinfo.idpackage = extrainfo.idpackage
""" % (query_str,), query_args)
return self._cur2frozenset(cur)
else:
cur = self._cursor().execute("""
SELECT baseinfo.atom, baseinfo.idpackage FROM extrainfo, baseinfo
WHERE %s AND
baseinfo.idpackage = extrainfo.idpackage
""" % (query_str,), query_args)
return frozenset(cur)
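    # searchDescription() above matches every whitespace-separated sub-keyword
    # case-insensitively, so the following (illustrative) query only returns
    # packages whose description contains both words:
    #
    #   hits = repo.searchDescription("text editor")
    #   # -> frozenset of (atom, package_id) tuples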
def searchUseflag(self, keyword, just_id = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
if just_id:
cur = self._cursor().execute("""
SELECT useflags.idpackage FROM useflags, useflagsreference
WHERE useflags.idflag = useflagsreference.idflag
AND useflagsreference.flagname = ?
""", (keyword,))
return self._cur2frozenset(cur)
else:
cur = self._cursor().execute("""
SELECT baseinfo.atom, useflags.idpackage
FROM baseinfo, useflags, useflagsreference
WHERE useflags.idflag = useflagsreference.idflag
AND baseinfo.idpackage = useflags.idpackage
AND useflagsreference.flagname = ?
""", (keyword,))
return frozenset(cur)
def searchHomepage(self, keyword, just_id = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
if just_id:
cur = self._cursor().execute("""
SELECT baseinfo.idpackage FROM extrainfo, baseinfo
WHERE LOWER(extrainfo.homepage) LIKE ? AND
baseinfo.idpackage = extrainfo.idpackage
""", ("%"+keyword.lower()+"%",))
return self._cur2frozenset(cur)
else:
cur = self._cursor().execute("""
SELECT baseinfo.atom, baseinfo.idpackage FROM extrainfo, baseinfo
WHERE LOWER(extrainfo.homepage) LIKE ? AND
baseinfo.idpackage = extrainfo.idpackage
""", ("%"+keyword.lower()+"%",))
return frozenset(cur)
def searchName(self, keyword, sensitive = False, just_id = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
atomstring = ''
if not just_id:
atomstring = 'atom,'
if sensitive:
cur = self._cursor().execute("""
SELECT %s idpackage FROM baseinfo
WHERE name = ?
""" % (atomstring,), (keyword,))
else:
cur = self._cursor().execute("""
SELECT %s idpackage FROM baseinfo
WHERE LOWER(name) = ?
""" % (atomstring,), (keyword.lower(),))
if just_id:
return self._cur2tuple(cur)
return frozenset(cur)
def searchCategory(self, keyword, like = False, just_id = True):
"""
Reimplemented from EntropyRepositoryBase.
"""
like_string = "= ?"
if like:
like_string = "LIKE ?"
if just_id:
cur = self._cursor().execute("""
SELECT idpackage FROM baseinfo
WHERE baseinfo.category %s
""" % (like_string,), (keyword,))
else:
cur = self._cursor().execute("""
SELECT atom, idpackage FROM baseinfo
WHERE baseinfo.category %s
""" % (like_string,), (keyword,))
if just_id:
return self._cur2frozenset(cur)
return frozenset(cur)
def searchNameCategory(self, name, category, just_id = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
if just_id:
cur = self._cursor().execute("""
SELECT idpackage FROM baseinfo
WHERE name = ? AND category = ?
""", (name, category))
return self._cur2frozenset(cur)
cur = self._cursor().execute("""
SELECT atom, idpackage FROM baseinfo
WHERE name = ? AND category = ?
""", (name, category))
return tuple(cur)
def isPackageScopeAvailable(self, atom, slot, revision):
"""
Reimplemented from EntropyRepositoryBase.
"""
searchdata = (atom, slot, revision,)
cur = self._cursor().execute("""
SELECT idpackage FROM baseinfo
where atom = ? AND slot = ? AND revision = ? LIMIT 1
""", searchdata)
rslt = cur.fetchone()
if rslt: # check if it's masked
return self.maskFilter(rslt[0])
return -1, 0
def isBranchMigrationAvailable(self, repository, from_branch, to_branch):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT post_migration_md5sum, post_upgrade_md5sum
FROM entropy_branch_migration
WHERE repository = ? AND from_branch = ? AND to_branch = ? LIMIT 1
""", (repository, from_branch, to_branch,))
return cur.fetchone()
def listPackageIdsInCategory(self, category, order_by = None):
"""
Reimplemented from EntropyRepositoryBase.
"""
order_by_string = ''
if order_by is not None:
valid_order_by = ("atom", "idpackage", "package_id", "branch",
"name", "version", "versiontag", "revision", "slot")
if order_by not in valid_order_by:
raise AttributeError("invalid order_by argument")
if order_by == "package_id":
order_by = "idpackage"
order_by_string = ' order by %s' % (order_by,)
cur = self._cursor().execute("""
SELECT idpackage FROM baseinfo where category = ?
""" + order_by_string, (category,))
return self._cur2frozenset(cur)
def listAllPackages(self, get_scope = False, order_by = None):
"""
Reimplemented from EntropyRepositoryBase.
"""
order_by_string = ''
if order_by is not None:
valid_order_by = ("atom", "idpackage", "package_id", "branch",
"name", "version", "versiontag", "revision", "slot")
if order_by not in valid_order_by:
raise AttributeError("invalid order_by argument")
if order_by == "package_id":
order_by = "idpackage"
order_by_string = ' order by %s' % (order_by,)
if get_scope:
cur = self._cursor().execute("""
SELECT idpackage,atom,slot,revision FROM baseinfo
""" + order_by_string)
else:
cur = self._cursor().execute("""
SELECT atom,idpackage,branch FROM baseinfo
""" + order_by_string)
return tuple(cur)
def listAllSpmUids(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT counter, idpackage FROM counters
""")
return tuple(cur)
def listAllTrashedSpmUids(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT counter FROM trashedcounters
""")
return self._cur2frozenset(cur)
def listAllPackageIds(self, order_by = None):
"""
Reimplemented from EntropyRepositoryBase.
"""
order_by_string = ''
if order_by is not None:
valid_order_by = ("atom", "idpackage", "package_id", "branch",
"name", "version", "versiontag", "revision", "slot", "date")
if order_by not in valid_order_by:
raise AttributeError("invalid order_by argument")
if order_by == "package_id":
order_by = "idpackage"
order_by_string = ' order by %s' % (order_by,)
if order_by == "date":
cur = self._cursor().execute("""
SELECT baseinfo.idpackage FROM baseinfo, extrainfo
WHERE baseinfo.idpackage = extrainfo.idpackage
ORDER BY extrainfo.datecreation DESC""")
else:
cur = self._cursor().execute("""
SELECT idpackage FROM baseinfo""" + order_by_string)
try:
if order_by:
return self._cur2tuple(cur)
return self._cur2frozenset(cur)
except OperationalError:
if order_by:
return tuple()
return frozenset()
def listAllInjectedPackageIds(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("SELECT idpackage FROM injected")
return self._cur2frozenset(cur)
def listAllSystemPackageIds(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("SELECT idpackage FROM systempackages")
return self._cur2frozenset(cur)
def listAllDependencies(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT iddependency, dependency FROM dependenciesreference
""")
return tuple(cur)
def listAllDownloads(self, do_sort = True, full_path = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
order_string = ''
if do_sort:
order_string = 'ORDER BY extrainfo.download'
cur = self._cursor().execute("""
SELECT extrainfo.download FROM baseinfo, extrainfo
WHERE baseinfo.idpackage = extrainfo.idpackage %s
""" % (order_string,))
if do_sort:
results = self._cur2tuple(cur)
else:
results = self._cur2frozenset(cur)
if not full_path:
results = tuple((os.path.basename(x) for x in results))
return results
def listAllExtraDownloads(self, do_sort = True):
"""
Reimplemented from EntropyRepositoryBase.
"""
order_string = ''
if do_sort:
order_string = ' ORDER BY download'
cur = self._cursor().execute("""
SELECT download FROM packagedownloads
""" + order_string)
if do_sort:
results = self._cur2tuple(cur)
else:
results = self._cur2frozenset(cur)
return results
def listAllFiles(self, clean = False, count = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._connection().unicode()
if count:
cur = self._cursor().execute("""
SELECT count(file) FROM content LIMIT 1
""")
else:
cur = self._cursor().execute("""
SELECT file FROM content
""")
if count:
return cur.fetchone()[0]
if clean:
return self._cur2frozenset(cur)
return self._cur2tuple(cur)
def listAllCategories(self, order_by = None):
"""
Reimplemented from EntropyRepositoryBase.
"""
order_by_string = ''
if order_by is not None:
valid_order_by = ("category",)
if order_by not in valid_order_by:
raise AttributeError("invalid order_by argument")
order_by_string = 'ORDER BY %s' % (order_by,)
cur = self._cursor().execute(
"SELECT DISTINCT category FROM baseinfo %s" % (
order_by_string,))
return self._cur2frozenset(cur)
def listConfigProtectEntries(self, mask = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
mask_t = ''
if mask:
mask_t = 'mask'
dirs = set()
cur = self._cursor().execute("""
SELECT protect FROM configprotectreference WHERE idprotect IN
(SELECT distinct(idprotect) FROM configprotect%s)
""" % (mask_t,))
for mystr in self._cur2frozenset(cur):
dirs.update(mystr.split())
return sorted(dirs)
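    # listConfigProtectEntries() above flattens the whitespace-separated
    # CONFIG_PROTECT strings stored in the repository into a sorted list of
    # single paths, e.g. (illustrative values):
    #
    #   repo.listConfigProtectEntries()          # -> ['/etc', '/usr/share/config']
    #   repo.listConfigProtectEntries(mask=True) # CONFIG_PROTECT_MASK entries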
def switchBranch(self, package_id, tobranch):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
UPDATE baseinfo SET branch = ?
WHERE idpackage = ?""", (tobranch, package_id,))
self.clearCache()
def getSetting(self, setting_name):
"""
Reimplemented from EntropyRepositoryBase.
"""
cached = self._settings_cache.get(setting_name)
if isinstance(cached, KeyError):
raise cached
elif cached is not None:
return cached
try:
cur = self._cursor().execute("""
SELECT setting_value FROM settings WHERE setting_name = ?
LIMIT 1
""", (setting_name,))
except Error:
obj = KeyError("cannot find setting_name '%s'" % (
setting_name,))
self._settings_cache[setting_name] = obj
raise obj
setting = cur.fetchone()
if setting is None:
obj = KeyError("setting unavaliable '%s'" % (setting_name,))
self._settings_cache[setting_name] = obj
raise obj
obj = setting[0]
self._settings_cache[setting_name] = obj
return obj
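    # Usage sketch for getSetting() above. Both hits and misses are memoized in
    # _settings_cache, so a missing key keeps raising the same KeyError on later
    # calls (the setting name below is an illustrative assumption):
    #
    #   try:
    #       arch = repo.getSetting("arch")
    #   except KeyError:
    #       arch = None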
def _setSetting(self, setting_name, setting_value):
"""
Internal method, set new setting for setting_name with value
setting_value.
"""
# Always force const_convert_to_unicode() to setting_value
# and setting_name or "OR REPLACE" won't work (sqlite3 bug?)
cur = self._cursor().execute("""
%s INTO settings VALUES (?, ?)
""" % (self._INSERT_OR_REPLACE,),
(const_convert_to_unicode(setting_name),
const_convert_to_unicode(setting_value),))
self._settings_cache.clear()
def _setupInitialSettings(self):
"""
Not implemented, subclasses must implement this.
"""
raise NotImplementedError()
def _databaseStructureUpdates(self):
"""
Not implemented, subclasses must implement this.
"""
raise NotImplementedError()
def integrity_check(self):
"""
Not implemented, subclasses must implement this.
"""
raise NotImplementedError()
def validate(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
cached = self._getLiveCache("validate")
if cached is not None:
# avoid python3.x memleak
del cached
return
self._setLiveCache("validate", True)
# avoid python3.x memleak
del cached
mytxt = "Repository is corrupted, missing SQL tables!"
if not (self._doesTableExist("extrainfo") and \
self._doesTableExist("baseinfo") and \
self._doesTableExist("keywords")):
raise SystemDatabaseError(mytxt)
# execute checksum
try:
self.checksum()
except (OperationalError, DatabaseError,) as err:
mytxt = "Repository is corrupted, checksum error"
raise SystemDatabaseError("%s: %s" % (mytxt, err,))
@staticmethod
def importRepository(dumpfile, db, data = None):
"""
Not implemented, subclasses must implement this.
"""
raise NotImplementedError()
def exportRepository(self, dumpfile):
"""
Not implemented, subclasses must implement this.
"""
raise NotImplementedError()
def _listAllTables(self):
"""
Not implemented, subclasses must implement this.
"""
raise NotImplementedError()
def mtime(self):
"""
Not implemented, subclasses must implement this.
"""
raise NotImplementedError()
def checksum(self, do_order = False, strict = True,
include_signatures = False,
include_dependencies = False):
"""
Reimplemented from EntropyRepositoryBase.
"""
cache_key = "checksum_%s_%s_True_%s_%s" % (
do_order, strict, include_signatures, include_dependencies)
cached = self._getLiveCache(cache_key)
if cached is not None:
return cached
# avoid memleak with python3.x
del cached
package_id_order = ""
        dependenciesref_order = ""
dependencies_order = ""
if do_order:
package_id_order = "order by idpackage"
dependenciesref_order = "order by iddependency"
dependencies_order = "order by idpackage"
def do_update_hash(m, cursor):
# this could slow things down a lot, so be careful
# NOTE: this function must guarantee platform, architecture,
# interpreter independent results. Cannot use hash() then.
# Even repr() might be risky! But on the other hand, the
# conversion to string cannot take forever.
if const_is_python3():
for record in cursor:
m.update(repr(record).encode("utf-8"))
else:
for record in cursor:
m.update(repr(record))
m = hashlib.sha1()
if not self._doesTableExist("baseinfo"):
m.update(const_convert_to_rawstring("~empty~"))
return m.hexdigest()
if strict:
cur = self._cursor().execute("""
SELECT * FROM baseinfo
%s""" % (package_id_order,))
else:
cur = self._cursor().execute("""
SELECT idpackage, atom, name, version, versiontag, revision,
branch, slot, etpapi, `trigger` FROM baseinfo
%s""" % (package_id_order,))
do_update_hash(m, cur)
if strict:
cur = self._cursor().execute("""
SELECT * FROM extrainfo %s
""" % (package_id_order,))
else:
cur = self._cursor().execute("""
SELECT idpackage, description, homepage, download, size,
digest, datecreation FROM extrainfo %s
""" % (package_id_order,))
do_update_hash(m, cur)
if include_signatures:
            # be optimistic and skip checking the columns via
            # _doesColumnInTableExist, which is really slow
cur = self._cursor().execute("""
SELECT idpackage, sha1, gpg FROM
packagesignatures %s""" % (package_id_order,))
do_update_hash(m, cur)
if include_dependencies:
cur = self._cursor().execute("""
SELECT * from dependenciesreference %s
""" % (dependenciesref_order,))
do_update_hash(m, cur)
cur = self._cursor().execute("""
SELECT * from dependencies %s
""" % (dependencies_order,))
do_update_hash(m, cur)
result = m.hexdigest()
self._setLiveCache(cache_key, result)
return result
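    # Usage sketch for checksum() above: the digest only depends on the table
    # contents and the flags passed in, so it can be used to detect repository
    # changes (the resulting value below is illustrative):
    #
    #   digest = repo.checksum(do_order=True, strict=False,
    #                          include_signatures=True)
    #   # -> 40-character hex SHA1 string, e.g. "da39a3ee5e6b..."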
def storeInstalledPackage(self, package_id, repoid, source = 0):
"""
Reimplemented from EntropySQLRepository.
"""
self._cursor().execute("""
%s INTO installedtable VALUES (?,?,?)
""" % (self._INSERT_OR_REPLACE,),
(package_id, repoid, source,))
def getInstalledPackageRepository(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT repositoryname FROM installedtable
WHERE idpackage = ? LIMIT 1
""", (package_id,))
repo = cur.fetchone()
if repo:
return repo[0]
return None
def getInstalledPackageSource(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
# be optimistic, delay _doesColumnInTableExist as much as
# possible
cur = self._cursor().execute("""
SELECT source FROM installedtable
WHERE idpackage = ? LIMIT 1
""", (package_id,))
source = cur.fetchone()
if source:
return source[0]
return None
def dropInstalledPackageFromStore(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
DELETE FROM installedtable
WHERE idpackage = ?""", (package_id,))
def storeSpmMetadata(self, package_id, blob):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
INSERT INTO xpakdata VALUES (?, ?)
""", (package_id, const_get_buffer()(blob),))
def retrieveSpmMetadata(self, package_id):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT data from xpakdata where idpackage = ? LIMIT 1
""", (package_id,))
mydata = cur.fetchone()
if not mydata:
buf = const_get_buffer()
return buf("")
return mydata[0]
def retrieveBranchMigration(self, to_branch):
"""
Reimplemented from EntropyRepositoryBase.
"""
cur = self._cursor().execute("""
SELECT repository, from_branch, post_migration_md5sum,
post_upgrade_md5sum FROM entropy_branch_migration
WHERE to_branch = ?
""", (to_branch,))
meta = {}
for repo, from_branch, post_migration_md5, post_upgrade_md5 in cur:
obj = meta.setdefault(repo, {})
obj[from_branch] = (post_migration_md5, post_upgrade_md5,)
return meta
def dropContent(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute('DELETE FROM content')
self.dropContentSafety()
def dropContentSafety(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute('DELETE FROM contentsafety')
def dropChangelog(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute('DELETE FROM packagechangelogs')
def dropGpgSignatures(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute('UPDATE packagesignatures set gpg = NULL')
def dropAllIndexes(self):
"""
Not implemented, subclasses must implement this.
"""
raise NotImplementedError()
def createAllIndexes(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
if not self._indexing:
return
self._createTrashedCountersIndex()
self._createMirrorlinksIndex()
self._createContentIndex()
self._createBaseinfoIndex()
self._createKeywordsIndex()
self._createDependenciesIndex()
self._createProvideIndex()
self._createConflictsIndex()
self._createExtrainfoIndex()
self._createNeededLibsIndex()
self._createUseflagsIndex()
self._createLicensedataIndex()
self._createConfigProtectReferenceIndex()
self._createSourcesIndex()
self._createCountersIndex()
self._createPackagesetsIndex()
self._createAutomergefilesIndex()
self._createProvidedLibsIndex()
self._createDesktopMimeIndex()
self._createProvidedMimeIndex()
self._createPackageDownloadsIndex()
def _createTrashedCountersIndex(self):
try:
self._cursor().execute("""
CREATE INDEX trashedcounters_counter
ON trashedcounters ( counter )""")
except OperationalError:
pass
def _createMirrorlinksIndex(self):
try:
self._cursor().execute("""
CREATE INDEX mirrorlinks_mirrorname
ON mirrorlinks ( mirrorname )""")
except OperationalError:
pass
def _createDesktopMimeIndex(self):
try:
self._cursor().execute("""
CREATE INDEX packagedesktopmime_idpackage
ON packagedesktopmime ( idpackage )""")
except OperationalError:
pass
def _createProvidedMimeIndex(self):
try:
self._cursor().execute("""
CREATE INDEX provided_mime_idpackage
ON provided_mime ( idpackage )""")
except OperationalError:
pass
try:
self._cursor().execute("""
CREATE INDEX provided_mime_mimetype
ON provided_mime ( mimetype )""")
except OperationalError:
pass
def _createPackagesetsIndex(self):
try:
self._cursor().execute("""
CREATE INDEX packagesetsindex
ON packagesets ( setname )""")
except OperationalError:
pass
def _createProvidedLibsIndex(self):
try:
self._cursor().execute("""
CREATE INDEX provided_libs_idpackage
ON provided_libs ( idpackage );
""")
except OperationalError:
pass
try:
self._cursor().execute("""
CREATE INDEX provided_libs_lib_elf
ON provided_libs ( library, elfclass );
""")
except OperationalError:
pass
def _createAutomergefilesIndex(self):
try:
self._cursor().execute("""
CREATE INDEX automergefiles_idpackage
ON automergefiles ( idpackage );
""")
except OperationalError:
pass
try:
self._cursor().execute("""
CREATE INDEX automergefiles_file_md5
ON automergefiles ( configfile, md5 );
""")
except OperationalError:
pass
def _createPackageDownloadsIndex(self):
try:
self._cursor().execute("""
CREATE INDEX packagedownloads_idpackage_type
ON packagedownloads ( idpackage, type );
""")
except OperationalError:
pass
def _createNeededLibsIndex(self):
try:
self._cursor().execute("""
CREATE INDEX needed_libs_idpackage ON needed_libs
( idpackage );
""")
except OperationalError:
pass
try:
self._cursor().execute("""
CREATE INDEX needed_libs_soname_elfclass ON needed_libs
( soname, elfclass );
""")
except OperationalError:
pass
def _createUseflagsIndex(self):
try:
self._cursor().execute("""
CREATE INDEX useflagsindex_useflags_idpackage
ON useflags ( idpackage );
""")
except OperationalError:
pass
try:
self._cursor().execute("""
CREATE INDEX useflagsindex_useflags_idflag
ON useflags ( idflag );
""")
except OperationalError:
pass
try:
self._cursor().execute("""
CREATE INDEX useflagsindex_useflags_idflag_idpk
ON useflags ( idflag, idpackage );
""")
except OperationalError:
pass
try:
self._cursor().execute("""
CREATE INDEX useflagsindex
ON useflagsreference ( flagname );
""")
except OperationalError:
pass
def _createContentIndex(self):
try:
self._cursor().execute("""
CREATE INDEX contentindex_couple
ON content ( idpackage );
""")
except OperationalError:
pass
try:
self._cursor().execute("""
CREATE INDEX contentindex_file
ON content ( file );
""")
except OperationalError:
pass
def _createConfigProtectReferenceIndex(self):
try:
self._cursor().execute("""
CREATE INDEX configprotectreferenceindex
ON configprotectreference ( protect )
""")
except OperationalError:
pass
def _createBaseinfoIndex(self):
try:
self._cursor().execute("""
CREATE INDEX baseindex_atom
ON baseinfo ( atom );
""")
except OperationalError:
pass
try:
self._cursor().execute("""
CREATE INDEX baseindex_branch_name
ON baseinfo ( name, branch );
""")
except OperationalError:
pass
try:
self._cursor().execute("""
CREATE INDEX baseindex_branch_name_category
ON baseinfo ( name, category, branch );
""")
except OperationalError:
pass
try:
self._cursor().execute("""
CREATE INDEX baseindex_category
ON baseinfo ( category );
""")
except OperationalError:
pass
def _createLicensedataIndex(self):
try:
self._cursor().execute("""
CREATE INDEX licensedataindex
ON licensedata ( licensename )
""")
except OperationalError:
pass
def _createKeywordsIndex(self):
try:
self._cursor().execute("""
CREATE INDEX keywordsreferenceindex
ON keywordsreference ( keywordname );
""")
except OperationalError:
pass
try:
self._cursor().execute("""
CREATE INDEX keywordsindex_idpackage_idkw
ON keywords ( idpackage, idkeyword );
""")
except OperationalError:
pass
def _createDependenciesIndex(self):
try:
self._cursor().execute("""
CREATE INDEX dependenciesindex_idpk_iddp_type
ON dependencies ( idpackage, iddependency, type );
""")
except OperationalError:
pass
try:
self._cursor().execute("""
CREATE INDEX dependenciesreferenceindex_dependency
ON dependenciesreference ( dependency );
""")
except OperationalError:
pass
def _createCountersIndex(self):
try:
self._cursor().execute("""
CREATE INDEX countersindex_counter_branch_idpk
ON counters ( counter, branch, idpackage );
""")
except OperationalError:
pass
def _createSourcesIndex(self):
try:
self._cursor().execute("""
CREATE INDEX sourcesindex_idpk_idsource
ON sources ( idpackage, idsource );
""")
except OperationalError:
pass
try:
self._cursor().execute("""
CREATE INDEX sourcesindex_idsource
ON sources ( idsource );
""")
except OperationalError:
pass
try:
self._cursor().execute("""
CREATE INDEX sourcesreferenceindex_source
ON sourcesreference ( source );
""")
except OperationalError:
pass
def _createProvideIndex(self):
try:
self._cursor().execute("""
CREATE INDEX provideindex_idpk_atom
ON provide ( idpackage, atom );
""")
except OperationalError:
pass
def _createConflictsIndex(self):
try:
self._cursor().execute("""
CREATE INDEX conflictsindex_idpackage
ON conflicts ( idpackage );
""")
except OperationalError:
pass
def _createExtrainfoIndex(self):
        # No indexes are set here. However, we may need two separate ones,
        # on datecreation and on download, to speed up ORDER BY datecreation
        # and ORDER BY download. Even so, listAllPackageIds(order_by="date")
        # and listAllDownloads(do_sort=True) are not critical functions.
pass
def regenerateSpmUidMapping(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
spm = get_spm(self)
# this is necessary now, counters table should be empty
self._cursor().executescript("""
DROP TABLE IF EXISTS counters_regen;
CREATE TEMPORARY TABLE counters_regen (
counter INTEGER,
idpackage INTEGER,
branch VARCHAR,
PRIMARY KEY(idpackage, branch)
);
""")
insert_data = []
for myid in self.listAllPackageIds():
try:
spm_uid = spm.resolve_package_uid(self, myid)
except SPMError as err:
mytxt = "%s: %s: %s" % (
bold(_("ATTENTION")),
red(_("Spm error occurred")),
str(err),
)
self.output(
mytxt,
importance = 1,
level = "warning"
)
continue
if spm_uid is None:
mytxt = "%s: %s: %s" % (
bold(_("ATTENTION")),
red(_("Spm Unique Identifier not found for")),
self.retrieveAtom(myid),
)
self.output(
mytxt,
importance = 1,
level = "warning"
)
continue
mybranch = self.retrieveBranch(myid)
insert_data.append((spm_uid, myid, mybranch))
self._cursor().executemany("""
%s INTO counters_regen VALUES (?,?,?)
""" % (self._INSERT_OR_REPLACE,), insert_data)
self._cursor().executescript("""
DELETE FROM counters;
INSERT INTO counters (counter, idpackage, branch)
SELECT counter, idpackage, branch FROM counters_regen;
""")
def clearTreeupdatesEntries(self, repository):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
DELETE FROM treeupdates WHERE repository = ?
""", (repository,))
def resetTreeupdatesDigests(self):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
UPDATE treeupdates SET digest = '-1'
""")
def _generateReverseDependenciesMetadata(self):
"""
Reverse dependencies dynamic metadata generation.
"""
checksum = self.checksum()
try:
mtime = repr(self.mtime())
except (OSError, IOError):
mtime = "0.0"
hash_str = "%s|%s|%s|%s|%s" % (
repr(self._db),
repr(etpConst['systemroot']),
repr(self.name),
repr(checksum),
mtime,
)
if const_is_python3():
hash_str = hash_str.encode("utf-8")
sha = hashlib.sha1()
sha.update(hash_str)
cache_key = "__generateReverseDependenciesMetadata2_" + \
sha.hexdigest()
rev_deps_data = self._cacher.pop(cache_key)
if rev_deps_data is not None:
self._setLiveCache("reverseDependenciesMetadata",
rev_deps_data)
return rev_deps_data
dep_data = {}
for iddep, atom in self.listAllDependencies():
if iddep == -1:
continue
if atom.endswith(etpConst['entropyordepquestion']):
or_atoms = atom[:-1].split(etpConst['entropyordepsep'])
for or_atom in or_atoms:
# not safe to use cache here, people messing with multiple
# instances can make this crash
package_id, rc = self.atomMatch(or_atom, useCache = False)
if package_id != -1:
obj = dep_data.setdefault(iddep, set())
obj.add(package_id)
else:
# not safe to use cache here, people messing with multiple
# instances can make this crash
package_id, rc = self.atomMatch(atom, useCache = False)
if package_id != -1:
obj = dep_data.setdefault(iddep, set())
obj.add(package_id)
self._setLiveCache("reverseDependenciesMetadata", dep_data)
try:
self._cacher.save(cache_key, dep_data)
except IOError:
# race condition, ignore
pass
return dep_data
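    # The mapping built by _generateReverseDependenciesMetadata() above has the
    # form {iddependency: set(package_ids matching that dependency)}. It is kept
    # both in the live cache and in the on-disk cacher under a key derived from
    # the repository checksum and mtime, so it gets regenerated whenever the
    # repository content changes.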
def moveSpmUidsToBranch(self, to_branch):
"""
Reimplemented from EntropyRepositoryBase.
"""
self._cursor().execute("""
UPDATE counters SET branch = ?
""", (to_branch,))
self.clearCache()
| gpl-2.0 | -7,141,713,653,637,665,000 | 32.788111 | 80 | 0.544809 | false |
joopert/home-assistant | homeassistant/components/mobile_app/binary_sensor.py | 5 | 1948 | """Binary sensor platform for mobile_app."""
from functools import partial
from homeassistant.components.binary_sensor import BinarySensorDevice
from homeassistant.const import CONF_WEBHOOK_ID
from homeassistant.core import callback
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from .const import (
ATTR_SENSOR_STATE,
ATTR_SENSOR_TYPE_BINARY_SENSOR as ENTITY_TYPE,
ATTR_SENSOR_UNIQUE_ID,
DATA_DEVICES,
DOMAIN,
)
from .entity import MobileAppEntity, sensor_id
async def async_setup_entry(hass, config_entry, async_add_entities):
"""Set up mobile app binary sensor from a config entry."""
entities = list()
webhook_id = config_entry.data[CONF_WEBHOOK_ID]
for config in hass.data[DOMAIN][ENTITY_TYPE].values():
if config[CONF_WEBHOOK_ID] != webhook_id:
continue
device = hass.data[DOMAIN][DATA_DEVICES][webhook_id]
entities.append(MobileAppBinarySensor(config, device, config_entry))
async_add_entities(entities)
@callback
def handle_sensor_registration(webhook_id, data):
if data[CONF_WEBHOOK_ID] != webhook_id:
return
unique_id = sensor_id(data[CONF_WEBHOOK_ID], data[ATTR_SENSOR_UNIQUE_ID])
entity = hass.data[DOMAIN][ENTITY_TYPE][unique_id]
if "added" in entity:
return
entity["added"] = True
device = hass.data[DOMAIN][DATA_DEVICES][data[CONF_WEBHOOK_ID]]
async_add_entities([MobileAppBinarySensor(data, device, config_entry)])
async_dispatcher_connect(
hass,
f"{DOMAIN}_{ENTITY_TYPE}_register",
partial(handle_sensor_registration, webhook_id),
)
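# Note: async_setup_entry() above adds the already-known sensors and then
# listens on the f"{DOMAIN}_{ENTITY_TYPE}_register" dispatcher signal, so
# sensors registered later through the webhook are added dynamically, once
# per unique sensor id.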
class MobileAppBinarySensor(MobileAppEntity, BinarySensorDevice):
"""Representation of an mobile app binary sensor."""
@property
def is_on(self):
"""Return the state of the binary sensor."""
return self._config[ATTR_SENSOR_STATE]
| apache-2.0 | -4,792,789,113,086,191,000 | 28.515152 | 81 | 0.684805 | false |
iychoi/syndicate | old/ms/django_volume/forms.py | 2 | 3934 | from django_lib.forms import LONGEST_CHAR_FIELD, LONGEST_PASS_FIELD, LONGEST_DESC, ReadOnlyWidget
from django import forms
BLOCKSIZE_MULTIPLIER = 1024 # Kilobytes
BLOCKSIZE_CHOICES = (
(10*BLOCKSIZE_MULTIPLIER, "10 kB"),
(20*BLOCKSIZE_MULTIPLIER, 20),
(40*BLOCKSIZE_MULTIPLIER, 40),
(80*BLOCKSIZE_MULTIPLIER, 80),
(160*BLOCKSIZE_MULTIPLIER, 160),
(320*BLOCKSIZE_MULTIPLIER, 320),
(640*BLOCKSIZE_MULTIPLIER, 640),
(1024*BLOCKSIZE_MULTIPLIER,"1 MB"),
)
class CreateVolume(forms.Form):
name = forms.CharField(label="Volume name",
initial="My Volume",
max_length=LONGEST_CHAR_FIELD,
help_text="Your volume's name cannot be changed later.")
private = forms.BooleanField(label="Private",
initial=False,
required=False)
blocksize = forms.ChoiceField(label="Desired size of data blocks",
choices=BLOCKSIZE_CHOICES,
help_text="in kilobytes")
description = forms.CharField(widget=forms.Textarea,
label="Volume description",
initial="This is my new amazing volume.",
max_length=LONGEST_DESC,
help_text=str(LONGEST_DESC) + " characters maximum")
password = forms.CharField(label="Volume password",
max_length=LONGEST_PASS_FIELD,
widget=forms.PasswordInput)
class EditVolume(forms.Form):
private = forms.BooleanField(label="Private",
required=False)
archive = forms.BooleanField(label="Archive",
required=False)
description = forms.CharField( widget=forms.Textarea,
label="Volume description",
max_length=LONGEST_DESC,
help_text=str(LONGEST_DESC) + " characters maximum")
class ChangeVolumeD(forms.Form):
description = forms.CharField(widget=forms.Textarea,
required=False,
label="",
initial="This is my new amazing volume.",
max_length=LONGEST_DESC,
help_text=str(LONGEST_DESC) + " characters maximum")
class DeleteVolume(forms.Form):
confirm_delete = forms.BooleanField(required=True,
label="Yes, I understand that this action is permament and my files will be lost.")
password = forms.CharField(label="Volume password",
max_length=LONGEST_PASS_FIELD,
widget=forms.PasswordInput)
class Gateway(forms.Form):
g_name = forms.CharField(label="Gateway name",
widget=ReadOnlyWidget(),
required=False,
max_length=LONGEST_CHAR_FIELD)
remove = forms.BooleanField(label="Remove",
required=False)
class Permissions(forms.Form):
user = forms.EmailField(label="User email",
widget=ReadOnlyWidget(),
required=False)
read = forms.BooleanField(label="Read",
required=False)
write = forms.BooleanField(label="Write",
required=False)
class AddPermissions(forms.Form):
user = forms.EmailField(label="User email")
read = forms.BooleanField(label="Read",
required=False)
write = forms.BooleanField(label="Write",
required=False) | apache-2.0 | 6,879,888,104,644,555,000 | 35.100917 | 123 | 0.514997 | false |
SambitAcharya/coala | coalib/output/ConfWriter.py | 2 | 4014 | from itertools import chain
from pyprint.ClosableObject import ClosableObject
from coalib.settings.Section import Section
from coalib.parsing.StringProcessing import escape
class ConfWriter(ClosableObject):
def __init__(self,
file_name,
key_value_delimiters=('=',),
comment_seperators=('#',),
key_delimiters=(',', ' '),
section_name_surroundings=None,
section_override_delimiters=(".",),
unsavable_keys=("save",)):
section_name_surroundings = section_name_surroundings or {"[": "]"}
ClosableObject.__init__(self)
self.__file_name = file_name
self.__file = open(self.__file_name, "w")
self.__key_value_delimiters = key_value_delimiters
self.__comment_seperators = comment_seperators
self.__key_delimiters = key_delimiters
self.__section_name_surroundings = section_name_surroundings
self.__section_override_delimiters = section_override_delimiters
self.__unsavable_keys = unsavable_keys
self.__wrote_newline = True
self.__closed = False
self.__key_delimiter = self.__key_delimiters[0]
self.__key_value_delimiter = self.__key_value_delimiters[0]
(self.__section_name_surrounding_beg,
self.__section_name_surrounding_end) = (
tuple(self.__section_name_surroundings.items())[0])
def _close(self):
self.__file.close()
def write_sections(self, sections):
assert not self.__closed
self.__wrote_newline = True
for section in sections:
self.write_section(sections[section])
def write_section(self, section):
assert not self.__closed
if not isinstance(section, Section):
raise TypeError
self.__write_section_name(section.name)
keys = []
val = None
section_iter = section.__iter__(ignore_defaults=True)
try:
while True:
setting = section[next(section_iter)]
if (str(setting) == val and
not self.is_comment(setting.key) and
(
(setting.key not in self.__unsavable_keys) or
(not setting.from_cli))):
keys.append(setting.key)
elif ((setting.key not in self.__unsavable_keys) or
(not setting.from_cli)):
self.__write_key_val(keys, val)
keys = [setting.key]
val = str(setting)
except StopIteration:
self.__write_key_val(keys, val)
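    # write_section() above groups consecutive settings that share the same
    # value onto a single line, so a section holding foo = 1 and bar = 1 is
    # emitted as (illustrative output, using the default delimiters):
    #
    #   [section]
    #   foo, bar = 1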
def __write_section_name(self, name):
assert not self.__closed
if not self.__wrote_newline:
self.__file.write("\n")
self.__file.write(self.__section_name_surrounding_beg + name +
self.__section_name_surrounding_end + '\n')
self.__wrote_newline = False
def __write_key_val(self, keys, val):
assert not self.__closed
if keys == []:
return
if all(self.is_comment(key) for key in keys):
self.__file.write(val + "\n")
self.__wrote_newline = val == ""
return
# Add escape characters as appropriate
keys = [escape(key, chain(['\\'],
self.__key_value_delimiters,
self.__comment_seperators,
self.__key_delimiters,
self.__section_override_delimiters))
for key in keys]
val = escape(val, chain(['\\'], self.__comment_seperators))
self.__file.write((self.__key_delimiter + " ").join(keys) + " " +
self.__key_value_delimiter + " " + val + "\n")
self.__wrote_newline = False
@staticmethod
def is_comment(key):
return key.lower().startswith("comment")
| agpl-3.0 | 7,641,288,167,429,958,000 | 35.162162 | 75 | 0.532885 | false |
dkdeconti/PAINS-train | training_methods/classifier/basic_stats_plotting.py | 1 | 3480 | __author__ = 'ddeconti'
import FileHandler
import pickle
import sys
from bokeh.palettes import Blues9
from bokeh.plotting import output_file, figure, show, VBox, HBox
from bokeh.charts import Histogram, HeatMap
from rdkit import DataStructs
def similarity_compare(fp):
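    # Build the full pairwise Tanimoto matrix; every row is created as an
    # independent list so that the assignment below only updates one cell.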
    tanimoto_matrix = [[1] * len(fp) for _ in range(len(fp))]
for i in xrange(len(fp)):
for j in xrange(len(fp)):
if i == j:
continue
sim = DataStructs.FingerprintSimilarity(fp[i],
fp[j])
tanimoto_matrix[i][j] = sim
return tanimoto_matrix
def get_similarities_list(m):
sim_list = []
for i in xrange(len(m)):
if i >= len(m) - 1:
continue
for j in xrange(i + 1, len(m)):
sim_list.append(m[i][j])
return sim_list
def plot_histogram(pains, control):
'''
distributions = OrderedDict(PAINs=pains, Control=control)
df = pandas.DataFrame(distributions)
distributions = df.to_dict()
for k, v in distributions.items():
distributions[k] = v.values()
'''
df = {"PAINs": pains, "Control": control}
output_file("histograms.html")
hist = Histogram(df, bins=20, legend=True)
return hist
def plot_heatmap(all):
p = HeatMap(all, palette=Blues9)
return p
def main(sa):
sln_filename = sa[0] # PAINs
sdf_filename = sa[1] # Control set
try:
sln_file = pickle.load(open("sln_file.p", "rb"))
except:
sln_file = FileHandler.SlnFile(sln_filename)
pickle.dump(sln_file, open("sln_file.p", "wb"))
try:
sdf_file = pickle.load(open("sdf_file.p", "rb"))
except:
sdf_file = FileHandler.SdfFile(sdf_filename)
pickle.dump(sdf_file, open("sdf_file.p", "wb"))
try:
pains_fp = pickle.load(open("pains_fp.p", "rb"))
control_fp = pickle.load(open("control_fp.p", "rb"))
except:
pains_fp = sln_file.get_fingerprint_list()
control_fp = sdf_file.get_fingerprint_list()
pickle.dump(pains_fp, open("pains_fp.p", "wb"))
pickle.dump(control_fp, open("control_fp.p", "wb"))
sys.stdout.write("Tanimoto similarity of PAINs.\n")
sys.stdout.flush()
try:
pains_tanimoto = pickle.load(open("pains_tanimoto.p", "rb"))
except:
pains_tanimoto = similarity_compare(pains_fp)
pickle.dump(pains_tanimoto, open("pains_tanimoto.p", "wb"))
sys.stdout.write("Tanimoto similarity of Control.\n")
sys.stdout.flush()
try:
control_tanimoto = pickle.load(open("control_tanimoto.p", "rb"))
except:
control_tanimoto = similarity_compare(control_fp)
pickle.dump(control_tanimoto, open("control_tanimoto.p", "wb"))
sys.stdout.write("Tanimoto similarity of both.\n")
sys.stdout.flush()
try:
all_tanimoto = pickle.load(open("all_tanimoto.p", "rb"))
except:
all_tanimoto = similarity_compare(pains_fp + control_fp)
pickle.dump(all_tanimoto, open("all_tanimoto.p", "wb"))
sys.stdout.write("Plotting histograms.\n")
sys.stdout.flush()
hist = plot_histogram(get_similarities_list(pains_tanimoto),
get_similarities_list(control_tanimoto))
sys.stdout.write("Plotting heatmap\n")
sys.stdout.flush()
heatmap = plot_heatmap(all_tanimoto)
output_file("Pains_vs_Control_plots.html")
VBox(hist, heatmap)
show()
if __name__ == "__main__":
main(sys.argv[1:]) | mit | -36,656,988,482,466,296 | 31.231481 | 72 | 0.606034 | false |
rmarkello/pyls | pyls/types/behavioral.py | 1 | 11498 | # -*- coding: utf-8 -*-
import numpy as np
from sklearn.metrics import r2_score
from ..base import BasePLS, gen_splits
from ..structures import _pls_input_docs
from .. import compute, utils
class BehavioralPLS(BasePLS):
def __init__(self, X, Y, *, groups=None, n_cond=1, n_perm=5000,
n_boot=5000, n_split=100, test_size=0.25, test_split=100,
covariance=False, rotate=True, ci=95, permsamples=None,
bootsamples=None, seed=None, verbose=True, n_proc=None,
**kwargs):
super().__init__(X=np.asarray(X), Y=np.asarray(Y), groups=groups,
n_cond=n_cond, n_perm=n_perm, n_boot=n_boot,
n_split=n_split, test_size=test_size,
test_split=test_split, covariance=covariance,
rotate=rotate, ci=ci, permsamples=permsamples,
bootsamples=bootsamples, seed=seed, verbose=verbose,
n_proc=n_proc, **kwargs)
self.results = self.run_pls(self.inputs.X, self.inputs.Y)
def gen_covcorr(self, X, Y, groups, **kwargs):
"""
Computes cross-covariance matrix from `X` and `Y`
Parameters
----------
X : (S, B) array_like
Input data matrix, where `S` is observations and `B` is features
Y : (S, T) array_like
Input data matrix, where `S` is observations and `T` is features
groups : (S, J) array_like
Dummy coded input array, where `S` is observations and `J`
corresponds to the number of different groups x conditions. A value
of 1 indicates that an observation belongs to a specific group or
condition.
Returns
-------
crosscov : (J*T, B) np.ndarray
Cross-covariance matrix
"""
return np.row_stack([
compute.xcorr(X[grp], Y[grp], covariance=self.inputs.covariance)
for grp in groups.T.astype(bool)
])
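    # Illustrative shape check for gen_covcorr() above (sizes are made up):
    # with X of shape (S, B), Y of shape (S, T) and a dummy-coded groups matrix
    # of shape (S, J), the stacked cross-covariance matrix has shape (J * T, B),
    # one (T, B) block per group/condition.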
def gen_distrib(self, X, Y, original, groups, *args, **kwargs):
"""
Finds behavioral correlations for single bootstrap resample
Parameters
----------
X : (S, B) array_like
Input data matrix, where `S` is observations and `B` is features
Y : (S, T) array_like
Input data matrix, where `S` is observations and `T` is features
original : (B, L) array_like
Left singular vectors from bootstrap
groups : (S, J) array_like
Dummy coded input array, where `S` is observations and `J`
corresponds to the number of different groups x conditions. A value
of 1 indicates that an observation belongs to a specific group or
condition.
Returns
-------
distrib : (T, L)
Behavioral correlations for single bootstrap resample
"""
tusc = X @ compute.normalize(original)
return self.gen_covcorr(tusc, Y, groups=groups)
def crossval(self, X, Y, groups=None, seed=None):
"""
Performs cross-validation of SVD of `X` and `Y`
Parameters
----------
X : (S, B) array_like
Input data matrix, where `S` is observations and `B` is features
Y : (S, T) array_like
Input data matrix, where `S` is observations and `T` is features
seed : {int, :obj:`numpy.random.RandomState`, None}, optional
Seed for random number generation. Default: None
Returns
-------
r_scores : (C,) np.ndarray
R (Pearson correlation) scores across train-test splits
r2_scores : (C,) np.ndarray
R^2 (coefficient of determination) scores across train-test splits
"""
if groups is None:
groups = utils.dummy_code(self.inputs.groups, self.inputs.n_cond)
# use gen_splits to handle grouping/condition vars in train/test split
splits = gen_splits(self.inputs.groups,
self.inputs.n_cond,
self.inputs.test_split,
seed=seed,
test_size=self.inputs.test_size)
gen = utils.trange(self.inputs.test_split, verbose=self.inputs.verbose,
desc='Running cross-validation')
with utils.get_par_func(self.inputs.n_proc,
self.__class__._single_crossval) as (par,
func):
out = par(
func(self, X=X, Y=Y, inds=splits[:, i], groups=groups, seed=i)
for i in gen
)
r_scores, r2_scores = [np.stack(o, axis=-1) for o in zip(*out)]
return r_scores, r2_scores
def _single_crossval(self, X, Y, inds, groups=None, seed=None):
"""
Generates single cross-validated r and r^2 score
Parameters
----------
X : (S, B) array_like
Input data matrix, where `S` is observations and `B` is features
Y : (S, T) array_like
Input data matrix, where `S` is observations and `T` is features
inds : (S,) array_like
Train-test split, where train = True and test = False
groups : (S, J) array_like, optional
Dummy coded input array, where `S` is observations and `J`
corresponds to the number of different groups x conditions. A value
of 1 indicates that an observation belongs to a specific group or
condition. If not specified will be generated on-the-fly. Default:
None
seed : {int, :obj:`numpy.random.RandomState`, None}, optional
Seed for random number generation. Default: None
"""
if groups is None:
groups = utils.dummy_code(self.inputs.groups, self.inputs.n_cond)
X_train, Y_train, dummy_train = X[inds], Y[inds], groups[inds]
X_test, Y_test, dummy_test = X[~inds], Y[~inds], groups[~inds]
# perform initial decomposition on train set
U, d, V = self.svd(X_train, Y_train, groups=dummy_train, seed=seed)
# rescale the test set based on the training set
Y_pred = []
for n, V_spl in enumerate(np.split(V, groups.shape[-1])):
tr_grp = dummy_train[:, n].astype(bool)
te_grp = dummy_test[:, n].astype(bool)
rescaled = compute.rescale_test(X_train[tr_grp], X_test[te_grp],
Y_train[tr_grp], U, V_spl)
Y_pred.append(rescaled)
Y_pred = np.row_stack(Y_pred)
# calculate r & r-squared from comparison of rescaled test & true values
r_scores = compute.efficient_corr(Y_test, Y_pred)
r2_scores = r2_score(Y_test, Y_pred, multioutput='raw_values')
return r_scores, r2_scores
def run_pls(self, X, Y):
"""
Runs PLS analysis
Parameters
----------
X : (S, B) array_like
Input data matrix, where `S` is observations and `B` is features
Y : (S, T) array_like
Input data matrix, where `S` is observations and `T` is features
"""
res = super().run_pls(X, Y)
# mechanism for splitting outputs along group / condition indices
grps = np.repeat(res['inputs']['groups'], res['inputs']['n_cond'])
res['y_scores'] = np.vstack([
y @ v for (y, v) in zip(np.split(Y, np.cumsum(grps)[:-1]),
np.split(res['y_weights'], len(grps)))
])
# get lvcorrs
groups = utils.dummy_code(self.inputs.groups, self.inputs.n_cond)
res['y_loadings'] = self.gen_covcorr(res['x_scores'], Y, groups)
if self.inputs.n_boot > 0:
# compute bootstraps
distrib, u_sum, u_square = self.bootstrap(X, Y, self.rs)
# add original scaled singular vectors back in
bs = res['x_weights'] @ res['singvals']
u_sum, u_square = u_sum + bs, u_square + (bs ** 2)
# calculate bootstrap ratios and confidence intervals
bsrs, uboot_se = compute.boot_rel(bs, u_sum, u_square,
self.inputs.n_boot + 1)
corrci = np.stack(compute.boot_ci(distrib, ci=self.inputs.ci), -1)
# update results.boot_result dictionary
res['bootres'].update(dict(x_weights_normed=bsrs,
x_weights_stderr=uboot_se,
y_loadings=res['y_loadings'].copy(),
y_loadings_boot=distrib,
y_loadings_ci=corrci,
bootsamples=self.bootsamp))
# compute cross-validated prediction-based metrics
if self.inputs.test_split is not None and self.inputs.test_size > 0:
r, r2 = self.crossval(X, Y, groups=self.dummy, seed=self.rs)
res['cvres'].update(dict(pearson_r=r, r_squared=r2))
# get rid of the stupid diagonal matrix
res['varexp'] = np.diag(compute.varexp(res['singvals']))
res['singvals'] = np.diag(res['singvals'])
return res
# let's make it a function
def behavioral_pls(X, Y, *, groups=None, n_cond=1, n_perm=5000, n_boot=5000,
n_split=0, test_size=0.25, test_split=100,
covariance=False, rotate=True, ci=95, permsamples=None,
bootsamples=None, seed=None, verbose=True, n_proc=None,
**kwargs):
pls = BehavioralPLS(X=X, Y=Y, groups=groups, n_cond=n_cond,
n_perm=n_perm, n_boot=n_boot, n_split=n_split,
test_size=test_size, test_split=test_split,
covariance=covariance, rotate=rotate, ci=ci,
permsamples=permsamples, bootsamples=bootsamples,
seed=seed, verbose=verbose, n_proc=n_proc, **kwargs)
return pls.results
behavioral_pls.__doc__ = r"""
Performs behavioral PLS on `X` and `Y`.
Behavioral PLS is a multivariate statistical approach that relates two sets
of variables together. Traditionally, one of these arrays
represents a set of brain features (e.g., functional connectivity
estimates) and the other represents a set of behavioral variables; however,
these arrays can be any two sets of features belonging to a common group of
samples.
Using a singular value decomposition, behavioral PLS attempts to find
linear combinations of features from the provided arrays that maximally
covary with each other. The decomposition is performed on the cross-
covariance matrix :math:`R`, where :math:`R = Y^{{T}} \times X`, which
represents the covariation of all the input features across samples.
Parameters
----------
{input_matrix}
Y : (S, T) array_like
Input data matrix, where `S` is samples and `T` is features
{groups}
{conditions}
{stat_test}
{split_half}
{cross_val}
{covariance}
{rotate}
{ci}
{resamples}
{proc_options}
Returns
-------
{pls_results}
Notes
-----
{decomposition_narrative}
References
----------
{references}
Misic, B., Betzel, R. F., de Reus, M. A., van den Heuvel, M.P.,
Berman, M. G., McIntosh, A. R., & Sporns, O. (2016). Network level
structure-function relationships in human neocortex. Cerebral Cortex,
26, 3285-96.
""".format(**_pls_input_docs)
| gpl-2.0 | 620,810,701,254,727,300 | 38.108844 | 79 | 0.566098 | false |
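# A minimal NumPy sketch of the decomposition described in the behavioral_pls docstring
# above (R = Y^T @ X followed by an SVD). Shapes and names here are illustrative
# assumptions; the grouping, permutation, bootstrap and cross-validation machinery of
# the real implementation is omitted, and columns would typically be z-scored before R
# is computed.
import numpy as np

S, B, T = 20, 10, 4                     # samples, X features, Y (behavioral) features
X = np.random.rand(S, B)
Y = np.random.rand(S, T)
R = Y.T @ X                             # (T, B) cross-covariance-style matrix
U, d, Vt = np.linalg.svd(R, full_matrices=False)
x_scores = X @ Vt.T                     # sample projections on the X singular vectors
y_scores = Y @ U                        # sample projections on the Y singular vectors
varexp = (d ** 2) / np.sum(d ** 2)      # variance explained by each latent variable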
babyliynfg/cross | tools/project-creator/Python2.6.6/Lib/lib2to3/refactor.py | 2 | 24840 | # Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Refactoring framework.
Used as a main program, this can refactor any number of files and/or
recursively descend down directories. Imported as a module, this
provides infrastructure to write your own refactoring tool.
"""
from __future__ import with_statement
__author__ = "Guido van Rossum <[email protected]>"
# Python imports
import os
import sys
import logging
import operator
import collections
import StringIO
from itertools import chain
# Local imports
from .pgen2 import driver, tokenize, token
from . import pytree, pygram
def get_all_fix_names(fixer_pkg, remove_prefix=True):
"""Return a sorted list of all available fix names in the given package."""
pkg = __import__(fixer_pkg, [], [], ["*"])
fixer_dir = os.path.dirname(pkg.__file__)
fix_names = []
for name in sorted(os.listdir(fixer_dir)):
if name.startswith("fix_") and name.endswith(".py"):
if remove_prefix:
name = name[4:]
fix_names.append(name[:-3])
return fix_names
class _EveryNode(Exception):
pass
def _get_head_types(pat):
""" Accepts a pytree Pattern Node and returns a set
of the pattern types which will match first. """
if isinstance(pat, (pytree.NodePattern, pytree.LeafPattern)):
# NodePatterns must either have no type and no content
# or a type and content -- so they don't get any farther
# Always return leaves
if pat.type is None:
raise _EveryNode
return set([pat.type])
if isinstance(pat, pytree.NegatedPattern):
if pat.content:
return _get_head_types(pat.content)
raise _EveryNode # Negated Patterns don't have a type
if isinstance(pat, pytree.WildcardPattern):
# Recurse on each node in content
r = set()
for p in pat.content:
for x in p:
r.update(_get_head_types(x))
return r
raise Exception("Oh no! I don't understand pattern %s" %(pat))
def _get_headnode_dict(fixer_list):
""" Accepts a list of fixers and returns a dictionary
of head node type --> fixer list. """
head_nodes = collections.defaultdict(list)
every = []
for fixer in fixer_list:
if fixer.pattern:
try:
heads = _get_head_types(fixer.pattern)
except _EveryNode:
every.append(fixer)
else:
for node_type in heads:
head_nodes[node_type].append(fixer)
else:
if fixer._accept_type is not None:
head_nodes[fixer._accept_type].append(fixer)
else:
every.append(fixer)
for node_type in chain(pygram.python_grammar.symbol2number.itervalues(),
pygram.python_grammar.tokens):
head_nodes[node_type].extend(every)
return dict(head_nodes)
def get_fixers_from_package(pkg_name):
"""
Return the fully qualified names for fixers in the package pkg_name.
"""
return [pkg_name + "." + fix_name
for fix_name in get_all_fix_names(pkg_name, False)]
def _identity(obj):
return obj
if sys.version_info < (3, 0):
import codecs
_open_with_encoding = codecs.open
# codecs.open doesn't translate newlines sadly.
def _from_system_newlines(input):
return input.replace(u"\r\n", u"\n")
def _to_system_newlines(input):
if os.linesep != "\n":
return input.replace(u"\n", os.linesep)
else:
return input
else:
_open_with_encoding = open
_from_system_newlines = _identity
_to_system_newlines = _identity
def _detect_future_features(source):
have_docstring = False
gen = tokenize.generate_tokens(StringIO.StringIO(source).readline)
def advance():
tok = gen.next()
return tok[0], tok[1]
ignore = frozenset((token.NEWLINE, tokenize.NL, token.COMMENT))
features = set()
try:
while True:
tp, value = advance()
if tp in ignore:
continue
elif tp == token.STRING:
if have_docstring:
break
have_docstring = True
elif tp == token.NAME and value == u"from":
tp, value = advance()
if tp != token.NAME or value != u"__future__":
break
tp, value = advance()
if tp != token.NAME or value != u"import":
break
tp, value = advance()
if tp == token.OP and value == u"(":
tp, value = advance()
while tp == token.NAME:
features.add(value)
tp, value = advance()
if tp != token.OP or value != u",":
break
tp, value = advance()
else:
break
except StopIteration:
pass
return frozenset(features)
class FixerError(Exception):
"""A fixer could not be loaded."""
class RefactoringTool(object):
_default_options = {"print_function" : False}
CLASS_PREFIX = "Fix" # The prefix for fixer classes
FILE_PREFIX = "fix_" # The prefix for modules with a fixer within
def __init__(self, fixer_names, options=None, explicit=None):
"""Initializer.
Args:
fixer_names: a list of fixers to import
options: a dict with configuration.
explicit: a list of fixers to run even if they are marked as explicit (opt-in only).
"""
self.fixers = fixer_names
self.explicit = explicit or []
self.options = self._default_options.copy()
if options is not None:
self.options.update(options)
if self.options["print_function"]:
self.grammar = pygram.python_grammar_no_print_statement
else:
self.grammar = pygram.python_grammar
self.errors = []
self.logger = logging.getLogger("RefactoringTool")
self.fixer_log = []
self.wrote = False
self.driver = driver.Driver(self.grammar,
convert=pytree.convert,
logger=self.logger)
self.pre_order, self.post_order = self.get_fixers()
self.pre_order_heads = _get_headnode_dict(self.pre_order)
self.post_order_heads = _get_headnode_dict(self.post_order)
self.files = [] # List of files that were or should be modified
def get_fixers(self):
"""Inspects the options to load the requested patterns and handlers.
Returns:
(pre_order, post_order), where pre_order is the list of fixers that
want a pre-order AST traversal, and post_order is the list that want
post-order traversal.
"""
pre_order_fixers = []
post_order_fixers = []
for fix_mod_path in self.fixers:
mod = __import__(fix_mod_path, {}, {}, ["*"])
fix_name = fix_mod_path.rsplit(".", 1)[-1]
if fix_name.startswith(self.FILE_PREFIX):
fix_name = fix_name[len(self.FILE_PREFIX):]
parts = fix_name.split("_")
class_name = self.CLASS_PREFIX + "".join([p.title() for p in parts])
try:
fix_class = getattr(mod, class_name)
except AttributeError:
raise FixerError("Can't find %s.%s" % (fix_name, class_name))
fixer = fix_class(self.options, self.fixer_log)
if fixer.explicit and self.explicit is not True and \
fix_mod_path not in self.explicit:
self.log_message("Skipping implicit fixer: %s", fix_name)
continue
self.log_debug("Adding transformation: %s", fix_name)
if fixer.order == "pre":
pre_order_fixers.append(fixer)
elif fixer.order == "post":
post_order_fixers.append(fixer)
else:
raise FixerError("Illegal fixer order: %r" % fixer.order)
key_func = operator.attrgetter("run_order")
pre_order_fixers.sort(key=key_func)
post_order_fixers.sort(key=key_func)
return (pre_order_fixers, post_order_fixers)
def log_error(self, msg, *args, **kwds):
"""Called when an error occurs."""
raise
def log_message(self, msg, *args):
"""Hook to log a message."""
if args:
msg = msg % args
self.logger.info(msg)
def log_debug(self, msg, *args):
if args:
msg = msg % args
self.logger.debug(msg)
def print_output(self, old_text, new_text, filename, equal):
"""Called with the old version, new version, and filename of a
refactored file."""
pass
def refactor(self, items, write=False, doctests_only=False):
"""Refactor a list of files and directories."""
for dir_or_file in items:
if os.path.isdir(dir_or_file):
self.refactor_dir(dir_or_file, write, doctests_only)
else:
self.refactor_file(dir_or_file, write, doctests_only)
def refactor_dir(self, dir_name, write=False, doctests_only=False):
"""Descends down a directory and refactor every Python file found.
Python files are assumed to have a .py extension.
Files and subdirectories starting with '.' are skipped.
"""
for dirpath, dirnames, filenames in os.walk(dir_name):
self.log_debug("Descending into %s", dirpath)
dirnames.sort()
filenames.sort()
for name in filenames:
if not name.startswith(".") and \
os.path.splitext(name)[1].endswith("py"):
fullname = os.path.join(dirpath, name)
self.refactor_file(fullname, write, doctests_only)
# Modify dirnames in-place to remove subdirs with leading dots
dirnames[:] = [dn for dn in dirnames if not dn.startswith(".")]
def _read_python_source(self, filename):
"""
Do our best to decode a Python source file correctly.
"""
try:
f = open(filename, "rb")
except IOError, err:
self.log_error("Can't open %s: %s", filename, err)
return None, None
try:
encoding = tokenize.detect_encoding(f.readline)[0]
finally:
f.close()
with _open_with_encoding(filename, "r", encoding=encoding) as f:
return _from_system_newlines(f.read()), encoding
def refactor_file(self, filename, write=False, doctests_only=False):
"""Refactors a file."""
input, encoding = self._read_python_source(filename)
if input is None:
# Reading the file failed.
return
input += u"\n" # Silence certain parse errors
if doctests_only:
self.log_debug("Refactoring doctests in %s", filename)
output = self.refactor_docstring(input, filename)
if output != input:
self.processed_file(output, filename, input, write, encoding)
else:
self.log_debug("No doctest changes in %s", filename)
else:
tree = self.refactor_string(input, filename)
if tree and tree.was_changed:
# The [:-1] is to take off the \n we added earlier
self.processed_file(unicode(tree)[:-1], filename,
write=write, encoding=encoding)
else:
self.log_debug("No changes in %s", filename)
def refactor_string(self, data, name):
"""Refactor a given input string.
Args:
data: a string holding the code to be refactored.
name: a human-readable name for use in error/log messages.
Returns:
An AST corresponding to the refactored input stream; None if
there were errors during the parse.
"""
features = _detect_future_features(data)
if "print_function" in features:
self.driver.grammar = pygram.python_grammar_no_print_statement
try:
tree = self.driver.parse_string(data)
except Exception, err:
self.log_error("Can't parse %s: %s: %s",
name, err.__class__.__name__, err)
return
finally:
self.driver.grammar = self.grammar
tree.future_features = features
self.log_debug("Refactoring %s", name)
self.refactor_tree(tree, name)
return tree
def refactor_stdin(self, doctests_only=False):
input = sys.stdin.read()
if doctests_only:
self.log_debug("Refactoring doctests in stdin")
output = self.refactor_docstring(input, "<stdin>")
if output != input:
self.processed_file(output, "<stdin>", input)
else:
self.log_debug("No doctest changes in stdin")
else:
tree = self.refactor_string(input, "<stdin>")
if tree and tree.was_changed:
self.processed_file(unicode(tree), "<stdin>", input)
else:
self.log_debug("No changes in stdin")
def refactor_tree(self, tree, name):
"""Refactors a parse tree (modifying the tree in place).
Args:
tree: a pytree.Node instance representing the root of the tree
to be refactored.
name: a human-readable name for this tree.
Returns:
True if the tree was modified, False otherwise.
"""
for fixer in chain(self.pre_order, self.post_order):
fixer.start_tree(tree, name)
self.traverse_by(self.pre_order_heads, tree.pre_order())
self.traverse_by(self.post_order_heads, tree.post_order())
for fixer in chain(self.pre_order, self.post_order):
fixer.finish_tree(tree, name)
return tree.was_changed
def traverse_by(self, fixers, traversal):
"""Traverse an AST, applying a set of fixers to each node.
This is a helper method for refactor_tree().
Args:
fixers: a list of fixer instances.
traversal: a generator that yields AST nodes.
Returns:
None
"""
if not fixers:
return
for node in traversal:
for fixer in fixers[node.type]:
results = fixer.match(node)
if results:
new = fixer.transform(node, results)
if new is not None:
node.replace(new)
node = new
def processed_file(self, new_text, filename, old_text=None, write=False,
encoding=None):
"""
Called when a file has been refactored, and there are changes.
"""
self.files.append(filename)
if old_text is None:
old_text = self._read_python_source(filename)[0]
if old_text is None:
return
equal = old_text == new_text
self.print_output(old_text, new_text, filename, equal)
if equal:
self.log_debug("No changes to %s", filename)
return
if write:
self.write_file(new_text, filename, old_text, encoding)
else:
self.log_debug("Not writing changes to %s", filename)
def write_file(self, new_text, filename, old_text, encoding=None):
"""Writes a string to a file.
It first shows a unified diff between the old text and the new text, and
then rewrites the file; the latter is only done if the write option is
set.
"""
try:
f = _open_with_encoding(filename, "w", encoding=encoding)
except os.error, err:
self.log_error("Can't create %s: %s", filename, err)
return
try:
f.write(_to_system_newlines(new_text))
except os.error, err:
self.log_error("Can't write %s: %s", filename, err)
finally:
f.close()
self.log_debug("Wrote changes to %s", filename)
self.wrote = True
PS1 = ">>> "
PS2 = "... "
def refactor_docstring(self, input, filename):
"""Refactors a docstring, looking for doctests.
This returns a modified version of the input string. It looks
for doctests, which start with a ">>>" prompt, and may be
continued with "..." prompts, as long as the "..." is indented
the same as the ">>>".
(Unfortunately we can't use the doctest module's parser,
since, like most parsers, it is not geared towards preserving
the original source.)
"""
result = []
block = None
block_lineno = None
indent = None
lineno = 0
for line in input.splitlines(True):
lineno += 1
if line.lstrip().startswith(self.PS1):
if block is not None:
result.extend(self.refactor_doctest(block, block_lineno,
indent, filename))
block_lineno = lineno
block = [line]
i = line.find(self.PS1)
indent = line[:i]
elif (indent is not None and
(line.startswith(indent + self.PS2) or
line == indent + self.PS2.rstrip() + u"\n")):
block.append(line)
else:
if block is not None:
result.extend(self.refactor_doctest(block, block_lineno,
indent, filename))
block = None
indent = None
result.append(line)
if block is not None:
result.extend(self.refactor_doctest(block, block_lineno,
indent, filename))
return u"".join(result)
def refactor_doctest(self, block, lineno, indent, filename):
"""Refactors one doctest.
A doctest is given as a block of lines, the first of which starts
with ">>>" (possibly indented), while the remaining lines start
with "..." (identically indented).
"""
try:
tree = self.parse_block(block, lineno, indent)
except Exception, err:
if self.logger.isEnabledFor(logging.DEBUG):
for line in block:
self.log_debug("Source: %s", line.rstrip(u"\n"))
self.log_error("Can't parse docstring in %s line %s: %s: %s",
filename, lineno, err.__class__.__name__, err)
return block
if self.refactor_tree(tree, filename):
new = unicode(tree).splitlines(True)
# Undo the adjustment of the line numbers in wrap_toks() below.
clipped, new = new[:lineno-1], new[lineno-1:]
assert clipped == [u"\n"] * (lineno-1), clipped
if not new[-1].endswith(u"\n"):
new[-1] += u"\n"
block = [indent + self.PS1 + new.pop(0)]
if new:
block += [indent + self.PS2 + line for line in new]
return block
def summarize(self):
if self.wrote:
were = "were"
else:
were = "need to be"
if not self.files:
self.log_message("No files %s modified.", were)
else:
self.log_message("Files that %s modified:", were)
for file in self.files:
self.log_message(file)
if self.fixer_log:
self.log_message("Warnings/messages while refactoring:")
for message in self.fixer_log:
self.log_message(message)
if self.errors:
if len(self.errors) == 1:
self.log_message("There was 1 error:")
else:
self.log_message("There were %d errors:", len(self.errors))
for msg, args, kwds in self.errors:
self.log_message(msg, *args, **kwds)
def parse_block(self, block, lineno, indent):
"""Parses a block into a tree.
This is necessary to get correct line number / offset information
in the parser diagnostics and embedded into the parse tree.
"""
tree = self.driver.parse_tokens(self.wrap_toks(block, lineno, indent))
tree.future_features = frozenset()
return tree
def wrap_toks(self, block, lineno, indent):
"""Wraps a tokenize stream to systematically modify start/end."""
tokens = tokenize.generate_tokens(self.gen_lines(block, indent).next)
for type, value, (line0, col0), (line1, col1), line_text in tokens:
line0 += lineno - 1
line1 += lineno - 1
# Don't bother updating the columns; this is too complicated
# since line_text would also have to be updated and it would
# still break for tokens spanning lines. Let the user guess
# that the column numbers for doctests are relative to the
# end of the prompt string (PS1 or PS2).
yield type, value, (line0, col0), (line1, col1), line_text
def gen_lines(self, block, indent):
"""Generates lines as expected by tokenize from a list of lines.
This strips the first len(indent + self.PS1) characters off each line.
"""
prefix1 = indent + self.PS1
prefix2 = indent + self.PS2
prefix = prefix1
for line in block:
if line.startswith(prefix):
yield line[len(prefix):]
elif line == prefix.rstrip() + u"\n":
yield u"\n"
else:
raise AssertionError("line=%r, prefix=%r" % (line, prefix))
prefix = prefix2
while True:
yield ""
class MultiprocessingUnsupported(Exception):
pass
class MultiprocessRefactoringTool(RefactoringTool):
def __init__(self, *args, **kwargs):
super(MultiprocessRefactoringTool, self).__init__(*args, **kwargs)
self.queue = None
self.output_lock = None
def refactor(self, items, write=False, doctests_only=False,
num_processes=1):
if num_processes == 1:
return super(MultiprocessRefactoringTool, self).refactor(
items, write, doctests_only)
try:
import multiprocessing
except ImportError:
raise MultiprocessingUnsupported
if self.queue is not None:
raise RuntimeError("already doing multiple processes")
self.queue = multiprocessing.JoinableQueue()
self.output_lock = multiprocessing.Lock()
processes = [multiprocessing.Process(target=self._child)
for i in xrange(num_processes)]
try:
for p in processes:
p.start()
super(MultiprocessRefactoringTool, self).refactor(items, write,
doctests_only)
finally:
self.queue.join()
for i in xrange(num_processes):
self.queue.put(None)
for p in processes:
if p.is_alive():
p.join()
self.queue = None
def _child(self):
task = self.queue.get()
while task is not None:
args, kwargs = task
try:
super(MultiprocessRefactoringTool, self).refactor_file(
*args, **kwargs)
finally:
self.queue.task_done()
task = self.queue.get()
def refactor_file(self, *args, **kwargs):
if self.queue is not None:
self.queue.put((args, kwargs))
else:
return super(MultiprocessRefactoringTool, self).refactor_file(
*args, **kwargs)
| mit | 2,604,667,639,405,606,000 | 35.579425 | 80 | 0.538607 | false |
jgelens/AutobahnTestSuite | autobahntestsuite/autobahntestsuite/case/case7_3_6.py | 14 | 1702 | ###############################################################################
##
## Copyright 2011 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
from case import Case
class Case7_3_6(Case):
DESCRIPTION = """Send a close frame with close code and close reason which is too long (124) - total frame payload 126 octets"""
EXPECTATION = """Clean close with protocol error code or dropped TCP connection."""
def init(self):
self.suppressClose = True
def onConnectionLost(self, failedByMe):
Case.onConnectionLost(self, failedByMe)
if self.behaviorClose == Case.WRONG_CODE:
self.behavior = Case.FAILED
self.passed = False
self.result = self.resultClose
def onOpen(self):
self.payload = "*" * 124
self.expected[Case.OK] = []
self.expectedClose = {"closedByMe":True,"closeCode":[self.p.CLOSE_STATUS_CODE_PROTOCOL_ERROR],"requireClean":False}
self.p.sendCloseFrame(self.p.CLOSE_STATUS_CODE_NORMAL, reasonUtf8 = self.payload)
self.p.killAfter(1)
| apache-2.0 | 2,507,701,333,161,673,700 | 37.581395 | 131 | 0.612808 | false |
CJ8664/servo | tests/wpt/web-platform-tests/tools/wptrunner/wptrunner/wptrunner.py | 5 | 12417 | from __future__ import unicode_literals
import json
import os
import sys
import environment as env
import products
import testloader
import wptcommandline
import wptlogging
import wpttest
from font import FontInstaller
from testrunner import ManagerGroup
from browsers.base import NullBrowser
here = os.path.split(__file__)[0]
logger = None
"""Runner for web-platform-tests
The runner has several design goals:
* Tests should run with no modification from upstream.
* Tests should be regarded as "untrusted" so that errors, timeouts and even
crashes in the tests can be handled without failing the entire test run.
* For performance, tests can be run in multiple browsers in parallel.
The upstream repository has the facility for creating a test manifest in JSON
format. This manifest is used directly to determine which tests exist. Local
metadata files are used to store the expected test results.
"""
def setup_logging(*args, **kwargs):
global logger
logger = wptlogging.setup(*args, **kwargs)
def get_loader(test_paths, product, ssl_env, debug=None, run_info_extras=None, **kwargs):
if run_info_extras is None:
run_info_extras = {}
run_info = wpttest.get_run_info(kwargs["run_info"], product, debug=debug,
extras=run_info_extras)
test_manifests = testloader.ManifestLoader(test_paths, force_manifest_update=kwargs["manifest_update"],
manifest_download=kwargs["manifest_download"]).load()
manifest_filters = []
meta_filters = []
if kwargs["include"] or kwargs["exclude"] or kwargs["include_manifest"]:
manifest_filters.append(testloader.TestFilter(include=kwargs["include"],
exclude=kwargs["exclude"],
manifest_path=kwargs["include_manifest"],
test_manifests=test_manifests))
if kwargs["tags"]:
meta_filters.append(testloader.TagFilter(tags=kwargs["tags"]))
test_loader = testloader.TestLoader(test_manifests,
kwargs["test_types"],
run_info,
manifest_filters=manifest_filters,
meta_filters=meta_filters,
chunk_type=kwargs["chunk_type"],
total_chunks=kwargs["total_chunks"],
chunk_number=kwargs["this_chunk"],
include_https=ssl_env.ssl_enabled)
return run_info, test_loader
def list_test_groups(test_paths, product, **kwargs):
env.do_delayed_imports(logger, test_paths)
ssl_env = env.ssl_env(logger, **kwargs)
run_info_extras = products.load_product(kwargs["config"], product)[-1](**kwargs)
run_info, test_loader = get_loader(test_paths, product, ssl_env,
run_info_extras=run_info_extras, **kwargs)
for item in sorted(test_loader.groups(kwargs["test_types"])):
print item
def list_disabled(test_paths, product, **kwargs):
env.do_delayed_imports(logger, test_paths)
rv = []
run_info_extras = products.load_product(kwargs["config"], product)[-1](**kwargs)
ssl_env = env.ssl_env(logger, **kwargs)
run_info, test_loader = get_loader(test_paths, product, ssl_env,
run_info_extras=run_info_extras, **kwargs)
for test_type, tests in test_loader.disabled_tests.iteritems():
for test in tests:
rv.append({"test": test.id, "reason": test.disabled()})
print json.dumps(rv, indent=2)
def list_tests(test_paths, product, **kwargs):
env.do_delayed_imports(logger, test_paths)
rv = []
ssl_env = env.ssl_env(logger, **kwargs)
run_info_extras = products.load_product(kwargs["config"], product)[-1](**kwargs)
run_info, test_loader = get_loader(test_paths, product, ssl_env,
run_info_extras=run_info_extras, **kwargs)
for test in test_loader.test_ids:
print test
def get_pause_after_test(test_loader, **kwargs):
total_tests = sum(len(item) for item in test_loader.tests.itervalues())
if kwargs["pause_after_test"] is None:
if kwargs["repeat_until_unexpected"]:
return False
if kwargs["repeat"] == 1 and kwargs["rerun"] == 1 and total_tests == 1:
return True
return False
return kwargs["pause_after_test"]
def run_tests(config, test_paths, product, **kwargs):
with wptlogging.CaptureIO(logger, not kwargs["no_capture_stdio"]):
env.do_delayed_imports(logger, test_paths)
(check_args,
target_browser_cls, get_browser_kwargs,
executor_classes, get_executor_kwargs,
env_options, get_env_extras, run_info_extras) = products.load_product(config, product)
ssl_env = env.ssl_env(logger, **kwargs)
env_extras = get_env_extras(**kwargs)
check_args(**kwargs)
if kwargs["install_fonts"]:
env_extras.append(FontInstaller(
font_dir=kwargs["font_dir"],
ahem=os.path.join(kwargs["tests_root"], "fonts/Ahem.ttf")
))
if "test_loader" in kwargs:
run_info = wpttest.get_run_info(kwargs["run_info"], product, debug=None,
extras=run_info_extras(**kwargs))
test_loader = kwargs["test_loader"]
else:
run_info, test_loader = get_loader(test_paths,
product,
ssl_env,
run_info_extras=run_info_extras(**kwargs),
**kwargs)
test_source_kwargs = {"processes": kwargs["processes"]}
if kwargs["run_by_dir"] is False:
test_source_cls = testloader.SingleTestSource
else:
# A value of None indicates infinite depth
test_source_cls = testloader.PathGroupedSource
test_source_kwargs["depth"] = kwargs["run_by_dir"]
logger.info("Using %i client processes" % kwargs["processes"])
unexpected_total = 0
kwargs["pause_after_test"] = get_pause_after_test(test_loader, **kwargs)
with env.TestEnvironment(test_paths,
ssl_env,
kwargs["pause_after_test"],
kwargs["debug_info"],
env_options,
env_extras) as test_environment:
try:
test_environment.ensure_started()
except env.TestEnvironmentError as e:
logger.critical("Error starting test environment: %s" % e.message)
raise
repeat = kwargs["repeat"]
repeat_count = 0
repeat_until_unexpected = kwargs["repeat_until_unexpected"]
while repeat_count < repeat or repeat_until_unexpected:
repeat_count += 1
if repeat_until_unexpected:
logger.info("Repetition %i" % (repeat_count))
elif repeat > 1:
logger.info("Repetition %i / %i" % (repeat_count, repeat))
unexpected_count = 0
logger.suite_start(test_loader.test_ids, run_info)
for test_type in kwargs["test_types"]:
logger.info("Running %s tests" % test_type)
# WebDriver tests may create and destroy multiple browser
# processes as part of their expected behavior. These
# processes are managed by a WebDriver server binary. This
# obviates the need for wptrunner to provide a browser, so
# the NullBrowser is used in place of the "target" browser
if test_type == "wdspec":
browser_cls = NullBrowser
else:
browser_cls = target_browser_cls
browser_kwargs = get_browser_kwargs(test_type,
run_info,
ssl_env=ssl_env,
**kwargs)
executor_cls = executor_classes.get(test_type)
executor_kwargs = get_executor_kwargs(test_type,
test_environment.external_config,
test_environment.cache_manager,
run_info,
**kwargs)
if executor_cls is None:
logger.error("Unsupported test type %s for product %s" %
(test_type, product))
continue
for test in test_loader.disabled_tests[test_type]:
logger.test_start(test.id)
logger.test_end(test.id, status="SKIP")
if test_type == "testharness":
run_tests = {"testharness": []}
for test in test_loader.tests["testharness"]:
if test.testdriver and not executor_cls.supports_testdriver:
logger.test_start(test.id)
logger.test_end(test.id, status="SKIP")
else:
run_tests["testharness"].append(test)
else:
run_tests = test_loader.tests
with ManagerGroup("web-platform-tests",
kwargs["processes"],
test_source_cls,
test_source_kwargs,
browser_cls,
browser_kwargs,
executor_cls,
executor_kwargs,
kwargs["rerun"],
kwargs["pause_after_test"],
kwargs["pause_on_unexpected"],
kwargs["restart_on_unexpected"],
kwargs["debug_info"]) as manager_group:
try:
manager_group.run(test_type, run_tests)
except KeyboardInterrupt:
logger.critical("Main thread got signal")
manager_group.stop()
raise
unexpected_count += manager_group.unexpected_count()
unexpected_total += unexpected_count
logger.info("Got %i unexpected results" % unexpected_count)
if repeat_until_unexpected and unexpected_total > 0:
break
logger.suite_end()
return unexpected_total == 0
def check_stability(**kwargs):
import stability
return stability.check_stability(logger, **kwargs)
def start(**kwargs):
if kwargs["list_test_groups"]:
list_test_groups(**kwargs)
elif kwargs["list_disabled"]:
list_disabled(**kwargs)
elif kwargs["list_tests"]:
list_tests(**kwargs)
elif kwargs["verify"]:
check_stability(**kwargs)
else:
return not run_tests(**kwargs)
def main():
"""Main entry point when calling from the command line"""
kwargs = wptcommandline.parse_args()
try:
if kwargs["prefs_root"] is None:
kwargs["prefs_root"] = os.path.abspath(os.path.join(here, "prefs"))
setup_logging(kwargs, {"raw": sys.stdout})
return start(**kwargs)
except Exception:
if kwargs["pdb"]:
import pdb, traceback
print traceback.format_exc()
pdb.post_mortem()
else:
raise
| mpl-2.0 | -914,991,515,146,111,400 | 38.798077 | 107 | 0.514134 | false |
viz-dev/viz | contrib/seeds/makeseeds.py | 1 | 5743 | #!/usr/bin/env python3
# Copyright (c) 2013-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Generate seeds.txt from Pieter's DNS seeder
#
NSEEDS=512
MAX_SEEDS_PER_ASN=2
MIN_BLOCKS = 337600
# These are hosts that have been observed to be behaving strangely (e.g.
# aggressively connecting to every node).
SUSPICIOUS_HOSTS = {
"130.211.129.106", "178.63.107.226",
"83.81.130.26", "88.198.17.7", "148.251.238.178", "176.9.46.6",
"54.173.72.127", "54.174.10.182", "54.183.64.54", "54.194.231.211",
"54.66.214.167", "54.66.220.137", "54.67.33.14", "54.77.251.214",
"54.94.195.96", "54.94.200.247"
}
import re
import sys
import dns.resolver
import collections
PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):(\d+)$")
PATTERN_IPV6 = re.compile(r"^\[([0-9a-z:]+)\]:(\d+)$")
PATTERN_ONION = re.compile(r"^([abcdefghijklmnopqrstuvwxyz234567]{16}\.onion):(\d+)$")
PATTERN_AGENT = re.compile(r"^(/Satoshi:0.12.(0|1|99)/|/Satoshi:0.13.(0|1|2|99)/|/VizCore:0.13.(0|1|2|99)/)$")
def parseline(line):
sline = line.split()
if len(sline) < 11:
return None
m = PATTERN_IPV4.match(sline[0])
sortkey = None
ip = None
if m is None:
m = PATTERN_IPV6.match(sline[0])
if m is None:
m = PATTERN_ONION.match(sline[0])
if m is None:
return None
else:
net = 'onion'
ipstr = sortkey = m.group(1)
port = int(m.group(2))
else:
net = 'ipv6'
if m.group(1) in ['::']: # Not interested in localhost
return None
ipstr = m.group(1)
sortkey = ipstr # XXX parse IPv6 into number, could use name_to_ipv6 from generate-seeds
port = int(m.group(2))
else:
# Do IPv4 sanity check
ip = 0
for i in range(0,4):
if int(m.group(i+2)) < 0 or int(m.group(i+2)) > 255:
return None
ip = ip + (int(m.group(i+2)) << (8*(3-i)))
if ip == 0:
return None
net = 'ipv4'
sortkey = ip
ipstr = m.group(1)
port = int(m.group(6))
# Skip bad results.
if sline[1] == 0:
return None
# Extract uptime %.
uptime30 = float(sline[7][:-1])
# Extract Unix timestamp of last success.
lastsuccess = int(sline[2])
# Extract protocol version.
version = int(sline[10])
# Extract user agent.
agent = sline[11][1:-1]
# Extract service flags.
service = int(sline[9], 16)
# Extract blocks.
blocks = int(sline[8])
# Construct result.
return {
'net': net,
'ip': ipstr,
'port': port,
'ipnum': ip,
'uptime': uptime30,
'lastsuccess': lastsuccess,
'version': version,
'agent': agent,
'service': service,
'blocks': blocks,
'sortkey': sortkey,
}
def filtermultiport(ips):
'''Filter out hosts with more nodes per IP'''
hist = collections.defaultdict(list)
for ip in ips:
hist[ip['sortkey']].append(ip)
return [value[0] for (key,value) in list(hist.items()) if len(value)==1]
# Based on Greg Maxwell's seed_filter.py
def filterbyasn(ips, max_per_asn, max_total):
# Sift out ips by type
ips_ipv4 = [ip for ip in ips if ip['net'] == 'ipv4']
ips_ipv6 = [ip for ip in ips if ip['net'] == 'ipv6']
ips_onion = [ip for ip in ips if ip['net'] == 'onion']
# Filter IPv4 by ASN
result = []
asn_count = {}
for ip in ips_ipv4:
if len(result) == max_total:
break
try:
asn = int([x.to_text() for x in dns.resolver.query('.'.join(reversed(ip['ip'].split('.'))) + '.origin.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0])
if asn not in asn_count:
asn_count[asn] = 0
if asn_count[asn] == max_per_asn:
continue
asn_count[asn] += 1
result.append(ip)
except:
sys.stderr.write('ERR: Could not resolve ASN for "' + ip['ip'] + '"\n')
# TODO: filter IPv6 by ASN
# Add back non-IPv4
result.extend(ips_ipv6)
result.extend(ips_onion)
return result
def main():
lines = sys.stdin.readlines()
ips = [parseline(line) for line in lines]
# Skip entries without a valid address.
ips = [ip for ip in ips if ip is not None]
# Skip entries from suspicious hosts.
ips = [ip for ip in ips if ip['ip'] not in SUSPICIOUS_HOSTS]
# Enforce minimal number of blocks.
ips = [ip for ip in ips if ip['blocks'] >= MIN_BLOCKS]
# Require service bit 1.
ips = [ip for ip in ips if (ip['service'] & 1) == 1]
# Require at least 50% 30-day uptime.
ips = [ip for ip in ips if ip['uptime'] > 50]
# Require a known and recent user agent.
ips = [ip for ip in ips if PATTERN_AGENT.match(ip['agent'])]
# Sort by availability (and use last success as tie breaker)
ips.sort(key=lambda x: (x['uptime'], x['lastsuccess'], x['ip']), reverse=True)
# Filter out hosts with multiple bitcoin ports, these are likely abusive
ips = filtermultiport(ips)
# Look up ASNs and limit results, both per ASN and globally.
ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS)
# Sort the results by IP address (for deterministic output).
ips.sort(key=lambda x: (x['net'], x['sortkey']))
for ip in ips:
if ip['net'] == 'ipv6':
print('[%s]:%i' % (ip['ip'], ip['port']))
else:
print('%s:%i' % (ip['ip'], ip['port']))
if __name__ == '__main__':
main()
| mit | -2,129,171,284,230,433,300 | 32.389535 | 186 | 0.561205 | false |
jordanemedlock/psychtruths | temboo/Library/Google/ComputeEngine/Addresses/ListAddresses.py | 5 | 6106 | # -*- coding: utf-8 -*-
###############################################################################
#
# ListAddresses
# Retrieves a list of Address resources contained within the specified region
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class ListAddresses(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the ListAddresses Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(ListAddresses, self).__init__(temboo_session, '/Library/Google/ComputeEngine/Addresses/ListAddresses')
def new_input_set(self):
return ListAddressesInputSet()
def _make_result_set(self, result, path):
return ListAddressesResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return ListAddressesChoreographyExecution(session, exec_id, path)
class ListAddressesInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the ListAddresses
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_AccessToken(self, value):
"""
Set the value of the AccessToken input for this Choreo. ((optional, string) A valid access token retrieved during the OAuth process. This is required unless you provide the ClientID, ClientSecret, and RefreshToken to generate a new access token.)
"""
super(ListAddressesInputSet, self)._set_input('AccessToken', value)
def set_ClientID(self, value):
"""
Set the value of the ClientID input for this Choreo. ((conditional, string) The Client ID provided by Google. Required unless providing a valid AccessToken.)
"""
super(ListAddressesInputSet, self)._set_input('ClientID', value)
def set_ClientSecret(self, value):
"""
Set the value of the ClientSecret input for this Choreo. ((conditional, string) The Client Secret provided by Google. Required unless providing a valid AccessToken.)
"""
super(ListAddressesInputSet, self)._set_input('ClientSecret', value)
def set_Fields(self, value):
"""
Set the value of the Fields input for this Choreo. ((optional, string) Comma-seperated list of fields you want to include in the response.)
"""
super(ListAddressesInputSet, self)._set_input('Fields', value)
def set_Filter(self, value):
"""
Set the value of the Filter input for this Choreo. ((optional, string) A filter expression for narrowing results in the form: {field_name} {comparison_string} {literal_string} (e.g. name eq your-address-name). Comparison strings can be eq (equals) or ne (not equals).)
"""
super(ListAddressesInputSet, self)._set_input('Filter', value)
def set_MaxResults(self, value):
"""
Set the value of the MaxResults input for this Choreo. ((optional, integer) The maximum number of results to return.)
"""
super(ListAddressesInputSet, self)._set_input('MaxResults', value)
def set_PageToken(self, value):
"""
Set the value of the PageToken input for this Choreo. ((optional, string) The "nextPageToken" found in the response which is used to page through results.)
"""
super(ListAddressesInputSet, self)._set_input('PageToken', value)
def set_Project(self, value):
"""
Set the value of the Project input for this Choreo. ((required, string) The ID of a Google Compute project.)
"""
super(ListAddressesInputSet, self)._set_input('Project', value)
def set_RefreshToken(self, value):
"""
Set the value of the RefreshToken input for this Choreo. ((conditional, string) An OAuth refresh token used to generate a new access token when the original token is expired. Required unless providing a valid AccessToken.)
"""
super(ListAddressesInputSet, self)._set_input('RefreshToken', value)
def set_Region(self, value):
"""
Set the value of the Region input for this Choreo. ((required, string) The name of the region associated with this request.)
"""
super(ListAddressesInputSet, self)._set_input('Region', value)
class ListAddressesResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the ListAddresses Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. ((json) The response from Google.)
"""
return self._output.get('Response', None)
def get_NewAccessToken(self):
"""
Retrieve the value for the "NewAccessToken" output from this Choreo execution. ((string) Contains a new AccessToken when the RefreshToken is provided.)
"""
return self._output.get('NewAccessToken', None)
class ListAddressesChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return ListAddressesResultSet(response, path)
| apache-2.0 | -6,805,042,870,874,888,000 | 45.257576 | 276 | 0.678185 | false |
sangwook236/SWL | python/src/swl/machine_learning/tensorflow_model.py | 2 | 7887 | import abc
import tensorflow as tf
from swl.machine_learning.learning_model import LearningModel
#--------------------------------------------------------------------
class TensorFlowModel(LearningModel):
"""Learning model for TensorFlow library.
"""
def __init__(self):
super().__init__()
# Loss and accuracy are used in training and evaluation steps.
self._loss = None
self._accuracy = None
@property
def loss(self):
if self._loss is None:
raise ValueError('Loss is None')
return self._loss
@property
def accuracy(self):
if self._accuracy is None:
raise ValueError('Accuracy is None')
return self._accuracy
@abc.abstractmethod
def get_feed_dict(self, data, num_data, *args, **kwargs):
raise NotImplementedError
def _get_loss(self, y, t):
with tf.name_scope('loss'):
"""
if 1 == num_classes:
loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels=t, logits=y))
elif num_classes >= 2:
#loss = tf.reduce_mean(-tf.reduce_sum(t * tf.log(y), reduction_indices=[1]))
#loss = tf.reduce_mean(-tf.reduce_sum(t * tf.log(tf.clip_by_value(y, 1e-10, 1.0)), reduction_indices=[1]))
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(labels=t, logits=y))
else:
assert num_classes > 0, 'Invalid number of classes.'
"""
loss = tf.reduce_mean(tf.nn.softmax_cross_entropy_with_logits_v2(labels=t, logits=y))
tf.summary.scalar('loss', loss)
return loss
def _get_accuracy(self, y, t):
with tf.name_scope('accuracy'):
"""
if 1 == num_classes:
correct_prediction = tf.equal(tf.round(y), tf.round(t))
elif num_classes >= 2:
correct_prediction = tf.equal(tf.argmax(y, axis=-1), tf.argmax(t, axis=-1))
else:
assert num_classes > 0, 'Invalid number of classes.'
"""
correct_prediction = tf.equal(tf.argmax(y, axis=-1), tf.argmax(t, axis=-1))
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
tf.summary.scalar('accuracy', accuracy)
return accuracy
#--------------------------------------------------------------------
class SimpleTensorFlowModel(TensorFlowModel):
"""Single-input single-output learning model for TensorFlow library.
- Fixed-length inputs and outputs.
- Dense tensors for input and output.
"""
def __init__(self, input_shape, output_shape):
super().__init__()
self._input_shape = input_shape
self._output_shape = output_shape
self._input_ph = tf.placeholder(tf.float32, shape=self._input_shape, name='input_ph')
self._output_ph = tf.placeholder(tf.int32, shape=self._output_shape, name='output_ph')
#self._output_ph = tf.placeholder(tf.float32, shape=self._output_shape, name='output_ph')
@abc.abstractmethod
def _create_single_model(self, inputs, input_shape, output_shape, is_training):
raise NotImplementedError
def create_training_model(self):
self._model_output = self._create_single_model(self._input_ph, self._input_shape, self._output_shape, True)
self._loss = self._get_loss(self._model_output, self._output_ph)
self._accuracy = self._get_accuracy(self._model_output, self._output_ph)
def create_evaluation_model(self):
self._model_output = self._create_single_model(self._input_ph, self._input_shape, self._output_shape, False)
self._loss = self._get_loss(self._model_output, self._output_ph)
self._accuracy = self._get_accuracy(self._model_output, self._output_ph)
def create_inference_model(self):
self._model_output = self._create_single_model(self._input_ph, self._input_shape, self._output_shape, False)
self._loss = None
self._accuracy = None
#--------------------------------------------------------------------
class SimpleAuxiliaryInputTensorFlowModel(TensorFlowModel):
"""Single-input single-output learning model for TensorFlow library.
- Auxiliary inputs for training.
- Fixed- or variable-length inputs and outputs.
- Dense tensors for input and output.
"""
def __init__(self, input_shape, aux_input_shape, output_shape):
super().__init__()
self._input_shape = input_shape
self._aux_input_shape = aux_input_shape
self._output_shape = output_shape
self._input_ph = tf.placeholder(tf.float32, shape=self._input_shape, name='input_ph')
self._aux_input_ph = tf.placeholder(tf.float32, shape=self._aux_input_shape, name='aux_input_ph')
self._output_ph = tf.placeholder(tf.int32, shape=self._output_shape, name='output_ph')
#self._output_ph = tf.placeholder(tf.float32, shape=self._output_shape, name='output_ph')
@abc.abstractmethod
def _create_single_model(self, inputs, aux_inputs, input_shape, aux_input_shape, output_shape, is_training):
raise NotImplementedError
def create_training_model(self):
self._model_output = self._create_single_model(self._input_ph, self._aux_input_ph, self._input_shape, self._aux_input_shape, self._output_shape, True)
self._loss = self._get_loss(self._model_output, self._output_ph)
self._accuracy = self._get_accuracy(self._model_output, self._output_ph)
def create_evaluation_model(self):
self._model_output = self._create_single_model(self._input_ph, self._aux_input_ph, self._input_shape, self._aux_input_shape, self._output_shape, False)
self._loss = self._get_loss(self._model_output, self._output_ph)
self._accuracy = self._get_accuracy(self._model_output, self._output_ph)
def create_inference_model(self):
self._model_output = self._create_single_model(self._input_ph, self._aux_input_ph, self._input_shape, self._aux_input_shape, self._output_shape, False)
self._loss = None
self._accuracy = None
#--------------------------------------------------------------------
class SimpleSequentialTensorFlowModel(TensorFlowModel):
"""Single-input single-output learning model for TensorFlow library.
- Fixed- or variable-length inputs and outputs.
- Dense tensors for input.
- Dense or sparse tensors for output.
"""
def __init__(self, input_shape, output_shape, num_classes, is_sparse_output=False, is_time_major=False):
super().__init__()
self._input_shape = input_shape
self._output_shape = output_shape
self._num_classes = num_classes
self._is_sparse_output = is_sparse_output
self._is_time_major = is_time_major
self._input_ph = tf.placeholder(tf.float32, shape=self._input_shape, name='input_ph')
if self._is_sparse_output:
self._output_ph = tf.sparse_placeholder(tf.int32, shape=self._output_shape, name='output_ph')
else:
self._output_ph = tf.placeholder(tf.int32, shape=self._output_shape, name='output_ph')
self._output_len_ph = tf.placeholder(tf.int32, [None], name='output_len_ph')
self._model_output_len_ph = tf.placeholder(tf.int32, [None], name='model_output_len_ph')
@abc.abstractmethod
def _get_loss(self, y, t, y_len, t_len):
raise NotImplementedError
@abc.abstractmethod
def _get_accuracy(self, y, t, y_len):
raise NotImplementedError
@abc.abstractmethod
def _create_single_model(self, inputs, input_shape, num_classes, is_training):
raise NotImplementedError
def create_training_model(self):
self._model_output = self._create_single_model(self._input_ph, self._input_shape, self._num_classes, True)
self._loss = self._get_loss(self._model_output, self._output_ph, self._model_output_len_ph, self._output_len_ph)
self._accuracy = self._get_accuracy(self._model_output, self._output_ph, self._model_output_len_ph)
def create_evaluation_model(self):
self._model_output = self._create_single_model(self._input_ph, self._input_shape, self._num_classes, False)
self._loss = self._get_loss(self._model_output, self._output_ph, self._model_output_len_ph, self._output_len_ph)
self._accuracy = self._get_accuracy(self._model_output, self._output_ph, self._model_output_len_ph)
def create_inference_model(self):
self._model_output = self._create_single_model(self._input_ph, self._input_shape, self._num_classes, False)
self._loss = None
self._accuracy = None
| gpl-3.0 | -8,637,940,679,759,408,000 | 37.473171 | 153 | 0.684417 | false |
acsone/hr | hr_worked_days_hourly_rate/__openerp__.py | 25 | 1743 | # -*- coding:utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Savoir-faire Linux. All Rights Reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Worked Days Hourly Rates',
'version': '1.0',
'license': 'AGPL-3',
'category': 'Generic Modules/Human Resources',
'description': """
Worked Days Hourly Rate
=======================
* Adds hourly rate field on worked days
* Adds date_from and date_to fields
* Adds a rate (%) by which to multiply the hourly rate
for overtime or other purposes.
Contributors
------------
* David Dufresne <[email protected]>
* Pierre Lamarche <[email protected]>
""",
'author': "Savoir-faire Linux,Odoo Community Association (OCA)",
'website': 'https://www.savoirfairelinux.com/',
'depends': [
'hr_payroll',
],
'data': [
'hr_payslip_view.xml',
],
'test': [],
'demo': [],
'installable': False,
}
| agpl-3.0 | -2,899,683,305,329,651,000 | 33.86 | 78 | 0.601262 | false |
daniel-e/papershelf | dialogs/correct.py | 1 | 1145 | import pygtk
pygtk.require('2.0')
import gtk
class DialogCorrect(gtk.Dialog):
def __init__(self, title, parent, flag, values):
gtk.Dialog.__init__(self, title, parent, flag)
t = gtk.Table(rows = 3, columns = 2)
t.set_col_spacings(10)
t.set_row_spacings(10)
l = gtk.Label("Filename:")
l.set_alignment(xalign = 1.0, yalign = 0.5)
t.attach(l, 0, 1, 0, 1)
l.show()
l = gtk.Label(values["filename"])
l.set_alignment(xalign = 0.0, yalign = 0.5)
t.attach(l, 1, 2, 0, 1)
l.show()
l = gtk.Label("Location:")
l.set_alignment(xalign = 1.0, yalign = 0.5)
t.attach(l, 0, 1, 1, 2)
l.show()
l = gtk.Label(values["path"])
l.set_alignment(xalign = 0.0, yalign = 0.5)
t.attach(l, 1, 2, 1, 2)
l.show()
l = gtk.Label("Index entry:")
l.set_alignment(xalign = 1.0, yalign = 0.5)
t.attach(l, 0, 1, 2, 3)
l.show()
l = gtk.Entry()
l.set_width_chars(len(values["idxentry"]) + 10)
l.set_text(values["idxentry"])
t.attach(l, 1, 2, 2, 3)
l.show()
self.vbox.pack_start(t)
t.show()
self.add_button("Ok", 1)
self.add_button("Cancel", 2)
| gpl-2.0 | -3,169,762,596,780,770,300 | 25.022727 | 51 | 0.564192 | false |
kohnle-lernmodule/KITexe201based | twisted/test/test_tpfile.py | 17 | 1590 |
# Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
from twisted.trial import unittest
from twisted.protocols import loopback
from twisted.protocols import basic
from twisted.internet import protocol, abstract
import StringIO
class BufferingServer(protocol.Protocol):
buffer = ''
def dataReceived(self, data):
self.buffer += data
class FileSendingClient(protocol.Protocol):
def __init__(self, f):
self.f = f
def connectionMade(self):
s = basic.FileSender()
d = s.beginFileTransfer(self.f, self.transport, lambda x: x)
d.addCallback(lambda r: self.transport.loseConnection())
class FileSenderTestCase(unittest.TestCase):
def testSendingFile(self):
testStr = 'xyz' * 100 + 'abc' * 100 + '123' * 100
s = BufferingServer()
c = FileSendingClient(StringIO.StringIO(testStr))
loopback.loopbackTCP(s, c)
self.assertEquals(s.buffer, testStr)
def testSendingEmptyFile(self):
fileSender = basic.FileSender()
consumer = abstract.FileDescriptor()
consumer.connected = 1
emptyFile = StringIO.StringIO('')
d = fileSender.beginFileTransfer(emptyFile, consumer, lambda x: x)
# The producer will be immediately exhausted, and so immediately
# unregistered
self.assertEqual(consumer.producer, None)
# Which means the Deferred from FileSender should have been called
self.failUnless(d.called,
'producer unregistered with deferred being called')
| gpl-2.0 | 4,186,322,746,001,290,000 | 30.176471 | 75 | 0.671069 | false |
luzheqi1987/nova-annotation | nova/objects/instance_numa_topology.py | 1 | 4085 | # Copyright 2014 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from nova import db
from nova import exception
from nova.objects import base
from nova.objects import fields
from nova.virt import hardware
class InstanceNUMACell(base.NovaObject):
# Version 1.0: Initial version
# Version 1.1: Add pagesize field
VERSION = '1.1'
fields = {
'id': fields.IntegerField(read_only=True),
'cpuset': fields.SetOfIntegersField(),
'memory': fields.IntegerField(),
'pagesize': fields.IntegerField(nullable=True),
}
class InstanceNUMATopology(base.NovaObject):
# Version 1.0: Initial version
# Version 1.1: Takes into account pagesize
VERSION = '1.1'
fields = {
# NOTE(danms): The 'id' field is no longer used and should be
# removed in the future when convenient
'id': fields.IntegerField(),
'instance_uuid': fields.UUIDField(),
'cells': fields.ListOfObjectsField('InstanceNUMACell'),
}
@classmethod
def obj_from_topology(cls, topology):
if not isinstance(topology, hardware.VirtNUMAInstanceTopology):
raise exception.ObjectActionError(action='obj_from_topology',
reason='invalid topology class')
if topology:
cells = []
for topocell in topology.cells:
pagesize = (topocell.pagesize
and topocell.pagesize.size_kb or None)
cell = InstanceNUMACell(id=topocell.id, cpuset=topocell.cpuset,
memory=topocell.memory,
pagesize=pagesize)
cells.append(cell)
return cls(cells=cells)
def topology_from_obj(self):
cells = []
for objcell in self.cells:
pagesize = (
objcell.pagesize and
hardware.VirtPageSize(objcell.pagesize) or None)
cell = hardware.VirtNUMATopologyCellInstance(objcell.id,
objcell.cpuset,
objcell.memory,
pagesize=pagesize)
cells.append(cell)
return hardware.VirtNUMAInstanceTopology(cells=cells)
@base.remotable
def create(self, context):
topology = self.topology_from_obj()
if not topology:
return
values = {'numa_topology': topology.to_json()}
db.instance_extra_update_by_uuid(context, self.instance_uuid,
values)
self.obj_reset_changes()
@base.remotable_classmethod
def get_by_instance_uuid(cls, context, instance_uuid):
db_topology = db.instance_extra_get_by_instance_uuid(
context, instance_uuid, columns=['numa_topology'])
if not db_topology:
raise exception.NumaTopologyNotFound(instance_uuid=instance_uuid)
if db_topology['numa_topology'] is None:
return None
topo = hardware.VirtNUMAInstanceTopology.from_json(
db_topology['numa_topology'])
obj_topology = cls.obj_from_topology(topo)
obj_topology.id = db_topology['id']
obj_topology.instance_uuid = db_topology['instance_uuid']
# NOTE (ndipanov) not really needed as we never save, but left for
# consistency
obj_topology.obj_reset_changes()
return obj_topology
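# Illustrative sketch (added for clarity; not part of the original nova module).
# Building a two-cell guest NUMA topology object and persisting it for an
# instance could look roughly like this -- the cpuset/memory values are
# hypothetical:
#
#     topology = InstanceNUMATopology(
#         instance_uuid=instance.uuid,
#         cells=[InstanceNUMACell(id=0, cpuset=set([0, 1]), memory=512),
#                InstanceNUMACell(id=1, cpuset=set([2, 3]), memory=512)])
#     topology.create(context)  # serialised via topology_from_obj() into instance_extra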
| apache-2.0 | 1,546,031,224,310,066,000 | 37.904762 | 79 | 0.595594 | false |
evernym/plenum | plenum/common/timer.py | 2 | 2776 | from abc import ABC, abstractmethod
from functools import wraps
from logging import getLogger
from typing import Callable, NamedTuple
import time
from sortedcontainers import SortedListWithKey
logger = getLogger()
class TimerService(ABC):
@abstractmethod
def get_current_time(self) -> float:
pass
@abstractmethod
def schedule(self, delay: float, callback: Callable):
pass
@abstractmethod
def cancel(self, callback: Callable):
pass
class QueueTimer(TimerService):
TimerEvent = NamedTuple('TimerEvent', [('timestamp', float), ('callback', Callable)])
def __init__(self, get_current_time=time.perf_counter):
self._get_current_time = get_current_time
self._events = SortedListWithKey(key=lambda v: v.timestamp)
def queue_size(self):
return len(self._events)
def service(self):
while len(self._events) and self._next_timestamp() <= self._get_current_time():
self._pop_event().callback()
def get_current_time(self) -> float:
return self._get_current_time()
def schedule(self, delay: float, callback: Callable):
timestamp = self._get_current_time() + delay
self._events.add(self.TimerEvent(timestamp=timestamp, callback=callback))
def cancel(self, callback: Callable):
indexes = [i for i, ev in enumerate(self._events) if ev.callback == callback]
for i in reversed(indexes):
del self._events[i]
def _next_timestamp(self):
return self._events[0].timestamp
def _pop_event(self) -> TimerEvent:
return self._events.pop(0)
class RepeatingTimer:
def __init__(self, timer: TimerService, interval: float, callback: Callable, active: bool = True):
@wraps(callback)
def wrapped_callback():
if not self._active:
return
callback()
self._timer.schedule(self._interval, self._callback)
self._timer = timer
self._interval = None
self.update_interval(interval)
self._callback = wrapped_callback
self._active = False
# TODO: Make timer always inactive and require calling start to activate
if active:
self.start()
def start(self):
if self._active or not self._interval:
return
self._active = True
self._timer.schedule(self._interval, self._callback)
def stop(self):
if not self._active:
return
self._active = False
self._timer.cancel(self._callback)
def update_interval(self, interval: float):
if interval <= 0:
logger.debug("RepeatingTimer - incorrect interval {}".format(interval))
return
self._interval = interval
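# Illustrative usage sketch -- added for clarity, not part of the original
# plenum module. The owner drives time forward by calling service(); the
# repeating timer re-schedules its callback after every firing until stop()
# is called.
if __name__ == '__main__':  # pragma: no cover
    demo_timer = QueueTimer()
    fired = []
    repeater = RepeatingTimer(demo_timer, 0.01,
                              lambda: fired.append(demo_timer.get_current_time()))
    time.sleep(0.05)
    demo_timer.service()   # fires the due callback once; it re-schedules itself
    repeater.stop()        # cancels the re-scheduled callback
    print(fired)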
| apache-2.0 | -6,871,454,628,632,271,000 | 28.531915 | 102 | 0.628602 | false |
adlius/osf.io | tests/test_campaigns.py | 6 | 10773 | from datetime import timedelta
from rest_framework import status as http_status
from django.utils import timezone
from nose.tools import * # noqa (PEP8 asserts)
from framework.auth import campaigns, views as auth_views, cas
from website.util import web_url_for
from website.util.metrics import provider_source_tag
from osf_tests import factories
from tests.base import OsfTestCase
from tests.utils import mock_auth
def set_preprint_providers():
"""Populate `PreprintProvider` to test database for testing."""
providers = {
'osf': 'Open Science Framework',
'socarxiv': 'SocArXiv',
'engrxiv': 'EngrXiv',
'psyarxiv': 'PsyArXiv',
}
for key, value in providers.items():
provider = factories.PreprintProviderFactory()
provider._id = key
provider.name = value
provider.save()
# tests for campaign initialization and update
class TestCampaignInitialization(OsfTestCase):
def setUp(self):
super(TestCampaignInitialization, self).setUp()
set_preprint_providers()
self.campaign_lists = [
'prereg',
'erpc',
'institution',
'osf-preprints',
'socarxiv-preprints',
'engrxiv-preprints',
'psyarxiv-preprints',
'osf-registries',
'osf-registered-reports',
]
self.refresh = timezone.now()
campaigns.CAMPAIGNS = None # force campaign refresh now that preprint providers are populated
campaigns.CAMPAIGNS_LAST_REFRESHED = self.refresh
def test_get_campaigns_init(self):
campaign_dict = campaigns.get_campaigns()
assert_equal(len(campaign_dict), len(self.campaign_lists))
for campaign in campaign_dict:
assert_in(campaign, self.campaign_lists)
assert_not_equal(self.refresh, campaigns.CAMPAIGNS_LAST_REFRESHED)
def test_get_campaigns_update_not_expired(self):
campaigns.get_campaigns()
self.refresh = campaigns.CAMPAIGNS_LAST_REFRESHED
campaigns.get_campaigns()
assert_equal(self.refresh, campaigns.CAMPAIGNS_LAST_REFRESHED)
def test_get_campaigns_update_expired(self):
campaigns.get_campaigns()
self.refresh = timezone.now() - timedelta(minutes=5)
campaigns.CAMPAIGNS_LAST_REFRESHED = self.refresh
campaigns.get_campaigns()
assert_not_equal(self.refresh, campaigns.CAMPAIGNS_LAST_REFRESHED)
# tests for campaign helper methods
class TestCampaignMethods(OsfTestCase):
def setUp(self):
super(TestCampaignMethods, self).setUp()
set_preprint_providers()
self.campaign_lists = [
'prereg',
'erpc',
'institution',
'osf-preprints',
'socarxiv-preprints',
'engrxiv-preprints',
'psyarxiv-preprints',
]
self.invalid_campaign = 'invalid_campaign'
campaigns.CAMPAIGNS = None # force campaign refresh now that preprint providers are populated
def test_is_institution_login(self):
for campaign in self.campaign_lists:
institution = campaigns.is_institution_login(campaign)
if campaign == 'institution':
assert_true(institution)
else:
assert_false(institution)
institution = campaigns.is_institution_login(self.invalid_campaign)
assert_true(institution is None)
def test_is_native_login(self):
for campaign in self.campaign_lists:
native = campaigns.is_native_login(campaign)
if campaign == 'prereg' or campaign == 'erpc':
assert_true(native)
else:
assert_false(native)
        native = campaigns.is_native_login(self.invalid_campaign)
assert_true(native is None)
def test_is_proxy_login(self):
for campaign in self.campaign_lists:
proxy = campaigns.is_proxy_login(campaign)
if campaign.endswith('-preprints'):
assert_true(proxy)
else:
assert_false(proxy)
proxy = campaigns.is_proxy_login(self.invalid_campaign)
assert_true(proxy is None)
def test_system_tag_for_campaign(self):
for campaign in self.campaign_lists:
tag = campaigns.system_tag_for_campaign(campaign)
assert_true(tag is not None)
tag = campaigns.system_tag_for_campaign(self.invalid_campaign)
assert_true(tag is None)
def test_email_template_for_campaign(self):
for campaign in self.campaign_lists:
template = campaigns.email_template_for_campaign(campaign)
if campaigns.is_institution_login(campaign):
assert_true(template is None)
else:
assert_true(template is not None)
template = campaigns.email_template_for_campaign(self.invalid_campaign)
assert_true(template is None)
def test_campaign_url_for(self):
for campaign in self.campaign_lists:
url = campaigns.campaign_url_for(campaign)
assert_true(url is not None)
url = campaigns.campaign_url_for(self.invalid_campaign)
assert_true(url is None)
def test_get_service_provider(self):
for campaign in self.campaign_lists:
provider = campaigns.get_service_provider(campaign)
if campaigns.is_proxy_login(campaign):
assert_true(provider is not None)
else:
assert_true(provider is None)
provider = campaigns.get_service_provider(self.invalid_campaign)
assert_true(provider is None)
def test_campaign_for_user(self):
user = factories.UserFactory()
user.add_system_tag(provider_source_tag('osf', 'preprint'))
user.save()
campaign = campaigns.campaign_for_user(user)
assert_equal(campaign, 'osf-preprints')
# tests for prereg, erpc, which follow similar auth login/register logic
class TestCampaignsAuthViews(OsfTestCase):
def setUp(self):
super(TestCampaignsAuthViews, self).setUp()
self.campaigns = {
'prereg': {
'title_register': 'OSF Preregistration',
'title_landing': 'Welcome to the OSF Preregistration!'
},
'erpc': {
'title_register': 'Election Research Preacceptance Competition',
'title_landing': 'The Election Research Preacceptance Competition is Now Closed'
},
}
for key, value in self.campaigns.items():
value.update({'url_login': web_url_for('auth_login', campaign=key)})
value.update({'url_register': web_url_for('auth_register', campaign=key)})
value.update({'url_landing': campaigns.campaign_url_for(key)})
self.user = factories.AuthUserFactory()
def test_campaign_register_view_logged_in(self):
for key, value in self.campaigns.items():
resp = self.app.get(value['url_register'], auth=self.user.auth)
assert_equal(resp.status_code, http_status.HTTP_302_FOUND)
assert_equal(value['url_landing'], resp.headers['Location'])
def test_campaign_register_view_logged_out(self):
for key, value in self.campaigns.items():
resp = self.app.get(value['url_register'])
assert_equal(resp.status_code, http_status.HTTP_200_OK)
assert_in(value['title_register'], resp)
def test_campaign_login_logged_in(self):
for key, value in self.campaigns.items():
resp = self.app.get(value['url_login'], auth=self.user.auth)
assert_equal(resp.status_code, http_status.HTTP_302_FOUND)
assert_in(value['url_landing'], resp.headers['Location'])
def test_campaign_login_logged_out(self):
for key, value in self.campaigns.items():
resp = self.app.get(value['url_login'])
assert_equal(resp.status_code, http_status.HTTP_302_FOUND)
assert_in(value['url_register'], resp.headers['Location'])
def test_campaign_landing_logged_in(self):
for key, value in self.campaigns.items():
resp = self.app.get(value['url_landing'], auth=self.user.auth)
assert_equal(resp.status_code, http_status.HTTP_200_OK)
def test_auth_prereg_landing_page_logged_out(self):
for key, value in self.campaigns.items():
resp = self.app.get(value['url_landing'])
assert_equal(resp.status_code, http_status.HTTP_200_OK)
# tests for registration through campaigns
class TestRegistrationThroughCampaigns(OsfTestCase):
def setUp(self):
super(TestRegistrationThroughCampaigns, self).setUp()
campaigns.get_campaigns() # Set up global CAMPAIGNS
def test_confirm_email_get_with_campaign(self):
for key, value in campaigns.CAMPAIGNS.items():
user = factories.UnconfirmedUserFactory()
user.add_system_tag(value.get('system_tag'))
user.save()
token = user.get_confirmation_token(user.username)
kwargs = {
'uid': user._id,
}
with self.app.app.test_request_context(), mock_auth(user):
res = auth_views.confirm_email_get(token, **kwargs)
assert_equal(res.status_code, http_status.HTTP_302_FOUND)
assert_equal(res.location, campaigns.campaign_url_for(key))
# tests for institution
class TestCampaignsCASInstitutionLogin(OsfTestCase):
def setUp(self):
super(TestCampaignsCASInstitutionLogin, self).setUp()
self.url_login = web_url_for('auth_login', campaign='institution')
self.url_register = web_url_for('auth_register', campaign='institution')
self.service_url = web_url_for('dashboard', _absolute=True)
# go to CAS institution login page if not logged in
def test_institution_not_logged_in(self):
resp = self.app.get(self.url_login)
assert_equal(resp.status_code, http_status.HTTP_302_FOUND)
assert_in(cas.get_login_url(self.service_url, campaign='institution'), resp.headers['Location'])
# register behave the same as login
resp2 = self.app.get(self.url_register)
assert_equal(resp.headers['Location'], resp2.headers['Location'])
    # go to target page (service url) if logged in
def test_institution_logged_in(self):
resp = self.app.get(self.url_login)
assert_equal(resp.status_code, http_status.HTTP_302_FOUND)
assert_in(self.service_url, resp.headers['Location'])
# register behave the same as login
resp2 = self.app.get(self.url_register)
assert_equal(resp.headers['Location'], resp2.headers['Location'])
| apache-2.0 | -2,940,473,546,609,989,000 | 39.348315 | 104 | 0.635942 | false |
arrabito/DIRAC | DataManagementSystem/scripts/dirac-admin-ban-se.py | 3 | 8530 | #!/usr/bin/env python
########################################################################
# $HeadURL$
########################################################################
""" Ban one or more Storage Elements for usage
"""
__RCSID__ = "$Id$"
import DIRAC
from DIRAC.Core.Base import Script
read = True
write = True
check = True
remove = True
site = ''
mute = False
Script.setUsageMessage( """
Ban one or more Storage Elements for usage
Usage:
%s SE1 [SE2 ...]
""" % Script.scriptName )
Script.registerSwitch( "r" , "BanRead" , " Ban only reading from the storage element" )
Script.registerSwitch( "w" , "BanWrite", " Ban writing to the storage element" )
Script.registerSwitch( "k" , "BanCheck", " Ban check access to the storage element" )
Script.registerSwitch( "v" , "BanRemove", " Ban remove access to the storage element" )
Script.registerSwitch( "m" , "Mute" , " Do not send email" )
Script.registerSwitch( "S:", "Site=" , " Ban all SEs associated to the site (note that if writing is allowed, check is always allowed)" )
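# Example invocations (illustrative only; the SE and site names below are hypothetical):
#   dirac-admin-ban-se CERN-USER                    ban read, write, check and remove access
#   dirac-admin-ban-se -w CERN-USER CERN-FAILOVER   ban only writing to the listed SEs
#   dirac-admin-ban-se -S LCG.CERN.ch               ban every SE associated to the site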
Script.parseCommandLine( ignoreErrors = True )
ses = Script.getPositionalArgs()
for switch in Script.getUnprocessedSwitches():
if switch[0].lower() == "r" or switch[0].lower() == "banread":
write = False
check = False
remove = False
if switch[0].lower() == "w" or switch[0].lower() == "banwrite":
read = False
check = False
remove = False
if switch[0].lower() == "k" or switch[0].lower() == "bancheck":
read = False
write = False
remove = False
if switch[0].lower() == "v" or switch[0].lower() == "banremove":
read = False
write = False
check = False
if switch[0].lower() == "m" or switch[0].lower() == "mute":
mute = True
if switch[0] == "S" or switch[0].lower() == "site":
site = switch[1]
# from DIRAC.ConfigurationSystem.Client.CSAPI import CSAPI
from DIRAC.Interfaces.API.DiracAdmin import DiracAdmin
from DIRAC import gConfig, gLogger
from DIRAC.ResourceStatusSystem.Client.ResourceStatus import ResourceStatus
from DIRAC.ConfigurationSystem.Client.Helpers.Operations import Operations
from DIRAC.Core.Security.ProxyInfo import getProxyInfo
from DIRAC.DataManagementSystem.Utilities.DMSHelpers import resolveSEGroup
ses = resolveSEGroup( ses )
diracAdmin = DiracAdmin()
exitCode = 0
errorList = []
setup = gConfig.getValue( '/DIRAC/Setup', '' )
if not setup:
print 'ERROR: Could not contact Configuration Service'
exitCode = 2
DIRAC.exit( exitCode )
res = getProxyInfo()
if not res[ 'OK' ]:
gLogger.error( 'Failed to get proxy information', res[ 'Message' ] )
DIRAC.exit( 2 )
userName = res['Value'].get( 'username' )
if not userName:
gLogger.error( 'Failed to get username for proxy' )
DIRAC.exit( 2 )
if site:
res = gConfig.getOptionsDict( '/Resources/Sites/LCG/%s' % site )
if not res[ 'OK' ]:
gLogger.error( 'The provided site (%s) is not known.' % site )
DIRAC.exit( -1 )
ses.extend( res[ 'Value' ][ 'SE' ].replace( ' ', '' ).split( ',' ) )
if not ses:
gLogger.error( 'There were no SEs provided' )
DIRAC.exit( -1 )
readBanned = []
writeBanned = []
checkBanned = []
removeBanned = []
resourceStatus = ResourceStatus()
res = resourceStatus.getElementStatus( ses, "StorageElement" )
if not res['OK']:
gLogger.error( "Storage Element %s does not exist" % ses )
DIRAC.exit( -1 )
reason = 'Forced with dirac-admin-ban-se by %s' % userName
for se, seOptions in res[ 'Value' ].items():
resW = resC = resR = { 'OK' : False }
# Eventually, we will get rid of the notion of InActive, as we always write Banned.
if read and seOptions.has_key( 'ReadAccess' ):
if not seOptions[ 'ReadAccess' ] in [ 'Active', 'Degraded', 'Probing' ]:
gLogger.notice( 'Read option for %s is %s, instead of %s' % ( se, seOptions[ 'ReadAccess' ], [ 'Active', 'Degraded', 'Probing' ] ) )
gLogger.notice( 'Try specifying the command switches' )
else:
resR = resourceStatus.setElementStatus( se, 'StorageElement', 'ReadAccess', 'Banned', reason, userName )
# res = csAPI.setOption( "%s/%s/ReadAccess" % ( storageCFGBase, se ), "InActive" )
if not resR['OK']:
gLogger.error( 'Failed to update %s read access to Banned' % se )
else:
gLogger.notice( 'Successfully updated %s read access to Banned' % se )
readBanned.append( se )
# Eventually, we will get rid of the notion of InActive, as we always write Banned.
if write and seOptions.has_key( 'WriteAccess' ):
if not seOptions[ 'WriteAccess' ] in [ 'Active', 'Degraded', 'Probing' ]:
gLogger.notice( 'Write option for %s is %s, instead of %s' % ( se, seOptions[ 'WriteAccess' ], [ 'Active', 'Degraded', 'Probing' ] ) )
gLogger.notice( 'Try specifying the command switches' )
else:
resW = resourceStatus.setElementStatus( se, 'StorageElement', 'WriteAccess', 'Banned', reason, userName )
# res = csAPI.setOption( "%s/%s/WriteAccess" % ( storageCFGBase, se ), "InActive" )
if not resW['OK']:
gLogger.error( "Failed to update %s write access to Banned" % se )
else:
gLogger.notice( "Successfully updated %s write access to Banned" % se )
writeBanned.append( se )
# Eventually, we will get rid of the notion of InActive, as we always write Banned.
if check and seOptions.has_key( 'CheckAccess' ):
if not seOptions[ 'CheckAccess' ] in [ 'Active', 'Degraded', 'Probing' ]:
gLogger.notice( 'Check option for %s is %s, instead of %s' % ( se, seOptions[ 'CheckAccess' ], [ 'Active', 'Degraded', 'Probing' ] ) )
gLogger.notice( 'Try specifying the command switches' )
else:
resC = resourceStatus.setElementStatus( se, 'StorageElement', 'CheckAccess', 'Banned', reason, userName )
# res = csAPI.setOption( "%s/%s/CheckAccess" % ( storageCFGBase, se ), "InActive" )
if not resC['OK']:
gLogger.error( "Failed to update %s check access to Banned" % se )
else:
gLogger.notice( "Successfully updated %s check access to Banned" % se )
checkBanned.append( se )
# Eventually, we will get rid of the notion of InActive, as we always write Banned.
if remove and seOptions.has_key( 'RemoveAccess' ):
if not seOptions[ 'RemoveAccess' ] in [ 'Active', 'Degraded', 'Probing' ]:
gLogger.notice( 'Remove option for %s is %s, instead of %s' % ( se, seOptions[ 'RemoveAccess' ], [ 'Active', 'Degraded', 'Probing' ] ) )
gLogger.notice( 'Try specifying the command switches' )
else:
resC = resourceStatus.setElementStatus( se, 'StorageElement', 'RemoveAccess', 'Banned', reason, userName )
# res = csAPI.setOption( "%s/%s/CheckAccess" % ( storageCFGBase, se ), "InActive" )
if not resC['OK']:
gLogger.error( "Failed to update %s remove access to Banned" % se )
else:
gLogger.notice( "Successfully updated %s remove access to Banned" % se )
removeBanned.append( se )
if not( resR['OK'] or resW['OK'] or resC['OK'] ):
DIRAC.exit( -1 )
if not ( writeBanned or readBanned or checkBanned or removeBanned ):
gLogger.notice( "No storage elements were banned" )
DIRAC.exit( -1 )
if mute:
gLogger.notice( 'Email is muted by script switch' )
DIRAC.exit( 0 )
subject = '%s storage elements banned for use' % len( writeBanned + readBanned + checkBanned + removeBanned )
addressPath = 'EMail/Production'
address = Operations().getValue( addressPath, '' )
body = ''
if read:
body = "%s\n\nThe following storage elements were banned for reading:" % body
for se in readBanned:
body = "%s\n%s" % ( body, se )
if write:
body = "%s\n\nThe following storage elements were banned for writing:" % body
for se in writeBanned:
body = "%s\n%s" % ( body, se )
if check:
body = "%s\n\nThe following storage elements were banned for check access:" % body
for se in checkBanned:
body = "%s\n%s" % ( body, se )
if remove:
body = "%s\n\nThe following storage elements were banned for remove access:" % body
for se in removeBanned:
body = "%s\n%s" % ( body, se )
if not address:
gLogger.notice( "'%s' not defined in Operations, can not send Mail\n" % addressPath, body )
DIRAC.exit( 0 )
res = diracAdmin.sendMail( address, subject, body )
gLogger.notice( 'Notifying %s' % address )
if res[ 'OK' ]:
gLogger.notice( res[ 'Value' ] )
else:
gLogger.notice( res[ 'Message' ] )
DIRAC.exit( 0 )
| gpl-3.0 | -5,042,081,368,126,277,000 | 37.772727 | 142 | 0.635873 | false |
rickerc/cinder_audit | cinder/brick/iscsi/iscsi.py | 2 | 20195 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Helper code for the iSCSI volume driver.
"""
import contextlib
import os
import re
import stat
import time
from cinder.brick import exception
from cinder.brick import executor
from cinder.openstack.common import fileutils
from cinder.openstack.common.gettextutils import _
from cinder.openstack.common import log as logging
from cinder.openstack.common import processutils as putils
LOG = logging.getLogger(__name__)
class TargetAdmin(executor.Executor):
"""iSCSI target administration.
Base class for iSCSI target admin helpers.
"""
def __init__(self, cmd, root_helper, execute):
super(TargetAdmin, self).__init__(root_helper, execute=execute)
self._cmd = cmd
def _run(self, *args, **kwargs):
self._execute(self._cmd, *args, run_as_root=True, **kwargs)
def create_iscsi_target(self, name, tid, lun, path,
chap_auth=None, **kwargs):
"""Create a iSCSI target and logical unit."""
raise NotImplementedError()
def remove_iscsi_target(self, tid, lun, vol_id, vol_name, **kwargs):
"""Remove a iSCSI target and logical unit."""
raise NotImplementedError()
def _new_target(self, name, tid, **kwargs):
"""Create a new iSCSI target."""
raise NotImplementedError()
def _delete_target(self, tid, **kwargs):
"""Delete a target."""
raise NotImplementedError()
def show_target(self, tid, iqn=None, **kwargs):
"""Query the given target ID."""
raise NotImplementedError()
def _new_logicalunit(self, tid, lun, path, **kwargs):
"""Create a new LUN on a target using the supplied path."""
raise NotImplementedError()
def _delete_logicalunit(self, tid, lun, **kwargs):
"""Delete a logical unit from a target."""
raise NotImplementedError()
class TgtAdm(TargetAdmin):
"""iSCSI target administration using tgtadm."""
def __init__(self, root_helper, volumes_dir,
target_prefix='iqn.2010-10.org.openstack:',
execute=putils.execute):
super(TgtAdm, self).__init__('tgtadm', root_helper, execute)
self.iscsi_target_prefix = target_prefix
self.volumes_dir = volumes_dir
def _get_target(self, iqn):
(out, err) = self._execute('tgt-admin', '--show', run_as_root=True)
lines = out.split('\n')
for line in lines:
if iqn in line:
parsed = line.split()
tid = parsed[1]
return tid[:-1]
return None
def _verify_backing_lun(self, iqn, tid):
backing_lun = True
capture = False
target_info = []
(out, err) = self._execute('tgt-admin', '--show', run_as_root=True)
lines = out.split('\n')
for line in lines:
if iqn in line and "Target %s" % tid in line:
capture = True
if capture:
target_info.append(line)
if iqn not in line and 'Target ' in line:
capture = False
if ' LUN: 1' not in target_info:
backing_lun = False
return backing_lun
def _recreate_backing_lun(self, iqn, tid, name, path):
LOG.warning(_('Attempting recreate of backing lun...'))
# Since we think the most common case of this is a dev busy
# (create vol from snapshot) we're going to add a sleep here
# this will hopefully give things enough time to stabilize
# how long should we wait?? I have no idea, let's go big
        # and err on the side of caution
time.sleep(10)
try:
(out, err) = self._execute('tgtadm', '--lld', 'iscsi',
'--op', 'new', '--mode',
'logicalunit', '--tid',
tid, '--lun', '1', '-b',
path, run_as_root=True)
LOG.debug('StdOut from recreate backing lun: %s' % out)
LOG.debug('StdErr from recreate backing lun: %s' % err)
except putils.ProcessExecutionError as e:
LOG.error(_("Failed to recover attempt to create "
"iscsi backing lun for volume "
"id:%(vol_id)s: %(e)s")
% {'vol_id': name, 'e': str(e)})
def create_iscsi_target(self, name, tid, lun, path,
chap_auth=None, **kwargs):
# Note(jdg) tid and lun aren't used by TgtAdm but remain for
# compatibility
fileutils.ensure_tree(self.volumes_dir)
vol_id = name.split(':')[1]
if chap_auth is None:
volume_conf = """
<target %s>
backing-store %s
</target>
""" % (name, path)
else:
volume_conf = """
<target %s>
backing-store %s
%s
</target>
""" % (name, path, chap_auth)
LOG.info(_('Creating iscsi_target for: %s') % vol_id)
volumes_dir = self.volumes_dir
volume_path = os.path.join(volumes_dir, vol_id)
f = open(volume_path, 'w+')
f.write(volume_conf)
f.close()
old_persist_file = None
old_name = kwargs.get('old_name', None)
if old_name is not None:
old_persist_file = os.path.join(volumes_dir, old_name)
try:
(out, err) = self._execute('tgt-admin',
'--update',
name,
run_as_root=True)
LOG.debug("StdOut from tgt-admin --update: %s" % out)
LOG.debug("StdErr from tgt-admin --update: %s" % err)
# Grab targets list for debug
# Consider adding a check for lun 0 and 1 for tgtadm
# before considering this as valid
(out, err) = self._execute('tgtadm',
'--lld',
'iscsi',
'--op',
'show',
'--mode',
'target',
run_as_root=True)
LOG.debug("Targets after update: %s" % out)
except putils.ProcessExecutionError as e:
LOG.error(_("Failed to create iscsi target for volume "
"id:%(vol_id)s: %(e)s")
% {'vol_id': vol_id, 'e': str(e)})
            # Don't forget to remove the persistent file we created
os.unlink(volume_path)
raise exception.ISCSITargetCreateFailed(volume_id=vol_id)
iqn = '%s%s' % (self.iscsi_target_prefix, vol_id)
tid = self._get_target(iqn)
if tid is None:
LOG.error(_("Failed to create iscsi target for volume "
"id:%(vol_id)s. Please ensure your tgtd config file "
"contains 'include %(volumes_dir)s/*'") % {
'vol_id': vol_id,
'volumes_dir': volumes_dir,
})
raise exception.NotFound()
# NOTE(jdg): Sometimes we have some issues with the backing lun
# not being created, believe this is due to a device busy
# or something related, so we're going to add some code
# here that verifies the backing lun (lun 1) was created
# and we'll try and recreate it if it's not there
if not self._verify_backing_lun(iqn, tid):
try:
self._recreate_backing_lun(iqn, tid, name, path)
except putils.ProcessExecutionError:
os.unlink(volume_path)
raise exception.ISCSITargetCreateFailed(volume_id=vol_id)
# Finally check once more and if no go, fail and punt
if not self._verify_backing_lun(iqn, tid):
os.unlink(volume_path)
raise exception.ISCSITargetCreateFailed(volume_id=vol_id)
if old_persist_file is not None and os.path.exists(old_persist_file):
os.unlink(old_persist_file)
return tid
def remove_iscsi_target(self, tid, lun, vol_id, vol_name, **kwargs):
LOG.info(_('Removing iscsi_target for: %s') % vol_id)
vol_uuid_file = vol_name
volume_path = os.path.join(self.volumes_dir, vol_uuid_file)
if os.path.isfile(volume_path):
iqn = '%s%s' % (self.iscsi_target_prefix,
vol_uuid_file)
else:
raise exception.ISCSITargetRemoveFailed(volume_id=vol_id)
try:
# NOTE(vish): --force is a workaround for bug:
# https://bugs.launchpad.net/cinder/+bug/1159948
self._execute('tgt-admin',
'--force',
'--delete',
iqn,
run_as_root=True)
except putils.ProcessExecutionError as e:
LOG.error(_("Failed to remove iscsi target for volume "
"id:%(vol_id)s: %(e)s")
% {'vol_id': vol_id, 'e': str(e)})
raise exception.ISCSITargetRemoveFailed(volume_id=vol_id)
os.unlink(volume_path)
def show_target(self, tid, iqn=None, **kwargs):
if iqn is None:
raise exception.InvalidParameterValue(
err=_('valid iqn needed for show_target'))
tid = self._get_target(iqn)
if tid is None:
raise exception.NotFound()
class IetAdm(TargetAdmin):
"""iSCSI target administration using ietadm."""
def __init__(self, root_helper, iet_conf='/etc/iet/ietd.conf',
iscsi_iotype='fileio', execute=putils.execute):
super(IetAdm, self).__init__('ietadm', root_helper, execute)
self.iet_conf = iet_conf
self.iscsi_iotype = iscsi_iotype
def _is_block(self, path):
mode = os.stat(path).st_mode
return stat.S_ISBLK(mode)
def _iotype(self, path):
if self.iscsi_iotype == 'auto':
return 'blockio' if self._is_block(path) else 'fileio'
else:
return self.iscsi_iotype
@contextlib.contextmanager
def temporary_chown(self, path, owner_uid=None):
"""Temporarily chown a path.
        :param path: The path to chown
        :param owner_uid: UID of temporary owner (defaults to current user)
"""
if owner_uid is None:
owner_uid = os.getuid()
orig_uid = os.stat(path).st_uid
if orig_uid != owner_uid:
putils.execute('chown', owner_uid, path,
root_helper=self._root_helper, run_as_root=True)
try:
yield
finally:
if orig_uid != owner_uid:
putils.execute('chown', orig_uid, path,
root_helper=self._root_helper, run_as_root=True)
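    # Illustrative note (added for clarity; not part of the original driver):
    # temporary_chown is used below to edit the IET config file as the current
    # uid, e.g.
    #
    #     with self.temporary_chown(self.iet_conf):
    #         ...  # rewrite /etc/iet/ietd.conf; ownership is restored on exit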
def create_iscsi_target(self, name, tid, lun, path,
chap_auth=None, **kwargs):
# NOTE (jdg): Address bug: 1175207
kwargs.pop('old_name', None)
self._new_target(name, tid, **kwargs)
self._new_logicalunit(tid, lun, path, **kwargs)
if chap_auth is not None:
(type, username, password) = chap_auth.split()
self._new_auth(tid, type, username, password, **kwargs)
conf_file = self.iet_conf
if os.path.exists(conf_file):
try:
volume_conf = """
Target %s
%s
Lun 0 Path=%s,Type=%s
""" % (name, chap_auth, path, self._iotype(path))
with self.temporary_chown(conf_file):
f = open(conf_file, 'a+')
f.write(volume_conf)
f.close()
except putils.ProcessExecutionError as e:
vol_id = name.split(':')[1]
LOG.error(_("Failed to create iscsi target for volume "
"id:%(vol_id)s: %(e)s")
% {'vol_id': vol_id, 'e': str(e)})
raise exception.ISCSITargetCreateFailed(volume_id=vol_id)
return tid
def remove_iscsi_target(self, tid, lun, vol_id, vol_name, **kwargs):
LOG.info(_('Removing iscsi_target for volume: %s') % vol_id)
self._delete_logicalunit(tid, lun, **kwargs)
self._delete_target(tid, **kwargs)
vol_uuid_file = vol_name
conf_file = self.iet_conf
if os.path.exists(conf_file):
with self.temporary_chown(conf_file):
try:
iet_conf_text = open(conf_file, 'r+')
full_txt = iet_conf_text.readlines()
new_iet_conf_txt = []
count = 0
for line in full_txt:
if count > 0:
count -= 1
continue
elif re.search(vol_uuid_file, line):
count = 2
continue
else:
new_iet_conf_txt.append(line)
iet_conf_text.seek(0)
iet_conf_text.truncate(0)
iet_conf_text.writelines(new_iet_conf_txt)
finally:
iet_conf_text.close()
def _new_target(self, name, tid, **kwargs):
self._run('--op', 'new',
'--tid=%s' % tid,
'--params', 'Name=%s' % name,
**kwargs)
def _delete_target(self, tid, **kwargs):
self._run('--op', 'delete',
'--tid=%s' % tid,
**kwargs)
def show_target(self, tid, iqn=None, **kwargs):
self._run('--op', 'show',
'--tid=%s' % tid,
**kwargs)
def _new_logicalunit(self, tid, lun, path, **kwargs):
self._run('--op', 'new',
'--tid=%s' % tid,
'--lun=%d' % lun,
'--params', 'Path=%s,Type=%s' % (path, self._iotype(path)),
**kwargs)
def _delete_logicalunit(self, tid, lun, **kwargs):
self._run('--op', 'delete',
'--tid=%s' % tid,
'--lun=%d' % lun,
**kwargs)
def _new_auth(self, tid, type, username, password, **kwargs):
self._run('--op', 'new',
'--tid=%s' % tid,
'--user',
'--params=%s=%s,Password=%s' % (type, username, password),
**kwargs)
class FakeIscsiHelper(object):
def __init__(self):
self.tid = 1
def set_execute(self, execute):
self._execute = execute
def create_iscsi_target(self, *args, **kwargs):
self.tid += 1
return self.tid
class LioAdm(TargetAdmin):
"""iSCSI target administration for LIO using python-rtslib."""
def __init__(self, root_helper, lio_initiator_iqns='',
iscsi_target_prefix='iqn.2010-10.org.openstack:',
execute=putils.execute):
super(LioAdm, self).__init__('rtstool', root_helper, execute)
self.iscsi_target_prefix = iscsi_target_prefix
self.lio_initiator_iqns = lio_initiator_iqns
self._verify_rtstool()
def _verify_rtstool(self):
try:
self._execute('rtstool', 'verify')
except (OSError, putils.ProcessExecutionError):
LOG.error(_('rtstool is not installed correctly'))
raise
def _get_target(self, iqn):
(out, err) = self._execute('rtstool',
'get-targets',
run_as_root=True)
lines = out.split('\n')
for line in lines:
if iqn in line:
return line
return None
def create_iscsi_target(self, name, tid, lun, path,
chap_auth=None, **kwargs):
# tid and lun are not used
vol_id = name.split(':')[1]
LOG.info(_('Creating iscsi_target for volume: %s') % vol_id)
# rtstool requires chap_auth, but unit tests don't provide it
chap_auth_userid = 'test_id'
chap_auth_password = 'test_pass'
if chap_auth is not None:
(chap_auth_userid, chap_auth_password) = chap_auth.split(' ')[1:]
extra_args = []
if self.lio_initiator_iqns:
extra_args.append(self.lio_initiator_iqns)
try:
command_args = ['rtstool',
'create',
path,
name,
chap_auth_userid,
chap_auth_password]
if extra_args:
command_args.extend(extra_args)
self._execute(*command_args, run_as_root=True)
except putils.ProcessExecutionError as e:
LOG.error(_("Failed to create iscsi target for volume "
"id:%s.") % vol_id)
LOG.error("%s" % str(e))
raise exception.ISCSITargetCreateFailed(volume_id=vol_id)
iqn = '%s%s' % (self.iscsi_target_prefix, vol_id)
tid = self._get_target(iqn)
if tid is None:
LOG.error(_("Failed to create iscsi target for volume "
"id:%s.") % vol_id)
raise exception.NotFound()
return tid
def remove_iscsi_target(self, tid, lun, vol_id, vol_name, **kwargs):
LOG.info(_('Removing iscsi_target: %s') % vol_id)
vol_uuid_name = vol_name
iqn = '%s%s' % (self.iscsi_target_prefix, vol_uuid_name)
try:
self._execute('rtstool',
'delete',
iqn,
run_as_root=True)
except putils.ProcessExecutionError as e:
LOG.error(_("Failed to remove iscsi target for volume "
"id:%s.") % vol_id)
LOG.error("%s" % str(e))
raise exception.ISCSITargetRemoveFailed(volume_id=vol_id)
def show_target(self, tid, iqn=None, **kwargs):
if iqn is None:
raise exception.InvalidParameterValue(
err=_('valid iqn needed for show_target'))
tid = self._get_target(iqn)
if tid is None:
raise exception.NotFound()
def initialize_connection(self, volume, connector):
volume_iqn = volume['provider_location'].split(' ')[1]
(auth_method, auth_user, auth_pass) = \
volume['provider_auth'].split(' ', 3)
# Add initiator iqns to target ACL
try:
self._execute('rtstool', 'add-initiator',
volume_iqn,
auth_user,
auth_pass,
connector['initiator'],
run_as_root=True)
except putils.ProcessExecutionError:
LOG.error(_("Failed to add initiator iqn %s to target") %
connector['initiator'])
raise exception.ISCSITargetAttachFailed(volume_id=volume['id'])
| apache-2.0 | -1,404,564,132,860,269,000 | 35.585145 | 79 | 0.511661 | false |
alivecor/tensorflow | tensorflow/python/eager/tape.py | 1 | 6912 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Gradient tape utilities."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import threading
from autograd import container_types
from autograd import core as ag_core
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.util import nest
from tensorflow.python.util import tf_contextlib
class ImplicitTape(object):
"""Global object which can watch tensors and wrap them with autograd."""
def __init__(self):
self.tensors = {}
self.gradients = []
def __eq__(self, other):
return self is other
def __hash__(self):
return id(self)
@ag_core.primitive
def _watch_with_tape_internal(_, tensor):
"""Primitive to wrap a tensor around an ImplicitTape progenitor."""
return tensor
def _watch_with_tape(tape, tensor):
"""Wraps a watched Tensor and keeps track of it in the implicit tape."""
w = _watch_with_tape_internal(tape, tensor)
if ag_core.isnode(tape):
tape.value.tensors[ops.tensor_id(tensor)] = w
return w
def _watch_with_tape_vjp(g, ans, vs, gvs, tape, tensor):
"""Gradient for _watch_with_tape_internal."""
del ans, gvs, tape
def mut_add(implicit_tape):
t = ag_core.getval(tensor)
implicit_tape.gradients.append((t, g))
return implicit_tape
return ag_core.SparseObject(vs, mut_add)
_watch_with_tape_internal.defvjp(_watch_with_tape_vjp, argnum=0)
_watch_with_tape_internal.defvjp(
lambda g, ans, vs, gvs, tape, tensor: g,
argnum=1)
class ImplicitTapeVSpace(ag_core.VSpace):
"""VSpace needed to have ImplicitTape be a valid progenitor."""
def zeros(self):
return ImplicitTape()
class ImplicitTapeNode(ag_core.Node):
"""Node to wrap ImplicitTape in."""
def __eq__(self, other):
return self is other
def __hash__(self):
return id(self)
ag_core.register_node(ImplicitTapeNode, ImplicitTape)
ag_core.register_vspace(ImplicitTapeVSpace, ImplicitTape)
# TODO(apassos) try to not do this.
class NoneVSpace(ag_core.VSpace):
"""VSpace for python None."""
def __init__(self, _):
self.size = 0
def zeros(self):
return 0
ag_core.register_vspace(NoneVSpace, type(None))
class _TapeStack(threading.local):
def __init__(self):
super(_TapeStack, self).__init__()
self._stack = []
@property
def stack(self):
return self._stack
@tf_contextlib.contextmanager
def replace_stack(self, new_stack):
old = self._stack
self._stack = new_stack
yield
self._stack = old
# The global tape stack.
_tape_stack = _TapeStack()
def push_new_tape():
"""Pushes a new tape onto the tape stack."""
progenitor = ag_core.new_progenitor(ImplicitTape())
_tape_stack.stack.append(progenitor)
ag_core.active_progenitors.add(progenitor)
def watch(tensor):
"""Marks this tensor to be watched by all tapes in the stack.
Args:
tensor: tensor to be watched.
Returns:
The tensor, potentially wrapped by all tapes in the stack.
"""
for t in _tape_stack.stack:
tensor = _watch_with_tape(t, tensor)
return tensor
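# Illustrative calling pattern (added for clarity; not part of the original
# module). A gradient computation is expected to push a tape, watch its
# inputs, run ops (which call record_operation), and then pop the tape to
# read the accumulated gradients:
#
#   push_new_tape()
#   x = watch(x)          # x is now wrapped by every tape on the stack
#   ...                   # run ops; record_operation(...) traces them
#   tape = pop_tape()     # progenitor holding the ImplicitTape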
def watch_variable(resource_variable):
"""Marks this ResourceVariable to be watched by all tapes in the stack.
Args:
resource_variable: A ResourceVariable to be watched.
"""
watch(resource_variable.handle) # py-lint: disable=protected-access
def pop_tape():
"""Pops the top tape in the stack, if any."""
if _tape_stack.stack:
return _tape_stack.stack.pop()
return None
def any_tape_has(tensor):
for t in _tape_stack.stack:
if ops.tensor_id(tensor) in t.value.tensors:
return True
return False
def should_record(tensors):
  """Returns true if any tape in the stack watches any of these tensors."""
return any(ag_core.isnode(x) for x in tensors)
class _EagerSequenceNode(container_types.SequenceNode):
"""Eager version of SequenceNode, to live in EagerSequenceVSpace."""
pass
class _EagerSequenceVSpace(container_types.SequenceVSpace):
"""Changes equality on SequenceVSpace to conform to tfe requirements."""
def __init__(self, value):
self.shape = [ag_core.vspace(x) for x in value]
self.size = sum(s.size for s in self.shape)
self.sequence_type = type(value)
def __eq__(self, other):
if type(self) != type(other): # pylint: disable=unidiomatic-typecheck
return False
if len(self.shape) != len(other.shape):
# TODO(apassos) function gradients sometimes return gradients for side
# inputs which breaks this assertion. Understand how to fix it.
return True
for ss, os in zip(self.shape, other.shape):
if ss != os:
if isinstance(ss, NoneVSpace) or isinstance(os, NoneVSpace):
continue
if ss.dtype == dtypes.resource or os.dtype == dtypes.resource:
continue
return False
return True
class EagerList(list):
"""Type used to bypass SequenceVSpace.
SequenceVSpace has a very strict equality check which does not match
tensorflow semantics.
"""
def __init__(self, value):
super(EagerList, self).__init__(value)
for v in value:
assert not ag_core.isnode(v)
ag_core.register_vspace(_EagerSequenceVSpace, EagerList)
ag_core.register_node(_EagerSequenceNode, EagerList)
@ag_core.primitive
def _record_operation(output_tensors, input_tensors, side_outputs,
backward_function):
del input_tensors, side_outputs, backward_function
return EagerList(output_tensors)
def record_operation(o, i, s, b):
"""Primitive to trigger autograd tracing on outputs from inputs."""
inputs = container_types.make_sequence(EagerList, *i)
return _record_operation(o, inputs, s, b)
def _record_operation_vjp(g, ans, vs, gvs, output_tensors, input_tensors,
side_outputs, backward_function):
"""Gradient for _record_operation."""
del vs, gvs, input_tensors, output_tensors
backward_args = tuple(g) + tuple(side_outputs)
backward_args = container_types.make_sequence(
EagerList, *(tuple(ans) + backward_args))
tensors = nest.flatten(backward_function(*backward_args))
return container_types.make_sequence(EagerList, *tensors)
_record_operation.defvjp(_record_operation_vjp, argnum=1)
| apache-2.0 | -5,765,169,987,112,648,000 | 26.537849 | 80 | 0.692274 | false |
vedujoshi/tempest | tempest/api/identity/admin/v2/test_tenant_negative.py | 4 | 6661 | # Copyright 2013 Huawei Technologies Co.,LTD.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.identity import base
from tempest.lib.common.utils import data_utils
from tempest.lib import decorators
from tempest.lib import exceptions as lib_exc
class TenantsNegativeTestJSON(base.BaseIdentityV2AdminTest):
@decorators.attr(type=['negative'])
@decorators.idempotent_id('ca9bb202-63dd-4240-8a07-8ef9c19c04bb')
def test_list_tenants_by_unauthorized_user(self):
# Non-administrator user should not be able to list tenants
self.assertRaises(lib_exc.Forbidden,
self.non_admin_tenants_client.list_tenants)
@decorators.attr(type=['negative'])
@decorators.idempotent_id('df33926c-1c96-4d8d-a762-79cc6b0c3cf4')
def test_list_tenant_request_without_token(self):
# Request to list tenants without a valid token should fail
token = self.client.auth_provider.get_token()
self.client.delete_token(token)
self.assertRaises(lib_exc.Unauthorized,
self.tenants_client.list_tenants)
self.client.auth_provider.clear_auth()
@decorators.attr(type=['negative'])
@decorators.idempotent_id('162ba316-f18b-4987-8c0c-fd9140cd63ed')
def test_tenant_delete_by_unauthorized_user(self):
# Non-administrator user should not be able to delete a tenant
tenant = self.setup_test_tenant()
self.assertRaises(lib_exc.Forbidden,
self.non_admin_tenants_client.delete_tenant,
tenant['id'])
@decorators.attr(type=['negative'])
@decorators.idempotent_id('e450db62-2e9d-418f-893a-54772d6386b1')
def test_tenant_delete_request_without_token(self):
# Request to delete a tenant without a valid token should fail
tenant = self.setup_test_tenant()
token = self.client.auth_provider.get_token()
self.client.delete_token(token)
self.assertRaises(lib_exc.Unauthorized,
self.tenants_client.delete_tenant,
tenant['id'])
self.client.auth_provider.clear_auth()
@decorators.attr(type=['negative'])
@decorators.idempotent_id('9c9a2aed-6e3c-467a-8f5c-89da9d1b516b')
def test_delete_non_existent_tenant(self):
# Attempt to delete a non existent tenant should fail
self.assertRaises(lib_exc.NotFound, self.tenants_client.delete_tenant,
data_utils.rand_uuid_hex())
@decorators.attr(type=['negative'])
@decorators.idempotent_id('af16f44b-a849-46cb-9f13-a751c388f739')
def test_tenant_create_duplicate(self):
# Tenant names should be unique
tenant_name = data_utils.rand_name(name='tenant')
self.setup_test_tenant(name=tenant_name)
self.assertRaises(lib_exc.Conflict, self.tenants_client.create_tenant,
name=tenant_name)
@decorators.attr(type=['negative'])
@decorators.idempotent_id('d26b278a-6389-4702-8d6e-5980d80137e0')
def test_create_tenant_by_unauthorized_user(self):
# Non-administrator user should not be authorized to create a tenant
tenant_name = data_utils.rand_name(name='tenant')
self.assertRaises(lib_exc.Forbidden,
self.non_admin_tenants_client.create_tenant,
name=tenant_name)
@decorators.attr(type=['negative'])
@decorators.idempotent_id('a3ee9d7e-6920-4dd5-9321-d4b2b7f0a638')
def test_create_tenant_request_without_token(self):
# Create tenant request without a token should not be authorized
tenant_name = data_utils.rand_name(name='tenant')
token = self.client.auth_provider.get_token()
self.client.delete_token(token)
self.assertRaises(lib_exc.Unauthorized,
self.tenants_client.create_tenant,
name=tenant_name)
self.client.auth_provider.clear_auth()
@decorators.attr(type=['negative'])
@decorators.idempotent_id('5a2e4ca9-b0c0-486c-9c48-64a94fba2395')
def test_create_tenant_with_empty_name(self):
# Tenant name should not be empty
self.assertRaises(lib_exc.BadRequest,
self.tenants_client.create_tenant,
name='')
@decorators.attr(type=['negative'])
@decorators.idempotent_id('2ff18d1e-dfe3-4359-9dc3-abf582c196b9')
def test_create_tenants_name_length_over_64(self):
# Tenant name length should not be greater than 64 characters
tenant_name = 'a' * 65
self.assertRaises(lib_exc.BadRequest,
self.tenants_client.create_tenant,
name=tenant_name)
@decorators.attr(type=['negative'])
@decorators.idempotent_id('bd20dc2a-9557-4db7-b755-f48d952ad706')
def test_update_non_existent_tenant(self):
# Attempt to update a non existent tenant should fail
self.assertRaises(lib_exc.NotFound, self.tenants_client.update_tenant,
data_utils.rand_uuid_hex())
@decorators.attr(type=['negative'])
@decorators.idempotent_id('41704dc5-c5f7-4f79-abfa-76e6fedc570b')
def test_tenant_update_by_unauthorized_user(self):
# Non-administrator user should not be able to update a tenant
tenant = self.setup_test_tenant()
self.assertRaises(lib_exc.Forbidden,
self.non_admin_tenants_client.update_tenant,
tenant['id'])
@decorators.attr(type=['negative'])
@decorators.idempotent_id('7a421573-72c7-4c22-a98e-ce539219c657')
def test_tenant_update_request_without_token(self):
# Request to update a tenant without a valid token should fail
tenant = self.setup_test_tenant()
token = self.client.auth_provider.get_token()
self.client.delete_token(token)
self.assertRaises(lib_exc.Unauthorized,
self.tenants_client.update_tenant,
tenant['id'])
self.client.auth_provider.clear_auth()
| apache-2.0 | 5,036,718,531,753,227,000 | 45.908451 | 78 | 0.657409 | false |
gerv/bedrock | tests/functional/firefox/desktop/test_customize.py | 8 | 1601 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import pytest
from pages.firefox.desktop.customize import CustomizePage
@pytest.mark.nondestructive
def test_customizer_click_nav_links(base_url, selenium):
page = CustomizePage(selenium, base_url).open()
links = page.customize_links
sections = page.customize_sections
assert sections[0].is_displayed
assert links[0].is_selected
for i in range(1, len(links)):
links[i].click()
assert links[i].is_selected
assert sections[i].is_displayed
@pytest.mark.nondestructive
def test_customizer_click_next(base_url, selenium):
page = CustomizePage(selenium, base_url).open()
sections = page.customize_sections
assert sections[0].is_displayed
for i in range(len(sections)):
assert sections[i].is_displayed
sections[i].click_next()
assert sections[0].is_displayed
@pytest.mark.nondestructive
def test_theme_buttons(base_url, selenium):
page = CustomizePage(selenium, base_url).open()
themes = page.themes
assert themes[-1].is_selected
assert themes[-1].is_image_displayed
for i in range(len(themes) - 2, -1, -1):
themes[i].click_button()
assert themes[i].is_selected
assert themes[i].is_image_displayed
@pytest.mark.nondestructive
def test_sync_button_displayed(base_url, selenium):
page = CustomizePage(selenium, base_url).open()
assert page.is_sync_button_displayed
| mpl-2.0 | 3,626,827,773,186,249,700 | 31.673469 | 69 | 0.703935 | false |
DoctorJellyface/powerline | powerline/lib/unicode.py | 33 | 9268 | # vim:fileencoding=utf-8:noet
from __future__ import (unicode_literals, division, absolute_import, print_function)
import sys
import codecs
from unicodedata import east_asian_width, combining
from powerline.lib.encoding import get_preferred_output_encoding
try:
from __builtin__ import unicode
except ImportError:
unicode = str
try:
from __builtin__ import unichr
except ImportError:
unichr = chr
if sys.maxunicode < 0x10FFFF:
_unichr = unichr
def unichr(ch):
if ch <= sys.maxunicode:
return _unichr(ch)
else:
ch -= 0x10000
return _unichr((ch >> 10) + 0xD800) + _unichr((ch & ((1 << 10) - 1)) + 0xDC00)
def u(s):
'''Return unicode instance assuming UTF-8 encoded string.
'''
if type(s) is unicode:
return s
else:
return unicode(s, 'utf-8')
if sys.version_info < (3,):
def tointiter(s):
'''Convert a byte string to the sequence of integers
'''
return (ord(c) for c in s)
else:
def tointiter(s):
'''Convert a byte string to the sequence of integers
'''
return iter(s)
def powerline_decode_error(e):
if not isinstance(e, UnicodeDecodeError):
raise NotImplementedError
return (''.join((
'<{0:02X}>'.format(c)
for c in tointiter(e.object[e.start:e.end])
)), e.end)
codecs.register_error('powerline_decode_error', powerline_decode_error)
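# Example (added for clarity, not part of the original module): undecodable
# bytes are rendered as hex escapes instead of raising, e.g. on Python 3
#
#     b'a\xffb'.decode('utf-8', 'powerline_decode_error') == 'a<FF>b'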
last_swe_idx = 0
def register_strwidth_error(strwidth):
'''Create new encode errors handling method similar to ``replace``
Like ``replace`` this method uses question marks in place of the characters
that cannot be represented in the requested encoding. Unlike ``replace`` the
amount of question marks is identical to the amount of display cells
offending character occupies. Thus encoding ``…`` (U+2026, HORIZONTAL
ELLIPSIS) to ``latin1`` will emit one question mark, but encoding ``A``
(U+FF21, FULLWIDTH LATIN CAPITAL LETTER A) will emit two question marks.
	Since the width of some characters depends on the terminal settings, and
	powerline knows how to respect them, a single error handling method cannot be
	used. Instead a generator function is used which takes a ``strwidth``
	function (a function that knows how to compute string width respecting all
	needed settings) and emits a new error handling method name.
:param function strwidth:
		Function that computes string width measured in display cells the string
occupies when displayed.
:return: New error handling method name.
'''
global last_swe_idx
last_swe_idx += 1
def powerline_encode_strwidth_error(e):
if not isinstance(e, UnicodeEncodeError):
raise NotImplementedError
return ('?' * strwidth(e.object[e.start:e.end]), e.end)
ename = 'powerline_encode_strwidth_error_{0}'.format(last_swe_idx)
codecs.register_error(ename, powerline_encode_strwidth_error)
return ename
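# Illustrative example (added for clarity; not part of the original module).
# With a hypothetical width function that counts Fullwidth/Wide characters as
# two cells, encoding U+FF21 (FULLWIDTH LATIN CAPITAL LETTER A) to latin-1
# yields two question marks, matching the two display cells it occupies:
#
#     handler = register_strwidth_error(
#         lambda s: sum((2 if east_asian_width(c) in 'FW' else 1) for c in s))
#     'A\uff21'.encode('latin1', handler)  # == b'A??' (Python 3)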
def out_u(s):
'''Return unicode string suitable for displaying
Unlike other functions assumes get_preferred_output_encoding() first. Unlike
u() does not throw exceptions for invalid unicode strings. Unlike
safe_unicode() does throw an exception if object is not a string.
'''
if isinstance(s, unicode):
return s
elif isinstance(s, bytes):
return unicode(s, get_preferred_output_encoding(), 'powerline_decode_error')
else:
raise TypeError('Expected unicode or bytes instance, got {0}'.format(repr(type(s))))
def safe_unicode(s):
'''Return unicode instance without raising an exception.
Order of assumptions:
* ASCII string or unicode object
* UTF-8 string
* Object with __str__() or __repr__() method that returns UTF-8 string or
unicode object (depending on python version)
* String in powerline.lib.encoding.get_preferred_output_encoding() encoding
* If everything failed use safe_unicode on last exception with which
everything failed
'''
try:
try:
if type(s) is bytes:
return unicode(s, 'ascii')
else:
return unicode(s)
except UnicodeDecodeError:
try:
return unicode(s, 'utf-8')
except TypeError:
return unicode(str(s), 'utf-8')
except UnicodeDecodeError:
return unicode(s, get_preferred_output_encoding())
except Exception as e:
return safe_unicode(e)
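# Examples of the fallback chain described above (illustrative):
#   safe_unicode(b'\xc3\xa9')        -> u'\xe9' (decoded as UTF-8)
#   safe_unicode(ValueError('oops')) -> u'oops' (via the object's string form)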
class FailedUnicode(unicode):
'''Builtin ``unicode`` subclass indicating fatal error
If your code for some reason wants to determine whether `.render()` method
failed it should check returned string for being a FailedUnicode instance.
Alternatively you could subclass Powerline and override `.render()` method
to do what you like in place of catching the exception and returning
FailedUnicode.
'''
pass
if sys.version_info < (3,):
def string(s):
if type(s) is not str:
return s.encode('utf-8')
else:
return s
else:
def string(s):
if type(s) is not str:
return s.decode('utf-8')
else:
return s
string.__doc__ = (
'''Transform ``unicode`` or ``bytes`` object into ``str`` object
On Python-2 this encodes ``unicode`` to ``bytes`` (which is ``str``) using
UTF-8 encoding; on Python-3 this decodes ``bytes`` to ``unicode`` (which is
``str``) using UTF-8 encoding.
	Useful for functions that expect a ``str`` object in both Python versions,
not caring about the semantic differences between them in Python-2 and
Python-3.
'''
)
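# Example: on Python 2, string(u'\u20ac') returns the UTF-8 byte string
# '\xe2\x82\xac'; on Python 3, string(b'\xe2\x82\xac') returns the str '€'.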
def surrogate_pair_to_character(high, low):
'''Transform a pair of surrogate codepoints to one codepoint
'''
return 0x10000 + ((high - 0xD800) << 10) + (low - 0xDC00)
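# Example: the surrogate pair U+D83D U+DE00 maps to
# 0x10000 + ((0xD83D - 0xD800) << 10) + (0xDE00 - 0xDC00) == 0x1F600,
# i.e. surrogate_pair_to_character(0xD83D, 0xDE00) == 0x1F600.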
_strwidth_documentation = (
'''Compute string width in display cells
{0}
:param dict width_data:
Dictionary which maps east_asian_width property values to strings
lengths. It is expected to contain the following keys and values (from
`East Asian Width annex <http://www.unicode.org/reports/tr11/>`_):
=== ====== ===========================================================
Key Value Description
=== ====== ===========================================================
F 2 Fullwidth: all characters that are defined as Fullwidth in
the Unicode Standard [Unicode] by having a compatibility
decomposition of type <wide> to characters elsewhere in the
Unicode Standard that are implicitly narrow but unmarked.
H 1 Halfwidth: all characters that are explicitly defined as
Halfwidth in the Unicode Standard by having a compatibility
decomposition of type <narrow> to characters elsewhere in
the Unicode Standard that are implicitly wide but unmarked,
plus U+20A9 ₩ WON SIGN.
W 2 Wide: all other characters that are always wide. These
characters occur only in the context of East Asian
typography where they are wide characters (such as the
Unified Han Ideographs or Squared Katakana Symbols). This
category includes characters that have explicit halfwidth
counterparts.
Na 1 Narrow: characters that are always narrow and have explicit
fullwidth or wide counterparts. These characters are
implicitly narrow in East Asian typography and legacy
character sets because they have explicit fullwidth or wide
counterparts. All of ASCII is an example of East Asian
Narrow characters.
		A    1 or 2  Ambiguous: characters that may sometimes be wide and
sometimes narrow. Ambiguous characters require additional
information not contained in the character code to further
resolve their width. This information is usually defined in
terminal setting that should in turn respect glyphs widths
in used fonts. Also see :ref:`ambiwidth configuration
option <config-common-ambiwidth>`.
		N    1       Neutral characters: characters that do not occur in legacy
East Asian character sets.
=== ====== ===========================================================
:param unicode string:
String whose width will be calculated.
:return: unsigned integer.''')
def strwidth_ucs_4(width_data, string):
return sum(((
(
0
) if combining(symbol) else (
width_data[east_asian_width(symbol)]
)
) for symbol in string))
strwidth_ucs_4.__doc__ = _strwidth_documentation.format(
'''This version of function expects that characters above 0xFFFF are
represented using one symbol. This is only the case in UCS-4 Python builds.
.. note:
Even in UCS-4 Python builds it is possible to represent characters above
0xFFFF using surrogate pairs. Characters represented this way are not
supported.''')
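# Illustrative call (the width_data mapping below simply mirrors the table in
# the documentation above):
#
#     width_data = {'F': 2, 'H': 1, 'W': 2, 'Na': 1, 'A': 1, 'N': 1}
#     strwidth_ucs_4(width_data, u('abc'))  # -> 3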
def strwidth_ucs_2(width_data, string):
return sum(((
(
width_data[east_asian_width(string[i - 1] + symbol)]
) if 0xDC00 <= ord(symbol) <= 0xDFFF else (
0
) if combining(symbol) or 0xD800 <= ord(symbol) <= 0xDBFF else (
width_data[east_asian_width(symbol)]
)
) for i, symbol in enumerate(string)))
strwidth_ucs_2.__doc__ = _strwidth_documentation.format(
'''This version of function expects that characters above 0xFFFF are
represented using two symbols forming a surrogate pair, which is the only
option in UCS-2 Python builds. It still works correctly in UCS-4 Python
builds, but is slower then its UCS-4 counterpart.''')
| mit | 9,073,864,269,418,027,000 | 31.727915 | 86 | 0.683222 | false |
sdague/home-assistant | homeassistant/components/tellstick/sensor.py | 10 | 5252 | """Support for Tellstick sensors."""
from collections import namedtuple
import logging
from tellcore import telldus
import tellcore.constants as tellcore_constants
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_ID,
CONF_NAME,
CONF_PROTOCOL,
PERCENTAGE,
TEMP_CELSIUS,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DatatypeDescription = namedtuple("DatatypeDescription", ["name", "unit"])
CONF_DATATYPE_MASK = "datatype_mask"
CONF_ONLY_NAMED = "only_named"
CONF_TEMPERATURE_SCALE = "temperature_scale"
CONF_MODEL = "model"
DEFAULT_DATATYPE_MASK = 127
DEFAULT_TEMPERATURE_SCALE = TEMP_CELSIUS
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(
CONF_TEMPERATURE_SCALE, default=DEFAULT_TEMPERATURE_SCALE
): cv.string,
vol.Optional(
CONF_DATATYPE_MASK, default=DEFAULT_DATATYPE_MASK
): cv.positive_int,
vol.Optional(CONF_ONLY_NAMED, default=[]): vol.All(
cv.ensure_list,
[
vol.Schema(
{
vol.Required(CONF_ID): cv.positive_int,
vol.Required(CONF_NAME): cv.string,
vol.Optional(CONF_PROTOCOL): cv.string,
vol.Optional(CONF_MODEL): cv.string,
}
)
],
),
}
)
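# Illustrative configuration.yaml entry for this platform (the id, name,
# protocol and model values below are placeholders, not taken from the code):
#
# sensor:
#   - platform: tellstick
#     temperature_scale: "°C"
#     datatype_mask: 3
#     only_named:
#       - id: 12
#         name: Greenhouse
#         protocol: fineoffset
#         model: temperaturehumidity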
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the Tellstick sensors."""
sensor_value_descriptions = {
tellcore_constants.TELLSTICK_TEMPERATURE: DatatypeDescription(
"temperature", config.get(CONF_TEMPERATURE_SCALE)
),
tellcore_constants.TELLSTICK_HUMIDITY: DatatypeDescription(
"humidity", PERCENTAGE
),
tellcore_constants.TELLSTICK_RAINRATE: DatatypeDescription("rain rate", ""),
tellcore_constants.TELLSTICK_RAINTOTAL: DatatypeDescription("rain total", ""),
tellcore_constants.TELLSTICK_WINDDIRECTION: DatatypeDescription(
"wind direction", ""
),
tellcore_constants.TELLSTICK_WINDAVERAGE: DatatypeDescription(
"wind average", ""
),
tellcore_constants.TELLSTICK_WINDGUST: DatatypeDescription("wind gust", ""),
}
try:
tellcore_lib = telldus.TelldusCore()
except OSError:
_LOGGER.exception("Could not initialize Tellstick")
return
sensors = []
datatype_mask = config.get(CONF_DATATYPE_MASK)
if config[CONF_ONLY_NAMED]:
named_sensors = {}
for named_sensor in config[CONF_ONLY_NAMED]:
name = named_sensor[CONF_NAME]
proto = named_sensor.get(CONF_PROTOCOL)
model = named_sensor.get(CONF_MODEL)
id_ = named_sensor[CONF_ID]
if proto is not None:
if model is not None:
named_sensors[f"{proto}{model}{id_}"] = name
else:
named_sensors[f"{proto}{id_}"] = name
else:
named_sensors[id_] = name
for tellcore_sensor in tellcore_lib.sensors():
if not config[CONF_ONLY_NAMED]:
sensor_name = str(tellcore_sensor.id)
else:
proto_id = f"{tellcore_sensor.protocol}{tellcore_sensor.id}"
proto_model_id = "{}{}{}".format(
tellcore_sensor.protocol, tellcore_sensor.model, tellcore_sensor.id
)
if tellcore_sensor.id in named_sensors:
sensor_name = named_sensors[tellcore_sensor.id]
elif proto_id in named_sensors:
sensor_name = named_sensors[proto_id]
elif proto_model_id in named_sensors:
sensor_name = named_sensors[proto_model_id]
else:
continue
for datatype in sensor_value_descriptions:
if datatype & datatype_mask and tellcore_sensor.has_value(datatype):
sensor_info = sensor_value_descriptions[datatype]
sensors.append(
TellstickSensor(sensor_name, tellcore_sensor, datatype, sensor_info)
)
add_entities(sensors)
class TellstickSensor(Entity):
"""Representation of a Tellstick sensor."""
def __init__(self, name, tellcore_sensor, datatype, sensor_info):
"""Initialize the sensor."""
self._datatype = datatype
self._tellcore_sensor = tellcore_sensor
self._unit_of_measurement = sensor_info.unit or None
self._value = None
self._name = f"{name} {sensor_info.name}"
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._value
@property
def unit_of_measurement(self):
"""Return the unit of measurement of this entity, if any."""
return self._unit_of_measurement
def update(self):
"""Update tellstick sensor."""
self._value = self._tellcore_sensor.value(self._datatype).value
| apache-2.0 | 3,547,326,447,045,937,000 | 32.240506 | 88 | 0.601104 | false |
knigophil/KindleWisper | WisperNetPrep/extractcover.py | 1 | 2961 | import sys, os, glob
import mobiunpack32
import shutil
import unicodefix
def extractThumbnail(infile, tmpdir, asin):
files = mobiunpack32.fileNames(infile, tmpdir)
# Instantiate the mobiUnpack class
mu = mobiunpack32.mobiUnpack(files)
metadata = mu.getMetaData()
proc = mobiunpack32.processHTML(files, metadata)
imgnames = proc.processImages(mu.firstimg, mu.sect)
imgdir = os.path.join(tmpdir, "images")
destdir = "images.$$$"
os.mkdir(destdir)
imageName = None
if 'ThumbOffset' in metadata:
imageNumber = int(metadata['ThumbOffset'][0])
imageName = imgnames[imageNumber]
if imageName is None:
print "Error: Cover Thumbnail image %s was not recognized as a valid image" % imageNumber
else:
print 'Cover ThumbNail Image "%s"' % imageName
copyCover(destdir, infile, os.path.join(imgdir, imageName), ".thumbnail")
if 'CoverOffset' in metadata:
imageNumber = int(metadata['CoverOffset'][0])
imageName = imgnames[imageNumber]
if imageName is None:
print "Error: Cover image %s was not recognized as a valid image" % imageNumber
else:
print 'Cover Image "%s"' % imageName
copyCover(destdir, infile, os.path.join(imgdir, imageName), ".cover")
if imageName is None:
print 'Neither Cover nor ThumbNail found'
imgpath = max(glob.glob(os.path.join(imgdir, "*")), key=os.path.getsize)
if os.path.splitext(os.path.split(imgpath)[1])[1]=='.jpeg':
print 'Fake Cover Image "%s"' % os.path.split(imgpath)[1]
copyCover(destdir, infile, imgpath, ".cover")
else:
if asin==None:
print 'No candidate for cover image found. Execution interrupted.'
shutil.rmtree(tmpdir)
shutil.rmtree(destdir)
sys.exit(0)
def copyCover(destdir, infile, imgpath, suffix):
infileName = os.path.splitext(infile)[0]
imageExt = os.path.splitext(imgpath)[1]
shutil.copy(imgpath, os.path.join(destdir, infileName + suffix+imageExt))
def processFile(infile, asin=None):
infileext = os.path.splitext(infile)[1].upper()
if infileext not in ['.MOBI', '.PRC', '.AZW', '.AZW4', '.AZW3']:
print "Error: first parameter must be a Kindle/Mobipocket ebook or a Kindle/Print Replica ebook."
return 1
try:
print 'Extracting...'
extractThumbnail(infile, "tmpdir.$$$", asin);
shutil.rmtree("tmpdir.$$$")
print 'Completed'
except ValueError, e:
print "Error: %s" % e
return 1
return 0
def main(argv=sys.argv):
if len(argv) != 2:
print "Usage:"
print " extractcover.py infile"
return 1
else:
return processFile(argv[1])
if __name__ == "__main__":
sys.exit(main())
| gpl-3.0 | 4,709,375,851,879,552,000 | 35.961538 | 105 | 0.599797 | false |
zhuwenping/python-for-android | python3-alpha/extra_modules/pyxmpp2/__init__.py | 46 | 7738 | #
# (C) Copyright 2003-2011 Jacek Konieczny <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License Version
# 2.1 as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#
"""
PyXMPP2 - Jabber/XMPP protocol implementation
=============================================
Project Information
-------------------
PyXMPP2 is a Python implementation of the XMPP protocol (:RFC:`6120`,
:RFC:`6121`) and some of `XMPP Extensions`_
`PyXMPP`_ was first implemented by Jacek Konieczny in year 2003, using
`libxml2`_ as the XML framework, then it slowly evolved over years becoming
kind of monster full of 'smart' and legacy code. Also libxml2 proved to be
inadequate base for a Python library.
PyXMPP2 is a rewrite of the original PyXMPP aimed to replace libxml2 with
the more standard :etree:`ElementTree` API and to clean up the API. In fact the
API has completely changed, hopefully for better.
The PyXMPP2 project is hosted at GitHub: http://github.com/Jajcus/pyxmpp/
The API documentation is available at: http://jajcus.github.com/pyxmpp2/api/
The simple API
--------------
If you don't care about all the PyXMPP flexibility and features and just want
to send a Jabber message, please look at the 'Simple API': `pyxmpp2.simple`
Basic components
----------------
XMPP Data
---------
The basic functionality of the XMPP protocol is to send XML data between
entities using XML container elements called 'stanzas'. There are three
types of stanzas:
- ``<message />`` stanzas to send a unicast message to another entity
- ``<iq />`` stanzas for simple request-response exchange
- ``<presence />`` stanzas for broadcast of availability information
They are represented by the following PyXMPP2 classes: `message.Message`,
`iq.Iq` and `presence.Presence`.
The stanzas may carry arbitrary XML payload. It is bound to the stanzas
using the `stanzapayload.StanzaPayload` interface. It can be a generic
`stanzapayload.XMLPayload` implementation or any specialized
`interfaces.StanzaPayload` subclass decoding the XML element as required.
XMPP Streams
------------
The stanzas are sent over XML streams. In PyXMPP the stream functionality
is implemented by the `streambase.StreamBase` class. The class does not
implement actual I/O (see the next section) or SASL/TLS (these are handled
by `streamsasl.StreamSASLHandler` and `streamtls.StreamTLSHandler`), but
provides the basic logic to handle stanzas and stream negotiation.
Transports
----------
The actual I/O (sending XML data over socket) has been separated from the
`streambase.StreamBase` for cleaner code and to allow alternate transport
implementations (like `BOSH`_). The interface is defined by the
`interfaces.XMPPTransport` abstract class and the standard TCP transport
(:RFC:`6120`) is implemented via `transport.TCPTransport`.
Main event loop
---------------
The transport objects react on I/O events (like data received from a socket)
and an XMPP application usually wants to react on various XMPP events, so
a mechanism to dispatch these events is required. In PyXMPP2 the
`mainloop.interfaces.MainLoop` interface is defined to dispatch the events to
various components. There are also a few implementation of the interface
provided:
- `mainloop.select.SelectMainLoop`: asynchronous I/O loop based on the
:std:`select.select` call.
- `mainloop.poll.PollMainLoop`: asynchronous I/O loop based on the
:std:`select.poll` call. Not available on all platforms.
- `mainloop.threads.ThreadPool`: a thread-based alternative to the above
The default implementation is available as `mainloop.main_loop_factory`.
Chains of responsibility
------------------------
Both `streambase.StreamBase` and main loop implementations constructors
expect a 'handlers' argument with a list of object to handle various events
or elements. Main loop handlers should implement one or more of these
interfaces:
- `pyxmpp2.mainloop.interfaces.IOHandler`: provides a socket or file
descriptor to poll and handles reads from and writes to it. Implemented
e.g. by the `transport.TCPTransport` class.
- `pyxmpp2.mainloop.interfaces.EventHandler`: specially decorated methods
  of its subclasses are called on events raised by other components (like
  the transport or stream). Also available as
  `pyxmpp2.interfaces.EventHandler`
- `pyxmpp2.mainloop.interfaces.TimeoutHandler`: specially decorated methods
  of its subclasses are called on selected intervals. Also available as
  `pyxmpp2.interfaces.TimeoutHandler`
Stream handlers should implement one or more of:
- `pyxmpp2.interfaces.XMPPFeatureHandler`: specially decorated methods of
its subclasses are called for matching stanzas. The interface will also
provide facilities for XMPP feature discovery and capability
advertisement.
- `pyxmpp2.interfaces.StreamFeatureHandler`: handle or generate
``<stream:features>`` subelement and handle other related stream
elements. Implemented e.g. by `streamsasl.StreamSASLHandler` and
`streamtls.StreamTLSHandler`.
Component configuration
-----------------------
As many parameters may define an XMPP implementation behaviour, class
constructors or other methods would require lots of arguments to handle them
all. Instead, a special `settings.XMPPSettings` object is often used, which can
hold any parameter useful by any part of the PyXMPP2. It is also used as a
simple form of dependency injection.
The Client class
------------------
The `pyxmpp2.client.Client` joins a main loop, a client stream and some basic
handlers (for stream encryption, authentication, resource binding and roster
handling) together, so a client application has only to add its handlers to
make it work.
See the `pyxmpp2.client` module for details. This should be the starting point
for most XMPP client applications.
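A rough illustrative sketch of a minimal client built this way (the JID,
password and settings keys below are placeholders; consult `pyxmpp2.client`
and `pyxmpp2.settings` for the authoritative API)::
    from pyxmpp2.jid import JID
    from pyxmpp2.client import Client
    from pyxmpp2.settings import XMPPSettings
    from pyxmpp2.interfaces import EventHandler
    class MyHandler(EventHandler):
        pass
    settings = XMPPSettings({'password': 'secret'})
    client = Client(JID('user@example.org'), [MyHandler()], settings)
    client.connect()
    client.run()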
Logging
-------
PyXMPP2 does all its logging via the standard :std:`logging` module. Most
of the PyXMPP2 modules log via 'pyxmpp2.module-name' loggers. There are some
special cases though:
* 'pyxmpp2.IN' logger, which is used for XML data (not parsed yet)
* 'pyxmpp2.OUT' logger, which is used for XML data sent
Those two can be used to intercept the data for application-specific monitoring
(e.g. an 'XML console' in a client GUI).
Most of the log messages generated by PyXMPP have level 'DEBUG', no higher
level messages should appear during normal operation. Some 'WARNING' messages
may be emitted when a remote party misbehaves and 'ERROR' messages in case of
programming errors.
Module hierarchy
----------------
Base XMPP features (:RFC:`6120` and :RFC:`6121`) and core PyXMPP2 framework
features are implemented in direct submodules of `pyxmpp2` package.
`pyxmpp2.sasl` package provides the SASL protocol and mechanisms
implementation.
`pyxmpp2.mainloop` contains the main event loop and I/O framework.
`pyxmpp2.ext` contains `XMPP Extensions`_ implementations.
.. _XMPP Extensions: http://xmpp.org/xmpp-protocols/xmpp-extensions/
.. _PyXMPP: http://pyxmpp.jajcus.net/
.. _libxml2: http://xmlsoft.org/
.. _BOSH: http://xmpp.org/extensions/xep-0124.html
"""
__docformat__ = "restructuredtext en"
# vi: sts=4 et sw=4
| apache-2.0 | -4,878,849,015,479,024,000 | 38.682051 | 79 | 0.751615 | false |
dvitme/odoo-addons | project_long_term/__init__.py | 9 | 1099 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import project_long_term
from . import wizard
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 7,380,458,886,009,467,000 | 42.96 | 78 | 0.618744 | false |
tectronics/pysal | pysal/esda/join_counts.py | 5 | 4171 | """
Spatial autocorrelation for binary attributes
"""
__author__ = "Sergio J. Rey <[email protected]> , Luc Anselin <[email protected]>"
import pysal
import numpy as np
__all__ = ['Join_Counts']
PERMUTATIONS = 999
class Join_Counts:
"""Binary Join Counts
Parameters
----------
y : array
binary variable measured across n spatial units
w : W
spatial weights instance
permutations : int
number of random permutations for calculation of pseudo-p_values
Attributes
----------
y : array
original variable
w : W
original w object
permutations : int
number of permutations
bb : float
number of black-black joins
ww : float
number of white-white joins
bw : float
number of black-white joins
J : float
number of joins
sim_bb : array (if permutations>0)
vector of bb values for permuted samples
p_sim_bb : array (if permutations>0)
p-value based on permutations (one-sided)
null: spatial randomness
alternative: the observed bb is greater than under randomness
mean_bb : average of permuted bb values
min_bb : minimum of permuted bb values
max_bb : maximum of permuted bb values
sim_bw : array (if permutations>0)
vector of bw values for permuted samples
p_sim_bw : array (if permutations>0)
p-value based on permutations (one-sided)
null: spatial randomness
alternative: the observed bw is greater than under randomness
mean_bw : average of permuted bw values
min_bw : minimum of permuted bw values
max_bw : maximum of permuted bw values
Examples
--------
    Replicate example from Anselin and Rey
>>> import numpy as np
>>> w=pysal.lat2W(4,4)
>>> y=np.ones(16)
>>> y[0:8]=0
>>> np.random.seed(12345)
>>> jc=pysal.Join_Counts(y,w)
>>> jc.bb
10.0
>>> jc.bw
4.0
>>> jc.ww
10.0
>>> jc.J
24.0
>>> len(jc.sim_bb)
999
>>> jc.p_sim_bb
0.0030000000000000001
>>> np.mean(jc.sim_bb)
5.5465465465465469
>>> np.max(jc.sim_bb)
10.0
>>> np.min(jc.sim_bb)
0.0
>>> len(jc.sim_bw)
999
>>> jc.p_sim_bw
1.0
>>> np.mean(jc.sim_bw)
12.811811811811811
>>> np.max(jc.sim_bw)
24.0
>>> np.min(jc.sim_bw)
7.0
>>>
"""
def __init__(self, y, w, permutations=PERMUTATIONS):
w.transformation = 'b' # ensure we have binary weights
self.w = w
self.y = y
self.permutations = permutations
self.J = w.s0 / 2.
self.bb, self.ww, self.bw = self.__calc(self.y)
if permutations:
sim = [self.__calc(np.random.permutation(self.y))
for i in xrange(permutations)]
sim_jc = np.array(sim)
self.sim_bb = sim_jc[:, 0]
self.min_bb = np.min(self.sim_bb)
self.mean_bb = np.mean(self.sim_bb)
self.max_bb = np.max(self.sim_bb)
self.sim_bw = sim_jc[:, 2]
self.min_bw = np.min(self.sim_bw)
self.mean_bw = np.mean(self.sim_bw)
self.max_bw = np.max(self.sim_bw)
p_sim_bb = self.__pseudop(self.sim_bb, self.bb)
p_sim_bw = self.__pseudop(self.sim_bw, self.bw)
self.p_sim_bb = p_sim_bb
self.p_sim_bw = p_sim_bw
def __calc(self, z):
zl = pysal.lag_spatial(self.w, z)
bb = sum(z * zl) / 2.0
zw = 1 - z
zl = pysal.lag_spatial(self.w, zw)
ww = sum(zw * zl) / 2.0
bw = self.J - (bb + ww)
return (bb, ww, bw)
def __pseudop(self, sim, jc):
above = sim >= jc
larger = sum(above)
psim = (larger + 1.) / (self.permutations + 1.)
return psim
| bsd-3-clause | -5,729,003,583,354,193,000 | 27.965278 | 86 | 0.506833 | false |
vjorlikowski/plexus | plexus/util.py | 1 | 4984 | # Copyright (c) 2015 Duke University.
# This software is distributed under the terms of the MIT License,
# the text of which is included in this distribution within the file
# named LICENSE.
#
# Portions of this software are derived from the "rest_router" controller
# application included with Ryu (http://osrg.github.io/ryu/), which is:
# Copyright (C) 2013 Nippon Telegraph and Telephone Corporation.
#
# Modifications and additions were made to the original content by the
# following authors:
# Author: Victor J. Orlikowski <[email protected]>
import logging
import json
import socket
from webob import Response
from ryu.lib import addrconv
from ryu.exception import RyuException
from plexus import *
def get_priority(priority_type, vid=0, route=None):
log_msg = None
priority = priority_type
if ((priority_type == PRIORITY_TYPE_ROUTE) and
(route is not None)):
if route.dst_ip:
if route.src_ip:
priority_type = PRIORITY_ADDRESSED_STATIC_ROUTING
else:
priority_type = PRIORITY_STATIC_ROUTING
priority = priority_type + route.dst_netmask
log_msg = 'static routing'
else:
if route.src_ip:
priority_type = PRIORITY_ADDRESSED_DEFAULT_ROUTING
else:
priority_type = PRIORITY_DEFAULT_ROUTING
priority = priority_type
log_msg = 'default routing'
if (vid or (priority_type == PRIORITY_IP_HANDLING) or
(priority_type == PRIORITY_PENALTYBOX)):
priority += PRIORITY_VLAN_SHIFT
if priority_type > PRIORITY_ADDRESSED_STATIC_ROUTING:
priority += PRIORITY_NETMASK_SHIFT
if log_msg is None:
return priority
else:
return priority, log_msg
def get_priority_type(priority, vid):
if vid:
priority -= PRIORITY_VLAN_SHIFT
return priority
# REST command template
def rest_command(func):
def _rest_command(*args, **kwargs):
try:
msg = func(*args, **kwargs)
return Response(content_type='application/json',
body=json.dumps(msg))
except SyntaxError as e:
status = 400
details = e.msg
except (ValueError, NameError) as e:
status = 400
details = e.message
except NotFoundError as msg:
status = 404
details = str(msg)
msg = {REST_RESULT: REST_NG,
REST_DETAILS: details}
return Response(status=status, body=json.dumps(msg))
return _rest_command
def ip_addr_aton(ip_str, err_msg=None):
try:
return addrconv.ipv4.bin_to_text(socket.inet_aton(ip_str))
except (struct.error, socket.error) as e:
if err_msg is not None:
e.message = '%s %s' % (err_msg, e.message)
raise ValueError(e.message)
def ip_addr_ntoa(ip):
return socket.inet_ntoa(addrconv.ipv4.text_to_bin(ip))
def mask_ntob(mask, err_msg=None):
try:
return (UINT32_MAX << (32 - mask)) & UINT32_MAX
except ValueError:
msg = 'illegal netmask'
if err_msg is not None:
msg = '%s %s' % (err_msg, msg)
raise ValueError(msg)
def ipv4_apply_mask(address, prefix_len, err_msg=None):
if not isinstance(address, str):
raise ValueError('ipv4_apply_mask: address parameter was not a string.')
address_int = ipv4_text_to_int(address)
return ipv4_int_to_text(address_int & mask_ntob(prefix_len, err_msg))
def ipv4_int_to_text(ip_int):
if not isinstance(ip_int, (int, long)):
raise ValueError('ipv4_int_to_text: ip_int parameter was not an int or long.')
return addrconv.ipv4.bin_to_text(struct.pack('!I', ip_int))
def ipv4_text_to_int(ip_text):
if ip_text == 0:
return ip_text
if not isinstance(ip_text, str):
raise ValueError('ipv4_text_to_int: ip_text parameter was not a string.')
return struct.unpack('!I', addrconv.ipv4.text_to_bin(ip_text))[0]
def nw_addr_aton(nw_addr, err_msg=None):
ip_mask = nw_addr.split('/')
default_route = ip_addr_aton(ip_mask[0], err_msg=err_msg)
netmask = 32
if len(ip_mask) == 2:
try:
netmask = int(ip_mask[1])
except ValueError as e:
if err_msg is not None:
e.message = '%s %s' % (err_msg, e.message)
raise ValueError(e.message)
if netmask < 0:
msg = 'illegal netmask'
if err_msg is not None:
msg = '%s %s' % (err_msg, msg)
raise ValueError(msg)
nw_addr = ipv4_apply_mask(default_route, netmask, err_msg)
return nw_addr, netmask, default_route
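# Illustrative behaviour of the address helpers above:
#   ipv4_apply_mask('192.168.1.77', 24)  -> '192.168.1.0'
#   nw_addr_aton('10.1.2.3/8')           -> ('10.0.0.0', 8, '10.1.2.3')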
class RouterLoggerAdapter(logging.LoggerAdapter):
def process(self, msg, kwargs):
return '[DPID %16s] %s' % (self.extra['sw_id'], msg), kwargs
class NotFoundError(RyuException):
message = 'Router SW is not connected. : switch_id=%(switch_id)s'
class CommandFailure(RyuException):
pass
| apache-2.0 | 3,878,148,281,228,445,700 | 30.948718 | 86 | 0.620987 | false |
stackArmor/security_monkey | security_monkey/tests/views/test_view_watcher_config.py | 3 | 4138 | # Copyright 2016 Bridgewater Associates
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. module: security_monkey.tests.views.test_watcher_config
:platform: Unix
.. version:: $$VERSION$$
.. moduleauthor:: Bridgewater OSS <[email protected]>
"""
from security_monkey.tests.views import SecurityMonkeyApiTestCase
from security_monkey.watcher import watcher_registry
from security_monkey.datastore import WatcherConfig
from security_monkey import db
from mock import patch
import json
# Mock watcher_registry because this is used when building watcher config response
class MockWatcher(object):
def __init__(self, accounts=None, debug=False):
self.accounts = accounts
watcher_configs = [
{'type': 'MockWatcher1', 'index': 'index1', 'interval': 1440},
{'type': 'MockWatcher2', 'index': 'index2', 'interval': 1440},
{'type': 'MockWatcher3', 'index': 'index3', 'interval': 1440}
]
test_watcher_registry = {}
for config in watcher_configs:
watcher = type(config['type'], (MockWatcher,), {'index': config['index'], 'interval': config['interval']})
test_watcher_registry[config['index']] = watcher
@patch.dict(watcher_registry, test_watcher_registry, clear=True)
class WatcherConfigApiTestCase(SecurityMonkeyApiTestCase):
def test_get_empty_watcher_configs(self):
r = self.test_app.get('/api/1/watcher_config', headers=self.headers)
r_json = json.loads(r.data)
assert r.status_code == 200
assert len(r_json['items']) == len(watcher_configs)
assert r_json['items'][0]['id'] == 0
def test_get_watcher_configs(self):
watcher_config = WatcherConfig(index='index1', interval=1440, active=True)
db.session.add(watcher_config)
db.session.commit()
db.session.refresh(watcher_config)
r = self.test_app.get('/api/1/watcher_config', headers=self.headers)
r_json = json.loads(r.data)
assert r.status_code == 200
assert len(r_json['items']) == len(watcher_configs)
assert r_json['items'][0]['id'] != 0
def test_put_watcher_config(self):
watcher_config = WatcherConfig(index='index1', interval=1440, active=True)
db.session.add(watcher_config)
db.session.commit()
db.session.refresh(watcher_config)
d = dict(index='account', interval=1440, active=True)
r = self.test_app.put(
"/api/1/watcher_config/{}".format(watcher_config.id),
headers=self.headers,
data=json.dumps(d)
)
assert r.status_code == 200
# Update the response code when we handle this appropriately (404)
def test_put_watcher_config_wrong_id(self):
watcher_config = WatcherConfig(index='index1', interval=1440, active=True)
db.session.add(watcher_config)
db.session.commit()
db.session.refresh(watcher_config)
d = dict(index='account', interval=1440, active=True)
r = self.test_app.put("/api/1/watcher_config/{}".format('100'), headers=self.headers, data=json.dumps(d))
assert r.status_code == 500
def test_put_watcher_config_wrong_data(self):
watcher_config = WatcherConfig(index='index1', interval=1440, active=True)
db.session.add(watcher_config)
db.session.commit()
db.session.refresh(watcher_config)
d = dict(index='account', foobar=1440, active=True)
r = self.test_app.put(
"/api/1/watcher_config/{}".format(watcher_config.id),
headers=self.headers,
data=json.dumps(d)
)
assert r.status_code == 400
| apache-2.0 | 7,363,521,223,281,635,000 | 37.672897 | 113 | 0.661431 | false |
gale320/newfies-dialer | newfies/apirest/view_bulk_contact.py | 4 | 3400 | # -*- coding: utf-8 -*-
#
# Newfies-Dialer License
# http://www.newfies-dialer.org
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.
#
# Copyright (C) 2011-2014 Star2Billing S.L.
#
# The Initial Developer of the Original Code is
# Arezqui Belaid <[email protected]>
#
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.authentication import BasicAuthentication, SessionAuthentication
from dialer_contact.models import Phonebook, Contact
from dialer_campaign.function_def import dialer_setting_limit, check_dialer_setting
class BulkContactViewSet(APIView):
"""
**Create**:
CURL Usage::
curl -u username:password --dump-header - -H "Content-Type:application/json" -X POST --data '{"phonebook_id": "1", "phoneno_list" : "12345,54344"}' http://localhost:8000/rest-api/bulkcontact/
Response::
HTTP/1.0 200 OK
Date: Mon, 01 Jul 2013 13:14:10 GMT
Server: WSGIServer/0.1 Python/2.7.3
Vary: Accept, Accept-Language, Cookie
Content-Type: application/json; charset=utf-8
Content-Language: en-us
Allow: POST, OPTIONS
{"result": "Bulk contacts are created"}
"""
authentication = (BasicAuthentication, SessionAuthentication)
def post(self, request):
"""
create contacts in bulk
"""
error = {}
if request.method == 'POST':
if not request.DATA:
error['error'] = 'Data set is empty'
if check_dialer_setting(request, check_for="contact"):
error['error'] = "You have too many contacts per campaign. You are allowed a maximum of %s" % \
dialer_setting_limit(request, limit_for="contact")
phonebook_id = request.DATA.get('phonebook_id')
if phonebook_id and phonebook_id != '':
try:
Phonebook.objects.get(id=phonebook_id, user=request.user)
except Phonebook.DoesNotExist:
error['error'] = 'Phonebook is not valid!'
else:
error['error'] = 'Phonebook is not selected!'
if error:
return Response(error)
phoneno_list = request.DATA.get('phoneno_list')
phonebook_id = request.DATA.get('phonebook_id')
phonenolist = list(phoneno_list.split(","))
obj_phonebook = Phonebook.objects.get(id=phonebook_id, user=request.user)
new_contact_count = 0
for phoneno in phonenolist:
# check phoneno in Contact
dup_count = Contact.objects.filter(contact=phoneno, phonebook__user=request.user).count()
# If dup_count is zero, create new contact
if dup_count == 0:
new_contact = Contact.objects.create(
phonebook=obj_phonebook,
contact=phoneno,
)
new_contact_count = new_contact_count + 1
new_contact.save()
else:
error_msg = "The contact duplicated (%s)!\n" % phoneno
return Response({'error': error_msg})
return Response({'result': 'Bulk contacts are created'})
| mpl-2.0 | -9,052,049,498,339,759,000 | 36.777778 | 203 | 0.6 | false |
nitirohilla/upm | examples/python/ads1015.py | 7 | 2834 | # Author: Mihai Tudor Panu <[email protected]>
# Copyright (c) 2016 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# This example demonstrates how to use one of the ADS1015 ADCs on the Grove Joule
# Shield or the Sparkfun ADC Block for Edison with devices that output a small
# differential voltage (e.g. geophones, piezoelectric bands or pads,
# thermocouples).
from __future__ import print_function
from threading import Timer
from time import sleep
from upm import pyupm_ads1x15 as upm
def stop():
global running
running = False
def main():
global running
running = True
fileName = './ads1015.data' # Output filename
id = 0 # Sample number
# Initialize and configure the ADS1015
ads1015 = upm.ADS1015(0, 0x48)
# Put the ADC into differential mode for pins A0 and A1
ads1015.getSample(upm.ADS1X15.DIFF_0_1)
# Set the gain based on expected VIN range to -/+ 2.048 V
# Can be adjusted based on application to as low as -/+ 0.256 V, see API
# documentation for details
ads1015.setGain(upm.ADS1X15.GAIN_TWO)
# Set the sample rate to 3300 samples per second (max) and turn on continuous
# sampling
ads1015.setSPS(upm.ADS1015.SPS_3300)
ads1015.setContinuous(True)
# Open the output file
try:
f = open(fileName, 'w')
except OSError as e:
print('Cannot open output file:', e)
return
# Setup a timer to stop logging after 10 seconds
t = Timer(10, stop)
t.start()
# Read sensor and write to file every 1 ms
while running:
f.write(str(id) + ' %.7f' % ads1015.getLastSample() + '\n')
id += 1
sleep(0.001)
# Close and exit
f.close()
print('Wrote', id, 'samples to file:', fileName)
return
if __name__ == '__main__':
main()
| mit | 817,162,908,502,118,000 | 33.144578 | 81 | 0.705011 | false |
redhat-openstack/swift | test/unit/proxy/test_server.py | 4 | 407033 | # -*- coding: utf-8 -*-
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import print_function
import email.parser
import logging
import math
import os
import pickle
import rfc822
import sys
import unittest
from contextlib import closing, contextmanager, nested
from gzip import GzipFile
from shutil import rmtree
import gc
import time
from textwrap import dedent
from urllib import quote
from hashlib import md5
from pyeclib.ec_iface import ECDriverError
from tempfile import mkdtemp, NamedTemporaryFile
import weakref
import operator
import functools
from swift.obj import diskfile
import re
import random
import mock
from eventlet import sleep, spawn, wsgi, listen, Timeout
from six import BytesIO
from six import StringIO
from six.moves import range
from swift.common.utils import hash_path, json, storage_directory, \
parse_content_type, iter_multipart_mime_documents, public
from test.unit import (
connect_tcp, readuntil2crlfs, FakeLogger, fake_http_connect, FakeRing,
FakeMemcache, debug_logger, patch_policies, write_fake_ring,
mocked_http_conn)
from swift.proxy import server as proxy_server
from swift.proxy.controllers.obj import ReplicatedObjectController
from swift.account import server as account_server
from swift.container import server as container_server
from swift.obj import server as object_server
from swift.common.middleware import proxy_logging, versioned_writes
from swift.common.middleware.acl import parse_acl, format_acl
from swift.common.exceptions import ChunkReadTimeout, DiskFileNotExist, \
APIVersionError
from swift.common import utils, constraints
from swift.common.ring import RingData
from swift.common.utils import mkdirs, normalize_timestamp, NullLogger
from swift.common.wsgi import monkey_patch_mimetools, loadapp
from swift.proxy.controllers import base as proxy_base
from swift.proxy.controllers.base import get_container_memcache_key, \
get_account_memcache_key, cors_validation
import swift.proxy.controllers
import swift.proxy.controllers.obj
from swift.common.swob import Request, Response, HTTPUnauthorized, \
HTTPException, HeaderKeyDict
from swift.common import storage_policy
from swift.common.storage_policy import StoragePolicy, ECStoragePolicy, \
StoragePolicyCollection, POLICIES
from swift.common.request_helpers import get_sys_meta_prefix
# mocks
logging.getLogger().addHandler(logging.StreamHandler(sys.stdout))
STATIC_TIME = time.time()
_test_coros = _test_servers = _test_sockets = _orig_container_listing_limit = \
_testdir = _orig_SysLogHandler = _orig_POLICIES = _test_POLICIES = None
def do_setup(the_object_server):
utils.HASH_PATH_SUFFIX = 'endcap'
global _testdir, _test_servers, _test_sockets, \
_orig_container_listing_limit, _test_coros, _orig_SysLogHandler, \
_orig_POLICIES, _test_POLICIES
_orig_POLICIES = storage_policy._POLICIES
_orig_SysLogHandler = utils.SysLogHandler
utils.SysLogHandler = mock.MagicMock()
monkey_patch_mimetools()
# Since we're starting up a lot here, we're going to test more than
# just chunked puts; we're also going to test parts of
# proxy_server.Application we couldn't get to easily otherwise.
_testdir = \
os.path.join(mkdtemp(), 'tmp_test_proxy_server_chunked')
mkdirs(_testdir)
rmtree(_testdir)
for drive in ('sda1', 'sdb1', 'sdc1', 'sdd1', 'sde1',
'sdf1', 'sdg1', 'sdh1', 'sdi1'):
mkdirs(os.path.join(_testdir, drive, 'tmp'))
conf = {'devices': _testdir, 'swift_dir': _testdir,
'mount_check': 'false', 'allowed_headers':
'content-encoding, x-object-manifest, content-disposition, foo',
'allow_versions': 't'}
prolis = listen(('localhost', 0))
acc1lis = listen(('localhost', 0))
acc2lis = listen(('localhost', 0))
con1lis = listen(('localhost', 0))
con2lis = listen(('localhost', 0))
obj1lis = listen(('localhost', 0))
obj2lis = listen(('localhost', 0))
obj3lis = listen(('localhost', 0))
objsocks = [obj1lis, obj2lis, obj3lis]
_test_sockets = \
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis, obj3lis)
account_ring_path = os.path.join(_testdir, 'account.ring.gz')
account_devs = [
{'port': acc1lis.getsockname()[1]},
{'port': acc2lis.getsockname()[1]},
]
write_fake_ring(account_ring_path, *account_devs)
container_ring_path = os.path.join(_testdir, 'container.ring.gz')
container_devs = [
{'port': con1lis.getsockname()[1]},
{'port': con2lis.getsockname()[1]},
]
write_fake_ring(container_ring_path, *container_devs)
storage_policy._POLICIES = StoragePolicyCollection([
StoragePolicy(0, 'zero', True),
StoragePolicy(1, 'one', False),
StoragePolicy(2, 'two', False),
ECStoragePolicy(3, 'ec', ec_type='jerasure_rs_vand',
ec_ndata=2, ec_nparity=1, ec_segment_size=4096)])
obj_rings = {
0: ('sda1', 'sdb1'),
1: ('sdc1', 'sdd1'),
2: ('sde1', 'sdf1'),
# sdg1, sdh1, sdi1 taken by policy 3 (see below)
}
for policy_index, devices in obj_rings.items():
policy = POLICIES[policy_index]
obj_ring_path = os.path.join(_testdir, policy.ring_name + '.ring.gz')
obj_devs = [
{'port': objsock.getsockname()[1], 'device': dev}
for objsock, dev in zip(objsocks, devices)]
write_fake_ring(obj_ring_path, *obj_devs)
# write_fake_ring can't handle a 3-element ring, and the EC policy needs
# at least 3 devs to work with, so we do it manually
devs = [{'id': 0, 'zone': 0, 'device': 'sdg1', 'ip': '127.0.0.1',
'port': obj1lis.getsockname()[1]},
{'id': 1, 'zone': 0, 'device': 'sdh1', 'ip': '127.0.0.1',
'port': obj2lis.getsockname()[1]},
{'id': 2, 'zone': 0, 'device': 'sdi1', 'ip': '127.0.0.1',
'port': obj3lis.getsockname()[1]}]
pol3_replica2part2dev_id = [[0, 1, 2, 0],
[1, 2, 0, 1],
[2, 0, 1, 2]]
obj3_ring_path = os.path.join(_testdir, POLICIES[3].ring_name + '.ring.gz')
part_shift = 30
with closing(GzipFile(obj3_ring_path, 'wb')) as fh:
pickle.dump(RingData(pol3_replica2part2dev_id, devs, part_shift), fh)
prosrv = proxy_server.Application(conf, FakeMemcacheReturnsNone(),
logger=debug_logger('proxy'))
for policy in POLICIES:
# make sure all the rings are loaded
prosrv.get_object_ring(policy.idx)
# don't lose this one!
_test_POLICIES = storage_policy._POLICIES
acc1srv = account_server.AccountController(
conf, logger=debug_logger('acct1'))
acc2srv = account_server.AccountController(
conf, logger=debug_logger('acct2'))
con1srv = container_server.ContainerController(
conf, logger=debug_logger('cont1'))
con2srv = container_server.ContainerController(
conf, logger=debug_logger('cont2'))
obj1srv = the_object_server.ObjectController(
conf, logger=debug_logger('obj1'))
obj2srv = the_object_server.ObjectController(
conf, logger=debug_logger('obj2'))
obj3srv = the_object_server.ObjectController(
conf, logger=debug_logger('obj3'))
_test_servers = \
(prosrv, acc1srv, acc2srv, con1srv, con2srv, obj1srv, obj2srv, obj3srv)
nl = NullLogger()
logging_prosv = proxy_logging.ProxyLoggingMiddleware(prosrv, conf,
logger=prosrv.logger)
prospa = spawn(wsgi.server, prolis, logging_prosv, nl)
acc1spa = spawn(wsgi.server, acc1lis, acc1srv, nl)
acc2spa = spawn(wsgi.server, acc2lis, acc2srv, nl)
con1spa = spawn(wsgi.server, con1lis, con1srv, nl)
con2spa = spawn(wsgi.server, con2lis, con2srv, nl)
obj1spa = spawn(wsgi.server, obj1lis, obj1srv, nl)
obj2spa = spawn(wsgi.server, obj2lis, obj2srv, nl)
obj3spa = spawn(wsgi.server, obj3lis, obj3srv, nl)
_test_coros = \
(prospa, acc1spa, acc2spa, con1spa, con2spa, obj1spa, obj2spa, obj3spa)
# Create account
ts = normalize_timestamp(time.time())
partition, nodes = prosrv.account_ring.get_nodes('a')
for node in nodes:
conn = swift.proxy.controllers.obj.http_connect(node['ip'],
node['port'],
node['device'],
partition, 'PUT', '/a',
{'X-Timestamp': ts,
'x-trans-id': 'test'})
resp = conn.getresponse()
assert(resp.status == 201)
# Create another account
# used for account-to-account tests
ts = normalize_timestamp(time.time())
partition, nodes = prosrv.account_ring.get_nodes('a1')
for node in nodes:
conn = swift.proxy.controllers.obj.http_connect(node['ip'],
node['port'],
node['device'],
partition, 'PUT',
'/a1',
{'X-Timestamp': ts,
'x-trans-id': 'test'})
resp = conn.getresponse()
assert(resp.status == 201)
# Create containers, 1 per test policy
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
assert headers[:len(exp)] == exp, "Expected '%s', encountered '%s'" % (
exp, headers[:len(exp)])
# Create container in other account
# used for account-to-account tests
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a1/c1 HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
assert headers[:len(exp)] == exp, "Expected '%s', encountered '%s'" % (
exp, headers[:len(exp)])
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write(
'PUT /v1/a/c1 HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\nX-Storage-Policy: one\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
assert headers[:len(exp)] == exp, \
"Expected '%s', encountered '%s'" % (exp, headers[:len(exp)])
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write(
'PUT /v1/a/c2 HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\nX-Storage-Policy: two\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
assert headers[:len(exp)] == exp, \
"Expected '%s', encountered '%s'" % (exp, headers[:len(exp)])
def unpatch_policies(f):
"""
This will unset a TestCase level patch_policies to use the module level
policies setup for the _test_servers instead.
N.B. You should NEVER modify the _test_server policies or rings during a
test because they persist for the life of the entire module!
"""
@functools.wraps(f)
def wrapper(*args, **kwargs):
with patch_policies(_test_POLICIES):
return f(*args, **kwargs)
return wrapper
def setup():
do_setup(object_server)
def teardown():
for server in _test_coros:
server.kill()
rmtree(os.path.dirname(_testdir))
utils.SysLogHandler = _orig_SysLogHandler
storage_policy._POLICIES = _orig_POLICIES
def sortHeaderNames(headerNames):
"""
Return the given string of header names sorted.
    headerNames: a comma-delimited list of header names
"""
headers = [a.strip() for a in headerNames.split(',') if a.strip()]
headers.sort()
return ', '.join(headers)
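# Example: sortHeaderNames('X-Timestamp, Etag , Content-Length') returns
# 'Content-Length, Etag, X-Timestamp'.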
def parse_headers_string(headers_str):
headers_dict = HeaderKeyDict()
for line in headers_str.split('\r\n'):
if ': ' in line:
header, value = line.split(': ', 1)
headers_dict[header] = value
return headers_dict
def node_error_count(proxy_app, ring_node):
# Reach into the proxy's internals to get the error count for a
# particular node
node_key = proxy_app._error_limit_node_key(ring_node)
return proxy_app._error_limiting.get(node_key, {}).get('errors', 0)
def node_last_error(proxy_app, ring_node):
# Reach into the proxy's internals to get the last error for a
# particular node
node_key = proxy_app._error_limit_node_key(ring_node)
return proxy_app._error_limiting.get(node_key, {}).get('last_error')
def set_node_errors(proxy_app, ring_node, value, last_error):
# Set the node's error count to value
node_key = proxy_app._error_limit_node_key(ring_node)
stats = proxy_app._error_limiting.setdefault(node_key, {})
stats['errors'] = value
stats['last_error'] = last_error
class FakeMemcacheReturnsNone(FakeMemcache):
def get(self, key):
# Returns None as the timestamp of the container; assumes we're only
# using the FakeMemcache for container existence checks.
return None
@contextmanager
def save_globals():
orig_http_connect = getattr(swift.proxy.controllers.base, 'http_connect',
None)
orig_account_info = getattr(swift.proxy.controllers.Controller,
'account_info', None)
orig_container_info = getattr(swift.proxy.controllers.Controller,
'container_info', None)
try:
yield True
finally:
swift.proxy.controllers.Controller.account_info = orig_account_info
swift.proxy.controllers.base.http_connect = orig_http_connect
swift.proxy.controllers.obj.http_connect = orig_http_connect
swift.proxy.controllers.account.http_connect = orig_http_connect
swift.proxy.controllers.container.http_connect = orig_http_connect
swift.proxy.controllers.Controller.container_info = orig_container_info
def set_http_connect(*args, **kwargs):
new_connect = fake_http_connect(*args, **kwargs)
swift.proxy.controllers.base.http_connect = new_connect
swift.proxy.controllers.obj.http_connect = new_connect
swift.proxy.controllers.account.http_connect = new_connect
swift.proxy.controllers.container.http_connect = new_connect
return new_connect
def _make_callback_func(calls):
def callback(ipaddr, port, device, partition, method, path,
headers=None, query_string=None, ssl=False):
context = {}
context['method'] = method
context['path'] = path
context['headers'] = headers or {}
calls.append(context)
return callback
def _limit_max_file_size(f):
"""
This will limit constraints.MAX_FILE_SIZE for the duration of the
wrapped function, based on whether MAX_FILE_SIZE exceeds the
sys.maxsize limit on the system running the tests.
This allows successful testing on 32 bit systems.
"""
@functools.wraps(f)
def wrapper(*args, **kwargs):
test_max_file_size = constraints.MAX_FILE_SIZE
if constraints.MAX_FILE_SIZE >= sys.maxsize:
test_max_file_size = (2 ** 30 + 2)
with mock.patch.object(constraints, 'MAX_FILE_SIZE',
test_max_file_size):
return f(*args, **kwargs)
return wrapper
# tests
class TestController(unittest.TestCase):
def setUp(self):
self.account_ring = FakeRing()
self.container_ring = FakeRing()
self.memcache = FakeMemcache()
app = proxy_server.Application(None, self.memcache,
account_ring=self.account_ring,
container_ring=self.container_ring)
self.controller = swift.proxy.controllers.Controller(app)
class FakeReq(object):
def __init__(self):
self.url = "/foo/bar"
self.method = "METHOD"
def as_referer(self):
return self.method + ' ' + self.url
self.account = 'some_account'
self.container = 'some_container'
self.request = FakeReq()
self.read_acl = 'read_acl'
self.write_acl = 'write_acl'
def test_transfer_headers(self):
src_headers = {'x-remove-base-meta-owner': 'x',
'x-base-meta-size': '151M',
'new-owner': 'Kun'}
dst_headers = {'x-base-meta-owner': 'Gareth',
'x-base-meta-size': '150M'}
self.controller.transfer_headers(src_headers, dst_headers)
expected_headers = {'x-base-meta-owner': '',
'x-base-meta-size': '151M'}
self.assertEqual(dst_headers, expected_headers)
def check_account_info_return(self, partition, nodes, is_none=False):
if is_none:
p, n = None, None
else:
p, n = self.account_ring.get_nodes(self.account)
self.assertEqual(p, partition)
self.assertEqual(n, nodes)
def test_account_info_container_count(self):
with save_globals():
set_http_connect(200, count=123)
partition, nodes, count = \
self.controller.account_info(self.account)
self.assertEqual(count, 123)
with save_globals():
set_http_connect(200, count='123')
partition, nodes, count = \
self.controller.account_info(self.account)
self.assertEqual(count, 123)
with save_globals():
cache_key = get_account_memcache_key(self.account)
account_info = {'status': 200, 'container_count': 1234}
self.memcache.set(cache_key, account_info)
partition, nodes, count = \
self.controller.account_info(self.account)
self.assertEqual(count, 1234)
with save_globals():
cache_key = get_account_memcache_key(self.account)
account_info = {'status': 200, 'container_count': '1234'}
self.memcache.set(cache_key, account_info)
partition, nodes, count = \
self.controller.account_info(self.account)
self.assertEqual(count, 1234)
def test_make_requests(self):
with save_globals():
set_http_connect(200)
partition, nodes, count = \
self.controller.account_info(self.account, self.request)
set_http_connect(201, raise_timeout_exc=True)
self.controller._make_request(
nodes, partition, 'POST', '/', '', '',
self.controller.app.logger.thread_locals)
# tests if 200 is cached and used
def test_account_info_200(self):
with save_globals():
set_http_connect(200)
partition, nodes, count = \
self.controller.account_info(self.account, self.request)
self.check_account_info_return(partition, nodes)
self.assertEqual(count, 12345)
# Test the internal representation in memcache
# 'container_count' changed from int to str
cache_key = get_account_memcache_key(self.account)
container_info = {'status': 200,
'container_count': '12345',
'total_object_count': None,
'bytes': None,
'meta': {},
'sysmeta': {}}
self.assertEqual(container_info,
self.memcache.get(cache_key))
set_http_connect()
partition, nodes, count = \
self.controller.account_info(self.account, self.request)
self.check_account_info_return(partition, nodes)
self.assertEqual(count, 12345)
# tests if 404 is cached and used
def test_account_info_404(self):
with save_globals():
set_http_connect(404, 404, 404)
partition, nodes, count = \
self.controller.account_info(self.account, self.request)
self.check_account_info_return(partition, nodes, True)
self.assertEqual(count, None)
# Test the internal representation in memcache
# 'container_count' changed from 0 to None
cache_key = get_account_memcache_key(self.account)
account_info = {'status': 404,
'container_count': None, # internally keep None
'total_object_count': None,
'bytes': None,
'meta': {},
'sysmeta': {}}
self.assertEqual(account_info,
self.memcache.get(cache_key))
set_http_connect()
partition, nodes, count = \
self.controller.account_info(self.account, self.request)
self.check_account_info_return(partition, nodes, True)
self.assertEqual(count, None)
# tests if some http status codes are not cached
def test_account_info_no_cache(self):
def test(*status_list):
set_http_connect(*status_list)
partition, nodes, count = \
self.controller.account_info(self.account, self.request)
self.assertEqual(len(self.memcache.keys()), 0)
self.check_account_info_return(partition, nodes, True)
self.assertEqual(count, None)
with save_globals():
# We cache if we have two 404 responses - fail if only one
test(503, 503, 404)
test(504, 404, 503)
test(404, 507, 503)
test(503, 503, 503)
def test_account_info_no_account(self):
with save_globals():
self.memcache.store = {}
set_http_connect(404, 404, 404)
partition, nodes, count = \
self.controller.account_info(self.account, self.request)
self.check_account_info_return(partition, nodes, is_none=True)
self.assertEqual(count, None)
def check_container_info_return(self, ret, is_none=False):
if is_none:
partition, nodes, read_acl, write_acl = None, None, None, None
else:
partition, nodes = self.container_ring.get_nodes(self.account,
self.container)
read_acl, write_acl = self.read_acl, self.write_acl
self.assertEqual(partition, ret['partition'])
self.assertEqual(nodes, ret['nodes'])
self.assertEqual(read_acl, ret['read_acl'])
self.assertEqual(write_acl, ret['write_acl'])
def test_container_info_invalid_account(self):
def account_info(self, account, request, autocreate=False):
return None, None
with save_globals():
swift.proxy.controllers.Controller.account_info = account_info
ret = self.controller.container_info(self.account,
self.container,
self.request)
self.check_container_info_return(ret, True)
# tests if 200 is cached and used
def test_container_info_200(self):
with save_globals():
headers = {'x-container-read': self.read_acl,
'x-container-write': self.write_acl}
set_http_connect(200, # account_info is found
200, headers=headers) # container_info is found
ret = self.controller.container_info(
self.account, self.container, self.request)
self.check_container_info_return(ret)
cache_key = get_container_memcache_key(self.account,
self.container)
cache_value = self.memcache.get(cache_key)
self.assertTrue(isinstance(cache_value, dict))
self.assertEqual(200, cache_value.get('status'))
set_http_connect()
ret = self.controller.container_info(
self.account, self.container, self.request)
self.check_container_info_return(ret)
# tests if 404 is cached and used
def test_container_info_404(self):
def account_info(self, account, request):
return True, True, 0
with save_globals():
set_http_connect(503, 204, # account_info found
504, 404, 404) # container_info 'NotFound'
ret = self.controller.container_info(
self.account, self.container, self.request)
self.check_container_info_return(ret, True)
cache_key = get_container_memcache_key(self.account,
self.container)
cache_value = self.memcache.get(cache_key)
self.assertTrue(isinstance(cache_value, dict))
self.assertEqual(404, cache_value.get('status'))
set_http_connect()
ret = self.controller.container_info(
self.account, self.container, self.request)
self.check_container_info_return(ret, True)
set_http_connect(503, 404, 404) # account_info 'NotFound'
ret = self.controller.container_info(
self.account, self.container, self.request)
self.check_container_info_return(ret, True)
cache_key = get_container_memcache_key(self.account,
self.container)
cache_value = self.memcache.get(cache_key)
self.assertTrue(isinstance(cache_value, dict))
self.assertEqual(404, cache_value.get('status'))
set_http_connect()
ret = self.controller.container_info(
self.account, self.container, self.request)
self.check_container_info_return(ret, True)
# tests if some http status codes are not cached
def test_container_info_no_cache(self):
def test(*status_list):
set_http_connect(*status_list)
ret = self.controller.container_info(
self.account, self.container, self.request)
self.assertEqual(len(self.memcache.keys()), 0)
self.check_container_info_return(ret, True)
with save_globals():
            # a result is only cached after two 404 responses; none of these
            # combinations (at most one 404) should populate the cache
test(503, 503, 404)
test(504, 404, 503)
test(404, 507, 503)
test(503, 503, 503)
@patch_policies([StoragePolicy(0, 'zero', True, object_ring=FakeRing())])
class TestProxyServer(unittest.TestCase):
def test_get_object_ring(self):
baseapp = proxy_server.Application({},
FakeMemcache(),
container_ring=FakeRing(),
account_ring=FakeRing())
with patch_policies([
StoragePolicy(0, 'a', False, object_ring=123),
StoragePolicy(1, 'b', True, object_ring=456),
StoragePolicy(2, 'd', False, object_ring=789)
]):
# None means legacy so always use policy 0
ring = baseapp.get_object_ring(None)
self.assertEqual(ring, 123)
ring = baseapp.get_object_ring('')
self.assertEqual(ring, 123)
ring = baseapp.get_object_ring('0')
self.assertEqual(ring, 123)
ring = baseapp.get_object_ring('1')
self.assertEqual(ring, 456)
ring = baseapp.get_object_ring('2')
self.assertEqual(ring, 789)
# illegal values
self.assertRaises(ValueError, baseapp.get_object_ring, '99')
self.assertRaises(ValueError, baseapp.get_object_ring, 'asdf')
def test_unhandled_exception(self):
class MyApp(proxy_server.Application):
def get_controller(self, path):
raise Exception('this shouldn\'t be caught')
app = MyApp(None, FakeMemcache(), account_ring=FakeRing(),
container_ring=FakeRing())
req = Request.blank('/v1/account', environ={'REQUEST_METHOD': 'HEAD'})
app.update_request(req)
resp = app.handle_request(req)
self.assertEqual(resp.status_int, 500)
def test_internal_method_request(self):
baseapp = proxy_server.Application({},
FakeMemcache(),
container_ring=FakeRing(),
account_ring=FakeRing())
resp = baseapp.handle_request(
Request.blank('/v1/a', environ={'REQUEST_METHOD': '__init__'}))
self.assertEqual(resp.status, '405 Method Not Allowed')
def test_inexistent_method_request(self):
baseapp = proxy_server.Application({},
FakeMemcache(),
container_ring=FakeRing(),
account_ring=FakeRing())
resp = baseapp.handle_request(
Request.blank('/v1/a', environ={'REQUEST_METHOD': '!invalid'}))
self.assertEqual(resp.status, '405 Method Not Allowed')
def test_calls_authorize_allow(self):
called = [False]
def authorize(req):
called[0] = True
with save_globals():
set_http_connect(200)
app = proxy_server.Application(None, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing())
req = Request.blank('/v1/a')
req.environ['swift.authorize'] = authorize
app.update_request(req)
app.handle_request(req)
self.assertTrue(called[0])
def test_calls_authorize_deny(self):
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
app = proxy_server.Application(None, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing())
req = Request.blank('/v1/a')
req.environ['swift.authorize'] = authorize
app.update_request(req)
app.handle_request(req)
self.assertTrue(called[0])
def test_negative_content_length(self):
swift_dir = mkdtemp()
try:
baseapp = proxy_server.Application({'swift_dir': swift_dir},
FakeMemcache(), FakeLogger(),
FakeRing(), FakeRing())
resp = baseapp.handle_request(
Request.blank('/', environ={'CONTENT_LENGTH': '-1'}))
self.assertEqual(resp.status, '400 Bad Request')
self.assertEqual(resp.body, 'Invalid Content-Length')
resp = baseapp.handle_request(
Request.blank('/', environ={'CONTENT_LENGTH': '-123'}))
self.assertEqual(resp.status, '400 Bad Request')
self.assertEqual(resp.body, 'Invalid Content-Length')
finally:
rmtree(swift_dir, ignore_errors=True)
def test_adds_transaction_id(self):
swift_dir = mkdtemp()
try:
logger = FakeLogger()
baseapp = proxy_server.Application({'swift_dir': swift_dir},
FakeMemcache(), logger,
                                               container_ring=FakeRing(),
account_ring=FakeRing())
baseapp.handle_request(
Request.blank('/info',
environ={'HTTP_X_TRANS_ID_EXTRA': 'sardine',
'REQUEST_METHOD': 'GET'}))
# This is kind of a hokey way to get the transaction ID; it'd be
# better to examine response headers, but the catch_errors
# middleware is what sets the X-Trans-Id header, and we don't have
# that available here.
self.assertTrue(logger.txn_id.endswith('-sardine'))
finally:
rmtree(swift_dir, ignore_errors=True)
def test_adds_transaction_id_length_limit(self):
swift_dir = mkdtemp()
try:
logger = FakeLogger()
baseapp = proxy_server.Application({'swift_dir': swift_dir},
FakeMemcache(), logger,
                                               container_ring=FakeRing(),
account_ring=FakeRing())
baseapp.handle_request(
Request.blank('/info',
environ={'HTTP_X_TRANS_ID_EXTRA': 'a' * 1000,
'REQUEST_METHOD': 'GET'}))
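            # the over-long extra suffix should be truncated before being
            # appended to the transaction id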
self.assertTrue(logger.txn_id.endswith(
'-aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'))
finally:
rmtree(swift_dir, ignore_errors=True)
def test_denied_host_header(self):
swift_dir = mkdtemp()
try:
baseapp = proxy_server.Application({'swift_dir': swift_dir,
'deny_host_headers':
'invalid_host.com'},
FakeMemcache(),
                                               container_ring=FakeRing(),
account_ring=FakeRing())
resp = baseapp.handle_request(
Request.blank('/v1/a/c/o',
environ={'HTTP_HOST': 'invalid_host.com'}))
self.assertEqual(resp.status, '403 Forbidden')
finally:
rmtree(swift_dir, ignore_errors=True)
def test_node_timing(self):
baseapp = proxy_server.Application({'sorting_method': 'timing'},
FakeMemcache(),
container_ring=FakeRing(),
account_ring=FakeRing())
self.assertEqual(baseapp.node_timings, {})
req = Request.blank('/v1/account', environ={'REQUEST_METHOD': 'HEAD'})
baseapp.update_request(req)
resp = baseapp.handle_request(req)
self.assertEqual(resp.status_int, 503) # couldn't connect to anything
exp_timings = {}
self.assertEqual(baseapp.node_timings, exp_timings)
times = [time.time()]
exp_timings = {'127.0.0.1': (0.1, times[0] + baseapp.timing_expiry)}
with mock.patch('swift.proxy.server.time', lambda: times.pop(0)):
baseapp.set_node_timing({'ip': '127.0.0.1'}, 0.1)
self.assertEqual(baseapp.node_timings, exp_timings)
nodes = [{'ip': '127.0.0.1'}, {'ip': '127.0.0.2'}, {'ip': '127.0.0.3'}]
with mock.patch('swift.proxy.server.shuffle', lambda l: l):
res = baseapp.sort_nodes(nodes)
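        # nodes with no recorded timing sort ahead of the one node that has
        # a 0.1s timing recorded for it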
exp_sorting = [{'ip': '127.0.0.2'}, {'ip': '127.0.0.3'},
{'ip': '127.0.0.1'}]
self.assertEqual(res, exp_sorting)
def test_node_affinity(self):
baseapp = proxy_server.Application({'sorting_method': 'affinity',
'read_affinity': 'r1=1'},
FakeMemcache(),
container_ring=FakeRing(),
account_ring=FakeRing())
nodes = [{'region': 2, 'zone': 1, 'ip': '127.0.0.1'},
{'region': 1, 'zone': 2, 'ip': '127.0.0.2'}]
with mock.patch('swift.proxy.server.shuffle', lambda x: x):
app_sorted = baseapp.sort_nodes(nodes)
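        # read_affinity 'r1=1' prefers region 1, so the region 1 node should
        # sort ahead of the region 2 node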
exp_sorted = [{'region': 1, 'zone': 2, 'ip': '127.0.0.2'},
{'region': 2, 'zone': 1, 'ip': '127.0.0.1'}]
self.assertEqual(exp_sorted, app_sorted)
def test_info_defaults(self):
app = proxy_server.Application({}, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing())
self.assertTrue(app.expose_info)
self.assertTrue(isinstance(app.disallowed_sections, list))
self.assertEqual(1, len(app.disallowed_sections))
self.assertEqual(['swift.valid_api_versions'],
app.disallowed_sections)
self.assertTrue(app.admin_key is None)
def test_get_info_controller(self):
req = Request.blank('/info')
app = proxy_server.Application({}, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing())
controller, path_parts = app.get_controller(req)
self.assertTrue('version' in path_parts)
self.assertTrue(path_parts['version'] is None)
self.assertTrue('disallowed_sections' in path_parts)
self.assertTrue('expose_info' in path_parts)
self.assertTrue('admin_key' in path_parts)
self.assertEqual(controller.__name__, 'InfoController')
def test_error_limit_methods(self):
logger = debug_logger('test')
app = proxy_server.Application({}, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing(),
logger=logger)
node = app.container_ring.get_part_nodes(0)[0]
# error occurred
app.error_occurred(node, 'test msg')
self.assertTrue('test msg' in
logger.get_lines_for_level('error')[-1])
self.assertEqual(1, node_error_count(app, node))
# exception occurred
try:
raise Exception('kaboom1!')
except Exception as e1:
app.exception_occurred(node, 'test1', 'test1 msg')
line = logger.get_lines_for_level('error')[-1]
self.assertTrue('test1 server' in line)
self.assertTrue('test1 msg' in line)
log_args, log_kwargs = logger.log_dict['error'][-1]
self.assertTrue(log_kwargs['exc_info'])
self.assertEqual(log_kwargs['exc_info'][1], e1)
self.assertEqual(2, node_error_count(app, node))
# warning exception occurred
try:
raise Exception('kaboom2!')
except Exception as e2:
app.exception_occurred(node, 'test2', 'test2 msg',
level=logging.WARNING)
line = logger.get_lines_for_level('warning')[-1]
self.assertTrue('test2 server' in line)
self.assertTrue('test2 msg' in line)
log_args, log_kwargs = logger.log_dict['warning'][-1]
self.assertTrue(log_kwargs['exc_info'])
self.assertEqual(log_kwargs['exc_info'][1], e2)
self.assertEqual(3, node_error_count(app, node))
# custom exception occurred
try:
raise Exception('kaboom3!')
except Exception as e3:
e3_info = sys.exc_info()
try:
raise Exception('kaboom4!')
except Exception:
pass
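            # raising and swallowing a second exception here makes sure
            # exception_occurred reports the exc_info that was explicitly
            # passed in (kaboom3) rather than the current sys.exc_info()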
app.exception_occurred(node, 'test3', 'test3 msg',
level=logging.WARNING, exc_info=e3_info)
line = logger.get_lines_for_level('warning')[-1]
self.assertTrue('test3 server' in line)
self.assertTrue('test3 msg' in line)
log_args, log_kwargs = logger.log_dict['warning'][-1]
self.assertTrue(log_kwargs['exc_info'])
self.assertEqual(log_kwargs['exc_info'][1], e3)
self.assertEqual(4, node_error_count(app, node))
def test_valid_api_version(self):
app = proxy_server.Application({}, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing())
# The version string is only checked for account, container and object
# requests; the raised APIVersionError returns a 404 to the client
for path in [
'/v2/a',
'/v2/a/c',
'/v2/a/c/o']:
req = Request.blank(path)
self.assertRaises(APIVersionError, app.get_controller, req)
# Default valid API versions are ok
for path in [
'/v1/a',
'/v1/a/c',
'/v1/a/c/o',
'/v1.0/a',
'/v1.0/a/c',
'/v1.0/a/c/o']:
req = Request.blank(path)
controller, path_parts = app.get_controller(req)
self.assertTrue(controller is not None)
# Ensure settings valid API version constraint works
for version in ["42", 42]:
try:
with NamedTemporaryFile() as f:
f.write('[swift-constraints]\n')
f.write('valid_api_versions = %s\n' % version)
f.flush()
with mock.patch.object(utils, 'SWIFT_CONF_FILE', f.name):
constraints.reload_constraints()
req = Request.blank('/%s/a' % version)
controller, _ = app.get_controller(req)
self.assertTrue(controller is not None)
# In this case v1 is invalid
req = Request.blank('/v1/a')
self.assertRaises(APIVersionError, app.get_controller, req)
finally:
constraints.reload_constraints()
# Check that the valid_api_versions is not exposed by default
req = Request.blank('/info')
controller, path_parts = app.get_controller(req)
self.assertTrue('swift.valid_api_versions' in
path_parts.get('disallowed_sections'))
@patch_policies([
StoragePolicy(0, 'zero', is_default=True),
StoragePolicy(1, 'one'),
])
class TestProxyServerLoading(unittest.TestCase):
def setUp(self):
self._orig_hash_suffix = utils.HASH_PATH_SUFFIX
utils.HASH_PATH_SUFFIX = 'endcap'
self.tempdir = mkdtemp()
def tearDown(self):
rmtree(self.tempdir)
utils.HASH_PATH_SUFFIX = self._orig_hash_suffix
for policy in POLICIES:
policy.object_ring = None
def test_load_policy_rings(self):
for policy in POLICIES:
self.assertFalse(policy.object_ring)
conf_path = os.path.join(self.tempdir, 'proxy-server.conf')
conf_body = """
[DEFAULT]
swift_dir = %s
[pipeline:main]
pipeline = catch_errors cache proxy-server
[app:proxy-server]
use = egg:swift#proxy
[filter:cache]
use = egg:swift#memcache
[filter:catch_errors]
use = egg:swift#catch_errors
""" % self.tempdir
with open(conf_path, 'w') as f:
f.write(dedent(conf_body))
account_ring_path = os.path.join(self.tempdir, 'account.ring.gz')
write_fake_ring(account_ring_path)
container_ring_path = os.path.join(self.tempdir, 'container.ring.gz')
write_fake_ring(container_ring_path)
for policy in POLICIES:
object_ring_path = os.path.join(self.tempdir,
policy.ring_name + '.ring.gz')
write_fake_ring(object_ring_path)
app = loadapp(conf_path)
# find the end of the pipeline
while hasattr(app, 'app'):
app = app.app
# validate loaded rings
self.assertEqual(app.account_ring.serialized_path,
account_ring_path)
self.assertEqual(app.container_ring.serialized_path,
container_ring_path)
for policy in POLICIES:
self.assertEqual(policy.object_ring,
app.get_object_ring(int(policy)))
def test_missing_rings(self):
conf_path = os.path.join(self.tempdir, 'proxy-server.conf')
conf_body = """
[DEFAULT]
swift_dir = %s
[pipeline:main]
pipeline = catch_errors cache proxy-server
[app:proxy-server]
use = egg:swift#proxy
[filter:cache]
use = egg:swift#memcache
[filter:catch_errors]
use = egg:swift#catch_errors
""" % self.tempdir
with open(conf_path, 'w') as f:
f.write(dedent(conf_body))
ring_paths = [
os.path.join(self.tempdir, 'account.ring.gz'),
os.path.join(self.tempdir, 'container.ring.gz'),
]
for policy in POLICIES:
self.assertFalse(policy.object_ring)
object_ring_path = os.path.join(self.tempdir,
policy.ring_name + '.ring.gz')
ring_paths.append(object_ring_path)
for policy in POLICIES:
self.assertFalse(policy.object_ring)
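        # create the rings one at a time; loading the app must keep failing
        # until the last missing ring has been written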
for ring_path in ring_paths:
self.assertFalse(os.path.exists(ring_path))
self.assertRaises(IOError, loadapp, conf_path)
write_fake_ring(ring_path)
# all rings exist, app should load
loadapp(conf_path)
for policy in POLICIES:
self.assertTrue(policy.object_ring)
@patch_policies([StoragePolicy(0, 'zero', True,
object_ring=FakeRing(base_port=3000))])
class TestObjectController(unittest.TestCase):
def setUp(self):
self.app = proxy_server.Application(
None, FakeMemcache(),
logger=debug_logger('proxy-ut'),
account_ring=FakeRing(),
container_ring=FakeRing())
# clear proxy logger result for each test
_test_servers[0].logger._clear()
def tearDown(self):
self.app.account_ring.set_replicas(3)
self.app.container_ring.set_replicas(3)
for policy in POLICIES:
policy.object_ring = FakeRing(base_port=3000)
def put_container(self, policy_name, container_name):
# Note: only works if called with unpatched policies
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/%s HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'Content-Length: 0\r\n'
'X-Storage-Token: t\r\n'
'X-Storage-Policy: %s\r\n'
'\r\n' % (container_name, policy_name))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 2'
self.assertEqual(headers[:len(exp)], exp)
def assert_status_map(self, method, statuses, expected, raise_exc=False):
with save_globals():
kwargs = {}
if raise_exc:
kwargs['raise_exc'] = raise_exc
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/o',
headers={'Content-Length': '0',
'Content-Type': 'text/plain'})
self.app.update_request(req)
try:
res = method(req)
except HTTPException as res:
pass
self.assertEqual(res.status_int, expected)
# repeat test
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/o',
headers={'Content-Length': '0',
'Content-Type': 'text/plain'})
self.app.update_request(req)
try:
res = method(req)
except HTTPException as res:
pass
self.assertEqual(res.status_int, expected)
@unpatch_policies
def test_policy_IO(self):
def check_file(policy, cont, devs, check_val):
partition, nodes = policy.object_ring.get_nodes('a', cont, 'o')
conf = {'devices': _testdir, 'mount_check': 'false'}
df_mgr = diskfile.DiskFileManager(conf, FakeLogger())
for dev in devs:
file = df_mgr.get_diskfile(dev, partition, 'a',
cont, 'o',
policy=policy)
                if check_val is True:
                    file.open()
                else:
                    # devices passed with check_val=False must not have a
                    # copy of the object
                    self.assertRaises(DiskFileNotExist, file.open)
prolis = _test_sockets[0]
prosrv = _test_servers[0]
# check policy 0: put file on c, read it back, check loc on disk
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
obj = 'test_object0'
path = '/v1/a/c/o'
fd.write('PUT %s HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: %s\r\n'
'Content-Type: text/plain\r\n'
'\r\n%s' % (path, str(len(obj)), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
req = Request.blank(path,
environ={'REQUEST_METHOD': 'GET'},
headers={'Content-Type':
'text/plain'})
res = req.get_response(prosrv)
self.assertEqual(res.status_int, 200)
self.assertEqual(res.body, obj)
check_file(POLICIES[0], 'c', ['sda1', 'sdb1'], True)
check_file(POLICIES[0], 'c', ['sdc1', 'sdd1', 'sde1', 'sdf1'], False)
# check policy 1: put file on c1, read it back, check loc on disk
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
path = '/v1/a/c1/o'
obj = 'test_object1'
fd.write('PUT %s HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: %s\r\n'
'Content-Type: text/plain\r\n'
'\r\n%s' % (path, str(len(obj)), obj))
fd.flush()
headers = readuntil2crlfs(fd)
self.assertEqual(headers[:len(exp)], exp)
req = Request.blank(path,
environ={'REQUEST_METHOD': 'GET'},
headers={'Content-Type':
'text/plain'})
res = req.get_response(prosrv)
self.assertEqual(res.status_int, 200)
self.assertEqual(res.body, obj)
check_file(POLICIES[1], 'c1', ['sdc1', 'sdd1'], True)
check_file(POLICIES[1], 'c1', ['sda1', 'sdb1', 'sde1', 'sdf1'], False)
# check policy 2: put file on c2, read it back, check loc on disk
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
path = '/v1/a/c2/o'
obj = 'test_object2'
fd.write('PUT %s HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: %s\r\n'
'Content-Type: text/plain\r\n'
'\r\n%s' % (path, str(len(obj)), obj))
fd.flush()
headers = readuntil2crlfs(fd)
self.assertEqual(headers[:len(exp)], exp)
req = Request.blank(path,
environ={'REQUEST_METHOD': 'GET'},
headers={'Content-Type':
'text/plain'})
res = req.get_response(prosrv)
self.assertEqual(res.status_int, 200)
self.assertEqual(res.body, obj)
check_file(POLICIES[2], 'c2', ['sde1', 'sdf1'], True)
check_file(POLICIES[2], 'c2', ['sda1', 'sdb1', 'sdc1', 'sdd1'], False)
@unpatch_policies
def test_policy_IO_override(self):
if hasattr(_test_servers[-1], '_filesystem'):
# ironically, the _filesystem attribute on the object server means
# the in-memory diskfile is in use, so this test does not apply
return
prosrv = _test_servers[0]
# validate container policy is 1
req = Request.blank('/v1/a/c1', method='HEAD')
res = req.get_response(prosrv)
self.assertEqual(res.status_int, 204) # sanity check
self.assertEqual(POLICIES[1].name, res.headers['x-storage-policy'])
# check overrides: put it in policy 2 (not where the container says)
req = Request.blank(
'/v1/a/c1/wrong-o',
environ={'REQUEST_METHOD': 'PUT',
'wsgi.input': BytesIO(b"hello")},
headers={'Content-Type': 'text/plain',
'Content-Length': '5',
'X-Backend-Storage-Policy-Index': '2'})
res = req.get_response(prosrv)
self.assertEqual(res.status_int, 201) # sanity check
# go to disk to make sure it's there
partition, nodes = prosrv.get_object_ring(2).get_nodes(
'a', 'c1', 'wrong-o')
node = nodes[0]
conf = {'devices': _testdir, 'mount_check': 'false'}
df_mgr = diskfile.DiskFileManager(conf, FakeLogger())
df = df_mgr.get_diskfile(node['device'], partition, 'a',
'c1', 'wrong-o', policy=POLICIES[2])
with df.open():
contents = ''.join(df.reader())
self.assertEqual(contents, "hello")
# can't get it from the normal place
req = Request.blank('/v1/a/c1/wrong-o',
environ={'REQUEST_METHOD': 'GET'},
headers={'Content-Type': 'text/plain'})
res = req.get_response(prosrv)
self.assertEqual(res.status_int, 404) # sanity check
# but we can get it from policy 2
req = Request.blank('/v1/a/c1/wrong-o',
environ={'REQUEST_METHOD': 'GET'},
headers={'Content-Type': 'text/plain',
'X-Backend-Storage-Policy-Index': '2'})
res = req.get_response(prosrv)
self.assertEqual(res.status_int, 200)
self.assertEqual(res.body, 'hello')
# and we can delete it the same way
req = Request.blank('/v1/a/c1/wrong-o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'Content-Type': 'text/plain',
'X-Backend-Storage-Policy-Index': '2'})
res = req.get_response(prosrv)
self.assertEqual(res.status_int, 204)
df = df_mgr.get_diskfile(node['device'], partition, 'a',
'c1', 'wrong-o', policy=POLICIES[2])
try:
df.open()
except DiskFileNotExist as e:
self.assertTrue(float(e.timestamp) > 0)
else:
self.fail('did not raise DiskFileNotExist')
@unpatch_policies
def test_GET_newest_large_file(self):
prolis = _test_sockets[0]
prosrv = _test_servers[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
obj = 'a' * (1024 * 1024)
path = '/v1/a/c/o.large'
fd.write('PUT %s HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: %s\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (path, str(len(obj)), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
req = Request.blank(path,
environ={'REQUEST_METHOD': 'GET'},
headers={'Content-Type':
'application/octet-stream',
'X-Newest': 'true'})
res = req.get_response(prosrv)
self.assertEqual(res.status_int, 200)
self.assertEqual(res.body, obj)
@unpatch_policies
def test_GET_ranges(self):
prolis = _test_sockets[0]
prosrv = _test_servers[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
obj = (''.join(
('beans lots of beans lots of beans lots of beans yeah %04d ' % i)
for i in range(100)))
path = '/v1/a/c/o.beans'
fd.write('PUT %s HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: %s\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (path, str(len(obj)), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
# one byte range
req = Request.blank(
path,
environ={'REQUEST_METHOD': 'GET'},
headers={'Content-Type': 'application/octet-stream',
'Range': 'bytes=10-200'})
res = req.get_response(prosrv)
self.assertEqual(res.status_int, 206)
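        # Range end positions are inclusive, so bytes=10-200 is 191 bytes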
self.assertEqual(res.body, obj[10:201])
# multiple byte ranges
req = Request.blank(
path,
environ={'REQUEST_METHOD': 'GET'},
headers={'Content-Type': 'application/octet-stream',
'Range': 'bytes=10-200,1000-1099,4123-4523'})
res = req.get_response(prosrv)
self.assertEqual(res.status_int, 206)
ct, params = parse_content_type(res.headers['Content-Type'])
self.assertEqual(ct, 'multipart/byteranges')
boundary = dict(params).get('boundary')
self.assertTrue(boundary is not None)
got_mime_docs = []
for mime_doc_fh in iter_multipart_mime_documents(StringIO(res.body),
boundary):
headers = HeaderKeyDict(rfc822.Message(mime_doc_fh, 0).items())
body = mime_doc_fh.read()
got_mime_docs.append((headers, body))
self.assertEqual(len(got_mime_docs), 3)
first_range_headers = got_mime_docs[0][0]
first_range_body = got_mime_docs[0][1]
self.assertEqual(first_range_headers['Content-Range'],
'bytes 10-200/5800')
self.assertEqual(first_range_body, obj[10:201])
second_range_headers = got_mime_docs[1][0]
second_range_body = got_mime_docs[1][1]
self.assertEqual(second_range_headers['Content-Range'],
'bytes 1000-1099/5800')
self.assertEqual(second_range_body, obj[1000:1100])
        third_range_headers = got_mime_docs[2][0]
        third_range_body = got_mime_docs[2][1]
        self.assertEqual(third_range_headers['Content-Range'],
                         'bytes 4123-4523/5800')
        self.assertEqual(third_range_body, obj[4123:4524])
@unpatch_policies
def test_GET_bad_range_zero_byte(self):
prolis = _test_sockets[0]
prosrv = _test_servers[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
path = '/v1/a/c/o.zerobyte'
fd.write('PUT %s HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: 0\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n' % (path,))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
# bad byte-range
req = Request.blank(
path,
environ={'REQUEST_METHOD': 'GET'},
headers={'Content-Type': 'application/octet-stream',
'Range': 'bytes=spaghetti-carbonara'})
res = req.get_response(prosrv)
self.assertEqual(res.status_int, 200)
self.assertEqual(res.body, '')
# not a byte-range
req = Request.blank(
path,
environ={'REQUEST_METHOD': 'GET'},
headers={'Content-Type': 'application/octet-stream',
'Range': 'Kotta'})
res = req.get_response(prosrv)
self.assertEqual(res.status_int, 200)
self.assertEqual(res.body, '')
@unpatch_policies
def test_GET_ranges_resuming(self):
prolis = _test_sockets[0]
prosrv = _test_servers[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
obj = (''.join(
('Smurf! The smurfing smurf is completely smurfed. %03d ' % i)
for i in range(1000)))
path = '/v1/a/c/o.smurfs'
fd.write('PUT %s HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: %s\r\n'
'Content-Type: application/smurftet-stream\r\n'
'\r\n%s' % (path, str(len(obj)), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
kaboomed = [0]
bytes_before_timeout = [None]
class FileLikeKaboom(object):
def __init__(self, inner_file_like):
self.inner_file_like = inner_file_like
            # delegate anything else (close(), etc.) to the wrapped file
def __getattr__(self, attr):
return getattr(self.inner_file_like, attr)
def readline(self, *a, **kw):
if bytes_before_timeout[0] <= 0:
kaboomed[0] += 1
raise ChunkReadTimeout(None)
result = self.inner_file_like.readline(*a, **kw)
if len(result) > bytes_before_timeout[0]:
result = result[:bytes_before_timeout[0]]
bytes_before_timeout[0] -= len(result)
return result
def read(self, length=None):
result = self.inner_file_like.read(length)
if bytes_before_timeout[0] <= 0:
kaboomed[0] += 1
raise ChunkReadTimeout(None)
if len(result) > bytes_before_timeout[0]:
result = result[:bytes_before_timeout[0]]
bytes_before_timeout[0] -= len(result)
return result
orig_hrtdi = proxy_base.http_response_to_document_iters
# Use this to mock out http_response_to_document_iters. On the first
# call, the result will be sabotaged to blow up with
# ChunkReadTimeout after some number of bytes are read. On
# subsequent calls, no sabotage will be added.
def sabotaged_hrtdi(*a, **kw):
resp_parts = orig_hrtdi(*a, **kw)
for sb, eb, l, h, range_file in resp_parts:
if bytes_before_timeout[0] <= 0:
# simulate being unable to read MIME part of
# multipart/byteranges response
kaboomed[0] += 1
raise ChunkReadTimeout(None)
boomer = FileLikeKaboom(range_file)
yield sb, eb, l, h, boomer
sabotaged = [False]
def single_sabotage_hrtdi(*a, **kw):
if not sabotaged[0]:
sabotaged[0] = True
return sabotaged_hrtdi(*a, **kw)
else:
return orig_hrtdi(*a, **kw)
# We want sort of an end-to-end test of object resuming, so what we
# do is mock out stuff so the proxy thinks it only read a certain
# number of bytes before it got a timeout.
bytes_before_timeout[0] = 300
with mock.patch.object(proxy_base, 'http_response_to_document_iters',
single_sabotage_hrtdi):
req = Request.blank(
path,
environ={'REQUEST_METHOD': 'GET'},
headers={'Content-Type': 'application/octet-stream',
'Range': 'bytes=0-500'})
res = req.get_response(prosrv)
body = res.body # read the whole thing
self.assertEqual(kaboomed[0], 1) # sanity check
self.assertEqual(res.status_int, 206)
self.assertEqual(len(body), 501)
self.assertEqual(body, obj[:501])
# Sanity-check for multi-range resume: make sure we actually break
# in the middle of the second byterange. This test is partially
# about what happens when all the object servers break at once, and
# partially about validating all these mocks we do. After all, the
# point of resuming is that the client can't tell anything went
# wrong, so we need a test where we can't resume and something
# *does* go wrong so we can observe it.
bytes_before_timeout[0] = 700
kaboomed[0] = 0
sabotaged[0] = False
prosrv._error_limiting = {} # clear out errors
with mock.patch.object(proxy_base, 'http_response_to_document_iters',
sabotaged_hrtdi): # perma-broken
req = Request.blank(
path,
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=0-500,1000-1500,2000-2500'})
res = req.get_response(prosrv)
body = ''
try:
for chunk in res.app_iter:
body += chunk
except ChunkReadTimeout:
pass
self.assertEqual(res.status_int, 206)
self.assertTrue(kaboomed[0] > 0) # sanity check
ct, params = parse_content_type(res.headers['Content-Type'])
self.assertEqual(ct, 'multipart/byteranges') # sanity check
boundary = dict(params).get('boundary')
self.assertTrue(boundary is not None) # sanity check
got_byteranges = []
for mime_doc_fh in iter_multipart_mime_documents(StringIO(body),
boundary):
rfc822.Message(mime_doc_fh, 0)
body = mime_doc_fh.read()
got_byteranges.append(body)
self.assertEqual(len(got_byteranges), 2)
self.assertEqual(len(got_byteranges[0]), 501)
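        # the sabotage cut the stream off after 700 body bytes; the first
        # range consumed 501 of those, leaving only 199 bytes of the second
        # range before the permanent failure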
self.assertEqual(len(got_byteranges[1]), 199) # partial
# Multi-range resume, resuming in the middle of the first byterange
bytes_before_timeout[0] = 300
kaboomed[0] = 0
sabotaged[0] = False
prosrv._error_limiting = {} # clear out errors
with mock.patch.object(proxy_base, 'http_response_to_document_iters',
single_sabotage_hrtdi):
req = Request.blank(
path,
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=0-500,1000-1500,2000-2500'})
res = req.get_response(prosrv)
body = ''.join(res.app_iter)
self.assertEqual(res.status_int, 206)
self.assertEqual(kaboomed[0], 1) # sanity check
ct, params = parse_content_type(res.headers['Content-Type'])
self.assertEqual(ct, 'multipart/byteranges') # sanity check
boundary = dict(params).get('boundary')
self.assertTrue(boundary is not None) # sanity check
got_byteranges = []
for mime_doc_fh in iter_multipart_mime_documents(StringIO(body),
boundary):
rfc822.Message(mime_doc_fh, 0)
body = mime_doc_fh.read()
got_byteranges.append(body)
self.assertEqual(len(got_byteranges), 3)
self.assertEqual(len(got_byteranges[0]), 501)
self.assertEqual(got_byteranges[0], obj[:501])
self.assertEqual(len(got_byteranges[1]), 501)
self.assertEqual(got_byteranges[1], obj[1000:1501])
self.assertEqual(len(got_byteranges[2]), 501)
self.assertEqual(got_byteranges[2], obj[2000:2501])
# Multi-range resume, first GET dies in the middle of the second set
# of MIME headers
bytes_before_timeout[0] = 501
kaboomed[0] = 0
sabotaged[0] = False
prosrv._error_limiting = {} # clear out errors
with mock.patch.object(proxy_base, 'http_response_to_document_iters',
single_sabotage_hrtdi):
req = Request.blank(
path,
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=0-500,1000-1500,2000-2500'})
res = req.get_response(prosrv)
body = ''.join(res.app_iter)
self.assertEqual(res.status_int, 206)
self.assertTrue(kaboomed[0] >= 1) # sanity check
ct, params = parse_content_type(res.headers['Content-Type'])
self.assertEqual(ct, 'multipart/byteranges') # sanity check
boundary = dict(params).get('boundary')
self.assertTrue(boundary is not None) # sanity check
got_byteranges = []
for mime_doc_fh in iter_multipart_mime_documents(StringIO(body),
boundary):
rfc822.Message(mime_doc_fh, 0)
body = mime_doc_fh.read()
got_byteranges.append(body)
self.assertEqual(len(got_byteranges), 3)
self.assertEqual(len(got_byteranges[0]), 501)
self.assertEqual(got_byteranges[0], obj[:501])
self.assertEqual(len(got_byteranges[1]), 501)
self.assertEqual(got_byteranges[1], obj[1000:1501])
self.assertEqual(len(got_byteranges[2]), 501)
self.assertEqual(got_byteranges[2], obj[2000:2501])
# Multi-range resume, first GET dies in the middle of the second
# byterange
bytes_before_timeout[0] = 750
kaboomed[0] = 0
sabotaged[0] = False
prosrv._error_limiting = {} # clear out errors
with mock.patch.object(proxy_base, 'http_response_to_document_iters',
single_sabotage_hrtdi):
req = Request.blank(
path,
environ={'REQUEST_METHOD': 'GET'},
headers={'Range': 'bytes=0-500,1000-1500,2000-2500'})
res = req.get_response(prosrv)
body = ''.join(res.app_iter)
self.assertEqual(res.status_int, 206)
self.assertTrue(kaboomed[0] >= 1) # sanity check
ct, params = parse_content_type(res.headers['Content-Type'])
self.assertEqual(ct, 'multipart/byteranges') # sanity check
boundary = dict(params).get('boundary')
self.assertTrue(boundary is not None) # sanity check
got_byteranges = []
for mime_doc_fh in iter_multipart_mime_documents(StringIO(body),
boundary):
rfc822.Message(mime_doc_fh, 0)
body = mime_doc_fh.read()
got_byteranges.append(body)
self.assertEqual(len(got_byteranges), 3)
self.assertEqual(len(got_byteranges[0]), 501)
self.assertEqual(got_byteranges[0], obj[:501])
self.assertEqual(len(got_byteranges[1]), 501)
self.assertEqual(got_byteranges[1], obj[1000:1501])
self.assertEqual(len(got_byteranges[2]), 501)
self.assertEqual(got_byteranges[2], obj[2000:2501])
@unpatch_policies
def test_PUT_ec(self):
policy = POLICIES[3]
self.put_container("ec", "ec-con")
obj = 'abCD' * 10 # small, so we don't get multiple EC stripes
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/ec-con/o1 HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'Etag: "%s"\r\n'
'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (md5(obj).hexdigest(), len(obj), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
ecd = policy.pyeclib_driver
expected_pieces = set(ecd.encode(obj))
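        # encode the object locally with the same EC driver; the fragments
        # read back from the object servers must match this set exactly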
# go to disk to make sure it's there and all erasure-coded
partition, nodes = policy.object_ring.get_nodes('a', 'ec-con', 'o1')
conf = {'devices': _testdir, 'mount_check': 'false'}
df_mgr = diskfile.DiskFileRouter(conf, FakeLogger())[policy]
got_pieces = set()
got_indices = set()
got_durable = []
for node_index, node in enumerate(nodes):
df = df_mgr.get_diskfile(node['device'], partition,
'a', 'ec-con', 'o1',
policy=policy)
with df.open():
meta = df.get_metadata()
contents = ''.join(df.reader())
got_pieces.add(contents)
                # check for the presence of a .durable file for the timestamp
durable_file = os.path.join(
_testdir, node['device'], storage_directory(
diskfile.get_data_dir(policy),
partition, hash_path('a', 'ec-con', 'o1')),
utils.Timestamp(df.timestamp).internal + '.durable')
if os.path.isfile(durable_file):
got_durable.append(True)
lmeta = dict((k.lower(), v) for k, v in meta.items())
got_indices.add(
lmeta['x-object-sysmeta-ec-frag-index'])
self.assertEqual(
lmeta['x-object-sysmeta-ec-etag'],
md5(obj).hexdigest())
self.assertEqual(
lmeta['x-object-sysmeta-ec-content-length'],
str(len(obj)))
self.assertEqual(
lmeta['x-object-sysmeta-ec-segment-size'],
'4096')
self.assertEqual(
lmeta['x-object-sysmeta-ec-scheme'],
'jerasure_rs_vand 2+1')
self.assertEqual(
lmeta['etag'],
md5(contents).hexdigest())
self.assertEqual(expected_pieces, got_pieces)
self.assertEqual(set(('0', '1', '2')), got_indices)
# verify at least 2 puts made it all the way to the end of 2nd
# phase, ie at least 2 .durable statuses were written
num_durable_puts = sum(d is True for d in got_durable)
self.assertTrue(num_durable_puts >= 2)
@unpatch_policies
def test_PUT_ec_multiple_segments(self):
ec_policy = POLICIES[3]
self.put_container("ec", "ec-con")
pyeclib_header_size = len(ec_policy.pyeclib_driver.encode("")[0])
segment_size = ec_policy.ec_segment_size
# Big enough to have multiple segments. Also a multiple of the
# segment size to get coverage of that path too.
obj = 'ABC' * segment_size
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/ec-con/o2 HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (len(obj), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
# it's a 2+1 erasure code, so each fragment archive should be half
        # the length of the object, plus three inline pyeclib metadata
        # headers (one per segment)
expected_length = (len(obj) / 2 + pyeclib_header_size * 3)
partition, nodes = ec_policy.object_ring.get_nodes(
'a', 'ec-con', 'o2')
conf = {'devices': _testdir, 'mount_check': 'false'}
df_mgr = diskfile.DiskFileRouter(conf, FakeLogger())[ec_policy]
got_durable = []
fragment_archives = []
for node in nodes:
df = df_mgr.get_diskfile(
node['device'], partition, 'a',
'ec-con', 'o2', policy=ec_policy)
with df.open():
contents = ''.join(df.reader())
fragment_archives.append(contents)
self.assertEqual(len(contents), expected_length)
                # check for the presence of a .durable file for the timestamp
durable_file = os.path.join(
_testdir, node['device'], storage_directory(
diskfile.get_data_dir(ec_policy),
partition, hash_path('a', 'ec-con', 'o2')),
utils.Timestamp(df.timestamp).internal + '.durable')
if os.path.isfile(durable_file):
got_durable.append(True)
# Verify that we can decode each individual fragment and that they
# are all the correct size
fragment_size = ec_policy.fragment_size
nfragments = int(
math.ceil(float(len(fragment_archives[0])) / fragment_size))
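        # each fragment archive is a concatenation of fixed-size fragments,
        # one per segment; decoding the i-th fragment from every archive
        # should reproduce the i-th segment of the original object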
for fragment_index in range(nfragments):
fragment_start = fragment_index * fragment_size
fragment_end = (fragment_index + 1) * fragment_size
try:
frags = [fa[fragment_start:fragment_end]
for fa in fragment_archives]
seg = ec_policy.pyeclib_driver.decode(frags)
except ECDriverError:
self.fail("Failed to decode fragments %d; this probably "
"means the fragments are not the sizes they "
"should be" % fragment_index)
segment_start = fragment_index * segment_size
segment_end = (fragment_index + 1) * segment_size
self.assertEqual(seg, obj[segment_start:segment_end])
# verify at least 2 puts made it all the way to the end of 2nd
# phase, ie at least 2 .durable statuses were written
num_durable_puts = sum(d is True for d in got_durable)
self.assertTrue(num_durable_puts >= 2)
@unpatch_policies
def test_PUT_ec_object_etag_mismatch(self):
ec_policy = POLICIES[3]
self.put_container("ec", "ec-con")
obj = '90:6A:02:60:B1:08-96da3e706025537fc42464916427727e'
prolis = _test_sockets[0]
prosrv = _test_servers[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/ec-con/o3 HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'Etag: %s\r\n'
'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (md5('something else').hexdigest(), len(obj), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 422'
self.assertEqual(headers[:len(exp)], exp)
# nothing should have made it to disk on the object servers
partition, nodes = prosrv.get_object_ring(3).get_nodes(
'a', 'ec-con', 'o3')
conf = {'devices': _testdir, 'mount_check': 'false'}
df_mgr = diskfile.DiskFileRouter(conf, FakeLogger())[ec_policy]
for node in nodes:
df = df_mgr.get_diskfile(node['device'], partition,
'a', 'ec-con', 'o3', policy=POLICIES[3])
self.assertRaises(DiskFileNotExist, df.open)
@unpatch_policies
def test_PUT_ec_fragment_archive_etag_mismatch(self):
ec_policy = POLICIES[3]
self.put_container("ec", "ec-con")
# Cause a hash mismatch by feeding one particular MD5 hasher some
# extra data. The goal here is to get exactly one of the hashers in
# an object server.
countdown = [1]
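        # the first hasher created is left alone; only the second one (when
        # countdown reaches 0) gets the bogus extra data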
def busted_md5_constructor(initial_str=""):
hasher = md5(initial_str)
if countdown[0] == 0:
hasher.update('wrong')
countdown[0] -= 1
return hasher
obj = 'uvarovite-esurience-cerated-symphysic'
prolis = _test_sockets[0]
prosrv = _test_servers[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
with mock.patch('swift.obj.server.md5', busted_md5_constructor):
fd = sock.makefile()
fd.write('PUT /v1/a/ec-con/pimento HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'Etag: %s\r\n'
'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (md5(obj).hexdigest(), len(obj), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 503' # no quorum
self.assertEqual(headers[:len(exp)], exp)
# 2/3 of the fragment archives should have landed on disk
partition, nodes = prosrv.get_object_ring(3).get_nodes(
'a', 'ec-con', 'pimento')
conf = {'devices': _testdir, 'mount_check': 'false'}
df_mgr = diskfile.DiskFileRouter(conf, FakeLogger())[ec_policy]
found = 0
for node in nodes:
df = df_mgr.get_diskfile(node['device'], partition,
'a', 'ec-con', 'pimento',
policy=POLICIES[3])
try:
# diskfile open won't succeed because no durable was written,
# so look under the hood for data files.
files = os.listdir(df._datadir)
num_data_files = len([f for f in files if f.endswith('.data')])
self.assertEqual(1, num_data_files)
found += 1
except OSError:
pass
self.assertEqual(found, 2)
@unpatch_policies
def test_PUT_ec_if_none_match(self):
self.put_container("ec", "ec-con")
obj = 'ananepionic-lepidophyllous-ropewalker-neglectful'
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/ec-con/inm HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'Etag: "%s"\r\n'
'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (md5(obj).hexdigest(), len(obj), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
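        # now that the object exists, a second PUT with If-None-Match: *
        # must be rejected with 412 Precondition Failed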
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/ec-con/inm HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'If-None-Match: *\r\n'
'Etag: "%s"\r\n'
'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (md5(obj).hexdigest(), len(obj), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 412'
self.assertEqual(headers[:len(exp)], exp)
@unpatch_policies
def test_GET_ec(self):
self.put_container("ec", "ec-con")
obj = '0123456' * 11 * 17
prolis = _test_sockets[0]
prosrv = _test_servers[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/ec-con/go-get-it HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n'
'X-Object-Meta-Color: chartreuse\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (len(obj), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/ec-con/go-get-it HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
headers = parse_headers_string(headers)
self.assertEqual(str(len(obj)), headers['Content-Length'])
self.assertEqual(md5(obj).hexdigest(), headers['Etag'])
self.assertEqual('chartreuse', headers['X-Object-Meta-Color'])
gotten_obj = ''
while True:
buf = fd.read(64)
if not buf:
break
gotten_obj += buf
self.assertEqual(gotten_obj, obj)
error_lines = prosrv.logger.get_lines_for_level('error')
warn_lines = prosrv.logger.get_lines_for_level('warning')
        self.assertEqual(len(error_lines), 0)  # sanity
        self.assertEqual(len(warn_lines), 0)  # sanity
@unpatch_policies
def test_conditional_GET_ec(self):
self.put_container("ec", "ec-con")
obj = 'this object has an etag and is otherwise unimportant'
etag = md5(obj).hexdigest()
not_etag = md5(obj + "blahblah").hexdigest()
prolis = _test_sockets[0]
prosrv = _test_servers[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/ec-con/conditionals HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (len(obj), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
for verb, body in (('GET', obj), ('HEAD', '')):
# If-Match
req = Request.blank(
'/v1/a/ec-con/conditionals',
environ={'REQUEST_METHOD': verb},
headers={'If-Match': etag})
resp = req.get_response(prosrv)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, body)
req = Request.blank(
'/v1/a/ec-con/conditionals',
environ={'REQUEST_METHOD': verb},
headers={'If-Match': not_etag})
resp = req.get_response(prosrv)
self.assertEqual(resp.status_int, 412)
req = Request.blank(
'/v1/a/ec-con/conditionals',
environ={'REQUEST_METHOD': verb},
headers={'If-Match': "*"})
resp = req.get_response(prosrv)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, body)
# If-None-Match
req = Request.blank(
'/v1/a/ec-con/conditionals',
environ={'REQUEST_METHOD': verb},
headers={'If-None-Match': etag})
resp = req.get_response(prosrv)
self.assertEqual(resp.status_int, 304)
req = Request.blank(
'/v1/a/ec-con/conditionals',
environ={'REQUEST_METHOD': verb},
headers={'If-None-Match': not_etag})
resp = req.get_response(prosrv)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, body)
req = Request.blank(
'/v1/a/ec-con/conditionals',
environ={'REQUEST_METHOD': verb},
headers={'If-None-Match': "*"})
resp = req.get_response(prosrv)
self.assertEqual(resp.status_int, 304)
error_lines = prosrv.logger.get_lines_for_level('error')
warn_lines = prosrv.logger.get_lines_for_level('warning')
        self.assertEqual(len(error_lines), 0)  # sanity
        self.assertEqual(len(warn_lines), 0)  # sanity
@unpatch_policies
def test_GET_ec_big(self):
self.put_container("ec", "ec-con")
# our EC segment size is 4 KiB, so this is multiple (3) segments;
# we'll verify that with a sanity check
obj = 'a moose once bit my sister' * 400
self.assertTrue(
len(obj) > POLICIES.get_by_name("ec").ec_segment_size * 2,
"object is too small for proper testing")
prolis = _test_sockets[0]
prosrv = _test_servers[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/ec-con/big-obj-get HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (len(obj), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/ec-con/big-obj-get HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
headers = parse_headers_string(headers)
self.assertEqual(str(len(obj)), headers['Content-Length'])
self.assertEqual(md5(obj).hexdigest(), headers['Etag'])
gotten_obj = ''
while True:
buf = fd.read(64)
if not buf:
break
gotten_obj += buf
# This may look like a redundant test, but when things fail, this
# has a useful failure message while the subsequent one spews piles
# of garbage and demolishes your terminal's scrollback buffer.
self.assertEqual(len(gotten_obj), len(obj))
self.assertEqual(gotten_obj, obj)
error_lines = prosrv.logger.get_lines_for_level('error')
warn_lines = prosrv.logger.get_lines_for_level('warning')
        self.assertEqual(len(error_lines), 0)  # sanity
        self.assertEqual(len(warn_lines), 0)  # sanity
@unpatch_policies
def test_GET_ec_failure_handling(self):
self.put_container("ec", "ec-con")
obj = 'look at this object; it is simply amazing ' * 500
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/ec-con/crash-test-dummy HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (len(obj), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
def explodey_iter(inner_iter):
yield next(inner_iter)
raise Exception("doom ba doom")
def explodey_doc_parts_iter(inner_iter_iter):
for item in inner_iter_iter:
item = item.copy() # paranoia about mutable data
item['part_iter'] = explodey_iter(item['part_iter'])
yield item
real_ec_app_iter = swift.proxy.controllers.obj.ECAppIter
def explodey_ec_app_iter(path, policy, iterators, *a, **kw):
# Each thing in `iterators` here is a document-parts iterator,
# and we want to fail after getting a little into each part.
#
# That way, we ensure we've started streaming the response to
# the client when things go wrong.
return real_ec_app_iter(
path, policy,
[explodey_doc_parts_iter(i) for i in iterators],
*a, **kw)
with mock.patch("swift.proxy.controllers.obj.ECAppIter",
explodey_ec_app_iter):
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/ec-con/crash-test-dummy HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
headers = parse_headers_string(headers)
self.assertEqual(str(len(obj)), headers['Content-Length'])
self.assertEqual(md5(obj).hexdigest(), headers['Etag'])
gotten_obj = ''
try:
with Timeout(300): # don't hang the testrun when this fails
while True:
buf = fd.read(64)
if not buf:
break
gotten_obj += buf
except Timeout:
self.fail("GET hung when connection failed")
# Ensure we failed partway through, otherwise the mocks could
# get out of date without anyone noticing
self.assertTrue(0 < len(gotten_obj) < len(obj))
@unpatch_policies
def test_HEAD_ec(self):
self.put_container("ec", "ec-con")
obj = '0123456' * 11 * 17
prolis = _test_sockets[0]
prosrv = _test_servers[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/ec-con/go-head-it HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n'
'X-Object-Meta-Color: chartreuse\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (len(obj), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('HEAD /v1/a/ec-con/go-head-it HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
headers = parse_headers_string(headers)
self.assertEqual(str(len(obj)), headers['Content-Length'])
self.assertEqual(md5(obj).hexdigest(), headers['Etag'])
self.assertEqual('chartreuse', headers['X-Object-Meta-Color'])
error_lines = prosrv.logger.get_lines_for_level('error')
warn_lines = prosrv.logger.get_lines_for_level('warning')
        self.assertEqual(len(error_lines), 0)  # sanity
        self.assertEqual(len(warn_lines), 0)  # sanity
@unpatch_policies
def test_GET_ec_404(self):
self.put_container("ec", "ec-con")
prolis = _test_sockets[0]
prosrv = _test_servers[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/ec-con/yes-we-have-no-bananas HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 404'
self.assertEqual(headers[:len(exp)], exp)
error_lines = prosrv.logger.get_lines_for_level('error')
warn_lines = prosrv.logger.get_lines_for_level('warning')
        self.assertEqual(len(error_lines), 0)  # sanity
        self.assertEqual(len(warn_lines), 0)  # sanity
@unpatch_policies
def test_HEAD_ec_404(self):
self.put_container("ec", "ec-con")
prolis = _test_sockets[0]
prosrv = _test_servers[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('HEAD /v1/a/ec-con/yes-we-have-no-bananas HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 404'
self.assertEqual(headers[:len(exp)], exp)
error_lines = prosrv.logger.get_lines_for_level('error')
warn_lines = prosrv.logger.get_lines_for_level('warning')
        self.assertEqual(len(error_lines), 0)  # sanity
        self.assertEqual(len(warn_lines), 0)  # sanity
def test_PUT_expect_header_zero_content_length(self):
test_errors = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
if path == '/a/c/o.jpg':
if 'expect' in headers or 'Expect' in headers:
test_errors.append('Expect was in headers for object '
'server!')
with save_globals():
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
# The (201, Exception('test')) tuples in there have the effect of
# changing the status of the initial expect response. The default
# expect response from FakeConn for 201 is 100.
            # But the object server won't send a "100 Continue" line if the
            # client doesn't send an "Expect: 100-continue" header (as is the
            # case with the zero-byte PUT this test exercises); nevertheless
            # the object controller calls getexpect without prejudice. In
            # that case the status from the response shows up early, in
            # getexpect, instead of having to wait until getresponse. The
            # Exception is in there to ensure that the object controller
            # also *uses* the result of getexpect instead of calling
            # getresponse, in which case our FakeConn would blow up.
success_codes = [(201, Exception('test'))] * 3
set_http_connect(200, 200, *success_codes,
give_connect=test_connect)
req = Request.blank('/v1/a/c/o.jpg', {})
req.content_length = 0
self.app.update_request(req)
self.app.memcache.store = {}
res = controller.PUT(req)
self.assertEqual(test_errors, [])
self.assertTrue(res.status.startswith('201 '), res.status)
def test_PUT_expect_header_nonzero_content_length(self):
test_errors = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
if path == '/a/c/o.jpg':
if 'Expect' not in headers:
test_errors.append('Expect was not in headers for '
'non-zero byte PUT!')
with save_globals():
controller = ReplicatedObjectController(
self.app, 'a', 'c', 'o.jpg')
            # the (100, 201) tuples in there are just being extra explicit
            # about the FakeConn returning the 100 Continue status when the
            # object controller calls getexpect; that is already FakeConn's
            # default for 201 when no expect_status is specified.
success_codes = [(100, 201)] * 3
set_http_connect(200, 200, *success_codes,
give_connect=test_connect)
req = Request.blank('/v1/a/c/o.jpg', {})
req.content_length = 1
req.body = 'a'
self.app.update_request(req)
self.app.memcache.store = {}
res = controller.PUT(req)
self.assertEqual(test_errors, [])
self.assertTrue(res.status.startswith('201 '))
def test_PUT_respects_write_affinity(self):
written_to = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
if path == '/a/c/o.jpg':
written_to.append((ipaddr, port, device))
with save_globals():
def is_r0(node):
return node['region'] == 0
object_ring = self.app.get_object_ring(None)
object_ring.max_more_nodes = 100
self.app.write_affinity_is_local_fn = is_r0
self.app.write_affinity_node_count = lambda r: 3
controller = \
ReplicatedObjectController(
self.app, 'a', 'c', 'o.jpg')
set_http_connect(200, 200, 201, 201, 201,
give_connect=test_connect)
req = Request.blank('/v1/a/c/o.jpg', {})
req.content_length = 1
req.body = 'a'
self.app.memcache.store = {}
res = controller.PUT(req)
self.assertTrue(res.status.startswith('201 '))
self.assertEqual(3, len(written_to))
for ip, port, device in written_to:
# this is kind of a hokey test, but in FakeRing, the port is even
# when the region is 0, and odd when the region is 1, so this test
# asserts that we only wrote to nodes in region 0.
self.assertEqual(0, port % 2)
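# Hedged sketch of the node ordering write affinity is assumed to
# produce here (names below are ad hoc; the real sorting lives in the
# proxy app):
#
#   local = [n for n in primaries if is_r0(n)]            # region 0 first
#   other = [n for n in primaries if not is_r0(n)]
#   ordered = local[:write_affinity_node_count] + other   # 3 local writes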
def test_PUT_respects_write_affinity_with_507s(self):
written_to = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
if path == '/a/c/o.jpg':
written_to.append((ipaddr, port, device))
with save_globals():
def is_r0(node):
return node['region'] == 0
object_ring = self.app.get_object_ring(None)
object_ring.max_more_nodes = 100
self.app.write_affinity_is_local_fn = is_r0
self.app.write_affinity_node_count = lambda r: 3
controller = \
ReplicatedObjectController(
self.app, 'a', 'c', 'o.jpg')
self.app.error_limit(
object_ring.get_part_nodes(1)[0], 'test')
set_http_connect(200, 200, # account, container
201, 201, 201, # 3 working backends
give_connect=test_connect)
req = Request.blank('/v1/a/c/o.jpg', {})
req.content_length = 1
req.body = 'a'
self.app.memcache.store = {}
res = controller.PUT(req)
self.assertTrue(res.status.startswith('201 '))
self.assertEqual(3, len(written_to))
# this is kind of a hokey test, but in FakeRing, the port is even when
# the region is 0, and odd when the region is 1, so this test asserts
# that we wrote to 2 nodes in region 0, then went to 1 non-r0 node.
self.assertEqual(0, written_to[0][1] % 2) # it's (ip, port, device)
self.assertEqual(0, written_to[1][1] % 2)
self.assertNotEqual(0, written_to[2][1] % 2)
@unpatch_policies
def test_PUT_no_etag_fallocate(self):
with mock.patch('swift.obj.diskfile.fallocate') as mock_fallocate:
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
obj = 'hemoleucocytic-surfactant'
fd.write('PUT /v1/a/c/o HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (len(obj), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
# one for each obj server; this test has 2
self.assertEqual(len(mock_fallocate.mock_calls), 2)
@unpatch_policies
def test_PUT_message_length_using_content_length(self):
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
obj = 'j' * 20
fd.write('PUT /v1/a/c/o.content-length HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: %s\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (str(len(obj)), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
@unpatch_policies
def test_PUT_message_length_using_transfer_encoding(self):
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'2\r\n'
'oh\r\n'
'4\r\n'
' say\r\n'
'4\r\n'
' can\r\n'
'4\r\n'
' you\r\n'
'4\r\n'
' see\r\n'
'3\r\n'
' by\r\n'
'4\r\n'
' the\r\n'
'8\r\n'
' dawns\'\n\r\n'
'0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
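# For reference: each chunk above is '<hex length>\r\n<data>\r\n' and the
# body ends with the zero-length chunk '0\r\n\r\n', so the decoded payload
# is "oh say can you see by the dawns'\n" (33 bytes) -- the same value
# used as Content-Length by the next test, which sends both headers.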
@unpatch_policies
def test_PUT_message_length_using_both(self):
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'Content-Length: 33\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'2\r\n'
'oh\r\n'
'4\r\n'
' say\r\n'
'4\r\n'
' can\r\n'
'4\r\n'
' you\r\n'
'4\r\n'
' see\r\n'
'3\r\n'
' by\r\n'
'4\r\n'
' the\r\n'
'8\r\n'
' dawns\'\n\r\n'
'0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
@unpatch_policies
def test_PUT_bad_message_length(self):
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'Content-Length: 33\r\n'
'Transfer-Encoding: gzip\r\n\r\n'
'2\r\n'
'oh\r\n'
'4\r\n'
' say\r\n'
'4\r\n'
' can\r\n'
'4\r\n'
' you\r\n'
'4\r\n'
' see\r\n'
'3\r\n'
' by\r\n'
'4\r\n'
' the\r\n'
'8\r\n'
' dawns\'\n\r\n'
'0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 400'
self.assertEqual(headers[:len(exp)], exp)
@unpatch_policies
def test_PUT_message_length_unsup_xfr_encoding(self):
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'Content-Length: 33\r\n'
'Transfer-Encoding: gzip,chunked\r\n\r\n'
'2\r\n'
'oh\r\n'
'4\r\n'
' say\r\n'
'4\r\n'
' can\r\n'
'4\r\n'
' you\r\n'
'4\r\n'
' see\r\n'
'3\r\n'
' by\r\n'
'4\r\n'
' the\r\n'
'8\r\n'
' dawns\'\n\r\n'
'0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 501'
self.assertEqual(headers[:len(exp)], exp)
@unpatch_policies
def test_PUT_message_length_too_large(self):
with mock.patch('swift.common.constraints.MAX_FILE_SIZE', 10):
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c/o.chunked HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: application/octet-stream\r\n'
'Content-Length: 33\r\n\r\n'
'oh say can you see by the dawns\'\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 413'
self.assertEqual(headers[:len(exp)], exp)
@unpatch_policies
def test_PUT_last_modified(self):
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c/o.last_modified HTTP/1.1\r\n'
'Host: localhost\r\nConnection: close\r\n'
'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
lm_hdr = 'Last-Modified: '
self.assertEqual(headers[:len(exp)], exp)
last_modified_put = [line for line in headers.split('\r\n')
if lm_hdr in line][0][len(lm_hdr):]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('HEAD /v1/a/c/o.last_modified HTTP/1.1\r\n'
'Host: localhost\r\nConnection: close\r\n'
'X-Storage-Token: t\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
last_modified_head = [line for line in headers.split('\r\n')
if lm_hdr in line][0][len(lm_hdr):]
self.assertEqual(last_modified_put, last_modified_head)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/c/o.last_modified HTTP/1.1\r\n'
'Host: localhost\r\nConnection: close\r\n'
'If-Modified-Since: %s\r\n'
'X-Storage-Token: t\r\n\r\n' % last_modified_put)
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 304'
self.assertEqual(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/c/o.last_modified HTTP/1.1\r\n'
'Host: localhost\r\nConnection: close\r\n'
'If-Unmodified-Since: %s\r\n'
'X-Storage-Token: t\r\n\r\n' % last_modified_put)
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
def test_PUT_auto_content_type(self):
with save_globals():
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
def test_content_type(filename, expected):
# The three responses here are for account_info() (HEAD to
# account server), container_info() (HEAD to container server)
# and three calls to _connect_put_node() (PUT to three object
# servers)
set_http_connect(201, 201, 201, 201, 201,
give_content_type=lambda content_type:
self.assertEqual(content_type,
next(expected)))
# We need to include a Transfer-Encoding header to get past
# constraints.check_object_creation()
req = Request.blank('/v1/a/c/%s' % filename, {},
headers={'transfer-encoding': 'chunked'})
self.app.update_request(req)
self.app.memcache.store = {}
res = controller.PUT(req)
# If we don't check the response here we could miss problems
# in PUT()
self.assertEqual(res.status_int, 201)
test_content_type('test.jpg', iter(['', '', 'image/jpeg',
'image/jpeg', 'image/jpeg']))
test_content_type('test.html', iter(['', '', 'text/html',
'text/html', 'text/html']))
test_content_type('test.css', iter(['', '', 'text/css',
'text/css', 'text/css']))
def test_custom_mime_types_files(self):
swift_dir = mkdtemp()
try:
with open(os.path.join(swift_dir, 'mime.types'), 'w') as fp:
fp.write('foo/bar foo\n')
proxy_server.Application({'swift_dir': swift_dir},
FakeMemcache(), FakeLogger(),
FakeRing(), FakeRing())
self.assertEqual(proxy_server.mimetypes.guess_type('blah.foo')[0],
'foo/bar')
self.assertEqual(proxy_server.mimetypes.guess_type('blah.jpg')[0],
'image/jpeg')
finally:
rmtree(swift_dir, ignore_errors=True)
def test_PUT(self):
with save_globals():
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
def test_status_map(statuses, expected):
set_http_connect(*statuses)
req = Request.blank('/v1/a/c/o.jpg', {})
req.content_length = 0
self.app.update_request(req)
self.app.memcache.store = {}
res = controller.PUT(req)
expected = str(expected)
self.assertEqual(res.status[:len(expected)], expected)
test_status_map((200, 200, 201, 201, 201), 201)
test_status_map((200, 200, 201, 201, 500), 201)
test_status_map((200, 200, 204, 404, 404), 404)
test_status_map((200, 200, 204, 500, 404), 503)
test_status_map((200, 200, 202, 202, 204), 204)
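# (Hedged reading of the tuples above: the two leading statuses are the
# account and container lookups, the last three are the object-server
# PUTs; best_response then reports the prevailing object outcome, so a
# split like 204/500/404 with no success quorum maps to 503.)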
def test_PUT_connect_exceptions(self):
with save_globals():
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
def test_status_map(statuses, expected):
set_http_connect(*statuses)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/o.jpg', {})
req.content_length = 0
self.app.update_request(req)
try:
res = controller.PUT(req)
except HTTPException as res:
pass
expected = str(expected)
self.assertEqual(res.status[:len(expected)], expected)
test_status_map((200, 200, 201, 201, -1), 201) # connect exc
# connect errors
test_status_map((200, 200, Timeout(), 201, 201, ), 201)
test_status_map((200, 200, 201, 201, Exception()), 201)
# expect errors
test_status_map((200, 200, (Timeout(), None), 201, 201), 201)
test_status_map((200, 200, (Exception(), None), 201, 201), 201)
# response errors
test_status_map((200, 200, (100, Timeout()), 201, 201), 201)
test_status_map((200, 200, (100, Exception()), 201, 201), 201)
test_status_map((200, 200, 507, 201, 201), 201) # error limited
test_status_map((200, 200, -1, 201, -1), 503)
test_status_map((200, 200, 503, -1, 503), 503)
def test_PUT_send_exceptions(self):
with save_globals():
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
def test_status_map(statuses, expected):
self.app.memcache.store = {}
set_http_connect(*statuses)
req = Request.blank('/v1/a/c/o.jpg',
environ={'REQUEST_METHOD': 'PUT'},
body='some data')
self.app.update_request(req)
try:
res = controller.PUT(req)
except HTTPException as res:
pass
expected = str(expected)
self.assertEqual(res.status[:len(expected)], expected)
test_status_map((200, 200, 201, -1, 201), 201)
test_status_map((200, 200, 201, -1, -1), 503)
test_status_map((200, 200, 503, 503, -1), 503)
def test_PUT_max_size(self):
with save_globals():
set_http_connect(201, 201, 201)
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
req = Request.blank('/v1/a/c/o', {}, headers={
'Content-Length': str(constraints.MAX_FILE_SIZE + 1),
'Content-Type': 'foo/bar'})
self.app.update_request(req)
res = controller.PUT(req)
self.assertEqual(res.status_int, 413)
def test_PUT_bad_content_type(self):
with save_globals():
set_http_connect(201, 201, 201)
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
req = Request.blank('/v1/a/c/o', {}, headers={
'Content-Length': 0, 'Content-Type': 'foo/bar;swift_hey=45'})
self.app.update_request(req)
res = controller.PUT(req)
self.assertEqual(res.status_int, 400)
def test_PUT_getresponse_exceptions(self):
with save_globals():
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
def test_status_map(statuses, expected):
self.app.memcache.store = {}
set_http_connect(*statuses)
req = Request.blank('/v1/a/c/o.jpg', {})
req.content_length = 0
self.app.update_request(req)
try:
res = controller.PUT(req)
except HTTPException as res:
pass
expected = str(expected)
self.assertEqual(res.status[:len(str(expected))],
str(expected))
test_status_map((200, 200, 201, 201, -1), 201)
test_status_map((200, 200, 201, -1, -1), 503)
test_status_map((200, 200, 503, 503, -1), 503)
def test_POST(self):
with save_globals():
self.app.object_post_as_copy = False
def test_status_map(statuses, expected):
set_http_connect(*statuses)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/o', {}, method='POST',
headers={'Content-Type': 'foo/bar'})
self.app.update_request(req)
res = req.get_response(self.app)
expected = str(expected)
self.assertEqual(res.status[:len(expected)], expected)
test_status_map((200, 200, 202, 202, 202), 202)
test_status_map((200, 200, 202, 202, 500), 202)
test_status_map((200, 200, 202, 500, 500), 503)
test_status_map((200, 200, 202, 404, 500), 503)
test_status_map((200, 200, 202, 404, 404), 404)
test_status_map((200, 200, 404, 500, 500), 503)
test_status_map((200, 200, 404, 404, 404), 404)
@patch_policies([
StoragePolicy(0, 'zero', is_default=True, object_ring=FakeRing()),
StoragePolicy(1, 'one', object_ring=FakeRing()),
])
def test_POST_backend_headers(self):
# reset the router post patch_policies
self.app.obj_controller_router = proxy_server.ObjectControllerRouter()
self.app.object_post_as_copy = False
self.app.sort_nodes = lambda nodes: nodes
backend_requests = []
def capture_requests(ip, port, method, path, headers, *args,
**kwargs):
backend_requests.append((method, path, headers))
req = Request.blank('/v1/a/c/o', {}, method='POST',
headers={'X-Object-Meta-Color': 'Blue'})
# we want the container_info response to say a policy index of 1
resp_headers = {'X-Backend-Storage-Policy-Index': 1}
with mocked_http_conn(
200, 200, 202, 202, 202,
headers=resp_headers, give_connect=capture_requests
) as fake_conn:
resp = req.get_response(self.app)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 202)
self.assertEqual(len(backend_requests), 5)
def check_request(req, method, path, headers=None):
req_method, req_path, req_headers = req
self.assertEqual(method, req_method)
# caller can ignore leading path parts
self.assertTrue(req_path.endswith(path),
'expected path to end with %s, it was %s' % (
path, req_path))
headers = headers or {}
# caller can ignore some headers
for k, v in headers.items():
self.assertEqual(req_headers[k], v)
account_request = backend_requests.pop(0)
check_request(account_request, method='HEAD', path='/sda/0/a')
container_request = backend_requests.pop(0)
check_request(container_request, method='HEAD', path='/sda/0/a/c')
# make sure backend requests included expected container headers
container_headers = {}
for request in backend_requests:
req_headers = request[2]
device = req_headers['x-container-device']
host = req_headers['x-container-host']
container_headers[device] = host
expectations = {
'method': 'POST',
'path': '/0/a/c/o',
'headers': {
'X-Container-Partition': '0',
'Connection': 'close',
'User-Agent': 'proxy-server %s' % os.getpid(),
'Host': 'localhost:80',
'Referer': 'POST http://localhost/v1/a/c/o',
'X-Object-Meta-Color': 'Blue',
'X-Backend-Storage-Policy-Index': '1'
},
}
check_request(request, **expectations)
expected = {}
for i, device in enumerate(['sda', 'sdb', 'sdc']):
expected[device] = '10.0.0.%d:100%d' % (i, i)
self.assertEqual(container_headers, expected)
# and again with policy override
self.app.memcache.store = {}
backend_requests = []
req = Request.blank('/v1/a/c/o', {}, method='POST',
headers={'X-Object-Meta-Color': 'Blue',
'X-Backend-Storage-Policy-Index': 0})
with mocked_http_conn(
200, 200, 202, 202, 202,
headers=resp_headers, give_connect=capture_requests
) as fake_conn:
resp = req.get_response(self.app)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 202)
self.assertEqual(len(backend_requests), 5)
for request in backend_requests[2:]:
expectations = {
'method': 'POST',
'path': '/0/a/c/o', # ignore device bit
'headers': {
'X-Object-Meta-Color': 'Blue',
'X-Backend-Storage-Policy-Index': '0',
}
}
check_request(request, **expectations)
# and this time with post as copy
self.app.object_post_as_copy = True
self.app.memcache.store = {}
backend_requests = []
req = Request.blank('/v1/a/c/o', {}, method='POST',
headers={'X-Object-Meta-Color': 'Blue',
'X-Backend-Storage-Policy-Index': 0})
with mocked_http_conn(
200, 200, 200, 200, 200, 201, 201, 201,
headers=resp_headers, give_connect=capture_requests
) as fake_conn:
resp = req.get_response(self.app)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 202)
self.assertEqual(len(backend_requests), 8)
policy0 = {'X-Backend-Storage-Policy-Index': '0'}
policy1 = {'X-Backend-Storage-Policy-Index': '1'}
expected = [
# account info
{'method': 'HEAD', 'path': '/0/a'},
# container info
{'method': 'HEAD', 'path': '/0/a/c'},
# x-newests
{'method': 'GET', 'path': '/0/a/c/o', 'headers': policy1},
{'method': 'GET', 'path': '/0/a/c/o', 'headers': policy1},
{'method': 'GET', 'path': '/0/a/c/o', 'headers': policy1},
# new writes
{'method': 'PUT', 'path': '/0/a/c/o', 'headers': policy0},
{'method': 'PUT', 'path': '/0/a/c/o', 'headers': policy0},
{'method': 'PUT', 'path': '/0/a/c/o', 'headers': policy0},
]
for request, expectations in zip(backend_requests, expected):
check_request(request, **expectations)
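# Note (hedged): the x-container-host / x-container-device headers
# collected above are what the object servers are expected to use for
# asynchronous container updates after the POST; this test only checks
# that the proxy derived them from the container ring, not that any
# update actually happens.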
def test_POST_as_copy(self):
with save_globals():
def test_status_map(statuses, expected):
set_http_connect(*statuses)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'foo/bar'})
self.app.update_request(req)
res = req.get_response(self.app)
expected = str(expected)
self.assertEqual(res.status[:len(expected)], expected)
test_status_map((200, 200, 200, 200, 200, 202, 202, 202), 202)
test_status_map((200, 200, 200, 200, 200, 202, 202, 500), 202)
test_status_map((200, 200, 200, 200, 200, 202, 500, 500), 503)
test_status_map((200, 200, 200, 200, 200, 202, 404, 500), 503)
test_status_map((200, 200, 200, 200, 200, 202, 404, 404), 404)
test_status_map((200, 200, 200, 200, 200, 404, 500, 500), 503)
test_status_map((200, 200, 200, 200, 200, 404, 404, 404), 404)
def test_DELETE(self):
with save_globals():
def test_status_map(statuses, expected):
set_http_connect(*statuses)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'DELETE'})
self.app.update_request(req)
res = req.get_response(self.app)
self.assertEqual(res.status[:len(str(expected))],
str(expected))
test_status_map((200, 200, 204, 204, 204), 204)
test_status_map((200, 200, 204, 204, 500), 204)
test_status_map((200, 200, 204, 404, 404), 404)
test_status_map((200, 204, 500, 500, 404), 503)
test_status_map((200, 200, 404, 404, 404), 404)
test_status_map((200, 200, 400, 400, 400), 400)
def test_HEAD(self):
with save_globals():
def test_status_map(statuses, expected):
set_http_connect(*statuses)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'HEAD'})
self.app.update_request(req)
res = req.get_response(self.app)
self.assertEqual(res.status[:len(str(expected))],
str(expected))
if expected < 400:
self.assertTrue('x-works' in res.headers)
self.assertEqual(res.headers['x-works'], 'yes')
self.assertTrue('accept-ranges' in res.headers)
self.assertEqual(res.headers['accept-ranges'], 'bytes')
test_status_map((200, 200, 200, 404, 404), 200)
test_status_map((200, 200, 200, 500, 404), 200)
test_status_map((200, 200, 304, 500, 404), 304)
test_status_map((200, 200, 404, 404, 404), 404)
test_status_map((200, 200, 404, 404, 500), 404)
test_status_map((200, 200, 500, 500, 500), 503)
def test_HEAD_newest(self):
with save_globals():
def test_status_map(statuses, expected, timestamps,
expected_timestamp):
set_http_connect(*statuses, timestamps=timestamps)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'HEAD'},
headers={'x-newest': 'true'})
self.app.update_request(req)
res = req.get_response(self.app)
self.assertEqual(res.status[:len(str(expected))],
str(expected))
self.assertEqual(res.headers.get('last-modified'),
expected_timestamp)
# acct cont obj obj obj
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'2', '3'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'3', '2'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'3', '1'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '3',
'3', '1'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None,
None, None), None)
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None,
None, '1'), '1')
test_status_map((200, 200, 404, 404, 200), 200, ('0', '0', None,
None, '1'), '1')
def test_GET_newest(self):
with save_globals():
def test_status_map(statuses, expected, timestamps,
expected_timestamp):
set_http_connect(*statuses, timestamps=timestamps)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'GET'},
headers={'x-newest': 'true'})
self.app.update_request(req)
res = req.get_response(self.app)
self.assertEqual(res.status[:len(str(expected))],
str(expected))
self.assertEqual(res.headers.get('last-modified'),
expected_timestamp)
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'2', '3'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'3', '2'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'3', '1'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '3',
'3', '1'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None,
None, None), None)
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None,
None, '1'), '1')
with save_globals():
def test_status_map(statuses, expected, timestamps,
expected_timestamp):
set_http_connect(*statuses, timestamps=timestamps)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'HEAD'})
self.app.update_request(req)
res = req.get_response(self.app)
self.assertEqual(res.status[:len(str(expected))],
str(expected))
self.assertEqual(res.headers.get('last-modified'),
expected_timestamp)
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'2', '3'), '1')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'3', '2'), '1')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '1',
'3', '1'), '1')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', '3',
'3', '1'), '3')
test_status_map((200, 200, 200, 200, 200), 200, ('0', '0', None,
'1', '2'), None)
def test_POST_meta_val_len(self):
with save_globals():
limit = constraints.MAX_META_VALUE_LENGTH
self.app.object_post_as_copy = False
ReplicatedObjectController(
self.app, 'account', 'container', 'object')
set_http_connect(200, 200, 202, 202, 202)
# acct cont obj obj obj
req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'foo/bar',
'X-Object-Meta-Foo': 'x' * limit})
self.app.update_request(req)
res = req.get_response(self.app)
self.assertEqual(res.status_int, 202)
set_http_connect(202, 202, 202)
req = Request.blank(
'/v1/a/c/o', {'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'foo/bar',
'X-Object-Meta-Foo': 'x' * (limit + 1)})
self.app.update_request(req)
res = req.get_response(self.app)
self.assertEqual(res.status_int, 400)
def test_POST_as_copy_meta_val_len(self):
with save_globals():
limit = constraints.MAX_META_VALUE_LENGTH
set_http_connect(200, 200, 200, 200, 200, 202, 202, 202)
# acct cont objc objc objc obj obj obj
req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'foo/bar',
'X-Object-Meta-Foo': 'x' * limit})
self.app.update_request(req)
res = req.get_response(self.app)
self.assertEqual(res.status_int, 202)
set_http_connect(202, 202, 202)
req = Request.blank(
'/v1/a/c/o', {'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'foo/bar',
'X-Object-Meta-Foo': 'x' * (limit + 1)})
self.app.update_request(req)
res = req.get_response(self.app)
self.assertEqual(res.status_int, 400)
def test_POST_meta_key_len(self):
with save_globals():
limit = constraints.MAX_META_NAME_LENGTH
self.app.object_post_as_copy = False
set_http_connect(200, 200, 202, 202, 202)
# acct cont obj obj obj
req = Request.blank(
'/v1/a/c/o', {'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'foo/bar',
('X-Object-Meta-' + 'x' * limit): 'x'})
self.app.update_request(req)
res = req.get_response(self.app)
self.assertEqual(res.status_int, 202)
set_http_connect(202, 202, 202)
req = Request.blank(
'/v1/a/c/o', {'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'foo/bar',
('X-Object-Meta-' + 'x' * (limit + 1)): 'x'})
self.app.update_request(req)
res = req.get_response(self.app)
self.assertEqual(res.status_int, 400)
def test_POST_as_copy_meta_key_len(self):
with save_globals():
limit = constraints.MAX_META_NAME_LENGTH
set_http_connect(200, 200, 200, 200, 200, 202, 202, 202)
# acct cont objc objc objc obj obj obj
req = Request.blank(
'/v1/a/c/o', {'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'foo/bar',
('X-Object-Meta-' + 'x' * limit): 'x'})
self.app.update_request(req)
res = req.get_response(self.app)
self.assertEqual(res.status_int, 202)
set_http_connect(202, 202, 202)
req = Request.blank(
'/v1/a/c/o', {'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'foo/bar',
('X-Object-Meta-' + 'x' * (limit + 1)): 'x'})
self.app.update_request(req)
res = req.get_response(self.app)
self.assertEqual(res.status_int, 400)
def test_POST_meta_count(self):
with save_globals():
limit = constraints.MAX_META_COUNT
headers = dict(
(('X-Object-Meta-' + str(i), 'a') for i in range(limit + 1)))
headers.update({'Content-Type': 'foo/bar'})
set_http_connect(202, 202, 202)
req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'POST'},
headers=headers)
self.app.update_request(req)
res = req.get_response(self.app)
self.assertEqual(res.status_int, 400)
def test_POST_meta_size(self):
with save_globals():
limit = constraints.MAX_META_OVERALL_SIZE
count = limit / 256 # enough to cause the limit to be reached
headers = dict(
(('X-Object-Meta-' + str(i), 'a' * 256)
for i in range(count + 1)))
headers.update({'Content-Type': 'foo/bar'})
set_http_connect(202, 202, 202)
req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'POST'},
headers=headers)
self.app.update_request(req)
res = req.get_response(self.app)
self.assertEqual(res.status_int, 400)
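# Why this 400s (hedged arithmetic, assuming each header contributes
# roughly len(name-without-prefix) + len(value) toward the overall
# limit): count = MAX_META_OVERALL_SIZE / 256 headers with 256-byte
# values already consume the whole budget, so the extra header added by
# range(count + 1) necessarily exceeds MAX_META_OVERALL_SIZE.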
def test_PUT_not_autodetect_content_type(self):
with save_globals():
headers = {'Content-Type': 'something/right', 'Content-Length': 0}
it_worked = []
def verify_content_type(ipaddr, port, device, partition,
method, path, headers=None,
query_string=None):
if path == '/a/c/o.html':
it_worked.append(
headers['Content-Type'].startswith('something/right'))
set_http_connect(204, 204, 201, 201, 201,
give_connect=verify_content_type)
req = Request.blank('/v1/a/c/o.html', {'REQUEST_METHOD': 'PUT'},
headers=headers)
self.app.update_request(req)
req.get_response(self.app)
self.assertNotEqual(it_worked, [])
self.assertTrue(all(it_worked))
def test_PUT_autodetect_content_type(self):
with save_globals():
headers = {'Content-Type': 'something/wrong', 'Content-Length': 0,
'X-Detect-Content-Type': 'True'}
it_worked = []
def verify_content_type(ipaddr, port, device, partition,
method, path, headers=None,
query_string=None):
if path == '/a/c/o.html':
it_worked.append(
headers['Content-Type'].startswith('text/html'))
set_http_connect(204, 204, 201, 201, 201,
give_connect=verify_content_type)
req = Request.blank('/v1/a/c/o.html', {'REQUEST_METHOD': 'PUT'},
headers=headers)
self.app.update_request(req)
req.get_response(self.app)
self.assertNotEqual(it_worked, [])
self.assertTrue(all(it_worked))
def test_client_timeout(self):
with save_globals():
self.app.account_ring.get_nodes('account')
for dev in self.app.account_ring.devs:
dev['ip'] = '127.0.0.1'
dev['port'] = 1
self.app.container_ring.get_nodes('account')
for dev in self.app.container_ring.devs:
dev['ip'] = '127.0.0.1'
dev['port'] = 1
object_ring = self.app.get_object_ring(None)
object_ring.get_nodes('account')
for dev in object_ring.devs:
dev['ip'] = '127.0.0.1'
dev['port'] = 1
class SlowBody(object):
def __init__(self):
self.sent = 0
def read(self, size=-1):
if self.sent < 4:
sleep(0.1)
self.sent += 1
return ' '
return ''
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'wsgi.input': SlowBody()},
headers={'Content-Length': '4',
'Content-Type': 'text/plain'})
self.app.update_request(req)
set_http_connect(200, 200, 201, 201, 201)
# acct cont obj obj obj
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 201)
self.app.client_timeout = 0.05
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'wsgi.input': SlowBody()},
headers={'Content-Length': '4',
'Content-Type': 'text/plain'})
self.app.update_request(req)
set_http_connect(201, 201, 201)
# obj obj obj
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 408)
def test_client_disconnect(self):
with save_globals():
self.app.account_ring.get_nodes('account')
for dev in self.app.account_ring.devs:
dev['ip'] = '127.0.0.1'
dev['port'] = 1
self.app.container_ring.get_nodes('account')
for dev in self.app.container_ring.devs:
dev['ip'] = '127.0.0.1'
dev['port'] = 1
object_ring = self.app.get_object_ring(None)
object_ring.get_nodes('account')
for dev in object_ring.devs:
dev['ip'] = '127.0.0.1'
dev['port'] = 1
class SlowBody(object):
def __init__(self):
self.sent = 0
def read(self, size=-1):
raise Exception('Disconnected')
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT',
'wsgi.input': SlowBody()},
headers={'Content-Length': '4',
'Content-Type': 'text/plain'})
self.app.update_request(req)
set_http_connect(200, 200, 201, 201, 201)
# acct cont obj obj obj
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 499)
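# Note: 499 is the non-standard "Client Disconnect" status the proxy
# records when the client goes away before the body is fully read; it
# exists mainly for logging, since the client is already gone.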
def test_node_read_timeout(self):
with save_globals():
self.app.account_ring.get_nodes('account')
for dev in self.app.account_ring.devs:
dev['ip'] = '127.0.0.1'
dev['port'] = 1
self.app.container_ring.get_nodes('account')
for dev in self.app.container_ring.devs:
dev['ip'] = '127.0.0.1'
dev['port'] = 1
object_ring = self.app.get_object_ring(None)
object_ring.get_nodes('account')
for dev in object_ring.devs:
dev['ip'] = '127.0.0.1'
dev['port'] = 1
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'})
self.app.update_request(req)
set_http_connect(200, 200, 200, slow=0.1)
req.sent_size = 0
resp = req.get_response(self.app)
got_exc = False
try:
resp.body
except ChunkReadTimeout:
got_exc = True
self.assertTrue(not got_exc)
self.app.recoverable_node_timeout = 0.1
set_http_connect(200, 200, 200, slow=1.0)
resp = req.get_response(self.app)
got_exc = False
try:
resp.body
except ChunkReadTimeout:
got_exc = True
self.assertTrue(got_exc)
def test_node_read_timeout_retry(self):
with save_globals():
self.app.account_ring.get_nodes('account')
for dev in self.app.account_ring.devs:
dev['ip'] = '127.0.0.1'
dev['port'] = 1
self.app.container_ring.get_nodes('account')
for dev in self.app.container_ring.devs:
dev['ip'] = '127.0.0.1'
dev['port'] = 1
object_ring = self.app.get_object_ring(None)
object_ring.get_nodes('account')
for dev in object_ring.devs:
dev['ip'] = '127.0.0.1'
dev['port'] = 1
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'})
self.app.update_request(req)
self.app.recoverable_node_timeout = 0.1
set_http_connect(200, 200, 200, slow=[1.0, 1.0, 1.0])
resp = req.get_response(self.app)
got_exc = False
try:
self.assertEqual('', resp.body)
except ChunkReadTimeout:
got_exc = True
self.assertTrue(got_exc)
set_http_connect(200, 200, 200, body='lalala',
slow=[1.0, 1.0])
resp = req.get_response(self.app)
got_exc = False
try:
self.assertEqual(resp.body, 'lalala')
except ChunkReadTimeout:
got_exc = True
self.assertTrue(not got_exc)
set_http_connect(200, 200, 200, body='lalala',
slow=[1.0, 1.0], etags=['a', 'a', 'a'])
resp = req.get_response(self.app)
got_exc = False
try:
self.assertEqual(resp.body, 'lalala')
except ChunkReadTimeout:
got_exc = True
self.assertTrue(not got_exc)
set_http_connect(200, 200, 200, body='lalala',
slow=[1.0, 1.0], etags=['a', 'b', 'a'])
resp = req.get_response(self.app)
got_exc = False
try:
self.assertEqual(resp.body, 'lalala')
except ChunkReadTimeout:
got_exc = True
self.assertTrue(not got_exc)
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'})
set_http_connect(200, 200, 200, body='lalala',
slow=[1.0, 1.0], etags=['a', 'b', 'b'])
resp = req.get_response(self.app)
got_exc = False
try:
resp.body
except ChunkReadTimeout:
got_exc = True
self.assertTrue(got_exc)
def test_node_write_timeout(self):
with save_globals():
self.app.account_ring.get_nodes('account')
for dev in self.app.account_ring.devs:
dev['ip'] = '127.0.0.1'
dev['port'] = 1
self.app.container_ring.get_nodes('account')
for dev in self.app.container_ring.devs:
dev['ip'] = '127.0.0.1'
dev['port'] = 1
object_ring = self.app.get_object_ring(None)
object_ring.get_nodes('account')
for dev in object_ring.devs:
dev['ip'] = '127.0.0.1'
dev['port'] = 1
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '4',
'Content-Type': 'text/plain'},
body=' ')
self.app.update_request(req)
set_http_connect(200, 200, 201, 201, 201, slow=0.1)
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 201)
self.app.node_timeout = 0.1
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '4',
'Content-Type': 'text/plain'},
body=' ')
self.app.update_request(req)
set_http_connect(201, 201, 201, slow=1.0)
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 503)
def test_node_request_setting(self):
baseapp = proxy_server.Application({'request_node_count': '3'},
FakeMemcache(),
container_ring=FakeRing(),
account_ring=FakeRing())
self.assertEqual(baseapp.request_node_count(3), 3)
def test_iter_nodes(self):
with save_globals():
try:
object_ring = self.app.get_object_ring(None)
object_ring.max_more_nodes = 2
partition, nodes = object_ring.get_nodes('account',
'container',
'object')
collected_nodes = []
for node in self.app.iter_nodes(object_ring,
partition):
collected_nodes.append(node)
self.assertEqual(len(collected_nodes), 5)
object_ring.max_more_nodes = 20
self.app.request_node_count = lambda r: 20
partition, nodes = object_ring.get_nodes('account',
'container',
'object')
collected_nodes = []
for node in self.app.iter_nodes(object_ring,
partition):
collected_nodes.append(node)
self.assertEqual(len(collected_nodes), 9)
# zero error-limited primary nodes -> no handoff warnings
self.app.log_handoffs = True
self.app.logger = FakeLogger()
self.app.request_node_count = lambda r: 7
object_ring.max_more_nodes = 20
partition, nodes = object_ring.get_nodes('account',
'container',
'object')
collected_nodes = []
for node in self.app.iter_nodes(object_ring, partition):
collected_nodes.append(node)
self.assertEqual(len(collected_nodes), 7)
self.assertEqual(self.app.logger.log_dict['warning'], [])
self.assertEqual(self.app.logger.get_increments(), [])
# one error-limited primary node -> one handoff warning
self.app.log_handoffs = True
self.app.logger = FakeLogger()
self.app.request_node_count = lambda r: 7
self.app._error_limiting = {} # clear out errors
set_node_errors(self.app, object_ring._devs[0], 999,
last_error=(2 ** 63 - 1))
collected_nodes = []
for node in self.app.iter_nodes(object_ring, partition):
collected_nodes.append(node)
self.assertEqual(len(collected_nodes), 7)
self.assertEqual(self.app.logger.log_dict['warning'], [
(('Handoff requested (5)',), {})])
self.assertEqual(self.app.logger.get_increments(),
['handoff_count'])
# two error-limited primary nodes -> two handoff warnings
self.app.log_handoffs = True
self.app.logger = FakeLogger()
self.app.request_node_count = lambda r: 7
self.app._error_limiting = {} # clear out errors
for i in range(2):
set_node_errors(self.app, object_ring._devs[i], 999,
last_error=(2 ** 63 - 1))
collected_nodes = []
for node in self.app.iter_nodes(object_ring, partition):
collected_nodes.append(node)
self.assertEqual(len(collected_nodes), 7)
self.assertEqual(self.app.logger.log_dict['warning'], [
(('Handoff requested (5)',), {}),
(('Handoff requested (6)',), {})])
self.assertEqual(self.app.logger.get_increments(),
['handoff_count',
'handoff_count'])
# all error-limited primary nodes -> four handoff warnings,
# plus a handoff-all metric
self.app.log_handoffs = True
self.app.logger = FakeLogger()
self.app.request_node_count = lambda r: 10
object_ring.set_replicas(4) # otherwise we run out of handoffs
self.app._error_limiting = {} # clear out errors
for i in range(4):
set_node_errors(self.app, object_ring._devs[i], 999,
last_error=(2 ** 63 - 1))
collected_nodes = []
for node in self.app.iter_nodes(object_ring, partition):
collected_nodes.append(node)
self.assertEqual(len(collected_nodes), 10)
self.assertEqual(self.app.logger.log_dict['warning'], [
(('Handoff requested (7)',), {}),
(('Handoff requested (8)',), {}),
(('Handoff requested (9)',), {}),
(('Handoff requested (10)',), {})])
self.assertEqual(self.app.logger.get_increments(),
['handoff_count',
'handoff_count',
'handoff_count',
'handoff_count',
'handoff_all_count'])
finally:
object_ring.max_more_nodes = 0
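# Hedged sketch of what iter_nodes is being exercised for above
# (illustrative only): it yields primaries first, skips the ones marked
# error-limited via set_node_errors(), then keeps pulling handoffs until
# request_node_count is satisfied, logging 'Handoff requested (N)' and
# incrementing handoff_count for each (plus handoff_all_count when no
# primary was usable).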
def test_iter_nodes_calls_sort_nodes(self):
with mock.patch.object(self.app, 'sort_nodes') as sort_nodes:
object_ring = self.app.get_object_ring(None)
for node in self.app.iter_nodes(object_ring, 0):
pass
sort_nodes.assert_called_once_with(
object_ring.get_part_nodes(0))
def test_iter_nodes_skips_error_limited(self):
with mock.patch.object(self.app, 'sort_nodes', lambda n: n):
object_ring = self.app.get_object_ring(None)
first_nodes = list(self.app.iter_nodes(object_ring, 0))
second_nodes = list(self.app.iter_nodes(object_ring, 0))
self.assertTrue(first_nodes[0] in second_nodes)
self.app.error_limit(first_nodes[0], 'test')
second_nodes = list(self.app.iter_nodes(object_ring, 0))
self.assertTrue(first_nodes[0] not in second_nodes)
def test_iter_nodes_gives_extra_if_error_limited_inline(self):
object_ring = self.app.get_object_ring(None)
with nested(
mock.patch.object(self.app, 'sort_nodes', lambda n: n),
mock.patch.object(self.app, 'request_node_count',
lambda r: 6),
mock.patch.object(object_ring, 'max_more_nodes', 99)):
first_nodes = list(self.app.iter_nodes(object_ring, 0))
second_nodes = []
for node in self.app.iter_nodes(object_ring, 0):
if not second_nodes:
self.app.error_limit(node, 'test')
second_nodes.append(node)
self.assertEqual(len(first_nodes), 6)
self.assertEqual(len(second_nodes), 7)
def test_iter_nodes_with_custom_node_iter(self):
object_ring = self.app.get_object_ring(None)
node_list = [dict(id=n, ip='1.2.3.4', port=n, device='D')
for n in range(10)]
with nested(
mock.patch.object(self.app, 'sort_nodes', lambda n: n),
mock.patch.object(self.app, 'request_node_count',
lambda r: 3)):
got_nodes = list(self.app.iter_nodes(object_ring, 0,
node_iter=iter(node_list)))
self.assertEqual(node_list[:3], got_nodes)
with nested(
mock.patch.object(self.app, 'sort_nodes', lambda n: n),
mock.patch.object(self.app, 'request_node_count',
lambda r: 1000000)):
got_nodes = list(self.app.iter_nodes(object_ring, 0,
node_iter=iter(node_list)))
self.assertEqual(node_list, got_nodes)
def test_best_response_sets_headers(self):
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = controller.best_response(req, [200] * 3, ['OK'] * 3, [''] * 3,
'Object', headers=[{'X-Test': '1'},
{'X-Test': '2'},
{'X-Test': '3'}])
self.assertEqual(resp.headers['X-Test'], '1')
def test_best_response_sets_etag(self):
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'})
resp = controller.best_response(req, [200] * 3, ['OK'] * 3, [''] * 3,
'Object')
self.assertEqual(resp.etag, None)
resp = controller.best_response(req, [200] * 3, ['OK'] * 3, [''] * 3,
'Object',
etag='68b329da9893e34099c7d8ad5cb9c940'
)
self.assertEqual(resp.etag, '68b329da9893e34099c7d8ad5cb9c940')
def test_proxy_passes_content_type(self):
with save_globals():
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'})
self.app.update_request(req)
set_http_connect(200, 200, 200)
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_type, 'x-application/test')
set_http_connect(200, 200, 200)
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_length, 0)
set_http_connect(200, 200, 200, slow=True)
resp = req.get_response(self.app)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_length, 4)
def test_proxy_passes_content_length_on_head(self):
with save_globals():
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
self.app.update_request(req)
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
set_http_connect(200, 200, 200)
resp = controller.HEAD(req)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_length, 0)
set_http_connect(200, 200, 200, slow=True)
resp = controller.HEAD(req)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.content_length, 4)
def test_error_limiting(self):
with save_globals():
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
controller.app.sort_nodes = lambda l: l
object_ring = controller.app.get_object_ring(None)
self.assert_status_map(controller.HEAD, (200, 200, 503, 200, 200),
200)
self.assertEqual(
node_error_count(controller.app, object_ring.devs[0]), 2)
self.assertTrue(
node_last_error(controller.app, object_ring.devs[0])
is not None)
for _junk in range(self.app.error_suppression_limit):
self.assert_status_map(controller.HEAD, (200, 200, 503, 503,
503), 503)
self.assertEqual(
node_error_count(controller.app, object_ring.devs[0]),
self.app.error_suppression_limit + 1)
self.assert_status_map(controller.HEAD, (200, 200, 200, 200, 200),
503)
self.assertTrue(
node_last_error(controller.app, object_ring.devs[0])
is not None)
self.assert_status_map(controller.PUT, (200, 200, 200, 201, 201,
201), 503)
self.assert_status_map(controller.POST,
(200, 200, 200, 200, 200, 200, 202, 202,
202), 503)
self.assert_status_map(controller.DELETE,
(200, 200, 200, 204, 204, 204), 503)
self.app.error_suppression_interval = -300
self.assert_status_map(controller.HEAD, (200, 200, 200, 200, 200),
200)
self.assertRaises(BaseException,
self.assert_status_map, controller.DELETE,
(200, 200, 200, 204, 204, 204), 503,
raise_exc=True)
def test_error_limiting_survives_ring_reload(self):
with save_globals():
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
controller.app.sort_nodes = lambda l: l
object_ring = controller.app.get_object_ring(None)
self.assert_status_map(controller.HEAD, (200, 200, 503, 200, 200),
200)
self.assertEqual(
node_error_count(controller.app, object_ring.devs[0]), 2)
self.assertTrue(
node_last_error(controller.app, object_ring.devs[0])
is not None)
for _junk in range(self.app.error_suppression_limit):
self.assert_status_map(controller.HEAD, (200, 200, 503, 503,
503), 503)
self.assertEqual(
node_error_count(controller.app, object_ring.devs[0]),
self.app.error_suppression_limit + 1)
# wipe out any state in the ring
for policy in POLICIES:
policy.object_ring = FakeRing(base_port=3000)
# and we still get an error, which proves that the
# error-limiting info survived a ring reload
self.assert_status_map(controller.HEAD, (200, 200, 200, 200, 200),
503)
def test_PUT_error_limiting(self):
with save_globals():
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
controller.app.sort_nodes = lambda l: l
object_ring = controller.app.get_object_ring(None)
# acc con obj obj obj
self.assert_status_map(controller.PUT, (200, 200, 503, 200, 200),
200)
# 2, not 1, because assert_status_map() calls the method twice
odevs = object_ring.devs
self.assertEqual(node_error_count(controller.app, odevs[0]), 2)
self.assertEqual(node_error_count(controller.app, odevs[1]), 0)
self.assertEqual(node_error_count(controller.app, odevs[2]), 0)
self.assertTrue(
node_last_error(controller.app, odevs[0]) is not None)
self.assertTrue(node_last_error(controller.app, odevs[1]) is None)
self.assertTrue(node_last_error(controller.app, odevs[2]) is None)
def test_PUT_error_limiting_last_node(self):
with save_globals():
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
controller.app.sort_nodes = lambda l: l
object_ring = controller.app.get_object_ring(None)
# acc con obj obj obj
self.assert_status_map(controller.PUT, (200, 200, 200, 200, 503),
200)
# 2, not 1, because assert_status_map() calls the method twice
odevs = object_ring.devs
self.assertEqual(node_error_count(controller.app, odevs[0]), 0)
self.assertEqual(node_error_count(controller.app, odevs[1]), 0)
self.assertEqual(node_error_count(controller.app, odevs[2]), 2)
self.assertTrue(node_last_error(controller.app, odevs[0]) is None)
self.assertTrue(node_last_error(controller.app, odevs[1]) is None)
self.assertTrue(
node_last_error(controller.app, odevs[2]) is not None)
def test_acc_or_con_missing_returns_404(self):
with save_globals():
self.app.memcache = FakeMemcacheReturnsNone()
self.app._error_limiting = {}
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
set_http_connect(200, 200, 200, 200, 200, 200)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'})
self.app.update_request(req)
resp = getattr(controller, 'DELETE')(req)
self.assertEqual(resp.status_int, 200)
set_http_connect(404, 404, 404)
# acct acct acct
# make sure to use a fresh request without cached env
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEqual(resp.status_int, 404)
set_http_connect(503, 404, 404)
# acct acct acct
# make sure to use a fresh request without cached env
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEqual(resp.status_int, 404)
set_http_connect(503, 503, 404)
# acct acct acct
# make sure to use a fresh request without cached env
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEqual(resp.status_int, 404)
set_http_connect(503, 503, 503)
# acct acct acct
# make sure to use a fresh request without cached env
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEqual(resp.status_int, 404)
set_http_connect(200, 200, 204, 204, 204)
# acct cont obj obj obj
# make sure to use a fresh request without cached env
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEqual(resp.status_int, 204)
set_http_connect(200, 404, 404, 404)
# acct cont cont cont
# make sure to use a fresh request without cached env
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEqual(resp.status_int, 404)
set_http_connect(200, 503, 503, 503)
# acct cont cont cont
# make sure to use a fresh request without cached env
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEqual(resp.status_int, 404)
for dev in self.app.account_ring.devs:
set_node_errors(
self.app, dev, self.app.error_suppression_limit + 1,
time.time())
set_http_connect(200)
# acct [isn't actually called since everything
# is error limited]
# make sure to use a fresh request without cached env
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEqual(resp.status_int, 404)
for dev in self.app.account_ring.devs:
set_node_errors(self.app, dev, 0, last_error=None)
for dev in self.app.container_ring.devs:
set_node_errors(self.app, dev,
self.app.error_suppression_limit + 1,
time.time())
set_http_connect(200, 200)
# acct cont [isn't actually called since
# everything is error limited]
# make sure to use a fresh request without cached env
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'})
resp = getattr(controller, 'DELETE')(req)
self.assertEqual(resp.status_int, 404)
def test_PUT_POST_requires_container_exist(self):
with save_globals():
self.app.object_post_as_copy = False
self.app.memcache = FakeMemcacheReturnsNone()
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
set_http_connect(200, 404, 404, 404, 200, 200, 200)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'PUT'})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 404)
set_http_connect(200, 404, 404, 404, 200, 200)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'text/plain'})
self.app.update_request(req)
resp = controller.POST(req)
self.assertEqual(resp.status_int, 404)
def test_PUT_POST_as_copy_requires_container_exist(self):
with save_globals():
self.app.memcache = FakeMemcacheReturnsNone()
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
set_http_connect(200, 404, 404, 404, 200, 200, 200)
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 404)
set_http_connect(200, 404, 404, 404, 200, 200, 200, 200, 200, 200)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'text/plain'})
self.app.update_request(req)
resp = controller.POST(req)
self.assertEqual(resp.status_int, 404)
def test_bad_metadata(self):
with save_globals():
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
set_http_connect(200, 200, 201, 201, 201)
# acct cont obj obj obj
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0'})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank(
'/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Object-Meta-' + (
'a' * constraints.MAX_META_NAME_LENGTH): 'v'})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank(
'/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={
'Content-Length': '0',
'X-Object-Meta-' + (
'a' * (constraints.MAX_META_NAME_LENGTH + 1)): 'v'})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 400)
set_http_connect(201, 201, 201)
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Object-Meta-Too-Long': 'a' *
constraints.MAX_META_VALUE_LENGTH})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank(
'/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Object-Meta-Too-Long': 'a' *
(constraints.MAX_META_VALUE_LENGTH + 1)})
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 400)
set_http_connect(201, 201, 201)
headers = {'Content-Length': '0'}
for x in range(constraints.MAX_META_COUNT):
headers['X-Object-Meta-%d' % x] = 'v'
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
set_http_connect(201, 201, 201)
headers = {'Content-Length': '0'}
for x in range(constraints.MAX_META_COUNT + 1):
headers['X-Object-Meta-%d' % x] = 'v'
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 400)
set_http_connect(201, 201, 201)
headers = {'Content-Length': '0'}
header_value = 'a' * constraints.MAX_META_VALUE_LENGTH
size = 0
x = 0
while size < constraints.MAX_META_OVERALL_SIZE - 4 - \
constraints.MAX_META_VALUE_LENGTH:
size += 4 + constraints.MAX_META_VALUE_LENGTH
headers['X-Object-Meta-%04d' % x] = header_value
x += 1
if constraints.MAX_META_OVERALL_SIZE - size > 1:
headers['X-Object-Meta-a'] = \
'a' * (constraints.MAX_META_OVERALL_SIZE - size - 1)
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
set_http_connect(201, 201, 201)
headers['X-Object-Meta-a'] = \
'a' * (constraints.MAX_META_OVERALL_SIZE - size)
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
self.app.update_request(req)
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 400)
@contextmanager
def controller_context(self, req, *args, **kwargs):
_v, account, container, obj = utils.split_path(req.path, 4, 4, True)
controller = ReplicatedObjectController(
self.app, account, container, obj)
self.app.update_request(req)
self.app.memcache.store = {}
with save_globals():
new_connect = set_http_connect(*args, **kwargs)
yield controller
unused_status_list = []
while True:
try:
unused_status_list.append(next(new_connect.code_iter))
except StopIteration:
break
if unused_status_list:
self.fail('Unused status codes: %r' % unused_status_list)
def test_basic_put_with_x_copy_from(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o'})
status_list = (200, 200, 200, 200, 200, 201, 201, 201)
# acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o')
def test_basic_put_with_x_copy_from_account(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o',
'X-Copy-From-Account': 'a'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acc1 con1 objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o')
self.assertEqual(resp.headers['x-copied-from-account'], 'a')
def test_basic_put_with_x_copy_from_across_container(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c2/o'})
status_list = (200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont conc objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c2/o')
def test_basic_put_with_x_copy_from_across_container_and_account(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c2/o',
'X-Copy-From-Account': 'a'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acc1 con1 objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c2/o')
self.assertEqual(resp.headers['x-copied-from-account'], 'a')
def test_copy_non_zero_content_length(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '5',
'X-Copy-From': 'c/o'})
status_list = (200, 200)
# acct cont
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 400)
def test_copy_non_zero_content_length_with_account(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '5',
'X-Copy-From': 'c/o',
'X-Copy-From-Account': 'a'})
status_list = (200, 200)
# acct cont
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 400)
def test_copy_with_slashes_in_x_copy_from(self):
# extra source path parsing
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o/o2'})
status_list = (200, 200, 200, 200, 200, 201, 201, 201)
# acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o/o2')
def test_copy_with_slashes_in_x_copy_from_and_account(self):
# extra source path parsing
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o/o2',
'X-Copy-From-Account': 'a'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acc1 con1 objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o/o2')
self.assertEqual(resp.headers['x-copied-from-account'], 'a')
def test_copy_with_spaces_in_x_copy_from(self):
        # space in source path
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o%20o2'})
status_list = (200, 200, 200, 200, 200, 201, 201, 201)
# acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o%20o2')
def test_copy_with_spaces_in_x_copy_from_and_account(self):
        # space in source path
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': 'c/o%20o2',
'X-Copy-From-Account': 'a'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acc1 con1 objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o%20o2')
self.assertEqual(resp.headers['x-copied-from-account'], 'a')
def test_copy_with_leading_slash_in_x_copy_from(self):
# repeat tests with leading /
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o'})
status_list = (200, 200, 200, 200, 200, 201, 201, 201)
# acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o')
def test_copy_with_leading_slash_in_x_copy_from_and_account(self):
# repeat tests with leading /
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Copy-From-Account': 'a'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acc1 con1 objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o')
self.assertEqual(resp.headers['x-copied-from-account'], 'a')
def test_copy_with_leading_slash_and_slashes_in_x_copy_from(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o/o2'})
status_list = (200, 200, 200, 200, 200, 201, 201, 201)
# acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o/o2')
def test_copy_with_leading_slash_and_slashes_in_x_copy_from_acct(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o/o2',
'X-Copy-From-Account': 'a'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acc1 con1 objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o/o2')
self.assertEqual(resp.headers['x-copied-from-account'], 'a')
def test_copy_with_no_object_in_x_copy_from(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c'})
status_list = (200, 200)
# acct cont
with self.controller_context(req, *status_list) as controller:
try:
controller.PUT(req)
except HTTPException as resp:
self.assertEqual(resp.status_int // 100, 4) # client error
            else:
                self.fail('Invalid X-Copy-From did not raise client error')
def test_copy_with_no_object_in_x_copy_from_and_account(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c',
'X-Copy-From-Account': 'a'})
status_list = (200, 200)
# acct cont
with self.controller_context(req, *status_list) as controller:
try:
controller.PUT(req)
except HTTPException as resp:
self.assertEqual(resp.status_int // 100, 4) # client error
            else:
                self.fail('Invalid X-Copy-From did not raise client error')
def test_copy_server_error_reading_source(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o'})
status_list = (200, 200, 503, 503, 503)
# acct cont objc objc objc
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 503)
def test_copy_server_error_reading_source_and_account(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Copy-From-Account': 'a'})
status_list = (200, 200, 200, 200, 503, 503, 503)
# acct cont acct cont objc objc objc
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 503)
def test_copy_not_found_reading_source(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o'})
# not found
status_list = (200, 200, 404, 404, 404)
# acct cont objc objc objc
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 404)
def test_copy_not_found_reading_source_and_account(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Copy-From-Account': 'a'})
# not found
status_list = (200, 200, 200, 200, 404, 404, 404)
# acct cont acct cont objc objc objc
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 404)
def test_copy_with_some_missing_sources(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o'})
status_list = (200, 200, 404, 404, 200, 201, 201, 201)
# acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
def test_copy_with_some_missing_sources_and_account(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Copy-From-Account': 'a'})
status_list = (200, 200, 200, 200, 404, 404, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
def test_copy_with_object_metadata(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Object-Meta-Ours': 'okay'})
# test object metadata
status_list = (200, 200, 200, 200, 200, 201, 201, 201)
# acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers.get('x-object-meta-test'), 'testing')
self.assertEqual(resp.headers.get('x-object-meta-ours'), 'okay')
self.assertEqual(resp.headers.get('x-delete-at'), '9876543210')
def test_copy_with_object_metadata_and_account(self):
req = Request.blank('/v1/a1/c1/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o',
'X-Object-Meta-Ours': 'okay',
'X-Copy-From-Account': 'a'})
# test object metadata
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.PUT(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers.get('x-object-meta-test'), 'testing')
self.assertEqual(resp.headers.get('x-object-meta-ours'), 'okay')
self.assertEqual(resp.headers.get('x-delete-at'), '9876543210')
@_limit_max_file_size
def test_copy_source_larger_than_max_file_size(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0',
'X-Copy-From': '/c/o'})
# copy-from object is too large to fit in target object
class LargeResponseBody(object):
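            """Fake copy-source body that reports a length one byte over
            MAX_FILE_SIZE without holding any real data, so the proxy must
            reject the copy with 413."""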
def __len__(self):
return constraints.MAX_FILE_SIZE + 1
def __getitem__(self, key):
return ''
copy_from_obj_body = LargeResponseBody()
status_list = (200, 200, 200, 200, 200)
# acct cont objc objc objc
kwargs = dict(body=copy_from_obj_body)
with self.controller_context(req, *status_list,
**kwargs) as controller:
self.app.update_request(req)
self.app.memcache.store = {}
try:
resp = controller.PUT(req)
except HTTPException as resp:
pass
self.assertEqual(resp.status_int, 413)
def test_basic_COPY(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c/o2'})
status_list = (200, 200, 200, 200, 200, 201, 201, 201)
# acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o')
def test_basic_COPY_account(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c1/o2',
'Destination-Account': 'a1'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o')
self.assertEqual(resp.headers['x-copied-from-account'], 'a')
def test_COPY_across_containers(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c2/o'})
status_list = (200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont c2 objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o')
def test_COPY_source_with_slashes_in_name(self):
req = Request.blank('/v1/a/c/o/o2',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c/o'})
status_list = (200, 200, 200, 200, 200, 201, 201, 201)
# acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o/o2')
def test_COPY_account_source_with_slashes_in_name(self):
req = Request.blank('/v1/a/c/o/o2',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c1/o',
'Destination-Account': 'a1'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o/o2')
self.assertEqual(resp.headers['x-copied-from-account'], 'a')
def test_COPY_destination_leading_slash(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
status_list = (200, 200, 200, 200, 200, 201, 201, 201)
# acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o')
def test_COPY_account_destination_leading_slash(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o')
self.assertEqual(resp.headers['x-copied-from-account'], 'a')
def test_COPY_source_with_slashes_destination_leading_slash(self):
req = Request.blank('/v1/a/c/o/o2',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
status_list = (200, 200, 200, 200, 200, 201, 201, 201)
# acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o/o2')
def test_COPY_account_source_with_slashes_destination_leading_slash(self):
req = Request.blank('/v1/a/c/o/o2',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from'], 'c/o/o2')
self.assertEqual(resp.headers['x-copied-from-account'], 'a')
def test_COPY_no_object_in_destination(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c_o'})
status_list = [] # no requests needed
with self.controller_context(req, *status_list) as controller:
self.assertRaises(HTTPException, controller.COPY, req)
def test_COPY_account_no_object_in_destination(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c_o',
'Destination-Account': 'a1'})
status_list = [] # no requests needed
with self.controller_context(req, *status_list) as controller:
self.assertRaises(HTTPException, controller.COPY, req)
def test_COPY_server_error_reading_source(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
status_list = (200, 200, 503, 503, 503)
# acct cont objc objc objc
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 503)
def test_COPY_account_server_error_reading_source(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status_list = (200, 200, 200, 200, 503, 503, 503)
# acct cont acct cont objc objc objc
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 503)
def test_COPY_not_found_reading_source(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
status_list = (200, 200, 404, 404, 404)
# acct cont objc objc objc
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 404)
def test_COPY_account_not_found_reading_source(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status_list = (200, 200, 200, 200, 404, 404, 404)
# acct cont acct cont objc objc objc
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 404)
def test_COPY_with_some_missing_sources(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
status_list = (200, 200, 404, 404, 200, 201, 201, 201)
# acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 201)
def test_COPY_account_with_some_missing_sources(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
status_list = (200, 200, 200, 200, 404, 404, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 201)
def test_COPY_with_metadata(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o',
'X-Object-Meta-Ours': 'okay'})
status_list = (200, 200, 200, 200, 200, 201, 201, 201)
# acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers.get('x-object-meta-test'),
'testing')
self.assertEqual(resp.headers.get('x-object-meta-ours'), 'okay')
self.assertEqual(resp.headers.get('x-delete-at'), '9876543210')
def test_COPY_account_with_metadata(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'X-Object-Meta-Ours': 'okay',
'Destination-Account': 'a1'})
status_list = (200, 200, 200, 200, 200, 200, 200, 201, 201, 201)
# acct cont acct cont objc objc objc obj obj obj
with self.controller_context(req, *status_list) as controller:
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers.get('x-object-meta-test'),
'testing')
self.assertEqual(resp.headers.get('x-object-meta-ours'), 'okay')
self.assertEqual(resp.headers.get('x-delete-at'), '9876543210')
@_limit_max_file_size
def test_COPY_source_larger_than_max_file_size(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
class LargeResponseBody(object):
def __len__(self):
return constraints.MAX_FILE_SIZE + 1
def __getitem__(self, key):
return ''
copy_from_obj_body = LargeResponseBody()
status_list = (200, 200, 200, 200, 200)
# acct cont objc objc objc
kwargs = dict(body=copy_from_obj_body)
with self.controller_context(req, *status_list,
**kwargs) as controller:
try:
resp = controller.COPY(req)
except HTTPException as resp:
pass
self.assertEqual(resp.status_int, 413)
@_limit_max_file_size
def test_COPY_account_source_larger_than_max_file_size(self):
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
class LargeResponseBody(object):
def __len__(self):
return constraints.MAX_FILE_SIZE + 1
def __getitem__(self, key):
return ''
copy_from_obj_body = LargeResponseBody()
status_list = (200, 200, 200, 200, 200)
# acct cont objc objc objc
kwargs = dict(body=copy_from_obj_body)
with self.controller_context(req, *status_list,
**kwargs) as controller:
try:
resp = controller.COPY(req)
except HTTPException as resp:
pass
self.assertEqual(resp.status_int, 413)
def test_COPY_newest(self):
with save_globals():
controller = ReplicatedObjectController(
self.app, 'a', 'c', 'o')
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
req.account = 'a'
controller.object_name = 'o'
set_http_connect(200, 200, 200, 200, 200, 201, 201, 201,
# act cont objc objc objc obj obj obj
timestamps=('1', '1', '1', '3', '2', '4', '4',
'4'))
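            # The three source-object GETs report timestamps 1, 3 and 2; with
            # newest-wins semantics the proxy must copy from the replica at
            # timestamp 3 and report it via X-Copied-From-Last-Modified.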
self.app.memcache.store = {}
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from-last-modified'],
'3')
def test_COPY_account_newest(self):
with save_globals():
controller = ReplicatedObjectController(
self.app, 'a', 'c', 'o')
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
req.account = 'a'
controller.object_name = 'o'
set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201,
# act cont acct cont objc objc objc obj obj obj
timestamps=('1', '1', '1', '1', '3', '2', '1',
'4', '4', '4'))
self.app.memcache.store = {}
resp = controller.COPY(req)
self.assertEqual(resp.status_int, 201)
self.assertEqual(resp.headers['x-copied-from-last-modified'],
'3')
def test_COPY_delete_at(self):
with save_globals():
backend_requests = []
def capture_requests(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
backend_requests.append((method, path, headers))
controller = ReplicatedObjectController(
self.app, 'a', 'c', 'o')
set_http_connect(200, 200, 200, 200, 200, 201, 201, 201,
give_connect=capture_requests)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c/o'})
self.app.update_request(req)
resp = controller.COPY(req)
self.assertEqual(201, resp.status_int) # sanity
for method, path, given_headers in backend_requests:
if method != 'PUT':
continue
self.assertEqual(given_headers.get('X-Delete-At'),
'9876543210')
self.assertTrue('X-Delete-At-Host' in given_headers)
self.assertTrue('X-Delete-At-Device' in given_headers)
self.assertTrue('X-Delete-At-Partition' in given_headers)
self.assertTrue('X-Delete-At-Container' in given_headers)
def test_COPY_account_delete_at(self):
with save_globals():
backend_requests = []
def capture_requests(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
backend_requests.append((method, path, headers))
controller = ReplicatedObjectController(
self.app, 'a', 'c', 'o')
set_http_connect(200, 200, 200, 200, 200, 200, 200, 201, 201, 201,
give_connect=capture_requests)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': '/c1/o',
'Destination-Account': 'a1'})
self.app.update_request(req)
resp = controller.COPY(req)
self.assertEqual(201, resp.status_int) # sanity
for method, path, given_headers in backend_requests:
if method != 'PUT':
continue
self.assertEqual(given_headers.get('X-Delete-At'),
'9876543210')
self.assertTrue('X-Delete-At-Host' in given_headers)
self.assertTrue('X-Delete-At-Device' in given_headers)
self.assertTrue('X-Delete-At-Partition' in given_headers)
self.assertTrue('X-Delete-At-Container' in given_headers)
def test_chunked_put(self):
class ChunkedFile(object):
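            """File-like request body that serves `bytes` total bytes of 'a'
            in whatever sizes read() asks for; once exhausted it raises
            StopIteration to signal EOF."""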
def __init__(self, bytes):
self.bytes = bytes
self.read_bytes = 0
@property
def bytes_left(self):
return self.bytes - self.read_bytes
def read(self, amt=None):
if self.read_bytes >= self.bytes:
raise StopIteration()
if not amt:
amt = self.bytes_left
data = 'a' * min(amt, self.bytes_left)
self.read_bytes += len(data)
return data
with save_globals():
set_http_connect(201, 201, 201, 201)
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Transfer-Encoding': 'chunked',
'Content-Type': 'foo/bar'})
req.body_file = ChunkedFile(10)
self.app.memcache.store = {}
self.app.update_request(req)
res = controller.PUT(req)
self.assertEqual(res.status_int // 100, 2) # success
            # test 413 entity too large
set_http_connect(201, 201, 201, 201)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Transfer-Encoding': 'chunked',
'Content-Type': 'foo/bar'})
req.body_file = ChunkedFile(11)
self.app.memcache.store = {}
self.app.update_request(req)
with mock.patch('swift.common.constraints.MAX_FILE_SIZE', 10):
res = controller.PUT(req)
self.assertEqual(res.status_int, 413)
@unpatch_policies
def test_chunked_put_bad_version(self):
# Check bad version
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis, obj3lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v0 HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nContent-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 412'
self.assertEqual(headers[:len(exp)], exp)
@unpatch_policies
def test_chunked_put_bad_path(self):
# Check bad path
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis, obj3lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET invalid HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nContent-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 404'
self.assertEqual(headers[:len(exp)], exp)
@unpatch_policies
def test_chunked_put_bad_utf8(self):
# Check invalid utf-8
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis, obj3lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a%80 HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 412'
self.assertEqual(headers[:len(exp)], exp)
@unpatch_policies
def test_chunked_put_bad_path_no_controller(self):
# Check bad path, no controller
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis, obj3lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1 HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 412'
self.assertEqual(headers[:len(exp)], exp)
@unpatch_policies
def test_chunked_put_bad_method(self):
# Check bad method
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis, obj3lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('LICK /v1/a HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 405'
self.assertEqual(headers[:len(exp)], exp)
@unpatch_policies
def test_chunked_put_unhandled_exception(self):
# Check unhandled exception
(prosrv, acc1srv, acc2srv, con1srv, con2srv, obj1srv,
obj2srv, obj3srv) = _test_servers
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis, obj3lis) = _test_sockets
orig_update_request = prosrv.update_request
def broken_update_request(*args, **kwargs):
raise Exception('fake: this should be printed')
prosrv.update_request = broken_update_request
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('HEAD /v1/a HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 500'
self.assertEqual(headers[:len(exp)], exp)
prosrv.update_request = orig_update_request
@unpatch_policies
def test_chunked_put_head_account(self):
        # HEAD the account: mostly a sanity double-check, but it is really
        # here to exercise the part of Application.log_request that
        # 'enforces' a content_length on the response.
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis, obj3lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('HEAD /v1/a HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 204'
self.assertEqual(headers[:len(exp)], exp)
self.assertTrue('\r\nContent-Length: 0\r\n' in headers)
@unpatch_policies
def test_chunked_put_utf8_all_the_way_down(self):
# Test UTF-8 Unicode all the way through the system
ustr = '\xe1\xbc\xb8\xce\xbf\xe1\xbd\xba \xe1\xbc\xb0\xce' \
'\xbf\xe1\xbd\xbb\xce\x87 \xcf\x84\xe1\xbd\xb0 \xcf' \
'\x80\xe1\xbd\xb1\xce\xbd\xcf\x84\xca\xbc \xe1\xbc' \
'\x82\xce\xbd \xe1\xbc\x90\xce\xbe\xe1\xbd\xb5\xce' \
'\xba\xce\xbf\xce\xb9 \xcf\x83\xce\xb1\xcf\x86\xe1' \
'\xbf\x86.Test'
ustr_short = '\xe1\xbc\xb8\xce\xbf\xe1\xbd\xbatest'
# Create ustr container
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis, obj3lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Content-Length: 0\r\n\r\n' % quote(ustr))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
# List account with ustr container (test plain)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
containers = fd.read().split('\n')
self.assertTrue(ustr in containers)
# List account with ustr container (test json)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a?format=json HTTP/1.1\r\n'
'Host: localhost\r\nConnection: close\r\n'
'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
listing = json.loads(fd.read())
self.assertTrue(ustr.decode('utf8') in [l['name'] for l in listing])
# List account with ustr container (test xml)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a?format=xml HTTP/1.1\r\n'
'Host: localhost\r\nConnection: close\r\n'
'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
self.assertTrue('<name>%s</name>' % ustr in fd.read())
# Create ustr object with ustr metadata in ustr container
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'X-Object-Meta-%s: %s\r\nContent-Length: 0\r\n\r\n' %
(quote(ustr), quote(ustr), quote(ustr_short),
quote(ustr)))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
# List ustr container with ustr object (test plain)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Content-Length: 0\r\n\r\n' % quote(ustr))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
objects = fd.read().split('\n')
self.assertTrue(ustr in objects)
# List ustr container with ustr object (test json)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/%s?format=json HTTP/1.1\r\n'
'Host: localhost\r\nConnection: close\r\n'
'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n' %
quote(ustr))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
listing = json.loads(fd.read())
self.assertEqual(listing[0]['name'], ustr.decode('utf8'))
# List ustr container with ustr object (test xml)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/%s?format=xml HTTP/1.1\r\n'
'Host: localhost\r\nConnection: close\r\n'
'X-Storage-Token: t\r\nContent-Length: 0\r\n\r\n' %
quote(ustr))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
self.assertTrue('<name>%s</name>' % ustr in fd.read())
# Retrieve ustr object with ustr metadata
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Content-Length: 0\r\n\r\n' %
(quote(ustr), quote(ustr)))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
self.assertTrue('\r\nX-Object-Meta-%s: %s\r\n' %
(quote(ustr_short).lower(), quote(ustr)) in headers)
@unpatch_policies
def test_chunked_put_chunked_put(self):
# Do chunked object put
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis, obj3lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
# Also happens to assert that x-storage-token is taken as a
# replacement for x-auth-token.
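        # The body below is raw chunked transfer encoding: each chunk is a
        # hex length, CRLF, the chunk data, CRLF ('oh', ' hai', then 15
        # bytes), and the zero-length chunk terminates the body.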
fd.write('PUT /v1/a/c/o/chunky HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Transfer-Encoding: chunked\r\n\r\n'
'2\r\noh\r\n4\r\n hai\r\nf\r\n123456789abcdef\r\n'
'0\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
# Ensure we get what we put
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/c/o/chunky HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Auth-Token: t\r\n\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
body = fd.read()
self.assertEqual(body, 'oh hai123456789abcdef')
@unpatch_policies
def test_conditional_range_get(self):
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis, obj2lis,
obj3lis) = _test_sockets
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
# make a container
fd = sock.makefile()
fd.write('PUT /v1/a/con HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Content-Length: 0\r\n\r\n')
fd.flush()
exp = 'HTTP/1.1 201'
headers = readuntil2crlfs(fd)
self.assertEqual(headers[:len(exp)], exp)
# put an object in it
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/con/o HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: 10\r\n'
'Content-Type: text/plain\r\n'
'\r\n'
'abcdefghij\r\n')
fd.flush()
exp = 'HTTP/1.1 201'
headers = readuntil2crlfs(fd)
self.assertEqual(headers[:len(exp)], exp)
# request with both If-None-Match and Range
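        # the ETag matches the stored object, so the conditional check takes
        # precedence over the Range header and a 304 (not a 206 partial) is
        # expected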
etag = md5("abcdefghij").hexdigest()
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/con/o HTTP/1.1\r\n' +
'Host: localhost\r\n' +
'Connection: close\r\n' +
'X-Storage-Token: t\r\n' +
'If-None-Match: "' + etag + '"\r\n' +
'Range: bytes=3-8\r\n' +
'\r\n')
fd.flush()
exp = 'HTTP/1.1 304'
headers = readuntil2crlfs(fd)
self.assertEqual(headers[:len(exp)], exp)
def test_mismatched_etags(self):
with save_globals():
# no etag supplied, object servers return success w/ diff values
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '0'})
self.app.update_request(req)
set_http_connect(200, 201, 201, 201,
etags=[None,
'68b329da9893e34099c7d8ad5cb9c940',
'68b329da9893e34099c7d8ad5cb9c940',
'68b329da9893e34099c7d8ad5cb9c941'])
resp = controller.PUT(req)
self.assertEqual(resp.status_int // 100, 5) # server error
# req supplies etag, object servers return 422 - mismatch
headers = {'Content-Length': '0',
'ETag': '68b329da9893e34099c7d8ad5cb9c940'}
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers=headers)
self.app.update_request(req)
set_http_connect(200, 422, 422, 503,
etags=['68b329da9893e34099c7d8ad5cb9c940',
'68b329da9893e34099c7d8ad5cb9c941',
None,
None])
resp = controller.PUT(req)
self.assertEqual(resp.status_int // 100, 4) # client error
def test_response_get_accept_ranges_header(self):
with save_globals():
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'GET'})
self.app.update_request(req)
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
set_http_connect(200, 200, 200)
resp = controller.GET(req)
self.assertTrue('accept-ranges' in resp.headers)
self.assertEqual(resp.headers['accept-ranges'], 'bytes')
def test_response_head_accept_ranges_header(self):
with save_globals():
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'HEAD'})
self.app.update_request(req)
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
set_http_connect(200, 200, 200)
resp = controller.HEAD(req)
self.assertTrue('accept-ranges' in resp.headers)
self.assertEqual(resp.headers['accept-ranges'], 'bytes')
def test_GET_calls_authorize(self):
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
with save_globals():
set_http_connect(200, 200, 201, 201, 201)
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
req = Request.blank('/v1/a/c/o')
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
controller.GET(req)
self.assertTrue(called[0])
def test_HEAD_calls_authorize(self):
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
with save_globals():
set_http_connect(200, 200, 201, 201, 201)
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
req = Request.blank('/v1/a/c/o', {'REQUEST_METHOD': 'HEAD'})
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
controller.HEAD(req)
self.assertTrue(called[0])
def test_POST_calls_authorize(self):
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
with save_globals():
self.app.object_post_as_copy = False
set_http_connect(200, 200, 201, 201, 201)
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'Content-Length': '5'}, body='12345')
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
controller.POST(req)
self.assertTrue(called[0])
def test_POST_as_copy_calls_authorize(self):
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
with save_globals():
set_http_connect(200, 200, 200, 200, 200, 201, 201, 201)
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'Content-Length': '5'}, body='12345')
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
controller.POST(req)
self.assertTrue(called[0])
def test_PUT_calls_authorize(self):
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
with save_globals():
set_http_connect(200, 200, 201, 201, 201)
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '5'}, body='12345')
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
controller.PUT(req)
self.assertTrue(called[0])
def test_COPY_calls_authorize(self):
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
with save_globals():
set_http_connect(200, 200, 200, 200, 200, 201, 201, 201)
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'COPY'},
headers={'Destination': 'c/o'})
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
controller.COPY(req)
self.assertTrue(called[0])
def test_POST_converts_delete_after_to_delete_at(self):
with save_globals():
self.app.object_post_as_copy = False
controller = ReplicatedObjectController(
self.app, 'account', 'container', 'object')
set_http_connect(200, 200, 202, 202, 202)
self.app.memcache.store = {}
orig_time = time.time
try:
t = time.time()
time.time = lambda: t
req = Request.blank('/v1/a/c/o', {},
headers={'Content-Type': 'foo/bar',
'X-Delete-After': '60'})
self.app.update_request(req)
res = controller.POST(req)
self.assertEqual(res.status, '202 Fake')
self.assertEqual(req.headers.get('x-delete-at'),
str(int(t + 60)))
finally:
time.time = orig_time
@unpatch_policies
def test_ec_client_disconnect(self):
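        """
        Upload an object to an EC-policy container, start a GET, read only a
        few bytes and drop the connection; the proxy is expected to log
        'Client disconnected on read' warnings.
        """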
prolis = _test_sockets[0]
# create connection
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
# create container
fd.write('PUT /v1/a/ec-discon HTTP/1.1\r\n'
'Host: localhost\r\n'
'Content-Length: 0\r\n'
'X-Storage-Token: t\r\n'
'X-Storage-Policy: ec\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 2'
self.assertEqual(headers[:len(exp)], exp)
# create object
obj = 'a' * 4 * 64 * 2 ** 10
fd.write('PUT /v1/a/ec-discon/test HTTP/1.1\r\n'
'Host: localhost\r\n'
'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: donuts\r\n'
'\r\n%s' % (len(obj), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
# get object
fd.write('GET /v1/a/ec-discon/test HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
# read most of the object, and disconnect
fd.read(10)
fd.close()
sock.close()
sleep(0)
# check for disconnect message!
expected = ['Client disconnected on read'] * 2
self.assertEqual(
_test_servers[0].logger.get_lines_for_level('warning'),
expected)
@unpatch_policies
def test_leak_1(self):
_request_instances = weakref.WeakKeyDictionary()
_orig_init = Request.__init__
def request_init(self, *args, **kwargs):
_orig_init(self, *args, **kwargs)
_request_instances[self] = None
with mock.patch.object(Request, "__init__", request_init):
prolis = _test_sockets[0]
prosrv = _test_servers[0]
obj_len = prosrv.client_chunk_size * 2
# PUT test file
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/c/test_leak_1 HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Auth-Token: t\r\n'
'Content-Length: %s\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n%s' % (obj_len, 'a' * obj_len))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
# Remember Request instance count, make sure the GC is run for
# pythons without reference counting.
for i in range(4):
sleep(0) # let eventlet do its thing
gc.collect()
else:
sleep(0)
before_request_instances = len(_request_instances)
# GET test file, but disconnect early
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/c/test_leak_1 HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Auth-Token: t\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
fd.read(1)
fd.close()
sock.close()
# Make sure the GC is run again for pythons without reference
# counting
for i in range(4):
sleep(0) # let eventlet do its thing
gc.collect()
else:
sleep(0)
self.assertEqual(
before_request_instances, len(_request_instances))
def test_OPTIONS(self):
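        """
        Exercise OPTIONS preflight handling: no container CORS metadata or an
        empty allow_origin draws 401; a matching allow_origin returns 200
        with the allowed methods and max-age; an Origin without
        Access-Control-Request-Method is rejected; a plain OPTIONS with no
        Origin just reports Allow; and both a proxy-level cors_allow_origin
        entry and a wildcard container setting let the preflight through.
        """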
with save_globals():
controller = ReplicatedObjectController(
self.app, 'a', 'c', 'o.jpg')
def my_empty_container_info(*args):
return {}
controller.container_info = my_empty_container_info
req = Request.blank(
'/v1/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.com',
'Access-Control-Request-Method': 'GET'})
resp = controller.OPTIONS(req)
self.assertEqual(401, resp.status_int)
def my_empty_origin_container_info(*args):
return {'cors': {'allow_origin': None}}
controller.container_info = my_empty_origin_container_info
req = Request.blank(
'/v1/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.com',
'Access-Control-Request-Method': 'GET'})
resp = controller.OPTIONS(req)
self.assertEqual(401, resp.status_int)
def my_container_info(*args):
return {
'cors': {
'allow_origin': 'http://foo.bar:8080 https://foo.bar',
'max_age': '999',
}
}
controller.container_info = my_container_info
req = Request.blank(
'/v1/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'https://foo.bar',
'Access-Control-Request-Method': 'GET'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEqual(200, resp.status_int)
self.assertEqual(
'https://foo.bar',
resp.headers['access-control-allow-origin'])
for verb in 'OPTIONS COPY GET POST PUT DELETE HEAD'.split():
self.assertTrue(
verb in resp.headers['access-control-allow-methods'])
self.assertEqual(
len(resp.headers['access-control-allow-methods'].split(', ')),
7)
self.assertEqual('999', resp.headers['access-control-max-age'])
req = Request.blank(
'/v1/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'https://foo.bar'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEqual(401, resp.status_int)
req = Request.blank('/v1/a/c/o.jpg', {'REQUEST_METHOD': 'OPTIONS'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEqual(200, resp.status_int)
for verb in 'OPTIONS COPY GET POST PUT DELETE HEAD'.split():
self.assertTrue(
verb in resp.headers['Allow'])
self.assertEqual(len(resp.headers['Allow'].split(', ')), 7)
req = Request.blank(
'/v1/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.com'})
resp = controller.OPTIONS(req)
self.assertEqual(401, resp.status_int)
req = Request.blank(
'/v1/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.bar',
'Access-Control-Request-Method': 'GET'})
controller.app.cors_allow_origin = ['http://foo.bar', ]
resp = controller.OPTIONS(req)
self.assertEqual(200, resp.status_int)
def my_container_info_wildcard(*args):
return {
'cors': {
'allow_origin': '*',
'max_age': '999',
}
}
controller.container_info = my_container_info_wildcard
req = Request.blank(
'/v1/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'https://bar.baz',
'Access-Control-Request-Method': 'GET'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEqual(200, resp.status_int)
self.assertEqual('*', resp.headers['access-control-allow-origin'])
for verb in 'OPTIONS COPY GET POST PUT DELETE HEAD'.split():
self.assertTrue(
verb in resp.headers['access-control-allow-methods'])
self.assertEqual(
len(resp.headers['access-control-allow-methods'].split(', ')),
7)
self.assertEqual('999', resp.headers['access-control-max-age'])
def test_CORS_valid(self):
with save_globals():
controller = ReplicatedObjectController(
self.app, 'a', 'c', 'o')
def stubContainerInfo(*args):
return {
'cors': {
'allow_origin': 'http://not.foo.bar'
}
}
controller.container_info = stubContainerInfo
controller.app.strict_cors_mode = False
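            # with strict_cors_mode off the request Origin is echoed back in
            # Access-Control-Allow-Origin even though it is not in the
            # container's allow_origin list; the strict-mode pass below omits
            # the header instead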
def objectGET(controller, req):
return Response(headers={
'X-Object-Meta-Color': 'red',
'X-Super-Secret': 'hush',
})
req = Request.blank(
'/v1/a/c/o.jpg',
{'REQUEST_METHOD': 'GET'},
headers={'Origin': 'http://foo.bar'})
resp = cors_validation(objectGET)(controller, req)
self.assertEqual(200, resp.status_int)
self.assertEqual('http://foo.bar',
resp.headers['access-control-allow-origin'])
self.assertEqual('red', resp.headers['x-object-meta-color'])
# X-Super-Secret is in the response, but not "exposed"
self.assertEqual('hush', resp.headers['x-super-secret'])
self.assertTrue('access-control-expose-headers' in resp.headers)
exposed = set(
h.strip() for h in
resp.headers['access-control-expose-headers'].split(','))
expected_exposed = set(['cache-control', 'content-language',
'content-type', 'expires', 'last-modified',
'pragma', 'etag', 'x-timestamp',
'x-trans-id', 'x-object-meta-color'])
self.assertEqual(expected_exposed, exposed)
controller.app.strict_cors_mode = True
req = Request.blank(
'/v1/a/c/o.jpg',
{'REQUEST_METHOD': 'GET'},
headers={'Origin': 'http://foo.bar'})
resp = cors_validation(objectGET)(controller, req)
self.assertEqual(200, resp.status_int)
self.assertTrue('access-control-allow-origin' not in resp.headers)
def test_CORS_valid_with_obj_headers(self):
with save_globals():
controller = ReplicatedObjectController(
self.app, 'a', 'c', 'o')
def stubContainerInfo(*args):
return {
'cors': {
'allow_origin': 'http://foo.bar'
}
}
controller.container_info = stubContainerInfo
def objectGET(controller, req):
return Response(headers={
'X-Object-Meta-Color': 'red',
'X-Super-Secret': 'hush',
'Access-Control-Allow-Origin': 'http://obj.origin',
'Access-Control-Expose-Headers': 'x-trans-id'
})
req = Request.blank(
'/v1/a/c/o.jpg',
{'REQUEST_METHOD': 'GET'},
headers={'Origin': 'http://foo.bar'})
resp = cors_validation(objectGET)(controller, req)
self.assertEqual(200, resp.status_int)
self.assertEqual('http://obj.origin',
resp.headers['access-control-allow-origin'])
self.assertEqual('x-trans-id',
resp.headers['access-control-expose-headers'])
def _gather_x_container_headers(self, controller_call, req, *connect_args,
**kwargs):
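        """
        Run the given controller method against canned backend responses,
        capturing the headers named in header_list from every backend
        connection.  The first two captures (the account and container HEADs)
        are discarded and the rest are returned sorted, so tests can assert
        exactly which X-Container-*/X-Delete-At-* headers each object-server
        request carried.
        """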
header_list = kwargs.pop('header_list', ['X-Container-Device',
'X-Container-Host',
'X-Container-Partition'])
seen_headers = []
def capture_headers(ipaddr, port, device, partition, method,
path, headers=None, query_string=None):
captured = {}
for header in header_list:
captured[header] = headers.get(header)
seen_headers.append(captured)
with save_globals():
self.app.allow_account_management = True
set_http_connect(*connect_args, give_connect=capture_headers,
**kwargs)
resp = controller_call(req)
self.assertEqual(2, resp.status_int // 100) # sanity check
# don't care about the account/container HEADs, so chuck
# the first two requests
return sorted(seen_headers[2:],
key=lambda d: d.get(header_list[0]) or 'z')
def test_PUT_x_container_headers_with_equal_replicas(self):
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '5'}, body='12345')
controller = ReplicatedObjectController(
self.app, 'a', 'c', 'o')
seen_headers = self._gather_x_container_headers(
controller.PUT, req,
200, 200, 201, 201, 201) # HEAD HEAD PUT PUT PUT
self.assertEqual(
seen_headers, [
{'X-Container-Host': '10.0.0.0:1000',
'X-Container-Partition': '0',
'X-Container-Device': 'sda'},
{'X-Container-Host': '10.0.0.1:1001',
'X-Container-Partition': '0',
'X-Container-Device': 'sdb'},
{'X-Container-Host': '10.0.0.2:1002',
'X-Container-Partition': '0',
'X-Container-Device': 'sdc'}])
def test_PUT_x_container_headers_with_fewer_container_replicas(self):
self.app.container_ring.set_replicas(2)
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '5'}, body='12345')
controller = ReplicatedObjectController(
self.app, 'a', 'c', 'o')
seen_headers = self._gather_x_container_headers(
controller.PUT, req,
200, 200, 201, 201, 201) # HEAD HEAD PUT PUT PUT
self.assertEqual(
seen_headers, [
{'X-Container-Host': '10.0.0.0:1000',
'X-Container-Partition': '0',
'X-Container-Device': 'sda'},
{'X-Container-Host': '10.0.0.1:1001',
'X-Container-Partition': '0',
'X-Container-Device': 'sdb'},
{'X-Container-Host': None,
'X-Container-Partition': None,
'X-Container-Device': None}])
def test_PUT_x_container_headers_with_more_container_replicas(self):
self.app.container_ring.set_replicas(4)
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Length': '5'}, body='12345')
controller = ReplicatedObjectController(
self.app, 'a', 'c', 'o')
seen_headers = self._gather_x_container_headers(
controller.PUT, req,
200, 200, 201, 201, 201) # HEAD HEAD PUT PUT PUT
self.assertEqual(
seen_headers, [
{'X-Container-Host': '10.0.0.0:1000,10.0.0.3:1003',
'X-Container-Partition': '0',
'X-Container-Device': 'sda,sdd'},
{'X-Container-Host': '10.0.0.1:1001',
'X-Container-Partition': '0',
'X-Container-Device': 'sdb'},
{'X-Container-Host': '10.0.0.2:1002',
'X-Container-Partition': '0',
'X-Container-Device': 'sdc'}])
def test_POST_x_container_headers_with_more_container_replicas(self):
self.app.container_ring.set_replicas(4)
self.app.object_post_as_copy = False
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'application/stuff'})
controller = ReplicatedObjectController(
self.app, 'a', 'c', 'o')
seen_headers = self._gather_x_container_headers(
controller.POST, req,
200, 200, 200, 200, 200) # HEAD HEAD POST POST POST
self.assertEqual(
seen_headers, [
{'X-Container-Host': '10.0.0.0:1000,10.0.0.3:1003',
'X-Container-Partition': '0',
'X-Container-Device': 'sda,sdd'},
{'X-Container-Host': '10.0.0.1:1001',
'X-Container-Partition': '0',
'X-Container-Device': 'sdb'},
{'X-Container-Host': '10.0.0.2:1002',
'X-Container-Partition': '0',
'X-Container-Device': 'sdc'}])
def test_DELETE_x_container_headers_with_more_container_replicas(self):
self.app.container_ring.set_replicas(4)
req = Request.blank('/v1/a/c/o',
environ={'REQUEST_METHOD': 'DELETE'},
headers={'Content-Type': 'application/stuff'})
controller = ReplicatedObjectController(
self.app, 'a', 'c', 'o')
seen_headers = self._gather_x_container_headers(
controller.DELETE, req,
200, 200, 200, 200, 200) # HEAD HEAD DELETE DELETE DELETE
self.assertEqual(seen_headers, [
{'X-Container-Host': '10.0.0.0:1000,10.0.0.3:1003',
'X-Container-Partition': '0',
'X-Container-Device': 'sda,sdd'},
{'X-Container-Host': '10.0.0.1:1001',
'X-Container-Partition': '0',
'X-Container-Device': 'sdb'},
{'X-Container-Host': '10.0.0.2:1002',
'X-Container-Partition': '0',
'X-Container-Device': 'sdc'}
])
@mock.patch('time.time', new=lambda: STATIC_TIME)
def test_PUT_x_delete_at_with_fewer_container_replicas(self):
self.app.container_ring.set_replicas(2)
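        # A PUT with X-Delete-At also carries X-Delete-At-* headers so the
        # expirer queue gets updated; the target container name is derived
        # from the delete-at timestamp bucketed by
        # expiring_objects_container_divisor (see get_expirer_container
        # below), and with only two container replicas the third
        # object-server PUT should get no X-Delete-At-* headers at all.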
delete_at_timestamp = int(time.time()) + 100000
delete_at_container = utils.get_expirer_container(
delete_at_timestamp, self.app.expiring_objects_container_divisor,
'a', 'c', 'o')
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Type': 'application/stuff',
'Content-Length': '0',
'X-Delete-At': str(delete_at_timestamp)})
controller = ReplicatedObjectController(
self.app, 'a', 'c', 'o')
seen_headers = self._gather_x_container_headers(
controller.PUT, req,
200, 200, 201, 201, 201, # HEAD HEAD PUT PUT PUT
header_list=('X-Delete-At-Host', 'X-Delete-At-Device',
'X-Delete-At-Partition', 'X-Delete-At-Container'))
self.assertEqual(seen_headers, [
{'X-Delete-At-Host': '10.0.0.0:1000',
'X-Delete-At-Container': delete_at_container,
'X-Delete-At-Partition': '0',
'X-Delete-At-Device': 'sda'},
{'X-Delete-At-Host': '10.0.0.1:1001',
'X-Delete-At-Container': delete_at_container,
'X-Delete-At-Partition': '0',
'X-Delete-At-Device': 'sdb'},
{'X-Delete-At-Host': None,
'X-Delete-At-Container': None,
'X-Delete-At-Partition': None,
'X-Delete-At-Device': None}
])
@mock.patch('time.time', new=lambda: STATIC_TIME)
def test_PUT_x_delete_at_with_more_container_replicas(self):
self.app.container_ring.set_replicas(4)
self.app.expiring_objects_account = 'expires'
self.app.expiring_objects_container_divisor = 60
delete_at_timestamp = int(time.time()) + 100000
delete_at_container = utils.get_expirer_container(
delete_at_timestamp, self.app.expiring_objects_container_divisor,
'a', 'c', 'o')
req = Request.blank('/v1/a/c/o', environ={'REQUEST_METHOD': 'PUT'},
headers={'Content-Type': 'application/stuff',
'Content-Length': 0,
'X-Delete-At': str(delete_at_timestamp)})
controller = ReplicatedObjectController(
self.app, 'a', 'c', 'o')
seen_headers = self._gather_x_container_headers(
controller.PUT, req,
200, 200, 201, 201, 201, # HEAD HEAD PUT PUT PUT
header_list=('X-Delete-At-Host', 'X-Delete-At-Device',
'X-Delete-At-Partition', 'X-Delete-At-Container'))
self.assertEqual(seen_headers, [
{'X-Delete-At-Host': '10.0.0.0:1000,10.0.0.3:1003',
'X-Delete-At-Container': delete_at_container,
'X-Delete-At-Partition': '0',
'X-Delete-At-Device': 'sda,sdd'},
{'X-Delete-At-Host': '10.0.0.1:1001',
'X-Delete-At-Container': delete_at_container,
'X-Delete-At-Partition': '0',
'X-Delete-At-Device': 'sdb'},
{'X-Delete-At-Host': '10.0.0.2:1002',
'X-Delete-At-Container': delete_at_container,
'X-Delete-At-Partition': '0',
'X-Delete-At-Device': 'sdc'}
])
class TestECMismatchedFA(unittest.TestCase):
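    # GET behavior when the EC fragment archives on disk come from two
    # different versions of the same object: two matching fragments are
    # enough to serve it, while mismatched or too-few fragments yield a 503.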
def tearDown(self):
prosrv = _test_servers[0]
# don't leak error limits and poison other tests
prosrv._error_limiting = {}
def test_mixing_different_objects_fragment_archives(self):
(prosrv, acc1srv, acc2srv, con1srv, con2srv, obj1srv,
obj2srv, obj3srv) = _test_servers
ec_policy = POLICIES[3]
@public
def bad_disk(req):
return Response(status=507, body="borken")
ensure_container = Request.blank(
"/v1/a/ec-crazytown",
environ={"REQUEST_METHOD": "PUT"},
headers={"X-Storage-Policy": "ec", "X-Auth-Token": "t"})
resp = ensure_container.get_response(prosrv)
self.assertTrue(resp.status_int in (201, 202))
obj1 = "first version..."
put_req1 = Request.blank(
"/v1/a/ec-crazytown/obj",
environ={"REQUEST_METHOD": "PUT"},
headers={"X-Auth-Token": "t"})
put_req1.body = obj1
obj2 = u"versión segundo".encode("utf-8")
put_req2 = Request.blank(
"/v1/a/ec-crazytown/obj",
environ={"REQUEST_METHOD": "PUT"},
headers={"X-Auth-Token": "t"})
put_req2.body = obj2
# pyeclib has checks for unequal-length; we don't want to trip those
self.assertEqual(len(obj1), len(obj2))
# Server obj1 will have the first version of the object (obj2 also
# gets it, but that gets stepped on later)
prosrv._error_limiting = {}
with nested(
mock.patch.object(obj3srv, 'PUT', bad_disk),
mock.patch(
'swift.common.storage_policy.ECStoragePolicy.quorum')):
type(ec_policy).quorum = mock.PropertyMock(return_value=2)
resp = put_req1.get_response(prosrv)
self.assertEqual(resp.status_int, 201)
# Servers obj2 and obj3 will have the second version of the object.
prosrv._error_limiting = {}
with nested(
mock.patch.object(obj1srv, 'PUT', bad_disk),
mock.patch(
'swift.common.storage_policy.ECStoragePolicy.quorum')):
type(ec_policy).quorum = mock.PropertyMock(return_value=2)
resp = put_req2.get_response(prosrv)
self.assertEqual(resp.status_int, 201)
# A GET that only sees 1 fragment archive should fail
get_req = Request.blank("/v1/a/ec-crazytown/obj",
environ={"REQUEST_METHOD": "GET"},
headers={"X-Auth-Token": "t"})
prosrv._error_limiting = {}
with nested(
mock.patch.object(obj1srv, 'GET', bad_disk),
mock.patch.object(obj2srv, 'GET', bad_disk)):
resp = get_req.get_response(prosrv)
self.assertEqual(resp.status_int, 503)
# A GET that sees 2 matching FAs will work
get_req = Request.blank("/v1/a/ec-crazytown/obj",
environ={"REQUEST_METHOD": "GET"},
headers={"X-Auth-Token": "t"})
prosrv._error_limiting = {}
with mock.patch.object(obj1srv, 'GET', bad_disk):
resp = get_req.get_response(prosrv)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.body, obj2)
# A GET that sees 2 mismatching FAs will fail
get_req = Request.blank("/v1/a/ec-crazytown/obj",
environ={"REQUEST_METHOD": "GET"},
headers={"X-Auth-Token": "t"})
prosrv._error_limiting = {}
with mock.patch.object(obj2srv, 'GET', bad_disk):
resp = get_req.get_response(prosrv)
self.assertEqual(resp.status_int, 503)
class TestObjectECRangedGET(unittest.TestCase):
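    # Ranged GETs against EC objects: a multi-segment object, a sub-segment
    # "tiny" object, and a segment-aligned object, covering aligned and
    # unaligned ranges, suffix ranges, multi-range requests, and
    # unsatisfiable ranges.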
def setUp(self):
_test_servers[0].logger._clear()
self.app = proxy_server.Application(
None, FakeMemcache(),
logger=debug_logger('proxy-ut'),
account_ring=FakeRing(),
container_ring=FakeRing())
def tearDown(self):
prosrv = _test_servers[0]
self.assertFalse(prosrv.logger.get_lines_for_level('error'))
self.assertFalse(prosrv.logger.get_lines_for_level('warning'))
@classmethod
def setUpClass(cls):
cls.obj_name = 'range-get-test'
cls.tiny_obj_name = 'range-get-test-tiny'
cls.aligned_obj_name = 'range-get-test-aligned'
# Note: only works if called with unpatched policies
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/ec-con HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'Content-Length: 0\r\n'
'X-Storage-Token: t\r\n'
'X-Storage-Policy: ec\r\n'
'\r\n')
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 2'
assert headers[:len(exp)] == exp, "container PUT failed"
seg_size = POLICIES.get_by_name("ec").ec_segment_size
cls.seg_size = seg_size
# EC segment size is 4 KiB, hence this gives 4 segments, which we
# then verify with a quick sanity check
cls.obj = ' my hovercraft is full of eels '.join(
str(s) for s in range(431))
assert seg_size * 4 > len(cls.obj) > seg_size * 3, \
"object is wrong number of segments"
cls.tiny_obj = 'tiny, tiny object'
assert len(cls.tiny_obj) < seg_size, "tiny_obj too large"
cls.aligned_obj = "".join(
"abcdEFGHijkl%04d" % x for x in range(512))
assert len(cls.aligned_obj) % seg_size == 0, "aligned obj not aligned"
for obj_name, obj in ((cls.obj_name, cls.obj),
(cls.tiny_obj_name, cls.tiny_obj),
(cls.aligned_obj_name, cls.aligned_obj)):
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/ec-con/%s HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'Content-Length: %d\r\n'
'X-Storage-Token: t\r\n'
'Content-Type: donuts\r\n'
'\r\n%s' % (obj_name, len(obj), obj))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
assert headers[:len(exp)] == exp, \
"object PUT failed %s" % obj_name
def _get_obj(self, range_value, obj_name=None):
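        # issue a raw ranged GET against the in-process proxy and return
        # (status_code, headers, body)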
if obj_name is None:
obj_name = self.obj_name
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/ec-con/%s HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Range: %s\r\n'
'\r\n' % (obj_name, range_value))
fd.flush()
headers = readuntil2crlfs(fd)
# e.g. "HTTP/1.1 206 Partial Content\r\n..."
status_code = int(headers[9:12])
headers = parse_headers_string(headers)
gotten_obj = ''
while True:
buf = fd.read(64)
if not buf:
break
gotten_obj += buf
# if we get this wrong, clients will either get truncated data or
# they'll hang waiting for bytes that aren't coming, so it warrants
# being asserted for every test case
if 'Content-Length' in headers:
self.assertEqual(int(headers['Content-Length']), len(gotten_obj))
# likewise, if we say MIME and don't send MIME or vice versa,
# clients will be horribly confused
if headers.get('Content-Type', '').startswith('multipart/byteranges'):
self.assertEqual(gotten_obj[:2], "--")
else:
# In general, this isn't true, as you can start an object with
# "--". However, in this test, we don't start any objects with
# "--", or even include "--" in their contents anywhere.
self.assertNotEqual(gotten_obj[:2], "--")
return (status_code, headers, gotten_obj)
def _parse_multipart(self, content_type, body):
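        # parse a multipart/byteranges body with email.parser and return the
        # individual byterange sub-messages, checking the MIME structure
        # along the way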
parser = email.parser.FeedParser()
parser.feed("Content-Type: %s\r\n\r\n" % content_type)
parser.feed(body)
root_message = parser.close()
self.assertTrue(root_message.is_multipart())
byteranges = root_message.get_payload()
self.assertFalse(root_message.defects)
for i, message in enumerate(byteranges):
self.assertFalse(message.defects, "Part %d had defects" % i)
self.assertFalse(message.is_multipart(),
"Nested multipart at %d" % i)
return byteranges
def test_bogus(self):
status, headers, gotten_obj = self._get_obj("tacos=3-5")
self.assertEqual(status, 200)
self.assertEqual(len(gotten_obj), len(self.obj))
self.assertEqual(gotten_obj, self.obj)
def test_unaligned(self):
# One segment's worth of data, but straddling two segment boundaries
# (so it has data from three segments)
status, headers, gotten_obj = self._get_obj("bytes=3783-7878")
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], "4096")
self.assertEqual(headers['Content-Range'], "bytes 3783-7878/14513")
self.assertEqual(len(gotten_obj), 4096)
self.assertEqual(gotten_obj, self.obj[3783:7879])
def test_aligned_left(self):
# First byte is aligned to a segment boundary, last byte is not
status, headers, gotten_obj = self._get_obj("bytes=0-5500")
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], "5501")
self.assertEqual(headers['Content-Range'], "bytes 0-5500/14513")
self.assertEqual(len(gotten_obj), 5501)
self.assertEqual(gotten_obj, self.obj[:5501])
def test_aligned_range(self):
# Ranged GET that wants exactly one segment
status, headers, gotten_obj = self._get_obj("bytes=4096-8191")
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], "4096")
self.assertEqual(headers['Content-Range'], "bytes 4096-8191/14513")
self.assertEqual(len(gotten_obj), 4096)
self.assertEqual(gotten_obj, self.obj[4096:8192])
def test_aligned_range_end(self):
# Ranged GET that wants exactly the last segment
status, headers, gotten_obj = self._get_obj("bytes=12288-14512")
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], "2225")
self.assertEqual(headers['Content-Range'], "bytes 12288-14512/14513")
self.assertEqual(len(gotten_obj), 2225)
self.assertEqual(gotten_obj, self.obj[12288:])
def test_aligned_range_aligned_obj(self):
# Ranged GET that wants exactly the last segment, which is full-size
status, headers, gotten_obj = self._get_obj("bytes=4096-8191",
self.aligned_obj_name)
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], "4096")
self.assertEqual(headers['Content-Range'], "bytes 4096-8191/8192")
self.assertEqual(len(gotten_obj), 4096)
self.assertEqual(gotten_obj, self.aligned_obj[4096:8192])
def test_byte_0(self):
# Just the first byte, but it's index 0, so that's easy to get wrong
status, headers, gotten_obj = self._get_obj("bytes=0-0")
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], "1")
self.assertEqual(headers['Content-Range'], "bytes 0-0/14513")
self.assertEqual(gotten_obj, self.obj[0])
def test_unsatisfiable(self):
# Goes just one byte too far off the end of the object, so it's
# unsatisfiable
status, _junk, _junk = self._get_obj(
"bytes=%d-%d" % (len(self.obj), len(self.obj) + 100))
self.assertEqual(status, 416)
def test_off_end(self):
# Ranged GET that's mostly off the end of the object, but overlaps
# it in just the last byte
status, headers, gotten_obj = self._get_obj(
"bytes=%d-%d" % (len(self.obj) - 1, len(self.obj) + 100))
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], '1')
self.assertEqual(headers['Content-Range'], 'bytes 14512-14512/14513')
self.assertEqual(gotten_obj, self.obj[-1])
def test_aligned_off_end(self):
# Ranged GET that starts on a segment boundary but asks for a whole lot
status, headers, gotten_obj = self._get_obj(
"bytes=%d-%d" % (8192, len(self.obj) + 100))
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], '6321')
self.assertEqual(headers['Content-Range'], 'bytes 8192-14512/14513')
self.assertEqual(gotten_obj, self.obj[8192:])
def test_way_off_end(self):
# Ranged GET that's mostly off the end of the object, but overlaps
# it in just the last byte, and wants multiple segments' worth off
# the end
status, headers, gotten_obj = self._get_obj(
"bytes=%d-%d" % (len(self.obj) - 1, len(self.obj) * 1000))
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], '1')
self.assertEqual(headers['Content-Range'], 'bytes 14512-14512/14513')
self.assertEqual(gotten_obj, self.obj[-1])
def test_boundaries(self):
# Wants the last byte of segment 1 + the first byte of segment 2
status, headers, gotten_obj = self._get_obj("bytes=4095-4096")
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], '2')
self.assertEqual(headers['Content-Range'], 'bytes 4095-4096/14513')
self.assertEqual(gotten_obj, self.obj[4095:4097])
def test_until_end(self):
# Wants the last byte of segment 1 + the rest
status, headers, gotten_obj = self._get_obj("bytes=4095-")
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], '10418')
self.assertEqual(headers['Content-Range'], 'bytes 4095-14512/14513')
self.assertEqual(gotten_obj, self.obj[4095:])
def test_small_suffix(self):
# Small range-suffix GET: the last 100 bytes (less than one segment)
status, headers, gotten_obj = self._get_obj("bytes=-100")
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], '100')
self.assertEqual(headers['Content-Range'], 'bytes 14413-14512/14513')
self.assertEqual(len(gotten_obj), 100)
self.assertEqual(gotten_obj, self.obj[-100:])
def test_small_suffix_aligned(self):
# Small range-suffix GET: the last 100 bytes, last segment is
# full-size
status, headers, gotten_obj = self._get_obj("bytes=-100",
self.aligned_obj_name)
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], '100')
self.assertEqual(headers['Content-Range'], 'bytes 8092-8191/8192')
self.assertEqual(len(gotten_obj), 100)
def test_suffix_two_segs(self):
# Ask for enough data that we need the last two segments. The last
# segment is short, though, so this ensures we compensate for that.
#
# Note that the total range size is less than one full-size segment.
suffix_len = len(self.obj) % self.seg_size + 1
status, headers, gotten_obj = self._get_obj("bytes=-%d" % suffix_len)
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], str(suffix_len))
self.assertEqual(headers['Content-Range'],
'bytes %d-%d/%d' % (len(self.obj) - suffix_len,
len(self.obj) - 1,
len(self.obj)))
self.assertEqual(len(gotten_obj), suffix_len)
def test_large_suffix(self):
# Large range-suffix GET: the last 5000 bytes (more than one segment)
status, headers, gotten_obj = self._get_obj("bytes=-5000")
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], '5000')
self.assertEqual(headers['Content-Range'], 'bytes 9513-14512/14513')
self.assertEqual(len(gotten_obj), 5000)
self.assertEqual(gotten_obj, self.obj[-5000:])
def test_overlarge_suffix(self):
# The last N+1 bytes of an N-byte object
status, headers, gotten_obj = self._get_obj(
"bytes=-%d" % (len(self.obj) + 1))
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], '14513')
self.assertEqual(headers['Content-Range'], 'bytes 0-14512/14513')
self.assertEqual(len(gotten_obj), len(self.obj))
self.assertEqual(gotten_obj, self.obj)
def test_small_suffix_tiny_object(self):
status, headers, gotten_obj = self._get_obj(
"bytes=-5", self.tiny_obj_name)
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], '5')
self.assertEqual(headers['Content-Range'], 'bytes 12-16/17')
self.assertEqual(gotten_obj, self.tiny_obj[12:])
def test_overlarge_suffix_tiny_object(self):
status, headers, gotten_obj = self._get_obj(
"bytes=-1234567890", self.tiny_obj_name)
self.assertEqual(status, 206)
self.assertEqual(headers['Content-Length'], '17')
self.assertEqual(headers['Content-Range'], 'bytes 0-16/17')
self.assertEqual(len(gotten_obj), len(self.tiny_obj))
self.assertEqual(gotten_obj, self.tiny_obj)
def test_multiple_ranges(self):
status, headers, gotten_obj = self._get_obj(
"bytes=0-100,4490-5010", self.obj_name)
self.assertEqual(status, 206)
self.assertEqual(headers["Content-Length"], str(len(gotten_obj)))
content_type, content_type_params = parse_content_type(
headers['Content-Type'])
content_type_params = dict(content_type_params)
self.assertEqual(content_type, 'multipart/byteranges')
boundary = content_type_params.get('boundary')
self.assertTrue(boundary is not None)
got_byteranges = self._parse_multipart(headers['Content-Type'],
gotten_obj)
self.assertEqual(len(got_byteranges), 2)
first_byterange, second_byterange = got_byteranges
self.assertEqual(first_byterange['Content-Range'],
'bytes 0-100/14513')
self.assertEqual(first_byterange.get_payload(), self.obj[:101])
self.assertEqual(second_byterange['Content-Range'],
'bytes 4490-5010/14513')
self.assertEqual(second_byterange.get_payload(), self.obj[4490:5011])
def test_multiple_ranges_overlapping_in_segment(self):
status, headers, gotten_obj = self._get_obj(
"bytes=0-9,20-29,40-49,60-69,80-89")
self.assertEqual(status, 206)
got_byteranges = self._parse_multipart(headers['Content-Type'],
gotten_obj)
self.assertEqual(len(got_byteranges), 5)
def test_multiple_ranges_off_end(self):
status, headers, gotten_obj = self._get_obj(
"bytes=0-10,14500-14513") # there is no byte 14513, only 0-14512
self.assertEqual(status, 206)
got_byteranges = self._parse_multipart(headers['Content-Type'],
gotten_obj)
self.assertEqual(len(got_byteranges), 2)
self.assertEqual(got_byteranges[0]['Content-Range'],
"bytes 0-10/14513")
self.assertEqual(got_byteranges[1]['Content-Range'],
"bytes 14500-14512/14513")
def test_multiple_ranges_suffix_off_end(self):
status, headers, gotten_obj = self._get_obj(
"bytes=0-10,-13")
self.assertEqual(status, 206)
got_byteranges = self._parse_multipart(headers['Content-Type'],
gotten_obj)
self.assertEqual(len(got_byteranges), 2)
self.assertEqual(got_byteranges[0]['Content-Range'],
"bytes 0-10/14513")
self.assertEqual(got_byteranges[1]['Content-Range'],
"bytes 14500-14512/14513")
def test_multiple_ranges_one_barely_unsatisfiable(self):
# The thing about 14515-14520 is that it comes from the last segment
# in the object. When we turn this range into a fragment range,
# it'll be for the last fragment, so the object servers see
# something satisfiable.
#
# Basically, we'll get 3 byteranges from the object server, but we
# have to filter out the unsatisfiable one on our own.
status, headers, gotten_obj = self._get_obj(
"bytes=0-10,14515-14520,40-50")
self.assertEqual(status, 206)
got_byteranges = self._parse_multipart(headers['Content-Type'],
gotten_obj)
self.assertEqual(len(got_byteranges), 2)
self.assertEqual(got_byteranges[0]['Content-Range'],
"bytes 0-10/14513")
self.assertEqual(got_byteranges[0].get_payload(), self.obj[0:11])
self.assertEqual(got_byteranges[1]['Content-Range'],
"bytes 40-50/14513")
self.assertEqual(got_byteranges[1].get_payload(), self.obj[40:51])
def test_multiple_ranges_some_unsatisfiable(self):
status, headers, gotten_obj = self._get_obj(
"bytes=0-100,4090-5010,999999-9999999", self.obj_name)
self.assertEqual(status, 206)
content_type, content_type_params = parse_content_type(
headers['Content-Type'])
content_type_params = dict(content_type_params)
self.assertEqual(content_type, 'multipart/byteranges')
boundary = content_type_params.get('boundary')
self.assertTrue(boundary is not None)
got_byteranges = self._parse_multipart(headers['Content-Type'],
gotten_obj)
self.assertEqual(len(got_byteranges), 2)
first_byterange, second_byterange = got_byteranges
self.assertEqual(first_byterange['Content-Range'],
'bytes 0-100/14513')
self.assertEqual(first_byterange.get_payload(), self.obj[:101])
self.assertEqual(second_byterange['Content-Range'],
'bytes 4090-5010/14513')
self.assertEqual(second_byterange.get_payload(), self.obj[4090:5011])
def test_two_ranges_one_unsatisfiable(self):
status, headers, gotten_obj = self._get_obj(
"bytes=0-100,999999-9999999", self.obj_name)
self.assertEqual(status, 206)
content_type, content_type_params = parse_content_type(
headers['Content-Type'])
# According to RFC 7233, this could be either a multipart/byteranges
# response with one part or it could be a single-part response (just
# the bytes, no MIME). We're locking it down here: single-part
# response. That's what replicated objects do, and we don't want any
# client-visible differences between EC objects and replicated ones.
self.assertEqual(content_type, 'donuts')
self.assertEqual(gotten_obj, self.obj[:101])
def test_two_ranges_one_unsatisfiable_same_segment(self):
# Like test_two_ranges_one_unsatisfiable(), but where both ranges
# fall within the same EC segment.
status, headers, gotten_obj = self._get_obj(
"bytes=14500-14510,14520-14530")
self.assertEqual(status, 206)
content_type, content_type_params = parse_content_type(
headers['Content-Type'])
self.assertEqual(content_type, 'donuts')
self.assertEqual(gotten_obj, self.obj[14500:14511])
def test_multiple_ranges_some_unsatisfiable_out_of_order(self):
status, headers, gotten_obj = self._get_obj(
"bytes=0-100,99999998-99999999,4090-5010", self.obj_name)
self.assertEqual(status, 206)
content_type, content_type_params = parse_content_type(
headers['Content-Type'])
content_type_params = dict(content_type_params)
self.assertEqual(content_type, 'multipart/byteranges')
boundary = content_type_params.get('boundary')
self.assertTrue(boundary is not None)
got_byteranges = self._parse_multipart(headers['Content-Type'],
gotten_obj)
self.assertEqual(len(got_byteranges), 2)
first_byterange, second_byterange = got_byteranges
self.assertEqual(first_byterange['Content-Range'],
'bytes 0-100/14513')
self.assertEqual(first_byterange.get_payload(), self.obj[:101])
self.assertEqual(second_byterange['Content-Range'],
'bytes 4090-5010/14513')
self.assertEqual(second_byterange.get_payload(), self.obj[4090:5011])
@patch_policies([
StoragePolicy(0, 'zero', True, object_ring=FakeRing(base_port=3000)),
StoragePolicy(1, 'one', False, object_ring=FakeRing(base_port=3000)),
StoragePolicy(2, 'two', False, True, object_ring=FakeRing(base_port=3000))
])
class TestContainerController(unittest.TestCase):
"Test swift.proxy_server.ContainerController"
def setUp(self):
self.app = proxy_server.Application(
None, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing(base_port=2000),
logger=debug_logger())
def test_convert_policy_to_index(self):
controller = swift.proxy.controllers.ContainerController(self.app,
'a', 'c')
expected = {
'zero': 0,
'ZeRo': 0,
'one': 1,
'OnE': 1,
}
for name, index in expected.items():
req = Request.blank('/a/c', headers={'Content-Length': '0',
'Content-Type': 'text/plain',
'X-Storage-Policy': name})
self.assertEqual(controller._convert_policy_to_index(req), index)
# default test
req = Request.blank('/a/c', headers={'Content-Length': '0',
'Content-Type': 'text/plain'})
self.assertEqual(controller._convert_policy_to_index(req), None)
# negative test
req = Request.blank('/a/c',
headers={'Content-Length': '0',
'Content-Type': 'text/plain',
'X-Storage-Policy': 'nada'})
self.assertRaises(HTTPException, controller._convert_policy_to_index,
req)
# storage policy two is deprecated
req = Request.blank('/a/c', headers={'Content-Length': '0',
'Content-Type': 'text/plain',
'X-Storage-Policy': 'two'})
self.assertRaises(HTTPException, controller._convert_policy_to_index,
req)
def test_convert_index_to_name(self):
policy = random.choice(list(POLICIES))
req = Request.blank('/v1/a/c')
with mocked_http_conn(
200, 200,
headers={'X-Backend-Storage-Policy-Index': int(policy)},
) as fake_conn:
resp = req.get_response(self.app)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['X-Storage-Policy'], policy.name)
def test_no_convert_index_to_name_when_container_not_found(self):
policy = random.choice(list(POLICIES))
req = Request.blank('/v1/a/c')
with mocked_http_conn(
200, 404, 404, 404,
headers={'X-Backend-Storage-Policy-Index':
int(policy)}) as fake_conn:
resp = req.get_response(self.app)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 404)
self.assertEqual(resp.headers['X-Storage-Policy'], None)
def test_error_convert_index_to_name(self):
req = Request.blank('/v1/a/c')
with mocked_http_conn(
200, 200,
headers={'X-Backend-Storage-Policy-Index': '-1'}) as fake_conn:
resp = req.get_response(self.app)
self.assertRaises(StopIteration, fake_conn.code_iter.next)
self.assertEqual(resp.status_int, 200)
self.assertEqual(resp.headers['X-Storage-Policy'], None)
error_lines = self.app.logger.get_lines_for_level('error')
self.assertEqual(2, len(error_lines))
for msg in error_lines:
expected = "Could not translate " \
"X-Backend-Storage-Policy-Index ('-1')"
self.assertTrue(expected in msg)
def test_transfer_headers(self):
src_headers = {'x-remove-versions-location': 'x',
'x-container-read': '*:user',
'x-remove-container-sync-key': 'x'}
dst_headers = {'x-versions-location': 'backup'}
controller = swift.proxy.controllers.ContainerController(self.app,
'a', 'c')
controller.transfer_headers(src_headers, dst_headers)
expected_headers = {'x-versions-location': '',
'x-container-read': '*:user',
'x-container-sync-key': ''}
self.assertEqual(dst_headers, expected_headers)
def assert_status_map(self, method, statuses, expected,
raise_exc=False, missing_container=False):
with save_globals():
kwargs = {}
if raise_exc:
kwargs['raise_exc'] = raise_exc
kwargs['missing_container'] = missing_container
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c', headers={'Content-Length': '0',
'Content-Type': 'text/plain'})
self.app.update_request(req)
res = method(req)
self.assertEqual(res.status_int, expected)
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c/', headers={'Content-Length': '0',
'Content-Type': 'text/plain'})
self.app.update_request(req)
res = method(req)
self.assertEqual(res.status_int, expected)
def test_HEAD_GET(self):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'a', 'c')
def test_status_map(statuses, expected,
c_expected=None, a_expected=None, **kwargs):
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c', {})
self.app.update_request(req)
res = controller.HEAD(req)
self.assertEqual(res.status[:len(str(expected))],
str(expected))
if expected < 400:
self.assertTrue('x-works' in res.headers)
self.assertEqual(res.headers['x-works'], 'yes')
if c_expected:
self.assertTrue('swift.container/a/c' in res.environ)
self.assertEqual(
res.environ['swift.container/a/c']['status'],
c_expected)
else:
self.assertTrue('swift.container/a/c' not in res.environ)
if a_expected:
self.assertTrue('swift.account/a' in res.environ)
self.assertEqual(res.environ['swift.account/a']['status'],
a_expected)
else:
self.assertTrue('swift.account/a' not in res.environ)
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c', {})
self.app.update_request(req)
res = controller.GET(req)
self.assertEqual(res.status[:len(str(expected))],
str(expected))
if expected < 400:
self.assertTrue('x-works' in res.headers)
self.assertEqual(res.headers['x-works'], 'yes')
if c_expected:
self.assertTrue('swift.container/a/c' in res.environ)
self.assertEqual(
res.environ['swift.container/a/c']['status'],
c_expected)
else:
self.assertTrue('swift.container/a/c' not in res.environ)
if a_expected:
self.assertTrue('swift.account/a' in res.environ)
self.assertEqual(res.environ['swift.account/a']['status'],
a_expected)
else:
self.assertTrue('swift.account/a' not in res.environ)
            # In all the following tests the account lookup returns 200 and
            # is cached; the container responses vary.
            # return 200 and cache 200 for the container
test_status_map((200, 200, 404, 404), 200, 200, 200)
test_status_map((200, 200, 500, 404), 200, 200, 200)
# return 304 don't cache container
test_status_map((200, 304, 500, 404), 304, None, 200)
# return 404 and cache 404 for container
test_status_map((200, 404, 404, 404), 404, 404, 200)
test_status_map((200, 404, 404, 500), 404, 404, 200)
# return 503, don't cache container
test_status_map((200, 500, 500, 500), 503, None, 200)
self.assertFalse(self.app.account_autocreate)
# In all the following tests cache 404 for account
# return 404 (as account is not found) and don't cache container
test_status_map((404, 404, 404), 404, None, 404)
# This should make no difference
self.app.account_autocreate = True
test_status_map((404, 404, 404), 404, None, 404)
def test_PUT_policy_headers(self):
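        # a container PUT with X-Storage-Policy should send
        # X-Backend-Storage-Policy-Index to every backend (or
        # X-Backend-Storage-Policy-Default when no policy is given), and a
        # deprecated policy should be rejected with a 400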
backend_requests = []
def capture_requests(ipaddr, port, device, partition, method,
path, headers=None, query_string=None):
if method == 'PUT':
backend_requests.append(headers)
def test_policy(requested_policy):
with save_globals():
mock_conn = set_http_connect(200, 201, 201, 201,
give_connect=capture_requests)
self.app.memcache.store = {}
req = Request.blank('/v1/a/test', method='PUT',
headers={'Content-Length': 0})
if requested_policy:
expected_policy = requested_policy
req.headers['X-Storage-Policy'] = policy.name
else:
expected_policy = POLICIES.default
res = req.get_response(self.app)
if expected_policy.is_deprecated:
self.assertEqual(res.status_int, 400)
self.assertEqual(0, len(backend_requests))
expected = 'is deprecated'
self.assertTrue(expected in res.body,
'%r did not include %r' % (
res.body, expected))
return
self.assertEqual(res.status_int, 201)
self.assertEqual(
expected_policy.object_ring.replicas,
len(backend_requests))
for headers in backend_requests:
if not requested_policy:
self.assertFalse('X-Backend-Storage-Policy-Index' in
headers)
self.assertTrue(
'X-Backend-Storage-Policy-Default' in headers)
self.assertEqual(
int(expected_policy),
int(headers['X-Backend-Storage-Policy-Default']))
else:
self.assertTrue('X-Backend-Storage-Policy-Index' in
headers)
self.assertEqual(int(headers
['X-Backend-Storage-Policy-Index']),
int(policy))
# make sure all mocked responses are consumed
self.assertRaises(StopIteration, mock_conn.code_iter.next)
test_policy(None) # no policy header
for policy in POLICIES:
backend_requests = [] # reset backend requests
test_policy(policy)
def test_PUT(self):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'account',
'container')
def test_status_map(statuses, expected, **kwargs):
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c', {})
req.content_length = 0
self.app.update_request(req)
res = controller.PUT(req)
expected = str(expected)
self.assertEqual(res.status[:len(expected)], expected)
test_status_map((200, 201, 201, 201), 201, missing_container=True)
test_status_map((200, 201, 201, 500), 201, missing_container=True)
test_status_map((200, 204, 404, 404), 404, missing_container=True)
test_status_map((200, 204, 500, 404), 503, missing_container=True)
self.assertFalse(self.app.account_autocreate)
test_status_map((404, 404, 404), 404, missing_container=True)
self.app.account_autocreate = True
# fail to retrieve account info
test_status_map(
(503, 503, 503), # account_info fails on 503
404, missing_container=True)
            # account lookup fails again after the account is created
test_status_map(
(404, 404, 404, # account_info fails on 404
201, 201, 201, # PUT account
404, 404, 404), # account_info fail
404, missing_container=True)
test_status_map(
(503, 503, 404, # account_info fails on 404
503, 503, 503, # PUT account
503, 503, 404), # account_info fail
404, missing_container=True)
# put fails
test_status_map(
(404, 404, 404, # account_info fails on 404
201, 201, 201, # PUT account
200, # account_info success
503, 503, 201), # put container fail
503, missing_container=True)
# all goes according to plan
test_status_map(
(404, 404, 404, # account_info fails on 404
201, 201, 201, # PUT account
200, # account_info success
201, 201, 201), # put container success
201, missing_container=True)
test_status_map(
(503, 404, 404, # account_info fails on 404
503, 201, 201, # PUT account
503, 200, # account_info success
503, 201, 201), # put container success
201, missing_container=True)
def test_PUT_autocreate_account_with_sysmeta(self):
# x-account-sysmeta headers in a container PUT request should be
# transferred to the account autocreate PUT request
with save_globals():
controller = proxy_server.ContainerController(self.app, 'account',
'container')
def test_status_map(statuses, expected, headers=None, **kwargs):
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c', {}, headers=headers)
req.content_length = 0
self.app.update_request(req)
res = controller.PUT(req)
expected = str(expected)
self.assertEqual(res.status[:len(expected)], expected)
self.app.account_autocreate = True
calls = []
callback = _make_callback_func(calls)
key, value = 'X-Account-Sysmeta-Blah', 'something'
headers = {key: value}
# all goes according to plan
test_status_map(
(404, 404, 404, # account_info fails on 404
201, 201, 201, # PUT account
200, # account_info success
201, 201, 201), # put container success
201, missing_container=True,
headers=headers,
give_connect=callback)
self.assertEqual(10, len(calls))
for call in calls[3:6]:
self.assertEqual('/account', call['path'])
self.assertTrue(key in call['headers'],
'%s call, key %s missing in headers %s' %
(call['method'], key, call['headers']))
self.assertEqual(value, call['headers'][key])
def test_POST(self):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'account',
'container')
def test_status_map(statuses, expected, **kwargs):
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c', {})
req.content_length = 0
self.app.update_request(req)
res = controller.POST(req)
expected = str(expected)
self.assertEqual(res.status[:len(expected)], expected)
test_status_map((200, 201, 201, 201), 201, missing_container=True)
test_status_map((200, 201, 201, 500), 201, missing_container=True)
test_status_map((200, 204, 404, 404), 404, missing_container=True)
test_status_map((200, 204, 500, 404), 503, missing_container=True)
self.assertFalse(self.app.account_autocreate)
test_status_map((404, 404, 404), 404, missing_container=True)
self.app.account_autocreate = True
test_status_map((404, 404, 404), 404, missing_container=True)
def test_PUT_max_containers_per_account(self):
with save_globals():
self.app.max_containers_per_account = 12346
controller = proxy_server.ContainerController(self.app, 'account',
'container')
self.assert_status_map(controller.PUT,
(200, 201, 201, 201), 201,
missing_container=True)
self.app.max_containers_per_account = 12345
controller = proxy_server.ContainerController(self.app, 'account',
'container')
self.assert_status_map(controller.PUT,
(200, 200, 201, 201, 201), 201,
missing_container=True)
controller = proxy_server.ContainerController(self.app, 'account',
'container_new')
self.assert_status_map(controller.PUT, (200, 404, 404, 404), 403,
missing_container=True)
self.app.max_containers_per_account = 12345
self.app.max_containers_whitelist = ['account']
controller = proxy_server.ContainerController(self.app, 'account',
'container')
self.assert_status_map(controller.PUT,
(200, 201, 201, 201), 201,
missing_container=True)
def test_PUT_max_container_name_length(self):
with save_globals():
limit = constraints.MAX_CONTAINER_NAME_LENGTH
controller = proxy_server.ContainerController(self.app, 'account',
'1' * limit)
self.assert_status_map(controller.PUT,
(200, 201, 201, 201), 201,
missing_container=True)
controller = proxy_server.ContainerController(self.app, 'account',
'2' * (limit + 1))
self.assert_status_map(controller.PUT, (201, 201, 201), 400,
missing_container=True)
def test_PUT_connect_exceptions(self):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'account',
'container')
self.assert_status_map(controller.PUT, (200, 201, 201, -1), 201,
missing_container=True)
self.assert_status_map(controller.PUT, (200, 201, -1, -1), 503,
missing_container=True)
self.assert_status_map(controller.PUT, (200, 503, 503, -1), 503,
missing_container=True)
def test_acc_missing_returns_404(self):
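        # when the account can't be found (404s, 503s, exceptions, or fully
        # error-limited account nodes), container PUT and DELETE return 404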
for meth in ('DELETE', 'PUT'):
with save_globals():
self.app.memcache = FakeMemcacheReturnsNone()
self.app._error_limiting = {}
controller = proxy_server.ContainerController(self.app,
'account',
'container')
if meth == 'PUT':
set_http_connect(200, 200, 200, 200, 200, 200,
missing_container=True)
else:
set_http_connect(200, 200, 200, 200)
self.app.memcache.store = {}
req = Request.blank('/v1/a/c',
environ={'REQUEST_METHOD': meth})
self.app.update_request(req)
resp = getattr(controller, meth)(req)
self.assertEqual(resp.status_int, 200)
set_http_connect(404, 404, 404, 200, 200, 200)
                # Make sure it is a blank request without env caching
req = Request.blank('/v1/a/c',
environ={'REQUEST_METHOD': meth})
resp = getattr(controller, meth)(req)
self.assertEqual(resp.status_int, 404)
set_http_connect(503, 404, 404)
                # Make sure it is a blank request without env caching
req = Request.blank('/v1/a/c',
environ={'REQUEST_METHOD': meth})
resp = getattr(controller, meth)(req)
self.assertEqual(resp.status_int, 404)
set_http_connect(503, 404, raise_exc=True)
                # Make sure it is a blank request without env caching
req = Request.blank('/v1/a/c',
environ={'REQUEST_METHOD': meth})
resp = getattr(controller, meth)(req)
self.assertEqual(resp.status_int, 404)
for dev in self.app.account_ring.devs:
set_node_errors(self.app, dev,
self.app.error_suppression_limit + 1,
time.time())
set_http_connect(200, 200, 200, 200, 200, 200)
                # Make sure it is a blank request without env caching
req = Request.blank('/v1/a/c',
environ={'REQUEST_METHOD': meth})
resp = getattr(controller, meth)(req)
self.assertEqual(resp.status_int, 404)
def test_put_locking(self):
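        # a container PUT should succeed while the memcache soft_lock is
        # obtainable; MockMemcache lets the test control lock availability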
class MockMemcache(FakeMemcache):
def __init__(self, allow_lock=None):
self.allow_lock = allow_lock
super(MockMemcache, self).__init__()
@contextmanager
def soft_lock(self, key, timeout=0, retries=5):
if self.allow_lock:
yield True
else:
raise NotImplementedError
with save_globals():
controller = proxy_server.ContainerController(self.app, 'account',
'container')
self.app.memcache = MockMemcache(allow_lock=True)
set_http_connect(200, 201, 201, 201,
missing_container=True)
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': 'PUT'})
self.app.update_request(req)
res = controller.PUT(req)
self.assertEqual(res.status_int, 201)
def test_error_limiting(self):
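        # drive 503s at the first container node past error_suppression_limit
        # and verify requests then fail fast, then let the node recover via a
        # negative error_suppression_interval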
with save_globals():
controller = proxy_server.ContainerController(self.app, 'account',
'container')
container_ring = controller.app.container_ring
controller.app.sort_nodes = lambda l: l
self.assert_status_map(controller.HEAD, (200, 503, 200, 200), 200,
missing_container=False)
self.assertEqual(
node_error_count(controller.app, container_ring.devs[0]), 2)
self.assertTrue(
node_last_error(controller.app, container_ring.devs[0])
is not None)
for _junk in range(self.app.error_suppression_limit):
self.assert_status_map(controller.HEAD,
(200, 503, 503, 503), 503)
self.assertEqual(
node_error_count(controller.app, container_ring.devs[0]),
self.app.error_suppression_limit + 1)
self.assert_status_map(controller.HEAD, (200, 200, 200, 200), 503)
self.assertTrue(
node_last_error(controller.app, container_ring.devs[0])
is not None)
self.assert_status_map(controller.PUT, (200, 201, 201, 201), 503,
missing_container=True)
self.assert_status_map(controller.DELETE,
(200, 204, 204, 204), 503)
self.app.error_suppression_interval = -300
self.assert_status_map(controller.HEAD, (200, 200, 200, 200), 200)
self.assert_status_map(controller.DELETE, (200, 204, 204, 204),
404, raise_exc=True)
def test_DELETE(self):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'account',
'container')
self.assert_status_map(controller.DELETE,
(200, 204, 204, 204), 204)
self.assert_status_map(controller.DELETE,
(200, 204, 204, 503), 204)
self.assert_status_map(controller.DELETE,
(200, 204, 503, 503), 503)
self.assert_status_map(controller.DELETE,
(200, 204, 404, 404), 404)
self.assert_status_map(controller.DELETE,
(200, 404, 404, 404), 404)
self.assert_status_map(controller.DELETE,
(200, 204, 503, 404), 503)
self.app.memcache = FakeMemcacheReturnsNone()
# 200: Account check, 404x3: Container check
self.assert_status_map(controller.DELETE,
(200, 404, 404, 404), 404)
def test_response_get_accept_ranges_header(self):
with save_globals():
set_http_connect(200, 200, body='{}')
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/v1/a/c?format=json')
self.app.update_request(req)
res = controller.GET(req)
self.assertTrue('accept-ranges' in res.headers)
self.assertEqual(res.headers['accept-ranges'], 'bytes')
def test_response_head_accept_ranges_header(self):
with save_globals():
set_http_connect(200, 200, body='{}')
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/v1/a/c?format=json')
self.app.update_request(req)
res = controller.HEAD(req)
self.assertTrue('accept-ranges' in res.headers)
self.assertEqual(res.headers['accept-ranges'], 'bytes')
def test_PUT_metadata(self):
self.metadata_helper('PUT')
def test_POST_metadata(self):
self.metadata_helper('POST')
def metadata_helper(self, method):
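        # for each header/value pair, check that the proxy forwards the
        # header to the container backends; X-Remove-* headers should arrive
        # as the base header with an empty value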
for test_header, test_value in (
('X-Container-Meta-TestHeader', 'TestValue'),
('X-Container-Meta-TestHeader', ''),
('X-Remove-Container-Meta-TestHeader', 'anything'),
('X-Container-Read', '.r:*'),
('X-Remove-Container-Read', 'anything'),
('X-Container-Write', 'anyone'),
('X-Remove-Container-Write', 'anything')):
test_errors = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
if path == '/a/c':
find_header = test_header
find_value = test_value
if find_header.lower().startswith('x-remove-'):
find_header = \
find_header.lower().replace('-remove', '', 1)
find_value = ''
for k, v in headers.items():
if k.lower() == find_header.lower() and \
v == find_value:
break
else:
test_errors.append('%s: %s not in %s' %
(find_header, find_value, headers))
with save_globals():
controller = \
proxy_server.ContainerController(self.app, 'a', 'c')
set_http_connect(200, 201, 201, 201, give_connect=test_connect)
req = Request.blank(
'/v1/a/c',
environ={'REQUEST_METHOD': method, 'swift_owner': True},
headers={test_header: test_value})
self.app.update_request(req)
getattr(controller, method)(req)
self.assertEqual(test_errors, [])
def test_PUT_bad_metadata(self):
self.bad_metadata_helper('PUT')
def test_POST_bad_metadata(self):
self.bad_metadata_helper('POST')
def bad_metadata_helper(self, method):
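        # walk the metadata constraints (name length, value length, count,
        # overall size): requests at the limit get 201, just over it get 400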
with save_globals():
controller = proxy_server.ContainerController(self.app, 'a', 'c')
set_http_connect(200, 201, 201, 201)
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method},
headers={'X-Container-Meta-' +
('a' * constraints.MAX_META_NAME_LENGTH): 'v'})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank(
'/v1/a/c', environ={'REQUEST_METHOD': method},
headers={'X-Container-Meta-' +
('a' * (constraints.MAX_META_NAME_LENGTH + 1)): 'v'})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 400)
set_http_connect(201, 201, 201)
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method},
headers={'X-Container-Meta-Too-Long':
'a' * constraints.MAX_META_VALUE_LENGTH})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method},
headers={'X-Container-Meta-Too-Long':
'a' * (constraints.MAX_META_VALUE_LENGTH + 1)})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 400)
set_http_connect(201, 201, 201)
headers = {}
for x in range(constraints.MAX_META_COUNT):
headers['X-Container-Meta-%d' % x] = 'v'
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 201)
set_http_connect(201, 201, 201)
headers = {}
for x in range(constraints.MAX_META_COUNT + 1):
headers['X-Container-Meta-%d' % x] = 'v'
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 400)
set_http_connect(201, 201, 201)
headers = {}
header_value = 'a' * constraints.MAX_META_VALUE_LENGTH
size = 0
x = 0
while size < (constraints.MAX_META_OVERALL_SIZE - 4
- constraints.MAX_META_VALUE_LENGTH):
size += 4 + constraints.MAX_META_VALUE_LENGTH
headers['X-Container-Meta-%04d' % x] = header_value
x += 1
if constraints.MAX_META_OVERALL_SIZE - size > 1:
headers['X-Container-Meta-a'] = \
'a' * (constraints.MAX_META_OVERALL_SIZE - size - 1)
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 201)
set_http_connect(201, 201, 201)
headers['X-Container-Meta-a'] = \
'a' * (constraints.MAX_META_OVERALL_SIZE - size)
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 400)
def test_POST_calls_clean_acl(self):
called = [False]
def clean_acl(header, value):
called[0] = True
raise ValueError('fake error')
with save_globals():
set_http_connect(200, 201, 201, 201)
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Container-Read': '.r:*'})
req.environ['swift.clean_acl'] = clean_acl
self.app.update_request(req)
controller.POST(req)
self.assertTrue(called[0])
called[0] = False
with save_globals():
set_http_connect(200, 201, 201, 201)
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': 'POST'},
headers={'X-Container-Write': '.r:*'})
req.environ['swift.clean_acl'] = clean_acl
self.app.update_request(req)
controller.POST(req)
self.assertTrue(called[0])
def test_PUT_calls_clean_acl(self):
called = [False]
def clean_acl(header, value):
called[0] = True
raise ValueError('fake error')
with save_globals():
set_http_connect(200, 201, 201, 201)
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Container-Read': '.r:*'})
req.environ['swift.clean_acl'] = clean_acl
self.app.update_request(req)
controller.PUT(req)
self.assertTrue(called[0])
called[0] = False
with save_globals():
set_http_connect(200, 201, 201, 201)
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers={'X-Container-Write': '.r:*'})
req.environ['swift.clean_acl'] = clean_acl
self.app.update_request(req)
controller.PUT(req)
self.assertTrue(called[0])
def test_GET_no_content(self):
with save_globals():
set_http_connect(200, 204, 204, 204)
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/v1/a/c')
self.app.update_request(req)
res = controller.GET(req)
self.assertEqual(res.status_int, 204)
self.assertEqual(
res.environ['swift.container/a/c']['status'], 204)
self.assertEqual(res.content_length, 0)
self.assertTrue('transfer-encoding' not in res.headers)
def test_GET_calls_authorize(self):
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
with save_globals():
set_http_connect(200, 201, 201, 201)
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/v1/a/c')
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
res = controller.GET(req)
self.assertEqual(res.environ['swift.container/a/c']['status'], 201)
self.assertTrue(called[0])
def test_HEAD_calls_authorize(self):
called = [False]
def authorize(req):
called[0] = True
return HTTPUnauthorized(request=req)
with save_globals():
set_http_connect(200, 201, 201, 201)
controller = proxy_server.ContainerController(self.app, 'account',
'container')
req = Request.blank('/v1/a/c', {'REQUEST_METHOD': 'HEAD'})
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
controller.HEAD(req)
self.assertTrue(called[0])
def test_unauthorized_requests_when_account_not_found(self):
        # verify unauthorized container requests always return the response
        # from swift.authorize
called = [0, 0]
def authorize(req):
called[0] += 1
return HTTPUnauthorized(request=req)
def account_info(*args):
called[1] += 1
return None, None, None
def _do_test(method):
with save_globals():
swift.proxy.controllers.Controller.account_info = account_info
app = proxy_server.Application(None, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing())
set_http_connect(201, 201, 201)
req = Request.blank('/v1/a/c', {'REQUEST_METHOD': method})
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
res = app.handle_request(req)
return res
for method in ('PUT', 'POST', 'DELETE'):
# no delay_denial on method, expect one call to authorize
called = [0, 0]
res = _do_test(method)
self.assertEqual(401, res.status_int)
self.assertEqual([1, 0], called)
for method in ('HEAD', 'GET'):
# delay_denial on method, expect two calls to authorize
called = [0, 0]
res = _do_test(method)
self.assertEqual(401, res.status_int)
self.assertEqual([2, 1], called)
def test_authorized_requests_when_account_not_found(self):
        # verify authorized container requests always return 404 when the
        # account is not found
called = [0, 0]
def authorize(req):
called[0] += 1
def account_info(*args):
called[1] += 1
return None, None, None
def _do_test(method):
with save_globals():
swift.proxy.controllers.Controller.account_info = account_info
app = proxy_server.Application(None, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing())
set_http_connect(201, 201, 201)
req = Request.blank('/v1/a/c', {'REQUEST_METHOD': method})
req.environ['swift.authorize'] = authorize
self.app.update_request(req)
res = app.handle_request(req)
return res
for method in ('PUT', 'POST', 'DELETE', 'HEAD', 'GET'):
# expect one call to authorize
called = [0, 0]
res = _do_test(method)
self.assertEqual(404, res.status_int)
self.assertEqual([1, 1], called)
def test_OPTIONS_get_info_drops_origin(self):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'a', 'c')
count = [0]
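            # my_get_info counts how often it is called; if OPTIONS did not
            # drop the Origin header before fetching container info, the CORS
            # handling would presumably recurse and trip the guard below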
def my_get_info(app, env, account, container=None,
ret_not_found=False, swift_source=None):
if count[0] > 11:
return {}
count[0] += 1
if not container:
return {'some': 'stuff'}
return proxy_base.was_get_info(
app, env, account, container, ret_not_found, swift_source)
proxy_base.was_get_info = proxy_base.get_info
with mock.patch.object(proxy_base, 'get_info', my_get_info):
proxy_base.get_info = my_get_info
req = Request.blank(
'/v1/a/c',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.com',
'Access-Control-Request-Method': 'GET'})
controller.OPTIONS(req)
self.assertTrue(count[0] < 11)
def test_OPTIONS(self):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'a', 'c')
def my_empty_container_info(*args):
return {}
controller.container_info = my_empty_container_info
req = Request.blank(
'/v1/a/c',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.com',
'Access-Control-Request-Method': 'GET'})
resp = controller.OPTIONS(req)
self.assertEqual(401, resp.status_int)
def my_empty_origin_container_info(*args):
return {'cors': {'allow_origin': None}}
controller.container_info = my_empty_origin_container_info
req = Request.blank(
'/v1/a/c',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.com',
'Access-Control-Request-Method': 'GET'})
resp = controller.OPTIONS(req)
self.assertEqual(401, resp.status_int)
def my_container_info(*args):
return {
'cors': {
'allow_origin': 'http://foo.bar:8080 https://foo.bar',
'max_age': '999',
}
}
controller.container_info = my_container_info
req = Request.blank(
'/v1/a/c',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'https://foo.bar',
'Access-Control-Request-Method': 'GET'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEqual(200, resp.status_int)
self.assertEqual(
'https://foo.bar',
resp.headers['access-control-allow-origin'])
for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split():
self.assertTrue(
verb in resp.headers['access-control-allow-methods'])
self.assertEqual(
len(resp.headers['access-control-allow-methods'].split(', ')),
6)
self.assertEqual('999', resp.headers['access-control-max-age'])
req = Request.blank(
'/v1/a/c',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'https://foo.bar'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEqual(401, resp.status_int)
req = Request.blank('/v1/a/c', {'REQUEST_METHOD': 'OPTIONS'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEqual(200, resp.status_int)
for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split():
self.assertTrue(
verb in resp.headers['Allow'])
self.assertEqual(len(resp.headers['Allow'].split(', ')), 6)
req = Request.blank(
'/v1/a/c',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.bar',
'Access-Control-Request-Method': 'GET'})
resp = controller.OPTIONS(req)
self.assertEqual(401, resp.status_int)
req = Request.blank(
'/v1/a/c',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.bar',
'Access-Control-Request-Method': 'GET'})
controller.app.cors_allow_origin = ['http://foo.bar', ]
resp = controller.OPTIONS(req)
self.assertEqual(200, resp.status_int)
def my_container_info_wildcard(*args):
return {
'cors': {
'allow_origin': '*',
'max_age': '999',
}
}
controller.container_info = my_container_info_wildcard
req = Request.blank(
'/v1/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'https://bar.baz',
'Access-Control-Request-Method': 'GET'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEqual(200, resp.status_int)
self.assertEqual('*', resp.headers['access-control-allow-origin'])
for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split():
self.assertTrue(
verb in resp.headers['access-control-allow-methods'])
self.assertEqual(
len(resp.headers['access-control-allow-methods'].split(', ')),
6)
self.assertEqual('999', resp.headers['access-control-max-age'])
req = Request.blank(
'/v1/a/c/o.jpg',
{'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'https://bar.baz',
'Access-Control-Request-Headers':
'x-foo, x-bar, x-auth-token',
'Access-Control-Request-Method': 'GET'}
)
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEqual(200, resp.status_int)
self.assertEqual(
sortHeaderNames('x-foo, x-bar, x-auth-token'),
sortHeaderNames(resp.headers['access-control-allow-headers']))
def test_CORS_valid(self):
with save_globals():
controller = proxy_server.ContainerController(self.app, 'a', 'c')
def stubContainerInfo(*args):
return {
'cors': {
'allow_origin': 'http://foo.bar'
}
}
controller.container_info = stubContainerInfo
def containerGET(controller, req):
return Response(headers={
'X-Container-Meta-Color': 'red',
'X-Super-Secret': 'hush',
})
req = Request.blank(
'/v1/a/c',
{'REQUEST_METHOD': 'GET'},
headers={'Origin': 'http://foo.bar'})
resp = cors_validation(containerGET)(controller, req)
self.assertEqual(200, resp.status_int)
self.assertEqual('http://foo.bar',
resp.headers['access-control-allow-origin'])
self.assertEqual('red', resp.headers['x-container-meta-color'])
# X-Super-Secret is in the response, but not "exposed"
self.assertEqual('hush', resp.headers['x-super-secret'])
self.assertTrue('access-control-expose-headers' in resp.headers)
exposed = set(
h.strip() for h in
resp.headers['access-control-expose-headers'].split(','))
expected_exposed = set(['cache-control', 'content-language',
'content-type', 'expires', 'last-modified',
'pragma', 'etag', 'x-timestamp',
'x-trans-id', 'x-container-meta-color'])
self.assertEqual(expected_exposed, exposed)
def _gather_x_account_headers(self, controller_call, req, *connect_args,
**kwargs):
seen_headers = []
to_capture = ('X-Account-Partition', 'X-Account-Host',
'X-Account-Device')
def capture_headers(ipaddr, port, device, partition, method,
path, headers=None, query_string=None):
captured = {}
for header in to_capture:
captured[header] = headers.get(header)
seen_headers.append(captured)
with save_globals():
self.app.allow_account_management = True
set_http_connect(*connect_args, give_connect=capture_headers,
**kwargs)
resp = controller_call(req)
self.assertEqual(2, resp.status_int // 100) # sanity check
# don't care about the account HEAD, so throw away the
# first element
return sorted(seen_headers[1:],
key=lambda d: d['X-Account-Host'] or 'Z')
def test_PUT_x_account_headers_with_fewer_account_replicas(self):
self.app.account_ring.set_replicas(2)
req = Request.blank('/v1/a/c', headers={'': ''})
controller = proxy_server.ContainerController(self.app, 'a', 'c')
seen_headers = self._gather_x_account_headers(
controller.PUT, req,
200, 201, 201, 201) # HEAD PUT PUT PUT
self.assertEqual(seen_headers, [
{'X-Account-Host': '10.0.0.0:1000',
'X-Account-Partition': '0',
'X-Account-Device': 'sda'},
{'X-Account-Host': '10.0.0.1:1001',
'X-Account-Partition': '0',
'X-Account-Device': 'sdb'},
{'X-Account-Host': None,
'X-Account-Partition': None,
'X-Account-Device': None}
])
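# When the account ring has more replicas than the container ring,
# the extra account hosts/devices are doubled up on the first
# container node as comma-separated values.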
def test_PUT_x_account_headers_with_more_account_replicas(self):
self.app.account_ring.set_replicas(4)
req = Request.blank('/v1/a/c', headers={'': ''})
controller = proxy_server.ContainerController(self.app, 'a', 'c')
seen_headers = self._gather_x_account_headers(
controller.PUT, req,
200, 201, 201, 201) # HEAD PUT PUT PUT
self.assertEqual(seen_headers, [
{'X-Account-Host': '10.0.0.0:1000,10.0.0.3:1003',
'X-Account-Partition': '0',
'X-Account-Device': 'sda,sdd'},
{'X-Account-Host': '10.0.0.1:1001',
'X-Account-Partition': '0',
'X-Account-Device': 'sdb'},
{'X-Account-Host': '10.0.0.2:1002',
'X-Account-Partition': '0',
'X-Account-Device': 'sdc'}
])
def test_DELETE_x_account_headers_with_fewer_account_replicas(self):
self.app.account_ring.set_replicas(2)
req = Request.blank('/v1/a/c', headers={'': ''})
controller = proxy_server.ContainerController(self.app, 'a', 'c')
seen_headers = self._gather_x_account_headers(
controller.DELETE, req,
200, 204, 204, 204) # HEAD DELETE DELETE DELETE
self.assertEqual(seen_headers, [
{'X-Account-Host': '10.0.0.0:1000',
'X-Account-Partition': '0',
'X-Account-Device': 'sda'},
{'X-Account-Host': '10.0.0.1:1001',
'X-Account-Partition': '0',
'X-Account-Device': 'sdb'},
{'X-Account-Host': None,
'X-Account-Partition': None,
'X-Account-Device': None}
])
def test_DELETE_x_account_headers_with_more_account_replicas(self):
self.app.account_ring.set_replicas(4)
req = Request.blank('/v1/a/c', headers={'': ''})
controller = proxy_server.ContainerController(self.app, 'a', 'c')
seen_headers = self._gather_x_account_headers(
controller.DELETE, req,
200, 204, 204, 204) # HEAD DELETE DELETE DELETE
self.assertEqual(seen_headers, [
{'X-Account-Host': '10.0.0.0:1000,10.0.0.3:1003',
'X-Account-Partition': '0',
'X-Account-Device': 'sda,sdd'},
{'X-Account-Host': '10.0.0.1:1001',
'X-Account-Partition': '0',
'X-Account-Device': 'sdb'},
{'X-Account-Host': '10.0.0.2:1002',
'X-Account-Partition': '0',
'X-Account-Device': 'sdc'}
])
def test_PUT_backed_x_timestamp_header(self):
timestamps = []
def capture_timestamps(*args, **kwargs):
headers = kwargs['headers']
timestamps.append(headers.get('X-Timestamp'))
req = Request.blank('/v1/a/c', method='PUT', headers={'': ''})
with save_globals():
new_connect = set_http_connect(200, # account existence check
201, 201, 201,
give_connect=capture_timestamps)
resp = self.app.handle_request(req)
# sanity
self.assertRaises(StopIteration, new_connect.code_iter.next)
self.assertEqual(2, resp.status_int // 100)
timestamps.pop(0) # account existence check
self.assertEqual(3, len(timestamps))
for timestamp in timestamps:
self.assertEqual(timestamp, timestamps[0])
self.assertTrue(re.match(r'[0-9]{10}\.[0-9]{5}', timestamp))
def test_DELETE_backed_x_timestamp_header(self):
timestamps = []
def capture_timestamps(*args, **kwargs):
headers = kwargs['headers']
timestamps.append(headers.get('X-Timestamp'))
req = Request.blank('/v1/a/c', method='DELETE', headers={'': ''})
self.app.update_request(req)
with save_globals():
new_connect = set_http_connect(200, # account existence check
201, 201, 201,
give_connect=capture_timestamps)
resp = self.app.handle_request(req)
# sanity
self.assertRaises(StopIteration, new_connect.code_iter.next)
self.assertEqual(2, resp.status_int // 100)
timestamps.pop(0) # account existence check
self.assertEqual(3, len(timestamps))
for timestamp in timestamps:
self.assertEqual(timestamp, timestamps[0])
self.assertTrue(re.match(r'[0-9]{10}\.[0-9]{5}', timestamp))
def test_node_read_timeout_retry_to_container(self):
with save_globals():
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': 'GET'})
self.app.node_timeout = 0.1
set_http_connect(200, 200, 200, body='abcdef', slow=[1.0, 1.0])
resp = req.get_response(self.app)
got_exc = False
try:
resp.body
except ChunkReadTimeout:
got_exc = True
self.assertTrue(got_exc)
@patch_policies([StoragePolicy(0, 'zero', True, object_ring=FakeRing())])
class TestAccountController(unittest.TestCase):
def setUp(self):
self.app = proxy_server.Application(None, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing())
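# Helper: stub the backend connections with the given status codes, run
# the request through `method`, and verify the proxy's response status
# (and, optionally, the account status cached in the WSGI environ). The
# check is repeated for the trailing-slash form '/v1/a/'.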
def assert_status_map(self, method, statuses, expected, env_expected=None,
headers=None, **kwargs):
headers = headers or {}
with save_globals():
set_http_connect(*statuses, **kwargs)
req = Request.blank('/v1/a', {}, headers=headers)
self.app.update_request(req)
res = method(req)
self.assertEqual(res.status_int, expected)
if env_expected:
self.assertEqual(res.environ['swift.account/a']['status'],
env_expected)
set_http_connect(*statuses)
req = Request.blank('/v1/a/', {})
self.app.update_request(req)
res = method(req)
self.assertEqual(res.status_int, expected)
if env_expected:
self.assertEqual(res.environ['swift.account/a']['status'],
env_expected)
def test_OPTIONS(self):
with save_globals():
self.app.allow_account_management = False
controller = proxy_server.AccountController(self.app, 'account')
req = Request.blank('/v1/account', {'REQUEST_METHOD': 'OPTIONS'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEqual(200, resp.status_int)
for verb in 'OPTIONS GET POST HEAD'.split():
self.assertTrue(
verb in resp.headers['Allow'])
self.assertEqual(len(resp.headers['Allow'].split(', ')), 4)
# Test a CORS OPTIONS request (i.e. including Origin and
# Access-Control-Request-Method headers)
self.app.allow_account_management = False
controller = proxy_server.AccountController(self.app, 'account')
req = Request.blank(
'/v1/account', {'REQUEST_METHOD': 'OPTIONS'},
headers={'Origin': 'http://foo.com',
'Access-Control-Request-Method': 'GET'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEqual(200, resp.status_int)
for verb in 'OPTIONS GET POST HEAD'.split():
self.assertTrue(
verb in resp.headers['Allow'])
self.assertEqual(len(resp.headers['Allow'].split(', ')), 4)
self.app.allow_account_management = True
controller = proxy_server.AccountController(self.app, 'account')
req = Request.blank('/v1/account', {'REQUEST_METHOD': 'OPTIONS'})
req.content_length = 0
resp = controller.OPTIONS(req)
self.assertEqual(200, resp.status_int)
for verb in 'OPTIONS GET POST PUT DELETE HEAD'.split():
self.assertTrue(
verb in resp.headers['Allow'])
self.assertEqual(len(resp.headers['Allow'].split(', ')), 6)
def test_GET(self):
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
# GET returns after the first successful call to an Account Server
self.assert_status_map(controller.GET, (200,), 200, 200)
self.assert_status_map(controller.GET, (503, 200), 200, 200)
self.assert_status_map(controller.GET, (503, 503, 200), 200, 200)
self.assert_status_map(controller.GET, (204,), 204, 204)
self.assert_status_map(controller.GET, (503, 204), 204, 204)
self.assert_status_map(controller.GET, (503, 503, 204), 204, 204)
self.assert_status_map(controller.GET, (404, 200), 200, 200)
self.assert_status_map(controller.GET, (404, 404, 200), 200, 200)
self.assert_status_map(controller.GET, (404, 503, 204), 204, 204)
# If the account servers fail and autocreate = False, return the
# majority response
self.assert_status_map(controller.GET, (404, 404, 404), 404, 404)
self.assert_status_map(controller.GET, (404, 404, 503), 404, 404)
self.assert_status_map(controller.GET, (404, 503, 503), 503)
self.app.memcache = FakeMemcacheReturnsNone()
self.assert_status_map(controller.GET, (404, 404, 404), 404, 404)
def test_GET_autocreate(self):
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
self.app.memcache = FakeMemcacheReturnsNone()
self.assertFalse(self.app.account_autocreate)
# Repeat the test for autocreate = False and 404 by all
self.assert_status_map(controller.GET,
(404, 404, 404), 404)
self.assert_status_map(controller.GET,
(404, 503, 404), 404)
# When autocreate is True, if none of the nodes respond 2xx
# and a quorum of the nodes responded 404,
# ALL nodes are asked to create the account.
# If successful, the GET request is repeated.
controller.app.account_autocreate = True
self.assert_status_map(controller.GET,
(404, 404, 404), 204)
self.assert_status_map(controller.GET,
(404, 503, 404), 204)
# We always return 503 if no majority among 4xx, 3xx and 2xx is found
self.assert_status_map(controller.GET,
(500, 500, 400), 503)
def test_HEAD(self):
# Same behaviour as GET
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
self.assert_status_map(controller.HEAD, (200,), 200, 200)
self.assert_status_map(controller.HEAD, (503, 200), 200, 200)
self.assert_status_map(controller.HEAD, (503, 503, 200), 200, 200)
self.assert_status_map(controller.HEAD, (204,), 204, 204)
self.assert_status_map(controller.HEAD, (503, 204), 204, 204)
self.assert_status_map(controller.HEAD, (204, 503, 503), 204, 204)
self.assert_status_map(controller.HEAD, (204,), 204, 204)
self.assert_status_map(controller.HEAD, (404, 404, 404), 404, 404)
self.assert_status_map(controller.HEAD, (404, 404, 200), 200, 200)
self.assert_status_map(controller.HEAD, (404, 200), 200, 200)
self.assert_status_map(controller.HEAD, (404, 404, 503), 404, 404)
self.assert_status_map(controller.HEAD, (404, 503, 503), 503)
self.assert_status_map(controller.HEAD, (404, 503, 204), 204, 204)
def test_HEAD_autocreate(self):
# Same behaviour as GET
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
self.app.memcache = FakeMemcacheReturnsNone()
self.assertFalse(self.app.account_autocreate)
self.assert_status_map(controller.HEAD,
(404, 404, 404), 404)
controller.app.account_autocreate = True
self.assert_status_map(controller.HEAD,
(404, 404, 404), 204)
self.assert_status_map(controller.HEAD,
(500, 404, 404), 204)
# We always return 503 if no majority among 4xx, 3xx and 2xx is found
self.assert_status_map(controller.HEAD,
(500, 500, 400), 503)
def test_POST_autocreate(self):
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
self.app.memcache = FakeMemcacheReturnsNone()
# first test with autocreate being False
self.assertFalse(self.app.account_autocreate)
self.assert_status_map(controller.POST,
(404, 404, 404), 404)
# next turn it on and test the account being created, then updated
controller.app.account_autocreate = True
self.assert_status_map(
controller.POST,
(404, 404, 404, 202, 202, 202, 201, 201, 201), 201)
# account_info, PUT account, POST account
self.assert_status_map(
controller.POST,
(404, 404, 503, 201, 201, 503, 204, 204, 504), 204)
# what if create fails
self.assert_status_map(
controller.POST,
(404, 404, 404, 403, 403, 403, 400, 400, 400), 400)
def test_POST_autocreate_with_sysmeta(self):
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
self.app.memcache = FakeMemcacheReturnsNone()
# first test with autocreate being False
self.assertFalse(self.app.account_autocreate)
self.assert_status_map(controller.POST,
(404, 404, 404), 404)
# next turn it on and test the account being created, then updated
controller.app.account_autocreate = True
calls = []
callback = _make_callback_func(calls)
key, value = 'X-Account-Sysmeta-Blah', 'something'
headers = {key: value}
self.assert_status_map(
controller.POST,
(404, 404, 404, 202, 202, 202, 201, 201, 201), 201,
# POST, autocreate PUT, POST again
headers=headers,
give_connect=callback)
self.assertEqual(9, len(calls))
for call in calls:
self.assertTrue(key in call['headers'],
'%s call, key %s missing in headers %s' %
(call['method'], key, call['headers']))
self.assertEqual(value, call['headers'][key])
def test_connection_refused(self):
self.app.account_ring.get_nodes('account')
for dev in self.app.account_ring.devs:
dev['ip'] = '127.0.0.1'
dev['port'] = 1 # can't connect on this port
controller = proxy_server.AccountController(self.app, 'account')
req = Request.blank('/v1/account', environ={'REQUEST_METHOD': 'HEAD'})
self.app.update_request(req)
resp = controller.HEAD(req)
self.assertEqual(resp.status_int, 503)
def test_other_socket_error(self):
self.app.account_ring.get_nodes('account')
for dev in self.app.account_ring.devs:
dev['ip'] = '127.0.0.1'
dev['port'] = -1 # invalid port number
controller = proxy_server.AccountController(self.app, 'account')
req = Request.blank('/v1/account', environ={'REQUEST_METHOD': 'HEAD'})
self.app.update_request(req)
resp = controller.HEAD(req)
self.assertEqual(resp.status_int, 503)
def test_response_get_accept_ranges_header(self):
with save_globals():
set_http_connect(200, 200, body='{}')
controller = proxy_server.AccountController(self.app, 'account')
req = Request.blank('/v1/a?format=json')
self.app.update_request(req)
res = controller.GET(req)
self.assertTrue('accept-ranges' in res.headers)
self.assertEqual(res.headers['accept-ranges'], 'bytes')
def test_response_head_accept_ranges_header(self):
with save_globals():
set_http_connect(200, 200, body='{}')
controller = proxy_server.AccountController(self.app, 'account')
req = Request.blank('/v1/a?format=json')
self.app.update_request(req)
res = controller.HEAD(req)
res.body
self.assertTrue('accept-ranges' in res.headers)
self.assertEqual(res.headers['accept-ranges'], 'bytes')
def test_PUT(self):
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
def test_status_map(statuses, expected, **kwargs):
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/v1/a', {})
req.content_length = 0
self.app.update_request(req)
res = controller.PUT(req)
expected = str(expected)
self.assertEqual(res.status[:len(expected)], expected)
test_status_map((201, 201, 201), 405)
self.app.allow_account_management = True
test_status_map((201, 201, 201), 201)
test_status_map((201, 201, 500), 201)
test_status_map((201, 500, 500), 503)
test_status_map((204, 500, 404), 503)
def test_PUT_max_account_name_length(self):
with save_globals():
self.app.allow_account_management = True
limit = constraints.MAX_ACCOUNT_NAME_LENGTH
controller = proxy_server.AccountController(self.app, '1' * limit)
self.assert_status_map(controller.PUT, (201, 201, 201), 201)
controller = proxy_server.AccountController(
self.app, '2' * (limit + 1))
self.assert_status_map(controller.PUT, (201, 201, 201), 400)
def test_PUT_connect_exceptions(self):
with save_globals():
self.app.allow_account_management = True
controller = proxy_server.AccountController(self.app, 'account')
self.assert_status_map(controller.PUT, (201, 201, -1), 201)
self.assert_status_map(controller.PUT, (201, -1, -1), 503)
self.assert_status_map(controller.PUT, (503, 503, -1), 503)
def test_PUT_status(self):
with save_globals():
self.app.allow_account_management = True
controller = proxy_server.AccountController(self.app, 'account')
self.assert_status_map(controller.PUT, (201, 201, 202), 202)
def test_PUT_metadata(self):
self.metadata_helper('PUT')
def test_POST_metadata(self):
self.metadata_helper('POST')
def metadata_helper(self, method):
for test_header, test_value in (
('X-Account-Meta-TestHeader', 'TestValue'),
('X-Account-Meta-TestHeader', ''),
('X-Remove-Account-Meta-TestHeader', 'anything')):
test_errors = []
def test_connect(ipaddr, port, device, partition, method, path,
headers=None, query_string=None):
if path == '/a':
find_header = test_header
find_value = test_value
if find_header.lower().startswith('x-remove-'):
find_header = \
find_header.lower().replace('-remove', '', 1)
find_value = ''
for k, v in headers.items():
if k.lower() == find_header.lower() and \
v == find_value:
break
else:
test_errors.append('%s: %s not in %s' %
(find_header, find_value, headers))
with save_globals():
self.app.allow_account_management = True
controller = \
proxy_server.AccountController(self.app, 'a')
set_http_connect(201, 201, 201, give_connect=test_connect)
req = Request.blank('/v1/a/c',
environ={'REQUEST_METHOD': method},
headers={test_header: test_value})
self.app.update_request(req)
getattr(controller, method)(req)
self.assertEqual(test_errors, [])
def test_PUT_bad_metadata(self):
self.bad_metadata_helper('PUT')
def test_POST_bad_metadata(self):
self.bad_metadata_helper('POST')
def bad_metadata_helper(self, method):
with save_globals():
self.app.allow_account_management = True
controller = proxy_server.AccountController(self.app, 'a')
set_http_connect(200, 201, 201, 201)
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method},
headers={'X-Account-Meta-' +
('a' * constraints.MAX_META_NAME_LENGTH): 'v'})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank(
'/v1/a/c', environ={'REQUEST_METHOD': method},
headers={'X-Account-Meta-' +
('a' * (constraints.MAX_META_NAME_LENGTH + 1)): 'v'})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 400)
set_http_connect(201, 201, 201)
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method},
headers={'X-Account-Meta-Too-Long':
'a' * constraints.MAX_META_VALUE_LENGTH})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 201)
set_http_connect(201, 201, 201)
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method},
headers={'X-Account-Meta-Too-Long':
'a' * (constraints.MAX_META_VALUE_LENGTH + 1)})
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 400)
set_http_connect(201, 201, 201)
headers = {}
for x in range(constraints.MAX_META_COUNT):
headers['X-Account-Meta-%d' % x] = 'v'
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 201)
set_http_connect(201, 201, 201)
headers = {}
for x in range(constraints.MAX_META_COUNT + 1):
headers['X-Account-Meta-%d' % x] = 'v'
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 400)
set_http_connect(201, 201, 201)
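# Build a metadata set whose total size sits just under
# MAX_META_OVERALL_SIZE (expecting 201), then push it one byte over in
# the following request (expecting 400).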
headers = {}
header_value = 'a' * constraints.MAX_META_VALUE_LENGTH
size = 0
x = 0
while size < (constraints.MAX_META_OVERALL_SIZE - 4
- constraints.MAX_META_VALUE_LENGTH):
size += 4 + constraints.MAX_META_VALUE_LENGTH
headers['X-Account-Meta-%04d' % x] = header_value
x += 1
if constraints.MAX_META_OVERALL_SIZE - size > 1:
headers['X-Account-Meta-a'] = \
'a' * (constraints.MAX_META_OVERALL_SIZE - size - 1)
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 201)
set_http_connect(201, 201, 201)
headers['X-Account-Meta-a'] = \
'a' * (constraints.MAX_META_OVERALL_SIZE - size)
req = Request.blank('/v1/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
self.app.update_request(req)
resp = getattr(controller, method)(req)
self.assertEqual(resp.status_int, 400)
def test_DELETE(self):
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
def test_status_map(statuses, expected, **kwargs):
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/v1/a', {'REQUEST_METHOD': 'DELETE'})
req.content_length = 0
self.app.update_request(req)
res = controller.DELETE(req)
expected = str(expected)
self.assertEqual(res.status[:len(expected)], expected)
test_status_map((201, 201, 201), 405)
self.app.allow_account_management = True
test_status_map((201, 201, 201), 201)
test_status_map((201, 201, 500), 201)
test_status_map((201, 500, 500), 503)
test_status_map((204, 500, 404), 503)
def test_DELETE_with_query_string(self):
# Extra safety in case someone typos a query string for an
# account-level DELETE request that was really meant to be caught by
# some middleware.
with save_globals():
controller = proxy_server.AccountController(self.app, 'account')
def test_status_map(statuses, expected, **kwargs):
set_http_connect(*statuses, **kwargs)
self.app.memcache.store = {}
req = Request.blank('/v1/a?whoops',
environ={'REQUEST_METHOD': 'DELETE'})
req.content_length = 0
self.app.update_request(req)
res = controller.DELETE(req)
expected = str(expected)
self.assertEqual(res.status[:len(expected)], expected)
test_status_map((201, 201, 201), 400)
self.app.allow_account_management = True
test_status_map((201, 201, 201), 400)
test_status_map((201, 201, 500), 400)
test_status_map((201, 500, 500), 400)
test_status_map((204, 500, 404), 400)
@patch_policies([StoragePolicy(0, 'zero', True, object_ring=FakeRing())])
class TestAccountControllerFakeGetResponse(unittest.TestCase):
"""
Test all the faked-out GET responses for accounts that don't exist. They
have to match the responses for empty accounts that really exist.
"""
def setUp(self):
conf = {'account_autocreate': 'yes'}
self.app = proxy_server.Application(conf, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing())
self.app.memcache = FakeMemcacheReturnsNone()
def test_GET_autocreate_accept_json(self):
with save_globals():
set_http_connect(*([404] * 100)) # nonexistent: all backends 404
req = Request.blank(
'/v1/a', headers={'Accept': 'application/json'},
environ={'REQUEST_METHOD': 'GET',
'PATH_INFO': '/v1/a'})
resp = req.get_response(self.app)
self.assertEqual(200, resp.status_int)
self.assertEqual('application/json; charset=utf-8',
resp.headers['Content-Type'])
self.assertEqual("[]", resp.body)
def test_GET_autocreate_format_json(self):
with save_globals():
set_http_connect(*([404] * 100)) # nonexistent: all backends 404
req = Request.blank('/v1/a?format=json',
environ={'REQUEST_METHOD': 'GET',
'PATH_INFO': '/v1/a',
'QUERY_STRING': 'format=json'})
resp = req.get_response(self.app)
self.assertEqual(200, resp.status_int)
self.assertEqual('application/json; charset=utf-8',
resp.headers['Content-Type'])
self.assertEqual("[]", resp.body)
def test_GET_autocreate_accept_xml(self):
with save_globals():
set_http_connect(*([404] * 100)) # nonexistent: all backends 404
req = Request.blank('/v1/a', headers={"Accept": "text/xml"},
environ={'REQUEST_METHOD': 'GET',
'PATH_INFO': '/v1/a'})
resp = req.get_response(self.app)
self.assertEqual(200, resp.status_int)
self.assertEqual('text/xml; charset=utf-8',
resp.headers['Content-Type'])
empty_xml_listing = ('<?xml version="1.0" encoding="UTF-8"?>\n'
'<account name="a">\n</account>')
self.assertEqual(empty_xml_listing, resp.body)
def test_GET_autocreate_format_xml(self):
with save_globals():
set_http_connect(*([404] * 100)) # nonexistent: all backends 404
req = Request.blank('/v1/a?format=xml',
environ={'REQUEST_METHOD': 'GET',
'PATH_INFO': '/v1/a',
'QUERY_STRING': 'format=xml'})
resp = req.get_response(self.app)
self.assertEqual(200, resp.status_int)
self.assertEqual('application/xml; charset=utf-8',
resp.headers['Content-Type'])
empty_xml_listing = ('<?xml version="1.0" encoding="UTF-8"?>\n'
'<account name="a">\n</account>')
self.assertEqual(empty_xml_listing, resp.body)
def test_GET_autocreate_accept_unknown(self):
with save_globals():
set_http_connect(*([404] * 100)) # nonexistent: all backends 404
req = Request.blank('/v1/a', headers={"Accept": "mystery/meat"},
environ={'REQUEST_METHOD': 'GET',
'PATH_INFO': '/v1/a'})
resp = req.get_response(self.app)
self.assertEqual(406, resp.status_int)
def test_GET_autocreate_format_invalid_utf8(self):
with save_globals():
set_http_connect(*([404] * 100)) # nonexistent: all backends 404
req = Request.blank('/v1/a?format=\xff\xfe',
environ={'REQUEST_METHOD': 'GET',
'PATH_INFO': '/v1/a',
'QUERY_STRING': 'format=\xff\xfe'})
resp = req.get_response(self.app)
self.assertEqual(400, resp.status_int)
def test_account_acl_header_access(self):
acl = {
'admin': ['AUTH_alice'],
'read-write': ['AUTH_bob'],
'read-only': ['AUTH_carol'],
}
prefix = get_sys_meta_prefix('account')
privileged_headers = {(prefix + 'core-access-control'): format_acl(
version=2, acl_dict=acl)}
app = proxy_server.Application(
None, FakeMemcache(), account_ring=FakeRing(),
container_ring=FakeRing())
with save_globals():
# Mock account server will provide privileged information (ACLs)
set_http_connect(200, 200, 200, headers=privileged_headers)
req = Request.blank('/v1/a', environ={'REQUEST_METHOD': 'GET'})
resp = app.handle_request(req)
# Not a swift_owner -- ACLs should NOT be in response
header = 'X-Account-Access-Control'
self.assertTrue(header not in resp.headers, '%r was in %r' % (
header, resp.headers))
# Same setup -- mock acct server will provide ACLs
set_http_connect(200, 200, 200, headers=privileged_headers)
req = Request.blank('/v1/a', environ={'REQUEST_METHOD': 'GET',
'swift_owner': True})
resp = app.handle_request(req)
# For a swift_owner, the ACLs *should* be in response
self.assertTrue(header in resp.headers, '%r not in %r' % (
header, resp.headers))
def test_account_acls_through_delegation(self):
# Define a way to grab the requests sent out from the AccountController
# to the Account Server, and a way to inject responses we'd like the
# Account Server to return.
resps_to_send = []
@contextmanager
def patch_account_controller_method(verb):
old_method = getattr(proxy_server.AccountController, verb)
new_method = lambda self, req, *_, **__: resps_to_send.pop(0)
try:
setattr(proxy_server.AccountController, verb, new_method)
yield
finally:
setattr(proxy_server.AccountController, verb, old_method)
def make_test_request(http_method, swift_owner=True):
env = {
'REQUEST_METHOD': http_method,
'swift_owner': swift_owner,
}
acl = {
'admin': ['foo'],
'read-write': ['bar'],
'read-only': ['bas'],
}
headers = {} if http_method in ('GET', 'HEAD') else {
'x-account-access-control': format_acl(version=2, acl_dict=acl)
}
return Request.blank('/v1/a', environ=env, headers=headers)
# Our AccountController will invoke methods to communicate with the
# Account Server, and they will return responses like these:
def make_canned_response(http_method):
acl = {
'admin': ['foo'],
'read-write': ['bar'],
'read-only': ['bas'],
}
headers = {'x-account-sysmeta-core-access-control': format_acl(
version=2, acl_dict=acl)}
canned_resp = Response(headers=headers)
canned_resp.environ = {
'PATH_INFO': '/acct',
'REQUEST_METHOD': http_method,
}
resps_to_send.append(canned_resp)
app = proxy_server.Application(
None, FakeMemcache(), account_ring=FakeRing(),
container_ring=FakeRing())
app.allow_account_management = True
ext_header = 'x-account-access-control'
with patch_account_controller_method('GETorHEAD_base'):
# GET/HEAD requests should remap sysmeta headers from acct server
for verb in ('GET', 'HEAD'):
make_canned_response(verb)
req = make_test_request(verb)
resp = app.handle_request(req)
h = parse_acl(version=2, data=resp.headers.get(ext_header))
self.assertEqual(h['admin'], ['foo'])
self.assertEqual(h['read-write'], ['bar'])
self.assertEqual(h['read-only'], ['bas'])
# swift_owner = False: GET/HEAD shouldn't return sensitive info
make_canned_response(verb)
req = make_test_request(verb, swift_owner=False)
resp = app.handle_request(req)
h = resp.headers
self.assertEqual(None, h.get(ext_header))
# swift_owner unset: GET/HEAD shouldn't return sensitive info
make_canned_response(verb)
req = make_test_request(verb, swift_owner=False)
del req.environ['swift_owner']
resp = app.handle_request(req)
h = resp.headers
self.assertEqual(None, h.get(ext_header))
# Verify that PUT/POST requests remap sysmeta headers from acct server
with patch_account_controller_method('make_requests'):
make_canned_response('PUT')
req = make_test_request('PUT')
resp = app.handle_request(req)
h = parse_acl(version=2, data=resp.headers.get(ext_header))
self.assertEqual(h['admin'], ['foo'])
self.assertEqual(h['read-write'], ['bar'])
self.assertEqual(h['read-only'], ['bas'])
make_canned_response('POST')
req = make_test_request('POST')
resp = app.handle_request(req)
h = parse_acl(version=2, data=resp.headers.get(ext_header))
self.assertEqual(h['admin'], ['foo'])
self.assertEqual(h['read-write'], ['bar'])
self.assertEqual(h['read-only'], ['bas'])
class FakeObjectController(object):
def __init__(self):
self.app = self
self.logger = self
self.account_name = 'a'
self.container_name = 'c'
self.object_name = 'o'
self.trans_id = 'tx1'
self.object_ring = FakeRing()
self.node_timeout = 1
self.rate_limit_after_segment = 3
self.rate_limit_segments_per_sec = 2
self.GETorHEAD_base_args = []
def exception(self, *args):
self.exception_args = args
self.exception_info = sys.exc_info()
def GETorHEAD_base(self, *args):
self.GETorHEAD_base_args.append(args)
req = args[0]
path = args[4]
body = data = path[-1] * int(path[-1])
if req.range:
r = req.range.ranges_for_length(len(data))
if r:
(start, stop) = r[0]
body = data[start:stop]
resp = Response(app_iter=iter(body))
return resp
def iter_nodes(self, ring, partition):
for node in ring.get_part_nodes(partition):
yield node
for node in ring.get_more_nodes(partition):
yield node
def sort_nodes(self, nodes):
return nodes
def set_node_timing(self, node, timing):
return
class TestProxyObjectPerformance(unittest.TestCase):
def setUp(self):
# This is just a simple test that can be used to verify and debug the
# various data paths between the proxy server and the object
# server. Used as a playground to debug buffer sizes for sockets.
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
# Client is transmitting in 2 MB chunks
fd = sock.makefile('wb', 2 * 1024 * 1024)
# Small, fast for testing
obj_len = 2 * 64 * 1024
# Use 1 GB or more for measurements
# obj_len = 2 * 512 * 1024 * 1024
self.path = '/v1/a/c/o.large'
fd.write('PUT %s HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'Content-Length: %s\r\n'
'Content-Type: application/octet-stream\r\n'
'\r\n' % (self.path, str(obj_len)))
fd.write('a' * obj_len)
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
self.obj_len = obj_len
def test_GET_debug_large_file(self):
for i in range(10):
start = time.time()
prolis = _test_sockets[0]
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
# Client is reading in 2 MB chunks
fd = sock.makefile('wb', 2 * 1024 * 1024)
fd.write('GET %s HTTP/1.1\r\n'
'Host: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n'
'\r\n' % self.path)
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
total = 0
while True:
buf = fd.read(100000)
if not buf:
break
total += len(buf)
self.assertEqual(total, self.obj_len)
end = time.time()
print("Run %02d took %07.03f" % (i, end - start))
@patch_policies([StoragePolicy(0, 'migrated', object_ring=FakeRing()),
StoragePolicy(1, 'ernie', True, object_ring=FakeRing()),
StoragePolicy(2, 'deprecated', is_deprecated=True,
object_ring=FakeRing()),
StoragePolicy(3, 'bert', object_ring=FakeRing())])
class TestSwiftInfo(unittest.TestCase):
def setUp(self):
utils._swift_info = {}
utils._swift_admin_info = {}
def test_registered_defaults(self):
proxy_server.Application({}, FakeMemcache(),
account_ring=FakeRing(),
container_ring=FakeRing())
si = utils.get_swift_info()['swift']
self.assertTrue('version' in si)
self.assertEqual(si['max_file_size'], constraints.MAX_FILE_SIZE)
self.assertEqual(si['max_meta_name_length'],
constraints.MAX_META_NAME_LENGTH)
self.assertEqual(si['max_meta_value_length'],
constraints.MAX_META_VALUE_LENGTH)
self.assertEqual(si['max_meta_count'], constraints.MAX_META_COUNT)
self.assertEqual(si['max_header_size'], constraints.MAX_HEADER_SIZE)
self.assertEqual(si['max_meta_overall_size'],
constraints.MAX_META_OVERALL_SIZE)
self.assertEqual(si['account_listing_limit'],
constraints.ACCOUNT_LISTING_LIMIT)
self.assertEqual(si['container_listing_limit'],
constraints.CONTAINER_LISTING_LIMIT)
self.assertEqual(si['max_account_name_length'],
constraints.MAX_ACCOUNT_NAME_LENGTH)
self.assertEqual(si['max_container_name_length'],
constraints.MAX_CONTAINER_NAME_LENGTH)
self.assertEqual(si['max_object_name_length'],
constraints.MAX_OBJECT_NAME_LENGTH)
self.assertTrue('strict_cors_mode' in si)
self.assertEqual(si['allow_account_management'], False)
self.assertEqual(si['account_autocreate'], False)
# This setting is by default excluded by disallowed_sections
self.assertEqual(si['valid_api_versions'],
constraints.VALID_API_VERSIONS)
# this next test is deliberately brittle in order to alert if
# other items are added to swift info
self.assertEqual(len(si), 18)
self.assertTrue('policies' in si)
sorted_pols = sorted(si['policies'], key=operator.itemgetter('name'))
self.assertEqual(len(sorted_pols), 3)
for policy in sorted_pols:
self.assertNotEqual(policy['name'], 'deprecated')
self.assertEqual(sorted_pols[0]['name'], 'bert')
self.assertEqual(sorted_pols[1]['name'], 'ernie')
self.assertEqual(sorted_pols[2]['name'], 'migrated')
class TestSocketObjectVersions(unittest.TestCase):
def setUp(self):
global _test_sockets
self.prolis = prolis = listen(('localhost', 0))
self._orig_prolis = _test_sockets[0]
allowed_headers = ', '.join([
'content-encoding',
'x-object-manifest',
'content-disposition',
'foo'
])
conf = {'devices': _testdir, 'swift_dir': _testdir,
'mount_check': 'false', 'allowed_headers': allowed_headers}
prosrv = versioned_writes.VersionedWritesMiddleware(
proxy_logging.ProxyLoggingMiddleware(
_test_servers[0], conf,
logger=_test_servers[0].logger),
{})
self.coro = spawn(wsgi.server, prolis, prosrv, NullLogger())
# replace global prosrv with one that's filtered with version
# middleware
self.sockets = list(_test_sockets)
self.sockets[0] = prolis
_test_sockets = tuple(self.sockets)
def tearDown(self):
self.coro.kill()
# put the global state back
global _test_sockets
self.sockets[0] = self._orig_prolis
_test_sockets = tuple(self.sockets)
def test_version_manifest(self, oc='versions', vc='vers', o='name'):
versions_to_create = 3
# Create a container for our versioned object testing
(prolis, acc1lis, acc2lis, con1lis, con2lis, obj1lis,
obj2lis, obj3lis) = _test_sockets
pre = quote('%03x' % len(o))
osub = '%s/sub' % o
presub = quote('%03x' % len(osub))
osub = quote(osub)
presub = quote(presub)
oc = quote(oc)
vc = quote(vc)
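# The test drives the proxy through a raw socket: create a container
# with X-Versions-Location, write several versions of an object, then
# verify GET/COPY/POST/DELETE behaviour against the versions container.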
def put_container():
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Content-Length: 0\r\nX-Versions-Location: %s\r\n\r\n'
% (oc, vc))
fd.flush()
headers = readuntil2crlfs(fd)
fd.read()
return headers
headers = put_container()
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
def get_container():
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n\r\n\r\n' % oc)
fd.flush()
headers = readuntil2crlfs(fd)
body = fd.read()
return headers, body
# check that the header was set
headers, body = get_container()
exp = 'HTTP/1.1 2' # 2xx series response
self.assertEqual(headers[:len(exp)], exp)
self.assertIn('X-Versions-Location: %s' % vc, headers)
def put_version_container():
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Content-Length: 0\r\n\r\n' % vc)
fd.flush()
headers = readuntil2crlfs(fd)
fd.read()
return headers
# make the container for the object versions
headers = put_version_container()
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
def put(version):
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 5\r\nContent-Type: text/jibberish%s'
'\r\n\r\n%05d\r\n' % (oc, o, version, version))
fd.flush()
headers = readuntil2crlfs(fd)
fd.read()
return headers
def get(container=oc, obj=o):
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n'
'\r\n' % (container, obj))
fd.flush()
headers = readuntil2crlfs(fd)
body = fd.read()
return headers, body
# Create the versioned file
headers = put(0)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
# Create the object versions
for version in range(1, versions_to_create):
sleep(.01) # guarantee that the timestamp changes
headers = put(version)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
# Ensure retrieving the manifest file gets the latest version
headers, body = get()
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
self.assertIn('Content-Type: text/jibberish%s' % version, headers)
self.assertNotIn('X-Object-Meta-Foo: barbaz', headers)
self.assertEqual(body, '%05d' % version)
def get_version_container():
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/%s HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\n'
'X-Storage-Token: t\r\n\r\n' % vc)
fd.flush()
headers = readuntil2crlfs(fd)
body = fd.read()
return headers, body
# Ensure we have the right number of versions saved
headers, body = get_version_container()
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
versions = [x for x in body.split('\n') if x]
self.assertEqual(len(versions), versions_to_create - 1)
def delete():
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('DELETE /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r'
'\nConnection: close\r\nX-Storage-Token: t\r\n\r\n'
% (oc, o))
fd.flush()
headers = readuntil2crlfs(fd)
fd.read()
return headers
def copy():
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('COPY /v1/a/%s/%s HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: '
't\r\nDestination: %s/copied_name\r\n'
'Content-Length: 0\r\n\r\n' % (oc, o, oc))
fd.flush()
headers = readuntil2crlfs(fd)
fd.read()
return headers
# copy a version and make sure the version info is stripped
headers = copy()
exp = 'HTTP/1.1 2' # 2xx series response to the COPY
self.assertEqual(headers[:len(exp)], exp)
def get_copy():
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/%s/copied_name HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\n'
'X-Auth-Token: t\r\n\r\n' % oc)
fd.flush()
headers = readuntil2crlfs(fd)
body = fd.read()
return headers, body
headers, body = get_copy()
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
self.assertEqual(body, '%05d' % version)
def post():
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('POST /v1/a/%s/%s HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: '
't\r\nContent-Type: foo/bar\r\nContent-Length: 0\r\n'
'X-Object-Meta-Bar: foo\r\n\r\n' % (oc, o))
fd.flush()
headers = readuntil2crlfs(fd)
fd.read()
return headers
# post and make sure it's updated
headers = post()
exp = 'HTTP/1.1 2' # 2xx series response to the POST
self.assertEqual(headers[:len(exp)], exp)
headers, body = get()
self.assertIn('Content-Type: foo/bar', headers)
self.assertIn('X-Object-Meta-Bar: foo', headers)
self.assertEqual(body, '%05d' % version)
# check container listing
headers, body = get_container()
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
# Delete the object versions
for segment in range(versions_to_create - 1, 0, -1):
headers = delete()
exp = 'HTTP/1.1 2' # 2xx series response
self.assertEqual(headers[:len(exp)], exp)
# Ensure retrieving the manifest file gets the latest version
headers, body = get()
exp = 'HTTP/1.1 200'
self.assertEqual(headers[:len(exp)], exp)
self.assertIn('Content-Type: text/jibberish%s' % (segment - 1),
headers)
self.assertEqual(body, '%05d' % (segment - 1))
# Ensure we have the right number of versions saved
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r'
'\n' % (vc, pre, o))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 2' # 2xx series response
self.assertEqual(headers[:len(exp)], exp)
body = fd.read()
versions = [x for x in body.split('\n') if x]
self.assertEqual(len(versions), segment - 1)
# there is now one version left (in the manifest)
# Ensure we have no saved versions
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n'
% (vc, pre, o))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 204 No Content'
self.assertEqual(headers[:len(exp)], exp)
# delete the last version
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('DELETE /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n\r\n' % (oc, o))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 2' # 2xx series response
self.assertEqual(headers[:len(exp)], exp)
# Ensure it's all gone
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/%s/%s HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n'
% (oc, o))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 404'
self.assertEqual(headers[:len(exp)], exp)
# make sure manifest files will be ignored
for _junk in range(1, versions_to_create):
sleep(.01) # guarantee that the timestamp changes
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 0\r\n'
'Content-Type: text/jibberish0\r\n'
'Foo: barbaz\r\nX-Object-Manifest: %s/%s/\r\n\r\n'
% (oc, o, oc, o))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nhost: '
'localhost\r\nconnection: close\r\nx-auth-token: t\r\n\r\n'
% (vc, pre, o))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 204 No Content'
self.assertEqual(headers[:len(exp)], exp)
# DELETE v1/a/c/obj shouldn't delete v1/a/c/obj/sub versions
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n'
'Foo: barbaz\r\n\r\n00000\r\n' % (oc, o))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 5\r\nContent-Type: text/jibberish0\r\n'
'Foo: barbaz\r\n\r\n00001\r\n' % (oc, o))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 4\r\nContent-Type: text/jibberish0\r\n'
'Foo: barbaz\r\n\r\nsub1\r\n' % (oc, osub))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/%s/%s HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 4\r\nContent-Type: text/jibberish0\r\n'
'Foo: barbaz\r\n\r\nsub2\r\n' % (oc, osub))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('DELETE /v1/a/%s/%s HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n\r\n' % (oc, o))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 2' # 2xx series response
self.assertEqual(headers[:len(exp)], exp)
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('GET /v1/a/%s?prefix=%s%s/ HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Auth-Token: t\r\n\r\n'
% (vc, presub, osub))
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 2' # 2xx series response
self.assertEqual(headers[:len(exp)], exp)
body = fd.read()
versions = [x for x in body.split('\n') if x]
self.assertEqual(len(versions), 1)
# Check for when the versions target container doesn't exist
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/%swhoops HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n'
'Content-Length: 0\r\nX-Versions-Location: none\r\n\r\n' % oc)
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
# Create the versioned file
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/%swhoops/foo HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 5\r\n\r\n00000\r\n' % oc)
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 201'
self.assertEqual(headers[:len(exp)], exp)
# Create another version
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('PUT /v1/a/%swhoops/foo HTTP/1.1\r\nHost: '
'localhost\r\nConnection: close\r\nX-Storage-Token: '
't\r\nContent-Length: 5\r\n\r\n00001\r\n' % oc)
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 412'
self.assertEqual(headers[:len(exp)], exp)
# Delete the object
sock = connect_tcp(('localhost', prolis.getsockname()[1]))
fd = sock.makefile()
fd.write('DELETE /v1/a/%swhoops/foo HTTP/1.1\r\nHost: localhost\r\n'
'Connection: close\r\nX-Storage-Token: t\r\n\r\n' % oc)
fd.flush()
headers = readuntil2crlfs(fd)
exp = 'HTTP/1.1 2' # 2xx response
self.assertEqual(headers[:len(exp)], exp)
def test_version_manifest_utf8(self):
oc = '0_oc_non_ascii\xc2\xa3'
vc = '0_vc_non_ascii\xc2\xa3'
o = '0_o_non_ascii\xc2\xa3'
self.test_version_manifest(oc, vc, o)
def test_version_manifest_utf8_container(self):
oc = '1_oc_non_ascii\xc2\xa3'
vc = '1_vc_ascii'
o = '1_o_ascii'
self.test_version_manifest(oc, vc, o)
def test_version_manifest_utf8_version_container(self):
oc = '2_oc_ascii'
vc = '2_vc_non_ascii\xc2\xa3'
o = '2_o_ascii'
self.test_version_manifest(oc, vc, o)
def test_version_manifest_utf8_containers(self):
oc = '3_oc_non_ascii\xc2\xa3'
vc = '3_vc_non_ascii\xc2\xa3'
o = '3_o_ascii'
self.test_version_manifest(oc, vc, o)
def test_version_manifest_utf8_object(self):
oc = '4_oc_ascii'
vc = '4_vc_ascii'
o = '4_o_non_ascii\xc2\xa3'
self.test_version_manifest(oc, vc, o)
def test_version_manifest_utf8_version_container_utf_object(self):
oc = '5_oc_ascii'
vc = '5_vc_non_ascii\xc2\xa3'
o = '5_o_non_ascii\xc2\xa3'
self.test_version_manifest(oc, vc, o)
def test_version_manifest_utf8_container_utf_object(self):
oc = '6_oc_non_ascii\xc2\xa3'
vc = '6_vc_ascii'
o = '6_o_non_ascii\xc2\xa3'
self.test_version_manifest(oc, vc, o)
if __name__ == '__main__':
setup()
try:
unittest.main()
finally:
teardown()
| apache-2.0 | -1,199,167,596,724,305,700 | 43.616025 | 79 | 0.521227 | false |
ucsd-ccbb/Oncolist | src/restLayer/app/status.py | 1 | 2357 | import time
import math
import logging as log
class Status:
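# Simple progress reporter: call log() inside a loop and it emits at
# most one message per `period` seconds, estimating the time remaining
# from the fraction of `n` units completed so far.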
def __init__(self, name, logger=None):
self._time = time.time()
self._last_period = 0
self._period = 1
self._name = name
self._logger = logger if logger else log
self._template = "{name}: {m} of {n} ({percent:0.1%}) in {elapsed} ({remaining} remaining)"
def units(self, units):
self._template = "{name}: {m} of {n} " + units + " ({percent:0.1%}) in {elapsed} ({remaining} remaining)"
return self
def template(self, template):
self._template = template
return self
def period(self, period):
self._period = period
return self
def n(self, n):
self._n = n
return self
def fid(self, fid):
self._fid = fid
p = fid.tell()
fid.seek(0, 2) # move to end
self.n(fid.tell())
fid.seek(p) # return to original location
self.units('bytes')
self._logger.debug("file is %d bytes", self._n)
return self
def log(self, m=None):
dt = time.time() - self._time
this_period = math.floor(dt / self._period)
if this_period > self._last_period:
self._last_period = this_period
if m is None:
try:
m = self._fid.tell()
except AttributeError:
self._logger.warning("need to call status.log with a value")
p = float(m) / float(self._n)
msg = self._template.format(
name=self._name,
m=m,
n=self._n,
percent=p,
elapsed=humanize(dt),
remaining=humanize((1 - p) / p * dt) if m > 0 else "unknown"
)
self._logger.info(msg)
return self
def start(self):
self._logger.info(("" if self._name is None else self._name + " - ") + "started")
self._time = time.time()
self._last_period = 0
return self
def stop(self):
self._logger.info(("" if self._name is None else self._name + " - ") + "finished in %s", humanize(time.time() - self._time))
return self
def humanize(secs):
mins, secs = divmod(secs, 60)
hours, mins = divmod(mins, 60)
return '%02d:%02d:%02d' % (hours, mins, secs)
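# Illustrative usage sketch (not part of the original module; `rows` and
# `process` are hypothetical):
#
#     status = Status('load rows', logger=log).period(5).n(10000)
#     status.start()
#     for i, row in enumerate(rows):
#         process(row)
#         status.log(i)
#     status.stop()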
| mit | -3,635,773,776,725,758,000 | 27.743902 | 132 | 0.512092 | false |
google/oauth2client | tests/contrib/test_flask_util.py | 15 | 21608 | # Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for the Flask utilities"""
import datetime
import json
import logging
import unittest
import flask
import mock
import six.moves.http_client as httplib
import six.moves.urllib.parse as urlparse
import oauth2client
from oauth2client import client
from oauth2client import clientsecrets
from oauth2client.contrib import flask_util
from tests import http_mock
DEFAULT_RESP = """\
{
"access_token": "foo_access_token",
"expires_in": 3600,
"extra": "value",
"refresh_token": "foo_refresh_token"
}
"""
class FlaskOAuth2Tests(unittest.TestCase):
def setUp(self):
self.app = flask.Flask(__name__)
self.app.testing = True
self.app.config['SECRET_KEY'] = 'notasecert'
self.app.logger.setLevel(logging.CRITICAL)
self.oauth2 = flask_util.UserOAuth2(
self.app,
client_id='client_idz',
client_secret='client_secretz')
def _generate_credentials(self, scopes=None):
return client.OAuth2Credentials(
'access_tokenz',
'client_idz',
'client_secretz',
'refresh_tokenz',
datetime.datetime.utcnow() + datetime.timedelta(seconds=3600),
oauth2client.GOOGLE_TOKEN_URI,
'Test',
id_token={
'sub': '123',
'email': '[email protected]'
},
scopes=scopes)
def test_explicit_configuration(self):
oauth2 = flask_util.UserOAuth2(
flask.Flask(__name__), client_id='id', client_secret='secret')
self.assertEqual(oauth2.client_id, 'id')
self.assertEqual(oauth2.client_secret, 'secret')
return_val = (
clientsecrets.TYPE_WEB,
{'client_id': 'id', 'client_secret': 'secret'})
with mock.patch('oauth2client.clientsecrets.loadfile',
return_value=return_val):
oauth2 = flask_util.UserOAuth2(
flask.Flask(__name__), client_secrets_file='file.json')
self.assertEqual(oauth2.client_id, 'id')
self.assertEqual(oauth2.client_secret, 'secret')
def test_delayed_configuration(self):
app = flask.Flask(__name__)
oauth2 = flask_util.UserOAuth2()
oauth2.init_app(app, client_id='id', client_secret='secret')
self.assertEqual(oauth2.app, app)
def test_explicit_storage(self):
storage_mock = mock.Mock()
oauth2 = flask_util.UserOAuth2(
flask.Flask(__name__), storage=storage_mock, client_id='id',
client_secret='secret')
self.assertEqual(oauth2.storage, storage_mock)
def test_explicit_scopes(self):
oauth2 = flask_util.UserOAuth2(
flask.Flask(__name__), scopes=['1', '2'], client_id='id',
client_secret='secret')
self.assertEqual(oauth2.scopes, ['1', '2'])
def test_bad_client_secrets(self):
return_val = (
'other',
{'client_id': 'id', 'client_secret': 'secret'})
with mock.patch('oauth2client.clientsecrets.loadfile',
return_value=return_val):
with self.assertRaises(ValueError):
flask_util.UserOAuth2(flask.Flask(__name__),
client_secrets_file='file.json')
def test_app_configuration(self):
app = flask.Flask(__name__)
app.config['GOOGLE_OAUTH2_CLIENT_ID'] = 'id'
app.config['GOOGLE_OAUTH2_CLIENT_SECRET'] = 'secret'
oauth2 = flask_util.UserOAuth2(app)
self.assertEqual(oauth2.client_id, 'id')
self.assertEqual(oauth2.client_secret, 'secret')
return_val = (
clientsecrets.TYPE_WEB,
{'client_id': 'id2', 'client_secret': 'secret2'})
with mock.patch('oauth2client.clientsecrets.loadfile',
return_value=return_val):
app = flask.Flask(__name__)
app.config['GOOGLE_OAUTH2_CLIENT_SECRETS_FILE'] = 'file.json'
oauth2 = flask_util.UserOAuth2(app)
self.assertEqual(oauth2.client_id, 'id2')
self.assertEqual(oauth2.client_secret, 'secret2')
def test_no_configuration(self):
with self.assertRaises(ValueError):
flask_util.UserOAuth2(flask.Flask(__name__))
def test_create_flow(self):
with self.app.test_request_context():
flow = self.oauth2._make_flow()
state = json.loads(flow.params['state'])
self.assertIn('google_oauth2_csrf_token', flask.session)
self.assertEqual(
flask.session['google_oauth2_csrf_token'], state['csrf_token'])
self.assertEqual(flow.client_id, self.oauth2.client_id)
self.assertEqual(flow.client_secret, self.oauth2.client_secret)
self.assertIn('http', flow.redirect_uri)
self.assertIn('oauth2callback', flow.redirect_uri)
flow = self.oauth2._make_flow(return_url='/return_url')
state = json.loads(flow.params['state'])
self.assertEqual(state['return_url'], '/return_url')
flow = self.oauth2._make_flow(extra_arg='test')
self.assertEqual(flow.params['extra_arg'], 'test')
# Test extra args specified in the constructor.
app = flask.Flask(__name__)
app.config['SECRET_KEY'] = 'notasecert'
oauth2 = flask_util.UserOAuth2(
app, client_id='client_id', client_secret='secret',
extra_arg='test')
with app.test_request_context():
flow = oauth2._make_flow()
self.assertEqual(flow.params['extra_arg'], 'test')
def test_authorize_view(self):
with self.app.test_client() as client:
response = client.get('/oauth2authorize')
location = response.headers['Location']
q = urlparse.parse_qs(location.split('?', 1)[1])
state = json.loads(q['state'][0])
self.assertIn(oauth2client.GOOGLE_AUTH_URI, location)
self.assertNotIn(self.oauth2.client_secret, location)
self.assertIn(self.oauth2.client_id, q['client_id'])
self.assertEqual(
flask.session['google_oauth2_csrf_token'], state['csrf_token'])
self.assertEqual(state['return_url'], '/')
with self.app.test_client() as client:
response = client.get('/oauth2authorize?return_url=/test')
location = response.headers['Location']
q = urlparse.parse_qs(location.split('?', 1)[1])
state = json.loads(q['state'][0])
self.assertEqual(state['return_url'], '/test')
with self.app.test_client() as client:
response = client.get('/oauth2authorize?extra_param=test')
location = response.headers['Location']
self.assertIn('extra_param=test', location)
def _setup_callback_state(self, client, **kwargs):
with self.app.test_request_context():
# Flask doesn't create a request context with a session
# transaction for some reason, so set up the flow here,
# then apply it to the session in the transaction.
if not kwargs:
self.oauth2._make_flow(return_url='/return_url')
else:
self.oauth2._make_flow(**kwargs)
with client.session_transaction() as session:
session.update(flask.session)
csrf_token = session['google_oauth2_csrf_token']
flow = flask_util._get_flow_for_token(csrf_token)
state = flow.params['state']
return state
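# A successful callback should exchange the auth code for tokens via the
# (mocked) token endpoint, store the resulting credentials, and redirect
# to the return_url captured in the state parameter.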
def test_callback_view(self):
self.oauth2.storage = mock.Mock()
with self.app.test_client() as client:
with mock.patch(
'oauth2client.transport.get_http_object') as new_http:
# Set-up mock.
http = http_mock.HttpMock(data=DEFAULT_RESP)
new_http.return_value = http
# Run tests.
state = self._setup_callback_state(client)
response = client.get(
'/oauth2callback?state={0}&code=codez'.format(state))
self.assertEqual(response.status_code, httplib.FOUND)
self.assertIn('/return_url', response.headers['Location'])
self.assertIn(self.oauth2.client_secret, http.body)
self.assertIn('codez', http.body)
self.assertTrue(self.oauth2.storage.put.called)
# Check the mocks were called.
new_http.assert_called_once_with()
def test_authorize_callback(self):
self.oauth2.authorize_callback = mock.Mock()
self.test_callback_view()
self.assertTrue(self.oauth2.authorize_callback.called)
def test_callback_view_errors(self):
# Error supplied to callback
with self.app.test_client() as client:
with client.session_transaction() as session:
session['google_oauth2_csrf_token'] = 'tokenz'
response = client.get('/oauth2callback?state={}&error=something')
self.assertEqual(response.status_code, httplib.BAD_REQUEST)
self.assertIn('something', response.data.decode('utf-8'))
# Error supplied to callback with html
with self.app.test_client() as client:
with client.session_transaction() as session:
session['google_oauth2_csrf_token'] = 'tokenz'
response = client.get(
'/oauth2callback?state={}&error=<script>something<script>')
self.assertEqual(response.status_code, httplib.BAD_REQUEST)
self.assertIn(
'<script>something<script>',
response.data.decode('utf-8'))
# CSRF mismatch
with self.app.test_client() as client:
with client.session_transaction() as session:
session['google_oauth2_csrf_token'] = 'goodstate'
state = json.dumps({
'csrf_token': 'badstate',
'return_url': '/return_url'
})
response = client.get(
'/oauth2callback?state={0}&code=codez'.format(state))
self.assertEqual(response.status_code, httplib.BAD_REQUEST)
# KeyError, no CSRF state.
with self.app.test_client() as client:
response = client.get('/oauth2callback?state={}&code=codez')
self.assertEqual(response.status_code, httplib.BAD_REQUEST)
# Code exchange error
with self.app.test_client() as client:
state = self._setup_callback_state(client)
with mock.patch(
'oauth2client.transport.get_http_object') as new_http:
# Set-up mock.
new_http.return_value = http_mock.HttpMock(
headers={'status': httplib.INTERNAL_SERVER_ERROR},
data=DEFAULT_RESP)
# Run tests.
response = client.get(
'/oauth2callback?state={0}&code=codez'.format(state))
self.assertEqual(response.status_code, httplib.BAD_REQUEST)
# Check the mocks were called.
new_http.assert_called_once_with()
# Invalid state json
with self.app.test_client() as client:
with client.session_transaction() as session:
session['google_oauth2_csrf_token'] = 'tokenz'
state = '[{'
response = client.get(
'/oauth2callback?state={0}&code=codez'.format(state))
self.assertEqual(response.status_code, httplib.BAD_REQUEST)
# Missing flow.
with self.app.test_client() as client:
with client.session_transaction() as session:
session['google_oauth2_csrf_token'] = 'tokenz'
state = json.dumps({
'csrf_token': 'tokenz',
'return_url': '/return_url'
})
response = client.get(
'/oauth2callback?state={0}&code=codez'.format(state))
self.assertEqual(response.status_code, httplib.BAD_REQUEST)
def test_no_credentials(self):
with self.app.test_request_context():
self.assertFalse(self.oauth2.has_credentials())
self.assertTrue(self.oauth2.credentials is None)
self.assertTrue(self.oauth2.user_id is None)
self.assertTrue(self.oauth2.email is None)
with self.assertRaises(ValueError):
self.oauth2.http()
self.assertFalse(self.oauth2.storage.get())
self.oauth2.storage.delete()
def test_with_credentials(self):
credentials = self._generate_credentials()
with self.app.test_request_context():
self.oauth2.storage.put(credentials)
self.assertEqual(
self.oauth2.credentials.access_token, credentials.access_token)
self.assertEqual(
self.oauth2.credentials.refresh_token,
credentials.refresh_token)
self.assertEqual(self.oauth2.user_id, '123')
self.assertEqual(self.oauth2.email, '[email protected]')
self.assertTrue(self.oauth2.http())
@mock.patch('oauth2client.client._UTCNOW')
def test_with_expired_credentials(self, utcnow):
utcnow.return_value = datetime.datetime(1990, 5, 29)
credentials = self._generate_credentials()
credentials.token_expiry = datetime.datetime(1990, 5, 28)
# Has a refresh token, so this should be fine.
with self.app.test_request_context():
self.oauth2.storage.put(credentials)
self.assertTrue(self.oauth2.has_credentials())
# Without a refresh token this should return false.
credentials.refresh_token = None
with self.app.test_request_context():
self.oauth2.storage.put(credentials)
self.assertFalse(self.oauth2.has_credentials())
def test_bad_id_token(self):
credentials = self._generate_credentials()
credentials.id_token = {}
with self.app.test_request_context():
self.oauth2.storage.put(credentials)
self.assertTrue(self.oauth2.user_id is None)
self.assertTrue(self.oauth2.email is None)
def test_required(self):
@self.app.route('/protected')
@self.oauth2.required
def index():
return 'Hello'
# No credentials, should redirect
with self.app.test_client() as client:
response = client.get('/protected')
self.assertEqual(response.status_code, httplib.FOUND)
self.assertIn('oauth2authorize', response.headers['Location'])
self.assertIn('protected', response.headers['Location'])
credentials = self._generate_credentials(scopes=self.oauth2.scopes)
# With credentials, should allow
with self.app.test_client() as client:
with client.session_transaction() as session:
session['google_oauth2_credentials'] = credentials.to_json()
response = client.get('/protected')
self.assertEqual(response.status_code, httplib.OK)
self.assertIn('Hello', response.data.decode('utf-8'))
# Expired credentials with refresh token, should allow.
credentials.token_expiry = datetime.datetime(1990, 5, 28)
with mock.patch('oauth2client.client._UTCNOW') as utcnow:
utcnow.return_value = datetime.datetime(1990, 5, 29)
with self.app.test_client() as client:
with client.session_transaction() as session:
session['google_oauth2_credentials'] = (
credentials.to_json())
response = client.get('/protected')
self.assertEqual(response.status_code, httplib.OK)
self.assertIn('Hello', response.data.decode('utf-8'))
# Expired credentials without a refresh token, should redirect.
credentials.refresh_token = None
with mock.patch('oauth2client.client._UTCNOW') as utcnow:
utcnow.return_value = datetime.datetime(1990, 5, 29)
with self.app.test_client() as client:
with client.session_transaction() as session:
session['google_oauth2_credentials'] = (
credentials.to_json())
response = client.get('/protected')
self.assertEqual(response.status_code, httplib.FOUND)
self.assertIn('oauth2authorize', response.headers['Location'])
self.assertIn('protected', response.headers['Location'])
def _create_incremental_auth_app(self):
self.app = flask.Flask(__name__)
self.app.testing = True
self.app.config['SECRET_KEY'] = 'notasecert'
self.oauth2 = flask_util.UserOAuth2(
self.app,
client_id='client_idz',
client_secret='client_secretz',
include_granted_scopes=True)
@self.app.route('/one')
@self.oauth2.required(scopes=['one'])
def one():
return 'Hello'
@self.app.route('/two')
@self.oauth2.required(scopes=['two', 'three'])
def two():
return 'Hello'
def test_incremental_auth(self):
self._create_incremental_auth_app()
# No credentials, should redirect
with self.app.test_client() as client:
response = client.get('/one')
self.assertIn('one', response.headers['Location'])
self.assertEqual(response.status_code, httplib.FOUND)
# Credentials for one. /one should allow, /two should redirect.
credentials = self._generate_credentials(scopes=['email', 'one'])
with self.app.test_client() as client:
with client.session_transaction() as session:
session['google_oauth2_credentials'] = credentials.to_json()
response = client.get('/one')
self.assertEqual(response.status_code, httplib.OK)
response = client.get('/two')
self.assertIn('two', response.headers['Location'])
self.assertEqual(response.status_code, httplib.FOUND)
# Starting the authorization flow should include the
# include_granted_scopes parameter as well as the scopes.
response = client.get(response.headers['Location'][17:])
q = urlparse.parse_qs(
response.headers['Location'].split('?', 1)[1])
self.assertIn('include_granted_scopes', q)
self.assertEqual(
set(q['scope'][0].split(' ')),
set(['one', 'email', 'two', 'three']))
# Actually call two() without a redirect.
credentials2 = self._generate_credentials(
scopes=['email', 'two', 'three'])
with self.app.test_client() as client:
with client.session_transaction() as session:
session['google_oauth2_credentials'] = credentials2.to_json()
response = client.get('/two')
self.assertEqual(response.status_code, httplib.OK)
def test_incremental_auth_exchange(self):
self._create_incremental_auth_app()
with mock.patch('oauth2client.transport.get_http_object') as new_http:
# Set-up mock.
new_http.return_value = http_mock.HttpMock(data=DEFAULT_RESP)
# Run tests.
with self.app.test_client() as client:
state = self._setup_callback_state(
client,
return_url='/return_url',
# Incremental auth scopes.
scopes=['one', 'two'])
response = client.get(
'/oauth2callback?state={0}&code=codez'.format(state))
self.assertEqual(response.status_code, httplib.FOUND)
credentials = self.oauth2.credentials
self.assertTrue(
credentials.has_scopes(['email', 'one', 'two']))
# Check the mocks were called.
new_http.assert_called_once_with()
def test_refresh(self):
token_val = 'new_token'
json_resp = '{"access_token": "%s"}' % (token_val,)
http = http_mock.HttpMock(data=json_resp)
with self.app.test_request_context():
with mock.patch('flask.session'):
self.oauth2.storage.put(self._generate_credentials())
self.oauth2.credentials.refresh(http)
self.assertEqual(
self.oauth2.storage.get().access_token, token_val)
def test_delete(self):
with self.app.test_request_context():
self.oauth2.storage.put(self._generate_credentials())
self.oauth2.storage.delete()
self.assertNotIn('google_oauth2_credentials', flask.session)
| apache-2.0 | 4,814,031,907,938,949,000 | 38.647706 | 79 | 0.588995 | false |
gmontess/Arista-Lab-PC | roles/arista.eos-virtual-router/test/arista-ansible-role-test/test_module.py | 9 | 18112 | # pylint: disable=invalid-name
# pylint: disable=missing-docstring
# pylint: disable=too-few-public-methods
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-locals
import json
import os
import re
import subprocess
import sys
import warnings
import yaml
TESTCASES = list()
INVENTORY = 'test/fixtures/hosts'
HERE = os.path.abspath(os.path.dirname(__file__))
ROLE = re.match(
r'^.*\/ansible-eos-([^/\s]+)\/test/arista-ansible-role-test$', HERE).group(1)
RUN_CONFIG_BACKUP = '_eos_role_test_{}_running'.format(ROLE)
START_CONFIG_BACKUP = '_eos_role_test_{}_startup'.format(ROLE)
EOS_ROLE_PLAYBOOK = 'test/arista-ansible-role-test/eos_role.yml'
EOS_MODULE_PLAYBOOK = 'test/arista-ansible-role-test/eos_module.yml'
LOG_FILE = '{}/roletest.log'.format(HERE)
try:
os.remove(LOG_FILE)
except OSError:
pass
LOG = open(LOG_FILE, 'w')
SEPARATOR = ' ' + '*' * 50
class TestCase(object):
def __init__(self, **kwargs):
self.name = kwargs['name']
self.module = kwargs['module']
self.host = None
self.inventory = kwargs.get('inventory')
self.exitcode = kwargs.get('exitcode', 0)
self.idempotent = kwargs.get('idempotent', True)
self.changed = kwargs.get('changed', True)
self.present = kwargs.get('present')
self.absent = kwargs.get('absent')
self.arguments = kwargs.get('arguments', list())
self.variables = dict()
# optional properties
self.setup = kwargs.get('setup', list())
self.teardown = kwargs.get('teardown', list())
def __str__(self):
return self.name
class TestModule(object):
def __init__(self, testcase):
self.testcase = testcase
self.description = 'Test [%s]: %s' % (testcase.module, testcase.name)
def __call__(self):
self.output('Run first pass')
response = self.run_module()
for device in response:
hostname = device.keys()[0]
reported = int(device[hostname]['changed'])
expected = int(self.testcase.changed)
msg = ("First pass role execution reported {} task change(s), "
"expected {}".format(reported, expected))
self.output(msg)
assert reported == expected, msg
if self.testcase.idempotent:
self.output('Run second pass')
response = self.run_module()
for device in response:
hostname = device.keys()[0]
reported = int(device[hostname]['changed'])
msg = (
"Second pass role execution reported {} task change(s), "
"expected 0".format(reported))
self.output(msg)
assert not reported, msg
if self.testcase.present:
desc = 'Validate present configuration'
self.output(desc)
response = self.run_validation(self.testcase.present, desc=desc)
for device in response:
hostname = device.keys()[0]
# Result should contain an empty list of updates
delim = " ---\n"
updates = device[hostname]['updates']
msg = ("{} - Expected configuration\n{}{}\n{}not found "
"on device '{}'".format(desc, delim,
'\n'.join(updates), delim,
hostname))
assert device[hostname]['updates'] == [], msg
# Result should show no changes
msg = ("{} - Device '{}' reported no updates, but "
"returned 'changed'".format(desc, hostname))
assert device[hostname]['changed'] == False, msg
if self.testcase.absent:
desc = 'Validate absent configuration'
self.output(desc)
response = self.run_validation(self.testcase.absent, desc=desc)
for device in response:
hostname = device.keys()[0]
# Result should show change has taken place
msg = (
"{} - Entire absent configuration found on device '{}'".
format(desc, hostname)
)
assert device[hostname]['changed'] == True, msg
# Compare changes with expected values, sorted at global level
updates = '\n'.join(device[hostname]['updates'])
updates = re.split(r'\n(?=\S)', updates)
updates = '\n'.join(sorted(updates))
# The output from the playbook is sanitized - the phrase
# network-admin in username entries is changed to
# network-********. Replace the asterisks with admin again
# for matching the results.
updates = re.sub("username ([^\n]*) role network-\*{8}",
r'username \1 role network-admin',
updates)
absent = re.split(r'\n(?=\S)', self.testcase.absent.rstrip())
absent = '\n'.join(sorted(absent))
msg = ("{} - Some part of absent configuration found "
"on device '{}'".format(desc, hostname))
assert updates == absent, msg
def setUp(self):
print("\n{}\n".format(SEPARATOR) +
" See run log for complete output:\n {}".format(LOG_FILE) +
"\n{}\n".format(SEPARATOR))
LOG.write("\n\n\n{}\n".format(SEPARATOR) +
" Begin log for {}".format(self.description) +
"\n{}\n\n".format(SEPARATOR))
if self.testcase.setup:
self.output('Running test case setup commands')
setup_cmds = self.testcase.setup
if not isinstance(setup_cmds, list):
setup_cmds = setup_cmds.splitlines()
self.output("{}".format(setup_cmds))
args = {
'module': 'eos_command',
'description': 'Run test case setup commands',
'cmds': ['configure terminal'] + setup_cmds,
}
arguments = [json.dumps(args)]
ret_code, out, err = ansible_playbook(EOS_MODULE_PLAYBOOK,
arguments=arguments)
if ret_code != 0:
LOG.write("Playbook stdout:\n\n{}".format(out))
LOG.write("Playbook stderr:\n\n{}".format(err))
                raise RuntimeError('Error in test case setup')
def tearDown(self):
if self.testcase.teardown:
self.output('Running test case teardown commands')
teardown_cmds = self.testcase.teardown
if not isinstance(teardown_cmds, list):
teardown_cmds = teardown_cmds.splitlines()
self.output("{}\n".format(teardown_cmds))
args = {
'module': 'eos_command',
                'description': 'Run test case teardown commands',
'cmds': ['configure terminal'] + teardown_cmds,
}
arguments = [json.dumps(args)]
ret_code, out, err = ansible_playbook(EOS_MODULE_PLAYBOOK,
arguments=arguments)
if ret_code != 0:
self.output("Playbook stdout:\n\n{}".format(out))
self.output("Playbook stderr:\n\n{}".format(err))
warnings.warn("\nError in test case teardown\n\n{}".format(
out))
@classmethod
def output(cls, text):
print '>>', str(text)
LOG.write('++ {}'.format(text) + '\n')
def run_module(self):
(retcode, out, _) = self.execute_module()
out_stripped = re.sub(r'\"config\": \"! Command:.*\\nend\"',
'\"config\": \"--- stripped for space ---\"',
out)
LOG.write("PLaybook stdout:\n\n{}".format(out_stripped))
msg = "Return code: {}, Expected code: {}".format(retcode, self.testcase.exitcode)
self.output(msg)
assert retcode == self.testcase.exitcode, msg
return self.parse_response(out)
def execute_module(self):
arguments = [json.dumps(self.testcase.arguments)]
arguments.append(json.dumps(
{'rolename': "ansible-eos-{}".format(ROLE)}))
return ansible_playbook(EOS_ROLE_PLAYBOOK, arguments=arguments)
def parse_response(self, output, validate=False):
# Get all the lines after the 'PLAY RECAP ****...' header
lines = re.sub(r'^.*PLAY RECAP \*+', '', output, 0, re.S).split('\n')
# Remove any empty lines from the list
lines = [x for x in lines if x]
recap = []
for line in lines:
match = re.search(r'^(\S+)\s+\:\s+' \
r'ok=(\d+)\s+' \
r'changed=(\d+)\s+' \
r'unreachable=(\d+)\s+' \
r'failed=(\d+)', line)
if not match:
self.output("Playbook stdout:\n\n{}".format(output))
raise ValueError("Unable to parse Ansible output for "
"recap information")
(name, okcount, changed, unreach, failed) = match.groups()
recap.append({name: {'ok': okcount,
'changed': changed,
'unreachable': unreach,
'failed': failed}})
if not validate:
return recap
updates = []
for device in recap:
hostname = device.keys()[0]
match = re.search(r'(?<!skipping: )\[%s\] => (\{.*\})' % hostname,
output, re.M)
if not match:
self.output("Playbook stdout:\n\n{}".format(output))
raise ValueError("Unable to parse Ansible output for "
"result validation")
result = json.loads(match.group(1))
updates.append({hostname: result})
return updates
def run_validation(self, src, desc='Validate configuration'):
args = {'module': 'eos_template', 'description': desc, 'src': src, }
arguments = [json.dumps(args)]
(ret_code, out, _) = ansible_playbook(EOS_MODULE_PLAYBOOK,
arguments=arguments,
options=['--check'])
LOG.write(out)
assert ret_code == 0, "Validation playbook failed execution"
return self.parse_response(out, validate=True)
def filter_modules(modules, filenames):
if modules:
modules = ['{0}.yml'.format(s) for s in modules.split(',')]
return list(set(modules).intersection(filenames))
return filenames
def setup():
print >> sys.stderr, "Test Suite Setup:"
run_backup = " Backing up running-config on nodes ..."
print >> sys.stderr, run_backup
LOG.write('++ ' + run_backup.strip())
args = {
'module': 'eos_command',
'description': 'Back up running-config on node',
'cmds': [
'configure terminal',
'copy running-config {}'.format(RUN_CONFIG_BACKUP)
],
}
arguments = [json.dumps(args)]
ret_code, out, err = ansible_playbook(EOS_MODULE_PLAYBOOK,
arguments=arguments)
if ret_code != 0:
LOG.write(">> ansible-playbook {} stdout:\n{}".format(EOS_MODULE_PLAYBOOK, out))
LOG.write(">> ansible-playbook {} stddrr:\n{}".format(EOS_MODULE_PLAYBOOK, err))
teardown()
raise RuntimeError("Error in Test Suite Setup")
run_backup = " Backing up startup-config on nodes ..."
print >> sys.stderr, run_backup
LOG.write('++ ' + run_backup.strip())
args = {
'module': 'eos_command',
'description': 'Back up startup-config on node',
'cmds': [
'configure terminal',
'copy startup-config {}'.format(START_CONFIG_BACKUP)
],
}
arguments = [json.dumps(args)]
ret_code, out, err = ansible_playbook(EOS_MODULE_PLAYBOOK,
arguments=arguments)
if ret_code != 0:
LOG.write(">> ansible-playbook {} stdout:\n{}".format(EOS_MODULE_PLAYBOOK, out))
LOG.write(">> ansible-playbook {} stddrr:\n{}".format(EOS_MODULE_PLAYBOOK, err))
teardown()
raise RuntimeError("Error in Test Suite Setup")
print >> sys.stderr, " Gathering test cases ..."
modules = os.environ.get('ANSIBLE_ROLE_TEST_CASES')
testcases_home = os.path.join(HERE, 'testcases')
if not os.path.exists(testcases_home):
print >> sys.stderr, "\n ***** Testcase directory not found!! *****"
teardown()
raise RuntimeError("Testcase path '{}' does not exist".format(testcases_home))
filenames = os.listdir(testcases_home)
for module in filter_modules(modules, filenames):
path = os.path.join(testcases_home, module)
definition = yaml.load(open(path))
defaults = definition.get('defaults', {})
testcases = definition.get('testcases', [])
if not testcases:
print >> sys.stderr, ("\n ***** No testcases defined in "
"module {} *****\n".format(module))
else:
            for testcase in testcases:
kwargs = defaults.copy()
kwargs.update(testcase)
TESTCASES.append(TestCase(**kwargs))
print >> sys.stderr, " Setup complete\n"
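# For reference, setup() above expects each file in test/testcases/ to parse
# (via yaml.load) into a mapping with an optional 'defaults' dict and a
# 'testcases' list whose entries supply the TestCase kwargs. A hypothetical
# parsed definition (module name and commands are illustrative only, not a
# file shipped with this repo) would look like:
#
#   definition = {
#       'defaults': {'module': 'eos_vlan', 'changed': 1},
#       'testcases': [
#           {'name': 'create vlan',
#            'arguments': {'vlans': [{'vlanid': '100', 'name': 'test_vlan'}]},
#            'setup': ['no vlan 100'],
#            'absent': 'vlan 100\n'},
#       ],
#   }
#
# Each testcase dict is merged over 'defaults' before TestCase(**kwargs).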
def teardown():
print >> sys.stderr, "\nTest Suite Teardown:"
no_teardown = os.environ.get('NO_ANSIBLE_ROLE_TEST_TEARDOWN')
if no_teardown:
print >> sys.stderr, ("{}\n"
" Skipping test suite teardown due to "
"NO_ANSIBLE_ROLE_TEST_TEARDOWN\n"
" To restore each device to pre-test state "
"execute the following commands\n"
" - configure terminal\n"
" - configure replace {}\n"
" - delete {}\n"
" - copy {} startup-config\n"
" - delete {}\n"
"{}".format(SEPARATOR, RUN_CONFIG_BACKUP,
RUN_CONFIG_BACKUP,
START_CONFIG_BACKUP,
START_CONFIG_BACKUP, SEPARATOR))
else:
# Restore the running-config on the nodes
# ---------------------------------------
restore_backup = " Restoring running-config on nodes ..."
print >> sys.stderr, restore_backup
LOG.write('++ ' + restore_backup.strip())
args = {
'module': 'eos_command',
'description': 'Restore running-config from backup',
'cmds': [
'configure terminal',
'configure replace {}'.format(RUN_CONFIG_BACKUP),
'delete {}'.format(RUN_CONFIG_BACKUP),
],
}
arguments = [json.dumps(args)]
# ret_code, out, err = ansible_playbook(CMD_PLAY, arguments=arguments)
ret_code, out, err = ansible_playbook(EOS_MODULE_PLAYBOOK,
arguments=arguments)
if ret_code != 0:
msg = "Error restoring running-config on nodes\n" \
"Running ansible-playbook {} -e {}\n" \
">> stdout: {}\n" \
">> stderr: {}\n".format(EOS_MODULE_PLAYBOOK, arguments, out, err)
warnings.warn(msg)
# Restore the startup-config on the nodes
# ---------------------------------------
restore_backup = " Restoring startup-config on nodes ..."
print >> sys.stderr, restore_backup
LOG.write('++ ' + restore_backup.strip())
args = {
'module': 'eos_command',
'description': 'Restore startup-config from backup',
'cmds': [
'configure terminal',
'copy {} startup-config'.format(START_CONFIG_BACKUP),
'delete {}'.format(START_CONFIG_BACKUP),
],
}
arguments = [json.dumps(args)]
# ret_code, out, err = ansible_playbook(CMD_PLAY, arguments=arguments)
ret_code, out, err = ansible_playbook(EOS_MODULE_PLAYBOOK,
arguments=arguments)
if ret_code != 0:
msg = "Error restoring startup-config on nodes\n" \
"Running ansible-playbook {} -e {}\n" \
">> stdout: {}\n" \
">> stderr: {}\n".format(EOS_MODULE_PLAYBOOK, arguments, out, err)
warnings.warn(msg)
print >> sys.stderr, " Teardown complete"
def test_module():
for testcase in TESTCASES:
yield TestModule(testcase)
def ansible_playbook(playbook, arguments=None, options=None):
if arguments is None:
arguments = []
if options is None:
options = []
command = ['ansible-playbook']
command.append(playbook)
command.extend(['-i', INVENTORY])
for arg in arguments:
command.extend(['-e', arg])
for opt in options:
command.append(opt)
command.append('-vvv')
# Format the command string for output on error - for easier
# copy/paste for manual run
cmdstr = ''
for segment in command:
if segment[0] == '{':
cmdstr = cmdstr + "\'{}\' ".format(segment)
else:
cmdstr = cmdstr + "{} ".format(segment)
LOG.write("-- Ansible playbook command:\n-- {}\n".format(cmdstr))
stdout = subprocess.PIPE
stderr = subprocess.PIPE
proc = subprocess.Popen(command, stdout=stdout, stderr=stderr)
out, err = proc.communicate()
return (proc.returncode, out, err)
| gpl-2.0 | 7,880,103,738,986,989,000 | 37.53617 | 90 | 0.518993 | false |
K-Carrington/dronekit-python | docs/conf.py | 6 | 7873 | # -*- coding: utf-8 -*-
#
# DroneKit documentation build configuration file, created by
# sphinx-quickstart on Wed Mar 19 15:28:50 2014.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
import sphinx_3dr_theme
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.append(os.path.abspath('..'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.todo', 'sphinx.ext.coverage']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'DroneKit Air: Python'
copyright = u'2015, 3D Robotics'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.5'
# The full version, including alpha/beta/rc tags.
release = '0.5'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_3dr_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = [sphinx_3dr_theme.get_html_theme_path()]
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'DroneKitdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'DroneKit.tex', u'DroneKit Documentation',
u'Kevin Hester', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'dronekit', u'DroneKit Documentation',
[u'Kevin Hester'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'DroneKit', u'DroneKit Documentation',
u'Kevin Hester', 'DroneKit', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
| apache-2.0 | -7,174,526,765,052,429,000 | 31.266393 | 80 | 0.705322 | false |
MalloyPower/parsing-python | front-end/testsuite-python-lib/Python-3.5.0/Lib/importlib/_bootstrap_external.py | 7 | 53413 | """Core implementation of path-based import.
This module is NOT meant to be directly imported! It has been designed such
that it can be bootstrapped into Python as the implementation of import. As
such it requires the injection of specific modules and attributes in order to
work. One should use importlib as the public-facing version of this module.
"""
#
# IMPORTANT: Whenever making changes to this module, be sure to run
# a top-level make in order to get the frozen version of the module
# updated. Not doing so will cause the Makefile to fail for
# all others who don't have a ./python around to freeze the module
# in the early stages of compilation.
#
# See importlib._setup() for what is injected into the global namespace.
# When editing this code be aware that code executed at import time CANNOT
# reference any injected objects! This includes not only global code but also
# anything specified at the class level.
# Bootstrap-related code ######################################################
_CASE_INSENSITIVE_PLATFORMS = 'win', 'cygwin', 'darwin'
def _make_relax_case():
if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):
def _relax_case():
"""True if filenames must be checked case-insensitively."""
return b'PYTHONCASEOK' in _os.environ
else:
def _relax_case():
"""True if filenames must be checked case-insensitively."""
return False
return _relax_case
def _w_long(x):
"""Convert a 32-bit integer to little-endian."""
return (int(x) & 0xFFFFFFFF).to_bytes(4, 'little')
def _r_long(int_bytes):
"""Convert 4 bytes in little-endian to an integer."""
return int.from_bytes(int_bytes, 'little')
def _path_join(*path_parts):
"""Replacement for os.path.join()."""
return path_sep.join([part.rstrip(path_separators)
for part in path_parts if part])
def _path_split(path):
"""Replacement for os.path.split()."""
if len(path_separators) == 1:
front, _, tail = path.rpartition(path_sep)
return front, tail
for x in reversed(path):
if x in path_separators:
front, tail = path.rsplit(x, maxsplit=1)
return front, tail
return '', path
def _path_stat(path):
"""Stat the path.
Made a separate function to make it easier to override in experiments
(e.g. cache stat results).
"""
return _os.stat(path)
def _path_is_mode_type(path, mode):
"""Test whether the path is the specified mode type."""
try:
stat_info = _path_stat(path)
except OSError:
return False
return (stat_info.st_mode & 0o170000) == mode
def _path_isfile(path):
"""Replacement for os.path.isfile."""
return _path_is_mode_type(path, 0o100000)
def _path_isdir(path):
"""Replacement for os.path.isdir."""
if not path:
path = _os.getcwd()
return _path_is_mode_type(path, 0o040000)
def _write_atomic(path, data, mode=0o666):
"""Best-effort function to write data to a path atomically.
Be prepared to handle a FileExistsError if concurrent writing of the
temporary file is attempted."""
# id() is used to generate a pseudo-random filename.
path_tmp = '{}.{}'.format(path, id(path))
fd = _os.open(path_tmp,
_os.O_EXCL | _os.O_CREAT | _os.O_WRONLY, mode & 0o666)
try:
# We first write data to a temporary file, and then use os.replace() to
# perform an atomic rename.
with _io.FileIO(fd, 'wb') as file:
file.write(data)
_os.replace(path_tmp, path)
except OSError:
try:
_os.unlink(path_tmp)
except OSError:
pass
raise
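# The write-to-temporary-then-_os.replace() pattern above is what makes the
# update atomic: a concurrent reader of `path` only ever sees the old file or
# the complete new file, never a partially written one. A rough standalone
# sketch of the same idea with the public os/tempfile modules (illustration
# only; this bootstrap module deliberately uses the injected _os/_io instead):
#
#   import os, tempfile
#
#   def atomic_write(path, data):
#       fd, tmp = tempfile.mkstemp(dir=os.path.dirname(path) or '.')
#       try:
#           with open(fd, 'wb') as file:
#               file.write(data)
#           os.replace(tmp, path)       # atomic rename over the target
#       except OSError:
#           try:
#               os.unlink(tmp)
#           except OSError:
#               pass
#           raise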
_code_type = type(_write_atomic.__code__)
# Finder/loader utility code ###############################################
# Magic word to reject .pyc files generated by other Python versions.
# It should change for each incompatible change to the bytecode.
#
# The value of CR and LF is incorporated so if you ever read or write
# a .pyc file in text mode the magic number will be wrong; also, the
# Apple MPW compiler swaps their values, botching string constants.
#
# The magic numbers must be spaced apart at least 2 values, as the
# -U interpreter flag will cause MAGIC+1 to be used. They have been
# odd numbers for some time now.
#
# There were a variety of old schemes for setting the magic number.
# The current working scheme is to increment the previous value by
# 10.
#
# Starting with the adoption of PEP 3147 in Python 3.2, every bump in magic
# number also includes a new "magic tag", i.e. a human readable string used
# to represent the magic number in __pycache__ directories. When you change
# the magic number, you must also set a new unique magic tag. Generally this
# can be named after the Python major version of the magic number bump, but
# it can really be anything, as long as it's different than anything else
# that's come before. The tags are included in the following table, starting
# with Python 3.2a0.
#
# Known values:
# Python 1.5: 20121
# Python 1.5.1: 20121
# Python 1.5.2: 20121
# Python 1.6: 50428
# Python 2.0: 50823
# Python 2.0.1: 50823
# Python 2.1: 60202
# Python 2.1.1: 60202
# Python 2.1.2: 60202
# Python 2.2: 60717
# Python 2.3a0: 62011
# Python 2.3a0: 62021
# Python 2.3a0: 62011 (!)
# Python 2.4a0: 62041
# Python 2.4a3: 62051
# Python 2.4b1: 62061
# Python 2.5a0: 62071
# Python 2.5a0: 62081 (ast-branch)
# Python 2.5a0: 62091 (with)
# Python 2.5a0: 62092 (changed WITH_CLEANUP opcode)
# Python 2.5b3: 62101 (fix wrong code: for x, in ...)
# Python 2.5b3: 62111 (fix wrong code: x += yield)
# Python 2.5c1: 62121 (fix wrong lnotab with for loops and
# storing constants that should have been removed)
# Python 2.5c2: 62131 (fix wrong code: for x, in ... in listcomp/genexp)
# Python 2.6a0: 62151 (peephole optimizations and STORE_MAP opcode)
# Python 2.6a1: 62161 (WITH_CLEANUP optimization)
# Python 2.7a0: 62171 (optimize list comprehensions/change LIST_APPEND)
# Python 2.7a0: 62181 (optimize conditional branches:
# introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE)
# Python 2.7a0 62191 (introduce SETUP_WITH)
# Python 2.7a0 62201 (introduce BUILD_SET)
# Python 2.7a0 62211 (introduce MAP_ADD and SET_ADD)
# Python 3000: 3000
# 3010 (removed UNARY_CONVERT)
# 3020 (added BUILD_SET)
# 3030 (added keyword-only parameters)
# 3040 (added signature annotations)
# 3050 (print becomes a function)
# 3060 (PEP 3115 metaclass syntax)
# 3061 (string literals become unicode)
# 3071 (PEP 3109 raise changes)
# 3081 (PEP 3137 make __file__ and __name__ unicode)
# 3091 (kill str8 interning)
# 3101 (merge from 2.6a0, see 62151)
# 3103 (__file__ points to source file)
# Python 3.0a4: 3111 (WITH_CLEANUP optimization).
# Python 3.0a5: 3131 (lexical exception stacking, including POP_EXCEPT)
# Python 3.1a0: 3141 (optimize list, set and dict comprehensions:
# change LIST_APPEND and SET_ADD, add MAP_ADD)
# Python 3.1a0: 3151 (optimize conditional branches:
# introduce POP_JUMP_IF_FALSE and POP_JUMP_IF_TRUE)
# Python 3.2a0: 3160 (add SETUP_WITH)
# tag: cpython-32
# Python 3.2a1: 3170 (add DUP_TOP_TWO, remove DUP_TOPX and ROT_FOUR)
# tag: cpython-32
# Python 3.2a2 3180 (add DELETE_DEREF)
# Python 3.3a0 3190 __class__ super closure changed
# Python 3.3a0 3200 (__qualname__ added)
# 3210 (added size modulo 2**32 to the pyc header)
# Python 3.3a1 3220 (changed PEP 380 implementation)
# Python 3.3a4 3230 (revert changes to implicit __class__ closure)
# Python 3.4a1 3250 (evaluate positional default arguments before
# keyword-only defaults)
# Python 3.4a1 3260 (add LOAD_CLASSDEREF; allow locals of class to override
# free vars)
# Python 3.4a1 3270 (various tweaks to the __class__ closure)
# Python 3.4a1 3280 (remove implicit class argument)
# Python 3.4a4 3290 (changes to __qualname__ computation)
# Python 3.4a4 3300 (more changes to __qualname__ computation)
# Python 3.4rc2 3310 (alter __qualname__ computation)
# Python 3.5a0 3320 (matrix multiplication operator)
# Python 3.5b1 3330 (PEP 448: Additional Unpacking Generalizations)
# Python 3.5b2 3340 (fix dictionary display evaluation order #11205)
# Python 3.5b2 3350 (add GET_YIELD_FROM_ITER opcode #24400)
#
# MAGIC must change whenever the bytecode emitted by the compiler may no
# longer be understood by older implementations of the eval loop (usually
# due to the addition of new opcodes).
MAGIC_NUMBER = (3350).to_bytes(2, 'little') + b'\r\n'
_RAW_MAGIC_NUMBER = int.from_bytes(MAGIC_NUMBER, 'little') # For import.c
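# Layout of a PEP 3147 bytecode file as read and written by the helpers above
# (a worked example; the magic bytes shown follow from this file's
# MAGIC_NUMBER definition):
#
#   bytes 0:4    MAGIC_NUMBER           b'\x16\x0d\x0d\x0a' == (3350).to_bytes(2, 'little') + b'\r\n'
#   bytes 4:8    _w_long(source mtime)  32-bit little-endian timestamp
#   bytes 8:12   _w_long(source size)   source size modulo 2**32
#   bytes 12:    marshal.dumps(code)    the marshalled code object
#
# So, for instance, _r_long(data[4:8]) recovers the timestamp that
# SourceLoader.get_code() compares against the source file's mtime.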
_PYCACHE = '__pycache__'
_OPT = 'opt-'
SOURCE_SUFFIXES = ['.py'] # _setup() adds .pyw as needed.
BYTECODE_SUFFIXES = ['.pyc']
# Deprecated.
DEBUG_BYTECODE_SUFFIXES = OPTIMIZED_BYTECODE_SUFFIXES = BYTECODE_SUFFIXES
def cache_from_source(path, debug_override=None, *, optimization=None):
"""Given the path to a .py file, return the path to its .pyc file.
The .py file does not need to exist; this simply returns the path to the
.pyc file calculated as if the .py file were imported.
The 'optimization' parameter controls the presumed optimization level of
the bytecode file. If 'optimization' is not None, the string representation
of the argument is taken and verified to be alphanumeric (else ValueError
is raised).
The debug_override parameter is deprecated. If debug_override is not None,
a True value is the same as setting 'optimization' to the empty string
while a False value is equivalent to setting 'optimization' to '1'.
If sys.implementation.cache_tag is None then NotImplementedError is raised.
"""
if debug_override is not None:
_warnings.warn('the debug_override parameter is deprecated; use '
"'optimization' instead", DeprecationWarning)
if optimization is not None:
message = 'debug_override or optimization must be set to None'
raise TypeError(message)
optimization = '' if debug_override else 1
head, tail = _path_split(path)
base, sep, rest = tail.rpartition('.')
tag = sys.implementation.cache_tag
if tag is None:
raise NotImplementedError('sys.implementation.cache_tag is None')
almost_filename = ''.join([(base if base else rest), sep, tag])
if optimization is None:
if sys.flags.optimize == 0:
optimization = ''
else:
optimization = sys.flags.optimize
optimization = str(optimization)
if optimization != '':
if not optimization.isalnum():
raise ValueError('{!r} is not alphanumeric'.format(optimization))
almost_filename = '{}.{}{}'.format(almost_filename, _OPT, optimization)
return _path_join(head, _PYCACHE, almost_filename + BYTECODE_SUFFIXES[0])
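# Worked example, assuming sys.implementation.cache_tag == 'cpython-35' and
# sys.flags.optimize == 0 (both hypothetical here):
#
#   cache_from_source('/srv/app/mod.py')
#       -> '/srv/app/__pycache__/mod.cpython-35.pyc'
#   cache_from_source('/srv/app/mod.py', optimization=1)
#       -> '/srv/app/__pycache__/mod.cpython-35.opt-1.pyc'
#
# source_from_cache() below inverts the mapping.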
def source_from_cache(path):
"""Given the path to a .pyc. file, return the path to its .py file.
The .pyc file does not need to exist; this simply returns the path to
the .py file calculated to correspond to the .pyc file. If path does
not conform to PEP 3147/488 format, ValueError will be raised. If
sys.implementation.cache_tag is None then NotImplementedError is raised.
"""
if sys.implementation.cache_tag is None:
raise NotImplementedError('sys.implementation.cache_tag is None')
head, pycache_filename = _path_split(path)
head, pycache = _path_split(head)
if pycache != _PYCACHE:
raise ValueError('{} not bottom-level directory in '
'{!r}'.format(_PYCACHE, path))
dot_count = pycache_filename.count('.')
if dot_count not in {2, 3}:
raise ValueError('expected only 2 or 3 dots in '
'{!r}'.format(pycache_filename))
elif dot_count == 3:
optimization = pycache_filename.rsplit('.', 2)[-2]
if not optimization.startswith(_OPT):
raise ValueError("optimization portion of filename does not start "
"with {!r}".format(_OPT))
opt_level = optimization[len(_OPT):]
if not opt_level.isalnum():
raise ValueError("optimization level {!r} is not an alphanumeric "
"value".format(optimization))
base_filename = pycache_filename.partition('.')[0]
return _path_join(head, base_filename + SOURCE_SUFFIXES[0])
def _get_sourcefile(bytecode_path):
"""Convert a bytecode file path to a source path (if possible).
This function exists purely for backwards-compatibility for
PyImport_ExecCodeModuleWithFilenames() in the C API.
"""
if len(bytecode_path) == 0:
return None
rest, _, extension = bytecode_path.rpartition('.')
if not rest or extension.lower()[-3:-1] != 'py':
return bytecode_path
try:
source_path = source_from_cache(bytecode_path)
except (NotImplementedError, ValueError):
source_path = bytecode_path[:-1]
return source_path if _path_isfile(source_path) else bytecode_path
def _get_cached(filename):
if filename.endswith(tuple(SOURCE_SUFFIXES)):
try:
return cache_from_source(filename)
except NotImplementedError:
pass
elif filename.endswith(tuple(BYTECODE_SUFFIXES)):
return filename
else:
return None
def _calc_mode(path):
"""Calculate the mode permissions for a bytecode file."""
try:
mode = _path_stat(path).st_mode
except OSError:
mode = 0o666
# We always ensure write access so we can update cached files
# later even when the source files are read-only on Windows (#6074)
mode |= 0o200
return mode
def _verbose_message(message, *args, verbosity=1):
"""Print the message to stderr if -v/PYTHONVERBOSE is turned on."""
if sys.flags.verbose >= verbosity:
if not message.startswith(('#', 'import ')):
message = '# ' + message
print(message.format(*args), file=sys.stderr)
def _check_name(method):
"""Decorator to verify that the module being requested matches the one the
loader can handle.
The first argument (self) must define _name which the second argument is
compared against. If the comparison fails then ImportError is raised.
"""
def _check_name_wrapper(self, name=None, *args, **kwargs):
if name is None:
name = self.name
elif self.name != name:
raise ImportError('loader for %s cannot handle %s' %
(self.name, name), name=name)
return method(self, name, *args, **kwargs)
try:
_wrap = _bootstrap._wrap
except NameError:
# XXX yuck
def _wrap(new, old):
for replace in ['__module__', '__name__', '__qualname__', '__doc__']:
if hasattr(old, replace):
setattr(new, replace, getattr(old, replace))
new.__dict__.update(old.__dict__)
_wrap(_check_name_wrapper, method)
return _check_name_wrapper
def _find_module_shim(self, fullname):
"""Try to find a loader for the specified module by delegating to
self.find_loader().
This method is deprecated in favor of finder.find_spec().
"""
# Call find_loader(). If it returns a string (indicating this
# is a namespace package portion), generate a warning and
# return None.
loader, portions = self.find_loader(fullname)
if loader is None and len(portions):
msg = 'Not importing directory {}: missing __init__'
_warnings.warn(msg.format(portions[0]), ImportWarning)
return loader
def _validate_bytecode_header(data, source_stats=None, name=None, path=None):
"""Validate the header of the passed-in bytecode against source_stats (if
    given) and return the bytecode that can be compiled by compile().
All other arguments are used to enhance error reporting.
ImportError is raised when the magic number is incorrect or the bytecode is
found to be stale. EOFError is raised when the data is found to be
truncated.
"""
exc_details = {}
if name is not None:
exc_details['name'] = name
else:
# To prevent having to make all messages have a conditional name.
name = '<bytecode>'
if path is not None:
exc_details['path'] = path
magic = data[:4]
raw_timestamp = data[4:8]
raw_size = data[8:12]
if magic != MAGIC_NUMBER:
message = 'bad magic number in {!r}: {!r}'.format(name, magic)
_verbose_message(message)
raise ImportError(message, **exc_details)
elif len(raw_timestamp) != 4:
message = 'reached EOF while reading timestamp in {!r}'.format(name)
_verbose_message(message)
raise EOFError(message)
elif len(raw_size) != 4:
message = 'reached EOF while reading size of source in {!r}'.format(name)
_verbose_message(message)
raise EOFError(message)
if source_stats is not None:
try:
source_mtime = int(source_stats['mtime'])
except KeyError:
pass
else:
if _r_long(raw_timestamp) != source_mtime:
message = 'bytecode is stale for {!r}'.format(name)
_verbose_message(message)
raise ImportError(message, **exc_details)
try:
source_size = source_stats['size'] & 0xFFFFFFFF
except KeyError:
pass
else:
if _r_long(raw_size) != source_size:
raise ImportError('bytecode is stale for {!r}'.format(name),
**exc_details)
return data[12:]
def _compile_bytecode(data, name=None, bytecode_path=None, source_path=None):
"""Compile bytecode as returned by _validate_bytecode_header()."""
code = marshal.loads(data)
if isinstance(code, _code_type):
_verbose_message('code object from {!r}', bytecode_path)
if source_path is not None:
_imp._fix_co_filename(code, source_path)
return code
else:
raise ImportError('Non-code object in {!r}'.format(bytecode_path),
name=name, path=bytecode_path)
def _code_to_bytecode(code, mtime=0, source_size=0):
"""Compile a code object into bytecode for writing out to a byte-compiled
file."""
data = bytearray(MAGIC_NUMBER)
data.extend(_w_long(mtime))
data.extend(_w_long(source_size))
data.extend(marshal.dumps(code))
return data
def decode_source(source_bytes):
"""Decode bytes representing source code and return the string.
Universal newline support is used in the decoding.
"""
import tokenize # To avoid bootstrap issues.
source_bytes_readline = _io.BytesIO(source_bytes).readline
encoding = tokenize.detect_encoding(source_bytes_readline)
newline_decoder = _io.IncrementalNewlineDecoder(None, True)
return newline_decoder.decode(source_bytes.decode(encoding[0]))
# Module specifications #######################################################
_POPULATE = object()
def spec_from_file_location(name, location=None, *, loader=None,
submodule_search_locations=_POPULATE):
"""Return a module spec based on a file location.
To indicate that the module is a package, set
submodule_search_locations to a list of directory paths. An
    empty list is sufficient, though it's not otherwise useful to the
import system.
The loader must take a spec as its only __init__() arg.
"""
if location is None:
# The caller may simply want a partially populated location-
# oriented spec. So we set the location to a bogus value and
# fill in as much as we can.
location = '<unknown>'
if hasattr(loader, 'get_filename'):
# ExecutionLoader
try:
location = loader.get_filename(name)
except ImportError:
pass
# If the location is on the filesystem, but doesn't actually exist,
# we could return None here, indicating that the location is not
# valid. However, we don't have a good way of testing since an
# indirect location (e.g. a zip file or URL) will look like a
# non-existent file relative to the filesystem.
spec = _bootstrap.ModuleSpec(name, loader, origin=location)
spec._set_fileattr = True
# Pick a loader if one wasn't provided.
if loader is None:
for loader_class, suffixes in _get_supported_file_loaders():
if location.endswith(tuple(suffixes)):
loader = loader_class(name, location)
spec.loader = loader
break
else:
return None
# Set submodule_search_paths appropriately.
if submodule_search_locations is _POPULATE:
# Check the loader.
if hasattr(loader, 'is_package'):
try:
is_package = loader.is_package(name)
except ImportError:
pass
else:
if is_package:
spec.submodule_search_locations = []
else:
spec.submodule_search_locations = submodule_search_locations
if spec.submodule_search_locations == []:
if location:
dirname = _path_split(location)[0]
spec.submodule_search_locations.append(dirname)
return spec
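# Sketch of typical use (paths and package layout are hypothetical): a '.py'
# location gets a SourceFileLoader picked from _get_supported_file_loaders(),
# and only an explicit submodule_search_locations (or a loader reporting
# is_package) turns the spec into a package spec:
#
#   spec = spec_from_file_location('mypkg.util', '/src/mypkg/util.py')
#   # spec.origin -> '/src/mypkg/util.py'
#   # spec.submodule_search_locations -> None (plain module)
#
#   pkg = spec_from_file_location('mypkg', '/src/mypkg/__init__.py',
#                                 submodule_search_locations=[])
#   # pkg.submodule_search_locations -> ['/src/mypkg']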
# Loaders #####################################################################
class WindowsRegistryFinder:
"""Meta path finder for modules declared in the Windows registry."""
REGISTRY_KEY = (
'Software\\Python\\PythonCore\\{sys_version}'
'\\Modules\\{fullname}')
REGISTRY_KEY_DEBUG = (
'Software\\Python\\PythonCore\\{sys_version}'
'\\Modules\\{fullname}\\Debug')
DEBUG_BUILD = False # Changed in _setup()
@classmethod
def _open_registry(cls, key):
try:
return _winreg.OpenKey(_winreg.HKEY_CURRENT_USER, key)
except OSError:
return _winreg.OpenKey(_winreg.HKEY_LOCAL_MACHINE, key)
@classmethod
def _search_registry(cls, fullname):
if cls.DEBUG_BUILD:
registry_key = cls.REGISTRY_KEY_DEBUG
else:
registry_key = cls.REGISTRY_KEY
key = registry_key.format(fullname=fullname,
sys_version=sys.version[:3])
try:
with cls._open_registry(key) as hkey:
filepath = _winreg.QueryValue(hkey, '')
except OSError:
return None
return filepath
@classmethod
def find_spec(cls, fullname, path=None, target=None):
filepath = cls._search_registry(fullname)
if filepath is None:
return None
try:
_path_stat(filepath)
except OSError:
return None
for loader, suffixes in _get_supported_file_loaders():
if filepath.endswith(tuple(suffixes)):
spec = _bootstrap.spec_from_loader(fullname,
loader(fullname, filepath),
origin=filepath)
return spec
@classmethod
def find_module(cls, fullname, path=None):
"""Find module named in the registry.
This method is deprecated. Use exec_module() instead.
"""
spec = cls.find_spec(fullname, path)
if spec is not None:
return spec.loader
else:
return None
class _LoaderBasics:
"""Base class of common code needed by both SourceLoader and
SourcelessFileLoader."""
def is_package(self, fullname):
"""Concrete implementation of InspectLoader.is_package by checking if
the path returned by get_filename has a filename of '__init__.py'."""
filename = _path_split(self.get_filename(fullname))[1]
filename_base = filename.rsplit('.', 1)[0]
tail_name = fullname.rpartition('.')[2]
return filename_base == '__init__' and tail_name != '__init__'
def create_module(self, spec):
"""Use default semantics for module creation."""
def exec_module(self, module):
"""Execute the module."""
code = self.get_code(module.__name__)
if code is None:
raise ImportError('cannot load module {!r} when get_code() '
'returns None'.format(module.__name__))
_bootstrap._call_with_frames_removed(exec, code, module.__dict__)
def load_module(self, fullname):
return _bootstrap._load_module_shim(self, fullname)
class SourceLoader(_LoaderBasics):
def path_mtime(self, path):
"""Optional method that returns the modification time (an int) for the
specified path, where path is a str.
Raises IOError when the path cannot be handled.
"""
raise IOError
def path_stats(self, path):
"""Optional method returning a metadata dict for the specified path
to by the path (str).
Possible keys:
- 'mtime' (mandatory) is the numeric timestamp of last source
code modification;
- 'size' (optional) is the size in bytes of the source code.
Implementing this method allows the loader to read bytecode files.
Raises IOError when the path cannot be handled.
"""
return {'mtime': self.path_mtime(path)}
def _cache_bytecode(self, source_path, cache_path, data):
"""Optional method which writes data (bytes) to a file path (a str).
Implementing this method allows for the writing of bytecode files.
The source path is needed in order to correctly transfer permissions
"""
# For backwards compatibility, we delegate to set_data()
return self.set_data(cache_path, data)
def set_data(self, path, data):
"""Optional method which writes data (bytes) to a file path (a str).
Implementing this method allows for the writing of bytecode files.
"""
def get_source(self, fullname):
"""Concrete implementation of InspectLoader.get_source."""
path = self.get_filename(fullname)
try:
source_bytes = self.get_data(path)
except OSError as exc:
raise ImportError('source not available through get_data()',
name=fullname) from exc
return decode_source(source_bytes)
def source_to_code(self, data, path, *, _optimize=-1):
"""Return the code object compiled from source.
The 'data' argument can be any object type that compile() supports.
"""
return _bootstrap._call_with_frames_removed(compile, data, path, 'exec',
dont_inherit=True, optimize=_optimize)
def get_code(self, fullname):
"""Concrete implementation of InspectLoader.get_code.
Reading of bytecode requires path_stats to be implemented. To write
bytecode, set_data must also be implemented.
"""
source_path = self.get_filename(fullname)
source_mtime = None
try:
bytecode_path = cache_from_source(source_path)
except NotImplementedError:
bytecode_path = None
else:
try:
st = self.path_stats(source_path)
except IOError:
pass
else:
source_mtime = int(st['mtime'])
try:
data = self.get_data(bytecode_path)
except OSError:
pass
else:
try:
bytes_data = _validate_bytecode_header(data,
source_stats=st, name=fullname,
path=bytecode_path)
except (ImportError, EOFError):
pass
else:
_verbose_message('{} matches {}', bytecode_path,
source_path)
return _compile_bytecode(bytes_data, name=fullname,
bytecode_path=bytecode_path,
source_path=source_path)
source_bytes = self.get_data(source_path)
code_object = self.source_to_code(source_bytes, source_path)
_verbose_message('code object from {}', source_path)
if (not sys.dont_write_bytecode and bytecode_path is not None and
source_mtime is not None):
data = _code_to_bytecode(code_object, source_mtime,
len(source_bytes))
try:
self._cache_bytecode(source_path, bytecode_path, data)
_verbose_message('wrote {!r}', bytecode_path)
except NotImplementedError:
pass
return code_object
class FileLoader:
"""Base file loader class which implements the loader protocol methods that
require file system usage."""
def __init__(self, fullname, path):
"""Cache the module name and the path to the file found by the
finder."""
self.name = fullname
self.path = path
def __eq__(self, other):
return (self.__class__ == other.__class__ and
self.__dict__ == other.__dict__)
def __hash__(self):
return hash(self.name) ^ hash(self.path)
@_check_name
def load_module(self, fullname):
"""Load a module from a file.
This method is deprecated. Use exec_module() instead.
"""
# The only reason for this method is for the name check.
# Issue #14857: Avoid the zero-argument form of super so the implementation
# of that form can be updated without breaking the frozen module
return super(FileLoader, self).load_module(fullname)
@_check_name
def get_filename(self, fullname):
"""Return the path to the source file as found by the finder."""
return self.path
def get_data(self, path):
"""Return the data from path as raw bytes."""
with _io.FileIO(path, 'r') as file:
return file.read()
class SourceFileLoader(FileLoader, SourceLoader):
"""Concrete implementation of SourceLoader using the file system."""
def path_stats(self, path):
"""Return the metadata for the path."""
st = _path_stat(path)
return {'mtime': st.st_mtime, 'size': st.st_size}
def _cache_bytecode(self, source_path, bytecode_path, data):
# Adapt between the two APIs
mode = _calc_mode(source_path)
return self.set_data(bytecode_path, data, _mode=mode)
def set_data(self, path, data, *, _mode=0o666):
"""Write bytes data to a file."""
parent, filename = _path_split(path)
path_parts = []
# Figure out what directories are missing.
while parent and not _path_isdir(parent):
parent, part = _path_split(parent)
path_parts.append(part)
# Create needed directories.
for part in reversed(path_parts):
parent = _path_join(parent, part)
try:
_os.mkdir(parent)
except FileExistsError:
# Probably another Python process already created the dir.
continue
except OSError as exc:
# Could be a permission error, read-only filesystem: just forget
# about writing the data.
_verbose_message('could not create {!r}: {!r}', parent, exc)
return
try:
_write_atomic(path, data, _mode)
_verbose_message('created {!r}', path)
except OSError as exc:
# Same as above: just don't write the bytecode.
_verbose_message('could not create {!r}: {!r}', path, exc)
class SourcelessFileLoader(FileLoader, _LoaderBasics):
"""Loader which handles sourceless file imports."""
def get_code(self, fullname):
path = self.get_filename(fullname)
data = self.get_data(path)
bytes_data = _validate_bytecode_header(data, name=fullname, path=path)
return _compile_bytecode(bytes_data, name=fullname, bytecode_path=path)
def get_source(self, fullname):
"""Return None as there is no source code."""
return None
# Filled in by _setup().
EXTENSION_SUFFIXES = []
class ExtensionFileLoader(FileLoader, _LoaderBasics):
"""Loader for extension modules.
The constructor is designed to work with FileFinder.
"""
def __init__(self, name, path):
self.name = name
self.path = path
def __eq__(self, other):
return (self.__class__ == other.__class__ and
self.__dict__ == other.__dict__)
def __hash__(self):
return hash(self.name) ^ hash(self.path)
def create_module(self, spec):
"""Create an unitialized extension module"""
module = _bootstrap._call_with_frames_removed(
_imp.create_dynamic, spec)
_verbose_message('extension module {!r} loaded from {!r}',
spec.name, self.path)
return module
def exec_module(self, module):
"""Initialize an extension module"""
_bootstrap._call_with_frames_removed(_imp.exec_dynamic, module)
_verbose_message('extension module {!r} executed from {!r}',
self.name, self.path)
def is_package(self, fullname):
"""Return True if the extension module is a package."""
file_name = _path_split(self.path)[1]
return any(file_name == '__init__' + suffix
for suffix in EXTENSION_SUFFIXES)
def get_code(self, fullname):
"""Return None as an extension module cannot create a code object."""
return None
def get_source(self, fullname):
"""Return None as extension modules have no source code."""
return None
@_check_name
def get_filename(self, fullname):
"""Return the path to the source file as found by the finder."""
return self.path
class _NamespacePath:
"""Represents a namespace package's path. It uses the module name
to find its parent module, and from there it looks up the parent's
__path__. When this changes, the module's own path is recomputed,
using path_finder. For top-level modules, the parent module's path
is sys.path."""
def __init__(self, name, path, path_finder):
self._name = name
self._path = path
self._last_parent_path = tuple(self._get_parent_path())
self._path_finder = path_finder
def _find_parent_path_names(self):
"""Returns a tuple of (parent-module-name, parent-path-attr-name)"""
parent, dot, me = self._name.rpartition('.')
if dot == '':
# This is a top-level module. sys.path contains the parent path.
return 'sys', 'path'
# Not a top-level module. parent-module.__path__ contains the
# parent path.
return parent, '__path__'
def _get_parent_path(self):
parent_module_name, path_attr_name = self._find_parent_path_names()
return getattr(sys.modules[parent_module_name], path_attr_name)
def _recalculate(self):
# If the parent's path has changed, recalculate _path
parent_path = tuple(self._get_parent_path()) # Make a copy
if parent_path != self._last_parent_path:
spec = self._path_finder(self._name, parent_path)
# Note that no changes are made if a loader is returned, but we
# do remember the new parent path
if spec is not None and spec.loader is None:
if spec.submodule_search_locations:
self._path = spec.submodule_search_locations
self._last_parent_path = parent_path # Save the copy
return self._path
def __iter__(self):
return iter(self._recalculate())
def __len__(self):
return len(self._recalculate())
def __repr__(self):
return '_NamespacePath({!r})'.format(self._path)
def __contains__(self, item):
return item in self._recalculate()
def append(self, item):
self._path.append(item)
# We use this exclusively in module_from_spec() for backward-compatibility.
class _NamespaceLoader:
def __init__(self, name, path, path_finder):
self._path = _NamespacePath(name, path, path_finder)
@classmethod
def module_repr(cls, module):
"""Return repr for the module.
The method is deprecated. The import machinery does the job itself.
"""
return '<module {!r} (namespace)>'.format(module.__name__)
def is_package(self, fullname):
return True
def get_source(self, fullname):
return ''
def get_code(self, fullname):
return compile('', '<string>', 'exec', dont_inherit=True)
def create_module(self, spec):
"""Use default semantics for module creation."""
def exec_module(self, module):
pass
def load_module(self, fullname):
"""Load a namespace module.
This method is deprecated. Use exec_module() instead.
"""
# The import system never calls this method.
_verbose_message('namespace module loaded with path {!r}', self._path)
return _bootstrap._load_module_shim(self, fullname)
# Finders #####################################################################
class PathFinder:
"""Meta path finder for sys.path and package __path__ attributes."""
@classmethod
def invalidate_caches(cls):
"""Call the invalidate_caches() method on all path entry finders
stored in sys.path_importer_caches (where implemented)."""
for finder in sys.path_importer_cache.values():
if hasattr(finder, 'invalidate_caches'):
finder.invalidate_caches()
@classmethod
def _path_hooks(cls, path):
"""Search sequence of hooks for a finder for 'path'.
If 'hooks' is false then use sys.path_hooks.
"""
if sys.path_hooks is not None and not sys.path_hooks:
_warnings.warn('sys.path_hooks is empty', ImportWarning)
for hook in sys.path_hooks:
try:
return hook(path)
except ImportError:
continue
else:
return None
@classmethod
def _path_importer_cache(cls, path):
"""Get the finder for the path entry from sys.path_importer_cache.
If the path entry is not in the cache, find the appropriate finder
and cache it. If no finder is available, store None.
"""
if path == '':
try:
path = _os.getcwd()
except FileNotFoundError:
# Don't cache the failure as the cwd can easily change to
# a valid directory later on.
return None
try:
finder = sys.path_importer_cache[path]
except KeyError:
finder = cls._path_hooks(path)
sys.path_importer_cache[path] = finder
return finder
@classmethod
def _legacy_get_spec(cls, fullname, finder):
# This would be a good place for a DeprecationWarning if
# we ended up going that route.
if hasattr(finder, 'find_loader'):
loader, portions = finder.find_loader(fullname)
else:
loader = finder.find_module(fullname)
portions = []
if loader is not None:
return _bootstrap.spec_from_loader(fullname, loader)
spec = _bootstrap.ModuleSpec(fullname, None)
spec.submodule_search_locations = portions
return spec
@classmethod
def _get_spec(cls, fullname, path, target=None):
"""Find the loader or namespace_path for this module/package name."""
# If this ends up being a namespace package, namespace_path is
# the list of paths that will become its __path__
namespace_path = []
for entry in path:
if not isinstance(entry, (str, bytes)):
continue
finder = cls._path_importer_cache(entry)
if finder is not None:
if hasattr(finder, 'find_spec'):
spec = finder.find_spec(fullname, target)
else:
spec = cls._legacy_get_spec(fullname, finder)
if spec is None:
continue
if spec.loader is not None:
return spec
portions = spec.submodule_search_locations
if portions is None:
raise ImportError('spec missing loader')
# This is possibly part of a namespace package.
# Remember these path entries (if any) for when we
# create a namespace package, and continue iterating
# on path.
namespace_path.extend(portions)
else:
spec = _bootstrap.ModuleSpec(fullname, None)
spec.submodule_search_locations = namespace_path
return spec
@classmethod
def find_spec(cls, fullname, path=None, target=None):
"""find the module on sys.path or 'path' based on sys.path_hooks and
sys.path_importer_cache."""
if path is None:
path = sys.path
spec = cls._get_spec(fullname, path, target)
if spec is None:
return None
elif spec.loader is None:
namespace_path = spec.submodule_search_locations
if namespace_path:
# We found at least one namespace path. Return a
# spec which can create the namespace package.
spec.origin = 'namespace'
spec.submodule_search_locations = _NamespacePath(fullname, namespace_path, cls._get_spec)
return spec
else:
return None
else:
return spec
@classmethod
def find_module(cls, fullname, path=None):
"""find the module on sys.path or 'path' based on sys.path_hooks and
sys.path_importer_cache.
This method is deprecated. Use find_spec() instead.
"""
spec = cls.find_spec(fullname, path)
if spec is None:
return None
return spec.loader
class FileFinder:
"""File-based finder.
Interactions with the file system are cached for performance, being
refreshed when the directory the finder is handling has been modified.
"""
def __init__(self, path, *loader_details):
"""Initialize with the path to search on and a variable number of
2-tuples containing the loader and the file suffixes the loader
recognizes."""
loaders = []
for loader, suffixes in loader_details:
loaders.extend((suffix, loader) for suffix in suffixes)
self._loaders = loaders
# Base (directory) path
self.path = path or '.'
self._path_mtime = -1
self._path_cache = set()
self._relaxed_path_cache = set()
def invalidate_caches(self):
"""Invalidate the directory mtime."""
self._path_mtime = -1
find_module = _find_module_shim
def find_loader(self, fullname):
"""Try to find a loader for the specified module, or the namespace
package portions. Returns (loader, list-of-portions).
This method is deprecated. Use find_spec() instead.
"""
spec = self.find_spec(fullname)
if spec is None:
return None, []
return spec.loader, spec.submodule_search_locations or []
def _get_spec(self, loader_class, fullname, path, smsl, target):
loader = loader_class(fullname, path)
return spec_from_file_location(fullname, path, loader=loader,
submodule_search_locations=smsl)
def find_spec(self, fullname, target=None):
"""Try to find a loader for the specified module, or the namespace
package portions. Returns (loader, list-of-portions)."""
is_namespace = False
tail_module = fullname.rpartition('.')[2]
try:
mtime = _path_stat(self.path or _os.getcwd()).st_mtime
except OSError:
mtime = -1
if mtime != self._path_mtime:
self._fill_cache()
self._path_mtime = mtime
# tail_module keeps the original casing, for __file__ and friends
if _relax_case():
cache = self._relaxed_path_cache
cache_module = tail_module.lower()
else:
cache = self._path_cache
cache_module = tail_module
# Check if the module is the name of a directory (and thus a package).
if cache_module in cache:
base_path = _path_join(self.path, tail_module)
for suffix, loader_class in self._loaders:
init_filename = '__init__' + suffix
full_path = _path_join(base_path, init_filename)
if _path_isfile(full_path):
return self._get_spec(loader_class, fullname, full_path, [base_path], target)
else:
# If a namespace package, return the path if we don't
# find a module in the next section.
is_namespace = _path_isdir(base_path)
        # Check whether a file with a proper suffix exists.
for suffix, loader_class in self._loaders:
full_path = _path_join(self.path, tail_module + suffix)
_verbose_message('trying {}'.format(full_path), verbosity=2)
if cache_module + suffix in cache:
if _path_isfile(full_path):
return self._get_spec(loader_class, fullname, full_path, None, target)
if is_namespace:
_verbose_message('possible namespace for {}'.format(base_path))
spec = _bootstrap.ModuleSpec(fullname, None)
spec.submodule_search_locations = [base_path]
return spec
return None
def _fill_cache(self):
"""Fill the cache of potential modules and packages for this directory."""
path = self.path
try:
contents = _os.listdir(path or _os.getcwd())
except (FileNotFoundError, PermissionError, NotADirectoryError):
# Directory has either been removed, turned into a file, or made
# unreadable.
contents = []
# We store two cached versions, to handle runtime changes of the
# PYTHONCASEOK environment variable.
if not sys.platform.startswith('win'):
self._path_cache = set(contents)
else:
# Windows users can import modules with case-insensitive file
# suffixes (for legacy reasons). Make the suffix lowercase here
# so it's done once instead of for every import. This is safe as
# the specified suffixes to check against are always specified in a
# case-sensitive manner.
lower_suffix_contents = set()
for item in contents:
name, dot, suffix = item.partition('.')
if dot:
new_name = '{}.{}'.format(name, suffix.lower())
else:
new_name = name
lower_suffix_contents.add(new_name)
self._path_cache = lower_suffix_contents
if sys.platform.startswith(_CASE_INSENSITIVE_PLATFORMS):
self._relaxed_path_cache = {fn.lower() for fn in contents}
@classmethod
def path_hook(cls, *loader_details):
"""A class method which returns a closure to use on sys.path_hook
which will return an instance using the specified loaders and the path
called on the closure.
If the path called on the closure is not a directory, ImportError is
raised.
"""
def path_hook_for_FileFinder(path):
"""Path hook for importlib.machinery.FileFinder."""
if not _path_isdir(path):
raise ImportError('only directories are supported', path=path)
return cls(path, *loader_details)
return path_hook_for_FileFinder
def __repr__(self):
return 'FileFinder({!r})'.format(self.path)
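# Illustrative sketch (not part of this module): FileFinder is normally hooked
# into the import system through path_hook(), e.g.
#
#     loader_details = [(SourceFileLoader, SOURCE_SUFFIXES),
#                       (SourcelessFileLoader, BYTECODE_SUFFIXES)]
#     sys.path_hooks.append(FileFinder.path_hook(*loader_details))
#
# which is essentially what _install() below does via
# _get_supported_file_loaders().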
# Import setup ###############################################################
def _fix_up_module(ns, name, pathname, cpathname=None):
# This function is used by PyImport_ExecCodeModuleObject().
loader = ns.get('__loader__')
spec = ns.get('__spec__')
if not loader:
if spec:
loader = spec.loader
elif pathname == cpathname:
loader = SourcelessFileLoader(name, pathname)
else:
loader = SourceFileLoader(name, pathname)
if not spec:
spec = spec_from_file_location(name, pathname, loader=loader)
try:
ns['__spec__'] = spec
ns['__loader__'] = loader
ns['__file__'] = pathname
ns['__cached__'] = cpathname
except Exception:
# Not important enough to report.
pass
def _get_supported_file_loaders():
"""Returns a list of file-based module loaders.
Each item is a tuple (loader, suffixes).
"""
extensions = ExtensionFileLoader, _imp.extension_suffixes()
source = SourceFileLoader, SOURCE_SUFFIXES
bytecode = SourcelessFileLoader, BYTECODE_SUFFIXES
return [extensions, source, bytecode]
def _setup(_bootstrap_module):
"""Setup the path-based importers for importlib by importing needed
built-in modules and injecting them into the global namespace.
Other components are extracted from the core bootstrap module.
"""
global sys, _imp, _bootstrap
_bootstrap = _bootstrap_module
sys = _bootstrap.sys
_imp = _bootstrap._imp
# Directly load built-in modules needed during bootstrap.
self_module = sys.modules[__name__]
for builtin_name in ('_io', '_warnings', 'builtins', 'marshal'):
if builtin_name not in sys.modules:
builtin_module = _bootstrap._builtin_from_name(builtin_name)
else:
builtin_module = sys.modules[builtin_name]
setattr(self_module, builtin_name, builtin_module)
# Directly load the os module (needed during bootstrap).
os_details = ('posix', ['/']), ('nt', ['\\', '/'])
for builtin_os, path_separators in os_details:
# Assumption made in _path_join()
assert all(len(sep) == 1 for sep in path_separators)
path_sep = path_separators[0]
if builtin_os in sys.modules:
os_module = sys.modules[builtin_os]
break
else:
try:
os_module = _bootstrap._builtin_from_name(builtin_os)
break
except ImportError:
continue
else:
raise ImportError('importlib requires posix or nt')
setattr(self_module, '_os', os_module)
setattr(self_module, 'path_sep', path_sep)
setattr(self_module, 'path_separators', ''.join(path_separators))
# Directly load the _thread module (needed during bootstrap).
try:
thread_module = _bootstrap._builtin_from_name('_thread')
except ImportError:
# Python was built without threads
thread_module = None
setattr(self_module, '_thread', thread_module)
# Directly load the _weakref module (needed during bootstrap).
weakref_module = _bootstrap._builtin_from_name('_weakref')
setattr(self_module, '_weakref', weakref_module)
# Directly load the winreg module (needed during bootstrap).
if builtin_os == 'nt':
winreg_module = _bootstrap._builtin_from_name('winreg')
setattr(self_module, '_winreg', winreg_module)
# Constants
setattr(self_module, '_relax_case', _make_relax_case())
EXTENSION_SUFFIXES.extend(_imp.extension_suffixes())
if builtin_os == 'nt':
SOURCE_SUFFIXES.append('.pyw')
if '_d.pyd' in EXTENSION_SUFFIXES:
WindowsRegistryFinder.DEBUG_BUILD = True
def _install(_bootstrap_module):
"""Install the path-based import components."""
_setup(_bootstrap_module)
supported_loaders = _get_supported_file_loaders()
sys.path_hooks.extend([FileFinder.path_hook(*supported_loaders)])
if _os.__name__ == 'nt':
sys.meta_path.append(WindowsRegistryFinder)
sys.meta_path.append(PathFinder)
# XXX We expose a couple of classes in _bootstrap for the sake of
# a setuptools bug (https://bitbucket.org/pypa/setuptools/issue/378).
_bootstrap_module.FileFinder = FileFinder
_bootstrap_module.SourceFileLoader = SourceFileLoader
| mit | -5,917,779,256,120,822,000 | 36.456522 | 105 | 0.603617 | false |
jorenver/LP_Proyecto_Python | Menu.py | 1 | 2663 | from Escenario import *
from Observer import *
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from threading import *
from time import sleep
from PyQt4 import QtCore
import sys
import math
from neuroListener import *
from nivel1 import *
from pantallaPerdio import *
class Menu(Escenario,NeuroListener):
def __init__(self,*args):
Escenario.__init__(self,*args)
NeuroListener.__init__(self)
self.setWindowTitle("Ball Neumann")
self.Jugador=Jugador(0,0,0,5)
Bt_nuevoJ=QPushButton("Nuevo Juego",self)
Bt_nuevoJ.setGeometry(360,320,200,65)
Bt_AcercaDe=QPushButton("Acerca De",self)
Bt_AcercaDe.setGeometry(360,450,200,65)
Bt_Instrucciones=QPushButton("Instrucciones",self)
Bt_Instrucciones.setGeometry(360,580,200,65)
self.connect(Bt_nuevoJ, SIGNAL("clicked()"), self.nuevoJuego)
self.connect(Bt_AcercaDe, SIGNAL("clicked()"), self.acercaDe)
self.connect(Bt_Instrucciones, SIGNAL("clicked()"), self.instrucciones)
self.EscenarioActual=None
self.Duelo=Duelo(self)
        self.state=0 # 0 when in a scenario, 1 when in a duel
self.inst=None
#self.setDaemon(True)
def nuevoJuego(self):
print "Nuevo Juego"
self.start()
self.EscenarioActual=EscenarioUno(self.Jugador,self)
self.EscenarioActual.show()
self.close()
def acercaDe(self):
print "Acerca De"
def instrucciones(self):
print "instrucciones"
self.inst=pantallaPerdio()
self.inst.show()
def update(self, Escenario):
self.state=0
if Escenario!=None:
self.EscenarioActual=Escenario
self.EscenarioActual.mover=True
else:
self.stop=True
self.Pperdio=pantallaPerdio()
sleep(0.1)
self.Pperdio.show()
def perder(self):
self.state=0
self.stop=True
self.close()
def update2(self):
self.state=1
self.Duelo.setJugador(self.Jugador)
self.EscenarioActual.mover=False
self.Duelo.mover=False
self.Duelo.comenzar(self.Jugador)
self.Duelo.mover=True
def paintEvent(self, event):
paint = QPainter()
paint.begin(self)
imagen=QImage("fondo","png")
center=QPoint(0,0)
        paint.drawImage(center,imagen) # draw the background image
paint.end()
def derecha(self):
if(self.EscenarioActual!=None and self.state==0):
self.EscenarioActual.derecha()
elif(self.state==1):
self.Duelo.derecha()
def izquierda(self):
if(self.EscenarioActual!=None and self.state==0):
self.EscenarioActual.izquierda()
elif(self.state==1):
self.Duelo.izquierda()
def accion(self):
if(self.EscenarioActual!=None and self.state==0):
self.EscenarioActual.accion()
elif(self.state==1):
self.Duelo.accion()
app = QApplication(sys.argv)
nivel1 = Menu()
nivel1.show()
app.exec_() | unlicense | 1,473,444,614,645,116,200 | 23.897196 | 73 | 0.72024 | false |
wrouesnel/ansible-modules-extras | cloud/amazon/sqs_queue.py | 4 | 7248 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = """
---
module: sqs_queue
short_description: Creates or deletes AWS SQS queues.
description:
- Create or delete AWS SQS queues.
- Update attributes on existing queues.
version_added: "2.0"
author: Alan Loi (@loia)
requirements:
- "boto >= 2.33.0"
options:
state:
description:
- Create or delete the queue
required: false
choices: ['present', 'absent']
default: 'present'
name:
description:
- Name of the queue.
required: true
default_visibility_timeout:
description:
- The default visibility timeout in seconds.
required: false
default: null
message_retention_period:
description:
- The message retention period in seconds.
required: false
default: null
maximum_message_size:
description:
- The maximum message size in bytes.
required: false
default: null
delivery_delay:
description:
- The delivery delay in seconds.
required: false
default: null
receive_message_wait_time:
description:
- The receive message wait time in seconds.
required: false
default: null
region:
description:
- The AWS region to use. If not specified then the value of the EC2_REGION environment variable, if any, is used.
required: false
aliases: ['aws_region', 'ec2_region']
extends_documentation_fragment: aws
"""
EXAMPLES = '''
# Create SQS queue
- sqs_queue:
name: my-queue
region: ap-southeast-2
default_visibility_timeout: 120
message_retention_period: 86400
maximum_message_size: 1024
delivery_delay: 30
receive_message_wait_time: 20
# Delete SQS queue
- sqs_queue:
name: my-queue
region: ap-southeast-2
state: absent
'''
import traceback
try:
import boto.sqs
from boto.exception import BotoServerError, NoAuthHandlerFound
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
def create_or_update_sqs_queue(connection, module):
queue_name = module.params.get('name')
queue_attributes = dict(
default_visibility_timeout=module.params.get('default_visibility_timeout'),
message_retention_period=module.params.get('message_retention_period'),
maximum_message_size=module.params.get('maximum_message_size'),
delivery_delay=module.params.get('delivery_delay'),
receive_message_wait_time=module.params.get('receive_message_wait_time'),
)
result = dict(
region=module.params.get('region'),
name=queue_name,
)
result.update(queue_attributes)
try:
queue = connection.get_queue(queue_name)
if queue:
# Update existing
result['changed'] = update_sqs_queue(queue, check_mode=module.check_mode, **queue_attributes)
else:
# Create new
if not module.check_mode:
queue = connection.create_queue(queue_name)
update_sqs_queue(queue, **queue_attributes)
result['changed'] = True
except BotoServerError:
result['msg'] = 'Failed to create/update sqs queue due to error: ' + traceback.format_exc()
module.fail_json(**result)
else:
module.exit_json(**result)
def update_sqs_queue(queue,
check_mode=False,
default_visibility_timeout=None,
message_retention_period=None,
maximum_message_size=None,
delivery_delay=None,
receive_message_wait_time=None):
changed = False
changed = set_queue_attribute(queue, 'VisibilityTimeout', default_visibility_timeout,
check_mode=check_mode) or changed
changed = set_queue_attribute(queue, 'MessageRetentionPeriod', message_retention_period,
check_mode=check_mode) or changed
changed = set_queue_attribute(queue, 'MaximumMessageSize', maximum_message_size,
check_mode=check_mode) or changed
changed = set_queue_attribute(queue, 'DelaySeconds', delivery_delay,
check_mode=check_mode) or changed
changed = set_queue_attribute(queue, 'ReceiveMessageWaitTimeSeconds', receive_message_wait_time,
check_mode=check_mode) or changed
return changed
def set_queue_attribute(queue, attribute, value, check_mode=False):
if not value:
return False
existing_value = queue.get_attributes(attributes=attribute)[attribute]
if str(value) != existing_value:
if not check_mode:
queue.set_attribute(attribute, value)
return True
return False
def delete_sqs_queue(connection, module):
queue_name = module.params.get('name')
result = dict(
region=module.params.get('region'),
name=queue_name,
)
try:
queue = connection.get_queue(queue_name)
if queue:
if not module.check_mode:
connection.delete_queue(queue)
result['changed'] = True
else:
result['changed'] = False
except BotoServerError:
result['msg'] = 'Failed to delete sqs queue due to error: ' + traceback.format_exc()
module.fail_json(**result)
else:
module.exit_json(**result)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(dict(
state=dict(default='present', choices=['present', 'absent']),
name=dict(required=True, type='str'),
default_visibility_timeout=dict(type='int'),
message_retention_period=dict(type='int'),
maximum_message_size=dict(type='int'),
delivery_delay=dict(type='int'),
receive_message_wait_time=dict(type='int'),
))
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if not region:
module.fail_json(msg='region must be specified')
try:
connection = connect_to_aws(boto.sqs, region, **aws_connect_params)
except (NoAuthHandlerFound, StandardError), e:
module.fail_json(msg=str(e))
state = module.params.get('state')
if state == 'present':
create_or_update_sqs_queue(connection, module)
elif state == 'absent':
delete_sqs_queue(connection, module)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *
main()
| gpl-3.0 | -7,296,166,623,857,389,000 | 29.582278 | 119 | 0.643488 | false |
uwosh/uwosh.bulletin | setup.py | 1 | 1081 | from setuptools import setup, find_packages
import os
version = '0.9.1'
setup(name='uwosh.bulletin',
version=version,
description="Product for creating educational bulletins",
long_description=open("README.txt").read() + "\n" +
open(os.path.join("docs", "HISTORY.txt")).read(),
# Get more strings from http://www.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
"Framework :: Plone",
"Programming Language :: Python",
"Topic :: Software Development :: Libraries :: Python Modules",
],
keywords='',
author='Joshua Klotz',
author_email='[email protected]',
url='http://svn.plone.org/svn/plone/plone.example',
license='GPL',
packages=find_packages(exclude=['ez_setup']),
namespace_packages=['uwosh'],
include_package_data=True,
zip_safe=False,
install_requires=[
'setuptools',
# -*- Extra requirements: -*-
],
entry_points="""
# -*- Entry points: -*-
""",
)
| gpl-2.0 | 8,634,403,981,888,073,000 | 31.757576 | 83 | 0.586494 | false |
adrientetar/defcon | Lib/defcon/objects/imageSet.py | 1 | 18202 | import os
import hashlib
import weakref
from ufoLib import UFOReader, UFOLibError
from defcon.objects.base import BaseObject
from ufoLib.filenames import userNameToFileName
pngSignature = "\x89PNG\r\n\x1a\n"
class ImageSet(BaseObject):
"""
This object manages all images in the font.
**This object posts the following notifications:**
===========================
Name
===========================
ImageSet.Changed
ImageSet.FileNamesChanged
ImageSet.ImageChanged
ImageSet.ImageWillBeAdded
ImageSet.ImageAdded
ImageSet.ImageWillBeDeleted
ImageSet.ImageDeleted
===========================
This object behaves like a dict. For example, to get the
raw image data for a particular image::
image = images["image file name"]
To add an image, do this::
images["image file name"] = rawImageData
When setting an image, the provided file name must be a file
system legal string. This will be checked by comparing the
provided file name to the results of :py:meth:`ImageSet.makeFileName`.
If the two don't match an error will be raised.
Before setting an image, the :py:meth:`ImageSet.findDuplicateImage`
    method should be called. If a file name is returned, the new image
data should not be added. The UFO spec recommends (but doesn't require)
that duplicate images be avoided. This will help with that.
To remove an image from this object, and from the UFO during save,
do this::
del images["image file name"]
"""
changeNotificationName = "ImageSet.Changed"
representationFactories = {}
def __init__(self, font=None):
self._font = None
if font is not None:
self._font = weakref.ref(font)
super(ImageSet, self).__init__()
self.beginSelfNotificationObservation()
self._data = {}
self._scheduledForDeletion = {}
# --------------
# Parent Objects
# --------------
def getParent(self):
return self.font
def _get_font(self):
if self._font is not None:
return self._font()
return None
font = property(_get_font, doc="The :class:`Font` that this object belongs to.")
# ----------
# File Names
# ----------
def _get_fileNames(self):
return list(self._data.keys())
def _set_fileNames(self, fileNames):
assert not self._data
oldValue = list(self._data.keys())
for fileName in fileNames:
self._data[fileName] = _imageDict(onDisk=True)
self.postNotification("ImageSet.FileNamesChanged", data=dict(oldValue=oldValue, newValue=fileNames))
fileNames = property(_get_fileNames, _set_fileNames, doc="A list of all image file names. This should not be set externally.")
def _get_unreferencedFileNames(self):
font = self.font
if font is None:
return []
unreferenced = set(self.fileNames)
for layer in font.layers:
unreferenced -= set(layer.imageReferences.keys())
return list(unreferenced)
unreferencedFileNames = property(_get_unreferencedFileNames, doc="A list of all file names not referenced by a glyph.")
# -------------
# Dict Behavior
# -------------
def __contains__(self, fileName):
return fileName in self._data
def __getitem__(self, fileName):
d = self._data[fileName]
if d["data"] is None:
path = self.font.path
reader = UFOReader(path)
data = reader.readImage(fileName)
d["data"] = data
d["digest"] = _makeDigest(data)
d["onDisk"] = True
d["onDiskModTime"] = reader.getFileModificationTime(os.path.join("images", fileName))
return d["data"]
def __setitem__(self, fileName, data):
if fileName not in self._data:
assert fileName == self.makeFileName(fileName)
assert data.startswith(pngSignature)
isNewImage = fileName not in self._data
onDisk = False
onDiskModTime = None
if fileName in self._scheduledForDeletion:
            # preserve existing stamping
assert fileName not in self._data
self._data[fileName] = self._scheduledForDeletion.pop(fileName)
digest = _makeDigest(data)
if fileName in self._data:
n = self[fileName] # force it to load so that the stamping is correct
if self._data[fileName]["digest"] == digest:
return
onDisk = self._data[fileName]["onDisk"]
onDiskModTime = self._data[fileName]["onDiskModTime"]
del self._data[fileName] # now remove it
if isNewImage:
self.postNotification("ImageSet.ImageWillBeAdded", data=dict(name=fileName))
self._data[fileName] = _imageDict(data=data, dirty=True, digest=digest, onDisk=onDisk, onDiskModTime=onDiskModTime)
if isNewImage:
self.postNotification("ImageSet.ImageAdded", data=dict(name=fileName))
else:
self.postNotification("ImageSet.ImageChanged", data=dict(name=fileName))
self.dirty = True
def __delitem__(self, fileName):
n = self[fileName] # force it to load so that the stamping is correct
self.postNotification("ImageSet.ImageWillBeDeleted", data=dict(name=fileName))
self._scheduledForDeletion[fileName] = dict(self._data.pop(fileName))
self.postNotification("ImageSet.ImageDeleted", data=dict(name=fileName))
self.dirty = True
# ----
# Save
# ----
def getSaveProgressBarTickCount(self, formatVersion):
"""
Get the number of ticks that will be used by a progress bar
in the save method. This method should not be called externally.
Subclasses may override this method to implement custom saving behavior.
"""
return 0
def save(self, writer, removeUnreferencedImages=False, saveAs=False, progressBar=None):
"""
Save images. This method should not be called externally.
Subclasses may override this method to implement custom saving behavior.
"""
if removeUnreferencedImages:
self.disableNotifications()
for fileName in self.unreferencedFileNames:
del self[fileName]
self.enableNotifications()
if saveAs:
font = self.font
if font is not None and font.path is not None and os.path.exists(font.path):
reader = UFOReader(font.path)
readerImageNames = reader.getImageDirectoryListing()
for fileName, data in list(self._data.items()):
if data["data"] is not None or fileName not in readerImageNames:
continue
writer.copyImageFromReader(reader, fileName, fileName)
for fileName in self._scheduledForDeletion:
try:
writer.removeImage(fileName)
except UFOLibError:
# this will be raised if the file doesn't exist.
# instead of trying to maintain a list of in UFO
# vs. in memory, simply fail and move on when
# something can't be deleted because it isn't
# in the UFO.
pass
self._scheduledForDeletion.clear()
reader = UFOReader(writer.path)
for fileName, data in list(self._data.items()):
if not data["dirty"]:
continue
writer.writeImage(fileName, data["data"])
data["dirty"] = False
data["onDisk"] = True
data["onDiskModTime"] = reader.getFileModificationTime(os.path.join("images", fileName))
self.dirty = False
# ---------------
# File Management
# ---------------
def makeFileName(self, fileName):
"""
Make a file system legal version of **fileName**.
"""
if not isinstance(fileName, str):
fileName = str(fileName)
suffix = ""
if fileName.lower().endswith(".png"):
suffix = fileName[-4:]
fileName = fileName[:-4]
existing = set([i.lower() for i in self.fileNames])
return userNameToFileName(fileName, existing, suffix=suffix)
def findDuplicateImage(self, data):
"""
Search the images to see if an image matching
        **data** already exists. If so, the file name
for the existing image will be returned.
"""
digest = _makeDigest(data)
notYetLoaded = []
for fileName, image in list(self._data.items()):
# skip if the image hasn't been loaded
if image["data"] is None:
notYetLoaded.append(fileName)
continue
otherDigest = image["digest"]
if otherDigest == digest:
return fileName
for fileName in notYetLoaded:
d = self[fileName]
image = self._data[fileName]
otherDigest = image["digest"]
if otherDigest == digest:
return fileName
return None
# ---------------------
# External Edit Support
# ---------------------
def testForExternalChanges(self, reader):
"""
Test for external changes. This should not be called externally.
"""
filesOnDisk = reader.getImageDirectoryListing()
modifiedImages = []
addedImages = []
deletedImages = []
for fileName in set(filesOnDisk) - set(self.fileNames):
if not fileName in self._scheduledForDeletion:
addedImages.append(fileName)
elif not self._scheduledForDeletion[fileName]["onDisk"]:
addedImages.append(fileName)
elif self._scheduledForDeletion[fileName]["onDiskModTime"] != reader.getFileModificationTime(os.path.join("images", fileName)):
addedImages.append(fileName)
for fileName, imageData in list(self._data.items()):
# file on disk and has been loaded
if fileName in filesOnDisk and imageData["data"] is not None:
newModTime = reader.getFileModificationTime(os.path.join("images", fileName))
if newModTime != imageData["onDiskModTime"]:
newData = reader.readImage(fileName)
newDigest = _makeDigest(newData)
if newDigest != imageData["digest"]:
modifiedImages.append(fileName)
continue
# file removed
if fileName not in filesOnDisk and imageData["onDisk"]:
deletedImages.append(fileName)
continue
return modifiedImages, addedImages, deletedImages
def reloadImages(self, fileNames):
"""
Reload specified images. This should not be called externally.
"""
for fileName in fileNames:
self._data[fileName] = _imageDict()
image = self[fileName]
# ------------------------
# Notification Observation
# ------------------------
def endSelfNotificationObservation(self):
super(ImageSet, self).endSelfNotificationObservation()
self._font = None
# -----------------------------
# Serialization/Deserialization
# -----------------------------
def getDataForSerialization(self, **kwargs):
simple_get = lambda key: getattr(self, key)
getters = []
for k in self.fileNames:
getters.append((k, simple_get))
return self._serialize(getters, **kwargs)
def setDataFromSerialization(self, data):
self._data = {}
self._scheduledForDeletion = {}
for k in data:
self[k] = data[k]
def _imageDict(data=None, dirty=False, digest=None, onDisk=True, onDiskModTime=None):
return dict(data=data, digest=digest, dirty=dirty, onDisk=onDisk, onDiskModTime=onDiskModTime)
def _makeDigest(data):
m = hashlib.md5()
m.update(data)
return m.digest()
# -----
# Tests
# -----
def _testRead():
"""
>>> from defcon import Font
>>> from defcon.test.testTools import getTestFontPath
>>> path = getTestFontPath()
>>> font = Font(path)
>>> sorted(font.images.fileNames)
['image 1.png', 'image 2.png']
>>> data = font.images["image 1.png"]
>>> p = os.path.join(path, "images", "image 1.png")
>>> f = open(p, "rb")
>>> expected = f.read()
>>> f.close()
>>> data == expected
True
>>> data = font.images["image 2.png"]
>>> p = os.path.join(path, "images", "image 2.png")
>>> f = open(p, "rb")
>>> expected = f.read()
>>> f.close()
>>> data == expected
True
"""
def _testWrite():
"""
>>> from defcon.test.testTools import makeTestFontCopy, tearDownTestFontCopy
>>> from defcon import Font
>>> path = makeTestFontCopy()
>>> font = Font(path)
>>> font.images["image 3.png"] = font.images["image 1.png"]
>>> del font.images["image 1.png"]
>>> font.save()
>>> p = os.path.join(path, "images", "image 1.png")
>>> os.path.exists(p)
False
>>> p = os.path.join(path, "images", "image 2.png")
>>> os.path.exists(p)
True
>>> p = os.path.join(path, "images", "image 3.png")
>>> os.path.exists(p)
True
>>> tearDownTestFontCopy()
"""
def _testSaveAs():
"""
>>> from defcon import Font
>>> from defcon.test.testTools import getTestFontPath, getTestFontCopyPath, tearDownTestFontCopy
>>> path = getTestFontPath()
>>> font = Font(path)
>>> saveAsPath = getTestFontCopyPath(path)
>>> font.save(saveAsPath)
>>> imagesDirectory = os.path.join(saveAsPath, "images")
>>> os.path.exists(imagesDirectory)
True
>>> imagePath = os.path.join(imagesDirectory, "image 1.png")
>>> os.path.exists(imagePath)
True
>>> imagePath = os.path.join(imagesDirectory, "image 2.png")
>>> os.path.exists(imagePath)
True
>>> tearDownTestFontCopy(saveAsPath)
"""
def _testUnreferencedImages():
"""
>>> from defcon import Font
>>> from defcon.test.testTools import getTestFontPath
>>> path = getTestFontPath()
>>> font = Font(path)
>>> font.images.unreferencedFileNames
['image 2.png']
>>> from defcon.test.testTools import makeTestFontCopy, tearDownTestFontCopy
>>> from defcon import Font
>>> path = makeTestFontCopy()
>>> font = Font(path)
>>> font.save(removeUnreferencedImages=True)
>>> p = os.path.join(path, "images", "image 1.png")
>>> os.path.exists(p)
True
>>> p = os.path.join(path, "images", "image 2.png")
>>> os.path.exists(p)
False
>>> tearDownTestFontCopy()
"""
def _testDuplicateImage():
"""
>>> from defcon import Font
>>> from defcon.test.testTools import getTestFontPath
>>> path = getTestFontPath()
>>> font = Font(path)
>>> data = font.images["image 1.png"]
>>> font.images.findDuplicateImage(data)
'image 1.png'
>>> imagePath = os.path.join(path, "images", "image 2.png")
>>> f = open(imagePath, "rb")
>>> data = f.read()
>>> f.close()
>>> font.images.findDuplicateImage(data)
'image 2.png'
"""
def _testExternalChanges():
"""
>>> from ufoLib import UFOReader
>>> from defcon.test.testTools import makeTestFontCopy, tearDownTestFontCopy
>>> from defcon import Font
# remove in memory and scan
>>> path = makeTestFontCopy()
>>> font = Font(path)
>>> del font.images["image 1.png"]
>>> reader = UFOReader(path)
>>> font.images.testForExternalChanges(reader)
([], [], [])
>>> tearDownTestFontCopy()
# add in memory and scan
>>> path = makeTestFontCopy()
>>> font = Font(path)
>>> font.images["image 3.png"] = pngSignature + "blah"
>>> reader = UFOReader(path)
>>> font.images.testForExternalChanges(reader)
([], [], [])
>>> tearDownTestFontCopy()
# modify in memory and scan
>>> path = makeTestFontCopy()
>>> font = Font(path)
>>> font.images["image 1.png"] = pngSignature + "blah"
>>> reader = UFOReader(path)
>>> font.images.testForExternalChanges(reader)
([], [], [])
>>> tearDownTestFontCopy()
# remove on disk and scan
>>> path = makeTestFontCopy()
>>> font = Font(path)
>>> image = font.images["image 1.png"]
>>> os.remove(os.path.join(path, "images", "image 1.png"))
>>> font.images.testForExternalChanges(reader)
([], [], ['image 1.png'])
>>> tearDownTestFontCopy()
# add on disk and scan
>>> import shutil
>>> path = makeTestFontCopy()
>>> font = Font(path)
>>> source = os.path.join(path, "images", "image 1.png")
>>> dest = os.path.join(path, "images", "image 3.png")
>>> shutil.copy(source, dest)
>>> font.images.testForExternalChanges(reader)
([], ['image 3.png'], [])
>>> tearDownTestFontCopy()
# modify on disk and scan
>>> path = makeTestFontCopy()
>>> font = Font(path)
>>> image = font.images["image 1.png"]
>>> imagePath = os.path.join(path, "images", "image 1.png")
>>> f = open(imagePath, "rb")
>>> data = f.read()
>>> f.close()
>>> f = open(imagePath, "wb")
>>> f.write(data + "blah")
>>> f.close()
>>> font.images.testForExternalChanges(reader)
(['image 1.png'], [], [])
>>> tearDownTestFontCopy()
"""
def _testReloadImages():
"""
>>> from ufoLib import UFOReader
>>> from defcon.test.testTools import makeTestFontCopy, tearDownTestFontCopy
>>> from defcon import Font
>>> path = makeTestFontCopy()
>>> font = Font(path)
>>> image = font.images["image 1.png"]
>>> imagePath = os.path.join(path, "images", "image 1.png")
>>> newImageData = pngSignature + "blah"
>>> f = open(imagePath, "wb")
>>> f.write(newImageData)
>>> f.close()
>>> font.images.reloadImages(["image 1.png"])
>>> image = font.images["image 1.png"]
>>> image == newImageData
True
>>> tearDownTestFontCopy()
"""
if __name__ == "__main__":
import doctest
doctest.testmod()
| mit | 5,750,341,940,308,363,000 | 32.895717 | 139 | 0.588452 | false |
WillisXChen/django-oscar | oscar/lib/python2.7/site-packages/behave/compat/importlib.py | 4 | 1609 | # -*- coding: utf-8 -*-
"""
importlib was introduced in python2.7, python3.2...
"""
try:
from importlib import import_module
except ImportError:
"""Backport of importlib.import_module from 3.x."""
# While not critical (and in no way guaranteed!), it would be nice to keep this
# code compatible with Python 2.3.
import sys
def _resolve_name(name, package, level):
"""Return the absolute name of the module to be imported."""
if not hasattr(package, 'rindex'):
raise ValueError("'package' not set to a string")
dot = len(package)
for x in xrange(level, 1, -1):
try:
dot = package.rindex('.', 0, dot)
except ValueError:
raise ValueError("attempted relative import beyond top-level "
"package")
return "%s.%s" % (package[:dot], name)
def import_module(name, package=None):
"""Import a module.
The 'package' argument is required when performing a relative import. It
specifies the package to use as the anchor point from which to resolve the
relative import to an absolute import.
"""
if name.startswith('.'):
if not package:
raise TypeError("relative imports require the 'package' argument")
level = 0
for character in name:
if character != '.':
break
level += 1
name = _resolve_name(name[level:], package, level)
__import__(name)
return sys.modules[name]
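    # Usage note: relative names are resolved against 'package', e.g.
    # import_module('.importlib', package='behave.compat') gives the
    # 'behave.compat.importlib' module, while absolute names such as
    # import_module('os.path') need no package argument.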
| bsd-3-clause | 8,489,660,349,678,390,000 | 33.978261 | 83 | 0.564947 | false |
rsoumyassdi/nuxeo-drive | nuxeo-drive-client/nxdrive/client/remote_filtered_file_system_client.py | 3 | 1642 | '''
Created on 19 May 2014
@author: Remi Cattiau
'''
from nxdrive.client.remote_file_system_client import RemoteFileSystemClient
from nxdrive.client.common import DEFAULT_REPOSITORY_NAME
from nxdrive.logging_config import get_logger
log = get_logger(__name__)
class RemoteFilteredFileSystemClient(RemoteFileSystemClient):
'''
classdocs
'''
def __init__(self, server_url, user_id, device_id, client_version,
dao, proxies=None, proxy_exceptions=None,
password=None, token=None, repository=DEFAULT_REPOSITORY_NAME,
ignored_prefixes=None, ignored_suffixes=None,
timeout=20, blob_timeout=None, cookie_jar=None,
upload_tmp_dir=None, check_suspended=None):
'''
Constructor
'''
super(RemoteFilteredFileSystemClient, self).__init__(
server_url, user_id, device_id,
client_version, proxies, proxy_exceptions,
password, token, repository, ignored_prefixes,
ignored_suffixes, timeout, blob_timeout, cookie_jar,
upload_tmp_dir, check_suspended)
self._dao = dao
def is_filtered(self, path):
return self._dao.is_filter(path)
def get_children_info(self, fs_item_id):
result = super(RemoteFilteredFileSystemClient, self).get_children_info(fs_item_id)
# Need to filter the children result
filtered = []
for item in result:
if not self.is_filtered(item.path):
filtered.append(item)
else:
log.debug("Filtering item %r", item)
return filtered
| lgpl-2.1 | 6,468,345,709,675,984,000 | 33.93617 | 90 | 0.625457 | false |
bhaugen/nova | django_extensions/admin/__init__.py | 16 | 5431 | #
# Autocomplete feature for admin panel
#
# Most of the code has been written by Jannis Leidel and was updated a bit
# for django_extensions.
# http://jannisleidel.com/2008/11/autocomplete-form-widget-foreignkey-model-fields/
#
# to_string_function, Satchmo adaptation and some comments added by emes
# (Michal Salaban)
#
import operator
from django.http import HttpResponse, HttpResponseNotFound
from django.contrib import admin
from django.db import models
from django.db.models.query import QuerySet
from django.utils.encoding import smart_str
from django.utils.translation import ugettext as _
from django.utils.text import get_text_list
from django_extensions.admin.widgets import ForeignKeySearchInput
class ForeignKeyAutocompleteAdmin(admin.ModelAdmin):
"""Admin class for models using the autocomplete feature.
There are two additional fields:
- related_search_fields: defines fields of managed model that
have to be represented by autocomplete input, together with
a list of target model fields that are searched for
input string, e.g.:
related_search_fields = {
'author': ('first_name', 'email'),
}
- related_string_functions: contains optional functions which
take target model instance as only argument and return string
representation. By default __unicode__() method of target
object is used.
"""
related_search_fields = {}
related_string_functions = {}
def __call__(self, request, url):
if url is None:
pass
elif url == 'foreignkey_autocomplete':
return self.foreignkey_autocomplete(request)
return super(ForeignKeyAutocompleteAdmin, self).__call__(request, url)
def foreignkey_autocomplete(self, request):
"""
Searches in the fields of the given related model and returns the
result as a simple string to be used by the jQuery Autocomplete plugin
"""
query = request.GET.get('q', None)
app_label = request.GET.get('app_label', None)
model_name = request.GET.get('model_name', None)
search_fields = request.GET.get('search_fields', None)
object_pk = request.GET.get('object_pk', None)
try:
to_string_function = self.related_string_functions[model_name]
except KeyError:
to_string_function = lambda x: x.__unicode__()
if search_fields and app_label and model_name and (query or object_pk):
def construct_search(field_name):
# use different lookup methods depending on the notation
if field_name.startswith('^'):
return "%s__istartswith" % field_name[1:]
elif field_name.startswith('='):
return "%s__iexact" % field_name[1:]
elif field_name.startswith('@'):
return "%s__search" % field_name[1:]
else:
return "%s__icontains" % field_name
model = models.get_model(app_label, model_name)
queryset = model._default_manager.all()
data = ''
if query:
for bit in query.split():
or_queries = [models.Q(**{construct_search(
smart_str(field_name)): smart_str(bit)})
for field_name in search_fields.split(',')]
other_qs = QuerySet(model)
other_qs.dup_select_related(queryset)
other_qs = other_qs.filter(reduce(operator.or_, or_queries))
queryset = queryset & other_qs
data = ''.join([u'%s|%s\n' % (
to_string_function(f), f.pk) for f in queryset])
elif object_pk:
try:
obj = queryset.get(pk=object_pk)
except:
pass
else:
data = to_string_function(obj)
return HttpResponse(data)
return HttpResponseNotFound()
def get_help_text(self, field_name, model_name):
searchable_fields = self.related_search_fields.get(field_name, None)
if searchable_fields:
help_kwargs = {
'model_name': model_name,
'field_list': get_text_list(searchable_fields, _('and')),
}
return _('Use the left field to do %(model_name)s lookups in the fields %(field_list)s.') % help_kwargs
return ''
def formfield_for_dbfield(self, db_field, **kwargs):
"""
Overrides the default widget for Foreignkey fields if they are
specified in the related_search_fields class attribute.
"""
if (isinstance(db_field, models.ForeignKey) and
db_field.name in self.related_search_fields):
model_name = db_field.rel.to._meta.object_name
help_text = self.get_help_text(db_field.name, model_name)
if kwargs.get('help_text'):
help_text = u'%s %s' % (kwargs['help_text'], help_text)
kwargs['widget'] = ForeignKeySearchInput(db_field.rel,
self.related_search_fields[db_field.name])
kwargs['help_text'] = help_text
return super(ForeignKeyAutocompleteAdmin,
self).formfield_for_dbfield(db_field, **kwargs)
| mit | 4,489,243,927,069,710,300 | 42.448 | 115 | 0.590867 | false |
dneg/cortex | test/IECore/PrimitiveImplicitSurfaceFunction.py | 12 | 2275 | ##########################################################################
#
# Copyright (c) 2008, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import math
import unittest
from IECore import *
class TestPrimitiveImplicitSurfaceFunction( unittest.TestCase ) :
def test( self ) :
# Poly sphere of radius 1
m = Reader.create( "test/IECore/data/cobFiles/pSphereShape1.cob" ).read()
a = PrimitiveImplicitSurfaceFunction( m )
v = a.getValue( V3f(10,0,0) )
self.assert_( math.fabs( v - 9 ) < 0.5 )
v = a.getValue( V3f(0,0,0) )
self.assert_( math.fabs( v ) - 1 < 0.5 )
if __name__ == "__main__":
unittest.main()
| bsd-3-clause | 4,072,947,904,866,268,700 | 37.559322 | 76 | 0.681319 | false |
deepmind/sonnet | sonnet/src/leaky_clip_by_value.py | 1 | 2331 | # Copyright 2019 The Sonnet Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Clipping operation with customized gradients."""
from typing import Optional
import tensorflow as tf
@tf.custom_gradient
def leaky_clip_by_value(t: tf.Tensor,
clip_value_min: tf.Tensor,
clip_value_max: tf.Tensor,
name: Optional[str] = None):
"""Clips tensor values to a specified min and max.
The gradient is set to zero when tensor values are already out of bound and
gradient-descent will push them even further away from the valid range. If
gradient-descent pushes the values towards the valid range, the gradient will
pass through without change.
Note that this is assuming a gradient flow for minimization. For
maximization, flip the gradient before it back-propagates to this op.
Args:
t: A Tensor.
clip_value_min: A 0-D (scalar) Tensor, or a Tensor with the same shape as t.
The minimum value to clip by.
clip_value_max: A 0-D (scalar) Tensor, or a Tensor with the same shape as t.
The maximum value to clip by.
name: A name for the operation (optional).
Returns:
A clipped Tensor.
Raises:
ValueError: If the clip tensors would trigger array broadcasting that would
make the returned tensor larger than the input.
"""
clip_t = tf.clip_by_value(t, clip_value_min, clip_value_max, name=name)
def grad(dy):
"""Custom gradient."""
zeros = tf.zeros_like(dy)
condition = tf.logical_or(
tf.logical_and(t < clip_value_min, dy > 0),
tf.logical_and(t > clip_value_max, dy < 0),
)
dy = tf.where(condition, zeros, dy)
return dy, None, None
return clip_t, grad
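# A minimal, self-contained usage sketch (illustrative only; not part of the
# original module, and the values below are arbitrary):
if __name__ == "__main__":
  x = tf.constant([-1.5, 0.0, 1.5])
  with tf.GradientTape() as tape:
    tape.watch(x)
    y = leaky_clip_by_value(x, -1.0, 1.0)
    loss = tf.reduce_sum(y)
  # With this loss the incoming gradient dy is positive everywhere, so the
  # entry below the minimum gets a zero gradient (descent would push it
  # further out of range), while the entry above the maximum keeps its
  # gradient (descent pulls it back towards the valid range).
  print(tape.gradient(loss, x))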
| apache-2.0 | -9,024,447,185,718,091,000 | 36 | 80 | 0.670099 | false |
dilawar/moose-full | moose-examples/paper-2015/Fig6_NetMultiscale/Fig6A.py | 2 | 20360 | #!/usr/bin/env python
#/**********************************************************************
#** This program is part of 'MOOSE', the
#** Messaging Object Oriented Simulation Environment.
#** Copyright (C) 2003-2014 Upinder S. Bhalla. and NCBS
#** It is made available under the terms of the
#** GNU Lesser General Public License version 2.1
#** See the file COPYING.LIB for the full notice.
#**********************************************************************/
'''
This LIF network with Ca plasticity is based on:
David Higgins, Michael Graupner, Nicolas Brunel
Memory Maintenance in Synapses with Calcium-Based
Plasticity in the Presence of Background Activity
PLOS Computational Biology, 2014.
Implemented by: Aditya Gilra, NCBS, Bangalore, October, 2014.
This variant has 2500 LIF neurons
Upi Bhalla, Nov 2014: Appended single neuron model.
This script generates Panel A from Figure 6. It is a dummy for showing
the layout, and loads in about 20 seconds.
'''
## import modules and functions to be used
import numpy as np
import matplotlib.pyplot as plt
import random
import time
import moose
from PyQt4 import Qt, QtCore, QtGui
from numpy import random as nprand
from moose.neuroml.NeuroML import NeuroML
import sys
sys.path.append( "/home/bhalla/moose/trunk/Demos/util" )
import rdesigneur as rd
import moogli
cellname = "./cells_channels/CA1_nochans.morph.xml"
#cellname = "./ca1_minimal.p"
fname = "fig6bcde"
#############################################
np.random.seed(100) # set seed for reproducibility of simulations
random.seed(100) # set seed for reproducibility of simulations
moose.seed(100) # set seed for reproducibility of simulations
#############################################
# All parameters as per:
# David Higgins, Michael Graupner, Nicolas Brunel
# Memory Maintenance in Synapses with Calcium-Based
# Plasticity in the Presence of Background Activity
# PLOS Computational Biology, 2014.
#############################################
#############################################
# Neuron model
#############################################
# equation: dv/dt = (1/taum)*(-(v-el)) + inp
# with spike when v>vt, reset to vr
PI = 3.14159265358979
useGssa = True
combineSegments = False
el = -70e-3 #V # Resting potential
vt = -50e-3 #V # Spiking threshold
Rm = 20e6 #Ohm # Only taum is needed, but LIF neuron accepts
Cm = 1e-9 #F # Rm and Cm and constructs taum=Rm*Cm
taum = Rm*Cm #s # Membrane time constant is 20 ms
vr = -60e-3 #V # Reset potential
Iinject = 10e-3/Rm # constant current injection into LIF neuron
# same as setting el=-70+15=-55 mV and inp=0
noiseInj = True # inject noisy current into each cell: boolean
noiseInjSD = 5e-3/Rm #A # SD of noise added to 'current'
# SD*sqrt(taum) is used as noise current SD
#############################################
# Network parameters: numbers
#############################################
N = 2500 # Total number of neurons
fexc = 0.8 # Fraction of exc neurons
NE = int(fexc*N) # Number of excitatory cells
NI = N-NE # Number of inhibitory cells
#############################################
# Simulation parameters
#############################################
simtime = 30 #s # Simulation time
interTetInterval = 5.0 # sec
updateDt = 0.2 #s: time to update live display
dt = 1e-3 #s # time step
#############################################
# Network parameters: synapses (not for ExcInhNetBase)
#############################################
## With each presynaptic spike in exc / inh neuron,
## J / -g*J is added to post-synaptic Vm -- delta-fn synapse
## Since LIF neuron used below is derived from Compartment class,
## conductance-based synapses (SynChan class) can also be used.
C = 100 # Number of incoming connections on each neuron (exc or inh)
fC = fexc # fraction fC incoming connections are exc, rest inhibitory
J = 0.2e-3 #V # exc strength is J (in V as we add to voltage)
# Critical J is ~ 0.45e-3 V in paper for N = 10000, C = 1000
# See what happens for J = 0.2e-3 V versus J = 0.8e-3 V
g = 4.0 # -gJ is the inh strength. For exc-inh balance g >~ f(1-f)=4
syndelay = dt # synaptic delay:
refrT = 0.0 # s # absolute refractory time
#############################################
# Ca Plasticity parameters: synapses (not for ExcInhNetBase)
#############################################
CaPlasticity = True # set it True or False to turn on/off plasticity
tauCa = 22.6936e-3 # s # Ca decay time scale
tauSyn = 346.3615 # s # synaptic plasticity time scale
## in vitro values in Higgins et al 2014, faster plasticity
CaPre = 0.56175 # mM
CaPost = 1.2964 # mM
## in vivo values in Higgins et al 2014, slower plasticity
#CaPre = 0.33705 # mM
#CaPost = 0.74378 # mM
delayD = 4.6098e-3 # s # CaPre is added to Ca after this delay
# proxy for rise-time of NMDA
thetaD = 1.0 # mM # depression threshold for Ca
thetaP = 1.3 # mM # potentiation threshold for Ca
gammaD = 331.909 # factor for depression term
gammaP = 725.085 # factor for potentiation term
eqWeight = 0.5 # initial synaptic weight
# gammaP/(gammaP+gammaD) = eq weight w/o noise
# but see eqn (22), noiseSD also appears
bistable = True # if bistable is True, use bistable potential for weights
noisy = True # use noisy weight updates given by noiseSD
noiseSD = 3.3501 # if noisy, use noiseSD (3.3501 from Higgins et al 2014)
#noiseSD = 0.1 # if bistable==False, use a smaller noise than in Higgins et al 2014
#############################################
# Here we set up a single neuron to fit in this network
#############################################
diffDt = 0.005
chemDt = 0.005
ePlotDt = 0.5e-3
cPlotDt = 0.005
#############################################
def buildRdesigneur():
##################################################################
# Here we define which prototypes are to be loaded in to the system.
# Each specification has the format
# source [localName]
# source can be any of
# filename.extension, # Identify type of file by extension, load it.
# function(), # func( name ) builds object of specified name
# file.py:function() , # load Python file, run function(name) in it.
# moose.Classname # Make obj moose.Classname, assign to name.
# path # Already loaded into library or on path.
# After loading the prototypes, there should be an object called 'name'
# in the library.
##################################################################
cellProto = [ [cellname, 'elec'] ]
chanProto = [
['./cells_channels/hd.xml'], \
['./cells_channels/kap.xml'], \
['./cells_channels/kad.xml'], \
['./cells_channels/kdr.xml'], \
['./cells_channels/na3.xml'], \
['./cells_channels/nax.xml'], \
['./cells_channels/CaConc.xml'], \
['./cells_channels/Ca.xml'], \
['./cells_channels/NMDA.xml'], \
['./cells_channels/Glu.xml'], \
['./cells_channels/GABA.xml'] \
]
spineProto = [ \
['makeSpineProto()', 'spine' ]
]
##################################################################
# Here we define what goes where, and any parameters. Each distribution
# has the format
# protoName, path, field, expr, [field, expr]...
# where
# protoName identifies the prototype to be placed on the cell
# path is a MOOSE wildcard path specifying where to put things
# field is the field to assign.
# expr is a math expression to define field value. This uses the
# muParser. Built-in variables are p, g, L, len, dia.
# The muParser provides most math functions, and the Heaviside
# function H(x) = 1 for x > 0 is also provided.
##################################################################
passiveDistrib = [
[ ".", "#", "RM", "2.8", "CM", "0.01", "RA", "1.5", \
"Em", "-58e-3", "initVm", "-65e-3" ], \
[ ".", "#axon#", "RA", "0.5" ] \
]
chanDistrib = [ \
["hd", "#dend#,#apical#,#user#", "Gbar", "5e-2*(1+(p*3e4))" ], \
["kdr", "#", "Gbar", "100" ], \
["na3", "#soma#,#dend#,#apical#,#user#", "Gbar", "250" ], \
["nax", "#axon#", "Gbar", "1250" ], \
["nax", "#soma#", "Gbar", "100" ], \
["kap", "#axon#,#soma#", "Gbar", "300" ], \
["kap", "#dend#,#apical#,#user#", "Gbar", \
"300*(H(100-p*1e6)) * (1+(p*1e4))" ], \
["Ca_conc", "#soma#,#dend#,#apical#,#user#", "tau", "0.0133" ], \
["kad", "#dend#,#apical#,#user#", "Gbar", \
"300*H(p*1e6-100)*(1+p*1e4)" ], \
["Ca", "#soma#", "Gbar", "10e-3" ], \
["Ca", "#dend#,#apical#,#user#", "Gbar", "50e-3" ], \
["GABA", "#dend#,#apical#,#user#", "Gbar", "100*H(250e-6 - p)" ], \
]
spineDistrib = [ \
["spine", '#apical#,#dend#,#user#', "spineSpacing", "6.2e-6", \
"spineSpacingDistrib", "1e-6", \
"angle", "0", \
"angleDistrib", str( 2*PI ), \
"size", "1", \
"sizeDistrib", "0.5" ] \
]
######################################################################
# Having defined everything, now to create the rdesigneur and proceed
# with creating the model.
######################################################################
rdes = rd.rdesigneur(
useGssa = useGssa, \
combineSegments = combineSegments, \
stealCellFromLibrary = True, \
passiveDistrib = passiveDistrib, \
spineDistrib = spineDistrib, \
chanDistrib = chanDistrib, \
cellProto = cellProto, \
spineProto = spineProto, \
chanProto = chanProto, \
)
return rdes
#############################################
def makeDetailedNeuron():
rdes = buildRdesigneur()
rdes.buildModel( '/model' )
#bcs.addAllPlots()
#############################################
# Exc-Inh network base class without connections
#############################################
class ExcInhNetBase:
"""Simulates and plots LIF neurons (exc and inh separate).
Author: Aditya Gilra, NCBS, Bangalore, India, October 2014
"""
def __init__(self,N=N,fexc=fexc,el=el,vt=vt,Rm=Rm,Cm=Cm,vr=vr,\
refrT=refrT,Iinject=Iinject):
""" Constructor of the class """
self.N = N # Total number of neurons
self.fexc = fexc # Fraction of exc neurons
self.NmaxExc = int(fexc*N) # max idx of exc neurons, rest inh
self.el = el # Resting potential
self.vt = vt # Spiking threshold
self.taum = taum # Membrane time constant
self.vr = vr # Reset potential
self.refrT = refrT # Absolute refractory period
self.Rm = Rm # Membrane resistance
self.Cm = Cm # Membrane capacitance
self.Iinject = Iinject # constant input current
self.noiseInjSD = noiseInjSD # SD of injected noise
self.simif = False # whether the simulation is complete
self._setup_network()
def __str__(self):
return "LIF network of %d neurons "\
"having %d exc." % (self.N,self.NmaxExc)
def _setup_network(self):
"""Sets up the network (_init_network is enough)"""
self.network = moose.LIF( 'network', self.N );
moose.le( '/network' )
self.network.vec.Em = self.el
self.network.vec.thresh = self.vt
self.network.vec.refractoryPeriod = self.refrT
self.network.vec.Rm = self.Rm
self.network.vec.vReset = self.vr
self.network.vec.Cm = self.Cm
if not noiseInj:
self.network.vec.inject = self.Iinject
else:
## inject a constant + noisy current
## values are set in self.simulate()
self.noiseTables = moose.StimulusTable('noiseTables',self.N)
moose.connect( self.noiseTables, 'output', \
self.network, 'setInject', 'OneToOne')
def _init_network(self,v0=el):
"""Initialises the network variables before simulation"""
self.network.vec.initVm = v0
def _init_plots(self):
## make a few tables to store a few Vm-s
numVms = 10
self.plots = moose.Table( '/plotVms', numVms )
## draw numVms out of N neurons
nrnIdxs = random.sample(range(self.N),numVms)
for i in range( numVms ):
moose.connect( self.network.vec[nrnIdxs[i]], 'VmOut', \
self.plots.vec[i], 'input')
## make self.N tables to store spikes of all neurons
self.spikes = moose.Table( '/plotSpikes', self.N )
moose.connect( self.network, 'spikeOut', \
self.spikes, 'input', 'OneToOne' )
## make 2 tables to store spikes of all exc and all inh neurons
self.spikesExc = moose.Table( '/plotSpikesAllExc' )
for i in range(self.NmaxExc):
moose.connect( self.network.vec[i], 'spikeOut', \
self.spikesExc, 'input' )
self.spikesInh = moose.Table( '/plotSpikesAllInh' )
for i in range(self.NmaxExc,self.N):
moose.connect( self.network.vec[i], 'spikeOut', \
self.spikesInh, 'input' )
def _plot(self, fig):
""" plots the spike raster for the simulated net"""
plt.figure(1)
ax = plt.subplot(221)
cleanAx( ax, 'B' )
plt.ylabel( 'Neuron #', fontsize = 16 )
for i in range(0,self.NmaxExc):
if i==0: label = 'Exc. spike trains'
else: label = ''
spikes = self.spikes.vec[i].vector
ax.plot(spikes,[i]*len(spikes),\
'b.',marker='.', markersize = 2, label=label)
for i in range(self.NmaxExc,self.N):
if i==self.NmaxExc: label = 'Inh. spike trains'
else: label = ''
spikes = self.spikes.vec[i].vector
ax.plot(spikes,[i]*len(spikes),\
'r.',marker='.', markersize = 2, label=label)
#############################################
# Exc-Inh network class with Ca plasticity based connections
# (inherits from ExcInhNetBase)
#############################################
class ExcInhNet(ExcInhNetBase):
""" Recurrent network simulation """
def __init__(self,J=J,incC=C,fC=fC,scaleI=g,syndelay=syndelay,**kwargs):
"""Overloads base (parent) class"""
self.J = J # exc connection weight
self.incC = incC # number of incoming connections per neuron
self.fC = fC # fraction of exc incoming connections
self.excC = int(fC*incC)# number of exc incoming connections
self.scaleI = scaleI # inh weight is scaleI*J
self.syndelay = syndelay# synaptic delay
# call the parent class constructor
ExcInhNetBase.__init__(self,**kwargs)
def __str__(self):
return "LIF network of %d neurons "\
"of which %d are exc." % (self.N,self.NmaxExc)
def _init_network(self,**args):
ExcInhNetBase._init_network(self,**args)
def _init_plots(self):
ExcInhNetBase._init_plots(self)
def _setup_network(self):
## Set up the neurons without connections
ExcInhNetBase._setup_network(self)
## Now, add in the connections...
## Each pre-synaptic spike cause Vm of post-neuron to rise by
## synaptic weight in one time step i.e. delta-fn synapse.
## Since LIF neuron is derived from Compartment class,
## conductance-based synapses (SynChan class) can also be used.
## E to E synapses can be plastic
## Two ways to do this:
## 1) Each LIF neuron has one incoming postsynaptic SynHandler,
## which collects the activation from all presynaptic neurons,
## but then a common Ca pool is used.
        ## 2) Each LIF neuron has multiple postsynaptic SynHandlers,
## one for each pre-synaptic neuron, i.e. one per synapse,
## then each synapse has a different Ca pool.
## Here we go with option 2) as per Higgins et al 2014 (Brunel private email)
## separate SynHandler per EE synapse, thus NmaxExc*excC
if CaPlasticity:
self.synsEE = moose.GraupnerBrunel2012CaPlasticitySynHandler( \
'/network/synsEE', self.NmaxExc*self.excC )
else:
self.synsEE = moose.SimpleSynHandler( \
'/network/synsEE', self.NmaxExc*self.excC )
moose.useClock( 0, '/network/synsEE', 'process' )
## I to E synapses are not plastic
self.synsIE = moose.SimpleSynHandler( '/network/synsIE', self.NmaxExc )
## all synapses to I neurons are not plastic
self.synsI = moose.SimpleSynHandler( '/network/synsI', self.N-self.NmaxExc )
## connect all SynHandlers to their respective neurons
moose.useClock( 0, '/network/synsIE', 'process' )
moose.useClock( 0, '/network/synsI', 'process' )
#############################################
# Make plots
#############################################
def interlude( view ):
view.yaw( 0.005 )
def create_viewer(rdes):
# print "Creating 3D Viewer"
network = moogli.extensions.moose.read(path=rdes.elecid.path,
vertices=10)
# print "Read Network"
network.set("color", moogli.colors.LIGHT_BLUE)
network.groups["spine"].set("color", moogli.colors.ORANGE)
# for dendrite in dendrites.values():
# dendrite.set_colors(moogli.core.Vec4f(173 / 255.0, 216 / 255.0, 230 / 255.0, 1.0))
[shape.set_radius(shape.get_apex_radius() * 4.0) for shape in
network.groups["spine"].groups["head"].shapes.values()]
# print "Creating LIFS"
soma = network.shapes[rdes.soma.path]
center = soma.get_center()
row_axis = moogli.geometry.X_AXIS
row_count = 50
row_separation = soma.get_base_radius() * 5.0
col_axis = moogli.geometry.Z_AXIS
col_count = 50
col_separation = row_separation
radii = soma.get_base_radius()
colors = moogli.colors.GREEN
vertices = 20
lifs = moogli.shapes.Sphere.grid("LIF",
center,
row_axis,
row_count,
row_separation,
col_axis,
col_count,
col_separation,
radii,
colors,
vertices)
# print "Created LIFS"
# morphology.create_group("dendrites", dendrites, 0.0, 300.0, colormap)
# print "Creating Viewer"
viewer = moogli.Viewer("viewer") # prelude = prelude, interlude = interlude)
# print "Created Viewer"
viewer.attach_shapes(network.shapes.values())
viewer.attach_shapes(lifs.shapes.values())
# print "Attached Shapes"
view = moogli.View("view", interlude=interlude)
viewer.attach_view(view)
# print "Attached View"
viewer.showMaximized()
viewer.start()
view.zoom( 0.4 )
view.pitch( PI/2.5 )
return viewer
if __name__=='__main__':
## ExcInhNetBase has unconnected neurons,
## ExcInhNet connects them
## Instantiate either ExcInhNetBase or ExcInhNet below
#net = ExcInhNetBase(N=N)
net = ExcInhNet(N=N)
print net
moose.le( '/' )
moose.le( '/network' )
rdes = buildRdesigneur()
rdes.buildModel( '/model' )
app = QtGui.QApplication(sys.argv)
viewer = create_viewer(rdes)
app.exec_()
| gpl-2.0 | 2,455,827,256,080,544,300 | 39.477137 | 93 | 0.538114 | false |
isidroamv/netmiko | examples/asa_upgrade.py | 6 | 2618 | #!/usr/bin/env python
"""Script to upgrade a Cisco ASA."""
import sys
from datetime import datetime
from getpass import getpass
from netmiko import ConnectHandler, FileTransfer
def asa_scp_handler(ssh_conn, cmd='ssh scopy enable', mode='enable'):
"""Enable/disable SCP on Cisco ASA."""
if mode == 'disable':
cmd = 'no ' + cmd
return ssh_conn.send_config_set([cmd])
def main():
"""Script to upgrade a Cisco ASA."""
ip_addr = raw_input("Enter ASA IP address: ")
my_pass = getpass()
start_time = datetime.now()
print ">>>> {}".format(start_time)
net_device = {
'device_type': 'cisco_asa',
'ip': ip_addr,
'username': 'admin',
'password': my_pass,
'secret': my_pass,
'port': 22,
}
print "\nLogging in to ASA"
ssh_conn = ConnectHandler(**net_device)
print
# ADJUST TO TRANSFER IMAGE FILE
dest_file_system = 'disk0:'
source_file = 'test1.txt'
dest_file = 'test1.txt'
alt_dest_file = 'asa825-59-k8.bin'
scp_changed = False
with FileTransfer(ssh_conn, source_file=source_file, dest_file=dest_file,
file_system=dest_file_system) as scp_transfer:
if not scp_transfer.check_file_exists():
if not scp_transfer.verify_space_available():
raise ValueError("Insufficient space available on remote device")
print "Enabling SCP"
output = asa_scp_handler(ssh_conn, mode='enable')
print output
print "\nTransferring file\n"
scp_transfer.transfer_file()
print "Disabling SCP"
output = asa_scp_handler(ssh_conn, mode='disable')
print output
print "\nVerifying file"
if scp_transfer.verify_file():
print "Source and destination MD5 matches"
else:
raise ValueError("MD5 failure between source and destination files")
print "\nSending boot commands"
full_file_name = "{}/{}".format(dest_file_system, alt_dest_file)
boot_cmd = 'boot system {}'.format(full_file_name)
output = ssh_conn.send_config_set([boot_cmd])
print output
print "\nVerifying state"
output = ssh_conn.send_command('show boot')
print output
# UNCOMMENT TO PERFORM WR MEM AND RELOAD
#print "\nWrite mem and reload"
#output = ssh_conn.send_command_expect('write mem')
#output += ssh_conn.send_command('reload')
#output += ssh_conn.send_command('y')
#print output
print "\n>>>> {}".format(datetime.now() - start_time)
print
if __name__ == "__main__":
main()
| mit | 1,381,343,185,416,863,700 | 29.44186 | 81 | 0.605042 | false |
pyhmsa/pyhmsa | pyhmsa/spec/datum/data.py | 1 | 1689 | """
Dictionary of datum objects
"""
# Standard library modules.
# Third party modules.
# Local modules.
from pyhmsa.spec.datum.datum import _Datum
from pyhmsa.type.identifier import _IdentifierDict
from pyhmsa.spec.condition.conditions import Conditions, WeakConditions
# Globals and constants variables.
class Data(_IdentifierDict):
def __init__(self, datafile):
super().__init__()
self._datafile = datafile
self._lock = datafile._lock
def __setitem__(self, identifier, datum):
if not isinstance(datum, _Datum):
raise ValueError("Value is not a datum")
# Remove old datum
if identifier in self:
del self[identifier]
# Reconstruct weak references to conditions
conditions = WeakConditions(self._datafile)
for cidentifier, condition in datum.conditions.items():
if condition not in self._datafile.conditions.values():
cidentifier = self._datafile.conditions.add(cidentifier, condition)
else:
for cidentifier2, condition2 in self._datafile.conditions.items():
if condition == condition2:
cidentifier = cidentifier2
break
conditions[cidentifier] = condition
with self._lock:
datum._conditions = conditions
super().__setitem__(identifier, datum)
def __delitem__(self, identifier):
datum = self[identifier]
conditions = Conditions()
conditions.update(datum.conditions)
with self._lock:
datum._conditions = conditions
super().__delitem__(identifier)
| mit | -6,022,058,141,246,230,000 | 29.709091 | 83 | 0.616933 | false |
delmic/odemis | src/odemis/driver/tfsbc.py | 2 | 15331 | # -*- coding: utf-8 -*-
"""
Created on 11 May 2020
@author: Philip Winkler
Copyright © 2020 Philip Winkler, Delmic
This file is part of Odemis.
Odemis is free software: you can redistribute it and/or modify it under the terms
of the GNU General Public License version 2 as published by the Free Software
Foundation.
Odemis is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR
PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with
Odemis. If not, see http://www.gnu.org/licenses/.
"""
from __future__ import division
import logging
import numpy
import serial.tools.list_ports
from pymodbus.client.sync import ModbusSerialClient
from odemis import model
from odemis.model import HwError
# Parameters for connection
BAUDRATE = 230400
# TODO: there seems to be a bug in the pymodbus library. Whenever write_registers is called, the library waits
# until the timeout or 1024 values were sent, see https://github.com/riptideio/pymodbus/issues/237.
# For now, we put a short timeout to make sure we don't have to wait for too long.
TIMEOUT = 0.2
BYTESIZE = 8
PARITY = serial.PARITY_NONE
STOPBITS = 1
# Modbus level addresses
SLAVE_UNIT = 2
# Modbus registers
BEAMDEFL_LX = 0 # lower x beam deflection element control
BEAMDEFL_LY = 1 # lower y beam deflection element control
BEAMDEFL_UX = 2 # upper x beam deflection element control
BEAMDEFL_UY = 3 # upper y beam deflection element control
# Transformation constants
DCREVERSCOEF = -1
DCROTUPPXX = 1
DCROTUPPYY = 1
DCROTLOWXX = -1
DCROTLOWYY = 1
# Conversion ranges
C_MIN_RAW_SHIFT = 0
C_MAX_RAW_SHIFT = 0xFFFF
C_MIN_DBL_SHIFT = -42.2e-3
C_MAX_DBL_SHIFT = 42.2e-3
def current_to_raw(current):
"""
Helper function for coordinate transform (from Thermofischer example code).
:param current: (float)
"""
k = (C_MAX_RAW_SHIFT - C_MIN_RAW_SHIFT) / (C_MAX_DBL_SHIFT - C_MIN_DBL_SHIFT)
return int((current - C_MIN_DBL_SHIFT) * k + C_MIN_RAW_SHIFT + 0.5)
def raw_to_current(raw):
"""
Inverse of current_to_raw.
:param raw: (int)
"""
k = (C_MAX_RAW_SHIFT - C_MIN_RAW_SHIFT) / (C_MAX_DBL_SHIFT - C_MIN_DBL_SHIFT)
return (raw - 0.5 - C_MIN_RAW_SHIFT) / k + C_MIN_DBL_SHIFT
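# Worked example (illustrative, not part of the original driver): the constants
# above map the +/-42.2 mA coil range linearly onto the 16-bit register range,
# e.g. current_to_raw(C_MIN_DBL_SHIFT) == 0, current_to_raw(C_MAX_DBL_SHIFT) == 65535,
# and a zero current lands near mid-scale (~0x8000). raw_to_current() inverts the
# mapping up to the 0.5 LSB rounding offset.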
def transform_coordinates(value, xlower, xupper, ylower, yupper):
"""
Transform x, y coordinates to register values of beamshift hardware.
:param value: (float, float) x, y value in the source coordinate system
:param xlower: (float, float) xlower metadata
:param xupper: (float, float) xupper metadata
:param ylower: (float, float) ylower metadata
:param yupper: (float, float) yupper metadata
:return (int, int, int, int): register values: x lower, y lower, x upper, y upper
"""
value = (value[0] * 1e6, value[1] * 1e6) # value in µm
# This transformation was provided as example code from Thermofischer (the variable names are slightly modified
# to fit the coding style of this driver, the rest of the calculation is identical).
dc_xupper = value[0] * xupper[0] + value[1] * yupper[0]
dc_xlower = value[0] * xlower[0] + value[1] * ylower[0]
dc_yupper = value[0] * xupper[1] + value[1] * yupper[1]
dc_ylower = value[0] * xlower[1] + value[1] * ylower[1]
currUX = DCREVERSCOEF * DCROTUPPXX * dc_xupper
currLX = DCREVERSCOEF * DCROTLOWXX * dc_xlower
currUY = DCREVERSCOEF * DCROTUPPYY * dc_yupper
currLY = DCREVERSCOEF * DCROTLOWYY * dc_ylower
for current in [currLX, currLY, currUX, currUY]:
if not C_MIN_DBL_SHIFT <= current <= C_MAX_DBL_SHIFT:
raise ValueError("Beam deflection %s exceeds limits (%s, %s) of DC coils."
% (current, C_MIN_DBL_SHIFT, C_MAX_DBL_SHIFT))
rawLX = current_to_raw(currLX)
rawUX = current_to_raw(currUX)
rawLY = current_to_raw(currLY)
rawUY = current_to_raw(currUY)
return [rawLX, rawLY, rawUX, rawUY]
def transform_coordinates_reverse(register_values, xlower, xupper, ylower, yupper):
"""
    Transform register values back to x, y position in the source coordinate system.
:param register_values: (int, int, int, int) register values
:param xlower: (float, float) xlower metadata
:param xupper: (float, float) xupper metadata
:param ylower: (float, float) ylower metadata
:param yupper: (float, float) yupper metadata
:return (int, int): x, y position
"""
rawLX, rawLY, rawUX, rawUY = register_values
currLX = raw_to_current(rawLX)
currLY = raw_to_current(rawLY)
currUX = raw_to_current(rawUX)
currUY = raw_to_current(rawUY)
dc_xupper = currUX / (DCREVERSCOEF * DCROTUPPXX)
dc_xlower = currLX / (DCREVERSCOEF * DCROTLOWXX)
dc_yupper = currUY / (DCREVERSCOEF * DCROTUPPYY)
dc_ylower = currLY / (DCREVERSCOEF * DCROTLOWYY)
# Now we have to solve an overdetermined linear system of four equations with two variables.
A = numpy.array([[xupper[0], yupper[0]], [xlower[0], ylower[0]],
[xupper[1], yupper[1]], [xlower[1], ylower[1]]])
b = numpy.array([dc_xupper, dc_xlower, dc_yupper, dc_ylower])
value, *_ = numpy.linalg.lstsq(A, b)
value = (value[0] * 1e-6, value[1] * 1e-6) # µm --> m
return value
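# Illustrative round-trip sketch (not used by the driver): the calibration numbers
# below are made up, real values come from the MD_CALIB metadata of the component.
def _transform_roundtrip_example():
    """With a made-up diagonal calibration of 1 mA per µm on both coils, a
    (10 µm, -5 µm) shift stays well inside the +/-42.2 mA coil limits, and the
    reverse transform recovers the requested shift up to the 16-bit quantisation
    of the register values."""
    calib = ((1e-3, 0), (1e-3, 0), (0, 1e-3), (0, 1e-3))  # xlower, xupper, ylower, yupper
    regs = transform_coordinates((10e-6, -5e-6), *calib)
    return transform_coordinates_reverse(regs, *calib)  # ~ (10e-6, -5e-6)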
class BeamShiftController(model.HwComponent):
"""
Driver for the Thermofischer beam deflection controller.
This class provides the .shift VA containing a tuple of two floats which describe
the x and y beam offset in m in the stage coordinate system.
The conversion to internal ampere values (including scaling and rotation) is specified
through the MD_CALIB metadata (a 4x2 tuple, 4x (float, float), xlower, xupper, ylower, yupper).
"""
def __init__(self, name, role, port=None, serialnum=None, **kwargs):
"""
:param port (str): (e.g. "/dev/ttyUSB0") or pattern for port ("/dev/ttyUSB*"),
"/dev/fake" will start the simulator
:param serialnum (str): serial number of RS485 adapter
The connection can be specified by either port or serialnum, it's not needed to provide both.
"""
# .hwVersion, .swVersion not available
model.HwComponent.__init__(self, name, role, **kwargs)
# Find port by RS485 adapter serial number
self._portpattern = port
self._serialnum = serialnum
self._port = self._findDevice(port, serialnum)
self._serial = self._openSerialPort(self._port)
# Shift VA
# Range depends on metadata and will be checked in ._write_registers
# The value is not correct until the metadata is set.
self.shift = model.TupleContinuous((0, 0), range=((-1, -1), (1, 1)),
cls=(int, float), unit="m",
setter=self._setShift)
def _findDevice(self, port=None, serialnum=None):
"""
Look for a compatible device. Requires at least one of the arguments port and serialnum.
port (str): port (e.g. "/dev/ttyUSB0") or pattern for port ("/dev/ttyUSB*"), "/dev/fake" will start the simulator
serialnum (str): serial number
return (str): the name of the port used
raises:
HwError: if no device on the ports with the given serial number is found
"""
# At least one of the arguments port and serialnum must be specified
if not port and not serialnum:
raise ValueError("At least one of the arguments 'port' and 'serialnum' must be specified.")
# For debugging purpose
if port == "/dev/fake":
return port
# If no ports specified, check all available ports
if port:
names = list(serial.tools.list_ports.grep(port))
else:
names = serial.tools.list_ports.comports() # search all serial ports
# Look for serial number if available, otherwise make sure only one port matches the port pattern.
if serialnum:
for port in names:
if serialnum in port.serial_number:
return port.device # Found it!
else:
raise HwError("Beam controller device with serial number %s not found for port %s. " % (serialnum, names) +
"Check the connection.")
else:
if len(names) == 1:
port = names[0]
return port.device
elif len(names) > 1:
raise HwError("Multiple ports detected for beam controller. Please specify a serial number.")
else:
raise HwError("Beam controller device not found for port %s. Check the connection." % port)
def _openSerialPort(self, port):
if self._port == "/dev/fake":
return BeamShiftControllerSimulator()
else:
return ModbusSerialClient(method='rtu', port=port,
baudrate=BAUDRATE, timeout=TIMEOUT,
stopbits=STOPBITS, parity=PARITY,
bytesize=BYTESIZE)
def _setShift(self, value):
"""
:param value (float, float): x, y shift from the center (in m)
"""
logging.debug("Requesting shift of %s m.", value)
try:
xlower, xupper, ylower, yupper = self._metadata[model.MD_CALIB]
except KeyError:
raise ValueError("Cannot set shift, MD_CALIB metadata not specified.")
except ValueError as ex:
# Wrong format data, e.g. missing value or None
raise ValueError("Failed to parse MD_CALIB metadata, ex: %s" % ex)
# Transform to register values (including scaling and rotation)
register_values = transform_coordinates(value, xlower, xupper, ylower, yupper)
# Read previous value of registers for debugging purpose
# Note on duration: a write instruction takes about 14 ms, a read instruction about 20 ms
ret = self._read_registers()
logging.debug("Register values before writing: %s.", ret)
logging.debug("Writing register values %s", register_values)
self._write_registers(register_values)
# Convert back to original coordinates (should be the same as requested shift, possibly
# with a small rounding error)
value = transform_coordinates_reverse(register_values, xlower, xupper, ylower, yupper)
return value
def _write_registers(self, values):
"""
Write to all four registers. Try to reconnect to device in case connection was lost.
:values (list of 4 ints): register values (-x, -y, x, y)
"""
if len(values) != 4:
raise ValueError("write_registers received payload of invalid length %s != 4." % len(values))
# Check if values are in allowed range
if not all(0 <= val <= 0xFFFF for val in values):
raise ValueError("Register values %s not in range [0, 65535]." % values)
try:
# write all registers together (starting at lower x register (=0x01))
rq = self._serial.write_registers(BEAMDEFL_LX, values, unit=SLAVE_UNIT)
except IOError:
self._reconnect()
raise IOError("Failed to write registers of beam control firmware, "
"restarted serial connection.")
def _read_registers(self):
"""
Read all four registers. Try to reconnect to device in case connection was lost.
:return (list of 4 ints): register values (-x, -y, x, y)
"""
try:
            # read all registers together (starting at lower x register (=0x01))
rr = self._serial.read_holding_registers(BEAMDEFL_LX, 4, unit=SLAVE_UNIT)
return rr.registers
except IOError:
self._reconnect()
            raise IOError("Failed to read registers of beam control firmware, "
"restarted serial connection.")
def _reconnect(self):
"""
Attempt to reconnect. It will block until this happens.
On return, the hardware should be ready to use as before.
"""
num_it = 5
self.state._set_value(model.HwError("Beam deflection controller disconnected"), force_write=True)
logging.warning("Failed to write registers, trying to reconnect...")
for i in range(num_it):
try:
self._serial.close()
self._serial = None
self._port = self._findDevice(self._portpattern, self._serialnum)
self._serial = self._openSerialPort(self._port)
logging.info("Recovered device.")
break
except IOError:
continue
else:
raise IOError("Failed to reconnect to beam deflection controller.")
self.state._set_value(model.ST_RUNNING, force_write=True)
def updateMetadata(self, md):
if model.MD_CALIB in md:
# Check format
bs = md[model.MD_CALIB]
try:
if not len(bs) == 4: # 4 tuples required
raise ValueError("Invalid MD_CALIB metadata %s: 4 tuples required." % (bs,))
if not all(len(val) == 2 for val in bs): # each of the 4 values is a tuple of 2
raise ValueError("Invalid MD_CALIB metadata %s: Two values per tuple required." % (bs,))
if not all(all(isinstance(val, (int, float)) for val in tup) for tup in bs): # each element is a number
raise ValueError("Invalid MD_CALIB metadata %s: Values must be numbers." % (bs,))
except Exception as ex:
raise ValueError("Invalid MD_CALIB metadata %s, ex: %s" % (bs, ex,))
# Read register values from hardware
vals = self._read_registers()
# Transform back with new metadata
xlower, xupper, ylower, yupper = md[model.MD_CALIB]
new_shift = transform_coordinates_reverse(vals, xlower, xupper, ylower, yupper)
# Update .shift (but don't set value in hardware)
logging.debug("Shift after metadata update: %s", new_shift)
self.shift._value = new_shift
self.shift.notify(new_shift)
model.HwComponent.updateMetadata(self, md)
class BeamShiftControllerSimulator(object):
def __init__(self):
self.r0 = 0
self.r1 = 0
self.r2 = 0
self.r3 = 0
def write_registers(self, start_register, values, unit=None):
"""
Writes four values in the registers r0-r3.
"""
self.r0 = values[0]
self.r1 = values[1]
self.r2 = values[2]
self.r3 = values[3]
return SimplifiedModbusObject([])
def read_holding_registers(self, start_register, num_registers, unit=None):
return SimplifiedModbusObject([self.r0, self.r1, self.r2, self.r3][:num_registers])
class SimplifiedModbusObject(object):
"""
Simulate a modbus object (has .registers and .function_code attributes).
"""
def __init__(self, registers):
self.function_code = 0x80
self.registers = registers
| gpl-2.0 | -2,090,438,126,747,198,200 | 39.874667 | 123 | 0.624739 | false |
rossburton/yocto-autobuilder | lib/python2.7/site-packages/buildbot-0.8.8-py2.7.egg/buildbot/db/migrate/versions/010_fix_column_lengths.py | 4 | 2577 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import sqlalchemy as sa
from migrate import changeset
def upgrade(migrate_engine):
metadata = sa.MetaData()
metadata.bind = migrate_engine
# the old (non-sqlalchemy-migrate) migration scripts messed up the
# lengths of these columns, so fix them here.
changeset.alter_column(
sa.Column('class_name', sa.String(128), nullable=False),
table="schedulers",
metadata=metadata,
engine=migrate_engine)
changeset.alter_column(
sa.Column('name', sa.String(128), nullable=False),
table="schedulers",
metadata=metadata,
engine=migrate_engine)
# sqlalchemy's reflection gets the server_defaults wrong, so this
# table has to be included here.
changes = sa.Table('changes', metadata,
sa.Column('changeid', sa.Integer, primary_key=True),
sa.Column('author', sa.String(256), nullable=False),
sa.Column('comments', sa.String(1024), nullable=False),
sa.Column('is_dir', sa.SmallInteger, nullable=False),
sa.Column('branch', sa.String(256)),
sa.Column('revision', sa.String(256)),
sa.Column('revlink', sa.String(256)),
sa.Column('when_timestamp', sa.Integer, nullable=False),
sa.Column('category', sa.String(256)),
sa.Column('repository', sa.String(length=512), nullable=False,
server_default=''),
sa.Column('project', sa.String(length=512), nullable=False,
server_default=''),
)
changeset.alter_column(
sa.Column('author', sa.String(256), nullable=False),
table=changes,
metadata=metadata,
engine=migrate_engine)
changeset.alter_column(
sa.Column('branch', sa.String(256)),
table=changes,
metadata=metadata,
engine=migrate_engine)
| gpl-2.0 | -8,529,053,755,613,529,000 | 39.904762 | 79 | 0.655413 | false |
scotwk/cloud-custodian | tools/c7n_mailer/c7n_mailer/ldap_lookup.py | 2 | 8776 | # Copyright 2017 Capital One Services, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import redis
import re
try:
import sqlite3
except ImportError:
have_sqlite = False
else:
have_sqlite = True
from ldap3 import Connection
from ldap3.core.exceptions import LDAPSocketOpenError
class LdapLookup(object):
def __init__(self, config, logger):
self.log = logger
self.connection = self.get_connection(
config.get('ldap_uri'),
config.get('ldap_bind_user', None),
config.get('ldap_bind_password', None)
)
self.base_dn = config.get('ldap_bind_dn')
self.email_key = config.get('ldap_email_key', 'mail')
self.manager_attr = config.get('ldap_manager_attribute', 'manager')
self.uid_key = config.get('ldap_uid_attribute', 'sAMAccountName')
self.attributes = ['displayName', self.uid_key, self.email_key, self.manager_attr]
self.uid_regex = config.get('ldap_uid_regex', None)
self.cache_engine = config.get('cache_engine', None)
if self.cache_engine == 'redis':
redis_host = config.get('redis_host')
redis_port = int(config.get('redis_port', 6379))
self.caching = self.get_redis_connection(redis_host, redis_port)
elif self.cache_engine == 'sqlite':
if not have_sqlite:
raise RuntimeError('No sqlite available: stackoverflow.com/q/44058239')
self.caching = LocalSqlite(config.get('ldap_cache_file', '/var/tmp/ldap.cache'), logger)
def get_redis_connection(self, redis_host, redis_port):
return Redis(redis_host=redis_host, redis_port=redis_port, db=0)
def get_connection(self, ldap_uri, ldap_bind_user, ldap_bind_password):
# note, if ldap_bind_user and ldap_bind_password are None
# an anonymous bind will be attempted.
try:
return Connection(
ldap_uri, user=ldap_bind_user, password=ldap_bind_password,
auto_bind=True,
receive_timeout=30,
auto_referrals=False,
)
except LDAPSocketOpenError:
self.log.error('Not able to establish a connection with LDAP.')
def search_ldap(self, base_dn, ldap_filter, attributes):
self.connection.search(base_dn, ldap_filter, attributes=self.attributes)
if len(self.connection.entries) == 0:
self.log.warning("user not found. base_dn: %s filter: %s", base_dn, ldap_filter)
return {}
if len(self.connection.entries) > 1:
self.log.warning("too many results for search %s", ldap_filter)
return {}
return self.connection.entries[0]
def get_email_to_addrs_from_uid(self, uid, manager=False):
to_addrs = []
uid_metadata = self.get_metadata_from_uid(uid)
uid_email = uid_metadata.get(self.email_key, None)
if uid_email:
to_addrs.append(uid_email)
if manager:
uid_manager_dn = uid_metadata.get(self.manager_attr, None)
uid_manager_email = None
if uid_manager_dn:
uid_manager = self.get_metadata_from_dn(uid_manager_dn)
uid_manager_email = uid_manager.get('mail')
if uid_manager_email:
to_addrs.append(uid_manager_email)
return to_addrs
# eg, dn = uid=bill_lumbergh,cn=users,dc=initech,dc=com
def get_metadata_from_dn(self, user_dn):
if self.cache_engine:
cache_result = self.caching.get(user_dn)
if cache_result:
cache_msg = 'Got ldap metadata from local cache for: %s' % user_dn
self.log.debug(cache_msg)
return cache_result
ldap_filter = '(%s=*)' % self.uid_key
ldap_results = self.search_ldap(user_dn, ldap_filter, attributes=self.attributes)
if ldap_results:
ldap_user_metadata = self.get_dict_from_ldap_object(self.connection.entries[0])
else:
self.caching.set(user_dn, {})
return {}
if self.cache_engine:
self.log.debug('Writing user: %s metadata to cache engine.' % user_dn)
self.caching.set(user_dn, ldap_user_metadata)
self.caching.set(ldap_user_metadata[self.uid_key], ldap_user_metadata)
return ldap_user_metadata
def get_dict_from_ldap_object(self, ldap_user_object):
ldap_user_metadata = {attr.key: attr.value for attr in ldap_user_object}
ldap_user_metadata['dn'] = ldap_user_object.entry_dn
ldap_user_metadata[self.email_key] = ldap_user_metadata[self.email_key].lower()
ldap_user_metadata[self.uid_key] = ldap_user_metadata[self.uid_key].lower()
return ldap_user_metadata
# eg, uid = bill_lumbergh
def get_metadata_from_uid(self, uid):
uid = uid.lower()
if self.uid_regex:
# for example if you set ldap_uid_regex in your mailer.yml to "^[0-9]{6}$" then it
# would only query LDAP if your string length is 6 characters long and only digits.
# re.search("^[0-9]{6}$", "123456")
# Out[41]: <_sre.SRE_Match at 0x1109ab440>
# re.search("^[0-9]{6}$", "1234567") returns None, or "12345a' also returns None
if not re.search(self.uid_regex, uid):
regex_msg = 'uid does not match regex: %s %s' % (self.uid_regex, uid)
self.log.debug(regex_msg)
return {}
if self.cache_engine:
cache_result = self.caching.get(uid)
if cache_result or cache_result == {}:
cache_msg = 'Got ldap metadata from local cache for: %s' % uid
self.log.debug(cache_msg)
return cache_result
ldap_filter = '(%s=%s)' % (self.uid_key, uid)
ldap_results = self.search_ldap(self.base_dn, ldap_filter, attributes=self.attributes)
if ldap_results:
ldap_user_metadata = self.get_dict_from_ldap_object(self.connection.entries[0])
if self.cache_engine:
self.log.debug('Writing user: %s metadata to cache engine.' % uid)
self.caching.set(ldap_user_metadata['dn'], ldap_user_metadata)
self.caching.set(uid, ldap_user_metadata)
else:
if self.cache_engine:
self.caching.set(uid, {})
return {}
return ldap_user_metadata
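# Usage sketch (illustrative only; hostnames and values are made up and `logger`
# stands for any stdlib logging.Logger). The mailer normally builds this object
# from its YAML config, e.g.:
#
#   config = {'ldap_uri': 'ldap://ldap.initech.com',
#             'ldap_bind_dn': 'cn=users,dc=initech,dc=com',
#             'cache_engine': 'sqlite'}
#   lookup = LdapLookup(config, logger)
#   lookup.get_email_to_addrs_from_uid('bill_lumbergh', manager=True)
#   # -> [user email, manager email] when both resolve in LDAP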
# Use sqlite as a local cache for folks not running the mailer in lambda, avoids extra daemons
# as dependencies. This normalizes the methods to set/get functions, so you can interchangeably
# decide which caching system to use, a local file, or memcache, redis, etc
# If you don't want a redis dependency and aren't running the mailer in lambda this works well
class LocalSqlite(object):
def __init__(self, local_filename, logger):
self.log = logger
self.sqlite = sqlite3.connect(local_filename)
self.sqlite.execute('''CREATE TABLE IF NOT EXISTS ldap_cache(key text, value text)''')
def get(self, key):
sqlite_result = self.sqlite.execute("select * FROM ldap_cache WHERE key=?", (key,))
result = sqlite_result.fetchall()
if len(result) != 1:
error_msg = 'Did not get 1 result from sqlite, something went wrong with key: %s' % key
self.log.error(error_msg)
return None
return json.loads(result[0][1])
def set(self, key, value):
# note, the ? marks are required to ensure escaping into the database.
self.sqlite.execute("INSERT INTO ldap_cache VALUES (?, ?)", (key, json.dumps(value)))
# redis can't write complex python objects like dictionaries as values (the way memcache can)
# so we turn our dict into a json string when setting, and json.loads when getting
class Redis(object):
def __init__(self, redis_host=None, redis_port=6379, db=0):
self.connection = redis.StrictRedis(host=redis_host, port=redis_port, db=db)
def get(self, key):
cache_value = self.connection.get(key)
if cache_value:
return json.loads(cache_value)
def set(self, key, value):
return self.connection.set(key, json.dumps(value))
| apache-2.0 | 3,096,408,759,724,237,300 | 44.708333 | 100 | 0.621126 | false |
PeteW/luigi | luigi/contrib/ecs.py | 9 | 6698 | # -*- coding: utf-8 -*-
#
# Copyright 2015 Outlier Bio, LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
EC2 Container Service wrapper for Luigi
From the AWS website:
Amazon EC2 Container Service (ECS) is a highly scalable, high performance
container management service that supports Docker containers and allows you
to easily run applications on a managed cluster of Amazon EC2 instances.
To use ECS, you create a taskDefinition_ JSON that defines the `docker run`_
command for one or more containers in a task or service, and then submit this
JSON to the API to run the task.
This `boto3-powered`_ wrapper allows you to create Luigi Tasks to submit ECS
``taskDefinition`` s. You can either pass a dict (mapping directly to the
``taskDefinition`` JSON) OR an Amazon Resource Name (arn) for a previously
registered ``taskDefinition``.
Requires:
- boto3 package
- Amazon AWS credentials discoverable by boto3 (e.g., by using ``aws configure``
from awscli_)
- A running ECS cluster (see `ECS Get Started`_)
Written and maintained by Jake Feala (@jfeala) for Outlier Bio (@outlierbio)
.. _`docker run`: https://docs.docker.com/reference/commandline/run
.. _taskDefinition: http://docs.aws.amazon.com/AmazonECS/latest/developerguide/task_defintions.html
.. _`boto3-powered`: https://boto3.readthedocs.io
.. _awscli: https://aws.amazon.com/cli
.. _`ECS Get Started`: http://docs.aws.amazon.com/AmazonECS/latest/developerguide/ECS_GetStarted.html
"""
import time
import logging
import luigi
logger = logging.getLogger('luigi-interface')
try:
import boto3
client = boto3.client('ecs')
except ImportError:
logger.warning('boto3 is not installed. ECSTasks require boto3')
POLL_TIME = 2
def _get_task_statuses(task_ids, cluster):
"""
Retrieve task statuses from ECS API
Returns list of {RUNNING|PENDING|STOPPED} for each id in task_ids
"""
response = client.describe_tasks(tasks=task_ids, cluster=cluster)
# Error checking
if response['failures'] != []:
raise Exception('There were some failures:\n{0}'.format(
response['failures']))
status_code = response['ResponseMetadata']['HTTPStatusCode']
if status_code != 200:
msg = 'Task status request received status code {0}:\n{1}'
raise Exception(msg.format(status_code, response))
return [t['lastStatus'] for t in response['tasks']]
def _track_tasks(task_ids, cluster):
"""Poll task status until STOPPED"""
while True:
statuses = _get_task_statuses(task_ids, cluster)
if all([status == 'STOPPED' for status in statuses]):
logger.info('ECS tasks {0} STOPPED'.format(','.join(task_ids)))
break
time.sleep(POLL_TIME)
logger.debug('ECS task status for tasks {0}: {1}'.format(task_ids, statuses))
class ECSTask(luigi.Task):
"""
Base class for an Amazon EC2 Container Service Task
Amazon ECS requires you to register "tasks", which are JSON descriptions
for how to issue the ``docker run`` command. This Luigi Task can either
run a pre-registered ECS taskDefinition, OR register the task on the fly
from a Python dict.
:param task_def_arn: pre-registered task definition ARN (Amazon Resource
Name), of the form::
arn:aws:ecs:<region>:<user_id>:task-definition/<family>:<tag>
:param task_def: dict describing task in taskDefinition JSON format, for
example::
task_def = {
'family': 'hello-world',
'volumes': [],
'containerDefinitions': [
{
'memory': 1,
'essential': True,
'name': 'hello-world',
'image': 'ubuntu',
'command': ['/bin/echo', 'hello world']
}
]
}
:param cluster: str defining the ECS cluster to use.
When this is not defined it will use the default one.
"""
task_def_arn = luigi.OptionalParameter(default=None)
task_def = luigi.OptionalParameter(default=None)
cluster = luigi.Parameter(default='default')
@property
def ecs_task_ids(self):
"""Expose the ECS task ID"""
if hasattr(self, '_task_ids'):
return self._task_ids
@property
def command(self):
"""
Command passed to the containers
Override to return list of dicts with keys 'name' and 'command',
describing the container names and commands to pass to the container.
Directly corresponds to the `overrides` parameter of runTask API. For
example::
[
{
'name': 'myContainer',
'command': ['/bin/sleep', '60']
}
]
"""
pass
def run(self):
if (not self.task_def and not self.task_def_arn) or \
(self.task_def and self.task_def_arn):
            raise ValueError(('Either (but not both) a task_def (dict) or '
'task_def_arn (string) must be assigned'))
if not self.task_def_arn:
# Register the task and get assigned taskDefinition ID (arn)
response = client.register_task_definition(**self.task_def)
self.task_def_arn = response['taskDefinition']['taskDefinitionArn']
# Submit the task to AWS ECS and get assigned task ID
# (list containing 1 string)
if self.command:
overrides = {'containerOverrides': self.command}
else:
overrides = {}
response = client.run_task(taskDefinition=self.task_def_arn,
overrides=overrides,
cluster=self.cluster)
if response['failures']:
            raise Exception(", ".join(["failed to run task {0} reason: {1}".format(failure['arn'], failure['reason'])
for failure in response['failures']]))
self._task_ids = [task['taskArn'] for task in response['tasks']]
# Wait on task completion
_track_tasks(self._task_ids, self.cluster)
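# Usage sketch (illustrative only): the ARN below is a placeholder and assumes the
# 'hello-world' task definition from the docstring above is already registered and
# that boto3 can find AWS credentials:
#
#   task = ECSTask(task_def_arn='arn:aws:ecs:us-east-1:123456789012:'
#                               'task-definition/hello-world:1')
#   luigi.build([task], local_scheduler=True)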
| apache-2.0 | -3,722,882,333,341,477,400 | 33.704663 | 115 | 0.625858 | false |
csm0042/guicodebuilder | guicodebuilder/gui_builder.py | 1 | 92748 | import logging
import os
import tkinter as tk
import configparser
#######################################################################################################################
# Define Helper functions
#######################################################################################################################
def CountWidgetByType(iniFile, searchString):
import configparser
Config = configparser.ConfigParser()
Config.read(iniFile)
# Initialize counter
Count = 0
# Count each of the various types of entries in the INI file
for section in Config.sections():
foundPointer = section.find(searchString)
if foundPointer != -1:
Count = Count + 1
pass
pass
# Return results
return Count
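# Usage sketch (illustrative; 'gui.ini' is a made-up file name). Section names in the
# INI file are expected to embed the widget type, e.g. [frame1], [button1], [button2],
# so the count is a substring match over the section names:
#
#   buttonCount = CountWidgetByType('gui.ini', 'button')   # -> 2 for the sections above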
#######################################################################################################################
# Define Window class
#######################################################################################################################
class Window(object):
def __init__(self):
self.width = str()
self.height = str()
self.posX = str()
self.posY = str()
self.title = str()
self.backgroundColor = str()
self.iniFile = str()
self.section = str()
self.junk = str()
#######################################################################################################################
# Define Frame widget class
#######################################################################################################################
class Frame(object):
def __init__(self):
self.backgroundColor = str()
self.borderwidth = str()
self.colormap = str()
self.container = str()
self.cursor = str()
self.height = str()
self.highlightBackgroundColor = str()
self.highlightColor = str()
self.highlightThickness = str()
self.padX = str()
self.padY = str()
self.relief = str()
self.takeFocus = str()
self.visual = str()
self.width = str()
self.iniFile = str()
self.section = str()
#######################################################################################################################
# Define Message widget class
#######################################################################################################################
class Message(object):
def __init__(self):
self.anchor = str()
self.aspect = str()
self.backgroundColor = str()
self.borderwidth = str()
self.cursor = str()
self.font = str()
self.fontSize = str()
self.foregroundColor = str()
self.highlightBackground = str()
self.highlightBackgroundColor = str()
self.highlightThickness = str()
self.justify = str()
self.padX = str()
self.padY = str()
self.relief = str()
self.takeFocus = str()
self.text = str()
self.textVariable = str()
self.width = str()
self.iniFile = str()
self.section = str()
#######################################################################################################################
# Define Text widget class
#######################################################################################################################
class Text(object):
def __init__(self):
self.autoSeparators = str()
self.backgroundColor = str()
self.backgroundStipple = str()
self.borderwidth = str()
self.cursor = str()
self.exportSelection = str()
self.font = str()
self.fontSize = str()
self.foregroundColor = str()
self.foregroundStipple = str()
self.height = str()
self.highlightBackgroundColor = str()
self.highlightColor = str()
self.highlightThickness = str()
self.insertBackground = str()
self.insertBorderwidth = str()
self.insertOffTime = str()
self.insertOnTime = str()
self.insertWidth = str()
self.justify = str()
self.lmargin1 = str()
self.lmargin2 = str()
self.maxUndo = str()
self.padX = str()
self.padY = str()
self.offset = str()
self.overstrike = str()
self.relief = str()
self.rmargin = str()
self.selectBackgroundColor = str()
self.selectForegroundColor = str()
self.selectBorderwidth = str()
self.setGrid = str()
self.spacing1 = str()
self.spacing2 = str()
self.spacing3 = str()
self.state = str()
self.tabs = str()
self.takeFocus = str()
self.text = str()
self.underline = str()
self.undo = str()
self.width = str()
self.wrap = str()
self.xScrollCommand = str()
self.yScrollCommand = str()
self.iniFile = str()
self.section = str()
#######################################################################################################################
# Define Button widget class
#######################################################################################################################
class Button(object):
def __init__(self):
self.activeBackgroundColor = str()
self.activeForegroundColor = str()
self.anchor = str()
self.backgroundColor = str()
self.bitmap = str()
self.borderwidth = int()
self.command = int()
self.compound = str()
self.cursor = str()
self.default = str()
self.disableForeground = str()
self.font = str()
self.fontSize = int()
self.foregroundColor = str()
self.height = int()
self.highlightBackgroundColor = str()
self.highlightColor = str()
self.highlightThickness = int()
self.image = str()
self.justify = str()
self.overRelief = str()
self.padX = int()
self.padY = int()
self.relief = str()
self.repeatDelay = int()
self.repeatInterval = int()
self.state = str()
self.takeFocus = str()
self.text = str()
self.textVariable = str()
self.underline = str()
self.width = int()
self.wrapLength = int()
self.iniFile = str()
self.section = str()
#######################################################################################################################
# Define place settings class
#######################################################################################################################
class Place(object):
def __init__(self):
self.anchor = str()
self.borderMode = str()
self.height = str()
self.width = str()
self.relHeight = str()
self.relWidth = str()
self.relX = str()
self.relY = str()
self.offsetX = str()
self.offsetY = str()
self.iniFile = str()
self.section = str()
#######################################################################################################################
# Define GUI class
#######################################################################################################################
class gui(object):
def __init__(self, inifile, logfile):
self.inifile = inifile
self.logfile = logfile
self.codelines = []
self.field = int()
self.address = int()
self.text_to_write = str()
self.text_to_write_mem = str()
self.place_settings = Place()
self.path = str()
self.codetowritetofile = str()
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(levelname)-8s %(message)s',
filename=self.logfile, filemode='w')
logging.info('[gui.__init__] Appwindow object created')
self.root = tk.Tk()
self.section = str()
self.Window = Window()
self.frameCount = CountWidgetByType(self.inifile, "frame")
self.Frame = Frame()
self.FramePlace = Place()
self.tkFrame = [tk.Frame() for i in range(self.frameCount)]
logging.info('[gui.__init__] Found configuration data for %d "frame" widgets' %self.frameCount)
self.messageCount = CountWidgetByType(self.inifile, "message")
self.Message = Message()
self.MessagePlace = Place()
self.tkMessage = [tk.Message() for i in range(self.messageCount)]
logging.info('[gui.__init__] Found configuration data for %d "message" widgets' %self.messageCount)
self.textCount = CountWidgetByType(self.inifile, "text")
self.Text = Text()
self.TextPlace = Place()
self.tkText = [tk.Text() for i in range(self.textCount)]
logging.info('[gui.__init__] Found configuration data for %d "text" widgets' %self.textCount)
self.buttonCount = CountWidgetByType(self.inifile, "button")
self.Button = Button()
self.ButtonPlace = Place()
self.tkButton = [tk.Button() for i in range(self.buttonCount)]
logging.info('[gui.__init__] Found configuration data for %d "button" widgets' %self.buttonCount)
def create_class(self):
################################################################################################################
# BEGIN SCRIPT BY IMPORTING DEPENDENCIES
################################################################################################################
self.codelines.append('\n################################################################################################################')
self.codelines.append('\n#IMPORT LIBRARIES')
self.codelines.append('\n################################################################################################################')
self.codelines.append('\nimport logging')
self.codelines.append('\nimport tkinter as tk')
self.codelines.append('\n')
self.codelines.append('\n')
################################################################################################################
# DEFINE HELPER CLASSES AND MODULES
################################################################################################################
self.codelines.append('\n################################################################################################################')
self.codelines.append('\n#DEFINE CLASS')
self.codelines.append('\n################################################################################################################')
self.codelines.append('\nclass Window(object):')
self.codelines.append('\n\tdef __init__(self):')
self.codelines.append('\n\t\tself.width = str()')
self.codelines.append('\n\t\tself.height = str()')
self.codelines.append('\n\t\tself.posX = str()')
self.codelines.append('\n\t\tself.posY = str()')
self.codelines.append('\n\t\tself.title = str()')
self.codelines.append('\n\t\tself.backgroundColor = str()')
self.codelines.append('\n')
self.codelines.append('\n')
self.codelines.append('\nclass Frame(object):')
self.codelines.append('\n\tdef __init__(self):')
self.codelines.append('\n\t\tself.backgroundColor = str()')
self.codelines.append('\n\t\tself.borderwidth = str()')
self.codelines.append('\n\t\tself.colormap = str()')
self.codelines.append('\n\t\tself.container = str()')
self.codelines.append('\n\t\tself.cursor = str()')
self.codelines.append('\n\t\tself.height = str()')
self.codelines.append('\n\t\tself.highlightBackgroundColor = str()')
self.codelines.append('\n\t\tself.highlightColor = str()')
self.codelines.append('\n\t\tself.highlightThickness = str()')
self.codelines.append('\n\t\tself.padX = str()')
self.codelines.append('\n\t\tself.padY = str()')
self.codelines.append('\n\t\tself.relief = str()')
self.codelines.append('\n\t\tself.takeFocus = str()')
self.codelines.append('\n\t\tself.visual = str()')
self.codelines.append('\n\t\tself.width = str()')
self.codelines.append('\n')
self.codelines.append('\n')
self.codelines.append('\nclass Message(object):')
self.codelines.append('\n\tdef __init__(self):')
self.codelines.append('\n\t\tself.anchor = str()')
self.codelines.append('\n\t\tself.aspect = str()')
self.codelines.append('\n\t\tself.backgroundColor = str()')
self.codelines.append('\n\t\tself.borderwidth = str()')
self.codelines.append('\n\t\tself.cursor = str()')
self.codelines.append('\n\t\tself.font = str()')
self.codelines.append('\n\t\tself.fontSize = str()')
self.codelines.append('\n\t\tself.foregroundColor = str()')
self.codelines.append('\n\t\tself.highlightBackground = str()')
self.codelines.append('\n\t\tself.highlightBackgroundColor = str()')
self.codelines.append('\n\t\tself.highlightThickness = str()')
self.codelines.append('\n\t\tself.justify = str()')
self.codelines.append('\n\t\tself.padX = str()')
self.codelines.append('\n\t\tself.padY = str()')
self.codelines.append('\n\t\tself.relief = str()')
self.codelines.append('\n\t\tself.takeFocus = str()')
self.codelines.append('\n\t\tself.text = str()')
self.codelines.append('\n\t\tself.textVariable = str()')
self.codelines.append('\n\t\tself.width = str()')
self.codelines.append('\n')
self.codelines.append('\n')
self.codelines.append('\nclass Text(object):')
self.codelines.append('\n\tdef __init__(self):')
self.codelines.append('\n\t\tself.autoSeparators = str()')
self.codelines.append('\n\t\tself.backgroundColor = str()')
self.codelines.append('\n\t\tself.backgroundStipple = str()')
self.codelines.append('\n\t\tself.borderwidth = str()')
self.codelines.append('\n\t\tself.cursor = str()')
self.codelines.append('\n\t\tself.exportSelection = str()')
self.codelines.append('\n\t\tself.font = str()')
self.codelines.append('\n\t\tself.fontSize = str()')
self.codelines.append('\n\t\tself.foregroundColor = str()')
self.codelines.append('\n\t\tself.foregroundStipple = str()')
self.codelines.append('\n\t\tself.height = str()')
self.codelines.append('\n\t\tself.highlightBackgroundColor = str()')
self.codelines.append('\n\t\tself.highlightColor = str()')
self.codelines.append('\n\t\tself.highlightThickness = str()')
self.codelines.append('\n\t\tself.insertBackground = str()')
self.codelines.append('\n\t\tself.insertBorderwidth = str()')
self.codelines.append('\n\t\tself.insertOffTime = str()')
self.codelines.append('\n\t\tself.insertOnTime = str()')
self.codelines.append('\n\t\tself.insertWidth = str()')
self.codelines.append('\n\t\tself.justify = str()')
self.codelines.append('\n\t\tself.lmargin1 = str()')
self.codelines.append('\n\t\tself.lmargin2 = str()')
self.codelines.append('\n\t\tself.maxUndo = str()')
self.codelines.append('\n\t\tself.padX = str()')
self.codelines.append('\n\t\tself.padY = str()')
self.codelines.append('\n\t\tself.offset = str()')
self.codelines.append('\n\t\tself.overstrike = str()')
self.codelines.append('\n\t\tself.relief = str()')
self.codelines.append('\n\t\tself.rmargin = str()')
self.codelines.append('\n\t\tself.selectBackgroundColor = str()')
self.codelines.append('\n\t\tself.selectForegroundColor = str()')
self.codelines.append('\n\t\tself.selectBorderwidth = str()')
self.codelines.append('\n\t\tself.setGrid = str()')
self.codelines.append('\n\t\tself.spacing1 = str()')
self.codelines.append('\n\t\tself.spacing2 = str()')
self.codelines.append('\n\t\tself.spacing3 = str()')
self.codelines.append('\n\t\tself.state = str()')
self.codelines.append('\n\t\tself.tabs = str()')
self.codelines.append('\n\t\tself.takeFocus = str()')
self.codelines.append('\n\t\tself.text = str()')
self.codelines.append('\n\t\tself.underline = str()')
self.codelines.append('\n\t\tself.undo = str()')
self.codelines.append('\n\t\tself.width = str()')
self.codelines.append('\n\t\tself.wrap = str()')
self.codelines.append('\n\t\tself.xScrollCommand = str()')
self.codelines.append('\n\t\tself.yScrollCommand = str()')
self.codelines.append('\n')
self.codelines.append('\n')
self.codelines.append('\nclass Button(object):')
self.codelines.append('\n\tdef __init__(self):')
self.codelines.append('\n\t\tself.activeBackgroundColor = str()')
self.codelines.append('\n\t\tself.activeForegroundColor = str()')
self.codelines.append('\n\t\tself.anchor = str()')
self.codelines.append('\n\t\tself.backgroundColor = str()')
self.codelines.append('\n\t\tself.bitmap = str()')
self.codelines.append('\n\t\tself.borderwidth = str()')
self.codelines.append('\n\t\tself.command = str()')
self.codelines.append('\n\t\tself.compound = str()')
self.codelines.append('\n\t\tself.cursor = str()')
self.codelines.append('\n\t\tself.default = str()')
self.codelines.append('\n\t\tself.disableForeground = str()')
self.codelines.append('\n\t\tself.font = str()')
self.codelines.append('\n\t\tself.fontSize = str()')
self.codelines.append('\n\t\tself.foregroundColor = str()')
self.codelines.append('\n\t\tself.height = str()')
self.codelines.append('\n\t\tself.highlightBackgroundColor = str()')
self.codelines.append('\n\t\tself.highlightColor = str()')
self.codelines.append('\n\t\tself.highlightThickness = str()')
self.codelines.append('\n\t\tself.image = str()')
self.codelines.append('\n\t\tself.justify = str()')
self.codelines.append('\n\t\tself.overRelief = str()')
self.codelines.append('\n\t\tself.padX = str()')
self.codelines.append('\n\t\tself.padY = str()')
self.codelines.append('\n\t\tself.relief = str()')
self.codelines.append('\n\t\tself.repeatDelay = str()')
self.codelines.append('\n\t\tself.repeatInterval = str()')
self.codelines.append('\n\t\tself.state = str()')
self.codelines.append('\n\t\tself.takeFocus = str()')
self.codelines.append('\n\t\tself.text = str()')
self.codelines.append('\n\t\tself.textVariable = str()')
self.codelines.append('\n\t\tself.underline = str()')
self.codelines.append('\n\t\tself.width = str()')
self.codelines.append('\n\t\tself.wrapLength = str()')
self.codelines.append('\n')
self.codelines.append('\n')
self.codelines.append('\nclass Place(object):')
self.codelines.append('\n\tdef __init__(self):')
self.codelines.append('\n\t\tself.anchor = str()')
self.codelines.append('\n\t\tself.borderMode = str()')
self.codelines.append('\n\t\tself.height = str()')
self.codelines.append('\n\t\tself.width = str()')
self.codelines.append('\n\t\tself.relHeight = str()')
self.codelines.append('\n\t\tself.relWidth = str()')
self.codelines.append('\n\t\tself.relX = str()')
self.codelines.append('\n\t\tself.relY = str()')
self.codelines.append('\n\t\tself.offsetX = str()')
self.codelines.append('\n\t\tself.offsetY = str()')
self.codelines.append('\n')
self.codelines.append('\n')
################################################################################################################
# DEFINE GUI CLASS AND ADD __init__ METHOD
################################################################################################################
self.codelines.append('\n################################################################################################################')
self.codelines.append('\n#DEFINE GUI CLASS')
self.codelines.append('\n################################################################################################################')
self.codelines.append('\nclass gui(object):')
self.codelines.append('\n\tdef __init__(self, logfile):')
self.codelines.append('\n\t\tself.logfile = logfile')
self.codelines.append('\n\t\tself.root = tk.Tk()')
self.codelines.append('\n')
self.codelines.append('\n\t\tlogging.basicConfig(level=logging.DEBUG, ')
self.codelines.append('''\n\t\t\tformat='%(asctime)s %(levelname)-8s %(message)s', ''')
self.codelines.append('''\n\t\t\tfilename=self.logfile, filemode='w')''')
self.codelines.append('''\n\t\tlogging.info('[gui.__init__] Appwindow object created')''')
self.codelines.append('\n')
self.codelines.append('\n\t\tself.frameCount = ')
self.codelines.append(str(CountWidgetByType(self.inifile, "frame")))
self.codelines.append('\n\t\tself.Frame = Frame()')
self.codelines.append('\n\t\tself.frame_settings = Frame()')
self.codelines.append('\n\t\tself.FramePlace = Place()')
self.codelines.append('\n\t\tself.tkFrame = [tk.Frame() for i in range(self.frameCount)]')
self.codelines.append("\n\t\tlogging.info('[gui.__init__] Found configuration data for %d frame widgets' % self.frameCount)")
self.codelines.append('\n')
self.codelines.append('\n\t\tself.messageCount = ')
self.codelines.append(str(CountWidgetByType(self.inifile, "message")))
self.codelines.append('\n\t\tself.Message = Message()')
self.codelines.append('\n\t\tself.message_settings = Message()')
self.codelines.append('\n\t\tself.MessagePlace = Place()')
self.codelines.append('\n\t\tself.tkMessage = [tk.Message() for i in range(self.messageCount)]')
self.codelines.append("\n\t\tlogging.info('[gui.__init__] Found configuration data for %d message widgets' % self.messageCount)")
self.codelines.append('\n')
self.codelines.append('\n\t\tself.textCount = ')
self.codelines.append(str(CountWidgetByType(self.inifile, "text")))
self.codelines.append('\n\t\tself.Text = Text()')
self.codelines.append('\n\t\tself.text_settings = Text()')
self.codelines.append('\n\t\tself.TextPlace = Place()')
self.codelines.append('\n\t\tself.text_to_write = str()')
self.codelines.append('\n\t\tself.text_to_write_mem = str()')
self.codelines.append('\n\t\tself.tkText = [tk.Text() for i in range(self.textCount)]')
self.codelines.append("\n\t\tlogging.info('[gui.__init__] Found configuration data for %d text widgets' % self.textCount)")
self.codelines.append('\n')
self.codelines.append('\n\t\tself.buttonCount = ')
self.codelines.append(str(CountWidgetByType(self.inifile, "button")))
self.codelines.append('\n\t\tself.Button = Button()')
self.codelines.append('\n\t\tself.button_settings = Button()')
self.codelines.append('\n\t\tself.ButtonPlace = Place()')
self.codelines.append('\n\t\tself.tkButton = [tk.Button() for i in range(self.buttonCount)]')
self.codelines.append("\n\t\tlogging.info('[gui.__init__] Found configuration data for %d button widgets' % self.buttonCount)")
self.codelines.append('\n')
self.codelines.append('\n')
################################################################################################################
# CREATE TKINTER MAIN WINDOW
################################################################################################################
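        # Read the [main window] INI section, then emit create_window(), which applies geometry, background color and title to the root window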
self.Window.section = "main window"
Config = configparser.ConfigParser()
Config.read(self.inifile)
dict1 = {}
options = Config.options(self.Window.section)
for option in options:
try:
dict1[option] = Config.get(self.Window.section, option)
if dict1[option] == -1:
pass
            except Exception:
dict1[option] = None
self.Window.width = dict1['width']
self.Window.height = dict1['height']
self.Window.posX = dict1['pos x']
self.Window.posY = dict1['pos y']
self.Window.title = dict1['title']
self.Window.backgroundColor = dict1['background color']
self.codelines.append('\n\t################################################################################################################')
self.codelines.append('\n\t#CREATE TKINTER MAIN WINDOW')
self.codelines.append('\n\t################################################################################################################')
self.codelines.append('\n\tdef create_window(self):')
self.codelines.append('''\n\t\tlogging.info('[gui.create_window] Adjusting window geometry')''')
self.codelines.append('''\n\t\tself.root.geometry("%sx%s+%s+%s" % (''')
self.codelines.append(self.Window.width)
self.codelines.append(', ')
self.codelines.append(self.Window.height)
self.codelines.append(', ')
self.codelines.append(self.Window.posX)
self.codelines.append(', ')
self.codelines.append(self.Window.posY)
self.codelines.append('))')
if self.Window.backgroundColor != "":
self.codelines.append("\n\t\tself.root.config(background='")
self.codelines.append(self.Window.backgroundColor)
self.codelines.append("')")
self.codelines.append('''\n\t\tlogging.info('[gui.create_window] Adjusting window background color')''')
if self.Window.title != '':
self.codelines.append("\n\t\tself.root.title('")
self.codelines.append(self.Window.title)
self.codelines.append("')")
self.codelines.append('''\n\t\tlogging.info('[gui.create_window] Setting window title')''')
self.codelines.append('\n')
################################################################################################################
# CALL LOOP TO CREATE FRAME WIDGETS
################################################################################################################
self.codelines.append("\n\t\tlogging.info('[gui.create_window] Starting frame widget loop')")
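        # For each [frameN] INI section, load the widget and place() options and emit the matching tk.Frame config and place_configure calls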
for i in range(0, self.frameCount):
self.Frame.section = str("frame" + str(i+1))
Config = configparser.ConfigParser()
Config.read(self.inifile)
dict1 = {}
options = Config.options(self.Frame.section)
for option in options:
try:
dict1[option] = Config.get(self.Frame.section, option)
if dict1[option] == -1:
pass
                except Exception:
dict1[option] = None
self.Frame.backgroundColor = dict1['background color']
self.Frame.borderWidth = dict1['border width']
self.Frame.colormap = dict1['color map']
self.Frame.container = dict1['container']
self.Frame.cursor = dict1['cursor']
self.Frame.height = dict1['height']
self.Frame.highlightBackgroundColor = dict1['highlight background color']
self.Frame.highlightColor = dict1['highlight color']
self.Frame.highlightThickness = dict1['highlight thickness']
self.Frame.padX = dict1['pad x']
self.Frame.padY = dict1['pad y']
self.Frame.relief = dict1['relief']
self.Frame.takeFocus = dict1['take focus']
self.Frame.visual = dict1['visual']
self.Frame.width = dict1['width']
self.FramePlace.anchor = dict1['place anchor']
self.FramePlace.borderMode = dict1['place border mode']
self.FramePlace.height = dict1['place height']
self.FramePlace.width = dict1['place width']
self.FramePlace.relHeight = dict1['place rel height']
self.FramePlace.relWidth = dict1['place rel width']
self.FramePlace.relX = dict1['place rel x']
self.FramePlace.relY = dict1['place rel y']
self.FramePlace.offsetX = dict1['place offset x']
self.FramePlace.offsetY = dict1['place offset y']
self.codelines.append("\n\t\tlogging.info('[gui.create_window] Creating frame widget #%d' % ")
self.codelines.append(str(i+1))
self.codelines.append(')')
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append(']=tk.Frame()')
if self.Frame.backgroundColor != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
                self.codelines.append("].config(background='")
self.codelines.append(self.Frame.backgroundColor)
self.codelines.append("')")
if self.Frame.borderWidth != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].config(borderwidth=')
self.codelines.append(self.Frame.borderWidth)
self.codelines.append(")")
if self.Frame.colormap != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
                self.codelines.append("].config(colormap='")
self.codelines.append(self.Frame.colormap)
self.codelines.append("')")
if self.Frame.container != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].config(container=')
self.codelines.append(self.Frame.container)
self.codelines.append(")")
if self.Frame.cursor != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
                self.codelines.append("].config(cursor='")
self.codelines.append(self.Frame.cursor)
                self.codelines.append("')")
if self.Frame.height != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].config(height=')
self.codelines.append(self.Frame.height)
self.codelines.append(")")
if self.Frame.highlightBackgroundColor != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
                self.codelines.append("].config(highlightbackground='")
self.codelines.append(self.Frame.highlightBackgroundColor)
self.codelines.append("')")
if self.Frame.highlightColor != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
                self.codelines.append("].config(highlightcolor='")
self.codelines.append(self.Frame.highlightColor)
self.codelines.append("')")
if self.Frame.highlightThickness != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].config(highlightthickness=')
self.codelines.append(self.Frame.highlightThickness)
self.codelines.append(")")
if self.Frame.padX != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].config(padx=')
self.codelines.append(self.Frame.padX)
self.codelines.append(")")
if self.Frame.padY != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].config(pady=')
self.codelines.append(self.Frame.padY)
self.codelines.append(")")
if self.Frame.relief != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append("].config(relief='")
self.codelines.append(self.Frame.relief)
self.codelines.append("')")
if self.Frame.takeFocus != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].config(takefocus=')
self.codelines.append(self.Frame.takeFocus)
self.codelines.append(")")
if self.Frame.visual != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].config(visual=')
self.codelines.append(self.Frame.visual)
self.codelines.append(")")
if self.Frame.width != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].config(width=')
self.codelines.append(self.Frame.width)
self.codelines.append(")")
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].place()')
if self.FramePlace.anchor != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append("].place_configure(anchor='")
self.codelines.append(self.FramePlace.anchor)
self.codelines.append("')")
if self.FramePlace.borderMode != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append("].place_configure(bordermode='")
self.codelines.append(self.FramePlace.borderMode)
self.codelines.append("')")
if self.FramePlace.height != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(height=')
self.codelines.append(self.FramePlace.height)
self.codelines.append(")")
if self.FramePlace.width != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(width=')
self.codelines.append(self.FramePlace.width)
self.codelines.append(")")
if self.FramePlace.relHeight != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(relheight=')
self.codelines.append(self.FramePlace.relHeight)
self.codelines.append(")")
if self.FramePlace.relWidth != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(relwidth=')
self.codelines.append(self.FramePlace.relWidth)
self.codelines.append(")")
if self.FramePlace.relX != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(relx=')
self.codelines.append(self.FramePlace.relX)
self.codelines.append(")")
if self.FramePlace.relY != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(rely=')
self.codelines.append(self.FramePlace.relY)
self.codelines.append(")")
if self.FramePlace.offsetX != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(x=')
self.codelines.append(self.FramePlace.offsetX)
self.codelines.append(")")
if self.FramePlace.offsetY != '':
self.codelines.append('\n\t\tself.tkFrame[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(y=')
self.codelines.append(self.FramePlace.offsetY)
self.codelines.append(")")
self.codelines.append('\n')
################################################################################################################
# CALL LOOP TO CREATE MESSAGE WIDGETS
################################################################################################################
self.codelines.append("\n\t\tlogging.info('[gui.create_window] Starting message widget loop')")
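        # Same pattern for [messageN] sections: emit tk.Message config and place_configure calls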
for i in range(0, self.messageCount):
self.Message.section = str("message" + str(i+1))
Config = configparser.ConfigParser()
Config.read(self.inifile)
dict1 = {}
options = Config.options(self.Message.section)
for option in options:
try:
dict1[option] = Config.get(self.Message.section, option)
if dict1[option] == -1:
pass
                except Exception:
dict1[option] = None
self.Message.anchor = dict1['anchor']
self.Message.aspect = dict1['aspect']
self.Message.backgroundColor = dict1['background color']
self.Message.borderwidth = dict1['border width']
self.Message.cursor = dict1['cursor']
self.Message.font = dict1['font']
self.Message.fontSize = dict1['font size']
self.Message.foregroundColor = dict1['foreground color']
self.Message.highlightBackground = dict1['highlight background']
self.Message.highlightBackgroundColor = dict1['highlight background color']
self.Message.highlightThickness = dict1['highlight thickness']
self.Message.justify = dict1['justify']
self.Message.padX = dict1['pad x']
self.Message.padY = dict1['pad y']
self.Message.relief = dict1['relief']
self.Message.takeFocus = dict1['take focus']
self.Message.text = dict1['text']
self.Message.text = self.Message.text.replace('\\', '\\\\')
self.Message.textVariable = dict1['text variable']
self.Message.width = dict1['width']
self.MessagePlace.anchor = dict1['place anchor']
self.MessagePlace.borderMode = dict1['place border mode']
self.MessagePlace.height = dict1['place height']
self.MessagePlace.width = dict1['place width']
self.MessagePlace.relHeight = dict1['place rel height']
self.MessagePlace.relWidth = dict1['place rel width']
self.MessagePlace.relX = dict1['place rel x']
self.MessagePlace.relY = dict1['place rel y']
self.MessagePlace.offsetX = dict1['place offset x']
self.MessagePlace.offsetY = dict1['place offset y']
self.codelines.append("\n\t\tlogging.info('[gui.create_window] Creating message widget #%d' % ")
self.codelines.append(str(i+1))
self.codelines.append(')')
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append(']=tk.Message()')
if self.Message.anchor != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].config(anchor='")
self.codelines.append(self.Message.anchor)
self.codelines.append("')")
if self.Message.aspect != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
                self.codelines.append('].config(aspect=')
self.codelines.append(self.Message.aspect)
                self.codelines.append(")")
if self.Message.backgroundColor != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].config(background='")
self.codelines.append(self.Message.backgroundColor)
self.codelines.append("')")
if self.Message.borderwidth != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append('].config(borderwidth=')
self.codelines.append(self.Message.borderwidth)
self.codelines.append(")")
if self.Message.cursor != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].config(cursor='")
                self.codelines.append(self.Message.cursor)
self.codelines.append("')")
if self.Message.font != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].config(font=('")
self.codelines.append(self.Message.font)
self.codelines.append("', ")
self.codelines.append(self.Message.fontSize)
self.codelines.append("))")
if self.Message.foregroundColor != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].config(foreground='")
self.codelines.append(self.Message.foregroundColor)
self.codelines.append("')")
if self.Message.highlightBackground != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].config(highlightbackground='")
self.codelines.append(self.Message.highlightBackground)
self.codelines.append("')")
if self.Message.highlightBackgroundColor != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].config(highlightcolor='")
self.codelines.append(self.Message.highlightBackgroundColor)
self.codelines.append("')")
if self.Message.highlightThickness != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].config(highlightthickness=")
self.codelines.append(self.Message.highlightThickness)
self.codelines.append(")")
if self.Message.justify != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].config(justify='")
self.codelines.append(self.Message.justify)
self.codelines.append("')")
if self.Message.padX != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].config(padx=")
self.codelines.append(self.Message.padX)
self.codelines.append(")")
if self.Message.padY != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].config(pady=")
self.codelines.append(self.Message.padY)
self.codelines.append(")")
if self.Message.relief != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].config(relief='")
self.codelines.append(self.Message.relief)
self.codelines.append("')")
if self.Message.takeFocus != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].config(takefocus='")
self.codelines.append(self.Message.takeFocus)
self.codelines.append("')")
if self.Message.text != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].config(text='")
self.codelines.append(self.Message.text)
self.codelines.append("')")
if self.Message.textVariable != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].config(textvariable='")
self.codelines.append(self.Message.textVariable)
self.codelines.append("')")
if self.Message.width != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].config(width=")
self.codelines.append(self.Message.width)
self.codelines.append(")")
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append('].place()')
if self.MessagePlace.anchor != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].place_configure(anchor='")
self.codelines.append(self.MessagePlace.anchor)
self.codelines.append("')")
if self.MessagePlace.borderMode != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append("].place_configure(bordermode='")
self.codelines.append(self.MessagePlace.borderMode)
self.codelines.append("')")
if self.MessagePlace.height != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(height=')
self.codelines.append(self.MessagePlace.height)
self.codelines.append(")")
if self.MessagePlace.width != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(width=')
self.codelines.append(self.MessagePlace.width)
self.codelines.append(")")
if self.MessagePlace.relHeight != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(relheight=')
self.codelines.append(self.MessagePlace.relHeight)
self.codelines.append(")")
if self.MessagePlace.relWidth != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(relwidth=')
self.codelines.append(self.MessagePlace.relWidth)
self.codelines.append(")")
if self.MessagePlace.relX != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(relx=')
self.codelines.append(self.MessagePlace.relX)
self.codelines.append(")")
if self.MessagePlace.relY != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(rely=')
self.codelines.append(self.MessagePlace.relY)
self.codelines.append(")")
if self.MessagePlace.offsetX != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(x=')
self.codelines.append(self.MessagePlace.offsetX)
self.codelines.append(")")
if self.MessagePlace.offsetY != '':
self.codelines.append('\n\t\tself.tkMessage[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(y=')
self.codelines.append(self.MessagePlace.offsetY)
self.codelines.append(")")
self.codelines.append('\n')
################################################################################################################
# CALL LOOP TO CREATE TEXT WIDGETS
################################################################################################################
self.codelines.append("\n\t\tlogging.info('[gui.create_window] Starting text widget loop')")
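        # Same pattern for [textN] sections: emit tk.Text config calls; the 'text' value is inserted with insert(tk.END, ...)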
for i in range(0, self.textCount):
self.Text.section = str("text" + str(i+1))
Config = configparser.ConfigParser()
Config.read(self.inifile)
dict1 = {}
options = Config.options(self.Text.section)
for option in options:
try:
dict1[option] = Config.get(self.Text.section, option)
if dict1[option] == -1:
pass
                except Exception:
dict1[option] = None
self.Text.autoSeparators = dict1['auto separators']
self.Text.backgroundColor = dict1['background color']
self.Text.backgroundStipple = dict1['background stipple']
self.Text.borderwidth = dict1['border width']
self.Text.cursor = dict1['cursor']
self.Text.exportSelection = dict1['export selection']
self.Text.font = dict1['font']
self.Text.fontSize = dict1['font size']
self.Text.foregroundColor = dict1['foreground color']
self.Text.foregroundStipple = dict1['foreground stipple']
self.Text.height = dict1['height']
self.Text.highlightBackgroundColor = dict1['highlight background color']
self.Text.highlightColor = dict1['highlight color']
self.Text.highlightThickness = dict1['highlight thickness']
self.Text.insertBackground = dict1['insert background']
self.Text.insertBorderwidth = dict1['insert border width']
self.Text.insertOffTime = dict1['insert off time']
self.Text.insertOnTime = dict1['insert on time']
self.Text.insertWidth = dict1['insert width']
self.Text.justify = dict1['justify']
self.Text.lmargin1 = dict1['lmargin1']
self.Text.lmargin2 = dict1['lmargin2']
self.Text.maxUndo = dict1['max undo']
self.Text.padX = dict1['pad x']
self.Text.padY = dict1['pad y']
self.Text.offset = dict1['offset']
self.Text.overstrike = dict1['overstrike']
self.Text.relief = dict1['relief']
self.Text.rmargin = dict1['rmargin']
self.Text.selectBackgroundColor = dict1['select background color']
self.Text.selectForegroundColor = dict1['select foreground color']
self.Text.selectBorderwidth = dict1['select border width']
self.Text.setGrid = dict1['set grid']
self.Text.spacing1 = dict1['spacing1']
self.Text.spacing2 = dict1['spacing2']
self.Text.spacing3 = dict1['spacing3']
self.Text.state = dict1['state']
self.Text.tabs = dict1['tabs']
self.Text.takeFocus = dict1['take focus']
self.Text.text = dict1['text']
self.Text.text = self.Text.text.replace('\\', '\\\\')
self.Text.underline = dict1['underline']
self.Text.undo = dict1['undo']
self.Text.width = dict1['width']
self.Text.wrap = dict1['wrap']
self.Text.xScrollCommand = dict1['x scroll command']
self.Text.yScrollCommand = dict1['y scroll command']
self.TextPlace.anchor = dict1['place anchor']
self.TextPlace.borderMode = dict1['place border mode']
self.TextPlace.height = dict1['place height']
self.TextPlace.width = dict1['place width']
self.TextPlace.relHeight = dict1['place rel height']
self.TextPlace.relWidth = dict1['place rel width']
self.TextPlace.relX = dict1['place rel x']
self.TextPlace.relY = dict1['place rel y']
self.TextPlace.offsetX = dict1['place offset x']
self.TextPlace.offsetY = dict1['place offset y']
self.codelines.append("\n\t\tlogging.info('[gui.create_window] Creating text widget #%d' % ")
self.codelines.append(str(i+1))
self.codelines.append(')')
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append(']=tk.Text()')
if self.Text.autoSeparators != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(autoseparators='")
self.codelines.append(self.Text.autoSeparators)
self.codelines.append("')")
if self.Text.backgroundColor != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(bg='")
self.codelines.append(self.Text.backgroundColor)
self.codelines.append("')")
if self.Text.backgroundStipple != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(bgstipple='")
self.codelines.append(self.Text.backgroundStipple)
self.codelines.append("')")
if self.Text.borderwidth != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(bd=")
self.codelines.append(self.Text.borderwidth)
self.codelines.append(")")
if self.Text.foregroundStipple != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(fgstipple='")
self.codelines.append(self.Text.foregroundStipple)
self.codelines.append("')")
if self.Text.cursor != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(cursor='")
self.codelines.append(self.Text.cursor)
self.codelines.append("')")
if self.Text.exportSelection != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(exportselection='")
self.codelines.append(self.Text.exportSelection)
self.codelines.append("')")
if self.Text.font != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(font=('")
self.codelines.append(self.Text.font)
self.codelines.append("', ")
self.codelines.append(self.Text.fontSize)
self.codelines.append("))")
if self.Text.foregroundColor != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(foreground='")
self.codelines.append(self.Text.foregroundColor)
self.codelines.append("')")
if self.Text.height != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(height=")
self.codelines.append(self.Text.height)
self.codelines.append(")")
if self.Text.highlightBackgroundColor != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(highlightbackground='")
self.codelines.append(self.Text.highlightBackgroundColor)
self.codelines.append("')")
if self.Text.highlightColor != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(highlightcolor='")
self.codelines.append(self.Text.highlightColor)
self.codelines.append("')")
if self.Text.highlightThickness != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(highlightthickness=")
self.codelines.append(self.Text.highlightThickness)
self.codelines.append(")")
if self.Text.insertBackground != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(insertbackground='")
self.codelines.append(self.Text.insertBackground)
self.codelines.append("')")
if self.Text.insertBorderwidth != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
                self.codelines.append("].config(insertborderwidth=")
self.codelines.append(self.Text.insertBorderwidth)
                self.codelines.append(")")
if self.Text.insertOffTime != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(insertofftime=")
self.codelines.append(self.Text.insertOffTime)
self.codelines.append(")")
if self.Text.insertOnTime != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(insertontime=")
self.codelines.append(self.Text.insertOnTime)
self.codelines.append(")")
if self.Text.insertWidth != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(insertwidth=")
self.codelines.append(self.Text.insertWidth)
self.codelines.append(")")
if self.Text.lmargin1 != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(lmargin1=")
self.codelines.append(self.Text.lmargin1)
self.codelines.append(")")
if self.Text.lmargin2 != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(lmargin2=")
self.codelines.append(self.Text.lmargin2)
self.codelines.append(")")
if self.Text.maxUndo != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(maxundo=")
self.codelines.append(self.Text.maxUndo)
self.codelines.append(")")
if self.Text.padX != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(padx=")
self.codelines.append(self.Text.padX)
self.codelines.append(")")
if self.Text.padY != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(pady=")
self.codelines.append(self.Text.padY)
self.codelines.append(")")
if self.Text.offset != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(offset=")
self.codelines.append(self.Text.offset)
self.codelines.append(")")
if self.Text.overstrike != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(overstrike='")
self.codelines.append(self.Text.overstrike)
self.codelines.append("')")
if self.Text.selectBackgroundColor != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(selectbackground='")
self.codelines.append(self.Text.selectBackgroundColor)
self.codelines.append("')")
if self.Text.selectForegroundColor != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(selectforeground='")
self.codelines.append(self.Text.selectForegroundColor)
self.codelines.append("')")
if self.Text.selectBorderwidth != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(selectborderwidth=")
self.codelines.append(self.Text.selectBorderwidth)
self.codelines.append(")")
if self.Text.setGrid != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(setgrid='")
self.codelines.append(self.Text.setGrid)
self.codelines.append("')")
if self.Text.spacing1 != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(spacing1=")
self.codelines.append(self.Text.spacing1)
self.codelines.append(")")
if self.Text.spacing2 != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(spacing2=")
self.codelines.append(self.Text.spacing2)
self.codelines.append(")")
if self.Text.spacing3 != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(spacing3=")
self.codelines.append(self.Text.spacing3)
self.codelines.append(")")
if self.Text.state != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(state='")
self.codelines.append(self.Text.state)
self.codelines.append("')")
if self.Text.tabs != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(tabs='")
self.codelines.append(self.Text.tabs)
self.codelines.append("')")
if self.Text.takeFocus != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(takefocus='")
self.codelines.append(self.Text.takeFocus)
self.codelines.append("')")
if self.Text.text != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append('].insert(tk.END, "')
self.codelines.append(self.Text.text)
self.codelines.append('")')
if self.Text.underline != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(underline='")
self.codelines.append(self.Text.underline)
self.codelines.append("')")
if self.Text.undo != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(undo='")
self.codelines.append(self.Text.undo)
self.codelines.append("')")
if self.Text.width != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(width=")
self.codelines.append(self.Text.width)
self.codelines.append(")")
if self.Text.wrap != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(wrap='")
self.codelines.append(self.Text.wrap)
self.codelines.append("')")
if self.Text.xScrollCommand != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(xscrollcommand='")
self.codelines.append(self.Text.xScrollCommand)
self.codelines.append("')")
if self.Text.yScrollCommand != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].config(yscrollcommand='")
self.codelines.append(self.Text.yScrollCommand)
self.codelines.append("')")
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append('].place()')
if self.TextPlace.anchor != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].place_configure(anchor='")
self.codelines.append(self.TextPlace.anchor)
self.codelines.append("')")
if self.TextPlace.borderMode != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append("].place_configure(bordermode='")
self.codelines.append(self.TextPlace.borderMode)
self.codelines.append("')")
if self.TextPlace.height != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(height=')
self.codelines.append(self.TextPlace.height)
self.codelines.append(")")
if self.TextPlace.width != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(width=')
self.codelines.append(self.TextPlace.width)
self.codelines.append(")")
if self.TextPlace.relHeight != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(relheight=')
self.codelines.append(self.TextPlace.relHeight)
self.codelines.append(")")
if self.TextPlace.relWidth != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(relwidth=')
self.codelines.append(self.TextPlace.relWidth)
self.codelines.append(")")
if self.TextPlace.relX != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(relx=')
self.codelines.append(self.TextPlace.relX)
self.codelines.append(")")
if self.TextPlace.relY != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(rely=')
self.codelines.append(self.TextPlace.relY)
self.codelines.append(")")
if self.TextPlace.offsetX != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(x=')
self.codelines.append(self.TextPlace.offsetX)
self.codelines.append(")")
if self.TextPlace.offsetY != '':
self.codelines.append('\n\t\tself.tkText[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(y=')
self.codelines.append(self.TextPlace.offsetY)
self.codelines.append(")")
self.codelines.append('\n')
################################################################################################################
# CALL LOOP TO CREATE BUTTON WIDGETS
################################################################################################################
self.codelines.append("\n\t\tlogging.info('[gui.create_window] Starting button widget loop')")
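        # Same pattern for [buttonN] sections: emit tk.Button config and place_configure calls; the command option is wired below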
for i in range(0, self.buttonCount):
self.Button.section = str("button" + str(i+1))
Config = configparser.ConfigParser()
Config.read(self.inifile)
dict1 = {}
options = Config.options(self.Button.section)
for option in options:
try:
dict1[option] = Config.get(self.Button.section, option)
if dict1[option] == -1:
pass
                except Exception:
dict1[option] = None
self.Button.backgroundColor = dict1['background color']
self.Button.bitmap = dict1['bitmap']
self.Button.borderwidth = dict1['border width']
self.Button.command = dict1['command']
self.Button.compound = dict1['compound']
self.Button.cursor = dict1['cursor']
self.Button.default = dict1['default']
self.Button.disableForeground = dict1['disable foreground']
self.Button.font = dict1['font']
self.Button.fontSize = dict1['font size']
self.Button.foregroundColor = dict1['foreground color']
self.Button.height = dict1['height']
self.Button.highlightBackgroundColor = dict1['highlight background color']
self.Button.highlightColor = dict1['highlight color']
self.Button.highlightThickness = dict1['highlight thickness']
self.Button.image = dict1['image']
self.Button.justify = dict1['justify']
self.Button.overRelief = dict1['over relief']
self.Button.padX = dict1['pad x']
self.Button.padY = dict1['pad y']
self.Button.relief = dict1['relief']
self.Button.repeatDelay = dict1['repeat delay']
self.Button.repeatInterval = dict1['repeat interval']
self.Button.state = dict1['state']
self.Button.takeFocus = dict1['take focus']
self.Button.text = dict1['text']
self.Button.text = self.Button.text.replace('\\', '\\\\')
self.Button.textVariable = dict1['text variable']
self.Button.underline = dict1['underline']
self.Button.width = dict1['width']
self.Button.wrapLength = dict1['wrap length']
self.ButtonPlace.anchor = dict1['place anchor']
self.ButtonPlace.borderMode = dict1['place border mode']
self.ButtonPlace.height = dict1['place height']
self.ButtonPlace.width = dict1['place width']
self.ButtonPlace.relHeight = dict1['place rel height']
self.ButtonPlace.relWidth = dict1['place rel width']
self.ButtonPlace.relX = dict1['place rel x']
self.ButtonPlace.relY = dict1['place rel y']
self.ButtonPlace.offsetX = dict1['place offset x']
self.ButtonPlace.offsetY = dict1['place offset y']
self.codelines.append("\n\t\tlogging.info('[gui.create_window] Creating button widget #%d' % ")
self.codelines.append(str(i+1))
self.codelines.append(')')
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append(']=tk.Button()')
if self.Button.backgroundColor != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(background='")
self.codelines.append(self.Button.backgroundColor)
self.codelines.append("')")
if self.Button.bitmap != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(bitmap='")
self.codelines.append(self.Button.bitmap)
self.codelines.append("')")
if self.Button.borderwidth != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
                self.codelines.append("].config(borderwidth=")
self.codelines.append(self.Button.borderwidth)
                self.codelines.append(")")
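            # The command value is emitted as a lambda default argument so gui.callback receives this button's number (the INI command value)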
if self.Button.command != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(command=lambda instance=")
self.codelines.append(self.Button.command)
self.codelines.append(": gui.callback(self, instance))")
if self.Button.compound != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(compound='")
self.codelines.append(self.Button.compound)
self.codelines.append("')")
if self.Button.cursor != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(cursor='")
self.codelines.append(self.Button.cursor)
self.codelines.append("')")
if self.Button.default != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(default='")
self.codelines.append(self.Button.default)
self.codelines.append("')")
if self.Button.disableForeground != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
                self.codelines.append("].config(disabledforeground='")
self.codelines.append(self.Button.disableForeground)
self.codelines.append("')")
if self.Button.font != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(font=('")
self.codelines.append(self.Button.font)
self.codelines.append("', ")
self.codelines.append(self.Button.fontSize)
self.codelines.append("))")
if self.Button.foregroundColor != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(foreground='")
self.codelines.append(self.Button.foregroundColor)
self.codelines.append("')")
if self.Button.height != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(height=")
self.codelines.append(self.Button.height)
self.codelines.append(")")
if self.Button.highlightBackgroundColor != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(highlightbackground='")
self.codelines.append(self.Button.highlightBackgroundColor)
self.codelines.append("')")
if self.Button.highlightColor != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
                self.codelines.append("].config(highlightcolor='")
self.codelines.append(self.Button.highlightColor)
self.codelines.append("')")
if self.Button.highlightThickness != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(highlightthickness=")
self.codelines.append(self.Button.highlightThickness)
self.codelines.append(")")
if self.Button.image != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(image='")
self.codelines.append(self.Button.image)
self.codelines.append("')")
if self.Button.justify != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(justify='")
self.codelines.append(self.Button.justify)
self.codelines.append("')")
if self.Button.overRelief != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(overrelief='")
self.codelines.append(self.Button.overRelief)
self.codelines.append("')")
if self.Button.padX != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(padx=")
self.codelines.append(self.Button.padX)
self.codelines.append(")")
if self.Button.padY != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(pady=")
self.codelines.append(self.Button.padY)
self.codelines.append(")")
if self.Button.relief != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(relief='")
self.codelines.append(self.Button.relief)
self.codelines.append("')")
if self.Button.repeatDelay != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(repeatdelay=")
self.codelines.append(self.Button.repeatDelay)
self.codelines.append(")")
if self.Button.repeatInterval != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(repeatinterval=")
self.codelines.append(self.Button.repeatInterval)
self.codelines.append(")")
if self.Button.state != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(state='")
self.codelines.append(self.Button.state)
self.codelines.append("')")
if self.Button.takeFocus != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(takefocus='")
self.codelines.append(self.Button.takeFocus)
self.codelines.append("')")
if self.Button.text != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(text='")
self.codelines.append(self.Button.text)
self.codelines.append("')")
if self.Button.textVariable != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(textvariable='")
self.codelines.append(self.Button.textVariable)
self.codelines.append("')")
if self.Button.underline != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(underline='")
self.codelines.append(self.Button.underline)
self.codelines.append("')")
if self.Button.width != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(width=")
self.codelines.append(self.Button.width)
self.codelines.append(")")
if self.Button.wrapLength != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].config(wraplength='")
self.codelines.append(self.Button.wrapLength)
self.codelines.append("')")
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append('].place()')
if self.ButtonPlace.anchor != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].place_configure(anchor='")
self.codelines.append(self.ButtonPlace.anchor)
self.codelines.append("')")
if self.ButtonPlace.borderMode != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append("].place_configure(bordermode='")
self.codelines.append(self.ButtonPlace.borderMode)
self.codelines.append("')")
if self.ButtonPlace.height != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(height=')
self.codelines.append(self.ButtonPlace.height)
self.codelines.append(")")
if self.ButtonPlace.width != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(width=')
self.codelines.append(self.ButtonPlace.width)
self.codelines.append(")")
if self.ButtonPlace.relHeight != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(relheight=')
self.codelines.append(self.ButtonPlace.relHeight)
self.codelines.append(")")
if self.ButtonPlace.relWidth != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(relwidth=')
self.codelines.append(self.ButtonPlace.relWidth)
self.codelines.append(")")
if self.ButtonPlace.relX != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(relx=')
self.codelines.append(self.ButtonPlace.relX)
self.codelines.append(")")
if self.ButtonPlace.relY != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(rely=')
self.codelines.append(self.ButtonPlace.relY)
self.codelines.append(")")
if self.ButtonPlace.offsetX != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(x=')
self.codelines.append(self.ButtonPlace.offsetX)
self.codelines.append(")")
if self.ButtonPlace.offsetY != '':
self.codelines.append('\n\t\tself.tkButton[')
self.codelines.append(str(i))
self.codelines.append('].place_configure(y=')
self.codelines.append(self.ButtonPlace.offsetY)
self.codelines.append(")")
self.codelines.append('\n')
################################################################################################################
# ADD CALL FOR TKINTER MAIN LOOP TO SPAWN APP WINDOW
################################################################################################################
self.codelines.append('\n\t\tself.root.mainloop()')
self.codelines.append('\n')
self.codelines.append('\n')
################################################################################################################
# ADD BUTTON CALLBACK METHOD
################################################################################################################
self.codelines.append('\n\tdef callback(self, instance):')
self.codelines.append('\n\t\tself.instance = instance')
self.codelines.append('\n')
for i in range(0, self.buttonCount):
self.codelines.append('\n\t\tif self.instance == ')
self.codelines.append(str(i+1))
self.codelines.append(':')
self.codelines.append("\n\t\t\tlogging.info('[gui.callback] Button %d pressed' % ")
self.codelines.append(str(i+1))
self.codelines.append(")")
if i == 0:
self.codelines.append('\n\t\t\tgui.kill_root(self)')
self.codelines.append('\n')
self.codelines.append('\n')
################################################################################################################
# ADD OTHER HELPFUL METHODS
################################################################################################################
self.codelines.append('\n\tdef return_root(self):')
self.codelines.append('\n\t\treturn self.root')
self.codelines.append('\n')
self.codelines.append('\n')
self.codelines.append('\n\tdef kill_root(self):')
self.codelines.append('\n\t\tself.root.destroy()')
self.codelines.append('\n\t\treturn')
self.codelines.append('\n')
self.codelines.append('\n')
self.codelines.append('\n\tdef return_text(self, field):')
self.codelines.append('\n\t\tself.field = field')
self.codelines.append('\n\t\tself.address = self.field-1')
self.codelines.append('\n\t\treturn self.tkText[self.address].get("1.0", tk.END)')
self.codelines.append('\n')
self.codelines.append('\n')
self.codelines.append('\n\tdef write_text(self, field, text_to_write):')
self.codelines.append('\n\t\tself.field = field')
self.codelines.append('\n\t\tself.address = self.field-1')
self.codelines.append('\n\t\tself.text_to_write = text_to_write')
self.codelines.append('\n\t\tif self.text_to_write != self.text_to_write_mem:')
self.codelines.append('\n\t\t\tself.tkText[self.address].insert(tk.END, self.text_to_write)')
self.codelines.append('\n\t\t\tself.text_to_write_mem = self.text_to_write')
self.codelines.append('\n\t\treturn')
self.codelines.append('\n')
self.codelines.append('\n')
self.codelines.append('\n\tdef clear_text(self, field):')
self.codelines.append('\n\t\tself.field = field')
self.codelines.append('\n\t\tself.address = self.field-1')
self.codelines.append('\n\t\tself.tkText[self.address].delete("1.0", tk.END)')
self.codelines.append('\n\t\treturn')
self.codelines.append('\n')
self.codelines.append('\n')
################################################################################################################
# ADD CODE TO END OF SCRIPT TO MAKE IT SELF-EXECUTING
################################################################################################################
self.codelines.append('\nif __name__ == "__main__":')
self.codelines.append("\n\tappwindow = gui('debug.log')")
self.codelines.append('\n\tappwindow.create_window()')
################################################################################################################
# ASSEMBLE LIST INTO A SINGLE STRING AND WRITE TO OUTPUT FILE
################################################################################################################
self.codetowritetofile = ''.join(self.codelines)
self.path, self.junk = os.path.split(self.inifile)
self.outputfile = os.path.join(self.path, 'gui.py')
self.outputfile = self.outputfile.replace('scripts', 'output')
        with open(self.outputfile, 'w') as f:
            f.write(self.codetowritetofile) | gpl-2.0 | 8,116,847,840,223,247,000 | 50.613244 | 149 | 0.524238 | false |
expectocode/telegram-analysis | mostactiveusers.py | 2 | 5645 | #!/usr/bin/env python3
"""
A program to plot a pie chart of the most active users in a Telegram chat
"""
import argparse
from json import loads
from os import path
from collections import defaultdict
import matplotlib.pyplot as plt
from datetime import date,datetime
from operator import itemgetter
def parse_args():
parser = argparse.ArgumentParser(description=
"Create a pie chart showing the most active users in a Telegram chat")
required = parser.add_argument_group('required arguments')
required.add_argument('-f','--file',
help='the jsonl chatlog file to analyse',
required = True
)
parser.add_argument(
'-o', '--output-folder',
help='the folder to save the pie chart image in.'
        ' Using this option will make the graph not display on screen.')
parser.add_argument(
'-s','--figure-size',
help='the size of the figure shown or saved (X and Y size).'
        ' Choose an appropriate value for your screen size. Default 12 8.',
nargs=2,type=int,default = [12,8]
)
parser.add_argument(
'-m','--minimum-percentage',
help='the minimum percentage of activity a person must contribute '
'to get their own slice of the pie chart. Default 2',
type=float,default=2
)
parser.add_argument(
'-d','--date-range',
help='the range of dates you want to look at data between. '
'Must be in format YYYY-MM-DD YYYY-MM-DD with the first date '
'the start of the range, and the second the end. Example: '
"-d '2017-11-20 2017-05-15'. Make sure you don't put a day "
'that is too high for the month eg 30th February.',
default="1000-01-01 4017-01-01"
#hopefully no chatlogs contain these dates :p
)
return parser.parse_args()
def get_dates(arg_dates):
if " " not in arg_dates:
print("You must put a space between start and end dates")
exit()
daterange = arg_dates.split()
start_date = datetime.strptime(daterange[0], "%Y-%m-%d").date()
end_date = datetime.strptime(daterange[1], "%Y-%m-%d").date()
return (start_date,end_date)
def extract_infos(event):
text_date = date.fromtimestamp(event['date'])
text_length = len(event['text'])
    text_userid = event['from']['peer_id']
text_printname = event['from']['print_name']
return text_date,text_length,text_userid,text_printname
def make_ddict(jsonfile,start,end):
"""
Make a defaultdict with user IDs as keys and char count as values
Return (dict of IDs -> names, total chars, defaultdict)
"""
names = {} #dict
counter = defaultdict(int)
total_datapoints = 0
events = (loads(line) for line in jsonfile)
messages = (extract_infos(event) for event in events if 'text' in event)
messages = ((when,what,uid,who) for (when,what,uid,who) in messages if when >= start and when <= end)
for (msgdate,textlength,userid,printname) in messages:
total_datapoints += textlength
if str(userid) not in names:
#this code assumes that chatlog has most recent events first
#which is default for telegram-history-dumper
names[str(userid)] = printname
if printname == "":
names[str(userid)] = str(userid)
counter[userid] += textlength
return names,total_datapoints,counter
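# Illustrative example of the structures returned by make_ddict (hypothetical data,
# not taken from a real chatlog): for a log with two users who wrote 15 and 12
# characters respectively, the return values would look roughly like
#   names            -> {'111': 'alice', '222': 'bob'}
#   total_datapoints -> 27
#   counter          -> defaultdict(int, {111: 15, 222: 12})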
def annotate_figure(filename):
plt.title("Most active users in {} by chars sent".format(filename), y=1.05)
plt.axis('equal')
#so it plots as a circle
def make_trimmed_ddict(counter,total_datapoints,names,min_percent):
trimmedCounter = defaultdict(int)
#find percentile to start adding people to "other" at
min_chars = (min_percent/100) * total_datapoints
for person, frequency in counter.items():
if frequency < min_chars:
trimmedCounter["other"] += frequency
else:
if names[str(person)] == "other":
print("Someone in this chat is called 'other'. "
"They will be absorbed into the 'other' pie slice.")
trimmedCounter[names[str(person)]] = frequency
return trimmedCounter
def main():
"""
main function
"""
args = parse_args()
filepath = args.file
savefolder = args.output_folder
figure_size = (args.figure_size[0],args.figure_size[1])
start_date,end_date = get_dates(args.date_range)
other_percent = args.minimum_percentage
#default 2
#anyone who sends less than this percentage of the total is 'other'
filename = path.splitext(path.split(filepath)[-1])[0]
#make filename just the name of the file, with no leading directories and no extension
with open(filepath, 'r') as jsonfile:
names,total_datapoints,counter = make_ddict(jsonfile,start_date,end_date)
trimmedCounter = make_trimmed_ddict(counter,total_datapoints,names,other_percent)
sortedCounter = sorted(trimmedCounter.items(), key=itemgetter(1))
print(sortedCounter)
freqList = list(zip(*sortedCounter))
plt.figure(figsize=figure_size)
plt.pie(freqList[1], labels=freqList[0], startangle=135)
annotate_figure(filename)
# plt.set_lw(10)
if savefolder is not None:
#if there is a given folder to save the figure in, save it there
plt.savefig("{}/Most active users in {}.png".format(savefolder, filename))
else:
#if a save folder was not specified, just open a window to display graph
plt.show()
if __name__ == "__main__":
main()
| mit | -3,997,434,891,372,640,000 | 36.885906 | 105 | 0.639504 | false |
softlayer/softlayer-python | SoftLayer/managers/account.py | 2 | 11066 | """
SoftLayer.account
~~~~~~~~~~~~~~~~~~~~~~~
Account manager
:license: MIT, see License for more details.
"""
import logging
from SoftLayer import SoftLayerAPIError
from SoftLayer import utils
# Invalid names are ignored due to long method names and short argument names
# pylint: disable=invalid-name, no-self-use
LOGGER = logging.getLogger(__name__)
class AccountManager(utils.IdentifierMixin, object):
"""Common functions for getting information from the Account service
:param SoftLayer.API.BaseClient client: the client instance
"""
_DEFAULT_BILLING_ITEM_MASK = """mask[
orderItem[id,order[id,userRecord[id,email,displayName,userStatus]]],
nextInvoiceTotalRecurringAmount,
location, hourlyFlag, children
]"""
def __init__(self, client):
self.client = client
def get_summary(self):
"""Gets some basic account information
:return: Account object
"""
mask = """mask[
nextInvoiceTotalAmount,
pendingInvoice[invoiceTotalAmount],
blockDeviceTemplateGroupCount,
dedicatedHostCount,
domainCount,
hardwareCount,
networkStorageCount,
openTicketCount,
networkVlanCount,
subnetCount,
userCount,
virtualGuestCount
]
"""
return self.client.call('Account', 'getObject', mask=mask)
def get_upcoming_events(self, event_type):
"""Retrieves a list of Notification_Occurrence_Events that have not ended yet
:param: String event_type: notification event type.
:return: SoftLayer_Notification_Occurrence_Event
"""
mask = "mask[id, subject, startDate, endDate, modifyDate, statusCode, acknowledgedFlag, " \
"impactedResourceCount, updateCount, systemTicketId, notificationOccurrenceEventType[keyName]]"
_filter = {
'notificationOccurrenceEventType': {
'keyName': {
'operation': event_type
}
}
}
self.add_event_filter(_filter, event_type)
return self.client.call('Notification_Occurrence_Event', 'getAllObjects', filter=_filter, mask=mask, iter=True)
@staticmethod
def add_event_filter(_filter, event_type):
"""Add data to the object filter.
:param: _filter: event filter.
:param: string event_type: event type.
"""
if event_type == 'PLANNED':
_filter['endDate'] = {
'operation': '> sysdate - 2'
}
_filter['startDate'] = {
'operation': 'orderBy',
'options': [{
'name': 'sort',
'value': ['DESC']
}]
}
if event_type == 'UNPLANNED_INCIDENT':
_filter['modifyDate'] = {
'operation': '> sysdate - 2'
}
if event_type == 'ANNOUNCEMENT':
_filter['statusCode'] = {
'keyName': {
'operation': 'in',
'options': [{
'name': 'data',
'value': ['PUBLISHED']
}]
}
}
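    # Worked example (illustrative): for event_type == 'PLANNED', the filter assembled by
    # get_upcoming_events together with this method ends up looking roughly like
    #   {'notificationOccurrenceEventType': {'keyName': {'operation': 'PLANNED'}},
    #    'endDate': {'operation': '> sysdate - 2'},
    #    'startDate': {'operation': 'orderBy',
    #                  'options': [{'name': 'sort', 'value': ['DESC']}]}}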
def ack_event(self, event_id):
"""Acknowledge an event. This mostly prevents it from appearing as a notification in the control portal.
:param int event_id: Notification_Occurrence_Event ID you want to ack
:return: True on success, Exception otherwise.
"""
return self.client.call('Notification_Occurrence_Event', 'acknowledgeNotification', id=event_id)
def get_event(self, event_id):
"""Gets details about a maintenance event
:param int event_id: Notification_Occurrence_Event ID
:return: Notification_Occurrence_Event
"""
mask = """mask[
acknowledgedFlag,
attachments,
impactedResources,
statusCode,
updates,
notificationOccurrenceEventType]
"""
return self.client.call('Notification_Occurrence_Event', 'getObject', id=event_id, mask=mask)
def get_invoices(self, limit=50, closed=False, get_all=False):
"""Gets an accounts invoices.
:param int limit: Number of invoices to get back in a single call.
:param bool closed: If True, will also get CLOSED invoices
:param bool get_all: If True, will paginate through invoices until all have been retrieved.
:return: Billing_Invoice
"""
mask = "mask[invoiceTotalAmount, itemCount]"
_filter = {
'invoices': {
'createDate': {
'operation': 'orderBy',
'options': [{
'name': 'sort',
'value': ['DESC']
}]
},
'statusCode': {'operation': 'OPEN'},
}
}
if closed:
del _filter['invoices']['statusCode']
return self.client.call('Account', 'getInvoices', mask=mask, filter=_filter, iter=get_all, limit=limit)
def get_billing_items(self, identifier):
"""Gets all topLevelBillingItems from a specific invoice
:param int identifier: Invoice Id
:return: Billing_Invoice_Item
"""
mask = """mask[
id, description, hostName, domainName, oneTimeAfterTaxAmount, recurringAfterTaxAmount, createDate,
categoryCode,
category[name],
location[name],
children[id, category[name], description, oneTimeAfterTaxAmount, recurringAfterTaxAmount]
]"""
return self.client.call(
'Billing_Invoice',
'getInvoiceTopLevelItems',
id=identifier,
mask=mask,
iter=True,
limit=100
)
def get_account_billing_items(self, mask=None):
"""Gets all the topLevelBillingItems currently active on the account
:param string mask: Object Mask
:return: Billing_Item
"""
if mask is None:
mask = """mask[
orderItem[id,order[id,userRecord[id,email,displayName,userStatus]]],
nextInvoiceTotalRecurringAmount,
location, hourlyFlag
]"""
object_filter = {
"allTopLevelBillingItems": {
"cancellationDate": {
"operation": "is null"
},
"createDate": utils.query_filter_orderby()
}
}
return self.client.call('Account', 'getAllTopLevelBillingItems',
mask=mask, filter=object_filter, iter=True, limit=100)
def get_billing_item(self, identifier, mask=None):
"""Gets details about a billing item
:param int identifier: Billing_Item id
:param string mask: Object mask to use.
:return: Billing_Item
"""
if mask is None:
mask = self._DEFAULT_BILLING_ITEM_MASK
return self.client.call('Billing_Item', 'getObject', id=identifier, mask=mask)
def get_billing_item_from_invoice(self, identifier, mask=None):
"""Gets details about a billing item of a billing invoice item
:param int identifier: Billing_Invoice_Item id
:param mask: Object mask to use.
:return: Billing_Item
"""
if mask is None:
mask = self._DEFAULT_BILLING_ITEM_MASK
return self.client.call('Billing_Invoice_Item', 'getBillingItem', id=identifier, mask=mask)
def get_item_detail(self, identifier):
"""Gets details about a billing item
:param int identifier: Billing_Item id or Billing_Invoice_Item
:return: Billing_Item
"""
try:
return self.get_billing_item(identifier)
except SoftLayerAPIError as exception:
if exception.faultCode == 404:
return self.get_billing_item_from_invoice(identifier)
raise
def cancel_item(self, identifier, reason="No longer needed", note=None):
"""Cancels a specific billing item with a reason
:param int identifier: Billing_Item id
:param string reason: A cancellation reason
:param string note: Custom note to set when cancelling. Defaults to information about who canceled the item.
:return: bool
"""
if note is None:
user = self.client.call('Account', 'getCurrentUser', mask="mask[id,displayName,email,username]")
note = "Cancelled by {} with the SLCLI".format(user.get('username'))
return self.client.call('Billing_Item', 'cancelItem', False, True, reason, note, id=identifier)
def get_account_all_billing_orders(self, limit=100, mask=None):
"""Gets all the topLevelBillingItems currently active on the account
:param string mask: Object Mask
:return: Billing_Item
"""
if mask is None:
mask = """
orderTotalAmount, userRecord,
initialInvoice[id,amount,invoiceTotalAmount],
items[description]
"""
return self.client.call('Billing_Order', 'getAllObjects',
limit=limit, mask=mask)
def get_routers(self, mask=None, location=None):
"""Gets all the routers currently active on the account
:param string mask: Object Mask
:param string location: location string
:returns: Routers
"""
object_filter = ''
if location:
object_filter = {
'routers': {
'topLevelLocation': {'name': {'operation': location}}
}
}
return self.client['SoftLayer_Account'].getRouters(filter=object_filter, mask=mask)
def get_network_message_delivery_accounts(self):
"""Gets all Network Message delivery accounts.
:returns: Network Message delivery accounts
"""
_mask = """vendor,type"""
return self.client['SoftLayer_Account'].getNetworkMessageDeliveryAccounts(mask=_mask)
def get_active_virtual_licenses(self):
"""Gets all active virtual licenses account.
:returns: active virtual licenses account
"""
_mask = """billingItem[categoryCode,createDate,description],
key,id,ipAddress,
softwareDescription[longDescription,name,manufacturer],
subnet"""
return self.client['SoftLayer_Account'].getActiveVirtualLicenses(mask=_mask)
def get_active_account_licenses(self):
"""Gets all active account licenses.
:returns: Active account Licenses
"""
_mask = """billingItem,softwareDescription"""
return self.client['SoftLayer_Account'].getActiveAccountLicenses(mask=_mask)
| mit | -3,087,197,820,268,709,400 | 32.737805 | 119 | 0.570034 | false |
stevei101/oslo.messaging | oslo_messaging/_executors/impl_eventlet.py | 7 | 1765 | # Copyright 2013 Red Hat, Inc.
# Copyright 2013 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
from eventlet.green import threading as greenthreading
import futurist
from oslo_messaging._executors import impl_pooledexecutor
from oslo_utils import eventletutils
LOG = logging.getLogger(__name__)
class EventletExecutor(impl_pooledexecutor.PooledExecutor):
"""A message executor which integrates with eventlet.
This is an executor which polls for incoming messages from a greenthread
and dispatches each message in its own greenthread powered async
executor.
The stop() method kills the message polling greenthread and the wait()
method waits for all executor maintained greenthreads to complete.
"""
def __init__(self, conf, listener, dispatcher):
super(EventletExecutor, self).__init__(conf, listener, dispatcher)
eventletutils.warn_eventlet_not_patched(
expected_patched_modules=['thread'],
what="the 'oslo.messaging eventlet executor'")
_executor_cls = futurist.GreenThreadPoolExecutor
_lock_cls = greenthreading.Lock
_event_cls = greenthreading.Event
_thread_cls = greenthreading.Thread
| apache-2.0 | -4,453,021,982,890,011,000 | 36.553191 | 78 | 0.734844 | false |
Owlz/StegoDone | tests/multi/test_exif.py | 1 | 1535 |
import logging
logging.basicConfig(level=logging.DEBUG,format='%(name)s - %(levelname)s - %(message)s', datefmt='%m/%d/%Y %I:%M:%S %p')
import tempfile
import os
import hashlib
import stegoveritas
SCRIPTDIR = os.path.dirname(os.path.realpath(__file__))
def test_exif_jpg():
with tempfile.TemporaryDirectory() as tmpdirname:
args = [os.path.join(SCRIPTDIR, 'owl_exif_comment.jpg'), '-out', tmpdirname, '-exif']
veritas = stegoveritas.StegoVeritas(args=args)
veritas.run()
exifdir = os.path.join(tmpdirname, 'exif')
assert os.path.isdir(exifdir)
_, _, files = next(os.walk(exifdir))
found = False
for f in files:
with open(os.path.join(exifdir, f),'r') as f:
if 'This is a comment string.' in f.read():
found = True
break
assert found == True
def test_exif_png():
with tempfile.TemporaryDirectory() as tmpdirname:
args = [os.path.join(SCRIPTDIR, 'owl_exif.png'), '-out', tmpdirname, '-exif']
veritas = stegoveritas.StegoVeritas(args=args)
veritas.run()
exifdir = os.path.join(tmpdirname, 'exif')
assert os.path.isdir(exifdir)
_, _, files = next(os.walk(exifdir))
found = False
for f in files:
with open(os.path.join(exifdir, f),'r') as f:
if "This is my inserted png chunk." in f.read():
found = True
break
assert found == True
| gpl-2.0 | -3,754,606,121,038,039,000 | 28.519231 | 120 | 0.571336 | false |
scylladb/scylla-cluster-tests | sdcm/utils/prepare_region.py | 1 | 21172 | import logging
from ipaddress import ip_network
from functools import cached_property
import boto3
import botocore
from mypy_boto3_ec2 import EC2Client, EC2ServiceResource
from sdcm.keystore import KeyStore
LOGGER = logging.getLogger(__name__)
class AwsRegion:
VPC_NAME = "SCT-vpc"
VPC_CIDR = ip_network("10.0.0.0/16")
SECURITY_GROUP_NAME = "SCT-sg"
SUBNET_NAME = "SCT-subnet-{availability_zone}"
INTERNET_GATEWAY_NAME = "SCT-igw"
ROUTE_TABLE_NAME = "SCT-rt"
KEY_PAIR_NAME = "scylla-qa-ec2" # TODO: change legacy name to sct-keypair-aws
def __init__(self, region_name):
self.region_name = region_name
self.client: EC2Client = boto3.client("ec2", region_name=region_name)
self.resource: EC2ServiceResource = boto3.resource("ec2", region_name=region_name)
@property
def sct_vpc(self) -> EC2ServiceResource.Vpc:
vpcs = self.client.describe_vpcs(Filters=[{"Name": "tag:Name", "Values": [self.VPC_NAME]}])
LOGGER.debug(f"Found VPCs: {vpcs}")
existing_vpcs = vpcs.get("Vpcs", [])
if len(existing_vpcs) == 0:
return None
assert len(existing_vpcs) == 1, \
f"More than 1 VPC with {self.VPC_NAME} found in {self.region_name}: {existing_vpcs}"
return self.resource.Vpc(existing_vpcs[0]["VpcId"]) # pylint: disable=no-member
def create_vpc(self):
LOGGER.info("Going to create VPC...")
if self.sct_vpc:
LOGGER.warning(f"VPC '{self.VPC_NAME}' already exists! Id: '{self.sct_vpc.vpc_id}'.")
return self.sct_vpc.vpc_id
else:
result = self.client.create_vpc(CidrBlock=str(self.VPC_CIDR), AmazonProvidedIpv6CidrBlock=True)
vpc_id = result["Vpc"]["VpcId"]
vpc = self.resource.Vpc(vpc_id) # pylint: disable=no-member
vpc.create_tags(Tags=[{"Key": "Name", "Value": self.VPC_NAME}])
LOGGER.info("'%s' with id '%s' created. Waiting until it becomes available...", self.VPC_NAME, vpc_id)
vpc.wait_until_available()
return vpc_id
@cached_property
def availability_zones(self):
response = self.client.describe_availability_zones()
return [zone["ZoneName"]for zone in response['AvailabilityZones'] if zone["State"] == "available"]
@cached_property
def vpc_ipv6_cidr(self):
return ip_network(self.sct_vpc.ipv6_cidr_block_association_set[0]["Ipv6CidrBlock"])
def az_subnet_name(self, region_az):
return self.SUBNET_NAME.format(availability_zone=region_az)
def sct_subnet(self, region_az) -> EC2ServiceResource.Subnet:
subnet_name = self.az_subnet_name(region_az)
subnets = self.client.describe_subnets(Filters=[{"Name": "tag:Name", "Values": [subnet_name]}])
LOGGER.debug(f"Found Subnets: {subnets}")
existing_subnets = subnets.get("Subnets", [])
if len(existing_subnets) == 0:
return None
assert len(existing_subnets) == 1, \
f"More than 1 Subnet with {subnet_name} found in {self.region_name}: {existing_subnets}!"
return self.resource.Subnet(existing_subnets[0]["SubnetId"]) # pylint: disable=no-member
def create_subnet(self, region_az, ipv4_cidr, ipv6_cidr):
LOGGER.info(f"Creating subnet for {region_az}...")
subnet_name = self.az_subnet_name(region_az)
if self.sct_subnet(region_az):
subnet_id = self.sct_subnet(region_az).subnet_id
LOGGER.warning(f"Subnet '{subnet_name}' already exists! Id: '{subnet_id}'.")
else:
result = self.client.create_subnet(CidrBlock=str(ipv4_cidr), Ipv6CidrBlock=str(ipv6_cidr),
VpcId=self.sct_vpc.vpc_id, AvailabilityZone=region_az)
subnet_id = result["Subnet"]["SubnetId"]
subnet = self.resource.Subnet(subnet_id) # pylint: disable=no-member
subnet.create_tags(Tags=[{"Key": "Name", "Value": subnet_name}])
LOGGER.info("Configuring to automatically assign public IPv4 and IPv6 addresses...")
self.client.modify_subnet_attribute(
MapPublicIpOnLaunch={"Value": True},
SubnetId=subnet_id
)
# for some reason boto3 throws error when both AssignIpv6AddressOnCreation and MapPublicIpOnLaunch are used
self.client.modify_subnet_attribute(
AssignIpv6AddressOnCreation={"Value": True},
SubnetId=subnet_id
)
LOGGER.info("'%s' with id '%s' created.", subnet_name, subnet_id)
def create_subnets(self):
num_subnets = len(self.availability_zones)
ipv4_cidrs = list(self.VPC_CIDR.subnets(6))[:num_subnets]
ipv6_cidrs = list(self.vpc_ipv6_cidr.subnets(8))[:num_subnets]
for i, az_name in enumerate(self.availability_zones):
self.create_subnet(region_az=az_name, ipv4_cidr=ipv4_cidrs[i], ipv6_cidr=ipv6_cidrs[i])
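    # Worked example of the CIDR arithmetic above (illustrative): with VPC_CIDR
    # 10.0.0.0/16, VPC_CIDR.subnets(6) yields /22 blocks, so the first three
    # availability zones receive 10.0.0.0/22, 10.0.4.0/22 and 10.0.8.0/22, while
    # the /56 IPv6 block assigned by AWS is split into /64 subnets the same way.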
@property
def sct_internet_gateway(self) -> EC2ServiceResource.InternetGateway:
igws = self.client.describe_internet_gateways(Filters=[{"Name": "tag:Name",
"Values": [self.INTERNET_GATEWAY_NAME]}])
LOGGER.debug(f"Found Internet gateways: {igws}")
existing_igws = igws.get("InternetGateways", [])
if len(existing_igws) == 0:
return None
assert len(existing_igws) == 1, \
f"More than 1 Internet Gateway with {self.INTERNET_GATEWAY_NAME} found " \
f"in {self.region_name}: {existing_igws}!"
return self.resource.InternetGateway(existing_igws[0]["InternetGatewayId"]) # pylint: disable=no-member
def create_internet_gateway(self):
LOGGER.info("Creating Internet Gateway..")
if self.sct_internet_gateway:
LOGGER.warning(f"Internet Gateway '{self.INTERNET_GATEWAY_NAME}' already exists! "
f"Id: '{self.sct_internet_gateway.internet_gateway_id}'.")
else:
result = self.client.create_internet_gateway()
igw_id = result["InternetGateway"]["InternetGatewayId"]
igw = self.resource.InternetGateway(igw_id) # pylint: disable=no-member
igw.create_tags(Tags=[{"Key": "Name", "Value": self.INTERNET_GATEWAY_NAME}])
LOGGER.info("'%s' with id '%s' created. Attaching to '%s'",
self.INTERNET_GATEWAY_NAME, igw_id, self.sct_vpc.vpc_id)
igw.attach_to_vpc(VpcId=self.sct_vpc.vpc_id)
@cached_property
def sct_route_table(self) -> EC2ServiceResource.RouteTable:
route_tables = self.client.describe_route_tables(Filters=[{"Name": "tag:Name",
"Values": [self.ROUTE_TABLE_NAME]}])
LOGGER.debug(f"Found Route Tables: {route_tables}")
existing_rts = route_tables.get("RouteTables", [])
if len(existing_rts) == 0:
return None
assert len(existing_rts) == 1, \
f"More than 1 Route Table with {self.ROUTE_TABLE_NAME} found " \
f"in {self.region_name}: {existing_rts}!"
return self.resource.RouteTable(existing_rts[0]["RouteTableId"]) # pylint: disable=no-member
def configure_route_table(self):
# add route to Internet: 0.0.0.0/0 -> igw
LOGGER.info("Configuring main Route Table...")
if self.sct_route_table:
LOGGER.warning(f"Route Table '{self.ROUTE_TABLE_NAME}' already exists! "
f"Id: '{self.sct_route_table.route_table_id}'.")
else:
route_tables = list(self.sct_vpc.route_tables.all())
assert len(route_tables) == 1, f"Only one main route table should exist for {self.VPC_NAME}. " \
f"Found {len(route_tables)}!"
route_table: EC2ServiceResource.RouteTable = route_tables[0]
route_table.create_tags(Tags=[{"Key": "Name", "Value": self.ROUTE_TABLE_NAME}])
LOGGER.info("Setting routing of all outbound traffic via Internet Gateway...")
route_table.create_route(DestinationCidrBlock="0.0.0.0/0",
GatewayId=self.sct_internet_gateway.internet_gateway_id)
route_table.create_route(DestinationIpv6CidrBlock="::/0",
GatewayId=self.sct_internet_gateway.internet_gateway_id)
LOGGER.info("Going to associate all Subnets with the Route Table...")
for az_name in self.availability_zones:
subnet_id = self.sct_subnet(az_name).subnet_id
LOGGER.info("Associating Route Table with '%s' [%s]...", self.az_subnet_name(az_name), subnet_id)
route_table.associate_with_subnet(SubnetId=subnet_id)
@property
def sct_security_group(self) -> EC2ServiceResource.SecurityGroup:
security_groups = self.client.describe_security_groups(Filters=[{"Name": "tag:Name",
"Values": [self.SECURITY_GROUP_NAME]}])
LOGGER.debug(f"Found Security Groups: {security_groups}")
existing_sgs = security_groups.get("SecurityGroups", [])
if len(existing_sgs) == 0:
return None
assert len(existing_sgs) == 1, \
f"More than 1 Security group with {self.SECURITY_GROUP_NAME} found " \
f"in {self.region_name}: {existing_sgs}!"
return self.resource.SecurityGroup(existing_sgs[0]["GroupId"]) # pylint: disable=no-member
def create_security_group(self):
"""
Custom TCP TCP 9093 0.0.0.0/0 Allow alert manager for all
Custom TCP TCP 9093 ::/0 Allow alert manager for all
"""
LOGGER.info("Creating Security Group...")
if self.sct_security_group:
LOGGER.warning(f"Security Group '{self.SECURITY_GROUP_NAME}' already exists! "
f"Id: '{self.sct_internet_gateway.internet_gateway_id}'.")
else:
result = self.client.create_security_group(Description='Security group that is used by SCT',
GroupName=self.SECURITY_GROUP_NAME,
VpcId=self.sct_vpc.vpc_id)
sg_id = result["GroupId"]
security_group = self.resource.SecurityGroup(sg_id) # pylint: disable=no-member
security_group.create_tags(Tags=[{"Key": "Name", "Value": self.SECURITY_GROUP_NAME}])
LOGGER.info("'%s' with id '%s' created. ", self.SECURITY_GROUP_NAME, self.sct_security_group.group_id)
LOGGER.info("Creating common ingress rules...")
security_group.authorize_ingress(
IpPermissions=[
{
"IpProtocol": "-1",
"UserIdGroupPairs": [
{
"Description": "Allow ALL traffic inside the Security group",
"GroupId": sg_id,
"UserId": security_group.owner_id
}
]
},
{
"FromPort": 22,
"ToPort": 22,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0', 'Description': 'SSH connectivity to the instances'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0', 'Description': 'SSH connectivity to the instances'}]
},
{
"FromPort": 3000,
"ToPort": 3000,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0', 'Description': 'Allow Grafana for ALL'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0', 'Description': 'Allow Grafana for ALL'}]
},
{
"FromPort": 9042,
"ToPort": 9042,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0', 'Description': 'Allow CQL for ALL'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0', 'Description': 'Allow CQL for ALL'}]
},
{
"FromPort": 9142,
"ToPort": 9142,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0', 'Description': 'Allow SSL CQL for ALL'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0', 'Description': 'Allow SSL CQL for ALL'}]
},
{
"FromPort": 9100,
"ToPort": 9100,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0', 'Description': 'Allow node_exporter on Db nodes for ALL'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0', 'Description': 'Allow node_exporter on Db nodes for ALL'}]
},
{
"FromPort": 8080,
"ToPort": 8080,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0', 'Description': 'Allow Alternator for ALL'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0', 'Description': 'Allow Alternator for ALL'}]
},
{
"FromPort": 9090,
"ToPort": 9090,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0', 'Description': 'Allow Prometheus for ALL'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0', 'Description': 'Allow Prometheus for ALL'}]
},
{
"FromPort": 9093,
"ToPort": 9093,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0', 'Description': 'Allow Prometheus Alert Manager For All'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0', 'Description': 'Allow Prometheus Alert Manager For All'}]
},
{
"FromPort": 9180,
"ToPort": 9180,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0', 'Description': 'Allow Prometheus API for ALL'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0', 'Description': 'Allow Prometheus API for ALL'}]
},
{
"FromPort": 7000,
"ToPort": 7000,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0',
'Description': 'Allow Inter-node communication (RPC) for ALL'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0',
'Description': 'Allow Inter-node communication (RPC) for ALL'}]
},
{
"FromPort": 7001,
"ToPort": 7001,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0',
'Description': 'Allow SSL inter-node communication (RPC) for ALL'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0',
'Description': 'Allow SSL inter-node communication (RPC) for ALL'}]
},
{
"FromPort": 7199,
"ToPort": 7199,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0', 'Description': 'Allow JMX management for ALL'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0', 'Description': 'Allow JMX management for ALL'}]
},
{
"FromPort": 10001,
"ToPort": 10001,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0',
'Description': 'Allow Scylla Manager Agent REST API for ALL'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0',
'Description': 'Allow Scylla Manager Agent REST API for ALL'}]
},
{
"FromPort": 56090,
"ToPort": 56090,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0',
'Description': 'Allow Scylla Manager Agent version 2.1 Prometheus API for ALL'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0',
'Description': 'Allow Scylla Manager Agent version 2.1 Prometheus API for ALL'}]
},
{
"IpProtocol": "-1",
"IpRanges": [{'CidrIp': '172.0.0.0/11',
'Description': 'Allow traffic from Scylla Cloud lab while VPC peering for ALL'}],
},
{
"FromPort": 5080,
"ToPort": 5080,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0',
'Description': 'Allow Scylla Manager HTTP API for ALL'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0',
'Description': 'Allow Scylla Manager HTTP API for ALL'}]
},
{
"FromPort": 5443,
"ToPort": 5443,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0',
'Description': 'Allow Scylla Manager HTTPS API for ALL'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0',
'Description': 'Allow Scylla Manager HTTPS API for ALL'}]
},
{
"FromPort": 5090,
"ToPort": 5090,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0',
'Description': 'Allow Scylla Manager Agent Prometheus API for ALL'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0',
'Description': 'Allow Scylla Manager Agent Prometheus API for ALL'}]
},
{
"FromPort": 5112,
"ToPort": 5112,
"IpProtocol": "tcp",
"IpRanges": [{'CidrIp': '0.0.0.0/0',
'Description': 'Allow Scylla Manager pprof Debug For ALL'}],
"Ipv6Ranges": [{'CidrIpv6': '::/0',
'Description': 'Allow Scylla Manager pprof Debug For ALL'}]
}
]
)
@property
def sct_keypair(self):
try:
key_pairs = self.client.describe_key_pairs(KeyNames=[self.KEY_PAIR_NAME])
except botocore.exceptions.ClientError as ex:
if "InvalidKeyPair.NotFound" in str(ex):
return None
else:
raise
LOGGER.debug(f"Found key pairs: {key_pairs}")
existing_key_pairs = key_pairs.get("KeyPairs", [])
assert len(existing_key_pairs) == 1, \
f"More than 1 Key Pair with {self.KEY_PAIR_NAME} found " \
f"in {self.region_name}: {existing_key_pairs}!"
return self.resource.KeyPair(existing_key_pairs[0]["KeyName"]) # pylint: disable=no-member
def create_key_pair(self):
LOGGER.info("Creating SCT Key Pair...")
if self.sct_keypair:
LOGGER.warning(f"SCT Key Pair already exists in {self.region_name}!")
else:
ks = KeyStore()
sct_key_pair = ks.get_ec2_ssh_key_pair()
self.resource.import_key_pair(KeyName=self.KEY_PAIR_NAME, # pylint: disable=no-member
PublicKeyMaterial=sct_key_pair.public_key)
LOGGER.info("SCT Key Pair created.")
def configure(self):
LOGGER.info(f"Configuring '{self.region_name}' region...")
self.create_vpc()
self.create_subnets()
self.create_internet_gateway()
self.configure_route_table()
self.create_security_group()
self.create_key_pair()
LOGGER.info("Region configured successfully.")
if __name__ == "__main__":
AWS_REGION = AwsRegion(region_name="eu-west-2")
AWS_REGION.configure()
| agpl-3.0 | 4,325,552,328,933,938,000 | 51.019656 | 120 | 0.495277 | false |
linuxknow/SampleBitCoin | ValidateCoinSimple.py | 1 | 4438 | '''
#############################################################################
# #
# ValidateCoinSimple.py #
# Copyright (C) 2013 linuxknow #
# linuxknow [at] gmail dot com #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/> #
# #
#############################################################################
'''
import json
import bottle
from bottle import route, run, request, abort
from collections import deque
class Calculus():
def __init__(self,total):
self.list_address=[]
self.amount = 0
self.total_address = total
def set_list_address(self,list_address):
self.list_address = list_address
def get_validate_address(self,address):
        if address is not None and address[0] in ('1', '3') and len(address) == 34:
return True
else:
return False
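    # Illustrative examples (well-known sample strings, not real wallets):
    #   '1BvBMSEYstWetqTFn5Au4m4GFg7xJaNVN2'         -> 34 chars starting with '1', accepted
    #   'bc1qar0srrr7xfkvy5l643lydnw9re59gtzzwf5mdq' -> bech32 address, rejected by this simple check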
def get_validate_amount(self,valor):
print valor
if not str(valor).isdigit():
return False
if int(valor) <= 0:
return False
else:
return True
def validate_all_address(self):
ok = False
for address in self.list_address:
if self.get_validate_address(address):
ok = True
return ok
def validate_bitcoin(self,data):
queue = deque(['address1','address2','address3','address4','address5'])
address_general = []
for inc in range(0,(self.total_address+1),1):
address_general.append(queue.popleft())
for key in address_general:
self.list_address.append(data[key])
##Debug
        if self.get_validate_amount(data['amount']):
            print "valid amount"
        else:
            print "invalid amount"
        if self.validate_all_address():
            print "valid addresses"
        else:
            print "invalid addresses"
if self.get_validate_amount(data['amount']) and self.validate_all_address():
return True
else:
return False
@route('/', method='GET')
def homepage():
return 'validate bitcoin!'
@route('/validateCoin', method='PUT')
def validate():
    ## Testing against all the address fields
#validar={'amount','address1','address2','address3','address4','address5'}
bitcoin={}
cant_address = 0
data = request.body.readline()
if not data:
abort(400, 'No data received')
bitcoin_web = json.loads(data)
for key, value in bitcoin_web.iteritems():
#if key not in validar:
# abort(400, 'No data received')
#else:
        if key.startswith('address'):
            cant_address += 1
bitcoin[key] = value
#return dict(valores = str(entity))
calcular = Calculus(cant_address)
if calcular.validate_bitcoin(bitcoin):
        ## Data we do not have yet
str = "'INSERT INTO tumble VALUES (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)', (idTumble,inputAddress,bitcoin['amount'],....,100,''))"
return "Bitcon agendado"
else:
#abort(400, 'No data received')
return "Su bitcoin no es valido"
##Guarda la info en la DB
#try:
# db['bitcoin'].save(bitcoin_web)
#except ValidationError as ve:
# abort(400, str(ve))
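# Example request (illustrative, field values are made up):
#   curl -X PUT http://localhost:8082/validateCoin \
#        -d '{"amount": 10, "address1": "1BvBMSEYstWetqTFn5Au4m4GFg7xJaNVN2"}'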
bottle.debug(True)
run(host='localhost', port=8082)
| gpl-3.0 | 2,338,833,910,033,928,700 | 33.671875 | 131 | 0.509464 | false |
JoelO/pyntl | frac.py | 1 | 2928 | from common import gcd
from functools import total_ordering
@total_ordering
class Fraction:
'''
A class to handle operations on rational numbers.
NOTE: This class does no sanity or type checking.
You CAN divide by zero, if that's what you're into.
'''
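    # Usage sketch (illustrative):
    #   >>> a, b = Fraction(1, 2), Fraction(1, 3)
    #   >>> print(a + b)
    #   5 / 6
    #   >>> float(a * b)
    #   0.16666666666666666
    #   >>> Fraction(4, 8).reduce() == Fraction(1, 2)
    #   True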
#################
## Constructor ##
#################
def __init__(self, numerator = 0, denominator = 1):
self.numerator = numerator
self.denominator = denominator
#####################
## Type Conversion ##
#####################
def __str__(self):
return str(self.numerator) + " / " + str(self.denominator)
def __int__(self):
# NOTE: This returns the floor, which is inconsistent with floats.
return self.numerator // self.denominator
def __float__(self):
return float(self.numerator) / self.denominator
def __bool__(self):
return self.numerator != 0
def __nonzero__(self):
# Needed for Python 2
return self.__bool__()
##########################
## Arithmetic operators ##
##########################
def __neg__(self):
return Fraction(-self.numerator, self.denominator)
def __add__(self, other):
newNumerator = self.numerator * other.denominator + \
self.denominator * other.numerator
newDenominator = self.denominator * other.denominator
return Fraction(newNumerator, newDenominator)
def __radd__(self, other):
return self + other
def __sub__(self, other):
return self + (-other)
def __rsub__(self, other):
return other + (-self)
# FIXME: This doesn't allow division by integers.
def __div__(self, other):
return self * other.reciprocal()
def __rdiv__(self, other):
return other * self.reciprocal()
def __mul__(self, other):
newNumerator = self.numerator * other.numerator
newDenominator = self.denominator * other.denominator
return Fraction(newNumerator, newDenominator)
def __rmul__(self, other):
return self * other
def __pow__(self, power):
newNumerator = self.numerator ** power
newDenominator = self.denominator ** power
return Fraction(newNumerator, newDenominator)
def reciprocal(self):
return Fraction(self.denominator, self.numerator)
def reduce(self):
g = gcd(self.numerator, self.denominator)
if g != 0:
            # Use floor division so the numerator and denominator stay integers.
            self.numerator //= g
            self.denominator //= g
return self
##########################
## Comparison Operators ##
##########################
def __eq__(self, other):
return self.numerator * other.denominator == \
self.denominator * other.numerator
def __lt__(self, other):
return self.numerator * other.denominator < \
self.denominator * other.numerator
| mit | 397,949,535,553,568,700 | 26.885714 | 74 | 0.559085 | false |
nickcdryan/hep_ml | hep_ml/nnet.py | 3 | 23946 | """
**hep_ml.nnet** is minimalistic version of feed-forward neural networks on theano.
The neural networks from this library provide sklearn classifier's interface.
Definitions for loss functions, trainers of neural networks are defined in this file too.
Main point of this library: black-box stochastic optimization of any given loss function.
This gives ability to define any activation expression (at the cost of unavailability of pretraining).
In this file we have **examples** of neural networks,
user is encouraged to write his own specific architecture,
which can be much more complex than those used usually.
This library should be preferred for different experiments with architectures.
Also **hep_ml.nnet** allows optimization of parameters in any differentiable decision function.
Being written in theano, these neural networks are able to make use of your GPU.
See also libraries: theanets, keras.
Examples
________
Training a neural network with two hidden layers using the IRPROP- algorithm
>>> network = MLPClassifier(layers=[7, 7], loss='log_loss', trainer='irprop-', epochs=1000)
>>> network.fit(X, y)
>>> probability = network.predict_proba(X)
Training AdaBoost over a neural network; the adadelta trainer is used here with a trainer-specific
parameter (the size of the minibatch)
>>> from sklearn.ensemble import AdaBoostClassifier
>>> base_network = MLPClassifier(layers=[10], trainer='adadelta', trainer_parameters={'batch': 600})
>>> classifier = AdaBoostClassifier(base_estimator=base_network, n_estimators=20)
>>> classifier.fit(X, y)
Using a custom pretransformer and the exponential loss:
>>> from sklearn.preprocessing import PolynomialFeatures
>>> network = MLPClassifier(layers=[10], scaler=PolynomialFeatures(), loss='exp_loss')
To create a custom neural network, see the code of SimpleNeuralNetwork.
"""
from __future__ import print_function, division, absolute_import
from copy import deepcopy
import numpy
import theano
import theano.tensor as T
from theano.ifelse import ifelse
from theano.tensor.shared_randomstreams import RandomStreams
from sklearn.utils.validation import check_random_state
from sklearn.base import BaseEstimator, ClassifierMixin, TransformerMixin, clone
from sklearn import preprocessing
from .commonutils import check_xyw, check_sample_weight
from scipy.special import expit
floatX = theano.config.floatX
__author__ = 'Alex Rogozhnikov'
__all__ = ['AbstractNeuralNetworkClassifier',
'MLPClassifier',
'SimpleNeuralNetwork',
'SoftmaxNeuralNetwork',
'RBFNeuralNetwork',
'PairwiseNeuralNetwork',
'PairwiseSoftplusNeuralNetwork',
]
# region Loss functions
def squared_loss(y, pred, w):
""" Squared loss for classification, not to be messed up with MSE"""
return T.mean(w * (y - T.nnet.sigmoid(pred)) ** 2)
def log_loss(y, pred, w):
""" Logistic loss for classification (aka cross-entropy, aka binomial deviance) """
margin = pred * (1 - 2 * y)
return T.mean(w * T.nnet.softplus(margin))
def exp_loss(y, pred, w):
""" Exponential loss for classification (aka AdaLoss function) """
margin = pred * (1 - 2 * y)
return T.mean(w * T.exp(margin))
def exp_log_loss(y, pred, w):
""" Classification loss function,
combines logistic loss for signal and exponential loss for background """
return 2 * log_loss(y, pred, w=w * y) + exp_loss(y, pred, w=w * (1 - y))
# regression loss
def mse_loss(y, pred, w):
""" Regression loss function, mean squared error. """
return T.mean(w * (y - pred) ** 2)
def smooth_huber_loss(y, pred, w):
"""Regression loss function, smooth version of Huber loss function. """
return T.mean(w * T.log(T.cosh(y - pred)))
losses = {'mse_loss': mse_loss,
'exp_loss': exp_loss,
'log_loss': log_loss,
'exp_log_loss': exp_log_loss,
'squared_loss': squared_loss,
'smooth_huber_loss': smooth_huber_loss,
}
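# Any callable with the signature (y, pred, w) -> scalar theano expression can also be
# passed as `loss`. The function below is only an illustrative sketch (it is not part of
# the built-in `losses` dict): a log-loss that weights background events twice as much
# as signal events.
def example_weighted_log_loss(y, pred, w):
    """ Illustrative custom loss, shown as a template for user-defined losses. """
    margin = pred * (1 - 2 * y)
    return T.mean(w * (2 - y) * T.nnet.softplus(margin))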
# endregion
# region Trainers
def get_batch(x, y, w, random_stream, batch_size=10):
""" Generates subset of training dataset, of size batch"""
indices = random_stream.choice(a=T.shape(x)[0], size=(batch_size,))
return x[indices], y[indices], w[indices]
def sgd_trainer(x, y, w, parameters, loss, random_stream, batch=10, learning_rate=0.1,
l2_penalty=0.001, momentum=0.9, ):
"""Stochastic gradient descent with momentum,
:param int batch: size of minibatch, each time averaging gradient over minibatch.
:param float learning_rate: size of step
:param float l2_penalty: speed of weights' decay, l2 regularization prevents overfitting
:param float momentum: momentum to stabilize learning process.
"""
updates = []
shareds = []
xp, yp, wp = get_batch(x, y, w, batch_size=batch, random_stream=random_stream)
for name, param in parameters.items():
der = T.grad(loss(xp, yp, wp), param)
momentum_ = theano.shared(param.get_value() * 0.)
shareds.append(momentum_)
updates.append([momentum_, momentum_ * momentum + (1. - momentum) * der])
updates.append([param, param * (1. - learning_rate * l2_penalty) - learning_rate * momentum_])
return shareds, updates
def irprop_minus_trainer(x, y, w, parameters, loss, random_stream,
positive_step=1.2, negative_step=0.5, max_step=1., min_step=1e-6):
"""IRPROP- is batch trainer, for details see http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.21.3428
This is default trainer, very stable for classification.
:param positive_step: factor, by which the step is increased when continuing going in the direction
:param negative_step: factor, by which the step is increased when changing direction to opposite
:param min_step: minimal change of weight during iteration
:param max_step: maximal change of weight during iteration
"""
shareds = []
updates = []
loss_value = loss(x, y, w)
for name, param in parameters.items():
old_derivative = theano.shared(param.get_value() * 0.)
delta = theano.shared(param.get_value() * 0. + 1e-3)
shareds.extend([old_derivative, delta])
new_derivative = T.grad(loss_value, param)
new_delta = T.where(new_derivative * old_derivative > 0, delta * positive_step, delta * negative_step)
new_delta = T.clip(new_delta, min_step, max_step)
updates.append([param, param - new_delta * T.sgn(new_derivative)])
updates.append([delta, new_delta])
new_old_derivative = T.where(new_derivative * old_derivative < 0, 0, new_derivative)
updates.append([old_derivative, new_old_derivative])
return shareds, updates
def irprop_star_trainer(x, y, w, parameters, loss, random_stream,
positive_step=1.2, negative_step=0.5, max_step=1., min_step=1e-6):
""" IRPROP* trainer (own experimental modification of IRPROP-, not recommended for usage) """
shareds = []
updates = []
loss_value = loss(x, y, w)
for name, param in parameters.items():
param_shape = param.get_value().shape
n = int(numpy.prod(param_shape))
new_derivative_ = T.grad(loss_value, param).flatten()
lnewder, rnewder = new_derivative_.reshape([n, 1]), new_derivative_.reshape([1, n])
new_derivative_plus = lnewder + rnewder
new_derivative_minus = lnewder - rnewder
new_param = param
for new_derivative in [new_derivative_plus, new_derivative_minus]:
delta = theano.shared(numpy.zeros([n, n], dtype=floatX) + 1e-3)
old_derivative = theano.shared(numpy.zeros([n, n], dtype=floatX))
new_delta = T.where(new_derivative * old_derivative > 0, delta * positive_step, delta * negative_step)
new_delta = T.clip(new_delta, min_step, max_step)
updates.append([delta, new_delta])
new_old_derivative = T.where(new_derivative * old_derivative < 0, 0, new_derivative)
updates.append([old_derivative, new_old_derivative])
new_param = new_param - (new_delta * T.sgn(new_derivative)).sum(axis=1).reshape(param.shape)
shareds.extend([old_derivative, delta])
updates.append([param, new_param])
return shareds, updates
def irprop_plus_trainer(x, y, w, parameters, loss, random_stream,
positive_step=1.2, negative_step=0.5, max_step=1., min_step=1e-6):
"""IRPROP+ is batch trainer, for details see http://citeseerx.ist.psu.edu/viewdoc/summary?doi=10.1.1.21.3428
:param positive_step: factor, by which the step is increased when continuing going in the direction
:param negative_step: factor, by which the step is increased when changing direction to opposite
:param min_step: minimal change of weight during iteration
:param max_step: maximal change of weight during iteration
"""
loss_value = loss(x, y, w)
prev_loss_value = theano.shared(1e10)
shareds = []
updates = []
    for name, param in parameters.items():
old_derivative = theano.shared(param.get_value() * 0.)
delta = theano.shared(param.get_value() * 0. + 1e-3)
new_derivative = T.grad(loss_value, param)
shift_if_bad_step = T.where(new_derivative * old_derivative < 0, delta * T.sgn(old_derivative), 0)
# THIS doesn't work!
shift = ifelse(loss_value > prev_loss_value, shift_if_bad_step, 0. * param)
# unfortunately we can't do it this way: param += shift
new_delta = T.where(new_derivative * old_derivative > 0, delta * positive_step, delta * negative_step)
new_delta = T.clip(new_delta, min_step, max_step)
updates.append([param, param + shift - new_delta * T.sgn(new_derivative)])
updates.append([delta, new_delta])
new_old_derivative = T.where(new_derivative * old_derivative < 0, 0, new_derivative)
updates.append([old_derivative, new_old_derivative])
shareds.extend([old_derivative, delta, prev_loss_value])
updates.append([prev_loss_value, loss_value])
return shareds, updates
def adadelta_trainer(x, y, w, parameters, loss, random_stream,
decay_rate=0.95, epsilon=1e-5, learning_rate=1., batch=1000):
"""AdaDelta is trainer with adaptive learning rate.
:param decay_rate: momentum-like parameter
:param learning_rate: size of step
:param batch: size of minibatch
:param epsilon: regularization
"""
shareds = []
updates = []
xp, yp, wp = get_batch(x, y, w, batch_size=batch, random_stream=random_stream)
for name, param in parameters.items():
derivative = T.grad(loss(xp, yp, wp), param)
cumulative_derivative = theano.shared(param.get_value() * 0.)
cumulative_step = theano.shared(param.get_value() * 0.)
shareds.extend([cumulative_derivative, cumulative_step])
updates.append([cumulative_derivative, cumulative_derivative * decay_rate + (1 - decay_rate) * derivative ** 2])
step = - derivative * T.sqrt((cumulative_step + epsilon) / (cumulative_derivative + epsilon))
updates.append([cumulative_step, cumulative_step * decay_rate + (1 - decay_rate) * step ** 2])
updates.append([param, param + learning_rate * step])
return shareds, updates
trainers = {'sgd': sgd_trainer,
'irprop-': irprop_minus_trainer,
'irprop+': irprop_plus_trainer,
'irprop*': irprop_star_trainer,
'adadelta': adadelta_trainer,
}
# endregion
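# A user-defined trainer can be plugged in the same way (illustrative sketch, not part of
# the built-in `trainers` dict): it must accept (x, y, w, parameters, loss, random_stream,
# **trainer_parameters) and return a pair (list of shared variables, list of updates).
def example_full_batch_gd_trainer(x, y, w, parameters, loss, random_stream, learning_rate=0.1):
    """ Plain full-batch gradient descent, shown as a template for custom trainers. """
    updates = []
    for name, param in parameters.items():
        derivative = T.grad(loss(x, y, w), param)
        updates.append([param, param - learning_rate * derivative])
    return [], updates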
def _prepare_scaler(transform):
"""Returns new transformer used in neural network
:param transform: str ot transformer
:return: transformer, cloned or created.
"""
if transform == 'standard':
return preprocessing.StandardScaler()
elif transform == 'minmax':
return preprocessing.MinMaxScaler()
else:
assert isinstance(transform, TransformerMixin), 'provided transformer should be derived from TransformerMixin'
return clone(transform)
class AbstractNeuralNetworkClassifier(BaseEstimator, ClassifierMixin):
"""
Base class for classification neural networks.
Supports only binary classification, supports weights, which makes it usable in boosting.
Works in sklearn fit-predict way: X is [n_samples, n_features], y is [n_samples], sample_weight is [n_samples].
Works as usual sklearn classifier, can be used in boosting, for instance, pickled, etc.
"""
def __init__(self, layers=(10,), scaler='standard', loss='log_loss', trainer='irprop-', epochs=100,
trainer_parameters=None, random_state=None):
"""
:param layers: list of int, e.g [9, 7] - the number of units in each *hidden* layer
:param scaler: 'standard' or 'minmax' or some other Transformer used to pretransform features.
Default is 'standard', which will apply StandardScaler from sklearn.
:param loss: loss function used (log_loss by default), str or function(y, pred, w) -> float
:param trainer: string, name of optimization method used
        :param epochs: number of times each sample takes part in training
:param dict trainer_parameters: parameters passed to trainer function (learning_rate, etc., trainer-specific).
"""
self.scaler = scaler
self.layers = layers
self.loss = loss
self.prepared = False
self.epochs = epochs
self.parameters = {}
self.trainer = trainer
self.trainer_parameters = deepcopy(trainer_parameters)
self.random_state = random_state
self.classes_ = numpy.array([0, 1])
def _create_matrix_parameter(self, name, n1, n2):
"""Creates a parameter of neural network, which is typically a matrix"""
matrix = theano.shared(value=self.random_state_.normal(size=[n1, n2]).astype(floatX) * 0.01, name=name)
self.parameters[name] = matrix
return matrix
def _create_scalar_parameters(self, *names):
"""Creates a parameter of neural network, which is typically a matrix"""
for name in names:
param = theano.shared(value=self.random_state_.normal() * 0.01, name=name)
self.parameters[name] = param
yield param
def prepare(self):
"""This method should provide activation function and set parameters.
Each network overrides this function.
:return: Activation function, f: X -> p,
X of shape [n_events, n_outputs], p of shape [n_events].
For classification, p is arbitrary real, the greater p, the more event
looks like signal event (label 1).
"""
raise NotImplementedError()
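        # A minimal sketch of an override (illustrative, in the spirit of the
        # SimpleNeuralNetwork mentioned in the module docstring): create weight matrices
        # via self._create_matrix_parameter and return a function building the theano
        # expression, e.g.
        #   def prepare(self):
        #       n1, n2, n3 = self.layers_
        #       W1 = self._create_matrix_parameter('W1', n1, n2)
        #       W2 = self._create_matrix_parameter('W2', n2, n3)
        #       def activation(input):
        #           hidden = T.nnet.sigmoid(T.dot(input, W1))
        #           return T.dot(hidden, W2)
        #       return activation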
def _prepare(self, n_input_features):
"""This function is called once, it creates the activation function, it's gradient
and initializes the weights
:return: loss function as lambda (x, y, w) -> loss"""
self.random_state_ = check_random_state(self.random_state)
self.layers_ = [n_input_features] + list(self.layers) + [1]
self.parameters = {}
self.prepared = True
loss_function = losses.get(self.loss, self.loss)
x = T.matrix('X')
y = T.vector('y')
w = T.vector('w')
activation_raw = self.prepare()
self.Activation = theano.function([x], activation_raw(x).flatten())
loss_ = lambda x, y, w: loss_function(y, activation_raw(x).flatten(), w)
self.Loss = theano.function([x, y, w], loss_(x, y, w))
return loss_
def _transform(self, X, y=None, fit=True):
"""Apply selected scaler or transformer to dataset
(also this method adds a column filled with ones).
:param numpy.array X: of shape [n_samples, n_features], data
:param numpy.array y: of shape [n_samples], labels
        :param bool fit: if True, the scaler will first be fitted on this data before transforming
:return: transformed data, numpy.array of shape [n_samples, n_output_features]
"""
if fit:
self.scaler_ = _prepare_scaler(self.scaler)
self.scaler_.fit(X, y)
# Fighting copy-bug of sklearn's transformers
X = numpy.array(X, dtype=float)
result = self.scaler_.transform(X)
result = numpy.hstack([result, numpy.ones([len(X), 1])])
return result
def _prepare_inputs(self, X, y, sample_weight):
X, y, sample_weight = check_xyw(X, y, sample_weight)
sample_weight = check_sample_weight(y, sample_weight, normalize=True)
X = self._transform(X, y, fit=True)
self.classes_ = numpy.array([0, 1])
assert (numpy.unique(y) == self.classes_).all(), 'only two-class classification supported, labels are 0 and 1'
return X, y, sample_weight
def fit(self, X, y, sample_weight=None, trainer=None, epochs=None, **trainer_parameters):
""" Prepare the model by optimizing selected loss function with some trainer.
This method doesn't support additional fitting, use `partial_fit`.
:param X: numpy.array of shape [n_samples, n_features]
:param y: numpy.array of shape [n_samples]
:param sample_weight: numpy.array of shape [n_samples], leave None for array of 1's
:param trainer: str, method used to minimize loss, overrides one in the ctor
:param trainer_parameters: parameters for this method, override ones in ctor
:return: self """
X, y, sample_weight = self._prepare_inputs(X, y, sample_weight=sample_weight)
loss_lambda = self._prepare(X.shape[1])
trainer = trainers[self.trainer if trainer is None else trainer]
parameters_ = {} if self.trainer_parameters is None else self.trainer_parameters.copy()
parameters_.update(trainer_parameters)
x = theano.shared(X)
y = theano.shared(y)
w = theano.shared(numpy.array(sample_weight, dtype=floatX))
shareds, updates = trainer(x, y, w, self.parameters, loss_lambda,
RandomStreams(seed=self.random_state_.randint(0, 1000)), **parameters_)
make_one_step = theano.function([], [], updates=updates)
# TODO epochs are computed wrongly at the moment if 'batch' parameter not passed.
n_batches = 1
        if 'batch' in parameters_:
batch = parameters_['batch']
n_batches = len(X) // batch + 1
for i in range(epochs or self.epochs):
for _ in range(n_batches):
make_one_step()
return self
def activate(self, X):
"""
Activates NN on particular dataset
:param numpy.array X: of shape [n_samples, n_features]
:return: numpy.array with results of shape [n_samples]
"""
X = self._transform(X, fit=False)
return self.Activation(X)
def predict_proba(self, X):
"""Computes probability of each event to belong to each class
:param numpy.array X: of shape [n_samples, n_features]
:return: numpy.array of shape [n_samples, n_classes]
"""
result = numpy.zeros([len(X), 2])
result[:, 1] = expit(self.activate(X))
result[:, 0] = 1 - result[:, 1]
return result
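    # Note added for clarity (not in the original source): `activate` returns an unbounded real
    # score per event, and `expit` (the logistic sigmoid, presumably scipy.special.expit) maps
    # that score into [0, 1], which is then used directly as P(class == 1).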
def predict(self, X):
""" Predict the classes for new events.
:param numpy.array X: of shape [n_samples, n_features]
:return: numpy.array of shape [n_samples] with labels of predicted classes """
return self.predict_proba(X).argmax(axis=1)
def compute_loss(self, X, y, sample_weight=None):
"""Computes loss (that was used in training) on labeled dataset
:param X: numpy.array of shape [n_samples, n_features]
:param y: numpy.array with integer labels of shape [n_samples],
in two-class classification 0 and 1 labels should be used
:param sample_weight: optional, numpy.array of shape [n_samples].
        :return: float, the loss value computed"""
sample_weight = check_sample_weight(y, sample_weight, normalize=False)
        X = self._transform(X, fit=False)
return self.Loss(X, y, sample_weight)
# region Neural networks
class SimpleNeuralNetwork(AbstractNeuralNetworkClassifier):
"""The most simple NN with one hidden layer (sigmoid activation), for example purposes.
Supports only one hidden layer.
See source code as example."""
def prepare(self):
n1, n2, n3 = self.layers_
W1 = self._create_matrix_parameter('W1', n1, n2)
W2 = self._create_matrix_parameter('W2', n2, n3)
def activation(input):
first = T.nnet.sigmoid(T.dot(input, W1))
return T.dot(first, W2)
return activation
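# The helper below is an illustrative sketch added for documentation purposes; it is not part of
# the original module. It shows the sklearn-style fit/predict workflow that the classifiers above
# expose (the data, layer sizes and epoch count are arbitrary choices for the example).
def _demo_simple_network(n_samples=200, n_features=5):
    """Train SimpleNeuralNetwork on toy data and return class probabilities."""
    rng = numpy.random.RandomState(42)
    X = rng.normal(size=(n_samples, n_features))
    y = (X[:, 0] + 0.5 * X[:, 1] > 0).astype(int)   # binary labels, must be 0 and 1
    clf = SimpleNeuralNetwork(layers=(8,), epochs=20, random_state=42)
    clf.fit(X, y)                                   # sample_weight defaults to an array of 1's
    return clf.predict_proba(X)                     # numpy.array of shape [n_samples, 2]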
class MLPClassifier(AbstractNeuralNetworkClassifier):
"""MLP (MultiLayerPerceptron) supports arbitrary number of layers (sigmoid activation each)."""
def prepare(self):
activation = lambda x: x
for i, layer in list(enumerate(self.layers_))[1:]:
W = self._create_matrix_parameter('W' + str(i), self.layers_[i - 1], self.layers_[i])
# act=activation and W_=W are tricks to avoid lambda-capturing
if i == 0:
activation = lambda x, act=activation, W_=W: T.dot(act(x), W_)
else:
activation = lambda x, act=activation, W_=W: T.dot(T.tanh(act(x)), W_)
return activation
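# Illustrative note (added; not from the original source): with e.g. MLPClassifier(layers=(20, 10))
# and n_features input columns, prepare() builds matrices W1, W2, W3 of shapes
# [n_features + 1, 20], [20, 10] and [10, 1] (the +1 comes from the bias column added in _transform).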
class RBFNeuralNetwork(AbstractNeuralNetworkClassifier):
"""
Neural network with one hidden layer with normalized RBF activation (Radial Basis Function).
"""
def prepare(self):
n1, n2, n3 = self.layers_
W1 = self._create_matrix_parameter('W1', n2, n1)
W2 = self._create_matrix_parameter('W2', n2, n3)
# this parameter is responsible for scaling, it is optimised too
G = theano.shared(value=0.1, name='G')
self.parameters['G'] = G
def activation(input):
translation_vectors = W1.reshape((1, W1.shape[0], -1)) - input.reshape((input.shape[0], 1, -1))
minkowski_distances = (abs(translation_vectors) ** 2).sum(2)
first = T.nnet.softmax(- (0.001 + G * G) * minkowski_distances)
return T.dot(first, W2)
return activation
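# Illustrative note (added for clarity): each row of W1 above acts as an RBF centre, so the hidden
# layer computes h(x) = softmax_j(-(0.001 + G^2) * ||x - w_j||^2), a normalized RBF response,
# and the output is the linear combination T.dot(h, W2).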
class SoftmaxNeuralNetwork(AbstractNeuralNetworkClassifier):
"""Neural network with one hidden layer, softmax activation function """
def prepare(self):
n1, n2, n3 = self.layers_
W1 = self._create_matrix_parameter('W1', n1, n2)
W2 = self._create_matrix_parameter('W2', n2, n3)
def activation(input):
first = T.nnet.softmax(T.dot(input, W1))
return T.dot(first, W2)
return activation
class PairwiseNeuralNetwork(AbstractNeuralNetworkClassifier):
"""The result is computed as :math:`h = sigmoid(Ax)`, :math:`output = \sum_{ij} B_{ij} h_i (1 - h_j)`,
this is a brilliant example when easier to define activation
function rather than trying to implement this inside some framework."""
def prepare(self):
n1, n2, n3 = self.layers_
W1 = self._create_matrix_parameter('W1', n1, n2)
W2 = self._create_matrix_parameter('W2', n2, n2)
def activation(input):
first = T.nnet.sigmoid(T.dot(input, W1))
return T.batched_dot(T.dot(first, W2), 1 - first)
return activation
class PairwiseSoftplusNeuralNetwork(AbstractNeuralNetworkClassifier):
"""The result is computed as :math:`h = softplus(Ax)`, :math:`output = \sum_{ij} B_{ij} h_i (1 - h_j)` """
def prepare(self):
n1, n2, n3 = self.layers_
W1 = self._create_matrix_parameter('W1', n1, n2)
W2 = self._create_matrix_parameter('W2', n2, n2)
def activation(input):
z = T.dot(input, W1)
first1 = T.nnet.softplus(z)
first2 = T.nnet.softplus(-z)
return T.batched_dot(T.dot(first1, W2), first2)
return activation
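# Illustrative note (added; not from the original source): for both pairwise networks above,
# T.batched_dot(T.dot(h, W2), g) computes, per event, sum_{ij} h_i * W2_{ij} * g_j. In the sigmoid
# variant g = 1 - h, matching the docstring; in the softplus variant g = softplus(-z), which is
# close to, but not literally, the 1 - h_j written in its docstring.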
# endregion | apache-2.0 | 975,122,723,542,924,800 | 39.519459 | 120 | 0.645745 | false |
kzcashteam/kzcash | qa/rpc-tests/forknotify.py | 66 | 2086 | #!/usr/bin/env python2
# Copyright (c) 2014-2015 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
#
# Test -alertnotify
#
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
class ForkNotifyTest(BitcoinTestFramework):
alert_filename = None # Set by setup_network
def setup_network(self):
self.nodes = []
self.alert_filename = os.path.join(self.options.tmpdir, "alert.txt")
with open(self.alert_filename, 'w') as f:
pass # Just open then close to create zero-length file
self.nodes.append(start_node(0, self.options.tmpdir,
["-blockversion=2", "-alertnotify=echo %s >> \"" + self.alert_filename + "\""]))
# Node1 mines block.version=211 blocks
self.nodes.append(start_node(1, self.options.tmpdir,
["-blockversion=211"]))
connect_nodes(self.nodes[1], 0)
self.is_network_split = False
self.sync_all()
def run_test(self):
# Mine 51 up-version blocks
self.nodes[1].generate(51)
self.sync_all()
        # -alertnotify should trigger on the 51st block,
# but mine and sync another to give
# -alertnotify time to write
self.nodes[1].generate(1)
self.sync_all()
with open(self.alert_filename, 'r') as f:
alert_text = f.read()
if len(alert_text) == 0:
raise AssertionError("-alertnotify did not warn of up-version blocks")
# Mine more up-version blocks, should not get more alerts:
self.nodes[1].generate(1)
self.sync_all()
self.nodes[1].generate(1)
self.sync_all()
with open(self.alert_filename, 'r') as f:
alert_text2 = f.read()
if alert_text != alert_text2:
raise AssertionError("-alertnotify excessive warning of up-version blocks")
if __name__ == '__main__':
ForkNotifyTest().main()
| mit | 1,468,296,126,421,932,000 | 33.196721 | 108 | 0.612176 | false |
harikishen/addons-server | src/olympia/addons/tests/test_models.py | 1 | 116429 | # -*- coding: utf-8 -*-
import json
import os
import time
from datetime import datetime, timedelta
from django import forms
from django.conf import settings
from django.core import mail
from django.core.files.storage import default_storage as storage
from django.db import IntegrityError
from django.utils import translation
from mock import Mock, patch
from olympia import amo, core
from olympia.activity.models import ActivityLog, AddonLog
from olympia.amo.tests import addon_factory, TestCase, version_factory
from olympia.amo.helpers import absolutify, user_media_url
from olympia.addons.models import (
Addon, AddonApprovalsCounter, AddonCategory, AddonDependency,
AddonFeatureCompatibility, AddonUser, AppSupport, DeniedGuid, DeniedSlug,
Category, Charity, CompatOverride, CompatOverrideRange, FrozenAddon,
IncompatibleVersions, Persona, Preview, track_addon_status_change)
from olympia.applications.models import AppVersion
from olympia.bandwagon.models import Collection
from olympia.constants.categories import CATEGORIES
from olympia.devhub.models import RssKey
from olympia.files.models import File
from olympia.files.tests.test_models import UploadTest
from olympia.reviews.models import Review, ReviewFlag
from olympia.translations.models import (
delete_translation, Translation, TranslationSequence)
from olympia.users.models import UserProfile
from olympia.versions.models import ApplicationsVersions, Version
from olympia.versions.compare import version_int
class TestCleanSlug(TestCase):
def test_clean_slug_new_object(self):
# Make sure there's at least an addon with the "addon" slug, subsequent
# ones should be "addon-1", "addon-2" ...
a = Addon.objects.create()
assert a.slug == "addon"
        # Start with a first clash. This should give us "addon1".
# We're not saving yet, we're testing the slug creation without an id.
b = Addon()
b.clean_slug()
assert b.slug == 'addon1'
# Now save the instance to the database for future clashes.
b.save()
# Test on another object without an id.
c = Addon()
c.clean_slug()
assert c.slug == 'addon2'
# Even if an addon is deleted, don't clash with its slug.
c.status = amo.STATUS_DELETED
# Now save the instance to the database for future clashes.
c.save()
# And yet another object without an id. Make sure we're not trying to
        # assign the 'addon2' slug from the deleted addon.
d = Addon()
d.clean_slug()
assert d.slug == 'addon3'
def test_clean_slug_with_id(self):
# Create an addon and save it to have an id.
a = Addon.objects.create()
# Start over: don't use the name nor the id to generate the slug.
a.slug = a.name = ""
a.clean_slug()
# Slugs created from an id are of the form "id~", eg "123~" to avoid
# clashing with URLs.
assert a.slug == "%s~" % a.id
# And again, this time make it clash.
b = Addon.objects.create()
# Set a's slug to be what should be created for b from its id.
a.slug = "%s~" % b.id
a.save()
# Now start over for b.
b.slug = b.name = ""
b.clean_slug()
assert b.slug == "%s~1" % b.id
def test_clean_slug_with_name(self):
# Make sure there's at least an addon with the "fooname" slug,
# subsequent ones should be "fooname-1", "fooname-2" ...
a = Addon.objects.create(name="fooname")
assert a.slug == "fooname"
b = Addon(name="fooname")
b.clean_slug()
assert b.slug == "fooname1"
def test_clean_slug_with_slug(self):
# Make sure there's at least an addon with the "fooslug" slug,
# subsequent ones should be "fooslug-1", "fooslug-2" ...
a = Addon.objects.create(name="fooslug")
assert a.slug == "fooslug"
b = Addon(name="fooslug")
b.clean_slug()
assert b.slug == "fooslug1"
def test_clean_slug_denied_slug(self):
denied_slug = 'foodenied'
DeniedSlug.objects.create(name=denied_slug)
a = Addon(slug=denied_slug)
a.clean_slug()
# Blacklisted slugs (like "activate" or IDs) have a "~" appended to
# avoid clashing with URLs.
assert a.slug == "%s~" % denied_slug
# Now save the instance to the database for future clashes.
a.save()
b = Addon(slug=denied_slug)
b.clean_slug()
assert b.slug == "%s~1" % denied_slug
def test_clean_slug_denied_slug_long_slug(self):
long_slug = "this_is_a_very_long_slug_that_is_longer_than_thirty_chars"
DeniedSlug.objects.create(name=long_slug[:30])
# If there's no clashing slug, just append a "~".
a = Addon.objects.create(slug=long_slug[:30])
assert a.slug == "%s~" % long_slug[:29]
# If there's a clash, use the standard clash resolution.
a = Addon.objects.create(slug=long_slug[:30])
assert a.slug == "%s1" % long_slug[:28]
def test_clean_slug_long_slug(self):
long_slug = "this_is_a_very_long_slug_that_is_longer_than_thirty_chars"
# If there's no clashing slug, don't over-shorten it.
a = Addon.objects.create(slug=long_slug)
assert a.slug == long_slug[:30]
# Now that there is a clash, test the clash resolution.
b = Addon(slug=long_slug)
b.clean_slug()
assert b.slug == "%s1" % long_slug[:28]
def test_clean_slug_always_slugify(self):
illegal_chars = "some spaces and !?@"
# Slugify if there's a slug provided.
a = Addon(slug=illegal_chars)
a.clean_slug()
assert a.slug.startswith("some-spaces-and"), a.slug
# Also slugify if there's no slug provided.
b = Addon(name=illegal_chars)
b.clean_slug()
assert b.slug.startswith("some-spaces-and"), b.slug
def test_clean_slug_worst_case_scenario(self):
long_slug = "this_is_a_very_long_slug_that_is_longer_than_thirty_chars"
        # Generate 100 addons with this very long slug. We should hit the
        # worst case scenario where all the candidate slugs have already been
        # taken. Check the comment in addons.models.clean_slug, in the "else"
# part of the "for" loop checking for available slugs not yet assigned.
for i in range(100):
Addon.objects.create(slug=long_slug)
with self.assertRaises(RuntimeError): # Fail on the 100th clash.
Addon.objects.create(slug=long_slug)
def test_clean_slug_ends_with_dash(self):
"""Addon name ending with a dash should still work: See bug 1206063."""
a = Addon.objects.create(name='ends with dash -')
assert a.slug == 'ends-with-dash-'
assert a.slug == amo.utils.slugify(a.slug)
b = Addon.objects.create(name='ends with dash -')
assert b.slug == 'ends-with-dash-1'
assert b.slug == amo.utils.slugify(b.slug)
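# Illustrative summary (added; a simplified sketch, not the actual olympia implementation): the
# behaviour exercised by TestCleanSlug above amounts to slugifying the name (or falling back to
# "<id>~" when there is none), appending "~" to denied slugs, and appending an increasing counter
# ("addon", "addon1", "addon2", ...) while the candidate slug is already taken.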
class TestAddonManager(TestCase):
fixtures = ['base/appversion', 'base/users',
'base/addon_3615', 'addons/featured', 'addons/test_manager',
'base/collections', 'base/featured',
'bandwagon/featured_collections', 'base/addon_5299_gcal']
def setUp(self):
super(TestAddonManager, self).setUp()
core.set_user(None)
self.addon = Addon.objects.get(pk=3615)
def test_managers_public(self):
assert self.addon in Addon.objects.all()
assert self.addon in Addon.unfiltered.all()
def test_managers_unlisted(self):
self.make_addon_unlisted(self.addon)
assert self.addon in Addon.objects.all()
assert self.addon in Addon.unfiltered.all()
def test_managers_unlisted_deleted(self):
self.make_addon_unlisted(self.addon)
self.addon.update(status=amo.STATUS_DELETED)
assert self.addon not in Addon.objects.all()
assert self.addon in Addon.unfiltered.all()
def test_managers_deleted(self):
self.addon.update(status=amo.STATUS_DELETED)
assert self.addon not in Addon.objects.all()
assert self.addon in Addon.unfiltered.all()
def test_featured(self):
assert Addon.objects.featured(amo.FIREFOX).count() == 3
def test_listed(self):
# We need this for the fixtures, but it messes up the tests.
self.addon.update(disabled_by_user=True)
# Now continue as normal.
Addon.objects.filter(id=5299).update(disabled_by_user=True)
q = Addon.objects.listed(amo.FIREFOX, amo.STATUS_PUBLIC)
assert len(q.all()) == 4
# Pick one of the listed addons.
addon = Addon.objects.get(pk=2464)
assert addon in q.all()
# Disabling hides it.
addon.disabled_by_user = True
addon.save()
# Should be 3 now, since the one is now disabled.
assert q.count() == 3
# If we search for public or unreviewed we find it.
addon.disabled_by_user = False
addon.status = amo.STATUS_NOMINATED
addon.save()
assert q.count() == 3
assert Addon.objects.listed(amo.FIREFOX, amo.STATUS_PUBLIC,
amo.STATUS_NOMINATED).count() == 4
# Can't find it without a file.
addon.versions.get().files.get().delete()
assert q.count() == 3
def test_public(self):
for a in Addon.objects.public():
assert a.status == amo.STATUS_PUBLIC
def test_valid(self):
addon = Addon.objects.get(pk=5299)
addon.update(disabled_by_user=True)
objs = Addon.objects.valid()
for addon in objs:
assert addon.status in amo.VALID_ADDON_STATUSES
assert not addon.disabled_by_user
def test_valid_disabled_by_user(self):
before = Addon.objects.valid_and_disabled_and_pending().count()
addon = Addon.objects.get(pk=5299)
addon.update(disabled_by_user=True)
assert Addon.objects.valid_and_disabled_and_pending().count() == before
def test_valid_disabled_by_admin(self):
before = Addon.objects.valid_and_disabled_and_pending().count()
addon = Addon.objects.get(pk=5299)
addon.update(status=amo.STATUS_DISABLED)
assert Addon.objects.valid_and_disabled_and_pending().count() == before
def test_invalid_deleted(self):
before = Addon.objects.valid_and_disabled_and_pending().count()
addon = Addon.objects.get(pk=5299)
addon.update(status=amo.STATUS_DELETED)
assert Addon.objects.valid_and_disabled_and_pending().count() == (
before - 1)
def test_valid_disabled_pending(self):
before = Addon.objects.valid_and_disabled_and_pending().count()
amo.tests.addon_factory(status=amo.STATUS_PENDING)
assert Addon.objects.valid_and_disabled_and_pending().count() == (
before + 1)
def test_valid_disabled_version(self):
before = Addon.objects.valid_and_disabled_and_pending().count()
# Add-on, no version. Doesn't count.
addon = amo.tests.addon_factory()
addon.update(_current_version=None, _signal=False)
assert Addon.objects.valid_and_disabled_and_pending().count() == before
# Theme, no version. Counts.
addon = amo.tests.addon_factory(type=amo.ADDON_PERSONA)
addon.update(_current_version=None, _signal=False)
assert Addon.objects.valid_and_disabled_and_pending().count() == (
before + 1)
def test_new_featured(self):
f = Addon.objects.featured(amo.FIREFOX)
assert f.count() == 3
assert sorted(x.id for x in f) == (
[2464, 7661, 15679])
f = Addon.objects.featured(amo.THUNDERBIRD)
assert not f.exists()
def test_filter_for_many_to_many(self):
# Check https://bugzilla.mozilla.org/show_bug.cgi?id=1142035.
collection = self.addon.collections.first()
assert collection.addons.get() == self.addon
# Addon shouldn't be listed in collection.addons if it's deleted.
# Unlisted.
self.make_addon_unlisted(self.addon)
collection = Collection.objects.get(pk=collection.pk)
assert collection.addons.get() == self.addon
# Deleted and unlisted.
self.addon.update(status=amo.STATUS_DELETED)
collection = Collection.objects.get(pk=collection.pk)
assert collection.addons.count() == 0
# Only deleted.
self.make_addon_listed(self.addon)
collection = Collection.objects.get(pk=collection.pk)
assert collection.addons.count() == 0
def test_no_filter_for_relations(self):
# Check https://bugzilla.mozilla.org/show_bug.cgi?id=1142035.
version = self.addon.versions.first()
assert version.addon == self.addon
# Deleted or unlisted, version.addon should still work.
# Unlisted.
self.make_addon_unlisted(self.addon)
version = Version.objects.get(pk=version.pk) # Reload from db.
assert version.addon == self.addon
# Deleted and unlisted.
self.addon.update(status=amo.STATUS_DELETED)
version = Version.objects.get(pk=version.pk) # Reload from db.
assert version.addon == self.addon
# Only deleted.
self.make_addon_listed(self.addon)
version = Version.objects.get(pk=version.pk) # Reload from db.
assert version.addon == self.addon
class TestAddonModels(TestCase):
fixtures = ['base/appversion',
'base/collections',
'base/featured',
'base/users',
'base/addon_5299_gcal',
'base/addon_3615',
'base/addon_3723_listed',
'base/addon_6704_grapple.json',
'base/addon_4594_a9',
'base/addon_4664_twitterbar',
'base/thunderbird',
'addons/featured',
'addons/invalid_latest_version',
'addons/denied',
'bandwagon/featured_collections']
def setUp(self):
super(TestAddonModels, self).setUp()
TranslationSequence.objects.create(id=99243)
self.old_version = amo.FIREFOX.latest_version
amo.FIREFOX.latest_version = '3.6.15'
def tearDown(self):
amo.FIREFOX.latest_version = self.old_version
super(TestAddonModels, self).tearDown()
def test_current_version(self):
"""
Tests that we get the current (latest public) version of an addon.
"""
a = Addon.objects.get(pk=3615)
assert a.current_version.id == 81551
def test_current_version_listed(self):
a = Addon.objects.get(pk=3723)
assert a.current_version.id == 89774
def test_current_version_listed_no_version(self):
Addon.objects.filter(pk=3723).update(_current_version=None)
Version.objects.filter(addon=3723).delete()
a = Addon.objects.get(pk=3723)
assert a.current_version is None
def test_latest_unlisted_version(self):
addon = Addon.objects.get(pk=3615)
an_unlisted_version = version_factory(
addon=addon, version='3.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
an_unlisted_version.update(created=self.days_ago(2))
a_newer_unlisted_version = version_factory(
addon=addon, version='4.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
a_newer_unlisted_version.update(created=self.days_ago(1))
version_factory(
addon=addon, version='5.0', channel=amo.RELEASE_CHANNEL_UNLISTED,
file_kw={'status': amo.STATUS_DISABLED})
assert addon.latest_unlisted_version == a_newer_unlisted_version
# Make sure the property is cached.
an_even_newer_unlisted_version = version_factory(
addon=addon, version='6.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
assert addon.latest_unlisted_version == a_newer_unlisted_version
# Make sure it can be deleted to reset it.
del addon.latest_unlisted_version
assert addon.latest_unlisted_version == an_even_newer_unlisted_version
# Make sure it's writeable.
addon.latest_unlisted_version = an_unlisted_version
assert addon.latest_unlisted_version == an_unlisted_version
def test_find_latest_version(self):
"""
Tests that we get the latest version of an addon.
"""
addon = Addon.objects.get(pk=3615)
addon.current_version.update(created=self.days_ago(2))
new_version = version_factory(addon=addon, version='2.0')
new_version.update(created=self.days_ago(1))
assert addon.find_latest_version(None) == new_version
another_new_version = version_factory(
addon=addon, version='3.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
assert addon.find_latest_version(None) == another_new_version
def test_find_latest_version_different_channel(self):
addon = Addon.objects.get(pk=3615)
addon.current_version.update(created=self.days_ago(2))
new_version = version_factory(addon=addon, version='2.0')
new_version.update(created=self.days_ago(1))
unlisted_version = version_factory(
addon=addon, version='3.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
assert (
addon.find_latest_version(channel=amo.RELEASE_CHANNEL_LISTED) ==
new_version)
assert (
addon.find_latest_version(channel=amo.RELEASE_CHANNEL_UNLISTED) ==
unlisted_version)
def test_find_latest_version_no_version(self):
Addon.objects.filter(pk=3723).update(_current_version=None)
Version.objects.filter(addon=3723).delete()
addon = Addon.objects.get(pk=3723)
assert addon.find_latest_version(None) is None
def test_find_latest_version_ignore_beta(self):
addon = Addon.objects.get(pk=3615)
v1 = version_factory(addon=addon, version='1.0')
v1.update(created=self.days_ago(1))
assert addon.find_latest_version(None).id == v1.id
version_factory(addon=addon, version='2.0beta',
file_kw={'status': amo.STATUS_BETA})
# Still should be v1
assert addon.find_latest_version(None).id == v1.id
def test_find_latest_version_ignore_disabled(self):
addon = Addon.objects.get(pk=3615)
v1 = version_factory(addon=addon, version='1.0')
v1.update(created=self.days_ago(1))
assert addon.find_latest_version(None).id == v1.id
version_factory(addon=addon, version='2.0',
file_kw={'status': amo.STATUS_DISABLED})
# Still should be v1
assert addon.find_latest_version(None).id == v1.id
def test_find_latest_version_only_exclude_beta(self):
addon = Addon.objects.get(pk=3615)
v1 = version_factory(addon=addon, version='1.0')
v1.update(created=self.days_ago(2))
assert addon.find_latest_version(
None, exclude=(amo.STATUS_BETA,)).id == v1.id
v2 = version_factory(addon=addon, version='2.0',
file_kw={'status': amo.STATUS_DISABLED})
v2.update(created=self.days_ago(1))
version_factory(addon=addon, version='3.0beta',
file_kw={'status': amo.STATUS_BETA})
# Should be v2 since we don't exclude disabled, but do exclude beta.
assert addon.find_latest_version(
None, exclude=(amo.STATUS_BETA,)).id == v2.id
    def test_find_latest_version_dont_exclude_anything(self):
addon = Addon.objects.get(pk=3615)
v1 = version_factory(addon=addon, version='1.0')
v1.update(created=self.days_ago(2))
assert addon.find_latest_version(None, exclude=()).id == v1.id
v2 = version_factory(addon=addon, version='2.0',
file_kw={'status': amo.STATUS_DISABLED})
v2.update(created=self.days_ago(1))
v3 = version_factory(addon=addon, version='3.0beta',
file_kw={'status': amo.STATUS_BETA})
# Should be v3 since we don't exclude anything.
assert addon.find_latest_version(None, exclude=()).id == v3.id
    def test_find_latest_version_dont_exclude_anything_with_channel(self):
addon = Addon.objects.get(pk=3615)
v1 = version_factory(addon=addon, version='1.0')
v1.update(created=self.days_ago(3))
assert addon.find_latest_version(
amo.RELEASE_CHANNEL_LISTED, exclude=()).id == v1.id
v2 = version_factory(addon=addon, version='2.0',
file_kw={'status': amo.STATUS_DISABLED})
v2.update(created=self.days_ago(2))
v3 = version_factory(addon=addon, version='3.0beta',
file_kw={'status': amo.STATUS_BETA})
        v3.update(created=self.days_ago(1))
version_factory(
addon=addon, version='4.0', channel=amo.RELEASE_CHANNEL_UNLISTED)
# Should be v3 since we don't exclude anything, but do have a channel
# set to listed, and version 4.0 is unlisted.
assert addon.find_latest_version(
amo.RELEASE_CHANNEL_LISTED, exclude=()).id == v3.id
def test_current_version_unsaved(self):
addon = Addon()
addon._current_version = Version()
assert addon.current_version is None
def test_find_latest_version_unsaved(self):
addon = Addon()
assert addon.find_latest_version(None) is None
def test_current_beta_version(self):
addon = Addon.objects.get(pk=5299)
assert addon.current_beta_version.id == 50000
def test_transformer(self):
addon = Addon.objects.get(pk=3615)
# If the transformer works then we won't have any more queries.
with self.assertNumQueries(0):
assert addon.current_version
def _delete(self, addon_id):
"""Test deleting add-ons."""
core.set_user(UserProfile.objects.last())
addon_count = Addon.unfiltered.count()
addon = Addon.objects.get(pk=addon_id)
guid = addon.guid
addon.delete('bye')
assert addon_count == Addon.unfiltered.count() # Soft deletion.
assert addon.status == amo.STATUS_DELETED
assert addon.slug is None
assert addon.current_version is None
assert addon.guid == guid # We don't clear it anymore.
deleted_count = Addon.unfiltered.filter(
status=amo.STATUS_DELETED).count()
assert len(mail.outbox) == deleted_count
log = AddonLog.objects.order_by('-id').first().activity_log
assert log.action == amo.LOG.DELETE_ADDON.id
assert log.to_string() == (
"Addon id {0} with GUID {1} has been deleted".format(addon_id,
guid))
def test_delete(self):
addon = Addon.unfiltered.get(pk=3615)
addon.name = u'é' # Make sure we don't have encoding issues.
addon.save()
self._delete(3615)
# Delete another add-on, and make sure we don't have integrity errors
# with unique constraints on fields that got nullified.
self._delete(5299)
def test_delete_persona(self):
addon = amo.tests.addon_factory(type=amo.ADDON_PERSONA)
assert addon.guid is None # Personas don't have GUIDs.
self._delete(addon.pk)
def _delete_url(self):
"""Test deleting addon has URL in the email."""
a = Addon.objects.get(pk=4594)
url = a.get_url_path()
a.delete('bye')
assert absolutify(url) in mail.outbox[0].body
def test_delete_url(self):
count = Addon.unfiltered.count()
self._delete_url()
assert count == Addon.unfiltered.count()
def test_delete_reason(self):
"""Test deleting with a reason gives the reason in the mail."""
reason = u'trêason'
a = Addon.objects.get(pk=3615)
a.name = u'é'
assert len(mail.outbox) == 0
a.delete(msg='bye', reason=reason)
assert len(mail.outbox) == 1
assert reason in mail.outbox[0].body
def test_delete_incomplete_no_versions(self):
"""Test deleting incomplete add-ons."""
count = Addon.unfiltered.count()
addon = Addon.objects.get(pk=3615)
addon.current_version.delete(hard=True)
# The addon status will have been changed when we deleted the version,
# and the instance should be the same, so we shouldn't need to reload.
assert addon.status == amo.STATUS_NULL
addon.delete(None)
assert len(mail.outbox) == 0
assert Addon.unfiltered.count() == (count - 1)
def test_delete_incomplete_with_versions(self):
"""Test deleting incomplete add-ons."""
count = Addon.unfiltered.count()
a = Addon.objects.get(pk=3615)
a.status = 0
a.save()
a.delete('oh looky here')
assert len(mail.outbox) == 1
assert count == Addon.unfiltered.count()
def test_delete_searchengine(self):
"""
Test deleting searchengines (which have no guids) should not barf up
the deletion machine.
"""
a = Addon.objects.get(pk=4594)
a.delete('bye')
assert len(mail.outbox) == 1
def test_incompatible_latest_apps(self):
a = Addon.objects.get(pk=3615)
assert a.incompatible_latest_apps() == []
av = ApplicationsVersions.objects.get(pk=47881)
av.max = AppVersion.objects.get(pk=97) # Firefox 2.0
av.save()
a = Addon.objects.get(pk=3615)
assert a.incompatible_latest_apps() == [amo.FIREFOX]
# Check a search engine addon.
a = Addon.objects.get(pk=4594)
assert a.incompatible_latest_apps() == []
def test_incompatible_asterix(self):
av = ApplicationsVersions.objects.get(pk=47881)
av.max = AppVersion.objects.create(application=amo.FIREFOX.id,
version_int=version_int('5.*'),
version='5.*')
av.save()
a = Addon.objects.get(pk=3615)
assert a.incompatible_latest_apps() == []
def test_icon_url(self):
"""
Tests for various icons.
1. Test for an icon that exists.
2. Test for default THEME icon.
3. Test for default non-THEME icon.
"""
a = Addon.objects.get(pk=3615)
assert "/3/3615-32.png" in a.icon_url
a = Addon.objects.get(pk=6704)
a.icon_type = None
assert a.icon_url.endswith('/icons/default-theme.png'), (
'No match for %s' % a.icon_url)
a = Addon.objects.get(pk=3615)
a.icon_type = None
assert a.icon_url.endswith('icons/default-32.png')
def test_icon_url_default(self):
a = Addon.objects.get(pk=3615)
a.update(icon_type='')
default = 'icons/default-32.png'
assert a.icon_url.endswith(default)
assert a.get_icon_url(32).endswith(default)
assert a.get_icon_url(32, use_default=True).endswith(default)
assert a.get_icon_url(32, use_default=False) is None
def test_thumbnail_url(self):
"""
Test for the actual thumbnail URL if it should exist, or the no-preview
url.
"""
a = Addon.objects.get(pk=4664)
a.thumbnail_url.index('/previews/thumbs/20/20397.png?modified=')
a = Addon.objects.get(pk=5299)
assert a.thumbnail_url.endswith('/icons/no-preview.png'), (
'No match for %s' % a.thumbnail_url)
def test_is_unreviewed(self):
"""Test if add-on is unreviewed or not"""
# public add-on
a = Addon.objects.get(pk=3615)
assert not a.is_unreviewed(), 'public add-on: is_unreviewed=False'
a.status = amo.STATUS_NOMINATED
assert a.is_unreviewed(), 'pending add-on: is_unreviewed=True'
def test_is_public(self):
# Public add-on.
addon = Addon.objects.get(pk=3615)
assert addon.status == amo.STATUS_PUBLIC
assert addon.is_public()
# Should be public by status, but since it's disabled add-on it's not.
addon.disabled_by_user = True
assert not addon.is_public()
def test_requires_restart(self):
addon = Addon.objects.get(pk=3615)
file_ = addon.current_version.all_files[0]
assert not file_.no_restart
assert file_.requires_restart
assert addon.requires_restart
file_.update(no_restart=True)
assert not Addon.objects.get(pk=3615).requires_restart
addon.versions.all().delete()
addon._current_version = None
assert not addon.requires_restart
def test_is_featured(self):
"""Test if an add-on is globally featured"""
a = Addon.objects.get(pk=1003)
assert a.is_featured(amo.FIREFOX, 'en-US'), (
'globally featured add-on not recognized')
def test_has_full_profile(self):
"""Test if an add-on's developer profile is complete (public)."""
def addon():
return Addon.objects.get(pk=3615)
assert not addon().has_full_profile()
a = addon()
a.the_reason = 'some reason'
a.save()
assert not addon().has_full_profile()
a.the_future = 'some future'
a.save()
assert addon().has_full_profile()
a.the_reason = ''
a.the_future = ''
a.save()
assert not addon().has_full_profile()
def test_has_profile(self):
"""Test if an add-on's developer profile is (partially or entirely)
completed.
"""
def addon():
return Addon.objects.get(pk=3615)
assert not addon().has_profile()
a = addon()
a.the_reason = 'some reason'
a.save()
assert addon().has_profile()
a.the_future = 'some future'
a.save()
assert addon().has_profile()
a.the_reason = ''
a.the_future = ''
a.save()
assert not addon().has_profile()
def newlines_helper(self, string_before):
addon = Addon.objects.get(pk=3615)
addon.privacy_policy = string_before
addon.save()
return addon.privacy_policy.localized_string_clean
def test_newlines_normal(self):
before = ("Paragraph one.\n"
"This should be on the very next line.\n\n"
"Should be two nl's before this line.\n\n\n"
"Should be three nl's before this line.\n\n\n\n"
"Should be four nl's before this line.")
after = before # Nothing special; this shouldn't change.
assert self.newlines_helper(before) == after
def test_newlines_ul(self):
before = ("<ul>\n\n"
"<li>No nl's between the ul and the li.</li>\n\n"
"<li>No nl's between li's.\n\n"
"But there should be two before this line.</li>\n\n"
"</ul>")
after = ("<ul>"
"<li>No nl's between the ul and the li.</li>"
"<li>No nl's between li's.\n\n"
"But there should be two before this line.</li>"
"</ul>")
assert self.newlines_helper(before) == after
def test_newlines_ul_tight(self):
before = ("There should be one nl between this and the ul.\n"
"<ul><li>test</li><li>test</li></ul>\n"
"There should be no nl's above this line.")
after = ("There should be one nl between this and the ul.\n"
"<ul><li>test</li><li>test</li></ul>"
"There should be no nl's above this line.")
assert self.newlines_helper(before) == after
def test_newlines_ul_loose(self):
before = ("There should be two nl's between this and the ul.\n\n"
"<ul><li>test</li><li>test</li></ul>\n\n"
"There should be one nl above this line.")
after = ("There should be two nl's between this and the ul.\n\n"
"<ul><li>test</li><li>test</li></ul>\n"
"There should be one nl above this line.")
assert self.newlines_helper(before) == after
def test_newlines_blockquote_tight(self):
before = ("There should be one nl below this.\n"
"<blockquote>Hi</blockquote>\n"
"There should be no nl's above this.")
after = ("There should be one nl below this.\n"
"<blockquote>Hi</blockquote>"
"There should be no nl's above this.")
assert self.newlines_helper(before) == after
def test_newlines_blockquote_loose(self):
before = ("There should be two nls below this.\n\n"
"<blockquote>Hi</blockquote>\n\n"
"There should be one nl above this.")
after = ("There should be two nls below this.\n\n"
"<blockquote>Hi</blockquote>\n"
"There should be one nl above this.")
assert self.newlines_helper(before) == after
def test_newlines_inline(self):
before = ("If we end a paragraph w/ a <b>non-block-level tag</b>\n\n"
"<b>The newlines</b> should be kept")
after = before # Should stay the same
assert self.newlines_helper(before) == after
def test_newlines_code_inline(self):
before = ("Code tags aren't blocks.\n\n"
"<code>alert(test);</code>\n\n"
"See?")
after = before # Should stay the same
assert self.newlines_helper(before) == after
def test_newlines_li_newlines(self):
before = ("<ul><li>\nxx</li></ul>")
after = ("<ul><li>xx</li></ul>")
assert self.newlines_helper(before) == after
before = ("<ul><li>xx\n</li></ul>")
after = ("<ul><li>xx</li></ul>")
assert self.newlines_helper(before) == after
before = ("<ul><li>xx\nxx</li></ul>")
after = ("<ul><li>xx\nxx</li></ul>")
assert self.newlines_helper(before) == after
before = ("<ul><li></li></ul>")
after = ("<ul><li></li></ul>")
assert self.newlines_helper(before) == after
# All together now
before = ("<ul><li>\nxx</li> <li>xx\n</li> <li>xx\nxx</li> "
"<li></li>\n</ul>")
after = ("<ul><li>xx</li> <li>xx</li> <li>xx\nxx</li> "
"<li></li></ul>")
assert self.newlines_helper(before) == after
def test_newlines_empty_tag(self):
before = ("This is a <b></b> test!")
after = before
assert self.newlines_helper(before) == after
def test_newlines_empty_tag_nested(self):
before = ("This is a <b><i></i></b> test!")
after = before
assert self.newlines_helper(before) == after
def test_newlines_empty_tag_block_nested(self):
b = ("Test.\n\n<blockquote><ul><li></li></ul></blockquote>\ntest.")
a = ("Test.\n\n<blockquote><ul><li></li></ul></blockquote>test.")
assert self.newlines_helper(b) == a
def test_newlines_empty_tag_block_nested_spaced(self):
before = ("Test.\n\n<blockquote>\n\n<ul>\n\n<li>"
"</li>\n\n</ul>\n\n</blockquote>\ntest.")
after = ("Test.\n\n<blockquote><ul><li></li></ul></blockquote>test.")
assert self.newlines_helper(before) == after
def test_newlines_li_newlines_inline(self):
before = ("<ul><li>\n<b>test\ntest\n\ntest</b>\n</li>"
"<li>Test <b>test</b> test.</li></ul>")
after = ("<ul><li><b>test\ntest\n\ntest</b></li>"
"<li>Test <b>test</b> test.</li></ul>")
assert self.newlines_helper(before) == after
def test_newlines_li_all_inline(self):
before = ("Test with <b>no newlines</b> and <code>block level "
"stuff</code> to see what happens.")
after = before # Should stay the same
assert self.newlines_helper(before) == after
def test_newlines_spaced_blocks(self):
before = ("<blockquote>\n\n<ul>\n\n<li>\n\ntest\n\n</li>\n\n"
"</ul>\n\n</blockquote>")
after = "<blockquote><ul><li>test</li></ul></blockquote>"
assert self.newlines_helper(before) == after
def test_newlines_spaced_inline(self):
before = "Line.\n\n<b>\nThis line is bold.\n</b>\n\nThis isn't."
after = before
assert self.newlines_helper(before) == after
def test_newlines_nested_inline(self):
before = "<b>\nThis line is bold.\n\n<i>This is also italic</i></b>"
after = before
assert self.newlines_helper(before) == after
def test_newlines_xss_script(self):
before = "<script>\n\nalert('test');\n</script>"
after = "<script>\n\nalert('test');\n</script>"
assert self.newlines_helper(before) == after
def test_newlines_xss_inline(self):
before = "<b onclick=\"alert('test');\">test</b>"
after = "<b>test</b>"
assert self.newlines_helper(before) == after
@patch('olympia.amo.helpers.urlresolvers.get_outgoing_url')
def test_newlines_attribute_link_doublequote(self, mock_get_outgoing_url):
mock_get_outgoing_url.return_value = 'http://google.com'
before = '<a href="http://google.com">test</a>'
parsed = self.newlines_helper(before)
assert 'rel="nofollow"' in parsed
def test_newlines_attribute_singlequote(self):
before = "<abbr title='laugh out loud'>lol</abbr>"
after = '<abbr title="laugh out loud">lol</abbr>'
assert self.newlines_helper(before) == after
def test_newlines_attribute_doublequote(self):
before = '<abbr title="laugh out loud">lol</abbr>'
after = before
assert self.newlines_helper(before) == after
def test_newlines_attribute_nestedquotes_doublesingle(self):
before = '<abbr title="laugh \'out\' loud">lol</abbr>'
after = before
assert self.newlines_helper(before) == after
def test_newlines_attribute_nestedquotes_singledouble(self):
before = '<abbr title=\'laugh "out" loud\'>lol</abbr>'
after = before
assert self.newlines_helper(before) == after
def test_newlines_unclosed_b(self):
before = ("<b>test")
after = ("<b>test</b>")
assert self.newlines_helper(before) == after
def test_newlines_unclosed_b_wrapped(self):
before = ("This is a <b>test")
after = ("This is a <b>test</b>")
assert self.newlines_helper(before) == after
def test_newlines_unclosed_li(self):
before = ("<ul><li>test</ul>")
after = ("<ul><li>test</li></ul>")
assert self.newlines_helper(before) == after
def test_newlines_malformed_faketag(self):
before = "<madonna"
after = ""
assert self.newlines_helper(before) == after
def test_newlines_correct_faketag(self):
before = "<madonna>"
after = "<madonna>"
assert self.newlines_helper(before) == after
def test_newlines_malformed_tag(self):
before = "<strong"
after = ""
assert self.newlines_helper(before) == after
def test_newlines_malformed_faketag_surrounded(self):
before = "This is a <test of bleach"
after = 'This is a'
assert self.newlines_helper(before) == after
def test_newlines_malformed_tag_surrounded(self):
before = "This is a <strong of bleach"
after = "This is a"
assert self.newlines_helper(before) == after
def test_newlines_less_than(self):
before = "3 < 5"
after = "3 < 5"
assert self.newlines_helper(before) == after
def test_newlines_less_than_tight(self):
before = "abc 3<5 def"
after = "abc 3<5 def"
assert self.newlines_helper(before) == after
def test_app_categories(self):
def get_addon():
return Addon.objects.get(pk=3615)
# This add-on is already associated with three Firefox categories
# using fixtures: Bookmarks, Feeds, Social.
FIREFOX_EXT_CATS = CATEGORIES[amo.FIREFOX.id][amo.ADDON_EXTENSION]
expected_firefox_cats = [
FIREFOX_EXT_CATS['bookmarks'],
FIREFOX_EXT_CATS['feeds-news-blogging'],
FIREFOX_EXT_CATS['social-communication']
]
addon = get_addon()
assert set(addon.all_categories) == set(expected_firefox_cats)
assert addon.app_categories == {amo.FIREFOX: expected_firefox_cats}
# Let's add a thunderbird category.
thunderbird_static_cat = (
CATEGORIES[amo.THUNDERBIRD.id][amo.ADDON_EXTENSION]['tags'])
tb_category = Category.from_static_category(thunderbird_static_cat)
tb_category.save()
AddonCategory.objects.create(addon=addon, category=tb_category)
# Reload the addon to get a fresh, uncached categories list.
addon = get_addon()
# Test that the thunderbird category was added correctly.
assert set(addon.all_categories) == set(
expected_firefox_cats + [thunderbird_static_cat])
assert set(addon.app_categories.keys()) == set(
[amo.FIREFOX, amo.THUNDERBIRD])
assert set(addon.app_categories[amo.FIREFOX]) == set(
expected_firefox_cats)
assert set(addon.app_categories[amo.THUNDERBIRD]) == set(
[thunderbird_static_cat])
def test_app_categories_ignore_unknown_cats(self):
def get_addon():
return Addon.objects.get(pk=3615)
# This add-on is already associated with three Firefox categories
# using fixtures: Bookmarks, Feeds, Social.
FIREFOX_EXT_CATS = CATEGORIES[amo.FIREFOX.id][amo.ADDON_EXTENSION]
expected_firefox_cats = [
FIREFOX_EXT_CATS['bookmarks'],
FIREFOX_EXT_CATS['feeds-news-blogging'],
FIREFOX_EXT_CATS['social-communication']
]
addon = get_addon()
assert set(addon.all_categories) == set(expected_firefox_cats)
assert addon.app_categories == {amo.FIREFOX: expected_firefox_cats}
# Associate this add-on with a couple more categories, including
# one that does not exist in the constants.
unknown_cat = Category.objects.create(
application=amo.SUNBIRD.id, id=123456, type=amo.ADDON_EXTENSION,
name='Sunny D')
AddonCategory.objects.create(addon=addon, category=unknown_cat)
thunderbird_static_cat = (
CATEGORIES[amo.THUNDERBIRD.id][amo.ADDON_EXTENSION]['appearance'])
tb_category = Category.from_static_category(thunderbird_static_cat)
tb_category.save()
AddonCategory.objects.create(addon=addon, category=tb_category)
# Reload the addon to get a fresh, uncached categories list.
addon = get_addon()
# The sunbird category should not be present since it does not match
# an existing static category, thunderbird one should have been added.
assert set(addon.all_categories) == set(
expected_firefox_cats + [thunderbird_static_cat])
assert set(addon.app_categories.keys()) == set(
[amo.FIREFOX, amo.THUNDERBIRD])
assert set(addon.app_categories[amo.FIREFOX]) == set(
expected_firefox_cats)
assert set(addon.app_categories[amo.THUNDERBIRD]) == set(
[thunderbird_static_cat])
def test_review_replies(self):
"""
Make sure that developer replies are not returned as if they were
original reviews.
"""
addon = Addon.objects.get(id=3615)
u = UserProfile.objects.get(pk=999)
version = addon.current_version
new_review = Review(version=version, user=u, rating=2, body='hello',
addon=addon)
new_review.save()
new_reply = Review(version=version, user=addon.authors.all()[0],
addon=addon, reply_to=new_review,
rating=2, body='my reply')
new_reply.save()
review_list = [r.pk for r in addon.reviews]
assert new_review.pk in review_list, (
'Original review must show up in review list.')
assert new_reply.pk not in review_list, (
'Developer reply must not show up in review list.')
def test_takes_contributions(self):
a = Addon(status=amo.STATUS_PUBLIC, wants_contributions=True,
paypal_id='$$')
assert a.takes_contributions
a.status = amo.STATUS_NOMINATED
assert not a.takes_contributions
a.status = amo.STATUS_PUBLIC
a.wants_contributions = False
assert not a.takes_contributions
a.wants_contributions = True
a.paypal_id = None
assert not a.takes_contributions
a.charity_id = 12
assert a.takes_contributions
def test_show_beta(self):
# Addon.current_beta_version will be empty, so show_beta is False.
a = Addon(status=amo.STATUS_PUBLIC)
assert not a.show_beta
@patch('olympia.addons.models.Addon.current_beta_version')
def test_show_beta_with_beta_version(self, beta_mock):
beta_mock.return_value = object()
# Fake current_beta_version to return something truthy.
a = Addon(status=amo.STATUS_PUBLIC)
assert a.show_beta
# We have a beta version but status has to be public.
a.status = amo.STATUS_NOMINATED
assert not a.show_beta
def test_update_logs(self):
addon = Addon.objects.get(id=3615)
core.set_user(UserProfile.objects.all()[0])
addon.versions.all().delete()
entries = ActivityLog.objects.all()
assert entries[0].action == amo.LOG.CHANGE_STATUS.id
def setup_files(self, status):
addon = Addon.objects.create(type=1)
version = Version.objects.create(addon=addon)
File.objects.create(status=status, version=version)
return addon, version
def test_no_change_disabled_user(self):
addon, version = self.setup_files(amo.STATUS_AWAITING_REVIEW)
addon.update(status=amo.STATUS_PUBLIC)
addon.update(disabled_by_user=True)
version.save()
assert addon.status == amo.STATUS_PUBLIC
assert addon.is_disabled
def test_no_change_disabled(self):
addon = Addon.objects.create(type=1)
version = Version.objects.create(addon=addon)
addon.update(status=amo.STATUS_DISABLED)
version.save()
assert addon.status == amo.STATUS_DISABLED
assert addon.is_disabled
def test_no_change_deleted(self):
addon = Addon.objects.create(type=1)
version = Version.objects.create(addon=addon)
addon.update(status=amo.STATUS_DELETED)
version.save()
assert addon.status == amo.STATUS_DELETED
assert addon.is_deleted
def test_removing_public(self):
addon, version = self.setup_files(amo.STATUS_AWAITING_REVIEW)
addon.update(status=amo.STATUS_PUBLIC)
version.save()
assert addon.status == amo.STATUS_NOMINATED
def test_can_request_review_no_files(self):
addon = Addon.objects.get(pk=3615)
addon.versions.all()[0].files.all().delete()
assert addon.can_request_review() is False
def test_can_request_review_rejected(self):
addon = Addon.objects.get(pk=3615)
latest_version = addon.find_latest_version(amo.RELEASE_CHANNEL_LISTED)
latest_version.files.update(status=amo.STATUS_DISABLED)
assert addon.can_request_review() is False
def check_can_request_review(self, status, expected, extra_update_kw=None):
if extra_update_kw is None:
extra_update_kw = {}
addon = Addon.objects.get(pk=3615)
changes = {'status': status, 'disabled_by_user': False}
changes.update(**extra_update_kw)
addon.update(**changes)
assert addon.can_request_review() == expected
def test_can_request_review_null(self):
self.check_can_request_review(amo.STATUS_NULL, True)
def test_can_request_review_null_disabled(self):
self.check_can_request_review(
amo.STATUS_NULL, False, extra_update_kw={'disabled_by_user': True})
def test_can_request_review_nominated(self):
self.check_can_request_review(amo.STATUS_NOMINATED, False)
def test_can_request_review_public(self):
self.check_can_request_review(amo.STATUS_PUBLIC, False)
def test_can_request_review_disabled(self):
self.check_can_request_review(amo.STATUS_DISABLED, False)
def test_can_request_review_deleted(self):
self.check_can_request_review(amo.STATUS_DELETED, False)
def test_none_homepage(self):
# There was an odd error when a translation was set to None.
Addon.objects.create(homepage=None, type=amo.ADDON_EXTENSION)
def test_slug_isdigit(self):
a = Addon.objects.create(type=1, name='xx', slug='123')
assert a.slug == '123~'
a.slug = '44'
a.save()
assert a.slug == '44~'
def test_slug_isdenied(self):
# When an addon is uploaded, it doesn't use the form validation,
        # so we'll just mangle the slug if it's denied.
a = Addon.objects.create(type=1, name='xx', slug='validate')
assert a.slug == 'validate~'
a.slug = 'validate'
a.save()
assert a.slug == 'validate~'
def delete(self):
addon = Addon.objects.get(id=3615)
assert len(mail.outbox) == 0
addon.delete('so long and thanks for all the fish')
assert len(mail.outbox) == 1
def test_delete_to(self):
self.delete()
assert mail.outbox[0].to == [settings.FLIGTAR]
def test_delete_by(self):
try:
user = Addon.objects.get(id=3615).authors.all()[0]
core.set_user(user)
self.delete()
assert 'DELETED BY: 55021' in mail.outbox[0].body
finally:
core.set_user(None)
def test_delete_by_unknown(self):
self.delete()
assert 'DELETED BY: Unknown' in mail.outbox[0].body
def test_delete_mail_not_localized(self):
"""Don't localize the email sent to the admins using the user's
locale."""
with self.activate('pl'):
self.delete()
admin_mail = mail.outbox[0]
# Make sure the type (EXTENSION) isn't localized.
assert 'Deleting EXTENSION a3615 (3615)' in admin_mail.subject
assert 'The following EXTENSION was deleted' in admin_mail.body
def test_view_source(self):
# view_source should default to True.
a = Addon.objects.create(type=1)
assert a.view_source
@patch('olympia.files.models.File.hide_disabled_file')
def test_admin_disabled_file_hidden(self, hide_mock):
a = Addon.objects.get(id=3615)
a.status = amo.STATUS_PUBLIC
a.save()
assert not hide_mock.called
a.status = amo.STATUS_DISABLED
a.save()
assert hide_mock.called
@patch('olympia.files.models.File.hide_disabled_file')
def test_user_disabled_file_hidden(self, hide_mock):
a = Addon.objects.get(id=3615)
a.disabled_by_user = False
a.save()
assert not hide_mock.called
a.disabled_by_user = True
a.save()
assert hide_mock.called
def test_category_transform(self):
addon = Addon.objects.get(id=3615)
cats = addon.categories.filter(application=amo.FIREFOX.id)
names = [c.name for c in cats]
assert addon.get_category(amo.FIREFOX.id).name in names
def test_binary_property(self):
addon = Addon.objects.get(id=3615)
file = addon.current_version.files.all()[0]
file.update(binary=True)
assert addon.binary
def test_binary_components_property(self):
addon = Addon.objects.get(id=3615)
file = addon.current_version.files.all()[0]
file.update(binary_components=True)
assert addon.binary_components
def test_listed_has_complete_metadata_no_categories(self):
addon = Addon.objects.get(id=3615)
assert addon.has_complete_metadata() # Confirm complete already.
addon.categories.all().delete()
addon = Addon.objects.get(id=3615)
assert not addon.has_complete_metadata()
assert addon.has_complete_metadata(has_listed_versions=False)
def test_listed_has_complete_metadata_no_summary(self):
addon = Addon.objects.get(id=3615)
assert addon.has_complete_metadata() # Confirm complete already.
delete_translation(addon, 'summary')
addon = Addon.objects.get(id=3615)
assert not addon.has_complete_metadata()
assert addon.has_complete_metadata(
has_listed_versions=False)
def test_listed_has_complete_metadata_no_license(self):
addon = Addon.objects.get(id=3615)
assert addon.has_complete_metadata() # Confirm complete already.
addon.current_version.update(license=None)
addon = Addon.objects.get(id=3615)
assert not addon.has_complete_metadata()
assert addon.has_complete_metadata(
has_listed_versions=False)
def test_unlisted_has_complete_metadata(self):
addon = Addon.objects.get(id=3615)
self.make_addon_unlisted(addon)
assert addon.has_complete_metadata() # Confirm complete already.
# Clear everything
addon.versions.update(license=None)
addon.categories.all().delete()
delete_translation(addon, 'summary')
addon = Addon.objects.get(id=3615)
assert addon.has_complete_metadata() # Still complete
assert not addon.has_complete_metadata(has_listed_versions=True)
class TestShouldRedirectToSubmitFlow(TestCase):
fixtures = ['base/addon_3615']
def test_no_versions_doesnt_redirect(self):
addon = Addon.objects.get(id=3615)
assert not addon.should_redirect_to_submit_flow()
# Now break addon.
delete_translation(addon, 'summary')
addon = Addon.objects.get(id=3615)
assert not addon.has_complete_metadata()
addon.update(status=amo.STATUS_NULL)
assert addon.should_redirect_to_submit_flow()
for ver in addon.versions.all():
ver.delete()
assert not addon.should_redirect_to_submit_flow()
def test_disabled_versions_doesnt_redirect(self):
addon = Addon.objects.get(id=3615)
assert not addon.should_redirect_to_submit_flow()
# Now break addon.
delete_translation(addon, 'summary')
addon = Addon.objects.get(id=3615)
assert not addon.has_complete_metadata()
addon.update(status=amo.STATUS_NULL)
assert addon.should_redirect_to_submit_flow()
for ver in addon.versions.all():
for file_ in ver.all_files:
file_.update(status=amo.STATUS_DISABLED)
assert not addon.should_redirect_to_submit_flow()
def test_only_null_redirects(self):
addon = Addon.objects.get(id=3615)
assert not addon.should_redirect_to_submit_flow()
# Now break addon.
delete_translation(addon, 'summary')
addon = Addon.objects.get(id=3615)
assert not addon.has_complete_metadata()
status_exc_null = dict(amo.STATUS_CHOICES_ADDON)
status_exc_null.pop(amo.STATUS_NULL)
for status in status_exc_null:
assert not addon.should_redirect_to_submit_flow()
addon.update(status=amo.STATUS_NULL)
assert addon.should_redirect_to_submit_flow()
class TestHasListedAndUnlistedVersions(TestCase):
def setUp(self):
self.addon = addon_factory()
latest_version = self.addon.find_latest_version(
channel=amo.RELEASE_CHANNEL_LISTED)
latest_version.delete(hard=True)
assert self.addon.versions.count() == 0
def test_no_versions(self):
assert not self.addon.has_listed_versions()
assert not self.addon.has_unlisted_versions()
def test_listed_version(self):
version_factory(channel=amo.RELEASE_CHANNEL_LISTED, addon=self.addon)
assert self.addon.has_listed_versions()
assert not self.addon.has_unlisted_versions()
def test_unlisted_version(self):
version_factory(channel=amo.RELEASE_CHANNEL_UNLISTED, addon=self.addon)
assert not self.addon.has_listed_versions()
assert self.addon.has_unlisted_versions()
def test_unlisted_and_listed_versions(self):
version_factory(channel=amo.RELEASE_CHANNEL_LISTED, addon=self.addon)
version_factory(channel=amo.RELEASE_CHANNEL_UNLISTED, addon=self.addon)
assert self.addon.has_listed_versions()
assert self.addon.has_unlisted_versions()
class TestAddonNomination(TestCase):
fixtures = ['base/addon_3615']
def test_set_nomination(self):
a = Addon.objects.get(id=3615)
a.update(status=amo.STATUS_NULL)
a.versions.latest().update(nomination=None)
a.update(status=amo.STATUS_NOMINATED)
assert a.versions.latest().nomination
def test_new_version_inherits_nomination(self):
a = Addon.objects.get(id=3615)
ver = 10
a.update(status=amo.STATUS_NOMINATED)
old_ver = a.versions.latest()
v = Version.objects.create(addon=a, version=str(ver))
assert v.nomination == old_ver.nomination
ver += 1
def test_beta_version_does_not_inherit_nomination(self):
a = Addon.objects.get(id=3615)
a.update(status=amo.STATUS_NULL)
v = Version.objects.create(addon=a, version='1.0')
v.nomination = None
v.save()
a.update(status=amo.STATUS_NOMINATED)
File.objects.create(version=v, status=amo.STATUS_BETA,
filename='foobar.xpi')
v.version = '1.1'
v.save()
assert v.nomination is None
def test_lone_version_does_not_inherit_nomination(self):
a = Addon.objects.get(id=3615)
Version.objects.all().delete()
v = Version.objects.create(addon=a, version='1.0')
assert v.nomination is None
def test_reviewed_addon_does_not_inherit_nomination(self):
a = Addon.objects.get(id=3615)
ver = 10
for st in (amo.STATUS_PUBLIC, amo.STATUS_BETA, amo.STATUS_NULL):
a.update(status=st)
v = Version.objects.create(addon=a, version=str(ver))
assert v.nomination is None
ver += 1
def test_nomination_no_version(self):
# Check that the on_change method still works if there are no versions.
a = Addon.objects.get(id=3615)
a.versions.all().delete()
a.update(status=amo.STATUS_NOMINATED)
def test_nomination_already_set(self):
addon = Addon.objects.get(id=3615)
earlier = datetime.today() - timedelta(days=2)
addon.versions.latest().update(nomination=earlier)
addon.update(status=amo.STATUS_NOMINATED)
assert addon.versions.latest().nomination.date() == earlier.date()
def setup_nomination(self, addon_status=amo.STATUS_NOMINATED,
file_status=amo.STATUS_AWAITING_REVIEW):
addon = Addon.objects.create()
version = Version.objects.create(addon=addon)
File.objects.create(status=file_status, version=version)
        # Cheat the date to make sure we don't get a date on the same second
        # as the code under test runs.
past = self.days_ago(1)
version.update(nomination=past, created=past, modified=past)
addon.update(status=addon_status)
nomination = addon.versions.latest().nomination
assert nomination
return addon, nomination
def test_new_version_of_under_review_addon_does_not_reset_nomination(self):
addon, nomination = self.setup_nomination()
version = Version.objects.create(addon=addon, version='0.2')
File.objects.create(status=amo.STATUS_AWAITING_REVIEW, version=version)
assert addon.versions.latest().nomination == nomination
def test_nomination_not_reset_if_adding_new_versions_and_files(self):
"""
When under review, adding new versions and files should not
reset nomination.
"""
addon, nomination = self.setup_nomination()
# Switching it to a public status.
version = Version.objects.create(addon=addon, version="0.1")
File.objects.create(status=amo.STATUS_PUBLIC, version=version)
assert addon.versions.latest().nomination == nomination
# Adding a new unreviewed version.
version = Version.objects.create(addon=addon, version="0.2")
File.objects.create(status=amo.STATUS_AWAITING_REVIEW, version=version)
assert addon.versions.latest().nomination == nomination
# Adding a new unreviewed version.
version = Version.objects.create(addon=addon, version="0.3")
File.objects.create(status=amo.STATUS_AWAITING_REVIEW, version=version)
assert addon.versions.latest().nomination == nomination
def check_nomination_reset_with_new_version(self, addon, nomination):
version = Version.objects.create(addon=addon, version="0.2")
assert version.nomination is None
File.objects.create(status=amo.STATUS_AWAITING_REVIEW, version=version)
assert addon.versions.latest().nomination != nomination
def test_new_version_of_approved_addon_should_reset_nomination(self):
addon, nomination = self.setup_nomination(
addon_status=amo.STATUS_PUBLIC, file_status=amo.STATUS_PUBLIC)
# Now create a new version with an attached file, and update status.
self.check_nomination_reset_with_new_version(addon, nomination)
class TestThemeDelete(TestCase):
def setUp(self):
super(TestThemeDelete, self).setUp()
self.addon = addon_factory(type=amo.ADDON_PERSONA)
# Taking the creation and modified time back 1 day
self.addon.update(created=self.days_ago(1), modified=self.days_ago(1))
def test_remove_theme_update_m_time(self):
m_time_before = self.addon.modified
self.addon.delete('enough', 'no reason at all')
m_time_after = self.addon.modified
assert m_time_before != m_time_after
class TestAddonDelete(TestCase):
def test_cascades(self):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
AddonCategory.objects.create(
addon=addon,
category=Category.objects.create(type=amo.ADDON_EXTENSION))
AddonDependency.objects.create(
addon=addon, dependent_addon=addon)
AddonUser.objects.create(
addon=addon, user=UserProfile.objects.create())
AppSupport.objects.create(addon=addon, app=1)
CompatOverride.objects.create(addon=addon)
FrozenAddon.objects.create(addon=addon)
Persona.objects.create(addon=addon, persona_id=0)
Preview.objects.create(addon=addon)
AddonLog.objects.create(
addon=addon, activity_log=ActivityLog.objects.create(action=0))
RssKey.objects.create(addon=addon)
# This should not throw any FK errors if all the cascades work.
addon.delete()
# Make sure it was actually a hard delete.
assert not Addon.unfiltered.filter(pk=addon.pk).exists()
def test_review_delete(self):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION,
status=amo.STATUS_PUBLIC)
review = Review.objects.create(addon=addon, rating=1, body='foo',
user=UserProfile.objects.create())
flag = ReviewFlag(review=review)
addon.delete()
assert Addon.unfiltered.filter(pk=addon.pk).exists()
assert not Review.objects.filter(pk=review.pk).exists()
assert not ReviewFlag.objects.filter(pk=flag.pk).exists()
def test_delete_with_deleted_versions(self):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
version = Version.objects.create(addon=addon, version="1.0")
version.delete()
addon.delete()
assert Addon.unfiltered.filter(pk=addon.pk).exists()
class TestAddonFeatureCompatibility(TestCase):
fixtures = ['base/addon_3615']
def test_feature_compatibility_not_present(self):
addon = Addon.objects.get(pk=3615)
assert addon.feature_compatibility
assert not addon.feature_compatibility.pk
def test_feature_compatibility_present(self):
addon = Addon.objects.get(pk=3615)
AddonFeatureCompatibility.objects.create(addon=addon)
assert addon.feature_compatibility
assert addon.feature_compatibility.pk
class TestUpdateStatus(TestCase):
def test_no_file_ends_with_NULL(self):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
addon.status = amo.STATUS_NOMINATED
addon.save()
assert Addon.objects.no_cache().get(pk=addon.pk).status == (
amo.STATUS_NOMINATED)
Version.objects.create(addon=addon)
assert Addon.objects.no_cache().get(pk=addon.pk).status == (
amo.STATUS_NULL)
def test_no_valid_file_ends_with_NULL(self):
addon = Addon.objects.create(type=amo.ADDON_EXTENSION)
version = Version.objects.create(addon=addon)
f = File.objects.create(status=amo.STATUS_AWAITING_REVIEW,
version=version)
addon.status = amo.STATUS_NOMINATED
addon.save()
assert Addon.objects.no_cache().get(pk=addon.pk).status == (
amo.STATUS_NOMINATED)
f.status = amo.STATUS_DISABLED
f.save()
assert Addon.objects.no_cache().get(pk=addon.pk).status == (
amo.STATUS_NULL)
def test_unlisted_versions_ignored(self):
addon = addon_factory(status=amo.STATUS_PUBLIC)
addon.update_status()
assert Addon.objects.no_cache().get(pk=addon.pk).status == (
amo.STATUS_PUBLIC)
addon.current_version.update(channel=amo.RELEASE_CHANNEL_UNLISTED)
# update_status will have been called via versions.models.update_status
assert Addon.objects.no_cache().get(pk=addon.pk).status == (
amo.STATUS_NULL) # No listed versions so now NULL
class TestGetVersion(TestCase):
fixtures = ['base/addon_3615', ]
def setUp(self):
super(TestGetVersion, self).setUp()
self.addon = Addon.objects.get(id=3615)
self.version = self.addon.current_version
def test_public_new_public_version(self):
new_version = version_factory(
addon=self.addon, file_kw={'status': amo.STATUS_PUBLIC})
assert self.addon.find_latest_public_listed_version() == new_version
def test_public_new_unreviewed_version(self):
version_factory(
addon=self.addon, file_kw={'status': amo.STATUS_AWAITING_REVIEW})
assert self.addon.find_latest_public_listed_version() == self.version
def test_should_promote_previous_valid_version_if_latest_is_disabled(self):
version_factory(
addon=self.addon, file_kw={'status': amo.STATUS_DISABLED})
assert self.addon.find_latest_public_listed_version() == self.version
def test_should_be_listed(self):
new_version = version_factory(
addon=self.addon,
channel=amo.RELEASE_CHANNEL_UNLISTED,
file_kw={'status': amo.STATUS_PUBLIC})
assert new_version != self.version
# Since the new version is unlisted, find_latest_public_listed_version
# should still find the current one.
assert self.addon.find_latest_public_listed_version() == self.version
class TestAddonGetURLPath(TestCase):
def test_get_url_path(self):
addon = addon_factory(slug='woo')
assert addon.get_url_path() == '/en-US/firefox/addon/woo/'
def test_get_url_path_more(self):
addon = addon_factory(slug='yeah')
assert addon.get_url_path(more=True) == (
'/en-US/firefox/addon/yeah/more')
def test_unlisted_addon_get_url_path(self):
addon = addon_factory(
slug='woo', version_kw={'channel': amo.RELEASE_CHANNEL_UNLISTED})
assert addon.get_url_path() == ''
class TestAddonModelsFeatured(TestCase):
fixtures = ['base/appversion', 'base/users',
'addons/featured', 'bandwagon/featured_collections',
'base/addon_3615', 'base/collections', 'base/featured']
def setUp(self):
super(TestAddonModelsFeatured, self).setUp()
# Addon._featured keeps an in-process cache we need to clear.
if hasattr(Addon, '_featured'):
del Addon._featured
def _test_featured_random(self):
f = Addon.featured_random(amo.FIREFOX, 'en-US')
assert sorted(f) == [1001, 1003, 2464, 3481, 7661, 15679]
f = Addon.featured_random(amo.FIREFOX, 'fr')
assert sorted(f) == [1001, 1003, 2464, 7661, 15679]
f = Addon.featured_random(amo.THUNDERBIRD, 'en-US')
assert f == []
def test_featured_random(self):
self._test_featured_random()
class TestBackupVersion(TestCase):
fixtures = ['addons/update', 'base/appversion']
def setUp(self):
super(TestBackupVersion, self).setUp()
self.version_1_2_0 = 105387
self.addon = Addon.objects.get(pk=1865)
core.set_user(None)
def setup_new_version(self):
for version in Version.objects.filter(pk__gte=self.version_1_2_0):
appversion = version.apps.all()[0]
appversion.min = AppVersion.objects.get(version='4.0b1')
appversion.save()
def test_no_current_version(self):
for v in Version.objects.all():
v.delete()
self.addon.update(_current_version=None)
assert self.addon.current_version is None
def test_current_version_listed_only(self):
version = self.addon.current_version
version.update(channel=amo.RELEASE_CHANNEL_UNLISTED)
        # The call above should have triggered update_version().
assert self.addon.current_version != version
# new current_version should be version 1.2.1, since 1.2.2 is unlisted.
assert self.addon.current_version == Version.objects.get(pk=112396)
def test_firefox_versions(self):
self.setup_new_version()
self.addon.update_version()
current = self.addon.current_version.compatible_apps[amo.FIREFOX]
assert current.max.version == '4.0b8pre'
assert current.min.version == '3.0.12'
def test_version_signals(self):
self.addon.update(_current_version=None)
self.setup_new_version()
version = self.addon.versions.all()[0]
assert not self.addon.current_version
version.save()
assert Addon.objects.get(pk=1865).current_version
def test_update_version_theme(self):
# Test versions do not get deleted when calling with theme.
self.addon.update(type=amo.ADDON_PERSONA)
assert not self.addon.update_version()
assert self.addon._current_version
# Test latest version copied to current version if no current version.
self.addon.update(_current_version=None, _signal=False)
assert self.addon.update_version()
assert self.addon._current_version == (
self.addon.find_latest_version(None))
class TestCategoryModel(TestCase):
def test_category_url(self):
"""Every type must have a url path for its categories."""
for t in amo.ADDON_TYPE.keys():
if t == amo.ADDON_DICT:
continue # Language packs don't have categories.
cat = Category(type=t, slug='omg')
assert cat.get_url_path()
def test_name_from_constants(self):
category = Category(
type=amo.ADDON_EXTENSION, application=amo.FIREFOX.id,
slug='alerts-updates')
assert category.name == u'Alerts & Updates'
with translation.override('fr'):
assert category.name == u'Alertes et mises à jour'
def test_name_fallback_to_db(self):
category = Category.objects.create(
type=amo.ADDON_EXTENSION, application=amo.FIREFOX.id,
slug='this-cat-does-not-exist', db_name=u'ALAAAAAAARM')
assert category.name == u'ALAAAAAAARM'
with translation.override('fr'):
assert category.name == u'ALAAAAAAARM'
class TestPersonaModel(TestCase):
fixtures = ['addons/persona']
def setUp(self):
super(TestPersonaModel, self).setUp()
self.addon = Addon.objects.get(id=15663)
self.persona = self.addon.persona
self.persona.header = 'header.png'
self.persona.footer = 'footer.png'
self.persona.save()
modified = int(time.mktime(self.persona.addon.modified.timetuple()))
self.p = lambda fn: '/15663/%s?%s' % (fn, modified)
def test_image_urls(self):
# AMO-uploaded themes have `persona_id=0`.
self.persona.persona_id = 0
self.persona.save()
assert self.persona.thumb_url.endswith(self.p('preview.png'))
assert self.persona.icon_url.endswith(self.p('icon.png'))
assert self.persona.preview_url.endswith(self.p('preview.png'))
assert self.persona.header_url.endswith(self.p('header.png'))
assert self.persona.footer_url.endswith(self.p('footer.png'))
def test_old_image_urls(self):
assert self.persona.thumb_url.endswith(self.p('preview.jpg'))
assert self.persona.icon_url.endswith(self.p('preview_small.jpg'))
assert self.persona.preview_url.endswith(self.p('preview_large.jpg'))
assert self.persona.header_url.endswith(self.p('header.png'))
assert self.persona.footer_url.endswith(self.p('footer.png'))
def test_update_url(self):
with self.settings(LANGUAGE_CODE='fr', LANGUAGE_URL_MAP={}):
url_ = self.persona.update_url
assert url_.endswith('/fr/themes/update-check/15663')
def test_json_data(self):
self.persona.addon.all_categories = [Category(db_name='Yolo Art')]
VAMO = 'https://vamo/%(locale)s/themes/update-check/%(id)d'
with self.settings(LANGUAGE_CODE='fr',
LANGUAGE_URL_MAP={},
NEW_PERSONAS_UPDATE_URL=VAMO,
SITE_URL='https://omgsh.it'):
data = self.persona.theme_data
id_ = str(self.persona.addon.id)
assert data['id'] == id_
assert data['name'] == unicode(self.persona.addon.name)
assert data['accentcolor'] == '#8d8d97'
assert data['textcolor'] == '#ffffff'
assert data['category'] == 'Yolo Art'
assert data['author'] == 'persona_author'
assert data['description'] == unicode(self.addon.description)
assert data['headerURL'].startswith(
'%s%s/header.png?' % (user_media_url('addons'), id_))
assert data['footerURL'].startswith(
'%s%s/footer.png?' % (user_media_url('addons'), id_))
assert data['previewURL'].startswith(
'%s%s/preview_large.jpg?' % (user_media_url('addons'), id_))
assert data['iconURL'].startswith(
'%s%s/preview_small.jpg?' % (user_media_url('addons'), id_))
assert data['detailURL'] == (
'https://omgsh.it%s' % self.persona.addon.get_url_path())
assert data['updateURL'] == (
'https://vamo/fr/themes/update-check/' + id_)
assert data['version'] == '1.0'
def test_json_data_new_persona(self):
self.persona.persona_id = 0 # Make this a "new" theme.
self.persona.save()
self.persona.addon.all_categories = [Category(db_name='Yolo Art')]
VAMO = 'https://vamo/%(locale)s/themes/update-check/%(id)d'
with self.settings(LANGUAGE_CODE='fr',
LANGUAGE_URL_MAP={},
NEW_PERSONAS_UPDATE_URL=VAMO,
SITE_URL='https://omgsh.it'):
data = self.persona.theme_data
id_ = str(self.persona.addon.id)
assert data['id'] == id_
assert data['name'] == unicode(self.persona.addon.name)
assert data['accentcolor'] == '#8d8d97'
assert data['textcolor'] == '#ffffff'
assert data['category'] == 'Yolo Art'
assert data['author'] == 'persona_author'
assert data['description'] == unicode(self.addon.description)
assert data['headerURL'].startswith(
'%s%s/header.png?' % (user_media_url('addons'), id_))
assert data['footerURL'].startswith(
'%s%s/footer.png?' % (user_media_url('addons'), id_))
assert data['previewURL'].startswith(
'%s%s/preview.png?' % (user_media_url('addons'), id_))
assert data['iconURL'].startswith(
'%s%s/icon.png?' % (user_media_url('addons'), id_))
assert data['detailURL'] == (
'https://omgsh.it%s' % self.persona.addon.get_url_path())
assert data['updateURL'] == (
'https://vamo/fr/themes/update-check/' + id_)
assert data['version'] == '1.0'
def test_image_urls_without_footer(self):
self.persona.footer = ''
self.persona.save()
assert self.persona.footer_url == ''
def test_json_data_without_footer(self):
self.persona.footer = ''
self.persona.save()
data = self.persona.theme_data
assert data['footerURL'] == ''
assert data['footer'] == ''
class TestPreviewModel(TestCase):
fixtures = ['base/previews']
def test_as_dict(self):
expect = ['caption', 'full', 'thumbnail']
reality = sorted(Preview.objects.all()[0].as_dict().keys())
assert expect == reality
def test_filename(self):
preview = Preview.objects.get(pk=24)
assert 'png' in preview.thumbnail_path
assert 'png' in preview.image_path
def test_filename_in_url(self):
preview = Preview.objects.get(pk=24)
assert 'png' in preview.thumbnail_url
assert 'png' in preview.image_url
def check_delete(self, preview, filename):
"""
Test that when the Preview object is deleted, its image and thumb
are deleted from the filesystem.
"""
try:
with storage.open(filename, 'w') as f:
f.write('sample data\n')
assert storage.exists(filename)
preview.delete()
assert not storage.exists(filename)
finally:
if storage.exists(filename):
storage.delete(filename)
def test_delete_image(self):
preview = Preview.objects.get(pk=24)
self.check_delete(preview, preview.image_path)
def test_delete_thumbnail(self):
preview = Preview.objects.get(pk=24)
self.check_delete(preview, preview.thumbnail_path)
class TestAddonDependencies(TestCase):
fixtures = ['base/appversion',
'base/users',
'base/addon_5299_gcal',
'base/addon_3615',
'base/addon_3723_listed',
'base/addon_6704_grapple',
'base/addon_4664_twitterbar']
def test_dependencies(self):
ids = [3615, 3723, 4664, 6704]
addon = Addon.objects.get(id=5299)
dependencies = Addon.objects.in_bulk(ids)
for dependency in dependencies.values():
AddonDependency(addon=addon, dependent_addon=dependency).save()
# Make sure all dependencies were saved correctly.
assert sorted([a.id for a in addon.dependencies.all()]) == sorted(ids)
# Add-on 3723 is disabled and won't show up in `all_dependencies`
# property.
assert addon.all_dependencies == [
dependencies[3615], dependencies[4664], dependencies[6704]]
# Adding another dependency won't change anything because we're already
# at the maximum (3).
new_dep = amo.tests.addon_factory()
AddonDependency.objects.create(addon=addon, dependent_addon=new_dep)
assert addon.all_dependencies == [
dependencies[3615], dependencies[4664], dependencies[6704]]
# Removing the first dependency will allow the one we just created to
# be visible.
dependencies[3615].delete()
assert addon.all_dependencies == [
dependencies[4664], dependencies[6704], new_dep]
def test_unique_dependencies(self):
a = Addon.objects.get(id=5299)
b = Addon.objects.get(id=3615)
AddonDependency.objects.create(addon=a, dependent_addon=b)
assert list(a.dependencies.values_list('id', flat=True)) == [3615]
with self.assertRaises(IntegrityError):
AddonDependency.objects.create(addon=a, dependent_addon=b)
class TestListedAddonTwoVersions(TestCase):
fixtures = ['addons/listed-two-versions']
def test_listed_two_versions(self):
Addon.objects.get(id=2795) # bug 563967
class TestAddonFromUpload(UploadTest):
fixtures = ['base/users']
def setUp(self):
super(TestAddonFromUpload, self).setUp()
u = UserProfile.objects.get(pk=999)
core.set_user(u)
self.platform = amo.PLATFORM_MAC.id
for version in ('3.0', '3.6.*'):
AppVersion.objects.create(application=1, version=version)
self.addCleanup(translation.deactivate)
def manifest(self, basename):
return os.path.join(
settings.ROOT, 'src', 'olympia', 'devhub', 'tests', 'addons',
basename)
def test_denied_guid(self):
"""New deletions won't be added to DeniedGuid but legacy support
should still be tested."""
DeniedGuid.objects.create(guid='guid@xpi')
with self.assertRaises(forms.ValidationError) as e:
Addon.from_upload(self.get_upload('extension.xpi'),
[self.platform])
assert e.exception.messages == ['Duplicate add-on ID found.']
def test_existing_guid(self):
# Upload addon so we can delete it.
deleted = Addon.from_upload(self.get_upload('extension.xpi'),
[self.platform])
deleted.update(status=amo.STATUS_PUBLIC)
deleted.delete()
assert deleted.guid == 'guid@xpi'
# Now upload the same add-on again (so same guid).
with self.assertRaises(forms.ValidationError) as e:
Addon.from_upload(self.get_upload('extension.xpi'),
[self.platform])
assert e.exception.messages == ['Duplicate add-on ID found.']
def test_existing_guid_same_author(self):
# Upload addon so we can delete it.
deleted = Addon.from_upload(self.get_upload('extension.xpi'),
[self.platform])
# Claim the add-on.
AddonUser(addon=deleted, user=UserProfile.objects.get(pk=999)).save()
deleted.update(status=amo.STATUS_PUBLIC)
deleted.delete()
assert deleted.guid == 'guid@xpi'
# Now upload the same add-on again (so same guid), checking no
        # ValidationError is raised this time.
addon = Addon.from_upload(self.get_upload('extension.xpi'),
[self.platform])
deleted.reload()
assert addon.guid == 'guid@xpi'
assert deleted.guid == 'guid-reused-by-pk-%s' % addon.pk
def test_old_soft_deleted_addons_and_upload_non_extension(self):
"""We used to just null out GUIDs on soft deleted addons. This test
makes sure we don't fail badly when uploading an add-on which isn't an
extension (has no GUID).
See https://github.com/mozilla/addons-server/issues/1659."""
# Upload a couple of addons so we can pretend they were soft deleted.
deleted1 = Addon.from_upload(
self.get_upload('extension.xpi'), [self.platform])
deleted2 = Addon.from_upload(
self.get_upload('alt-rdf.xpi'), [self.platform])
AddonUser(addon=deleted1, user=UserProfile.objects.get(pk=999)).save()
AddonUser(addon=deleted2, user=UserProfile.objects.get(pk=999)).save()
# Soft delete them like they were before, by nullifying their GUIDs.
deleted1.update(status=amo.STATUS_PUBLIC, guid=None)
deleted2.update(status=amo.STATUS_PUBLIC, guid=None)
# Now upload a new add-on which isn't an extension, and has no GUID.
# This fails if we try to reclaim the GUID from deleted add-ons: the
# GUID is None, so it'll try to get the add-on that has a GUID which is
# None, but many are returned. So make sure we're not trying to reclaim
# the GUID.
Addon.from_upload(
self.get_upload('search.xml'), [self.platform])
def test_xpi_attributes(self):
addon = Addon.from_upload(self.get_upload('extension.xpi'),
[self.platform])
assert addon.name == 'xpi name'
assert addon.guid == 'guid@xpi'
assert addon.type == amo.ADDON_EXTENSION
assert addon.status == amo.STATUS_NULL
assert addon.homepage == 'http://homepage.com'
assert addon.summary == 'xpi description'
assert addon.description is None
assert addon.slug == 'xpi-name'
def test_xpi_version(self):
addon = Addon.from_upload(self.get_upload('extension.xpi'),
[self.platform])
v = addon.versions.get()
assert v.version == '0.1'
assert v.files.get().platform == self.platform
assert v.files.get().status == amo.STATUS_AWAITING_REVIEW
def test_xpi_for_multiple_platforms(self):
platforms = [amo.PLATFORM_LINUX.id, amo.PLATFORM_MAC.id]
addon = Addon.from_upload(self.get_upload('extension.xpi'),
platforms)
v = addon.versions.get()
assert sorted([f.platform for f in v.all_files]) == (
sorted(platforms))
def test_search_attributes(self):
addon = Addon.from_upload(self.get_upload('search.xml'),
[self.platform])
assert addon.name == 'search tool'
assert addon.guid is None
assert addon.type == amo.ADDON_SEARCH
assert addon.status == amo.STATUS_NULL
assert addon.homepage is None
assert addon.description is None
assert addon.slug == 'search-tool'
assert addon.summary == 'Search Engine for Firefox'
def test_search_version(self):
addon = Addon.from_upload(self.get_upload('search.xml'),
[self.platform])
v = addon.versions.get()
assert v.version == datetime.now().strftime('%Y%m%d')
assert v.files.get().platform == amo.PLATFORM_ALL.id
assert v.files.get().status == amo.STATUS_AWAITING_REVIEW
def test_no_homepage(self):
addon = Addon.from_upload(self.get_upload('extension-no-homepage.xpi'),
[self.platform])
assert addon.homepage is None
def test_default_locale(self):
# Make sure default_locale follows the active translation.
addon = Addon.from_upload(self.get_upload('search.xml'),
[self.platform])
assert addon.default_locale == 'en-US'
translation.activate('es')
addon = Addon.from_upload(self.get_upload('search.xml'),
[self.platform])
assert addon.default_locale == 'es'
def test_validation_completes(self):
upload = self.get_upload('extension.xpi')
assert not upload.validation_timeout
addon = Addon.from_upload(upload, [self.platform])
assert not addon.admin_review
def test_validation_timeout(self):
upload = self.get_upload('extension.xpi')
validation = json.loads(upload.validation)
timeout_message = {
'id': ['validator', 'unexpected_exception', 'validation_timeout'],
}
validation['messages'] = [timeout_message] + validation['messages']
upload.validation = json.dumps(validation)
assert upload.validation_timeout
addon = Addon.from_upload(upload, [self.platform])
assert addon.admin_review
def test_webextension_generate_guid(self):
addon = Addon.from_upload(
self.get_upload('webextension_no_id.xpi'),
[self.platform])
assert addon.guid is not None
assert addon.guid.startswith('{')
assert addon.guid.endswith('}')
        # Uploading the same addon without an id works.
new_addon = Addon.from_upload(
self.get_upload('webextension_no_id.xpi'),
[self.platform])
assert new_addon.guid is not None
assert new_addon.guid != addon.guid
assert addon.guid.startswith('{')
assert addon.guid.endswith('}')
def test_webextension_reuse_guid(self):
addon = Addon.from_upload(
self.get_upload('webextension.xpi'),
[self.platform])
assert addon.guid == '@webextension-guid'
        # Uploading the same addon with a pre-existing id fails
with self.assertRaises(forms.ValidationError) as e:
Addon.from_upload(self.get_upload('webextension.xpi'),
[self.platform])
assert e.exception.messages == ['Duplicate add-on ID found.']
def test_basic_extension_is_marked_as_e10s_unknown(self):
# extension.xpi does not have multiprocessCompatible set to true, so
        # its e10s compatibility is marked as unknown.
addon = Addon.from_upload(
self.get_upload('extension.xpi'),
[self.platform])
assert addon.guid
feature_compatibility = addon.feature_compatibility
assert feature_compatibility.pk
assert feature_compatibility.e10s == amo.E10S_UNKNOWN
def test_extension_is_marked_as_e10s_incompatible(self):
addon = Addon.from_upload(
self.get_upload('multiprocess_incompatible_extension.xpi'),
[self.platform])
assert addon.guid
feature_compatibility = addon.feature_compatibility
assert feature_compatibility.pk
assert feature_compatibility.e10s == amo.E10S_INCOMPATIBLE
def test_multiprocess_extension_is_marked_as_e10s_compatible(self):
addon = Addon.from_upload(
self.get_upload('multiprocess_compatible_extension.xpi'),
[self.platform])
assert addon.guid
feature_compatibility = addon.feature_compatibility
assert feature_compatibility.pk
assert feature_compatibility.e10s == amo.E10S_COMPATIBLE
def test_webextension_is_marked_as_e10s_compatible(self):
addon = Addon.from_upload(
self.get_upload('webextension.xpi'),
[self.platform])
assert addon.guid
feature_compatibility = addon.feature_compatibility
assert feature_compatibility.pk
assert feature_compatibility.e10s == amo.E10S_COMPATIBLE_WEBEXTENSION
def test_webextension_resolve_translations(self):
addon = Addon.from_upload(
self.get_upload('notify-link-clicks-i18n.xpi'),
[self.platform])
# Normalized from `en` to `en-US`
assert addon.default_locale == 'en-US'
assert addon.name == 'Notify link clicks i18n'
assert addon.summary == (
'Shows a notification when the user clicks on links.')
# Make sure we set the correct slug
assert addon.slug == 'notify-link-clicks-i18n'
translation.activate('de')
addon.reload()
assert addon.name == 'Meine Beispielerweiterung'
assert addon.summary == u'Benachrichtigt den Benutzer über Linkklicks'
@patch('olympia.addons.models.parse_addon')
def test_webext_resolve_translations_corrects_locale(self, parse_addon):
"""Make sure we correct invalid `default_locale` values"""
parse_addon.return_value = {
'default_locale': u'en',
'e10s_compatibility': 2,
'guid': u'[email protected]',
'name': u'__MSG_extensionName__',
'is_webextension': True,
'type': 1,
'apps': [],
'summary': u'__MSG_extensionDescription__',
'version': u'1.0',
'homepage': '...'
}
addon = Addon.from_upload(
self.get_upload('notify-link-clicks-i18n.xpi'),
[self.platform])
# Normalized from `en` to `en-US`
assert addon.default_locale == 'en-US'
@patch('olympia.addons.models.parse_addon')
def test_webext_resolve_translations_unknown_locale(self, parse_addon):
"""Make sure we use our default language as default
for invalid locales
"""
parse_addon.return_value = {
'default_locale': u'xxx',
'e10s_compatibility': 2,
'guid': u'[email protected]',
'name': u'__MSG_extensionName__',
'is_webextension': True,
'type': 1,
'apps': [],
'summary': u'__MSG_extensionDescription__',
'version': u'1.0',
'homepage': '...'
}
addon = Addon.from_upload(
self.get_upload('notify-link-clicks-i18n.xpi'),
[self.platform])
        # Unknown locale `xxx` falls back to the default `en-US`
assert addon.default_locale == 'en-US'
REDIRECT_URL = 'https://outgoing.prod.mozaws.net/v1/'
class TestCharity(TestCase):
fixtures = ['base/charity.json']
@patch.object(settings, 'REDIRECT_URL', REDIRECT_URL)
def test_url(self):
charity = Charity(name="a", paypal="b", url="http://foo.com")
charity.save()
assert charity.outgoing_url.startswith(REDIRECT_URL)
@patch.object(settings, 'REDIRECT_URL', REDIRECT_URL)
def test_url_foundation(self):
foundation = Charity.objects.get(pk=amo.FOUNDATION_ORG)
assert not foundation.outgoing_url.startswith(REDIRECT_URL)
class TestFrozenAddons(TestCase):
def test_immediate_freeze(self):
# Adding a FrozenAddon should immediately drop the addon's hotness.
a = Addon.objects.create(type=1, hotness=22)
FrozenAddon.objects.create(addon=a)
assert Addon.objects.get(id=a.id).hotness == 0
class TestRemoveLocale(TestCase):
def test_remove(self):
a = Addon.objects.create(type=1)
a.name = {'en-US': 'woo', 'el': 'yeah'}
a.description = {'en-US': 'woo', 'el': 'yeah', 'he': 'ola'}
a.save()
a.remove_locale('el')
qs = (Translation.objects.filter(localized_string__isnull=False)
.values_list('locale', flat=True))
assert sorted(qs.filter(id=a.name_id)) == ['en-US']
assert sorted(qs.filter(id=a.description_id)) == ['en-US', 'he']
def test_remove_version_locale(self):
addon = Addon.objects.create(type=amo.ADDON_THEME)
version = Version.objects.create(addon=addon)
version.releasenotes = {'fr': 'oui'}
version.save()
addon.remove_locale('fr')
assert not (Translation.objects.filter(localized_string__isnull=False)
.values_list('locale', flat=True))
class TestAddonWatchDisabled(TestCase):
def setUp(self):
super(TestAddonWatchDisabled, self).setUp()
self.addon = Addon(type=amo.ADDON_THEME, disabled_by_user=False,
status=amo.STATUS_PUBLIC)
self.addon.save()
@patch('olympia.addons.models.File.objects.filter')
def test_no_disabled_change(self, file_mock):
mock = Mock()
file_mock.return_value = [mock]
self.addon.save()
assert not mock.unhide_disabled_file.called
assert not mock.hide_disabled_file.called
@patch('olympia.addons.models.File.objects.filter')
def test_disable_addon(self, file_mock):
mock = Mock()
file_mock.return_value = [mock]
self.addon.update(disabled_by_user=True)
assert not mock.unhide_disabled_file.called
assert mock.hide_disabled_file.called
@patch('olympia.addons.models.File.objects.filter')
def test_admin_disable_addon(self, file_mock):
mock = Mock()
file_mock.return_value = [mock]
self.addon.update(status=amo.STATUS_DISABLED)
assert not mock.unhide_disabled_file.called
assert mock.hide_disabled_file.called
@patch('olympia.addons.models.File.objects.filter')
def test_enable_addon(self, file_mock):
mock = Mock()
file_mock.return_value = [mock]
self.addon.update(status=amo.STATUS_DISABLED)
mock.reset_mock()
self.addon.update(status=amo.STATUS_PUBLIC)
assert mock.unhide_disabled_file.called
assert not mock.hide_disabled_file.called
class TestAddonWatchDeveloperNotes(TestCase):
def make_addon(self, **kwargs):
addon = Addon(type=amo.ADDON_EXTENSION, status=amo.STATUS_PUBLIC,
**kwargs)
addon.save()
addon.versions.create(has_info_request=True)
addon.versions.create(has_info_request=False)
addon.versions.create(has_info_request=True)
return addon
def assertHasInfoSet(self, addon):
assert any([v.has_info_request for v in addon.versions.all()])
def assertHasInfoNotSet(self, addon):
assert all([not v.has_info_request for v in addon.versions.all()])
def test_has_info_save(self):
"""Test saving without a change doesn't clear has_info_request."""
addon = self.make_addon()
self.assertHasInfoSet(addon)
addon.save()
self.assertHasInfoSet(addon)
def test_has_info_update_whiteboard(self):
"""Test saving with a change to whiteboard clears has_info_request."""
addon = self.make_addon()
self.assertHasInfoSet(addon)
addon.whiteboard = 'Info about things.'
addon.save()
self.assertHasInfoNotSet(addon)
def test_has_info_update_whiteboard_no_change(self):
"""Test saving without a change to whiteboard doesn't clear
has_info_request."""
addon = self.make_addon(whiteboard='Info about things.')
self.assertHasInfoSet(addon)
addon.whiteboard = 'Info about things.'
addon.save()
self.assertHasInfoSet(addon)
def test_has_info_whiteboard_removed(self):
"""Test saving with an empty whiteboard doesn't clear
has_info_request."""
addon = self.make_addon(whiteboard='Info about things.')
self.assertHasInfoSet(addon)
addon.whiteboard = ''
addon.save()
self.assertHasInfoSet(addon)
def test_has_info_update_developer_comments(self):
"""Test saving with a change to developer_comments clears
has_info_request."""
addon = self.make_addon()
self.assertHasInfoSet(addon)
addon.developer_comments = 'Things are thing-like.'
addon.save()
self.assertHasInfoNotSet(addon)
def test_has_info_update_developer_comments_again(self):
"""Test saving a change to developer_comments when developer_comments
was already set clears has_info_request (developer_comments is a
PurifiedField so it is really just an id)."""
addon = self.make_addon(developer_comments='Wat things like.')
self.assertHasInfoSet(addon)
addon.developer_comments = 'Things are thing-like.'
addon.save()
self.assertHasInfoNotSet(addon)
def test_has_info_update_developer_comments_no_change(self):
"""Test saving without a change to developer_comments doesn't clear
has_info_request."""
addon = self.make_addon(developer_comments='Things are thing-like.')
self.assertHasInfoSet(addon)
addon.developer_comments = 'Things are thing-like.'
addon.save()
self.assertHasInfoSet(addon)
def test_has_info_remove_developer_comments(self):
"""Test saving with an empty developer_comments doesn't clear
has_info_request."""
addon = self.make_addon(developer_comments='Things are thing-like.')
self.assertHasInfoSet(addon)
addon.developer_comments = ''
addon.save()
self.assertHasInfoSet(addon)
class TestTrackAddonStatusChange(TestCase):
def create_addon(self, **kwargs):
return addon_factory(kwargs.pop('status', amo.STATUS_NULL), **kwargs)
def test_increment_new_status(self):
with patch('olympia.addons.models.track_addon_status_change') as mock_:
addon = Addon()
addon.save()
mock_.assert_called_with(addon)
def test_increment_updated_status(self):
addon = self.create_addon()
with patch('olympia.addons.models.track_addon_status_change') as mock_:
addon.update(status=amo.STATUS_PUBLIC)
addon.reload()
            assert mock_.call_args[0][0].status == addon.status
def test_ignore_non_status_changes(self):
addon = self.create_addon()
with patch('olympia.addons.models.track_addon_status_change') as mock_:
addon.update(type=amo.ADDON_THEME)
assert not mock_.called, (
                'Unexpected call: {}'.format(mock_.call_args)
)
def test_increment_all_addon_statuses(self):
addon = self.create_addon(status=amo.STATUS_PUBLIC)
with patch('olympia.addons.models.statsd.incr') as mock_incr:
track_addon_status_change(addon)
mock_incr.assert_any_call(
'addon_status_change.all.status_{}'.format(amo.STATUS_PUBLIC)
)
class TestSearchSignals(amo.tests.ESTestCase):
def setUp(self):
super(TestSearchSignals, self).setUp()
self.addCleanup(self.cleanup)
def cleanup(self):
self.empty_index('default')
def test_no_addons(self):
assert Addon.search_public().count() == 0
def test_create(self):
addon = addon_factory(name='woo')
self.refresh()
assert Addon.search_public().count() == 1
assert Addon.search_public().query(name='woo')[0].id == addon.id
def test_update(self):
addon = addon_factory(name='woo')
self.refresh()
assert Addon.search_public().count() == 1
addon.name = 'yeah'
addon.save()
self.refresh()
assert Addon.search_public().count() == 1
assert Addon.search_public().query(name='woo').count() == 0
assert Addon.search_public().query(name='yeah')[0].id == addon.id
def test_user_disable(self):
"""Test that add-ons are removed from search results after being
disabled by their developers."""
addon = addon_factory(name='woo')
self.refresh()
assert Addon.search_public().count() == 1
addon.update(disabled_by_user=True)
self.refresh()
assert Addon.search_public().count() == 0
def test_switch_to_unlisted(self):
"""Test that add-ons are removed from search results after being
switched to unlisted."""
addon = addon_factory(name='woo')
self.refresh()
assert Addon.search_public().count() == 1
addon.current_version.update(channel=amo.RELEASE_CHANNEL_UNLISTED)
self.refresh()
assert Addon.search_public().count() == 0
def test_switch_to_listed(self):
"""Test that add-ons created as unlisted do not appear in search
results until switched to listed."""
addon = addon_factory(
name='woo', version_kw={'channel': amo.RELEASE_CHANNEL_UNLISTED},
status=amo.STATUS_NULL)
self.refresh()
assert Addon.search_public().count() == 0
latest_version = addon.find_latest_version(
channel=amo.RELEASE_CHANNEL_UNLISTED)
latest_version.update(channel=amo.RELEASE_CHANNEL_LISTED)
addon.update(status=amo.STATUS_PUBLIC)
self.refresh()
assert Addon.search_public().count() == 1
def test_delete(self):
addon = addon_factory(name='woo')
self.refresh()
assert Addon.search_public().count() == 1
addon.delete('woo')
self.refresh()
assert Addon.search_public().count() == 0
class TestLanguagePack(TestCase, amo.tests.AMOPaths):
def setUp(self):
super(TestLanguagePack, self).setUp()
self.addon = amo.tests.addon_factory(type=amo.ADDON_LPAPP,
status=amo.STATUS_PUBLIC)
self.platform_all = amo.PLATFORM_ALL.id
self.platform_mob = amo.PLATFORM_ANDROID.id
self.version = self.addon.current_version
def test_extract(self):
File.objects.create(platform=self.platform_mob, version=self.version,
filename=self.xpi_path('langpack-localepicker'),
status=amo.STATUS_PUBLIC)
assert self.addon.reload().get_localepicker()
assert 'title=Select a language' in self.addon.get_localepicker()
def test_extract_no_file(self):
File.objects.create(platform=self.platform_mob, version=self.version,
filename=self.xpi_path('langpack'),
status=amo.STATUS_PUBLIC)
assert self.addon.reload().get_localepicker() == ''
def test_extract_no_files(self):
assert self.addon.get_localepicker() == ''
def test_extract_not_language_pack(self):
File.objects.create(platform=self.platform_mob, version=self.version,
filename=self.xpi_path('langpack-localepicker'),
status=amo.STATUS_PUBLIC)
assert self.addon.reload().get_localepicker()
self.addon.update(type=amo.ADDON_EXTENSION)
assert self.addon.get_localepicker() == ''
def test_extract_not_platform_mobile(self):
File.objects.create(platform=self.platform_all, version=self.version,
filename=self.xpi_path('langpack-localepicker'),
status=amo.STATUS_PUBLIC)
assert self.addon.reload().get_localepicker() == ''
class TestCompatOverride(TestCase):
def setUp(self):
super(TestCompatOverride, self).setUp()
self.app = amo.APP_IDS[1]
one = CompatOverride.objects.create(guid='one')
CompatOverrideRange.objects.create(compat=one, app=self.app.id)
two = CompatOverride.objects.create(guid='two')
CompatOverrideRange.objects.create(compat=two, app=self.app.id,
min_version='1', max_version='2')
CompatOverrideRange.objects.create(compat=two, app=self.app.id,
min_version='1', max_version='2',
min_app_version='3',
max_app_version='4')
def check(self, obj, **kw):
"""Check that key/value pairs in kw match attributes of obj."""
for key, expected in kw.items():
actual = getattr(obj, key)
assert actual == expected
def test_is_hosted(self):
c = CompatOverride.objects.create(guid='a')
assert not c.is_hosted()
Addon.objects.create(type=1, guid='b')
c = CompatOverride.objects.create(guid='b')
assert c.is_hosted()
def test_override_type(self):
one = CompatOverride.objects.get(guid='one')
# The default is incompatible.
c = CompatOverrideRange.objects.create(compat=one, app=1)
assert c.override_type() == 'incompatible'
c = CompatOverrideRange.objects.create(compat=one, app=1, type=0)
assert c.override_type() == 'compatible'
def test_guid_match(self):
# We hook up the add-on automatically if we see a matching guid.
addon = Addon.objects.create(id=1, guid='oh yeah', type=1)
c = CompatOverride.objects.create(guid=addon.guid)
assert c.addon_id == addon.id
c = CompatOverride.objects.create(guid='something else')
assert c.addon is None
def test_transformer(self):
compats = list(CompatOverride.objects
.transform(CompatOverride.transformer))
ranges = list(CompatOverrideRange.objects.all())
# If the transformer works then we won't have any more queries.
with self.assertNumQueries(0):
for c in compats:
assert c.compat_ranges == (
[r for r in ranges if r.compat_id == c.id])
def test_collapsed_ranges(self):
# Test that we get back the right structures from collapsed_ranges().
c = CompatOverride.objects.get(guid='one')
r = c.collapsed_ranges()
assert len(r) == 1
compat_range = r[0]
self.check(compat_range, type='incompatible', min='0', max='*')
assert len(compat_range.apps) == 1
self.check(compat_range.apps[0], app=amo.FIREFOX, min='0', max='*')
def test_collapsed_ranges_multiple_versions(self):
c = CompatOverride.objects.get(guid='one')
CompatOverrideRange.objects.create(compat=c, app=1,
min_version='1', max_version='2',
min_app_version='3',
max_app_version='3.*')
r = c.collapsed_ranges()
assert len(r) == 2
self.check(r[0], type='incompatible', min='0', max='*')
assert len(r[0].apps) == 1
self.check(r[0].apps[0], app=amo.FIREFOX, min='0', max='*')
self.check(r[1], type='incompatible', min='1', max='2')
assert len(r[1].apps) == 1
self.check(r[1].apps[0], app=amo.FIREFOX, min='3', max='3.*')
def test_collapsed_ranges_different_types(self):
# If the override ranges have different types they should be separate
# entries.
c = CompatOverride.objects.get(guid='one')
CompatOverrideRange.objects.create(compat=c, app=1, type=0,
min_app_version='3',
max_app_version='3.*')
r = c.collapsed_ranges()
assert len(r) == 2
self.check(r[0], type='compatible', min='0', max='*')
assert len(r[0].apps) == 1
self.check(r[0].apps[0], app=amo.FIREFOX, min='3', max='3.*')
self.check(r[1], type='incompatible', min='0', max='*')
assert len(r[1].apps) == 1
self.check(r[1].apps[0], app=amo.FIREFOX, min='0', max='*')
def test_collapsed_ranges_multiple_apps(self):
c = CompatOverride.objects.get(guid='two')
r = c.collapsed_ranges()
assert len(r) == 1
compat_range = r[0]
self.check(compat_range, type='incompatible', min='1', max='2')
assert len(compat_range.apps) == 2
self.check(compat_range.apps[0], app=amo.FIREFOX, min='0', max='*')
self.check(compat_range.apps[1], app=amo.FIREFOX, min='3', max='4')
def test_collapsed_ranges_multiple_versions_and_apps(self):
c = CompatOverride.objects.get(guid='two')
CompatOverrideRange.objects.create(min_version='5', max_version='6',
compat=c, app=1)
r = c.collapsed_ranges()
assert len(r) == 2
self.check(r[0], type='incompatible', min='1', max='2')
assert len(r[0].apps) == 2
self.check(r[0].apps[0], app=amo.FIREFOX, min='0', max='*')
self.check(r[0].apps[1], app=amo.FIREFOX, min='3', max='4')
self.check(r[1], type='incompatible', min='5', max='6')
assert len(r[1].apps) == 1
self.check(r[1].apps[0], app=amo.FIREFOX, min='0', max='*')
class TestIncompatibleVersions(TestCase):
def setUp(self):
super(TestIncompatibleVersions, self).setUp()
self.app = amo.APP_IDS[amo.FIREFOX.id]
self.addon = Addon.objects.create(guid='r@b', type=amo.ADDON_EXTENSION)
def test_signals_min(self):
assert IncompatibleVersions.objects.count() == 0
c = CompatOverride.objects.create(guid='r@b')
CompatOverrideRange.objects.create(compat=c, app=self.app.id,
min_version='0',
max_version='1.0')
# Test the max version matched.
version1 = Version.objects.create(id=2, addon=self.addon,
version='1.0')
assert IncompatibleVersions.objects.filter(
version=version1).count() == 1
assert IncompatibleVersions.objects.count() == 1
# Test the lower range.
version2 = Version.objects.create(id=1, addon=self.addon,
version='0.5')
assert IncompatibleVersions.objects.filter(
version=version2).count() == 1
assert IncompatibleVersions.objects.count() == 2
# Test delete signals.
version1.delete()
assert IncompatibleVersions.objects.count() == 1
version2.delete()
assert IncompatibleVersions.objects.count() == 0
def test_signals_max(self):
assert IncompatibleVersions.objects.count() == 0
c = CompatOverride.objects.create(guid='r@b')
CompatOverrideRange.objects.create(compat=c, app=self.app.id,
min_version='1.0',
max_version='*')
# Test the min_version matched.
version1 = Version.objects.create(addon=self.addon, version='1.0')
assert IncompatibleVersions.objects.filter(
version=version1).count() == 1
assert IncompatibleVersions.objects.count() == 1
# Test the upper range.
version2 = Version.objects.create(addon=self.addon, version='99.0')
assert IncompatibleVersions.objects.filter(
version=version2).count() == 1
assert IncompatibleVersions.objects.count() == 2
# Test delete signals.
version1.delete()
assert IncompatibleVersions.objects.count() == 1
version2.delete()
assert IncompatibleVersions.objects.count() == 0
class TestAddonApprovalsCounter(TestCase):
def setUp(self):
self.addon = addon_factory()
def test_increment_existing(self):
assert not AddonApprovalsCounter.objects.filter(
addon=self.addon).exists()
AddonApprovalsCounter.increment_for_addon(self.addon)
approval_counter = AddonApprovalsCounter.objects.get(addon=self.addon)
assert approval_counter.counter == 1
AddonApprovalsCounter.increment_for_addon(self.addon)
approval_counter.reload()
assert approval_counter.counter == 2
def test_increment_non_existing(self):
approval_counter = AddonApprovalsCounter.objects.create(
addon=self.addon, counter=0)
AddonApprovalsCounter.increment_for_addon(self.addon)
approval_counter.reload()
assert approval_counter.counter == 1
def test_reset_existing(self):
approval_counter = AddonApprovalsCounter.objects.create(
addon=self.addon, counter=42)
AddonApprovalsCounter.reset_for_addon(self.addon)
approval_counter.reload()
assert approval_counter.counter == 0
def test_reset_non_existing(self):
assert not AddonApprovalsCounter.objects.filter(
addon=self.addon).exists()
AddonApprovalsCounter.reset_for_addon(self.addon)
approval_counter = AddonApprovalsCounter.objects.get(addon=self.addon)
assert approval_counter.counter == 0
| bsd-3-clause | -988,882,400,106,832,100 | 37.52548 | 79 | 0.612932 | false |
shinglyu/ns3-h264-svc | .waf-1.6.11-30618c54883417962c38f5d395f83584/waflib/Tools/irixcc.py | 14 | 1237 | #! /usr/bin/env python
# encoding: utf-8
# WARNING! Do not edit! http://waf.googlecode.com/git/docs/wafbook/single.html#_obtaining_the_waf_file
import os
from waflib import Utils
from waflib.Tools import ccroot,ar
from waflib.Configure import conf
def find_irixcc(conf):
v=conf.env
cc=None
if v['CC']:cc=v['CC']
elif'CC'in conf.environ:cc=conf.environ['CC']
if not cc:cc=conf.find_program('cc',var='CC')
if not cc:conf.fatal('irixcc was not found')
cc=conf.cmd_to_list(cc)
try:
conf.cmd_and_log(cc+['-version'])
except:
conf.fatal('%r -version could not be executed'%cc)
v['CC']=cc
v['CC_NAME']='irix'
def irixcc_common_flags(conf):
v=conf.env
v['CC_SRC_F']=''
v['CC_TGT_F']=['-c','-o']
v['CPPPATH_ST']='-I%s'
v['DEFINES_ST']='-D%s'
if not v['LINK_CC']:v['LINK_CC']=v['CC']
v['CCLNK_SRC_F']=''
v['CCLNK_TGT_F']=['-o']
v['LIB_ST']='-l%s'
v['LIBPATH_ST']='-L%s'
v['STLIB_ST']='-l%s'
v['STLIBPATH_ST']='-L%s'
v['cprogram_PATTERN']='%s'
v['cshlib_PATTERN']='lib%s.so'
v['cstlib_PATTERN']='lib%s.a'
def configure(conf):
conf.find_irixcc()
conf.find_cpp()
conf.find_ar()
conf.irixcc_common_flags()
conf.cc_load_tools()
conf.cc_add_flags()
conf.link_add_flags()
conf(find_irixcc)
conf(irixcc_common_flags) | gpl-2.0 | 6,142,413,400,744,441,000 | 24.265306 | 102 | 0.646726 | false |
gfyoung/scipy | scipy/linalg/__init__.py | 7 | 7176 | """
====================================
Linear algebra (:mod:`scipy.linalg`)
====================================
.. currentmodule:: scipy.linalg
Linear algebra functions.
.. seealso::
`numpy.linalg` for more linear algebra functions. Note that
although `scipy.linalg` imports most of them, identically named
functions from `scipy.linalg` may offer more or slightly differing
functionality.
Basics
======
.. autosummary::
:toctree: generated/
inv - Find the inverse of a square matrix
solve - Solve a linear system of equations
solve_banded - Solve a banded linear system
solveh_banded - Solve a Hermitian or symmetric banded system
solve_circulant - Solve a circulant system
solve_triangular - Solve a triangular matrix
solve_toeplitz - Solve a toeplitz matrix
det - Find the determinant of a square matrix
norm - Matrix and vector norm
lstsq - Solve a linear least-squares problem
pinv - Pseudo-inverse (Moore-Penrose) using lstsq
pinv2 - Pseudo-inverse using svd
pinvh - Pseudo-inverse of hermitian matrix
kron - Kronecker product of two arrays
tril - Construct a lower-triangular matrix from a given matrix
triu - Construct an upper-triangular matrix from a given matrix
orthogonal_procrustes - Solve an orthogonal Procrustes problem
matrix_balance - Balance matrix entries with a similarity transformation
subspace_angles - Compute the subspace angles between two matrices
LinAlgError
LinAlgWarning
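A minimal usage sketch for the basic solvers (an illustrative example added
here; it is not part of the upstream listing)::

    >>> import numpy as np
    >>> from scipy import linalg
    >>> a = np.array([[3., 2.], [1., 4.]])
    >>> b = np.array([5., 6.])
    >>> x = linalg.solve(a, b)
    >>> np.allclose(a.dot(x), b)
    True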
Eigenvalue Problems
===================
.. autosummary::
:toctree: generated/
eig - Find the eigenvalues and eigenvectors of a square matrix
eigvals - Find just the eigenvalues of a square matrix
eigh - Find the e-vals and e-vectors of a Hermitian or symmetric matrix
eigvalsh - Find just the eigenvalues of a Hermitian or symmetric matrix
eig_banded - Find the eigenvalues and eigenvectors of a banded matrix
eigvals_banded - Find just the eigenvalues of a banded matrix
eigh_tridiagonal - Find the eigenvalues and eigenvectors of a tridiagonal matrix
eigvalsh_tridiagonal - Find just the eigenvalues of a tridiagonal matrix
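For instance (an illustrative example, not part of the upstream listing),
`eigh` returns the eigenvalues of a symmetric matrix in ascending order::

    >>> import numpy as np
    >>> from scipy import linalg
    >>> w, v = linalg.eigh(np.array([[2., 1.], [1., 2.]]))
    >>> np.allclose(w, [1., 3.])
    True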
Decompositions
==============
.. autosummary::
:toctree: generated/
lu - LU decomposition of a matrix
lu_factor - LU decomposition returning unordered matrix and pivots
lu_solve - Solve Ax=b using back substitution with output of lu_factor
svd - Singular value decomposition of a matrix
svdvals - Singular values of a matrix
diagsvd - Construct matrix of singular values from output of svd
orth - Construct orthonormal basis for the range of A using svd
null_space - Construct orthonormal basis for the null space of A using svd
ldl - LDL.T decomposition of a Hermitian or a symmetric matrix.
cholesky - Cholesky decomposition of a matrix
cholesky_banded - Cholesky decomp. of a sym. or Hermitian banded matrix
cho_factor - Cholesky decomposition for use in solving a linear system
cho_solve - Solve previously factored linear system
cho_solve_banded - Solve previously factored banded linear system
polar - Compute the polar decomposition.
qr - QR decomposition of a matrix
qr_multiply - QR decomposition and multiplication by Q
qr_update - Rank k QR update
qr_delete - QR downdate on row or column deletion
qr_insert - QR update on row or column insertion
rq - RQ decomposition of a matrix
qz - QZ decomposition of a pair of matrices
ordqz - QZ decomposition of a pair of matrices with reordering
schur - Schur decomposition of a matrix
rsf2csf - Real to complex Schur form
hessenberg - Hessenberg form of a matrix
cdf2rdf - Complex diagonal form to real diagonal block form
.. seealso::
`scipy.linalg.interpolative` -- Interpolative matrix decompositions
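As an illustrative example (not part of the upstream docstring), an LU
factorization can be checked by reassembling its factors::

    >>> import numpy as np
    >>> from scipy import linalg
    >>> a = np.array([[2., 5., 8., 7.], [5., 2., 2., 8.],
    ...               [7., 5., 6., 6.], [5., 4., 4., 8.]])
    >>> p, l, u = linalg.lu(a)
    >>> np.allclose(p.dot(l).dot(u), a)
    True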
Matrix Functions
================
.. autosummary::
:toctree: generated/
expm - Matrix exponential
logm - Matrix logarithm
cosm - Matrix cosine
sinm - Matrix sine
tanm - Matrix tangent
coshm - Matrix hyperbolic cosine
sinhm - Matrix hyperbolic sine
tanhm - Matrix hyperbolic tangent
signm - Matrix sign
sqrtm - Matrix square root
funm - Evaluating an arbitrary matrix function
expm_frechet - Frechet derivative of the matrix exponential
expm_cond - Relative condition number of expm in the Frobenius norm
fractional_matrix_power - Fractional matrix power
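As a quick, illustrative sanity check of the matrix functions (again, not
part of the upstream docstring): the exponential of the zero matrix is the
identity::

    >>> import numpy as np
    >>> from scipy.linalg import expm
    >>> np.allclose(expm(np.zeros((2, 2))), np.eye(2))
    True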
Matrix Equation Solvers
=======================
.. autosummary::
:toctree: generated/
solve_sylvester - Solve the Sylvester matrix equation
solve_continuous_are - Solve the continuous-time algebraic Riccati equation
solve_discrete_are - Solve the discrete-time algebraic Riccati equation
   solve_continuous_lyapunov - Solve the continuous-time Lyapunov equation
solve_discrete_lyapunov - Solve the discrete-time Lyapunov equation
Sketches and Random Projections
===============================
.. autosummary::
:toctree: generated/
clarkson_woodruff_transform - Applies the Clarkson Woodruff Sketch (a.k.a CountMin Sketch)
Special Matrices
================
.. autosummary::
:toctree: generated/
block_diag - Construct a block diagonal matrix from submatrices
circulant - Circulant matrix
companion - Companion matrix
dft - Discrete Fourier transform matrix
hadamard - Hadamard matrix of order 2**n
hankel - Hankel matrix
helmert - Helmert matrix
hilbert - Hilbert matrix
invhilbert - Inverse Hilbert matrix
leslie - Leslie matrix
pascal - Pascal matrix
invpascal - Inverse Pascal matrix
toeplitz - Toeplitz matrix
tri - Construct a matrix filled with ones at and below a given diagonal
Low-level routines
==================
.. autosummary::
:toctree: generated/
get_blas_funcs
get_lapack_funcs
find_best_blas_type
.. seealso::
`scipy.linalg.blas` -- Low-level BLAS functions
`scipy.linalg.lapack` -- Low-level LAPACK functions
`scipy.linalg.cython_blas` -- Low-level BLAS functions for Cython
`scipy.linalg.cython_lapack` -- Low-level LAPACK functions for Cython
""" # noqa: E501
from __future__ import division, print_function, absolute_import
from .linalg_version import linalg_version as __version__
from .misc import *
from .basic import *
from .decomp import *
from .decomp_lu import *
from ._decomp_ldl import *
from .decomp_cholesky import *
from .decomp_qr import *
from ._decomp_qz import *
from .decomp_svd import *
from .decomp_schur import *
from ._decomp_polar import *
from .matfuncs import *
from .blas import *
from .lapack import *
from .special_matrices import *
from ._solvers import *
from ._procrustes import *
from ._decomp_update import *
from ._sketches import *
__all__ = [s for s in dir() if not s.startswith('_')]
from numpy.dual import register_func
for k in ['norm', 'inv', 'svd', 'solve', 'det', 'eig', 'eigh', 'eigvals',
'eigvalsh', 'lstsq', 'cholesky']:
try:
register_func(k, eval(k))
except ValueError:
pass
try:
register_func('pinv', pinv2)
except ValueError:
pass
del k, register_func
from scipy._lib._testutils import PytestTester
test = PytestTester(__name__)
del PytestTester
| bsd-3-clause | -4,749,869,154,643,300,000 | 30.336245 | 93 | 0.716555 | false |
Boyang--Li/mavlink | pymavlink/quaternion.py | 1 | 18832 | #!/usr/bin/env python
"""
Quaternion implementation for use in pymavlink
"""
from __future__ import absolute_import, division, print_function
import numpy as np
from rotmat import Vector3, Matrix3
__author__ = "Thomas Gubler"
__copyright__ = "Copyright (C) 2014 Thomas Gubler"
__license__ = "GNU Lesser General Public License v3"
__email__ = "[email protected]"
class QuaternionBase(object):
"""
Quaternion class, this is the version which supports numpy arrays
If you need support for Matrix3 look at the Quaternion class
Usage:
>>> from quaternion import QuaternionBase
>>> import numpy as np
>>> q = QuaternionBase([np.radians(20), np.radians(20), np.radians(20)])
>>> print(q)
[ 0.9603483 0.13871646 0.19810763 0.13871646]
>>> print(q.dcm)
[[ 0.88302222 -0.21147065 0.41898917]
[ 0.3213938 0.92303098 -0.21147065]
[-0.34202014 0.3213938 0.88302222]]
>>> q = QuaternionBase([1, 0, 0, 0])
>>> print(q.euler)
[ 0. -0. 0.]
>>> m = [[1, 0, 0], [0, 0, -1], [0, 1, 0]]
>>> q = QuaternionBase(m)
>>> vector = [0, 1, 0]
>>> vector2 = q.transform(vector)
"""
def __init__(self, attitude=[1, 0, 0, 0]):
"""
Construct a quaternion from an attitude
:param attitude: another QuaternionBase,
3 element list [roll, pitch, yaw],
4 element list [w, x, y ,z], DCM (3x3 array)
"""
if isinstance(attitude, QuaternionBase):
self.q = attitude.q
elif np.array(attitude).shape == (3, 3):
self.dcm = attitude
elif len(attitude) == 4:
self.q = attitude
elif len(attitude) == 3:
self.euler = attitude
else:
raise TypeError("attitude is not valid")
@property
def q(self):
"""
Get the quaternion
:returns: array containing the quaternion elements
"""
if self._q is None:
if self._euler is not None:
# get q from euler
self._q = self._euler_to_q(self.euler)
elif self._dcm is not None:
# get q from DCM
self._q = self._dcm_to_q(self.dcm)
return self._q
def __getitem__(self, index):
"""Returns the quaternion entry at index"""
return self.q[index]
@q.setter
def q(self, q):
"""
Set the quaternion
:param q: list or array of quaternion values [w, x, y, z]
"""
self._q = np.array(q)
# mark other representations as outdated, will get generated on next
# read
self._euler = None
self._dcm = None
@property
def euler(self):
"""
Get the euler angles.
The convention is Tait-Bryan (ZY'X'')
:returns: array containing the euler angles [roll, pitch, yaw]
"""
if self._euler is None:
if self._q is not None:
# try to get euler angles from q via DCM
self._dcm = self._q_to_dcm(self.q)
self._euler = self._dcm_to_euler(self.dcm)
elif self._dcm is not None:
# get euler angles from DCM
self._euler = self._dcm_to_euler(self.dcm)
return self._euler
@euler.setter
def euler(self, euler):
"""
Set the euler angles
:param euler: list or array of the euler angles [roll, pitch, yaw]
"""
assert(len(euler) == 3)
self._euler = np.array(euler)
# mark other representations as outdated, will get generated on next
# read
self._q = None
self._dcm = None
@property
def dcm(self):
"""
Get the DCM
:returns: 3x3 array
"""
if self._dcm is None:
if self._q is not None:
# try to get dcm from q
self._dcm = self._q_to_dcm(self.q)
elif self._euler is not None:
# try to get get dcm from euler
self._dcm = self._euler_to_dcm(self._euler)
return self._dcm
@dcm.setter
def dcm(self, dcm):
"""
Set the DCM
:param dcm: 3x3 array
"""
assert(len(dcm) == 3)
for sub in dcm:
assert(len(sub) == 3)
self._dcm = np.array(dcm)
# mark other representations as outdated, will get generated on next
# read
self._q = None
self._euler = None
def transform(self, v):
"""
Calculates the vector transformed by this quaternion
:param v: array with len 3 to be transformed
:returns: transformed vector
"""
assert(len(v) == 3)
assert(np.allclose(self.norm, 1))
# perform transformation t = q * [0, v] * q^-1 but avoid multiplication
# because terms cancel out
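        # Expanding q * [0, v] * q^-1 with q = [q0, qi] gives the equivalent
        # closed form t = (q0**2 - |qi|**2)*v + 2*(qi . v)*qi + 2*q0*(qi x v);
        # the lines below compute the same result with only two cross products.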
q0 = self.q[0]
qi = self.q[1:4]
ui = np.array(v)
a = q0 * ui + np.cross(qi, ui)
t = np.dot(qi, ui) * qi + q0 * a - np.cross(a, qi)
return t
@property
def norm(self):
"""
Returns norm of quaternion
:returns: norm (scalar)
"""
return QuaternionBase.norm_array(self.q)
def normalize(self):
"""Normalizes the quaternion"""
self.q = QuaternionBase.normalize_array(self.q)
@property
def inversed(self):
"""
Get inversed quaternion
:returns: inversed quaternion
"""
q_inv = self._q_inversed(self.q)
return QuaternionBase(q_inv)
def __eq__(self, other):
"""
Equality test (same orientation, not necessarily same rotation)
:param other: a QuaternionBase
:returns: true if the quaternions are equal
"""
if isinstance(other, QuaternionBase):
return abs(self.q.dot(other.q)) > 1 - np.finfo(float).eps
return NotImplemented
def close(self, other):
"""
Equality test with tolerance
(same orientation, not necessarily same rotation)
:param other: a QuaternionBase
:returns: true if the quaternions are almost equal
"""
if isinstance(other, QuaternionBase):
return np.allclose(self.q, other.q) or np.allclose(self.q, -other.q)
return NotImplemented
def __mul__(self, other):
"""
:param other: QuaternionBase
        :returns: multiplication of this Quaternion with other
"""
if isinstance(other, QuaternionBase):
o = other.q
elif len(other) == 4:
o = other
else:
return NotImplemented
return QuaternionBase(self._mul_array(self.q, o))
def __truediv__(self, other):
"""
:param other: QuaternionBase
:returns: division of this Quaternion with other
"""
if isinstance(other, QuaternionBase):
o = other
elif len(other) == 4:
o = QuaternionBase(other)
else:
return NotImplemented
return self * o.inversed
@staticmethod
def normalize_array(q):
"""
Normalizes the list with len 4 so that it can be used as quaternion
:param q: array of len 4
:returns: normalized array
"""
assert(len(q) == 4)
q = np.array(q)
n = QuaternionBase.norm_array(q)
return q / n
@staticmethod
def norm_array(q):
"""
Calculate quaternion norm on array q
        :param q: array of len 4
:returns: norm (scalar)
"""
assert(len(q) == 4)
return np.sqrt(np.dot(q, q))
def _mul_array(self, p, q):
"""
        Performs multiplication of the 2 quaternion arrays p and q
        :param p: array of len 4
        :param q: array of len 4
        :returns: array of len 4, result of p * q (with p, q quaternions)
"""
assert(len(q) == len(p) == 4)
p0 = p[0]
pi = p[1:4]
q0 = q[0]
qi = q[1:4]
res = np.zeros(4)
res[0] = p0 * q0 - np.dot(pi, qi)
res[1:4] = p0 * qi + q0 * pi + np.cross(pi, qi)
return res
def _euler_to_q(self, euler):
"""
Create q array from euler angles
:param euler: array [roll, pitch, yaw] in rad
:returns: array q which represents a quaternion [w, x, y, z]
"""
assert(len(euler) == 3)
phi = euler[0]
theta = euler[1]
psi = euler[2]
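        # The half-angle products below compose q = q_z(psi) * q_y(theta) * q_x(phi),
        # i.e. the Tait-Bryan ZY'X'' (yaw-pitch-roll) convention used by self.euler.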
c_phi_2 = np.cos(phi / 2)
s_phi_2 = np.sin(phi / 2)
c_theta_2 = np.cos(theta / 2)
s_theta_2 = np.sin(theta / 2)
c_psi_2 = np.cos(psi / 2)
s_psi_2 = np.sin(psi / 2)
q = np.zeros(4)
q[0] = (c_phi_2 * c_theta_2 * c_psi_2 +
s_phi_2 * s_theta_2 * s_psi_2)
q[1] = (s_phi_2 * c_theta_2 * c_psi_2 -
c_phi_2 * s_theta_2 * s_psi_2)
q[2] = (c_phi_2 * s_theta_2 * c_psi_2 +
s_phi_2 * c_theta_2 * s_psi_2)
q[3] = (c_phi_2 * c_theta_2 * s_psi_2 -
s_phi_2 * s_theta_2 * c_psi_2)
return q
def _q_to_dcm(self, q):
"""
Create DCM from q
:param q: array q which represents a quaternion [w, x, y, z]
:returns: 3x3 dcm array
"""
assert(len(q) == 4)
assert(np.allclose(QuaternionBase.norm_array(q), 1))
dcm = np.zeros([3, 3])
a = q[0]
b = q[1]
c = q[2]
d = q[3]
a_sq = a * a
b_sq = b * b
c_sq = c * c
d_sq = d * d
dcm[0][0] = a_sq + b_sq - c_sq - d_sq
dcm[0][1] = 2 * (b * c - a * d)
dcm[0][2] = 2 * (a * c + b * d)
dcm[1][0] = 2 * (b * c + a * d)
dcm[1][1] = a_sq - b_sq + c_sq - d_sq
dcm[1][2] = 2 * (c * d - a * b)
dcm[2][0] = 2 * (b * d - a * c)
dcm[2][1] = 2 * (a * b + c * d)
dcm[2][2] = a_sq - b_sq - c_sq + d_sq
return dcm
def _dcm_to_q(self, dcm):
"""
Create q from dcm
Reference:
- Shoemake, Quaternions,
http://www.cs.ucr.edu/~vbz/resources/quatut.pdf
:param dcm: 3x3 dcm array
        :returns: array q which represents a quaternion [w, x, y, z]
"""
assert(dcm.shape == (3, 3))
q = np.zeros(4)
tr = np.trace(dcm)
if tr > 0:
s = np.sqrt(tr + 1.0)
q[0] = s * 0.5
s = 0.5 / s
q[1] = (dcm[2][1] - dcm[1][2]) * s
q[2] = (dcm[0][2] - dcm[2][0]) * s
q[3] = (dcm[1][0] - dcm[0][1]) * s
else:
dcm_i = np.argmax(np.diag(dcm))
dcm_j = (dcm_i + 1) % 3
dcm_k = (dcm_i + 2) % 3
s = np.sqrt((dcm[dcm_i][dcm_i] - dcm[dcm_j][dcm_j] -
dcm[dcm_k][dcm_k]) + 1.0)
q[dcm_i + 1] = s * 0.5
s = 0.5 / s
q[dcm_j + 1] = (dcm[dcm_i][dcm_j] + dcm[dcm_j][dcm_i]) * s
q[dcm_k + 1] = (dcm[dcm_k][dcm_i] + dcm[dcm_i][dcm_k]) * s
q[0] = (dcm[dcm_k][dcm_j] - dcm[dcm_j][dcm_k]) * s
return q
def _euler_to_dcm(self, euler):
"""
Create DCM from euler angles
:param euler: array [roll, pitch, yaw] in rad
:returns: 3x3 dcm array
"""
assert(len(euler) == 3)
phi = euler[0]
theta = euler[1]
psi = euler[2]
dcm = np.zeros([3, 3])
c_phi = np.cos(phi)
s_phi = np.sin(phi)
c_theta = np.cos(theta)
s_theta = np.sin(theta)
c_psi = np.cos(psi)
s_psi = np.sin(psi)
dcm[0][0] = c_theta * c_psi
dcm[0][1] = -c_phi * s_psi + s_phi * s_theta * c_psi
dcm[0][2] = s_phi * s_psi + c_phi * s_theta * c_psi
dcm[1][0] = c_theta * s_psi
dcm[1][1] = c_phi * c_psi + s_phi * s_theta * s_psi
dcm[1][2] = -s_phi * c_psi + c_phi * s_theta * s_psi
dcm[2][0] = -s_theta
dcm[2][1] = s_phi * c_theta
dcm[2][2] = c_phi * c_theta
return dcm
def _dcm_to_euler(self, dcm):
"""
        Create euler angles from DCM
:param dcm: 3x3 dcm array
:returns: array [roll, pitch, yaw] in rad
"""
assert(dcm.shape == (3, 3))
theta = np.arcsin(min(1, max(-1, -dcm[2][0])))
if abs(theta - np.pi/2) < 1.0e-3:
phi = 0.0
psi = (np.arctan2(dcm[1][2] - dcm[0][1],
dcm[0][2] + dcm[1][1]) + phi)
elif abs(theta + np.pi/2) < 1.0e-3:
phi = 0.0
psi = np.arctan2(dcm[1][2] - dcm[0][1],
dcm[0][2] + dcm[1][1] - phi)
else:
phi = np.arctan2(dcm[2][1], dcm[2][2])
psi = np.arctan2(dcm[1][0], dcm[0][0])
return np.array([phi, theta, psi])
def _q_inversed(self, q):
"""
Returns inversed quaternion q
:param q: array q which represents a quaternion [w, x, y, z]
:returns: inversed array q which is a quaternion [w, x, y ,z]
"""
assert(len(q) == 4)
return np.hstack([q[0], -q[1:4]])
def __str__(self):
"""String of quaternion values"""
return str(self.q)
class Quaternion(QuaternionBase):
"""
Quaternion class that supports pymavlink's Vector3 and Matrix3
Usage:
>>> from quaternion import Quaternion
>>> from rotmat import Vector3, Matrix3
>>> m = Matrix3()
>>> m.from_euler(45, 0, 0)
>>> print(m)
Matrix3((1.00, 0.00, 0.00), (0.00, 0.53, -0.85), (-0.00, 0.85, 0.53))
>>> q = Quaternion(m)
>>> print(q)
[ 0.87330464 0.48717451 0. 0. ]
>>> print(q.dcm)
Matrix3((1.00, 0.00, 0.00), (0.00, 0.53, -0.85), (-0.00, 0.85, 0.53))
>>> v = Vector3(0, 1, 0)
>>> v2 = q.transform(v)
>>> print(v2)
Vector3(0.00, 0.53, 0.85)
"""
def __init__(self, attitude):
"""
Construct a quaternion from an attitude
:param attitude: another Quaternion, QuaternionBase,
3 element list [roll, pitch, yaw],
4 element list [w, x, y ,z], DCM (3x3 array or Matrix3)
"""
if isinstance(attitude, Quaternion):
self.q = attitude.q
if isinstance(attitude, Matrix3):
self.dcm = attitude
elif np.array(attitude).shape == (3, 3):
# convert dcm array to Matrix3
self.dcm = self._dcm_array_to_matrix3(attitude)
elif isinstance(attitude, Vector3):
# provided euler angles
euler = [attitude.x, attitude.y, attitude.z]
super(Quaternion, self).__init__(euler)
else:
super(Quaternion, self).__init__(attitude)
@property
def dcm(self):
"""
Get the DCM
:returns: Matrix3
"""
if self._dcm is None:
if self._q is not None:
# try to get dcm from q
self._dcm = self._q_to_dcm(self.q)
elif self._euler is not None:
# try to get get dcm from euler
self._dcm = self._euler_to_dcm(self._euler)
return self._dcm
@dcm.setter
def dcm(self, dcm):
"""
Set the DCM
:param dcm: Matrix3
"""
assert(isinstance(dcm, Matrix3))
self._dcm = dcm
# mark other representations as outdated, will get generated on next
# read
self._q = None
self._euler = None
@property
def inversed(self):
"""
Get inversed quaternion
:returns: inversed quaternion
"""
return Quaternion(super(Quaternion, self).inversed)
def transform(self, v3):
"""
Calculates the vector transformed by this quaternion
:param v3: Vector3 to be transformed
:returns: transformed vector
"""
if isinstance(v3, Vector3):
t = super(Quaternion, self).transform([v3.x, v3.y, v3.z])
return Vector3(t[0], t[1], t[2])
elif len(v3) == 3:
return super(Quaternion, self).transform(v3)
else:
raise TypeError("param v3 is not a vector type")
def _dcm_array_to_matrix3(self, dcm):
"""
Converts dcm array into Matrix3
:param dcm: 3x3 dcm array
:returns: Matrix3
"""
assert(dcm.shape == (3, 3))
a = Vector3(dcm[0][0], dcm[0][1], dcm[0][2])
b = Vector3(dcm[1][0], dcm[1][1], dcm[1][2])
c = Vector3(dcm[2][0], dcm[2][1], dcm[2][2])
return Matrix3(a, b, c)
def _matrix3_to_dcm_array(self, m):
"""
        Converts a Matrix3 into a 3x3 array
:param m: Matrix3
:returns: 3x3 array
"""
assert(isinstance(m, Matrix3))
return np.array([[m.a.x, m.a.y, m.a.z],
[m.b.x, m.b.y, m.b.z],
[m.c.x, m.c.y, m.c.z]])
def _q_to_dcm(self, q):
"""
Create DCM (Matrix3) from q
:param q: array q which represents a quaternion [w, x, y, z]
:returns: Matrix3
"""
assert(len(q) == 4)
arr = super(Quaternion, self)._q_to_dcm(q)
return self._dcm_array_to_matrix3(arr)
def _dcm_to_q(self, dcm):
"""
Create q from dcm (Matrix3)
:param dcm: Matrix3
:returns: array q which represents a quaternion [w, x, y, z]
"""
assert(isinstance(dcm, Matrix3))
arr = self._matrix3_to_dcm_array(dcm)
return super(Quaternion, self)._dcm_to_q(arr)
def _euler_to_dcm(self, euler):
"""
Create DCM (Matrix3) from euler angles
:param euler: array [roll, pitch, yaw] in rad
:returns: Matrix3
"""
assert(len(euler) == 3)
m = Matrix3()
m.from_euler(*euler)
return m
def _dcm_to_euler(self, dcm):
"""
        Create euler angles from DCM (Matrix3)
:param dcm: Matrix3
:returns: array [roll, pitch, yaw] in rad
"""
assert(isinstance(dcm, Matrix3))
return np.array(dcm.to_euler())
def __mul__(self, other):
"""
:param other: Quaternion
        :returns: multiplication of this Quaternion with other
"""
return Quaternion(super(Quaternion, self).__mul__(other))
def __truediv__(self, other):
"""
:param other: Quaternion
:returns: division of this Quaternion with other
"""
return Quaternion(super(Quaternion, self).__truediv__(other))
if __name__ == "__main__":
import doctest
doctest.testmod()
import unittest
unittest.main('quaterniontest')
| lgpl-3.0 | -4,134,734,881,633,261,000 | 28.797468 | 80 | 0.496071 | false |
splice/splice-server | playpen/server_metadata/upload_metadata.py | 1 | 1708 | #!/usr/bin/env python
import json
import sys
from optparse import OptionParser
from splice.common import config, splice_server_client
def read_file(input_file):
f = open(input_file)
try:
return f.read()
finally:
f.close()
def upload(host, port, data):
return splice_server_client.upload_splice_server_metadata(host, port, "/splice/api/v1/spliceserver/", data)
if __name__ == "__main__":
# Parse arguments
parser = OptionParser(description="Seeds a RCS with Splice Server metadata")
parser.add_option("--host", action="store", help="Hostname for RCS", default="127.0.0.1")
parser.add_option("--port", action="store", help="Port for RCS", default="443")
parser.add_option("--input", action="store", help="JSON file containing splice server metadata to upload to server", default="")
parser.add_option("--config", action="store", help="RCS server config file, defaults to /etc/splice/conf.d/server.conf",
default="/etc/splice/conf.d/server.conf")
(opts, args) = parser.parse_args()
config.init(opts.config)
if not opts.input:
print "Please re-run with --input specified for a JSON file to upload to server"
sys.exit(1)
# Read in config file
data = read_file(opts.input)
try:
data = json.loads(data)
except Exception, e:
print "Input data from %s does not appear to be valid JSON." % (opts.input)
print "Input data: \n%s" % (data)
print "Caught Exception: %s" % (e)
sys.exit(1)
# Send to RCS
print "Data = \n%s" % (data)
response = upload(opts.host, opts.port, data)
#print "Uploaded: \n%s\n\nReceived Reponse:\n%s" % (data, response)
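    # Illustrative invocation (hostname and file name below are made up):
    #   python upload_metadata.py --host rcs.example.com --port 443 \
    #       --input splice_server_metadata.json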
| gpl-2.0 | -4,868,430,189,488,142,000 | 32.490196 | 132 | 0.645199 | false |
jemromerol/apasvo | apasvo/gui/views/error.py | 1 | 1546 | # encoding: utf-8
'''
@author: Jose Emilio Romero Lopez
@copyright: Copyright 2013-2014, Jose Emilio Romero Lopez.
@license: GPL
@contact: [email protected]
This file is part of APASVO.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
from PySide import QtGui
def display_error_dlg(msg, additional_info=None, parent=None):
"""Displays an error dialog."""
msgBox = QtGui.QMessageBox(parent)
msgBox.setText("An error occurred:")
horizontalSpacer = QtGui.QSpacerItem(480, 0, QtGui.QSizePolicy.Minimum,
QtGui.QSizePolicy.Expanding)
layout = msgBox.layout()
layout.addItem(horizontalSpacer, layout.rowCount(), 0, 1, layout.columnCount())
msgBox.setIcon(QtGui.QMessageBox.Critical)
msgBox.setInformativeText(msg)
if additional_info is not None:
msgBox.setDetailedText(additional_info)
msgBox.setStandardButtons(QtGui.QMessageBox.Ok)
msgBox.exec_()
| gpl-3.0 | 6,917,896,951,927,512,000 | 34.953488 | 83 | 0.721863 | false |
limscoder/amfast | amfast/remoting/sa_subscription_manager.py | 1 | 7640 | import time
import cPickle as pickle
import sqlalchemy as sa
from sqlalchemy.sql import func, and_
if sa.__version__.startswith('0.5'):
# 0.5 is lowest supported version
BINARY_TYPE = sa.Binary
else:
BINARY_TYPE = sa.LargeBinary
from subscription_manager import Subscription, SubscriptionManager
import flex_messages as messaging
class SaSubscriptionManager(SubscriptionManager):
"""Manages subscriptions in a database, uses SqlAlchemy to talk to the DB."""
def __init__(self, engine, metadata, secure=False, ttl=30000, table_prefix=''):
SubscriptionManager.__init__(self, secure=secure, ttl=ttl)
self.engine = engine
self.metadata = metadata
self.table_prefix = table_prefix and "%s_" % table_prefix.rstrip('_') or table_prefix
self.mapTables()
def reset(self):
db = self.getDb()
db.execute(self.subscriptions.delete())
db.execute(self.messages.delete())
db.close()
def mapTables(self):
self.subscriptions = sa.Table('%ssubscriptions' % self.table_prefix, self.metadata,
sa.Column('connection_id', sa.String(36), primary_key=True),
sa.Column('client_id', sa.String(36), primary_key=True),
sa.Column('topic', sa.String(128), primary_key=True)
)
self.messages = sa.Table('%smessages' % self.table_prefix, self.metadata,
sa.Column('id', sa.Integer, primary_key=True),
sa.Column('topic', sa.String(256), index=True),
sa.Column('clientId', sa.String(128), nullable=True),
sa.Column('messageId', sa.String(128), nullable=True),
sa.Column('correlationId', sa.String(128), nullable=True),
sa.Column('destination', sa.String(128), nullable=True),
sa.Column('timestamp', sa.Float(), nullable=True),
sa.Column('timeToLive', sa.Float(), nullable=True),
sa.Column('headers', BINARY_TYPE(), nullable=True),
sa.Column('body', BINARY_TYPE(), nullable=False)
)
def createTables(self):
db = self.getDb()
self.subscriptions.create(db, checkfirst=True)
self.messages.create(db, checkfirst=True)
db.close()
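    # Illustrative wiring (engine URL and ttl below are assumed values, not
    # part of the original module):
    #
    #   engine = sa.create_engine("sqlite:///subscriptions.db")
    #   metadata = sa.MetaData()
    #   sub_mgr = SaSubscriptionManager(engine, metadata, ttl=30000)
    #   sub_mgr.createTables()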
def getDb(self):
return self.engine.connect()
def subscribe(self, connection_id, client_id, topic, sub_topic=None, selector=None):
"""Subscribe a client to a topic.
arguments
==========
* connection_id - string, id of Flash client that is subscribing.
* client_id - string, id of messaging client that is subscribing.
* topic - string, Topic to subscribe to.
* sub_topic - string, Sub-Topic to subscribe to. Default = None.
"""
topic = self.getTopicKey(topic, sub_topic)
ins = self.subscriptions.insert().values(
connection_id=connection_id,
client_id=client_id,
topic=topic
)
db = self.getDb()
db.execute(ins)
db.close()
def unSubscribe(self, connection_id, client_id, topic, sub_topic=None):
"""Un-Subscribe a client from a topic.
arguments
==========
* connection_id - string, id of Flash client that is subscribing.
* client_id - string, id of messaging client that is subscribing.
* topic - string, Topic to un-subscribe from.
* sub_topic - string, Sub-Topic to un-subscribe from. Default = None.
"""
topic = self.getTopicKey(topic, sub_topic)
d = self.subscriptions.delete().\
where(and_(self.subscriptions.c.connection_id==connection_id,
self.subscriptions.c.client_id==client_id,
self.subscriptions.c.topic==topic))
db = self.getDb()
db.execute(d)
db.close()
def deleteConnection(self, connection):
"""Remove all subscriptions for this connection.
arguments
==========
* connection_id - string, id of Flash client that is subscribing.
"""
d = self.subscriptions.delete().\
where(self.subscriptions.c.connection_id==connection.id)
db = self.getDb()
db.execute(d)
db.close()
def iterSubscribers(self, topic, sub_topic=None):
"""Iterate through Flash client ids subscribed to a specific topic."""
topic = self.getTopicKey(topic, sub_topic)
s = sa.select([self.subscriptions.c.connection_id],
self.subscriptions.c.topic==topic, distinct=True)
db = self.getDb()
results = db.execute(s)
for row in results:
yield row[self.subscriptions.c.connection_id]
def iterConnectionSubscriptions(self, connection):
"""Iterate through all Subscriptions that belong to a specific connection."""
s = sa.select([self.subscriptions.c.connection_id,
self.subscriptions.c.client_id, self.subscriptions.c.topic],
self.subscriptions.c.connection_id==connection.id)
db = self.getDb()
results = db.execute(s)
for row in results:
yield Subscription(row[self.subscriptions.c.connection_id],
row[self.subscriptions.c.client_id], row[self.subscriptions.c.topic])
def persistMessage(self, msg):
"""Store a message."""
if hasattr(msg, 'headers') and (msg.headers is not None):
enc_headers = pickle.dumps(msg.headers)
else:
enc_headers = None
if hasattr(msg, 'correlationId'):
correlation_id = msg.correlationId
else:
correlation_id = None
ins = self.messages.insert().values(
topic=self.getMessageTopicKey(msg),
clientId=msg.clientId,
messageId=msg.messageId,
correlationId=correlation_id,
destination=msg.destination,
timestamp=msg.timestamp,
timeToLive=msg.timeToLive,
headers=enc_headers,
body=pickle.dumps(msg.body)
)
db = self.getDb()
db.execute(ins)
db.close()
def deleteExpiredMessages(self, cutoff_time):
"""Deletes expired messages."""
d = self.messages.delete().\
where(self.messages.c.timestamp + self.messages.c.timeToLive < cutoff_time)
db = self.getDb()
db.execute(d)
db.close()
def pollMessages(self, topic, cutoff_time, current_time):
"""Retrieves all queued messages, and discards expired messages.
arguments:
===========
* topic - string, Topic to find messages for.
* cutoff_time - float, epoch time, only messages published
after this time will be returned.
* current_time - float, epoch time, used to determine if a
message is expired.
"""
# Poll for new messages
s = sa.select((self.messages,),
and_(self.messages.c.topic == topic,
self.messages.c.timestamp > cutoff_time)).\
order_by(self.messages.c.timestamp)
db = self.getDb()
results = db.execute(s)
for row in results:
if row['headers'] is None:
headers = None
else:
headers = pickle.loads(str(row['headers']))
yield messaging.AsyncMessage(body=pickle.loads(str(row['body'])),
clientId=row['clientId'], destination=row['destination'],
headers=headers, timeToLive=row['timeToLive'],
timestamp=row['timestamp'], messageId=row['messageId'])
db.close()
| mit | -6,660,335,286,674,362,000 | 34.700935 | 93 | 0.595157 | false |
vineodd/PIMSim | GEM5Simulation/gem5/tests/long/se/10.mcf/test.py | 23 | 1777 | # Copyright (c) 2006-2007 The Regents of The University of Michigan
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Korey Sewell
m5.util.addToPath('../configs/common')
from cpu2000 import mcf
workload = mcf(isa, opsys, 'smred')
root.system.cpu[0].workload = workload.makeProcess()
root.system.physmem.range=AddrRange('256MB')
| gpl-3.0 | 2,432,586,508,160,135,000 | 51.264706 | 72 | 0.790096 | false |
nguy/artview | artview/components/corrections/calculate_attenuation.py | 2 | 6698 | """
calculate_attenuation.py
"""
# Load the needed packages
from functools import partial
import pyart
from pyart.config import get_field_name
import time
from ...core import (Component, Variable, common, QtWidgets, QtGui,
QtCore, VariableChoose)
import os
class CalculateAttenuation(Component):
'''
Interface for executing :py:func:`pyart.correct.calculate_attenuation`
'''
Vradar = None #: see :ref:`shared_variable`
@classmethod
def guiStart(self, parent=None):
'''Graphical interface for starting this class'''
kwargs, independent = \
common._SimplePluginStart("CalculateAttenuation").startDisplay()
kwargs['parent'] = parent
return self(**kwargs), independent
def __init__(self, Vradar=None, # Vgatefilter=None,
name="CalculateAttenuation", parent=None):
'''Initialize the class to create the interface.
Parameters
----------
[Optional]
Vradar : :py:class:`~artview.core.core.Variable` instance
Radar signal variable.
A value of None initializes an empty Variable.
name : string
Field Radiobutton window name.
parent : PyQt instance
Parent instance to associate to this class.
If None, then Qt owns, otherwise associated w/ parent PyQt instance
'''
super(CalculateAttenuation, self).__init__(name=name, parent=parent)
self.central_widget = QtWidgets.QWidget()
self.setCentralWidget(self.central_widget)
self.layout = QtWidgets.QGridLayout(self.central_widget)
self.despeckleButton = QtWidgets.QPushButton("CalculateAttenuation")
self.despeckleButton.clicked.connect(self.calculate_attenuation)
self.layout.addWidget(self.despeckleButton, 0, 0)
parentdir = os.path.abspath(os.path.join(os.path.dirname(__file__),
os.pardir))
config_icon = QtGui.QIcon(os.sep.join([parentdir, 'icons',
"categories-applications-system-icon.png"]))
self.configButton = QtWidgets.QPushButton(config_icon,"")
self.layout.addWidget(self.configButton, 0, 1)
self.configMenu = QtWidgets.QMenu(self)
self.configButton.setMenu(self.configMenu)
self.configMenu.addAction(QtWidgets.QAction("Set Parameters", self,
triggered=self.setParameters))
self.configMenu.addAction(QtWidgets.QAction("Help", self,
triggered=self._displayHelp))
self.parameters = {
"radar": None,
"z_offset": 0,
"debug": False,
"doc": 15,
"fzl": 4000,
"rhv_min": 0.8,
"ncp_min": 0.5,
"a_coef": 0.06,
"beta": 0.8,
"refl_field": get_field_name('reflectivity'),
"ncp_field": get_field_name('normalized_coherent_power'),
"rhv_field": get_field_name('cross_correlation_ratio'),
"phidp_field": get_field_name('differential_phase'),
"spec_at_field": get_field_name('specific_attenuation'),
"corr_refl_field": get_field_name('corrected_reflectivity'),
}
self.parameters_type = [
("z_offset", float),
("debug", bool),
("doc", float),
("fzl", float),
("rhv_min", float),
("ncp_min", float),
("a_coef", float),
("beta", float),
("refl_field", str),
("ncp_field", str),
("rhv_field", str),
("phidp_field", str),
("spec_at_field", str),
("corr_refl_field", str),
]
self.layout.setColumnStretch(0, 1)
if Vradar is None:
self.Vradar = Variable(None)
else:
self.Vradar = Vradar
self.sharedVariables = {"Vradar": None}
self.connectAllVariables()
self.show()
def calculate_attenuation(self):
'''Mount Options and execute
:py:func:`~pyart.correct.calculate_attenuation`.
The resulting fields are added to Vradar.
Vradar is updated, strong or weak depending on overwriting old fields.
'''
# test radar
if self.Vradar.value is None:
common.ShowWarning("Radar is None, can not perform correction.")
return
# mount options
self.parameters['radar'] = self.Vradar.value
print(self.parameters)
# execute
print("Correcting ..")
t0 = time.time()
try:
spec_at, cor_z = pyart.correct.calculate_attenuation(**self.parameters)
        except:
            import traceback
            error = traceback.format_exc()
            common.ShowLongText("Py-ART fails with following error\n\n" +
                                error)
            return  # bail out; spec_at / cor_z were never assigned
t1 = time.time()
print(("Correction took %fs" % (t1-t0)))
# verify field overwriting
spec_at_field_name = self.parameters['spec_at_field']
corr_refl_field_name = self.parameters['corr_refl_field']
strong_update = False # insertion is weak, overwrite strong
if spec_at_field_name in self.Vradar.value.fields.keys():
resp = common.ShowQuestion(
"Field %s already exists! Do you want to over write it?" %
spec_at_field_name)
if resp != QtWidgets.QMessageBox.Ok:
return
else:
strong_update = True
if corr_refl_field_name in self.Vradar.value.fields.keys():
resp = common.ShowQuestion(
"Field %s already exists! Do you want to over write it?" %
corr_refl_field_name)
if resp != QtWidgets.QMessageBox.Ok:
return
else:
strong_update = True
# add fields and update
self.Vradar.value.add_field(spec_at_field_name, spec_at, True)
self.Vradar.value.add_field(corr_refl_field_name, cor_z, True)
self.Vradar.update(strong_update)
print("Correction took %fs" % (t1-t0))
def setParameters(self):
'''Open set parameters dialog.'''
parm = common.get_options(self.parameters_type, self.parameters)
for key in parm.keys():
self.parameters[key] = parm[key]
def _displayHelp(self):
'''Display Py-Art's docstring for help.'''
common.ShowLongText(pyart.correct.calculate_attenuation.__doc__)
_plugins = [CalculateAttenuation]
| bsd-3-clause | -4,730,286,594,823,258,000 | 35.010753 | 90 | 0.56599 | false |
tommybananas/OpenWPM | automation/Commands/utils/domain_utils.py | 2 | 4254 | from publicsuffix import PublicSuffixList, fetch
from ipaddress import ip_address
from urlparse import urlparse
from functools import wraps
import tempfile
import codecs
import os
# We cache the Public Suffix List in temp directory
PSL_CACHE_LOC = os.path.join(tempfile.gettempdir(),'public_suffix_list.dat')
def get_psl():
"""
Grabs an updated public suffix list.
"""
if not os.path.isfile(PSL_CACHE_LOC):
print "%s does not exist, downloading a copy." % PSL_CACHE_LOC
psl_file = fetch()
with codecs.open(PSL_CACHE_LOC, 'w', encoding='utf8') as f:
f.write(psl_file.read())
print "Using psl from cache: %s" % PSL_CACHE_LOC
psl_cache = codecs.open(PSL_CACHE_LOC, encoding='utf8')
return PublicSuffixList(psl_cache)
def load_psl(function):
@wraps(function)
def wrapper(*args, **kwargs):
if not kwargs.has_key('psl'):
if wrapper.psl is None:
wrapper.psl = get_psl()
return function(*args, psl=wrapper.psl, **kwargs)
else:
return function(*args, **kwargs)
wrapper.psl = None
return wrapper
def is_ip_address(hostname):
"""
Check if the given string is a valid IP address
"""
try:
ip_address(unicode(hostname))
return True
except ValueError:
return False
@load_psl
def get_ps_plus_1(url, **kwargs):
"""
Returns the PS+1 of the url. This will also return
an IP address if the hostname of the url is a valid
IP address.
An (optional) PublicSuffixList object can be passed with keyword arg 'psl',
otherwise a version cached in the system temp directory is used.
"""
if not kwargs.has_key('psl'):
raise ValueError("A PublicSuffixList must be passed as a keyword argument.")
hostname = urlparse(url).hostname
if is_ip_address(hostname):
return hostname
elif hostname is None:
# Possible reasons hostname is None, `url` is:
# * malformed
# * a relative url
# * a `javascript:` or `data:` url
# * many others
return
else:
return kwargs['psl'].get_public_suffix(hostname)
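# Illustrative results, assuming an up-to-date public suffix list:
#   get_ps_plus_1("http://a.b.example.co.uk/page") -> "example.co.uk"
#   get_ps_plus_1("http://192.0.2.1/index.html")   -> "192.0.2.1"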
@load_psl
def hostname_subparts(url, include_ps=False, **kwargs):
"""
Returns a list of slices of a url's hostname down to the PS+1 (or PS if include_ps)
For example: http://a.b.c.d.com/path?query#frag would yield:
[a.b.c.d.com, b.c.d.com, c.d.com, d.com] if include_ps == False
[a.b.c.d.com, b.c.d.com, c.d.com, d.com, com] if include_ps == True
An (optional) PublicSuffixList object can be passed with keyword arg 'psl'.
otherwise a version cached in the system temp directory is used.
"""
if not kwargs.has_key('psl'):
raise ValueError("A PublicSuffixList must be passed as a keyword argument.")
hostname = urlparse(url).hostname
# If an IP address, just return a single item list with the IP
if is_ip_address(hostname):
return [hostname]
subparts = list()
ps_plus_1 = kwargs['psl'].get_public_suffix(hostname)
# We expect all ps_plus_1s to have at least one '.'
# If they don't, the url was likely malformed, so we'll just return an
# empty list
if '.' not in ps_plus_1:
return []
subdomains = hostname[:-(len(ps_plus_1)+1)].split('.')
if subdomains == ['']:
subdomains = []
for i in range(len(subdomains)):
subparts.append('.'.join(subdomains[i:])+'.'+ps_plus_1)
subparts.append(ps_plus_1)
if include_ps:
try:
subparts.append(ps_plus_1[ps_plus_1.index('.')+1:])
except:
pass
return subparts
def get_stripped_url(url, scheme=False):
"""Returns a url stripped to (scheme)?+hostname+path"""
purl = urlparse(url)
surl = ''
if scheme:
surl += purl.scheme + '://'
try:
surl += purl.hostname + purl.path
except TypeError:
surl += purl.hostname
return surl
def get_stripped_urls(urls, scheme=False):
""" Returns a set (or list) of urls stripped to (scheme)?+hostname+path """
new_urls = list()
for url in urls:
get_stripped_url(url, scheme)
if type(urls) == set:
return set(new_urls)
return new_urls
| gpl-3.0 | -7,362,794,950,796,595,000 | 31.227273 | 87 | 0.623178 | false |
mpenning/polymer | polymer/Polymer.py | 1 | 33370 | from __future__ import absolute_import
from logging.handlers import TimedRotatingFileHandler
from logging.handlers import MemoryHandler
import multiprocessing.queues as mpq
from multiprocessing import Process
import multiprocessing
from datetime import datetime
from copy import deepcopy
from hashlib import md5
import traceback as tb
import logging
import time
import sys
import os
if sys.version_info < (3, 0):
import cPickle as pickle # Python2
else:
import _pickle as pickle # Python3
try:
# This works in Python2.7...
from Queue import Empty, Full
except ImportError:
# This works in Python3.x...
from queue import Empty, Full
from colorama import init as color_init
from colorama import Fore, Style
""" Polymer.py - Manage parallel tasks
Copyright (C) 2015-2020 David Michael Pennington
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
If you need to contact the author, you can do so by emailing:
mike [~at~] pennington [/dot\] net
"""
class SharedCounter(object):
""" A synchronized shared counter.
The locking done by multiprocessing.Value ensures that only a single
process or thread may read or write the in-memory ctypes object. However,
in order to do n += 1, Python performs a read followed by a write, so a
second process may read the old value before the new one is written by the
first process. The solution is to use a multiprocessing.Lock to guarantee
the atomicity of the modifications to Value.
This class comes almost entirely from Eli Bendersky's blog:
http://eli.thegreenplace.net/2012/01/04/shared-counter-with-pythons-multiprocessing/
"""
def __init__(self, n = 0):
self.count = multiprocessing.Value('i', n)
def increment(self, n = 1):
""" Increment the counter by n (default = 1) """
with self.count.get_lock():
self.count.value += n
@property
def value(self):
""" Return the value of the counter """
return self.count.value
################################################################################
#
#py23_mp_queue() is heavily based on the following github repo's commit...
#http://github.com/vterron/lemon/commit/9ca6b4b1212228dbd4f69b88aaf88b12952d7d6f
#Code license is GPLv3 according to github.com/vterron/lemon/setup.py
#
################################################################################
class py23_mp_queue(mpq.Queue):
""" A portable implementation of multiprocessing.Queue.
Because of multithreading / multiprocessing semantics, Queue.qsize() may
raise the NotImplementedError exception on Unix platforms like Mac OS X
where sem_getvalue() is not implemented. This subclass addresses this
problem by using a synchronized shared counter (initialized to zero) and
increasing / decreasing its value every time the put() and get() methods
are called, respectively. This not only prevents NotImplementedError from
being raised, but also allows us to implement a reliable version of both
qsize() and empty().
"""
# NOTE This is the new implementation based on:
# https://github.com/vterron/lemon/blob/master/util/queue.py
def __init__(self, *args, **kwargs):
if sys.version_info >= (3, 4, 0):
super(py23_mp_queue, self).__init__(
*args, ctx=multiprocessing.get_context("spawn"), **kwargs
)
else:
super(py23_mp_queue, self).__init__(*args, **kwargs)
self.size = SharedCounter(0)
def put(self, *args, **kwargs):
# Infinite recursion possible if we don't use super() here
super(py23_mp_queue, self).put(*args, **kwargs)
self.size.increment(1)
def get(self, *args, **kwargs):
item = super(py23_mp_queue, self).get(*args, **kwargs)
self.size.increment(-1)
return item
def qsize(self):
""" Reliable implementation of multiprocessing.Queue.qsize() """
return self.size.value
def empty(self):
""" Reliable implementation of multiprocessing.Queue.empty() """
return not self.qsize()
def clear(self):
""" Remove all elements from the Queue. """
while not self.empty():
self.get()
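# Illustrative use (not part of the original module): the queue behaves like
# multiprocessing.Queue, but qsize()/empty() also work on platforms such as
# Mac OS X where sem_getvalue() is not implemented:
#
#     q = py23_mp_queue()
#     q.put({"state": "example"})
#     assert q.qsize() == 1 and not q.empty()
#     q.clear()        # drain the queue
#     assert q.empty()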
class Worker(object):
"""multiprocessing worker"""
def __init__(self, w_id, todo_q, done_q, default_sleep=0.00001):
assert isinstance(default_sleep, int) or isinstance(default_sleep, float)
color_init()
self.w_id = w_id
self.cycle_sleep = default_sleep # How long the worker should sleep
self.task = None
self.done_q = done_q
try:
self.message_loop(todo_q, done_q) # do work
except:
if self.task is not None:
self.task.task_stop = time.time() # Seconds since epoch
## Format and return the error
tb_str = "".join(tb.format_exception(*(sys.exc_info())))
self.done_q_send(
{
"w_id": self.w_id,
"task": self.task,
"error": tb_str,
"state": "__ERROR__",
}
)
# self.cycle_sleep = self.task.worker_loop_delay
# time.sleep(self.cycle_sleep)
self.task = None
def done_q_send(self, msg_dict):
"""Send message dicts through done_q, and throw explicit errors for
pickle problems"""
# Check whether msg_dict can be pickled...
no_pickle_keys = self.invalid_dict_pickle_keys(msg_dict)
if no_pickle_keys == []:
self.done_q.put(msg_dict)
else:
## Explicit pickle error handling
hash_func = md5()
            hash_func.update(str(msg_dict).encode("utf-8"))  # md5 requires bytes under Python 3
dict_hash = str(hash_func.hexdigest())[-7:] # Last 7 digits of hash
linesep = os.linesep
sys.stderr.write(
"{0} {1}done_q_send({2}) Can't pickle this dict:{3} '''{7}{4} {5}{7}{6}''' {7}".format(
datetime.now(),
Style.BRIGHT,
dict_hash,
Style.RESET_ALL,
Fore.MAGENTA,
str(msg_dict),
Style.RESET_ALL,
linesep,
)
)
sys.stderr.write(
"{0} {1}Pickling problems often come from open or hung TCP sockets{2}{3}".format(
datetime.now(), Style.BRIGHT, Style.RESET_ALL, linesep,
)
)
## Verbose list of the offending key(s) / object attrs
## Send all output to stderr...
err_frag1 = (
Style.BRIGHT
+ " done_q_send({0}) Offending dict keys:".format(str(dict_hash))
+ Style.RESET_ALL
)
err_frag2 = Fore.YELLOW + " {0}".format(no_pickle_keys) + Style.RESET_ALL
err_frag3 = "{0}".format(linesep)
sys.stderr.write(err_frag1 + err_frag2 + err_frag3)
for key in sorted(no_pickle_keys):
sys.stderr.write(
" msg_dict['{0}']: {1}'{2}'{3}{4}".format(
key,
Fore.MAGENTA,
repr(msg_dict.get(key)),
Style.RESET_ALL,
linesep,
)
)
if isinstance(msg_dict.get(key), object):
thisobj = msg_dict.get(key)
no_pickle_attrs = self.invalid_obj_pickle_attrs(thisobj)
err_frag1 = (
Style.BRIGHT
+ " done_q_send({0}) Offending attrs:".format(dict_hash)
+ Style.RESET_ALL
)
err_frag2 = (
Fore.YELLOW + " {0}".format(no_pickle_attrs) + Style.RESET_ALL
)
err_frag3 = "{0}".format(linesep)
sys.stderr.write(err_frag1 + err_frag2 + err_frag3)
for attr in no_pickle_attrs:
sys.stderr.write(
" msg_dict['{0}'].{1}: {2}'{3}'{4}{5}".format(
key,
attr,
Fore.RED,
repr(getattr(thisobj, attr)),
Style.RESET_ALL,
linesep,
)
)
sys.stderr.write(
" {0}done_q_send({1}) keys (no problems):{2}{3}".format(
Style.BRIGHT, dict_hash, Style.RESET_ALL, linesep
)
)
for key in sorted(set(msg_dict.keys()).difference(no_pickle_keys)):
sys.stderr.write(
" msg_dict['{0}']: {1}{2}{3}{4}".format(
key,
Fore.GREEN,
repr(msg_dict.get(key)),
Style.RESET_ALL,
linesep,
)
)
def invalid_dict_pickle_keys(self, msg_dict):
"""Return a list of keys that can't be pickled. Return [] if
there are no pickling problems with the values associated with the
keys. Return the list of keys, if there are problems."""
no_pickle_keys = list()
for key, val in msg_dict.items():
try:
pickle.dumps(key)
pickle.dumps(val)
except TypeError:
no_pickle_keys.append(key) # This key has an unpicklable value
except pickle.PicklingError:
no_pickle_keys.append(key) # This key has an unpicklable value
except pickle.UnpickleableError:
no_pickle_keys.append(key) # This key has an unpicklable value
return no_pickle_keys
def invalid_obj_pickle_attrs(self, thisobj):
no_pickle_attrs = list()
for attr, val in vars(thisobj).items():
try:
pickle.dumps(getattr(thisobj, attr))
except TypeError:
no_pickle_attrs.append(attr) # This attr is unpicklable
except pickle.PicklingError:
no_pickle_attrs.append(attr) # This attr is unpicklable
except pickle.UnpickleableError:
no_pickle_attrs.append(attr) # This attr is unpicklable
return no_pickle_attrs
def message_loop(self, todo_q, done_q):
"""Loop through messages and execute tasks"""
t_msg = {}
while t_msg.get("state", "") != "__DIE__":
try:
t_msg = todo_q.get(True, self.cycle_sleep) # Poll blocking
self.task = t_msg.get("task", "") # __DIE__ has no task
if self.task != "":
self.task.task_start = time.time() # Start the timer
# Send ACK to the controller who requested work on this task
self.done_q_send(
{"w_id": self.w_id, "task": self.task, "state": "__ACK__"}
)
# Update the sleep time with latest recommendations
self.cycle_sleep = self.task.worker_loop_delay
# Assign the result of task.run() to task.result
self.task.result = self.task.run()
self.task.task_stop = time.time() # Seconds since epoch
self.done_q_send(
{"w_id": self.w_id, "task": self.task, "state": "__FINISHED__"}
) # Ack work finished
self.task = None
except Empty:
pass
except Full:
time.sleep(0.1)
## Disable extraneous error handling...
except:
if self.task is not None:
self.task.task_stop = time.time() # Seconds since epoch
# Handle all other errors here...
tb_str = "".join(tb.format_exception(*(sys.exc_info())))
self.done_q_send(
{
"w_id": self.w_id,
"task": self.task,
"error": tb_str,
"state": "__ERROR__",
}
)
return
class TaskMgrStats(object):
def __init__(self, worker_count, log_interval=60, hot_loop=False):
self.log_interval = log_interval
self.stats_start = time.time()
self.exec_times = list() # Archive of all exec times
self.queue_times = list() # Archive of all queue times
self.worker_count = worker_count
self.hot_loop = hot_loop
def reset(self):
self.stats_start = time.time()
self.exec_times = list()
self.queue_times = list()
@property
    def worker_pct_busy(self):
        """Return the percentage of total worker time spent running tasks."""
        total_work_time = self.worker_count * self.time_delta
        if total_work_time <= 0.0:
            return 0.0
        return sum(self.exec_times) / total_work_time * 100.0
@property
def time_delta(self):
return time.time() - self.stats_start
@property
def log_time(self):
"""Return True if it's time to log"""
if self.hot_loop and self.time_delta >= self.log_interval:
return True
return False
@property
def log_message(self):
"""Build a log message and reset the stats"""
time_delta = deepcopy(self.time_delta)
total_work_time = self.worker_count * time_delta
time_worked = sum(self.exec_times)
pct_busy = time_worked / total_work_time * 100.0
min_task_time = min(self.exec_times)
avg_task_time = sum(self.exec_times) / len(self.exec_times)
max_task_time = max(self.exec_times)
min_queue_time = min(self.queue_times)
avg_queue_time = sum(self.queue_times) / len(self.queue_times)
max_queue_time = max(self.queue_times)
time_delta = self.time_delta
total_tasks = len(self.exec_times)
avg_task_rate = total_tasks / time_delta
self.reset()
task_msg = """Ran {0} tasks, {1} tasks/s; {2} workers {3}% busy""".format(
total_tasks, round(avg_task_rate, 1), self.worker_count, round(pct_busy, 1)
)
task_mam = """ Task run times: {0}/{1}/{2} (min/avg/max)""".format(
round(min_task_time, 3), round(avg_task_time, 3), round(max_task_time, 3)
)
queue_mam = """ Time in queue: {0}/{1}/{2} (min/avg/max)""".format(
round(min_queue_time, 6), round(avg_queue_time, 6), round(max_queue_time, 6)
)
return """{0}\n{1}\n{2}""".format(task_msg, task_mam, queue_mam)
class TaskMgr(object):
"""Manage tasks to and from workers; maybe one day use zmq instead of
multiprocessing.Queue"""
# http://www.jeffknupp.com/blog/2014/02/11/a-celerylike-python-task-queue-in-55-lines-of-code/
def __init__(
self,
work_todo=None,
log_level=3,
log_stdout=True,
log_path="taskmgr.log",
queue=None,
hot_loop=False,
worker_count=5,
log_interval=60,
worker_cycle_sleep=0.000001,
resubmit_on_error=False,
):
if work_todo is None:
work_todo = list()
assert isinstance(work_todo, list), "Please add work in a python list"
self.work_todo = work_todo # List with work to do
self.worker_count = worker_count
self.worker_cycle_sleep = worker_cycle_sleep
self.log_level = log_level # 0: off, 1: errors, 2: info, 3: debug
self.log_stdout = log_stdout
self.log_path = log_path
self.log_interval = log_interval
self.resubmit_on_error = resubmit_on_error
# By default, Python3's multiprocessing.Queue doesn't implement qsize()
# NOTE: OSX doesn't implement queue.qsize(), py23_mp_queue is a
# workaround
# AttributeError: 'module' object has no attribute 'get_context'
self.todo_q = py23_mp_queue() # workers listen to todo_q (task queue)
self.done_q = py23_mp_queue() # results queue
self.worker_assignments = dict() # key: w_id, value: worker Process objs
self.results = dict()
self.configure_logging()
self.hot_loop = hot_loop
self.retval = set([])
color_init()
if hot_loop:
assert isinstance(queue, ControllerQueue)
self.controller = queue
self.supervise() # hot_loops automatically supervise()
def configure_logging(self):
if self.log_level:
self.log = logging.getLogger(__name__)
fmtstr = "%(asctime)s.%(msecs).03d %(levelname)s %(message)s"
format = logging.Formatter(fmt=fmtstr, datefmt="%Y-%m-%d %H:%M:%S")
self.log.setLevel(logging.DEBUG)
if self.log_path:
## Rotate the logfile every day...
rotating_file = TimedRotatingFileHandler(
self.log_path, when="D", interval=1, backupCount=5
)
rotating_file.setFormatter(format)
memory_log = logging.handlers.MemoryHandler(
1024 * 10, logging.DEBUG, rotating_file
)
self.log.addHandler(memory_log)
if self.log_stdout:
console = logging.StreamHandler()
console.setFormatter(format)
self.log.addHandler(console)
if not bool(self.log_path) and (not self.log_stdout):
self.log_level = 0
def supervise(self):
"""If not in a hot_loop, call supervise() to start the tasks"""
self.retval = set([])
stats = TaskMgrStats(
worker_count=self.worker_count,
log_interval=self.log_interval,
hot_loop=self.hot_loop,
)
hot_loop = self.hot_loop
if self.log_level >= 2:
logmsg = "TaskMgr.supervise() started {0} workers".format(self.worker_count)
self.log.info(logmsg)
self.workers = self.spawn_workers()
## Add work
self.num_tasks = 0
if not hot_loop:
if self.log_level >= 2:
logmsg = "TaskMgr.supervise() received {0} tasks".format(
len(self.work_todo)
)
self.log.info(logmsg)
for task in self.work_todo:
self.num_tasks += 1
if self.log_level >= 2:
logmsg = "TaskMgr.supervise() queued task: {0}".format(task)
self.log.info(logmsg)
self.queue_task(task)
finished = False
while not finished:
try:
if hot_loop:
# Calculate the adaptive loop delay
delay = self.calc_wait_time(stats.exec_times)
self.queue_tasks_from_controller(delay=delay) # queue tasks
time.sleep(delay)
r_msg = self.done_q.get_nowait() # __ACK__ or __FINISHED__
task = r_msg.get("task")
w_id = r_msg.get("w_id")
state = r_msg.get("state", "")
if state == "__ACK__":
self.worker_assignments[w_id] = task
self.work_todo.remove(task)
if self.log_level >= 3:
self.log.debug("r_msg: {0}".format(r_msg))
if self.log_level >= 3:
self.log.debug("w_id={0} received task={1}".format(w_id, task))
elif state == "__FINISHED__":
now = time.time()
task_exec_time = task.task_stop - task.task_start
task_queue_time = now - task.queue_time - task_exec_time
stats.exec_times.append(task_exec_time)
stats.queue_times.append(task_queue_time)
if self.log_level >= 1:
self.log.info(
"TaskMgr.work_todo: {0} tasks left".format(
len(self.work_todo)
)
)
if self.log_level >= 3:
self.log.debug("TaskMgr.work_todo: {0}".format(self.work_todo))
self.log.debug("r_msg: {0}".format(r_msg))
if not hot_loop:
self.retval.add(task) # Add result to retval
self.worker_assignments.pop(w_id) # Delete the key
finished = self.is_finished()
else:
self.controller.from_taskmgr_q.put(task) # Send to the controller
self.worker_assignments.pop(w_id) # Delete the key
elif state == "__ERROR__":
now = time.time()
task_exec_time = task.task_stop - task.task_start
task_queue_time = now - task.queue_time - task_exec_time
stats.exec_times.append(task_exec_time)
stats.queue_times.append(task_queue_time)
if self.log_level >= 1:
self.log.error("r_msg: {0}".format(r_msg))
self.log.error("".join(r_msg.get("error")))
self.log.debug(
"TaskMgr.work_todo: {0} tasks left".format(
len(self.work_todo)
)
)
if self.log_level >= 3:
self.log.debug("TaskMgr.work_todo: {0}".format(self.work_todo))
if not hot_loop:
if not self.resubmit_on_error:
# If task is in work_todo, delete it
for tt in self.work_todo:
if tt == task:
self.work_todo.remove(task) # Remove task...
try:
# Delete the worker assignment...
self.worker_assignments.pop(w_id)
except:
pass
self.retval.add(task) # Add result to retval
self.respawn_dead_workers()
except Empty:
state = "__EMPTY__"
except Exception as ee:
tb_str = "".join(tb.format_exception(*(sys.exc_info())))
print("ERROR:")
print(ee, tb_str)
if stats.log_time:
if self.log_level >= 0:
self.log.info(stats.log_message)
# Adaptive loop delay
delay = self.calc_wait_time(stats.exec_times)
time.sleep(delay)
self.respawn_dead_workers()
finished = self.is_finished()
if not hot_loop:
self.kill_workers()
for w_id, p in self.workers.items():
p.join()
## Log a final stats summary...
if self.log_level > 0:
self.log.info(stats.log_message)
return self.retval
def calc_wait_time(self, exec_times):
num_samples = float(len(exec_times))
if num_samples > 0.0:
# NOTE: OSX doesn't implement queue.qsize(), I worked around the
# problem
queue_size = max(self.done_q.qsize(), 1.0) + max(self.todo_q.qsize(), 1.0)
min_task_time = min(exec_times)
wait_time = min_task_time / queue_size
else:
wait_time = 0.00001 # 10us delay to avoid worker / done_q race
return wait_time
def queue_tasks_from_controller(self, delay=0.0):
finished = False
while not finished:
try:
## Hot loops will queue a list of tasks...
tasklist = self.controller.to_taskmgr_q.get_nowait()
for task in tasklist:
if delay > 0.0:
task.worker_loop_delay = delay
self.work_todo.append(task)
self.queue_task(task)
except Empty:
# Poll until empty
finished = True
# except (Exception) as e:
# tb.print_exc()
def queue_task(self, task):
task.queue_time = time.time() # Record the queueing time
self.todo_q_send({"task": task})
def is_finished(self):
if (len(self.work_todo) == 0) and (len(self.worker_assignments.keys()) == 0):
return True
elif not self.hot_loop and (len(self.retval)) == self.num_tasks:
# We need this exit condition due to __ERROR__ race conditions...
return True
return False
def kill_workers(self):
stop = {"state": "__DIE__"}
[self.todo_q_send(stop) for x in range(0, self.worker_count)]
def respawn_dead_workers(self):
"""Respawn workers / tasks upon crash"""
for w_id, p in self.workers.items():
if not p.is_alive():
# Queue the task for another worker, if required...
if self.log_level >= 2:
self.log.info("Worker w_id {0} died".format(w_id))
task = self.worker_assignments.get(w_id, {})
if self.log_level >= 2 and task != {}:
self.log.info(
"Dead worker w_id {0} was assigned task - {1}".format(
w_id, task
)
)
error_suffix = ""
if task != {}:
del self.worker_assignments[w_id]
if self.resubmit_on_error or self.hot_loop:
self.work_todo.append(task)
self.queue_task(task)
if self.log_level >= 2:
self.log.info("Resubmitting task - {0}".format(task))
                    error_suffix = " with task={0}".format(task)
if self.log_level >= 1:
self.log.debug(
"TaskMgr.work_todo: {0} tasks left".format(len(self.work_todo))
)
if self.log_level >= 2:
self.log.info(
"Respawning worker - w_id={0}{1}".format(w_id, error_suffix)
)
self.workers[w_id] = Process(
target=Worker,
args=(w_id, self.todo_q, self.done_q, self.worker_cycle_sleep),
)
self.workers[w_id].daemon = True
self.workers[w_id].start()
def spawn_workers(self):
workers = dict()
for w_id in range(0, self.worker_count):
workers[w_id] = Process(
target=Worker,
name="Polymer.py Worker {0}".format(w_id),
args=(w_id, self.todo_q, self.done_q, self.worker_cycle_sleep),
)
workers[w_id].daemon = True
workers[w_id].start()
return workers
def todo_q_send(self, msg_dict):
# Check whether msg_dict can be pickled...
no_pickle_keys = self.invalid_dict_pickle_keys(msg_dict)
if no_pickle_keys == []:
self.todo_q.put(msg_dict)
else:
## Explicit pickle error handling
hash_func = md5()
            hash_func.update(str(msg_dict).encode("utf-8"))  # md5 requires bytes under Python 3
dict_hash = str(hash_func.hexdigest())[-7:] # Last 7 digits of hash
linesep = os.linesep
sys.stderr.write(
"{0} {1}todo_q_send({2}) Can't pickle this dict:{3} '''{7}{4} {5}{7}{6}''' {7}".format(
datetime.now(),
Style.BRIGHT,
dict_hash,
Style.RESET_ALL,
Fore.MAGENTA,
msg_dict,
Style.RESET_ALL,
linesep,
)
)
## Verbose list of the offending key(s) / object attrs
## Send all output to stderr...
err_frag1 = (
Style.BRIGHT
+ " todo_q_send({0}) Offending dict keys:".format(dict_hash)
+ Style.RESET_ALL
)
err_frag2 = Fore.YELLOW + " {0}".format(no_pickle_keys) + Style.RESET_ALL
err_frag3 = "{0}".format(linesep)
sys.stderr.write(err_frag1 + err_frag2 + err_frag3)
for key in sorted(no_pickle_keys):
sys.stderr.write(
" msg_dict['{0}']: {1}'{2}'{3}{4}".format(
key,
Fore.MAGENTA,
repr(msg_dict.get(key)),
Style.RESET_ALL,
linesep,
)
)
if isinstance(msg_dict.get(key), object):
thisobj = msg_dict.get(key)
no_pickle_attrs = self.invalid_obj_pickle_attrs(thisobj)
err_frag1 = (
Style.BRIGHT
+ " todo_q_send({0}) Offending attrs:".format(dict_hash)
+ Style.RESET_ALL
)
err_frag2 = (
Fore.YELLOW + " {0}".format(no_pickle_attrs) + Style.RESET_ALL
)
err_frag3 = "{0}".format(linesep)
sys.stderr.write(err_frag1 + err_frag2 + err_frag3)
for attr in no_pickle_attrs:
sys.stderr.write(
" msg_dict['{0}'].{1}: {2}'{3}'{4}{5}".format(
key,
attr,
Fore.RED,
repr(getattr(thisobj, attr)),
Style.RESET_ALL,
linesep,
)
)
sys.stderr.write(
" {0}todo_q_send({1}) keys (no problems):{2}{3}".format(
Style.BRIGHT, dict_hash, Style.RESET_ALL, linesep
)
)
for key in sorted(set(msg_dict.keys()).difference(no_pickle_keys)):
sys.stderr.write(
" msg_dict['{0}']: {1}{2}{3}{4}".format(
key,
Fore.GREEN,
repr(msg_dict.get(key)),
Style.RESET_ALL,
linesep,
)
)
def invalid_dict_pickle_keys(self, msg_dict):
"""Return a list of keys that can't be pickled. Return [] if
there are no pickling problems with the values associated with the
keys. Return the list of keys, if there are problems."""
no_pickle_keys = list()
for key, val in msg_dict.items():
try:
pickle.dumps(key)
pickle.dumps(val)
except TypeError:
no_pickle_keys.append(key) # This key has an unpicklable value
except pickle.PicklingError:
no_pickle_keys.append(key) # This key has an unpicklable value
except pickle.UnpickleableError:
no_pickle_keys.append(key) # This key has an unpicklable value
return no_pickle_keys
def invalid_obj_pickle_attrs(self, thisobj):
no_pickle_attrs = list()
for attr, val in vars(thisobj).items():
try:
pickle.dumps(getattr(thisobj, attr))
except TypeError:
no_pickle_attrs.append(attr) # This attr is unpicklable
except pickle.PicklingError:
no_pickle_attrs.append(attr) # This attr is unpicklable
except pickle.UnpickleableError:
no_pickle_attrs.append(attr) # This attr is unpicklable
return no_pickle_attrs
class ControllerQueue(object):
"""A set of queues to manage a continuous hot TaskMgr work loop"""
def __init__(self):
## to and from are with respect to the (client) controller object
self.from_taskmgr_q = py23_mp_queue() # sent to the controller from TaskMgr
self.to_taskmgr_q = py23_mp_queue() # sent from the controller to TaskMgr
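# ---------------------------------------------------------------------------
# Illustrative TaskMgr usage sketch -- not part of the original module.  The
# task interface below is inferred from how Worker and TaskMgr use task
# objects (they call task.run(), read task.worker_loop_delay, and set
# task.queue_time / task_start / task_stop / result), so treat the attribute
# names as assumptions rather than a documented API:
#
#     class SquareTask(object):
#         def __init__(self, number):
#             self.number = number
#             self.worker_loop_delay = 0.00001  # polled by Worker
#             self.queue_time = 0.0             # stamped by TaskMgr.queue_task()
#             self.task_start = 0.0             # stamped by Worker
#             self.task_stop = 0.0              # stamped by Worker
#             self.result = None                # set by Worker to run()'s value
#         def run(self):
#             return self.number ** 2
#         def __eq__(self, other):
#             return isinstance(other, SquareTask) and other.number == self.number
#         def __hash__(self):
#             return hash(self.number)
#
#     if __name__ == "__main__":
#         work = [SquareTask(ii) for ii in range(10)]
#         mgr = TaskMgr(work_todo=work, worker_count=3, log_level=0)
#         finished = mgr.supervise()            # returns a set of completed tasks
#         print(sorted(task.result for task in finished))
# ---------------------------------------------------------------------------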
| gpl-3.0 | 12,495,114,191,308,644 | 38.72619 | 106 | 0.500629 | false |
jsirois/pants | src/python/pants/util/filtering_test.py | 4 | 2389 | # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
from typing import Callable
from pants.util.filtering import and_filters, create_filter, create_filters
def is_divisible_by(divisor_str: str) -> Callable[[int], bool]:
return lambda n: n % int(divisor_str) == 0
def test_create_filter() -> None:
divisible_by_2 = create_filter("2", is_divisible_by)
assert divisible_by_2(2) is True
assert divisible_by_2(3) is False
assert divisible_by_2(4) is True
assert divisible_by_2(6) is True
def test_create_filters() -> None:
# This tests that create_filters() properly captures different closures.
divisible_by_2, divisible_by_3 = create_filters(["2", "3"], is_divisible_by)
assert divisible_by_2(2) is True
assert divisible_by_2(3) is False
assert divisible_by_2(4) is True
assert divisible_by_2(6) is True
assert divisible_by_3(2) is False
assert divisible_by_3(3) is True
assert divisible_by_3(4) is False
assert divisible_by_3(6) is True
def test_and_filters() -> None:
divisible_by_6 = and_filters(create_filters(["2", "3"], is_divisible_by))
assert divisible_by_6(2) is False
assert divisible_by_6(3) is False
assert divisible_by_6(6) is True
assert divisible_by_6(9) is False
assert divisible_by_6(12) is True
def test_list_filter() -> None:
divisible_by_2_or_3 = create_filter("2,3", is_divisible_by)
assert divisible_by_2_or_3(2) is True
assert divisible_by_2_or_3(3) is True
assert divisible_by_2_or_3(4) is True
assert divisible_by_2_or_3(5) is False
assert divisible_by_2_or_3(6) is True
def test_explicit_plus_filter() -> None:
divisible_by_2_or_3 = create_filter("+2,3", is_divisible_by)
assert divisible_by_2_or_3(2) is True
assert divisible_by_2_or_3(3) is True
assert divisible_by_2_or_3(4) is True
assert divisible_by_2_or_3(5) is False
assert divisible_by_2_or_3(6) is True
def test_negated_filter() -> None:
# This tests that the negation applies to the entire list.
coprime_to_2_and_3 = create_filter("-2,3", is_divisible_by)
assert coprime_to_2_and_3(2) is False
assert coprime_to_2_and_3(3) is False
assert coprime_to_2_and_3(4) is False
assert coprime_to_2_and_3(5) is True
assert coprime_to_2_and_3(6) is False
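# --- Illustrative addition (editor's sketch, not part of the original suite) ---
# The composed filters above are plain callables, so they can also be applied
# to a whole collection; this mirrors how callers typically use them.
def test_filter_applied_to_collection() -> None:
    divisible_by_6 = and_filters(create_filters(["2", "3"], is_divisible_by))
    assert [n for n in range(20) if divisible_by_6(n)] == [0, 6, 12, 18]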
| apache-2.0 | 8,250,414,748,049,587,000 | 33.623188 | 80 | 0.681038 | false |
abramhindle/UnnaturalCodeFork | python/testdata/launchpad/lib/lp/registry/browser/featuredproject.py | 1 | 2255 | # Copyright 2009-2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
"""Featured Project views."""
__metaclass__ = type
__all__ = [
'FeaturedProjectsView',
]
from zope.component import getUtility
from zope.interface import Interface
from zope.schema import (
Choice,
Set,
)
from lp import _
from lp.app.browser.launchpadform import (
action,
custom_widget,
LaunchpadFormView,
)
from lp.app.widgets.itemswidgets import LabeledMultiCheckBoxWidget
from lp.registry.interfaces.pillar import IPillarNameSet
from lp.services.webapp import canonical_url
class FeaturedProjectForm(Interface):
"""Form that requires the user to choose a pillar to feature."""
add = Choice(
title=_("Add project"),
description=_(
"Choose a project to feature on the Launchpad home page."),
required=False, vocabulary='DistributionOrProductOrProjectGroup')
remove = Set(
title=u'Remove projects',
description=_(
'Select projects that you would like to remove from the list.'),
required=False,
value_type=Choice(vocabulary="FeaturedProject"))
class FeaturedProjectsView(LaunchpadFormView):
"""A view for adding and removing featured projects."""
label = 'Manage featured projects in Launchpad'
page_title = label
schema = FeaturedProjectForm
custom_widget('remove', LabeledMultiCheckBoxWidget)
@action(_('Update featured project list'), name='update')
def update_action(self, action, data):
"""Add and remove featured projects."""
add = data.get('add')
if add is not None:
getUtility(IPillarNameSet).add_featured_project(add)
remove = data.get('remove')
if remove is not None:
for project in remove:
getUtility(IPillarNameSet).remove_featured_project(project)
self.next_url = canonical_url(self.context)
@action(_("Cancel"), name="cancel", validator='validate_cancel')
def action_cancel(self, action, data):
self.next_url = canonical_url(self.context)
@property
def action_url(self):
return "/+featuredprojects"
| agpl-3.0 | -1,841,910,937,153,029,000 | 27.544304 | 76 | 0.674501 | false |
GrossfieldLab/loos | Packages/PyLOOS/center_molecule.py | 1 | 1660 | #!/usr/bin/env python3
"""
Center an entire model based on a selection.
"""
"""
This file is part of LOOS.
LOOS (Lightweight Object-Oriented Structure library)
Copyright (c) 2012 Tod Romo, Grossfield Lab
Department of Biochemistry and Biophysics
School of Medicine & Dentistry, University of Rochester
This package (LOOS) is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation under version 3 of the License.
This package is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
# Import LOOS
from loos import *
import sys
# Command line arguments
model_name = sys.argv[1]
selection = sys.argv[2]
# Create the model
model = createSystem(model_name)
# Select the requested atoms
subset = selectAtoms(model, selection)
# Compute centroid the old-fashioned way...
# Obviously, it is faster to call subset.centroid(),
# but we're demonstrating how else one could do it.
center = GCoord(0,0,0)
for atom in subset:
center = center + atom.coords()
center = center / len(subset)
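# Editor's note (illustrative): the single library call mentioned in the comment
# above.  This assumes subset.centroid() returns a GCoord; the value is computed
# only for comparison and is not used in the recentering below.
library_center = subset.centroid()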
for atom in subset:
atom.coords(atom.coords() - center)
# Convert to a PDB
pdb = PDB.fromAtomicGroup(model)
# Add a REMARK to the PDB
pdb.remarks().add("Structure centered using '" + selection + "'")
# Print it to stdout...
print(pdb)
| gpl-3.0 | 3,887,408,212,674,063,400 | 27.62069 | 77 | 0.737349 | false |
teamfx/openjfx-9-dev-rt | modules/javafx.web/src/main/native/Tools/QueueStatusServer/handlers/updateworkitems.py | 3 | 3426 | # Copyright (C) 2013 Google Inc. All rights reserved.
# Copyright (C) 2014 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from google.appengine.ext import webapp, db
from google.appengine.ext.webapp import template
from handlers.updatebase import UpdateBase
from loggers.recordpatchevent import RecordPatchEvent
from model.queues import Queue
from model.workitems import WorkItems
from datetime import datetime
class UpdateWorkItems(UpdateBase):
def get(self):
self.response.out.write(template.render("templates/updateworkitems.html", None))
def _parse_work_items_string(self, items_string):
item_strings = items_string.split(" ") if items_string else []
return map(int, item_strings)
def _work_items_from_request(self):
high_priority_items_string = self.request.get("high_priority_work_items")
items_string = self.request.get("work_items")
high_priority_work_items = self._parse_work_items_string(high_priority_items_string)
work_items = self._parse_work_items_string(items_string)
return high_priority_work_items, work_items
def _queue_from_request(self):
queue_name = self.request.get("queue_name")
queue = Queue.queue_with_name(queue_name)
if not queue:
self.response.out.write("\"%s\" is not in queues %s" % (queue_name, Queue.all()))
return None
return queue
def post(self):
queue = self._queue_from_request()
if not queue:
self.response.set_status(500)
return
high_priority_items, items = self._work_items_from_request()
# Add items that are not currently in the work queue. Never remove any items,
        # as that should be done by the queue; the feeder only adds them.
added_items = queue.work_items().add_work_items(high_priority_items, items)
for work_item in added_items:
RecordPatchEvent.added(work_item, queue.name())
| gpl-2.0 | 929,578,126,264,829,600 | 43.493506 | 93 | 0.724168 | false |
ondra-novak/chromium.src | tools/memory_inspector/memory_inspector/core/backends.py | 18 | 5877 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
_backends = {} # Maps a string (backend name) to a |Backend| instance.
def Register(backend):
"""Called by each backend module to register upon initialization."""
assert(isinstance(backend, Backend))
_backends[backend.name] = backend
def ListBackends():
"""Enumerates all the backends."""
return _backends.itervalues()
def ListDevices():
"""Enumerates all the devices from all the registered backends."""
for backend in _backends.itervalues():
for device in backend.EnumerateDevices():
assert(isinstance(device, Device))
yield device
def GetBackend(backend_name):
"""Retrieves a specific backend given its name."""
return _backends.get(backend_name, None)
def GetDevice(backend_name, device_id):
"""Retrieves a specific device given its backend name and device id."""
backend = GetBackend(backend_name)
if not backend:
return None
for device in backend.EnumerateDevices():
if device.id == device_id:
return device
return None
# The classes below model the contract interfaces exposed to the frontends and
# implemented by each concrete backend.
class Backend(object):
"""Base class for backends.
This is the extension point for the OS-specific profiler implementations.
"""
def __init__(self, settings=None):
# Initialize with empty settings if not required by the overriding backend.
self.settings = settings or Settings()
def EnumerateDevices(self):
"""Enumeates the devices discovered and supported by the backend.
Returns:
A sequence of |Device| instances.
"""
raise NotImplementedError()
def ExtractSymbols(self, native_heaps, sym_paths):
"""Performs symbolization. Returns a |symbol.Symbols| from |NativeHeap|s."""
raise NotImplementedError()
@property
def name(self):
"""A unique name which identifies the backend.
Typically this will just return the target OS name, e.g., 'Android'."""
raise NotImplementedError()
class Device(object):
"""Interface contract for devices enumerated by a backend."""
def __init__(self, backend, settings=None):
self.backend = backend
# Initialize with empty settings if not required by the overriding device.
self.settings = settings or Settings()
def Initialize(self):
"""Called before anything else, for initial provisioning."""
raise NotImplementedError()
def IsNativeTracingEnabled(self):
"""Check if the device is ready to capture native allocation traces."""
raise NotImplementedError()
def EnableNativeTracing(self, enabled):
"""Provision the device and make it ready to trace native allocations."""
raise NotImplementedError()
def ListProcesses(self):
"""Returns a sequence of |Process|."""
raise NotImplementedError()
def GetProcess(self, pid):
"""Returns an instance of |Process| or None (if not found)."""
raise NotImplementedError()
def GetStats(self):
"""Returns an instance of |DeviceStats|."""
raise NotImplementedError()
@property
def name(self):
"""Friendly name of the target device (e.g., phone model)."""
raise NotImplementedError()
@property
def id(self):
"""Unique identifier (within the backend) of the device (e.g., S/N)."""
raise NotImplementedError()
class Process(object):
"""Interface contract for each running process."""
def __init__(self, device, pid, name):
assert(isinstance(device, Device))
assert(isinstance(pid, int))
self.device = device
self.pid = pid
self.name = name
def DumpMemoryMaps(self):
"""Returns an instance of |memory_map.Map|."""
raise NotImplementedError()
def DumpNativeHeap(self):
"""Returns an instance of |native_heap.NativeHeap|."""
raise NotImplementedError()
def GetStats(self):
"""Returns an instance of |ProcessStats|."""
raise NotImplementedError()
def __str__(self):
return '[%d] %s' % (self.pid, self.name)
class DeviceStats(object):
"""CPU/Memory stats for a |Device|."""
def __init__(self, uptime, cpu_times, memory_stats):
"""Args:
uptime: uptime in seconds.
cpu_times: array (CPUs) of dicts (cpu times since last call).
e.g., [{'User': 10, 'System': 80, 'Idle': 10}, ... ]
memory_stats: Dictionary of memory stats. e.g., {'Free': 1, 'Cached': 10}
"""
assert(isinstance(cpu_times, list) and isinstance(cpu_times[0], dict))
assert(isinstance(memory_stats, dict))
self.uptime = uptime
self.cpu_times = cpu_times
self.memory_stats = memory_stats
class ProcessStats(object):
"""CPU/Memory stats for a |Process|."""
def __init__(self, threads, run_time, cpu_usage, vm_rss, page_faults):
"""Args:
threads: Number of threads.
run_time: Total process uptime in seconds.
cpu_usage: CPU usage [0-100] since the last GetStats call.
      vm_rss: Resident Set Size (RSS) in kB.
page_faults: Number of VM page faults (hard + soft).
"""
self.threads = threads
self.run_time = run_time
self.cpu_usage = cpu_usage
self.vm_rss = vm_rss
self.page_faults = page_faults
class Settings(object):
"""Models user-definable settings for backends and devices."""
def __init__(self, expected_keys=None):
"""Args:
expected_keys: A dict. (key-name -> description) of expected settings
"""
self.expected_keys = expected_keys or {}
self.values = dict((k, '') for k in self.expected_keys.iterkeys())
def __getitem__(self, key):
assert(key in self.expected_keys), 'Unexpected setting: ' + key
return self.values.get(key)
def __setitem__(self, key, value):
assert(key in self.expected_keys), 'Unexpected setting: ' + key
self.values[key] = value
| bsd-3-clause | 7,049,543,083,455,468,000 | 28.984694 | 80 | 0.679769 | false |
fabrice-lecuyer/QuantLib-SWIG | Python/QuantLib/__init__.py | 2 | 3198 | # -*- coding: iso-8859-1 -*-
"""
Copyright (C) 2000, 2001, 2002, 2003 RiskMap srl
This file is part of QuantLib, a free-software/open-source library
for financial quantitative analysts and developers - http://quantlib.org/
QuantLib is free software: you can redistribute it and/or modify it
under the terms of the QuantLib license. You should have received a
copy of the license along with this program; if not, please email
<[email protected]>. The license is also available online at
<http://quantlib.org/license.shtml>.
This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the license for more details.
"""
import sys
if sys.version_info.major >= 3:
from .QuantLib import *
from .QuantLib import _QuantLib
else:
from QuantLib import *
from QuantLib import _QuantLib
del sys
__author__ = 'The QuantLib Group'
__email__ = '[email protected]'
if hasattr(_QuantLib,'__version__'):
__version__ = _QuantLib.__version__
elif hasattr(_QuantLib.cvar,'__version__'):
__version__ = _QuantLib.cvar.__version__
else:
print('Could not find __version__ attribute')
if hasattr(_QuantLib,'__hexversion__'):
__hexversion__ = _QuantLib.__hexversion__
elif hasattr(_QuantLib.cvar,'__hexversion__'):
__hexversion__ = _QuantLib.cvar.__hexversion__
else:
print('Could not find __hexversion__ attribute')
__license__ = """
COPYRIGHT AND PERMISSION NOTICE
Copyright (c) 2002, 2003 Ferdinando Ametrano
Copyright (c) 2001, 2002, 2003 Nicolas Di Césaré
Copyright (c) 2001, 2002, 2003 Sadruddin Rejeb
Copyright (c) 2000, 2001, 2002, 2003 RiskMap srl
All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, and/or sell copies of the
Software, and to permit persons to whom the Software is furnished to do so,
provided that the above copyright notice(s) and this permission notice appear
in all copies of the Software and that both the above copyright notice(s) and
this permission notice appear in supporting documentation.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT OF THIRD PARTY RIGHTS.
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR HOLDERS INCLUDED IN THIS NOTICE BE
LIABLE FOR ANY CLAIM, OR ANY SPECIAL INDIRECT OR CONSEQUENTIAL DAMAGES, OR
ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER
IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
Except as contained in this notice, the name of a copyright holder shall not
be used in advertising or otherwise to promote the sale, use or other
dealings in this Software without prior written authorization of the
copyright holder.
"""
| bsd-3-clause | -4,919,913,352,782,863,000 | 41.078947 | 78 | 0.758599 | false |
prarthitm/edxplatform | lms/djangoapps/grades/new/subsection_grade.py | 1 | 13620 | """
SubsectionGrade Class
"""
from collections import OrderedDict
from lazy import lazy
from logging import getLogger
from courseware.model_data import ScoresClient
from lms.djangoapps.grades.scores import get_score, possibly_scored
from lms.djangoapps.grades.models import BlockRecord, PersistentSubsectionGrade
from lms.djangoapps.grades.config.models import PersistentGradesEnabledFlag
from openedx.core.lib.grade_utils import is_score_higher
from student.models import anonymous_id_for_user
from submissions import api as submissions_api
from xmodule import block_metadata_utils, graders
from xmodule.graders import AggregatedScore
log = getLogger(__name__)
class SubsectionGrade(object):
"""
Class for Subsection Grades.
"""
def __init__(self, subsection):
self.location = subsection.location
self.display_name = block_metadata_utils.display_name_with_default_escaped(subsection)
self.url_name = block_metadata_utils.url_name_for_block(subsection)
self.format = getattr(subsection, 'format', '')
self.due = getattr(subsection, 'due', None)
self.graded = getattr(subsection, 'graded', False)
self.course_version = getattr(subsection, 'course_version', None)
self.subtree_edited_timestamp = getattr(subsection, 'subtree_edited_on', None)
self.graded_total = None # aggregated grade for all graded problems
self.all_total = None # aggregated grade for all problems, regardless of whether they are graded
self.locations_to_scores = OrderedDict() # dict of problem locations to ProblemScore
@property
def scores(self):
"""
List of all problem scores in the subsection.
"""
return self.locations_to_scores.values()
@property
def attempted(self):
"""
Returns whether any problem in this subsection
was attempted by the student.
"""
assert self.all_total is not None, (
"SubsectionGrade not fully populated yet. Call init_from_structure or init_from_model "
"before use."
)
return self.all_total.attempted
def init_from_structure(self, student, course_structure, submissions_scores, csm_scores):
"""
Compute the grade of this subsection for the given student and course.
"""
for descendant_key in course_structure.post_order_traversal(
filter_func=possibly_scored,
start_node=self.location,
):
self._compute_block_score(descendant_key, course_structure, submissions_scores, csm_scores)
self.all_total, self.graded_total = graders.aggregate_scores(self.scores)
self._log_event(log.debug, u"init_from_structure", student)
return self
def init_from_model(self, student, model, course_structure, submissions_scores, csm_scores):
"""
Load the subsection grade from the persisted model.
"""
for block in model.visible_blocks.blocks:
self._compute_block_score(block.locator, course_structure, submissions_scores, csm_scores, block)
self.graded_total = AggregatedScore(
tw_earned=model.earned_graded,
tw_possible=model.possible_graded,
graded=True,
attempted=model.first_attempted is not None,
)
self.all_total = AggregatedScore(
tw_earned=model.earned_all,
tw_possible=model.possible_all,
graded=False,
attempted=model.first_attempted is not None,
)
self._log_event(log.debug, u"init_from_model", student)
return self
@classmethod
def bulk_create_models(cls, student, subsection_grades, course_key):
"""
Saves the subsection grade in a persisted model.
"""
return PersistentSubsectionGrade.bulk_create_grades(
[subsection_grade._persisted_model_params(student) for subsection_grade in subsection_grades], # pylint: disable=protected-access
course_key,
)
def create_model(self, student):
"""
Saves the subsection grade in a persisted model.
"""
self._log_event(log.debug, u"create_model", student)
return PersistentSubsectionGrade.create_grade(**self._persisted_model_params(student))
def update_or_create_model(self, student):
"""
Saves or updates the subsection grade in a persisted model.
"""
self._log_event(log.debug, u"update_or_create_model", student)
return PersistentSubsectionGrade.update_or_create_grade(**self._persisted_model_params(student))
def _compute_block_score(
self,
block_key,
course_structure,
submissions_scores,
csm_scores,
persisted_block=None,
):
"""
        Compute score for the given block. If persisted_block
        is provided, it is used for possible and weight.
"""
try:
block = course_structure[block_key]
except KeyError:
# It's possible that the user's access to that
# block has changed since the subsection grade
# was last persisted.
pass
else:
if getattr(block, 'has_score', False):
problem_score = get_score(
submissions_scores,
csm_scores,
persisted_block,
block,
)
if problem_score:
self.locations_to_scores[block_key] = problem_score
def _persisted_model_params(self, student):
"""
Returns the parameters for creating/updating the
persisted model for this subsection grade.
"""
return dict(
user_id=student.id,
usage_key=self.location,
course_version=self.course_version,
subtree_edited_timestamp=self.subtree_edited_timestamp,
earned_all=self.all_total.earned,
possible_all=self.all_total.possible,
earned_graded=self.graded_total.earned,
possible_graded=self.graded_total.possible,
visible_blocks=self._get_visible_blocks,
attempted=self.attempted
)
@property
def _get_visible_blocks(self):
"""
Returns the list of visible blocks.
"""
return [
BlockRecord(location, score.weight, score.raw_possible, score.graded)
for location, score in
self.locations_to_scores.iteritems()
]
def _log_event(self, log_func, log_statement, student):
"""
Logs the given statement, for this instance.
"""
log_func(
u"Persistent Grades: SG.{}, subsection: {}, course: {}, "
u"version: {}, edit: {}, user: {},"
u"total: {}/{}, graded: {}/{}".format(
log_statement,
self.location,
self.location.course_key,
self.course_version,
self.subtree_edited_timestamp,
student.id,
self.all_total.earned,
self.all_total.possible,
self.graded_total.earned,
self.graded_total.possible,
)
)
class SubsectionGradeFactory(object):
"""
Factory for Subsection Grades.
"""
def __init__(self, student, course, course_structure):
self.student = student
self.course = course
self.course_structure = course_structure
self._cached_subsection_grades = None
self._unsaved_subsection_grades = []
def create(self, subsection, read_only=False):
"""
Returns the SubsectionGrade object for the student and subsection.
If read_only is True, doesn't save any updates to the grades.
"""
self._log_event(
log.debug, u"create, read_only: {0}, subsection: {1}".format(read_only, subsection.location), subsection,
)
subsection_grade = self._get_bulk_cached_grade(subsection)
if not subsection_grade:
subsection_grade = SubsectionGrade(subsection).init_from_structure(
self.student, self.course_structure, self._submissions_scores, self._csm_scores,
)
if PersistentGradesEnabledFlag.feature_enabled(self.course.id):
if read_only:
self._unsaved_subsection_grades.append(subsection_grade)
else:
grade_model = subsection_grade.create_model(self.student)
self._update_saved_subsection_grade(subsection.location, grade_model)
return subsection_grade
def bulk_create_unsaved(self):
"""
Bulk creates all the unsaved subsection_grades to this point.
"""
SubsectionGrade.bulk_create_models(self.student, self._unsaved_subsection_grades, self.course.id)
self._unsaved_subsection_grades = []
def update(self, subsection, only_if_higher=None):
"""
Updates the SubsectionGrade object for the student and subsection.
"""
# Save ourselves the extra queries if the course does not persist
# subsection grades.
if not PersistentGradesEnabledFlag.feature_enabled(self.course.id):
return
self._log_event(log.warning, u"update, subsection: {}".format(subsection.location), subsection)
calculated_grade = SubsectionGrade(subsection).init_from_structure(
self.student, self.course_structure, self._submissions_scores, self._csm_scores,
)
if only_if_higher:
try:
grade_model = PersistentSubsectionGrade.read_grade(self.student.id, subsection.location)
except PersistentSubsectionGrade.DoesNotExist:
pass
else:
orig_subsection_grade = SubsectionGrade(subsection).init_from_model(
self.student, grade_model, self.course_structure, self._submissions_scores, self._csm_scores,
)
if not is_score_higher(
orig_subsection_grade.graded_total.earned,
orig_subsection_grade.graded_total.possible,
calculated_grade.graded_total.earned,
calculated_grade.graded_total.possible,
):
return orig_subsection_grade
grade_model = calculated_grade.update_or_create_model(self.student)
self._update_saved_subsection_grade(subsection.location, grade_model)
return calculated_grade
@lazy
def _csm_scores(self):
"""
Lazily queries and returns all the scores stored in the user
state (in CSM) for the course, while caching the result.
"""
scorable_locations = [block_key for block_key in self.course_structure if possibly_scored(block_key)]
return ScoresClient.create_for_locations(self.course.id, self.student.id, scorable_locations)
@lazy
def _submissions_scores(self):
"""
Lazily queries and returns the scores stored by the
Submissions API for the course, while caching the result.
"""
anonymous_user_id = anonymous_id_for_user(self.student, self.course.id)
return submissions_api.get_scores(unicode(self.course.id), anonymous_user_id)
def _get_bulk_cached_grade(self, subsection):
"""
Returns the student's SubsectionGrade for the subsection,
while caching the results of a bulk retrieval for the
course, for future access of other subsections.
Returns None if not found.
"""
if not PersistentGradesEnabledFlag.feature_enabled(self.course.id):
return
saved_subsection_grades = self._get_bulk_cached_subsection_grades()
subsection_grade = saved_subsection_grades.get(subsection.location)
if subsection_grade:
return SubsectionGrade(subsection).init_from_model(
self.student, subsection_grade, self.course_structure, self._submissions_scores, self._csm_scores,
)
def _get_bulk_cached_subsection_grades(self):
"""
Returns and caches (for future access) the results of
a bulk retrieval of all subsection grades in the course.
"""
if self._cached_subsection_grades is None:
self._cached_subsection_grades = {
record.full_usage_key: record
for record in PersistentSubsectionGrade.bulk_read_grades(self.student.id, self.course.id)
}
return self._cached_subsection_grades
def _update_saved_subsection_grade(self, subsection_usage_key, subsection_model):
"""
Updates (or adds) the subsection grade for the given
subsection usage key in the local cache, iff the cache
is populated.
"""
if self._cached_subsection_grades is not None:
self._cached_subsection_grades[subsection_usage_key] = subsection_model
def _log_event(self, log_func, log_statement, subsection):
"""
Logs the given statement, for this instance.
"""
log_func(u"Persistent Grades: SGF.{}, course: {}, version: {}, edit: {}, user: {}".format(
log_statement,
self.course.id,
getattr(subsection, 'course_version', None),
getattr(subsection, 'subtree_edited_on', None),
self.student.id,
))
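# --- Illustrative sketch (editor's addition, not part of the edx-platform source) ---
# How a caller is expected to drive SubsectionGradeFactory: build one factory per
# (student, course, course_structure) triple, then request per-subsection grades.
# The student/course/course_structure/subsection arguments are assumed to be the
# same objects used throughout this module; no new API is introduced here.
def _example_grade_subsections(student, course, course_structure, subsections):
    """Return {subsection location: (earned, possible)} using the factory above."""
    factory = SubsectionGradeFactory(student, course, course_structure)
    results = {}
    for subsection in subsections:
        grade = factory.create(subsection, read_only=True)
        results[grade.location] = (grade.graded_total.earned, grade.graded_total.possible)
    factory.bulk_create_unsaved()  # persist everything created with read_only=True
    return results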
| agpl-3.0 | 1,679,069,635,527,470,300 | 38.364162 | 142 | 0.617254 | false |
tryexceptpass/sofi | test/alert_test.py | 2 | 1497 | from sofi.ui import Alert
def test_basic():
assert(str(Alert()) == "<div class=\"alert\" role=\"alert\"></div>")
def test_text():
assert(str(Alert("This is alert")) == "<div class=\"alert\" role=\"alert\">This is alert</div>")
def test_success_severity():
assert(str(Alert("This is alert", severity='success')) == "<div class=\"alert alert-success\" role=\"alert\">This is alert</div>")
def test_danger_severity():
assert(str(Alert("This is alert", severity='danger')) == "<div class=\"alert alert-danger\" role=\"alert\">This is alert</div>")
def test_info_severity():
assert(str(Alert("This is alert", severity='info')) == "<div class=\"alert alert-info\" role=\"alert\">This is alert</div>")
def test_warning_severity():
assert(str(Alert("This is alert", severity='warning')) == "<div class=\"alert alert-warning\" role=\"alert\">This is alert</div>")
def test_close_button():
assert(str(Alert("This is alert", closebtn=True)) == "<div class=\"alert alert-dismissible fade in\" role=\"alert\"><button type=\"button\" class=\"close\" data-dismiss=\"alert\" aria-label=\"Close\"><span aria-hidden=\"true\">×</span></button>This is alert</div>")
def test_custom_class_ident_style_and_attrs():
assert(str(Alert(text="This is alert", cl='abclass', ident='123', style="font-size:0.9em;", attrs={"data-test": 'abc'}))
== "<div id=\"123\" class=\"alert abclass\" style=\"font-size:0.9em;\" data-test=\"abc\" role=\"alert\">This is alert</div>")
| mit | 998,005,765,520,920,600 | 56.576923 | 275 | 0.644623 | false |
JrtPec/opengrid | opengrid/recipes/electricity_standby.py | 2 | 10486 |
# coding: utf-8
# In[1]:
# opengrid imports
from opengrid.library import misc, houseprint, caching
from opengrid.library.analysis import DailyAgg
from opengrid import config
c=config.Config()
# other imports
import pandas as pd
import charts
import numpy as np
import os
import datetime as dt
import pytz
BXL = pytz.timezone('Europe/Brussels')
# configuration for the plots
DEV = c.get('env', 'type') == 'dev' # DEV is True if we are in development environment, False if on the droplet
print("Environment configured for development: {}".format(DEV))
if not DEV:
# production environment: don't try to display plots
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
from matplotlib.dates import MinuteLocator, HourLocator, DateFormatter, AutoDateLocator, num2date
if DEV:
if c.get('env', 'plots') == 'inline':
get_ipython().magic(u'matplotlib inline')
else:
get_ipython().magic(u'matplotlib qt')
else:
pass # don't try to render plots
plt.rcParams['figure.figsize'] = 12,8
# In[2]:
# Load houseprint from cache if possible, otherwise build it from source
try:
hp_filename = os.path.join(c.get('data', 'folder'), 'hp_anonymous.pkl')
hp = houseprint.load_houseprint_from_file(hp_filename)
print("Houseprint loaded from {}".format(hp_filename))
except Exception as e:
print(e)
print("Because of this error we try to build the houseprint from source")
hp = houseprint.Houseprint()
sensors = hp.get_sensors(sensortype='electricity') # sensor objects
# Remove some sensors
exclude = [
'565de0a7dc64d8370aa321491217b85f' # 3E
]
solar = [x.key for x in hp.search_sensors(type='electricity', system='solar')]
exclude += solar
# Removing elements from a list while iterating over it skips entries; rebuild the list instead.
sensors = [s for s in sensors if s.key not in exclude]
hp.init_tmpo()
# The first time, this will take a very looong time to get all the detailed data for building the cache
# Afterwards, this is quick
starttime = dt.time(0, tzinfo=BXL)
endtime = dt.time(5, tzinfo=BXL)
caching.cache_results(hp=hp, sensors=sensors, resultname='elec_min_night_0-5', AnalysisClass=DailyAgg,
agg='min', starttime=starttime, endtime=endtime)
caching.cache_results(hp=hp, sensors=sensors, resultname='elec_max_night_0-5', AnalysisClass=DailyAgg,
agg='max', starttime=starttime, endtime=endtime)
# In[ ]:
cache_min = caching.Cache(variable='elec_min_night_0-5')
cache_max = caching.Cache(variable='elec_max_night_0-5')
dfdaymin = cache_min.get(sensors=sensors)
dfdaymax = cache_max.get(sensors=sensors)
dfdaymin.info()
# The next plot shows that some periods are missing. Due to the cumulative nature of the electricity counter, we still have the total consumption, but it is spread out over the entire period. So we don't know the standby power during these days, and we have to remove those days.
# In[ ]:
if DEV:
sensor = hp.search_sensors(key='3aa4')[0]
df = sensor.get_data(head=pd.Timestamp('20151117'), tail=pd.Timestamp('20160104'))
charts.plot(df, stock=True, show='inline')
# In[ ]:
def is_submeter(sensor, dfdaymin, dfdaymax):
"""
Return True if this sensor is a sub-meter
sensor = sensor object
"""
other_sensors = sensor.device.get_sensors(sensortype='electricity')
other_sensors.remove(sensor)
if len(other_sensors) == 0:
print("\n{} - {}: no other sensors, this must be main.".format(sensor.device.key, sensor.description))
return False
else:
print("\n{} - {}: comparing with:".format(sensor.device.key, sensor.description))
for o in other_sensors:
# we only check the values for last day
print("* {}:".format(o.description))
sensormin = dfdaymin.ix[-1,sensor.key]
sensormax = dfdaymax.ix[-1,sensor.key]
try:
othermin = dfdaymin.ix[-1].dropna()[o.key]
othermax = dfdaymax.ix[-1].dropna()[o.key]
except:
print(" No data found for sensor {}".format(o.description))
pass
else:
if (sensormin <= othermin) and (sensormax <= othermax):
print(" {} has lower daily min AND max, so it is a submeter.".format(sensor.description))
return True
else:
print(" {} has higher daily min and/or max, we look further.".format(sensor.description))
else:
print("All other sensors have no data OR lower daily min and max. {} must be main.".format(sensor.description))
return False
# In[ ]:
# The function is_submeter makes one obvious error: see results for FL03001566
for col in dfdaymin:
is_submeter(hp.find_sensor(col), dfdaymin, dfdaymax)
# In[ ]:
# Clean out the data:
# First remove sensors that are submeters
for col in dfdaymin:
if is_submeter(hp.find_sensor(col), dfdaymin, dfdaymax):
print("\n!!Removing submeter {}".format(col))
dfdaymin = dfdaymin.drop(col, axis=1)
# Now remove days with too low values to be realistic
dfdaymin[dfdaymin < 10] = np.nan
# Now remove days where the minimum=maximum (within 1 Watt difference)
dfdaymin[(dfdaymax - dfdaymin) < 1] = np.nan
# In[ ]:
dfdaymin.info()
# In[ ]:
if DEV:
charts.plot(dfdaymin, stock=True, show='inline')
# In[ ]:
standby_statistics = dfdaymin.T.describe(percentiles=[0.1,0.5,0.9]).T
# In[ ]:
if DEV:
charts.plot(standby_statistics[['10%', '50%', '90%']], stock=True, show='inline')
# In[ ]:
# Get detailed profiles for the last day
now = pd.Timestamp('now', tz=BXL)
dt_start_of_last_day = pd.Timestamp(dfdaymin.index[-1].date(), tz=BXL)
dt_end_of_last_day = dt_start_of_last_day + pd.Timedelta(hours=endtime.hour, minutes=endtime.minute)
sensors = map(hp.find_sensor, dfdaymin.columns)
df_details = hp.get_data(sensors = sensors, head=dt_start_of_last_day, tail=dt_end_of_last_day)
df_details.fillna(method='ffill', inplace=True)
df_details.fillna(method='bfill', inplace=True)
# ### Boxplot approach. Possible for a period of maximum +/- 2 weeks.
# In[ ]:
# choose a period
look_back_days = 10
dt_start_of_period = dt_start_of_last_day - pd.Timedelta(days=look_back_days-1)
dfdaymin_period = dfdaymin.ix[dt_start_of_period:].dropna(axis=1, how='all')
# In[ ]:
box = [dfdaymin_period.loc[i,:].dropna().values for i in dfdaymin_period.index]
for sensor in dfdaymin_period.columns:
fig=plt.figure(figsize=(10,5))
ax1=plt.subplot(121)
ax1.boxplot(box, positions=range(len(box)), notch=False)
ax1.plot(range(len(box)), dfdaymin_period[sensor], 'rD', ms=10, label='Standby power')
xticks = [x.strftime(format='%d/%m') for x in dfdaymin_period.index]
plt.xticks(range(len(box)), xticks, rotation='vertical')
plt.title(hp.find_sensor(sensor).device.key + ' - ' + hp.find_sensor(sensor).description)
ax1.grid()
ax1.set_ylabel('Watt')
plt.legend(numpoints=1, frameon=False)
ax2=plt.subplot(122)
try:
ax2.plot_date(df_details[sensor].index, df_details[sensor].values, 'b-', label='Last night')
ax2.xaxis_date(tz=BXL) #Put timeseries plot in local time
# rotate the labels
plt.xticks(rotation='vertical')
ax2.set_ylabel('Watt')
ax2.set_xlabel('Local time (BXL)')
ax2.grid()
xax = ax2.get_xaxis() # get the x-axis
xax.set_major_locator(HourLocator())
xax.set_minor_locator(MinuteLocator(30))
        adf = xax.get_major_formatter() # the auto-formatter
adf.scaled[1./24] = '%H:%M' # set the < 1d scale to H:M
adf.scaled[1.0] = '%Y-%m-%d' # set the > 1d < 1m scale to Y-m-d
adf.scaled[30.] = '%Y-%m' # set the > 1m < 1Y scale to Y-m
adf.scaled[365.] = '%Y' # set the > 1y scale to Y
plt.legend(loc='upper right', frameon=False)
plt.tight_layout()
except Exception as e:
print(e)
else:
plt.savefig(os.path.join(c.get('data', 'folder'), 'figures', 'standby_horizontal_'+sensor+'.png'), dpi=100)
pass
if not DEV:
plt.close()
# ### Percentile approach. Useful for longer time periods, but tweaking of graph still needed
# In[ ]:
# choose a period
look_back_days = 40
dt_start_of_period = dt_start_of_last_day - pd.Timedelta(days=look_back_days-1)
dfdaymin_period = dfdaymin.ix[dt_start_of_period:].dropna(axis=1, how='all')
df = dfdaymin_period.join(standby_statistics[['10%', '50%', '90%']], how='left')
# In[ ]:
for sensor in dfdaymin_period.columns:
plt.figure(figsize=(10,8))
ax1=plt.subplot(211)
ax1.plot_date(df.index, df[u'10%'], '-', lw=2, color='g', label=u'10% percentile')
ax1.plot_date(df.index, df[u'50%'], '-', lw=2, color='orange', label=u'50% percentile')
ax1.plot_date(df.index, df[u'90%'], '-', lw=2, color='r', label=u'90% percentile')
ax1.plot_date(df.index, df[sensor], 'rD', ms=7, label='Your standby power')
ax1.legend()
    locs, labels = plt.xticks()
xticks = [x.strftime(format='%d/%m') for x in num2date(locs)]
plt.xticks(locs, xticks, rotation='vertical')
plt.title(hp.find_sensor(sensor).device.key + ' - ' + sensor)
ax1.grid()
ax1.set_ylabel('Watt')
ax2=plt.subplot(212)
try:
ax2.plot_date(df_details[sensor].index, df_details[sensor].values, 'b-', label='Detailed consumption of last night')
ax2.xaxis_date(tz=BXL) #Put timeseries plot in local time
# rotate the labels
plt.xticks(rotation='vertical')
ax2.set_ylabel('Watt')
ax2.set_xlabel('Local time (BXL)')
ax2.grid()
xax = ax2.get_xaxis() # get the x-axis
xax.set_major_locator(HourLocator())
xax.set_minor_locator(MinuteLocator(30))
        adf = xax.get_major_formatter() # the auto-formatter
adf.scaled[1./24] = '%H:%M' # set the < 1d scale to H:M
adf.scaled[1.0] = '%Y-%m-%d' # set the > 1d < 1m scale to Y-m-d
adf.scaled[30.] = '%Y-%m' # set the > 1m < 1Y scale to Y-m
adf.scaled[365.] = '%Y' # set the > 1y scale to Y
plt.legend(loc='upper right', frameon=False)
plt.tight_layout()
except Exception as e:
print(e)
else:
plt.savefig(os.path.join(c.get('data', 'folder'), 'figures', 'standby_vertical_'+sensor+'.png'), dpi=100)
pass
if not DEV:
plt.close()
# In[ ]:
| apache-2.0 | -5,636,995,876,353,852,000 | 32.288889 | 287 | 0.637421 | false |
captainpete/rethinkdb | drivers/python/rethinkdb/_dump.py | 37 | 6879 | #!/usr/bin/env python
from __future__ import print_function
import sys, os, datetime, time, shutil, tempfile, subprocess, os.path
from optparse import OptionParser
from ._backup import *
info = "'rethinkdb dump' creates an archive of data from a RethinkDB cluster"
usage = "rethinkdb dump [-c HOST:PORT] [-a AUTH_KEY] [-f FILE] [--clients NUM] [-e (DB | DB.TABLE)]..."
def print_dump_help():
print(info)
print(usage)
print("")
print(" -h [ --help ] print this help")
print(" -c [ --connect ] HOST:PORT host and client port of a rethinkdb node to connect")
print(" to (defaults to localhost:28015)")
print(" -a [ --auth ] AUTH_KEY authorization key for rethinkdb clients")
print(" -f [ --file ] FILE file to write archive to (defaults to")
print(" rethinkdb_dump_DATE_TIME.tar.gz)")
print(" -e [ --export ] (DB | DB.TABLE) limit dump to the given database or table (may")
print(" be specified multiple times)")
print(" --clients NUM_CLIENTS number of tables to export simultaneously (defaults")
print(" to 3)")
print(" --temp-dir DIRECTORY the directory to use for intermediary results")
print("")
print("EXAMPLES:")
print("rethinkdb dump -c mnemosyne:39500")
print(" Archive all data from a cluster running on host 'mnemosyne' with a client port at 39500.")
print("")
print("rethinkdb dump -e test -f rdb_dump.tar.gz")
print(" Archive only the 'test' database from a local cluster into a named file.")
print("")
print("rethinkdb dump -c hades -e test.subscribers -a hunter2")
print(" Archive a specific table from a cluster running on host 'hades' which requires authorization.")
def parse_options():
parser = OptionParser(add_help_option=False, usage=usage)
parser.add_option("-c", "--connect", dest="host", metavar="host:port", default="localhost:28015", type="string")
parser.add_option("-a", "--auth", dest="auth_key", metavar="key", default="", type="string")
parser.add_option("-f", "--file", dest="out_file", metavar="file", default=None, type="string")
parser.add_option("-e", "--export", dest="tables", metavar="(db | db.table)", default=[], action="append", type="string")
parser.add_option("--temp-dir", dest="temp_dir", metavar="directory", default=None, type="string")
parser.add_option("--clients", dest="clients", metavar="NUM", default=3, type="int")
parser.add_option("--debug", dest="debug", default=False, action="store_true")
parser.add_option("-h", "--help", dest="help", default=False, action="store_true")
(options, args) = parser.parse_args()
# Check validity of arguments
if len(args) != 0:
raise RuntimeError("Error: No positional arguments supported. Unrecognized option '%s'" % args[0])
if options.help:
print_dump_help()
exit(0)
res = {}
# Verify valid host:port --connect option
(res["host"], res["port"]) = parse_connect_option(options.host)
# Verify valid output file
res["temp_filename"] = "rethinkdb_dump_%s" % datetime.datetime.today().strftime("%Y-%m-%dT%H:%M:%S")
if options.out_file is None:
res["out_file"] = os.path.abspath("./" + res["temp_filename"] + ".tar.gz")
else:
res["out_file"] = os.path.abspath(options.out_file)
if os.path.exists(res["out_file"]):
raise RuntimeError("Error: Output file already exists: %s" % res["out_file"])
# Verify valid client count
if options.clients < 1:
raise RuntimeError("Error: invalid number of clients (%d), must be greater than zero" % options.clients)
res["clients"] = options.clients
# Make sure the temporary directory exists and is accessible
res["temp_dir"] = options.temp_dir
if res["temp_dir"] is not None:
if not os.path.isdir(res["temp_dir"]):
raise RuntimeError("Error: Temporary directory doesn't exist or is not a directory: %s" % res["temp_dir"])
if not os.access(res["temp_dir"], os.W_OK):
raise RuntimeError("Error: Temporary directory inaccessible: %s" % res["temp_dir"])
res["tables"] = options.tables
res["auth_key"] = options.auth_key
res["debug"] = options.debug
return res
def do_export(temp_dir, options):
print("Exporting to directory...")
export_args = ["rethinkdb-export"]
export_args.extend(["--connect", "%s:%s" % (options["host"], options["port"])])
export_args.extend(["--directory", os.path.join(temp_dir, options["temp_filename"])])
export_args.extend(["--auth", options["auth_key"]])
export_args.extend(["--clients", str(options["clients"])])
for table in options["tables"]:
export_args.extend(["--export", table])
if options["debug"]:
export_args.extend(["--debug"])
res = subprocess.call(export_args)
if res != 0:
raise RuntimeError("Error: rethinkdb-export failed")
# 'Done' message will be printed by the export script
def do_zip(temp_dir, options):
print("Zipping export directory...")
start_time = time.time()
tar_args = ["tar", "czf", options["out_file"]]
if sys.platform.startswith("linux"):
# Tar on OSX does not support this flag, which may be useful with low free space
tar_args.append("--remove-files")
tar_args.extend(["-C", temp_dir])
tar_args.append(options["temp_filename"])
res = subprocess.call(tar_args)
if res != 0:
raise RuntimeError("Error: tar of export directory failed")
print(" Done (%d seconds)" % (time.time() - start_time))
def run_rethinkdb_export(options):
# Create a temporary directory to store the intermediary results
temp_dir = tempfile.mkdtemp(dir=options["temp_dir"])
res = -1
# Print a warning about the capabilities of dump, so no one is confused (hopefully)
print("NOTE: 'rethinkdb-dump' saves data and secondary indexes, but does *not* save")
print(" cluster metadata. You will need to recreate your cluster setup yourself after ")
print(" you run 'rethinkdb-restore'.")
try:
do_export(temp_dir, options)
do_zip(temp_dir, options)
except KeyboardInterrupt:
time.sleep(0.2)
raise RuntimeError("Interrupted")
finally:
shutil.rmtree(temp_dir)
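# --- Illustrative sketch (editor's addition) ---
# parse_options() normally builds this dict from the command line; the helper
# below only documents the shape run_rethinkdb_export() expects for callers that
# drive it programmatically.  All values are example placeholders.
def _example_options():
    return {
        "host": "localhost",
        "port": "28015",
        "auth_key": "",
        "clients": 3,
        "tables": [],  # empty means "dump everything"
        "temp_dir": None,
        "temp_filename": "rethinkdb_dump_example",
        "out_file": os.path.abspath("./rethinkdb_dump_example.tar.gz"),
        "debug": False,
    }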
def main():
try:
options = parse_options()
except RuntimeError as ex:
print("Usage: %s" % usage, file=sys.stderr)
print(ex, file=sys.stderr)
return 1
try:
start_time = time.time()
run_rethinkdb_export(options)
except RuntimeError as ex:
print(ex, file=sys.stderr)
return 1
return 0
if __name__ == "__main__":
exit(main())
| agpl-3.0 | -542,051,475,482,611,500 | 40.945122 | 125 | 0.621166 | false |
sergiomb2/gdesklets | config/ConfigString.py | 2 | 1536 | from ConfigWidget import ConfigWidget
from utils.datatypes import *
import gtk
class ConfigString(ConfigWidget):
def __init__(self, name, getter, setter, caller):
ConfigWidget.__init__(self, name, getter, setter, caller)
self._register_property("password", TYPE_BOOL, self._setp_password,
self._getp, False,
doc = "Whether to obfuscate input")
self._register_property("value", TYPE_STRING, self._setp_value,
self._getp, "", doc = "Value")
def get_widgets(self):
self.__label = gtk.Label("")
self.__label.show()
self.__entry = gtk.Entry()
self.__entry.set_width_chars(15)
self.__entry.show()
align = gtk.Alignment(0.0, 0.5, 0.0, 0.0)
align.show()
align.add(self.__label)
self.__entry.set_invisible_char(unichr(0x2022))
self.__entry.connect("focus-out-event", self.__on_change)
return (align, self.__entry)
def __on_change(self, src, event):
value = src.get_text()
self._set_config(value)
def _set_enabled(self, value): self.__entry.set_sensitive(value)
def _set_label(self, value): self.__label.set_text(value)
def _setp_password(self, key, value):
self.__entry.set_visibility(not value)
self._setp(key, value)
def _setp_value(self, key, value):
self.__entry.set_text(value)
self._set_config(value)
self._setp(key, value)
| gpl-2.0 | 591,097,896,653,488,300 | 25.482759 | 75 | 0.568359 | false |
jreback/pandas | pandas/core/indexes/interval.py | 1 | 41420 | """ define the IntervalIndex """
from functools import wraps
from operator import le, lt
import textwrap
from typing import TYPE_CHECKING, Any, List, Optional, Tuple, Union, cast
import numpy as np
from pandas._config import get_option
from pandas._libs import lib
from pandas._libs.interval import Interval, IntervalMixin, IntervalTree
from pandas._libs.tslibs import BaseOffset, Timedelta, Timestamp, to_offset
from pandas._typing import DtypeObj, Label
from pandas.errors import InvalidIndexError
from pandas.util._decorators import Appender, cache_readonly
from pandas.util._exceptions import rewrite_exception
from pandas.core.dtypes.cast import (
find_common_type,
infer_dtype_from_scalar,
maybe_box_datetimelike,
maybe_downcast_numeric,
)
from pandas.core.dtypes.common import (
ensure_platform_int,
is_categorical_dtype,
is_datetime64tz_dtype,
is_datetime_or_timedelta_dtype,
is_dtype_equal,
is_float,
is_float_dtype,
is_integer,
is_integer_dtype,
is_interval_dtype,
is_list_like,
is_number,
is_object_dtype,
is_scalar,
)
from pandas.core.dtypes.dtypes import IntervalDtype
from pandas.core.algorithms import take_1d, unique
from pandas.core.arrays.interval import IntervalArray, _interval_shared_docs
import pandas.core.common as com
from pandas.core.indexers import is_valid_positional_slice
import pandas.core.indexes.base as ibase
from pandas.core.indexes.base import (
Index,
_index_shared_docs,
default_pprint,
ensure_index,
maybe_extract_name,
unpack_nested_dtype,
)
from pandas.core.indexes.datetimes import DatetimeIndex, date_range
from pandas.core.indexes.extension import ExtensionIndex, inherit_names
from pandas.core.indexes.multi import MultiIndex
from pandas.core.indexes.timedeltas import TimedeltaIndex, timedelta_range
from pandas.core.ops import get_op_result_name
if TYPE_CHECKING:
from pandas import CategoricalIndex
_index_doc_kwargs = dict(ibase._index_doc_kwargs)
_index_doc_kwargs.update(
{
"klass": "IntervalIndex",
"qualname": "IntervalIndex",
"target_klass": "IntervalIndex or list of Intervals",
"name": textwrap.dedent(
"""\
name : object, optional
Name to be stored in the index.
"""
),
}
)
def _get_next_label(label):
dtype = getattr(label, "dtype", type(label))
if isinstance(label, (Timestamp, Timedelta)):
dtype = "datetime64"
if is_datetime_or_timedelta_dtype(dtype) or is_datetime64tz_dtype(dtype):
return label + np.timedelta64(1, "ns")
elif is_integer_dtype(dtype):
return label + 1
elif is_float_dtype(dtype):
return np.nextafter(label, np.infty)
else:
raise TypeError(f"cannot determine next label for type {repr(type(label))}")
def _get_prev_label(label):
dtype = getattr(label, "dtype", type(label))
if isinstance(label, (Timestamp, Timedelta)):
dtype = "datetime64"
if is_datetime_or_timedelta_dtype(dtype) or is_datetime64tz_dtype(dtype):
return label - np.timedelta64(1, "ns")
elif is_integer_dtype(dtype):
return label - 1
elif is_float_dtype(dtype):
return np.nextafter(label, -np.infty)
else:
raise TypeError(f"cannot determine next label for type {repr(type(label))}")
def _new_IntervalIndex(cls, d):
"""
This is called upon unpickling, rather than the default which doesn't have
arguments and breaks __new__.
"""
return cls.from_arrays(**d)
def setop_check(method):
"""
This is called to decorate the set operations of IntervalIndex
to perform the type check in advance.
"""
op_name = method.__name__
@wraps(method)
def wrapped(self, other, sort=False):
self._validate_sort_keyword(sort)
self._assert_can_do_setop(other)
other, _ = self._convert_can_do_setop(other)
if not isinstance(other, IntervalIndex):
result = getattr(self.astype(object), op_name)(other)
if op_name in ("difference",):
result = result.astype(self.dtype)
return result
return method(self, other, sort)
return wrapped
@Appender(
_interval_shared_docs["class"]
% {
"klass": "IntervalIndex",
"summary": "Immutable index of intervals that are closed on the same side.",
"name": _index_doc_kwargs["name"],
"versionadded": "0.20.0",
"extra_attributes": "is_overlapping\nvalues\n",
"extra_methods": "",
"examples": textwrap.dedent(
"""\
Examples
--------
A new ``IntervalIndex`` is typically constructed using
:func:`interval_range`:
>>> pd.interval_range(start=0, end=5)
IntervalIndex([(0, 1], (1, 2], (2, 3], (3, 4], (4, 5]],
closed='right',
dtype='interval[int64]')
It may also be constructed using one of the constructor
methods: :meth:`IntervalIndex.from_arrays`,
:meth:`IntervalIndex.from_breaks`, and :meth:`IntervalIndex.from_tuples`.
See further examples in the doc strings of ``interval_range`` and the
mentioned constructor methods.
"""
),
}
)
@inherit_names(["set_closed", "to_tuples"], IntervalArray, wrap=True)
@inherit_names(["__array__", "overlaps", "contains"], IntervalArray)
@inherit_names(["is_non_overlapping_monotonic", "closed"], IntervalArray, cache=True)
class IntervalIndex(IntervalMixin, ExtensionIndex):
_typ = "intervalindex"
_comparables = ["name"]
_attributes = ["name", "closed"]
# we would like our indexing holder to defer to us
_defer_to_indexing = True
_data: IntervalArray
_values: IntervalArray
_can_hold_strings = False
# --------------------------------------------------------------------
# Constructors
def __new__(
cls,
data,
closed=None,
dtype=None,
copy: bool = False,
name=None,
verify_integrity: bool = True,
):
name = maybe_extract_name(name, data, cls)
with rewrite_exception("IntervalArray", cls.__name__):
array = IntervalArray(
data,
closed=closed,
copy=copy,
dtype=dtype,
verify_integrity=verify_integrity,
)
return cls._simple_new(array, name)
@classmethod
def _simple_new(cls, array: IntervalArray, name: Label = None):
"""
Construct from an IntervalArray
Parameters
----------
array : IntervalArray
name : Label, default None
Attached as result.name
"""
assert isinstance(array, IntervalArray), type(array)
result = IntervalMixin.__new__(cls)
result._data = array
result.name = name
result._cache = {}
result._reset_identity()
return result
@classmethod
@Appender(
_interval_shared_docs["from_breaks"]
% {
"klass": "IntervalIndex",
"examples": textwrap.dedent(
"""\
Examples
--------
>>> pd.IntervalIndex.from_breaks([0, 1, 2, 3])
IntervalIndex([(0, 1], (1, 2], (2, 3]],
closed='right',
dtype='interval[int64]')
"""
),
}
)
def from_breaks(
cls, breaks, closed: str = "right", name=None, copy: bool = False, dtype=None
):
with rewrite_exception("IntervalArray", cls.__name__):
array = IntervalArray.from_breaks(
breaks, closed=closed, copy=copy, dtype=dtype
)
return cls._simple_new(array, name=name)
@classmethod
@Appender(
_interval_shared_docs["from_arrays"]
% {
"klass": "IntervalIndex",
"examples": textwrap.dedent(
"""\
Examples
--------
>>> pd.IntervalIndex.from_arrays([0, 1, 2], [1, 2, 3])
IntervalIndex([(0, 1], (1, 2], (2, 3]],
closed='right',
dtype='interval[int64]')
"""
),
}
)
def from_arrays(
cls,
left,
right,
closed: str = "right",
name=None,
copy: bool = False,
dtype=None,
):
with rewrite_exception("IntervalArray", cls.__name__):
array = IntervalArray.from_arrays(
left, right, closed, copy=copy, dtype=dtype
)
return cls._simple_new(array, name=name)
@classmethod
@Appender(
_interval_shared_docs["from_tuples"]
% {
"klass": "IntervalIndex",
"examples": textwrap.dedent(
"""\
Examples
--------
>>> pd.IntervalIndex.from_tuples([(0, 1), (1, 2)])
IntervalIndex([(0, 1], (1, 2]],
closed='right',
dtype='interval[int64]')
"""
),
}
)
def from_tuples(
cls, data, closed: str = "right", name=None, copy: bool = False, dtype=None
):
with rewrite_exception("IntervalArray", cls.__name__):
arr = IntervalArray.from_tuples(data, closed=closed, copy=copy, dtype=dtype)
return cls._simple_new(arr, name=name)
# --------------------------------------------------------------------
@cache_readonly
def _engine(self):
left = self._maybe_convert_i8(self.left)
right = self._maybe_convert_i8(self.right)
return IntervalTree(left, right, closed=self.closed)
def __contains__(self, key: Any) -> bool:
"""
return a boolean if this key is IN the index
We *only* accept an Interval
Parameters
----------
key : Interval
Returns
-------
bool
"""
hash(key)
if not isinstance(key, Interval):
return False
try:
self.get_loc(key)
return True
except KeyError:
return False
@cache_readonly
def _multiindex(self) -> MultiIndex:
return MultiIndex.from_arrays([self.left, self.right], names=["left", "right"])
def __array_wrap__(self, result, context=None):
# we don't want the superclass implementation
return result
def __reduce__(self):
d = {"left": self.left, "right": self.right}
d.update(self._get_attributes_dict())
return _new_IntervalIndex, (type(self), d), None
@Appender(Index.astype.__doc__)
def astype(self, dtype, copy: bool = True):
with rewrite_exception("IntervalArray", type(self).__name__):
new_values = self._values.astype(dtype, copy=copy)
return Index(new_values, dtype=new_values.dtype, name=self.name)
@property
def inferred_type(self) -> str:
"""Return a string of the type inferred from the values"""
return "interval"
@Appender(Index.memory_usage.__doc__)
def memory_usage(self, deep: bool = False) -> int:
# we don't use an explicit engine
# so return the bytes here
return self.left.memory_usage(deep=deep) + self.right.memory_usage(deep=deep)
# IntervalTree doesn't have a is_monotonic_decreasing, so have to override
# the Index implementation
@cache_readonly
def is_monotonic_decreasing(self) -> bool:
"""
Return True if the IntervalIndex is monotonic decreasing (only equal or
decreasing values), else False
"""
return self[::-1].is_monotonic_increasing
@cache_readonly
def is_unique(self) -> bool:
"""
Return True if the IntervalIndex contains unique elements, else False.
"""
left = self.left
right = self.right
if self.isna().sum() > 1:
return False
if left.is_unique or right.is_unique:
return True
seen_pairs = set()
check_idx = np.where(left.duplicated(keep=False))[0]
for idx in check_idx:
pair = (left[idx], right[idx])
if pair in seen_pairs:
return False
seen_pairs.add(pair)
return True
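    # Illustrative note (added; not part of the original source): uniqueness is
    # decided on (left, right) pairs, so repeated endpoints alone do not make the
    # index non-unique. A hedged doctest-style sketch:
    #
    #   >>> IntervalIndex.from_tuples([(0, 1), (0, 2)]).is_unique
    #   True
    #   >>> IntervalIndex.from_tuples([(0, 1), (0, 1)]).is_unique
    #   False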
@property
def is_overlapping(self) -> bool:
"""
Return True if the IntervalIndex has overlapping intervals, else False.
Two intervals overlap if they share a common point, including closed
endpoints. Intervals that only have an open endpoint in common do not
overlap.
.. versionadded:: 0.24.0
Returns
-------
bool
Boolean indicating if the IntervalIndex has overlapping intervals.
See Also
--------
Interval.overlaps : Check whether two Interval objects overlap.
IntervalIndex.overlaps : Check an IntervalIndex elementwise for
overlaps.
Examples
--------
>>> index = pd.IntervalIndex.from_tuples([(0, 2), (1, 3), (4, 5)])
>>> index
IntervalIndex([(0, 2], (1, 3], (4, 5]],
closed='right',
dtype='interval[int64]')
>>> index.is_overlapping
True
Intervals that share closed endpoints overlap:
>>> index = pd.interval_range(0, 3, closed='both')
>>> index
IntervalIndex([[0, 1], [1, 2], [2, 3]],
closed='both',
dtype='interval[int64]')
>>> index.is_overlapping
True
Intervals that only have an open endpoint in common do not overlap:
>>> index = pd.interval_range(0, 3, closed='left')
>>> index
IntervalIndex([[0, 1), [1, 2), [2, 3)],
closed='left',
dtype='interval[int64]')
>>> index.is_overlapping
False
"""
# GH 23309
return self._engine.is_overlapping
def _needs_i8_conversion(self, key) -> bool:
"""
Check if a given key needs i8 conversion. Conversion is necessary for
Timestamp, Timedelta, DatetimeIndex, and TimedeltaIndex keys. An
Interval-like requires conversion if its endpoints are one of the
aforementioned types.
Assumes that any list-like data has already been cast to an Index.
Parameters
----------
key : scalar or Index-like
The key that should be checked for i8 conversion
Returns
-------
bool
"""
if is_interval_dtype(key) or isinstance(key, Interval):
return self._needs_i8_conversion(key.left)
i8_types = (Timestamp, Timedelta, DatetimeIndex, TimedeltaIndex)
return isinstance(key, i8_types)
def _maybe_convert_i8(self, key):
"""
Maybe convert a given key to its equivalent i8 value(s). Used as a
preprocessing step prior to IntervalTree queries (self._engine), which
expects numeric data.
Parameters
----------
key : scalar or list-like
The key that should maybe be converted to i8.
Returns
-------
scalar or list-like
The original key if no conversion occurred, int if converted scalar,
Int64Index if converted list-like.
"""
original = key
if is_list_like(key):
key = ensure_index(key)
if not self._needs_i8_conversion(key):
return original
scalar = is_scalar(key)
if is_interval_dtype(key) or isinstance(key, Interval):
# convert left/right and reconstruct
left = self._maybe_convert_i8(key.left)
right = self._maybe_convert_i8(key.right)
constructor = Interval if scalar else IntervalIndex.from_arrays
return constructor(left, right, closed=self.closed)
if scalar:
# Timestamp/Timedelta
key_dtype, key_i8 = infer_dtype_from_scalar(key, pandas_dtype=True)
if lib.is_period(key):
key_i8 = key.ordinal
else:
# DatetimeIndex/TimedeltaIndex
key_dtype, key_i8 = key.dtype, Index(key.asi8)
if key.hasnans:
# convert NaT from its i8 value to np.nan so it's not viewed
# as a valid value, maybe causing errors (e.g. is_overlapping)
key_i8 = key_i8.where(~key._isnan)
# ensure consistency with IntervalIndex subtype
subtype = self.dtype.subtype
if not is_dtype_equal(subtype, key_dtype):
raise ValueError(
f"Cannot index an IntervalIndex of subtype {subtype} with "
f"values of dtype {key_dtype}"
)
return key_i8
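    # Illustrative note (added; the index and values below are assumptions, not
    # from the original source): for a datetime-backed IntervalIndex, scalar keys
    # become nanosecond i8 integers and Index keys become Int64Index before the
    # IntervalTree query:
    #
    #   >>> idx = interval_range(Timestamp("2020-01-01"), periods=2)
    #   >>> idx._maybe_convert_i8(Timestamp("2020-01-01 12:00"))
    #   1577880000000000000
    #   >>> idx._maybe_convert_i8(idx.left)  # Int64Index of i8 values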
def _searchsorted_monotonic(self, label, side, exclude_label=False):
if not self.is_non_overlapping_monotonic:
raise KeyError(
"can only get slices from an IntervalIndex if bounds are "
"non-overlapping and all monotonic increasing or decreasing"
)
if isinstance(label, IntervalMixin):
raise NotImplementedError("Interval objects are not currently supported")
# GH 20921: "not is_monotonic_increasing" for the second condition
# instead of "is_monotonic_decreasing" to account for single element
# indexes being both increasing and decreasing
if (side == "left" and self.left.is_monotonic_increasing) or (
side == "right" and not self.left.is_monotonic_increasing
):
sub_idx = self.right
if self.open_right or exclude_label:
label = _get_next_label(label)
else:
sub_idx = self.left
if self.open_left or exclude_label:
label = _get_prev_label(label)
return sub_idx._searchsorted_monotonic(label, side)
# --------------------------------------------------------------------
# Indexing Methods
def get_loc(
self, key, method: Optional[str] = None, tolerance=None
) -> Union[int, slice, np.ndarray]:
"""
Get integer location, slice or boolean mask for requested label.
Parameters
----------
key : label
method : {None}, optional
* default: matches where the label is within an interval only.
Returns
-------
int if unique index, slice if monotonic index, else mask
Examples
--------
>>> i1, i2 = pd.Interval(0, 1), pd.Interval(1, 2)
>>> index = pd.IntervalIndex([i1, i2])
>>> index.get_loc(1)
0
You can also supply a point inside an interval.
>>> index.get_loc(1.5)
1
If a label is in several intervals, you get the locations of all the
relevant intervals.
>>> i3 = pd.Interval(0, 2)
>>> overlapping_index = pd.IntervalIndex([i1, i2, i3])
>>> overlapping_index.get_loc(0.5)
array([ True, False, True])
Only exact matches will be returned if an interval is provided.
>>> index.get_loc(pd.Interval(0, 1))
0
"""
self._check_indexing_method(method)
if not is_scalar(key):
raise InvalidIndexError(key)
if isinstance(key, Interval):
if self.closed != key.closed:
raise KeyError(key)
mask = (self.left == key.left) & (self.right == key.right)
else:
# assume scalar
op_left = le if self.closed_left else lt
op_right = le if self.closed_right else lt
try:
mask = op_left(self.left, key) & op_right(key, self.right)
except TypeError as err:
# scalar is not comparable to II subtype --> invalid label
raise KeyError(key) from err
matches = mask.sum()
if matches == 0:
raise KeyError(key)
elif matches == 1:
return mask.argmax()
return lib.maybe_booleans_to_slice(mask.view("u1"))
def _get_indexer(
self,
target: Index,
method: Optional[str] = None,
limit: Optional[int] = None,
tolerance: Optional[Any] = None,
) -> np.ndarray:
if isinstance(target, IntervalIndex):
# equal indexes -> 1:1 positional match
if self.equals(target):
return np.arange(len(self), dtype="intp")
if not self._should_compare(target):
return self._get_indexer_non_comparable(target, method, unique=True)
# non-overlapping -> at most one match per interval in target
# want exact matches -> need both left/right to match, so defer to
# left/right get_indexer, compare elementwise, equality -> match
left_indexer = self.left.get_indexer(target.left)
right_indexer = self.right.get_indexer(target.right)
indexer = np.where(left_indexer == right_indexer, left_indexer, -1)
elif is_categorical_dtype(target.dtype):
target = cast("CategoricalIndex", target)
# get an indexer for unique categories then propagate to codes via take_1d
categories_indexer = self.get_indexer(target.categories)
indexer = take_1d(categories_indexer, target.codes, fill_value=-1)
elif not is_object_dtype(target):
# homogeneous scalar index: use IntervalTree
target = self._maybe_convert_i8(target)
indexer = self._engine.get_indexer(target.values)
else:
# heterogeneous scalar index: defer elementwise to get_loc
return self._get_indexer_pointwise(target)[0]
return ensure_platform_int(indexer)
@Appender(_index_shared_docs["get_indexer_non_unique"] % _index_doc_kwargs)
def get_indexer_non_unique(self, target: Index) -> Tuple[np.ndarray, np.ndarray]:
target = ensure_index(target)
if isinstance(target, IntervalIndex) and not self._should_compare(target):
# different closed or incompatible subtype -> no matches
return self._get_indexer_non_comparable(target, None, unique=False)
elif is_object_dtype(target.dtype) or isinstance(target, IntervalIndex):
# target might contain intervals: defer elementwise to get_loc
return self._get_indexer_pointwise(target)
else:
# Note: this case behaves differently from other Index subclasses
# because IntervalIndex does partial-int indexing
target = self._maybe_convert_i8(target)
indexer, missing = self._engine.get_indexer_non_unique(target.values)
return ensure_platform_int(indexer), ensure_platform_int(missing)
def _get_indexer_pointwise(self, target: Index) -> Tuple[np.ndarray, np.ndarray]:
"""
pointwise implementation for get_indexer and get_indexer_non_unique.
"""
indexer, missing = [], []
for i, key in enumerate(target):
try:
locs = self.get_loc(key)
if isinstance(locs, slice):
# Only needed for get_indexer_non_unique
locs = np.arange(locs.start, locs.stop, locs.step, dtype="intp")
locs = np.array(locs, ndmin=1)
except KeyError:
missing.append(i)
locs = np.array([-1])
except InvalidIndexError as err:
# i.e. non-scalar key
raise TypeError(key) from err
indexer.append(locs)
indexer = np.concatenate(indexer)
return ensure_platform_int(indexer), ensure_platform_int(missing)
@property
def _index_as_unique(self):
return not self.is_overlapping
_requires_unique_msg = (
"cannot handle overlapping indices; use IntervalIndex.get_indexer_non_unique"
)
def _convert_slice_indexer(self, key: slice, kind: str):
if not (key.step is None or key.step == 1):
# GH#31658 if label-based, we require step == 1,
# if positional, we disallow float start/stop
msg = "label-based slicing with step!=1 is not supported for IntervalIndex"
if kind == "loc":
raise ValueError(msg)
elif kind == "getitem":
if not is_valid_positional_slice(key):
# i.e. this cannot be interpreted as a positional slice
raise ValueError(msg)
return super()._convert_slice_indexer(key, kind)
def _should_fallback_to_positional(self) -> bool:
# integer lookups in Series.__getitem__ are unambiguously
# positional in this case
return self.dtype.subtype.kind in ["m", "M"]
def _maybe_cast_slice_bound(self, label, side: str, kind):
return getattr(self, side)._maybe_cast_slice_bound(label, side, kind)
@Appender(Index._convert_list_indexer.__doc__)
def _convert_list_indexer(self, keyarr):
"""
we are passed a list-like indexer. Return the
indexer for matching intervals.
"""
locs = self.get_indexer_for(keyarr)
# we have missing values
if (locs == -1).any():
raise KeyError(keyarr[locs == -1].tolist())
return locs
def _is_comparable_dtype(self, dtype: DtypeObj) -> bool:
if not isinstance(dtype, IntervalDtype):
return False
common_subtype = find_common_type([self.dtype.subtype, dtype.subtype])
return not is_object_dtype(common_subtype)
def _should_compare(self, other) -> bool:
other = unpack_nested_dtype(other)
if is_object_dtype(other.dtype):
return True
if not self._is_comparable_dtype(other.dtype):
return False
return other.closed == self.closed
# --------------------------------------------------------------------
@cache_readonly
def left(self) -> Index:
return Index(self._data.left, copy=False)
@cache_readonly
def right(self) -> Index:
return Index(self._data.right, copy=False)
@cache_readonly
def mid(self):
return Index(self._data.mid, copy=False)
@property
def length(self):
return Index(self._data.length, copy=False)
def putmask(self, mask, value):
arr = self._data.copy()
try:
value_left, value_right = arr._validate_setitem_value(value)
except (ValueError, TypeError):
return self.astype(object).putmask(mask, value)
if isinstance(self._data._left, np.ndarray):
np.putmask(arr._left, mask, value_left)
np.putmask(arr._right, mask, value_right)
else:
# TODO: special case not needed with __array_function__
arr._left.putmask(mask, value_left)
arr._right.putmask(mask, value_right)
return type(self)._simple_new(arr, name=self.name)
@Appender(Index.where.__doc__)
def where(self, cond, other=None):
if other is None:
other = self._na_value
values = np.where(cond, self._values, other)
result = IntervalArray(values)
return type(self)._simple_new(result, name=self.name)
def delete(self, loc):
"""
Return a new IntervalIndex with passed location(-s) deleted
Returns
-------
IntervalIndex
"""
new_left = self.left.delete(loc)
new_right = self.right.delete(loc)
result = self._data._shallow_copy(new_left, new_right)
return type(self)._simple_new(result, name=self.name)
def insert(self, loc, item):
"""
Return a new IntervalIndex inserting new item at location. Follows
Python list.append semantics for negative values. Only Interval
objects and NA can be inserted into an IntervalIndex
Parameters
----------
loc : int
item : object
Returns
-------
IntervalIndex
"""
left_insert, right_insert = self._data._validate_scalar(item)
new_left = self.left.insert(loc, left_insert)
new_right = self.right.insert(loc, right_insert)
result = self._data._shallow_copy(new_left, new_right)
return type(self)._simple_new(result, name=self.name)
# --------------------------------------------------------------------
# Rendering Methods
# __repr__ associated methods are based on MultiIndex
def _format_with_header(self, header: List[str], na_rep: str = "NaN") -> List[str]:
return header + list(self._format_native_types(na_rep=na_rep))
def _format_native_types(self, na_rep="NaN", quoting=None, **kwargs):
# GH 28210: use base method but with different default na_rep
return super()._format_native_types(na_rep=na_rep, quoting=quoting, **kwargs)
def _format_data(self, name=None):
# TODO: integrate with categorical and make generic
# name argument is unused here; just for compat with base / categorical
n = len(self)
max_seq_items = min((get_option("display.max_seq_items") or n) // 10, 10)
formatter = str
if n == 0:
summary = "[]"
elif n == 1:
first = formatter(self[0])
summary = f"[{first}]"
elif n == 2:
first = formatter(self[0])
last = formatter(self[-1])
summary = f"[{first}, {last}]"
else:
if n > max_seq_items:
n = min(max_seq_items // 2, 10)
head = [formatter(x) for x in self[:n]]
tail = [formatter(x) for x in self[-n:]]
head_joined = ", ".join(head)
tail_joined = ", ".join(tail)
summary = f"[{head_joined} ... {tail_joined}]"
else:
tail = [formatter(x) for x in self]
joined = ", ".join(tail)
summary = f"[{joined}]"
return summary + "," + self._format_space()
def _format_attrs(self):
attrs = [("closed", repr(self.closed))]
if self.name is not None:
attrs.append(("name", default_pprint(self.name)))
attrs.append(("dtype", f"'{self.dtype}'"))
return attrs
def _format_space(self) -> str:
space = " " * (len(type(self).__name__) + 1)
return f"\n{space}"
# --------------------------------------------------------------------
# Set Operations
def _assert_can_do_setop(self, other):
super()._assert_can_do_setop(other)
if isinstance(other, IntervalIndex) and not self._should_compare(other):
# GH#19016: ensure set op will not return a prohibited dtype
raise TypeError(
"can only do set operations between two IntervalIndex "
"objects that are closed on the same side "
"and have compatible dtypes"
)
def _intersection(self, other, sort):
"""
intersection specialized to the case with matching dtypes.
"""
# For IntervalIndex we also know other.closed == self.closed
if self.left.is_unique and self.right.is_unique:
taken = self._intersection_unique(other)
elif other.left.is_unique and other.right.is_unique and self.isna().sum() <= 1:
# Swap other/self if other is unique and self does not have
# multiple NaNs
taken = other._intersection_unique(self)
else:
# duplicates
taken = self._intersection_non_unique(other)
if sort is None:
taken = taken.sort_values()
return taken
def _intersection_unique(self, other: "IntervalIndex") -> "IntervalIndex":
"""
        Used when the IntervalIndex has unique left and right endpoints
        (no repeated endpoints on either side).
Return the intersection with another IntervalIndex.
Parameters
----------
other : IntervalIndex
Returns
-------
IntervalIndex
"""
lindexer = self.left.get_indexer(other.left)
rindexer = self.right.get_indexer(other.right)
match = (lindexer == rindexer) & (lindexer != -1)
indexer = lindexer.take(match.nonzero()[0])
indexer = unique(indexer)
return self.take(indexer)
def _intersection_non_unique(self, other: "IntervalIndex") -> "IntervalIndex":
"""
        Used when the IntervalIndex has repeated endpoints
        on either side.
Return the intersection with another IntervalIndex.
Parameters
----------
other : IntervalIndex
Returns
-------
IntervalIndex
"""
mask = np.zeros(len(self), dtype=bool)
if self.hasnans and other.hasnans:
first_nan_loc = np.arange(len(self))[self.isna()][0]
mask[first_nan_loc] = True
other_tups = set(zip(other.left, other.right))
for i, tup in enumerate(zip(self.left, self.right)):
if tup in other_tups:
mask[i] = True
return self[mask]
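    # Illustrative note (added; not part of the original source): when both sides
    # have repeated endpoints, the tuple-based path above is used and only
    # intervals present in both indexes survive:
    #
    #   >>> a = IntervalIndex.from_tuples([(0, 1), (0, 2), (1, 2)])
    #   >>> b = IntervalIndex.from_tuples([(0, 2), (0, 3), (1, 3)])
    #   >>> a.intersection(b)  # -> IntervalIndex containing just (0, 2]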
def _setop(op_name: str, sort=None):
def func(self, other, sort=sort):
# At this point we are assured
# isinstance(other, IntervalIndex)
# other.closed == self.closed
result = getattr(self._multiindex, op_name)(other._multiindex, sort=sort)
result_name = get_op_result_name(self, other)
# GH 19101: ensure empty results have correct dtype
if result.empty:
result = result._values.astype(self.dtype.subtype)
else:
result = result._values
return type(self).from_tuples(result, closed=self.closed, name=result_name)
func.__name__ = op_name
return setop_check(func)
_union = _setop("union")
difference = _setop("difference")
# --------------------------------------------------------------------
@property
def _is_all_dates(self) -> bool:
"""
This is False even when left/right contain datetime-like objects,
as the check is done on the Interval itself
"""
return False
# TODO: arithmetic operations
def _is_valid_endpoint(endpoint) -> bool:
"""
Helper for interval_range to check if start/end are valid types.
"""
return any(
[
is_number(endpoint),
isinstance(endpoint, Timestamp),
isinstance(endpoint, Timedelta),
endpoint is None,
]
)
def _is_type_compatible(a, b) -> bool:
"""
Helper for interval_range to check type compat of start/end/freq.
"""
is_ts_compat = lambda x: isinstance(x, (Timestamp, BaseOffset))
is_td_compat = lambda x: isinstance(x, (Timedelta, BaseOffset))
return (
(is_number(a) and is_number(b))
or (is_ts_compat(a) and is_ts_compat(b))
or (is_td_compat(a) and is_td_compat(b))
or com.any_none(a, b)
)
def interval_range(
start=None, end=None, periods=None, freq=None, name=None, closed="right"
):
"""
Return a fixed frequency IntervalIndex.
Parameters
----------
start : numeric or datetime-like, default None
Left bound for generating intervals.
end : numeric or datetime-like, default None
Right bound for generating intervals.
periods : int, default None
Number of periods to generate.
freq : numeric, str, or DateOffset, default None
The length of each interval. Must be consistent with the type of start
and end, e.g. 2 for numeric, or '5H' for datetime-like. Default is 1
for numeric and 'D' for datetime-like.
name : str, default None
Name of the resulting IntervalIndex.
closed : {'left', 'right', 'both', 'neither'}, default 'right'
Whether the intervals are closed on the left-side, right-side, both
or neither.
Returns
-------
IntervalIndex
See Also
--------
IntervalIndex : An Index of intervals that are all closed on the same side.
Notes
-----
Of the four parameters ``start``, ``end``, ``periods``, and ``freq``,
exactly three must be specified. If ``freq`` is omitted, the resulting
``IntervalIndex`` will have ``periods`` linearly spaced elements between
``start`` and ``end``, inclusively.
To learn more about datetime-like frequency strings, please see `this link
<https://pandas.pydata.org/pandas-docs/stable/user_guide/timeseries.html#offset-aliases>`__.
Examples
--------
Numeric ``start`` and ``end`` is supported.
>>> pd.interval_range(start=0, end=5)
IntervalIndex([(0, 1], (1, 2], (2, 3], (3, 4], (4, 5]],
closed='right', dtype='interval[int64]')
Additionally, datetime-like input is also supported.
>>> pd.interval_range(start=pd.Timestamp('2017-01-01'),
... end=pd.Timestamp('2017-01-04'))
IntervalIndex([(2017-01-01, 2017-01-02], (2017-01-02, 2017-01-03],
(2017-01-03, 2017-01-04]],
closed='right', dtype='interval[datetime64[ns]]')
    The ``freq`` parameter specifies the frequency between the left and right
    endpoints of the individual intervals within the ``IntervalIndex``. For
numeric ``start`` and ``end``, the frequency must also be numeric.
>>> pd.interval_range(start=0, periods=4, freq=1.5)
IntervalIndex([(0.0, 1.5], (1.5, 3.0], (3.0, 4.5], (4.5, 6.0]],
closed='right', dtype='interval[float64]')
Similarly, for datetime-like ``start`` and ``end``, the frequency must be
convertible to a DateOffset.
>>> pd.interval_range(start=pd.Timestamp('2017-01-01'),
... periods=3, freq='MS')
IntervalIndex([(2017-01-01, 2017-02-01], (2017-02-01, 2017-03-01],
(2017-03-01, 2017-04-01]],
closed='right', dtype='interval[datetime64[ns]]')
Specify ``start``, ``end``, and ``periods``; the frequency is generated
automatically (linearly spaced).
>>> pd.interval_range(start=0, end=6, periods=4)
IntervalIndex([(0.0, 1.5], (1.5, 3.0], (3.0, 4.5], (4.5, 6.0]],
closed='right',
dtype='interval[float64]')
The ``closed`` parameter specifies which endpoints of the individual
intervals within the ``IntervalIndex`` are closed.
>>> pd.interval_range(end=5, periods=4, closed='both')
IntervalIndex([[1, 2], [2, 3], [3, 4], [4, 5]],
closed='both', dtype='interval[int64]')
"""
start = maybe_box_datetimelike(start)
end = maybe_box_datetimelike(end)
endpoint = start if start is not None else end
if freq is None and com.any_none(periods, start, end):
freq = 1 if is_number(endpoint) else "D"
if com.count_not_none(start, end, periods, freq) != 3:
raise ValueError(
"Of the four parameters: start, end, periods, and "
"freq, exactly three must be specified"
)
if not _is_valid_endpoint(start):
raise ValueError(f"start must be numeric or datetime-like, got {start}")
elif not _is_valid_endpoint(end):
raise ValueError(f"end must be numeric or datetime-like, got {end}")
if is_float(periods):
periods = int(periods)
elif not is_integer(periods) and periods is not None:
raise TypeError(f"periods must be a number, got {periods}")
if freq is not None and not is_number(freq):
try:
freq = to_offset(freq)
except ValueError as err:
raise ValueError(
f"freq must be numeric or convertible to DateOffset, got {freq}"
) from err
# verify type compatibility
if not all(
[
_is_type_compatible(start, end),
_is_type_compatible(start, freq),
_is_type_compatible(end, freq),
]
):
raise TypeError("start, end, freq need to be type compatible")
# +1 to convert interval count to breaks count (n breaks = n-1 intervals)
if periods is not None:
periods += 1
if is_number(endpoint):
# force consistency between start/end/freq (lower end if freq skips it)
if com.all_not_none(start, end, freq):
end -= (end - start) % freq
# compute the period/start/end if unspecified (at most one)
if periods is None:
periods = int((end - start) // freq) + 1
elif start is None:
start = end - (periods - 1) * freq
elif end is None:
end = start + (periods - 1) * freq
breaks = np.linspace(start, end, periods)
if all(is_integer(x) for x in com.not_none(start, end, freq)):
# np.linspace always produces float output
breaks = maybe_downcast_numeric(breaks, np.dtype("int64"))
else:
# delegate to the appropriate range function
if isinstance(endpoint, Timestamp):
breaks = date_range(start=start, end=end, periods=periods, freq=freq)
else:
breaks = timedelta_range(start=start, end=end, periods=periods, freq=freq)
return IntervalIndex.from_breaks(breaks, name=name, closed=closed)
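# Hedged usage sketch (added for illustration; not part of the original module):
# a non-overlapping IntervalIndex answers point lookups with a unique indexer,
# while an overlapping one requires the non-unique variant.
#
#   >>> idx = interval_range(start=0, end=4)
#   >>> idx.get_indexer([0.5, 2.5, 10])
#   array([ 0,  2, -1])
#   >>> overlapping = IntervalIndex.from_tuples([(0, 2), (1, 3)])
#   >>> overlapping.get_indexer_non_unique([1.5])[0]
#   array([0, 1])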
| bsd-3-clause | 507,990,376,686,882,940 | 32.923014 | 96 | 0.576147 | false |
bmun/huxley | huxley/core/migrations/0001_initial.py | 1 | 8677 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
]
operations = [
migrations.CreateModel(
name='Assignment',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
],
options={
'db_table': 'assignment',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Committee',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=8)),
('full_name', models.CharField(max_length=128)),
('delegation_size', models.PositiveSmallIntegerField(default=2)),
('special', models.BooleanField(default=False)),
],
options={
'db_table': 'committee',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Conference',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('session', models.PositiveSmallIntegerField(default=0)),
('start_date', models.DateField()),
('end_date', models.DateField()),
('reg_open', models.DateField()),
('early_reg_close', models.DateField()),
('reg_close', models.DateField()),
('min_attendance', models.PositiveSmallIntegerField(default=0)),
('max_attendance', models.PositiveSmallIntegerField(default=0)),
('open_reg', models.BooleanField(default=True)),
('waitlist_reg', models.BooleanField(default=False)),
],
options={
'db_table': 'conference',
'get_latest_by': 'start_date',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Country',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=128)),
('special', models.BooleanField(default=False)),
],
options={
'db_table': 'country',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='CountryPreference',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('rank', models.PositiveSmallIntegerField()),
('country', models.ForeignKey(to='core.Country', on_delete=models.CASCADE)),
],
options={
'ordering': ['-school', 'rank'],
'db_table': 'country_preference',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='Delegate',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('name', models.CharField(max_length=64, blank=True)),
('email', models.EmailField(max_length=75, blank=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
('summary', models.TextField(default=b'', null=True)),
                ('assignment', models.ForeignKey(related_name='delegates', to='core.Assignment', null=True, on_delete=models.SET_NULL)),
],
options={
'ordering': ['assignment__country'],
'db_table': 'delegate',
},
bases=(models.Model,),
),
migrations.CreateModel(
name='School',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('registered', models.DateTimeField(auto_now_add=True)),
('name', models.CharField(max_length=128)),
('address', models.CharField(max_length=128)),
('city', models.CharField(max_length=128)),
('state', models.CharField(max_length=16)),
('zip_code', models.CharField(max_length=16)),
('country', models.CharField(max_length=64)),
('primary_name', models.CharField(max_length=128)),
('primary_gender', models.PositiveSmallIntegerField(default=4, choices=[(1, b'Male'), (2, b'Female'), (3, b'Other'), (4, b'Unspecified')])),
('primary_email', models.EmailField(max_length=75)),
('primary_phone', models.CharField(max_length=32)),
('primary_type', models.PositiveSmallIntegerField(default=2, choices=[(2, b'Faculty'), (1, b'Student')])),
('secondary_name', models.CharField(max_length=128, blank=True)),
('secondary_gender', models.PositiveSmallIntegerField(default=4, blank=True, choices=[(1, b'Male'), (2, b'Female'), (3, b'Other'), (4, b'Unspecified')])),
('secondary_email', models.EmailField(max_length=75, blank=True)),
('secondary_phone', models.CharField(max_length=32, blank=True)),
('secondary_type', models.PositiveSmallIntegerField(default=2, blank=True, choices=[(2, b'Faculty'), (1, b'Student')])),
('program_type', models.PositiveSmallIntegerField(default=1, choices=[(1, b'Club'), (2, b'Class')])),
('times_attended', models.PositiveSmallIntegerField(default=0)),
('international', models.BooleanField(default=False)),
('waitlist', models.BooleanField(default=False)),
('beginner_delegates', models.PositiveSmallIntegerField()),
('intermediate_delegates', models.PositiveSmallIntegerField()),
('advanced_delegates', models.PositiveSmallIntegerField()),
('spanish_speaking_delegates', models.PositiveSmallIntegerField()),
('prefers_bilingual', models.BooleanField(default=False)),
('prefers_specialized_regional', models.BooleanField(default=False)),
('prefers_crisis', models.BooleanField(default=False)),
('prefers_alternative', models.BooleanField(default=False)),
('prefers_press_corps', models.BooleanField(default=False)),
('registration_comments', models.TextField(default=b'', blank=True)),
('fees_owed', models.DecimalField(default=0, max_digits=6, decimal_places=2)),
('fees_paid', models.DecimalField(default=0, max_digits=6, decimal_places=2)),
('countrypreferences', models.ManyToManyField(to='core.Country', through='core.CountryPreference')),
],
options={
'db_table': 'school',
},
bases=(models.Model,),
),
migrations.AddField(
model_name='countrypreference',
name='school',
field=models.ForeignKey(to='core.School', on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AlterUniqueTogether(
name='countrypreference',
unique_together=set([('country', 'school')]),
),
migrations.AddField(
model_name='committee',
name='countries',
field=models.ManyToManyField(to='core.Country', through='core.Assignment'),
preserve_default=True,
),
migrations.AddField(
model_name='assignment',
name='committee',
field=models.ForeignKey(to='core.Committee', on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='assignment',
name='country',
field=models.ForeignKey(to='core.Country', on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AddField(
model_name='assignment',
name='school',
field=models.ForeignKey(default=None, blank=True, to='core.School', null=True, on_delete=models.CASCADE),
preserve_default=True,
),
migrations.AlterUniqueTogether(
name='assignment',
unique_together=set([('committee', 'country')]),
),
]
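# Illustrative note (added; not part of the generated migration, and the object
# values below are assumptions): once applied, this schema enforces one
# Assignment per (committee, country) and one CountryPreference per
# (country, school), roughly:
#
#   committee = Committee.objects.create(name='DISC', full_name='Disarmament', delegation_size=2)
#   country = Country.objects.create(name='France')
#   school = School.objects.create(name='Example High', ...)  # plus the other required fields
#   Assignment.objects.create(committee=committee, country=country, school=school)
#   # a second Assignment with the same (committee, country) raises IntegrityError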
| bsd-3-clause | -3,202,705,995,020,903,000 | 46.939227 | 170 | 0.544658 | false |
MingLin-home/Ming_slim | datasets/download_and_convert_cifar100.py | 1 | 6711 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
r"""Downloads and converts cifar100 data to TFRecords of TF-Example protos.
This module downloads the cifar100 data, uncompresses it, reads the files
that make up the cifar100 data and creates two TFRecord datasets: one for train
and one for test. Each TFRecord dataset is comprised of a set of TF-Example
protocol buffers, each of which contains a single image and label.
The script should take several minutes to run.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# import cPickle
import os
import sys
import tarfile
import scipy.misc
import numpy as np
from six.moves import urllib
import tensorflow as tf
from datasets import dataset_utils
# The URL where the CIFAR data can be downloaded.
_DATA_URL = 'https://www.cs.toronto.edu/~kriz/cifar-100-binary.tar.gz'
# The number of training files.
_NUM_TRAIN_FILES = 1
# The height and width of each image.
_IMAGE_SIZE = 32
_IMAGE_COLOR_CHANNEL = 3
def _add_to_tfrecord(filename, tfrecord_writer, offset=0):
"""Loads data from the cifar10 pickle files and writes files to a TFRecord.
Args:
filename: The filename of the cifar10 pickle file.
tfrecord_writer: The TFRecord writer to use for writing.
offset: An offset into the absolute number of images previously written.
Returns:
The new offset.
"""
# with tf.gfile.Open(filename, 'r') as f:
# data = cPickle.load(f)
#
# images = data['data']
# num_images = images.shape[0]
#
# images = images.reshape((num_images, 3, 32, 32))
# labels = data['labels']
with open(filename,'rb') as fid:
all_byte = np.fromfile(fid,dtype=np.uint8)
one_record_len = _IMAGE_SIZE*_IMAGE_SIZE*_IMAGE_COLOR_CHANNEL + 2
all_byte = all_byte.reshape((-1,one_record_len,))
labels = all_byte[:,1]
num_images = all_byte.shape[0]
images = all_byte[:,2:].reshape((num_images, 3, 32, 32))
print('load from %s, num_images=%d' %(filename,num_images))
debug = False
with tf.Graph().as_default():
image_placeholder = tf.placeholder(dtype=tf.uint8)
encoded_image = tf.image.encode_png(image_placeholder)
with tf.Session('') as sess:
for j in range(num_images):
sys.stdout.write('\r>> Reading file [%s] image %d/%d' % (
filename, offset + j + 1, offset + num_images))
sys.stdout.flush()
image = np.squeeze(images[j]).transpose((1, 2, 0))
label = labels[j]
if debug:
debug=False
print(image)
scipy.misc.imsave('d:/debug_%s_%d.png' % (os.path.basename(filename),label),image)
pass # end if
png_string = sess.run(encoded_image,
feed_dict={image_placeholder: image})
example = dataset_utils.image_to_tfexample(
png_string, 'png'.encode(), _IMAGE_SIZE, _IMAGE_SIZE, label)
tfrecord_writer.write(example.SerializeToString())
return offset + num_images
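# Illustrative note (added; not part of the original script): each CIFAR-100
# binary record is <1 coarse label byte><1 fine label byte><3072 image bytes>,
# which is why one_record_len above is 32*32*3 + 2 and the fine label is read
# from byte 1. Decoding a single record looks roughly like:
#
#   record = all_byte[0]                                      # shape (3074,)
#   coarse_label, fine_label = record[0], record[1]
#   image = record[2:].reshape(3, 32, 32).transpose(1, 2, 0)  # HWC uint8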
def _get_output_filename(dataset_dir, split_name):
"""Creates the output filename.
Args:
dataset_dir: The dataset directory where the dataset is stored.
split_name: The name of the train/test split.
Returns:
An absolute file path.
"""
return '%s/cifar100_%s.tfrecord' % (dataset_dir, split_name)
def _download_and_uncompress_dataset(dataset_dir):
"""Downloads cifar10 and uncompresses it locally.
Args:
dataset_dir: The directory where the temporary files are stored.
"""
filename = _DATA_URL.split('/')[-1]
filepath = os.path.join(dataset_dir, filename)
if not os.path.exists(filepath):
def _progress(count, block_size, total_size):
sys.stdout.write('\r>> Downloading %s %.1f%%' % (
filename, float(count * block_size) / float(total_size) * 100.0))
sys.stdout.flush()
filepath, _ = urllib.request.urlretrieve(_DATA_URL, filepath, _progress)
print()
statinfo = os.stat(filepath)
print('Successfully downloaded', filename, statinfo.st_size, 'bytes.')
tarfile.open(filepath, 'r:gz').extractall(dataset_dir)
def _clean_up_temporary_files(dataset_dir):
"""Removes temporary files used to create the dataset.
Args:
dataset_dir: The directory where the temporary files are stored.
"""
filename = _DATA_URL.split('/')[-1]
filepath = os.path.join(dataset_dir, filename)
tf.gfile.Remove(filepath)
  tmp_dir = os.path.join(dataset_dir, 'cifar-100-binary')
tf.gfile.DeleteRecursively(tmp_dir)
def run(dataset_dir):
"""Runs the download and conversion operation.
Args:
dataset_dir: The dataset directory where the dataset is stored.
"""
if not tf.gfile.Exists(dataset_dir):
tf.gfile.MakeDirs(dataset_dir)
training_filename = _get_output_filename(dataset_dir, 'train')
testing_filename = _get_output_filename(dataset_dir, 'test')
if tf.gfile.Exists(training_filename) and tf.gfile.Exists(testing_filename):
print('Dataset files already exist. Exiting without re-creating them.')
return
dataset_utils.download_and_uncompress_tarball(_DATA_URL, dataset_dir)
# First, process the training data:
with tf.python_io.TFRecordWriter(training_filename) as tfrecord_writer:
offset = 0
for i in range(_NUM_TRAIN_FILES):
filename = os.path.join(dataset_dir,
'cifar-100-binary',
                              'train.bin')  # CIFAR-100 ships a single training file.
offset = _add_to_tfrecord(filename, tfrecord_writer, offset)
# Next, process the testing data:
with tf.python_io.TFRecordWriter(testing_filename) as tfrecord_writer:
filename = os.path.join(dataset_dir,
'cifar-100-binary',
'test.bin')
_add_to_tfrecord(filename, tfrecord_writer)
# Finally, write the labels file:
# labels_to_class_names = dict(zip(range(len(_CLASS_NAMES)), _CLASS_NAMES))
# dataset_utils.write_label_file(labels_to_class_names, dataset_dir)
# _clean_up_temporary_files(dataset_dir)
print('\nFinished converting the Cifar100 dataset!')
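# Hedged usage sketch (added; TF-slim normally invokes run() from its
# download_and_convert_data.py driver, and the output directory below is only
# an example):
if __name__ == '__main__':
  run('/tmp/cifar100')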
| gpl-3.0 | 2,938,116,569,980,777,500 | 32.059113 | 94 | 0.669945 | false |