repo_name (stringlengths 5-100) | path (stringlengths 4-375) | copies (stringclasses: 991 values) | size (stringlengths 4-7) | content (stringlengths 666-1M) | license (stringclasses: 15 values) |
---|---|---|---|---|---|
chaluemwut/fbserver | venv/lib/python2.7/site-packages/numpy/random/tests/test_random.py | 30 | 31114 |
from __future__ import division, absolute_import, print_function
import numpy as np
from numpy.testing import (
TestCase, run_module_suite, assert_, assert_raises, assert_equal,
assert_warns)
from numpy import random
from numpy.compat import asbytes
import sys
class TestSeed(TestCase):
def test_scalar(self):
s = np.random.RandomState(0)
assert_equal(s.randint(1000), 684)
s = np.random.RandomState(4294967295)
assert_equal(s.randint(1000), 419)
def test_array(self):
s = np.random.RandomState(range(10))
assert_equal(s.randint(1000), 468)
s = np.random.RandomState(np.arange(10))
assert_equal(s.randint(1000), 468)
s = np.random.RandomState([0])
assert_equal(s.randint(1000), 973)
s = np.random.RandomState([4294967295])
assert_equal(s.randint(1000), 265)
def test_invalid_scalar(self):
# seed must be an unsigned 32 bit integer
assert_raises(TypeError, np.random.RandomState, -0.5)
assert_raises(ValueError, np.random.RandomState, -1)
def test_invalid_array(self):
# seed must be an unsigned 32 bit integer
assert_raises(TypeError, np.random.RandomState, [-0.5])
assert_raises(ValueError, np.random.RandomState, [-1])
assert_raises(ValueError, np.random.RandomState, [4294967296])
assert_raises(ValueError, np.random.RandomState, [1, 2, 4294967296])
assert_raises(ValueError, np.random.RandomState, [1, -2, 4294967296])
class TestBinomial(TestCase):
def test_n_zero(self):
# Tests the corner case of n == 0 for the binomial distribution.
# binomial(0, p) should be zero for any p in [0, 1].
# This test addresses issue #3480.
zeros = np.zeros(2, dtype='int')
for p in [0, .5, 1]:
assert_(random.binomial(0, p) == 0)
np.testing.assert_array_equal(random.binomial(zeros, p), zeros)
def test_p_is_nan(self):
# Issue #4571.
assert_raises(ValueError, random.binomial, 1, np.nan)
class TestMultinomial(TestCase):
def test_basic(self):
random.multinomial(100, [0.2, 0.8])
def test_zero_probability(self):
random.multinomial(100, [0.2, 0.8, 0.0, 0.0, 0.0])
def test_int_negative_interval(self):
assert_(-5 <= random.randint(-5, -1) < -1)
x = random.randint(-5, -1, 5)
assert_(np.all(-5 <= x))
assert_(np.all(x < -1))
def test_size(self):
# gh-3173
p = [0.5, 0.5]
assert_equal(np.random.multinomial(1, p, np.uint32(1)).shape, (1, 2))
assert_equal(np.random.multinomial(1, p, np.uint32(1)).shape, (1, 2))
assert_equal(np.random.multinomial(1, p, np.uint32(1)).shape, (1, 2))
assert_equal(np.random.multinomial(1, p, [2, 2]).shape, (2, 2, 2))
assert_equal(np.random.multinomial(1, p, (2, 2)).shape, (2, 2, 2))
assert_equal(np.random.multinomial(1, p, np.array((2, 2))).shape,
(2, 2, 2))
assert_raises(TypeError, np.random.multinomial, 1, p,
np.float(1))
class TestSetState(TestCase):
def setUp(self):
self.seed = 1234567890
self.prng = random.RandomState(self.seed)
self.state = self.prng.get_state()
def test_basic(self):
old = self.prng.tomaxint(16)
self.prng.set_state(self.state)
new = self.prng.tomaxint(16)
assert_(np.all(old == new))
def test_gaussian_reset(self):
# Make sure the cached every-other-Gaussian is reset.
old = self.prng.standard_normal(size=3)
self.prng.set_state(self.state)
new = self.prng.standard_normal(size=3)
assert_(np.all(old == new))
def test_gaussian_reset_in_media_res(self):
# When the state is saved with a cached Gaussian, make sure the
# cached Gaussian is restored.
self.prng.standard_normal()
state = self.prng.get_state()
old = self.prng.standard_normal(size=3)
self.prng.set_state(state)
new = self.prng.standard_normal(size=3)
assert_(np.all(old == new))
def test_backwards_compatibility(self):
# Make sure we can accept old state tuples that do not have the
# cached Gaussian value.
old_state = self.state[:-2]
x1 = self.prng.standard_normal(size=16)
self.prng.set_state(old_state)
x2 = self.prng.standard_normal(size=16)
self.prng.set_state(self.state)
x3 = self.prng.standard_normal(size=16)
assert_(np.all(x1 == x2))
assert_(np.all(x1 == x3))
def test_negative_binomial(self):
# Ensure that the negative binomial results take floating point
# arguments without truncation.
self.prng.negative_binomial(0.5, 0.5)
class TestRandomDist(TestCase):
# Make sure the random distributions return the correct values for a
# given seed
def setUp(self):
self.seed = 1234567890
def test_rand(self):
np.random.seed(self.seed)
actual = np.random.rand(3, 2)
desired = np.array([[0.61879477158567997, 0.59162362775974664],
[0.88868358904449662, 0.89165480011560816],
[0.4575674820298663, 0.7781880808593471]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_randn(self):
np.random.seed(self.seed)
actual = np.random.randn(3, 2)
desired = np.array([[1.34016345771863121, 1.73759122771936081],
[1.498988344300628, -0.2286433324536169],
[2.031033998682787, 2.17032494605655257]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_randint(self):
np.random.seed(self.seed)
actual = np.random.randint(-99, 99, size=(3, 2))
desired = np.array([[31, 3],
[-52, 41],
[-48, -66]])
np.testing.assert_array_equal(actual, desired)
def test_random_integers(self):
np.random.seed(self.seed)
actual = np.random.random_integers(-99, 99, size=(3, 2))
desired = np.array([[31, 3],
[-52, 41],
[-48, -66]])
np.testing.assert_array_equal(actual, desired)
def test_random_sample(self):
np.random.seed(self.seed)
actual = np.random.random_sample((3, 2))
desired = np.array([[0.61879477158567997, 0.59162362775974664],
[0.88868358904449662, 0.89165480011560816],
[0.4575674820298663, 0.7781880808593471]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_choice_uniform_replace(self):
np.random.seed(self.seed)
actual = np.random.choice(4, 4)
desired = np.array([2, 3, 2, 3])
np.testing.assert_array_equal(actual, desired)
def test_choice_nonuniform_replace(self):
np.random.seed(self.seed)
actual = np.random.choice(4, 4, p=[0.4, 0.4, 0.1, 0.1])
desired = np.array([1, 1, 2, 2])
np.testing.assert_array_equal(actual, desired)
def test_choice_uniform_noreplace(self):
np.random.seed(self.seed)
actual = np.random.choice(4, 3, replace=False)
desired = np.array([0, 1, 3])
np.testing.assert_array_equal(actual, desired)
def test_choice_nonuniform_noreplace(self):
np.random.seed(self.seed)
actual = np.random.choice(4, 3, replace=False,
p=[0.1, 0.3, 0.5, 0.1])
desired = np.array([2, 3, 1])
np.testing.assert_array_equal(actual, desired)
def test_choice_noninteger(self):
np.random.seed(self.seed)
actual = np.random.choice(['a', 'b', 'c', 'd'], 4)
desired = np.array(['c', 'd', 'c', 'd'])
np.testing.assert_array_equal(actual, desired)
def test_choice_exceptions(self):
sample = np.random.choice
assert_raises(ValueError, sample, -1, 3)
assert_raises(ValueError, sample, 3., 3)
assert_raises(ValueError, sample, [[1, 2], [3, 4]], 3)
assert_raises(ValueError, sample, [], 3)
assert_raises(ValueError, sample, [1, 2, 3, 4], 3,
p=[[0.25, 0.25], [0.25, 0.25]])
assert_raises(ValueError, sample, [1, 2], 3, p=[0.4, 0.4, 0.2])
assert_raises(ValueError, sample, [1, 2], 3, p=[1.1, -0.1])
assert_raises(ValueError, sample, [1, 2], 3, p=[0.4, 0.4])
assert_raises(ValueError, sample, [1, 2, 3], 4, replace=False)
assert_raises(ValueError, sample, [1, 2, 3], 2, replace=False,
p=[1, 0, 0])
def test_choice_return_shape(self):
p = [0.1, 0.9]
# Check scalar
assert_(np.isscalar(np.random.choice(2, replace=True)))
assert_(np.isscalar(np.random.choice(2, replace=False)))
assert_(np.isscalar(np.random.choice(2, replace=True, p=p)))
assert_(np.isscalar(np.random.choice(2, replace=False, p=p)))
assert_(np.isscalar(np.random.choice([1, 2], replace=True)))
assert_(np.random.choice([None], replace=True) is None)
a = np.array([1, 2])
arr = np.empty(1, dtype=object)
arr[0] = a
assert_(np.random.choice(arr, replace=True) is a)
# Check 0-d array
s = tuple()
assert_(not np.isscalar(np.random.choice(2, s, replace=True)))
assert_(not np.isscalar(np.random.choice(2, s, replace=False)))
assert_(not np.isscalar(np.random.choice(2, s, replace=True, p=p)))
assert_(not np.isscalar(np.random.choice(2, s, replace=False, p=p)))
assert_(not np.isscalar(np.random.choice([1, 2], s, replace=True)))
assert_(np.random.choice([None], s, replace=True).ndim == 0)
a = np.array([1, 2])
arr = np.empty(1, dtype=object)
arr[0] = a
assert_(np.random.choice(arr, s, replace=True).item() is a)
# Check multidimensional array
s = (2, 3)
p = [0.1, 0.1, 0.1, 0.1, 0.4, 0.2]
assert_equal(np.random.choice(6, s, replace=True).shape, s)
assert_equal(np.random.choice(6, s, replace=False).shape, s)
assert_equal(np.random.choice(6, s, replace=True, p=p).shape, s)
assert_equal(np.random.choice(6, s, replace=False, p=p).shape, s)
assert_equal(np.random.choice(np.arange(6), s, replace=True).shape, s)
def test_bytes(self):
np.random.seed(self.seed)
actual = np.random.bytes(10)
desired = asbytes('\x82Ui\x9e\xff\x97+Wf\xa5')
np.testing.assert_equal(actual, desired)
def test_shuffle(self):
# Test lists, arrays, and multidimensional versions of both:
for conv in [lambda x: x,
np.asarray,
lambda x: [(i, i) for i in x],
lambda x: np.asarray([(i, i) for i in x])]:
np.random.seed(self.seed)
alist = conv([1, 2, 3, 4, 5, 6, 7, 8, 9, 0])
np.random.shuffle(alist)
actual = alist
desired = conv([0, 1, 9, 6, 2, 4, 5, 8, 7, 3])
np.testing.assert_array_equal(actual, desired)
def test_shuffle_flexible(self):
# gh-4270
arr = [(0, 1), (2, 3)]
dt = np.dtype([('a', np.int32, 1), ('b', np.int32, 1)])
nparr = np.array(arr, dtype=dt)
a, b = nparr[0].copy(), nparr[1].copy()
for i in range(50):
np.random.shuffle(nparr)
assert_(a in nparr)
assert_(b in nparr)
def test_shuffle_masked(self):
# gh-3263
a = np.ma.masked_values(np.reshape(range(20), (5,4)) % 3 - 1, -1)
b = np.ma.masked_values(np.arange(20) % 3 - 1, -1)
ma = np.ma.count_masked(a)
mb = np.ma.count_masked(b)
for i in range(50):
np.random.shuffle(a)
self.assertEqual(ma, np.ma.count_masked(a))
np.random.shuffle(b)
self.assertEqual(mb, np.ma.count_masked(b))
def test_beta(self):
np.random.seed(self.seed)
actual = np.random.beta(.1, .9, size=(3, 2))
desired = np.array(
[[1.45341850513746058e-02, 5.31297615662868145e-04],
[1.85366619058432324e-06, 4.19214516800110563e-03],
[1.58405155108498093e-04, 1.26252891949397652e-04]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_binomial(self):
np.random.seed(self.seed)
actual = np.random.binomial(100.123, .456, size=(3, 2))
desired = np.array([[37, 43],
[42, 48],
[46, 45]])
np.testing.assert_array_equal(actual, desired)
def test_chisquare(self):
np.random.seed(self.seed)
actual = np.random.chisquare(50, size=(3, 2))
desired = np.array([[63.87858175501090585, 68.68407748911370447],
[65.77116116901505904, 47.09686762438974483],
[72.3828403199695174, 74.18408615260374006]])
np.testing.assert_array_almost_equal(actual, desired, decimal=13)
def test_dirichlet(self):
np.random.seed(self.seed)
alpha = np.array([51.72840233779265162, 39.74494232180943953])
actual = np.random.mtrand.dirichlet(alpha, size=(3, 2))
desired = np.array([[[0.54539444573611562, 0.45460555426388438],
[0.62345816822039413, 0.37654183177960598]],
[[0.55206000085785778, 0.44793999914214233],
[0.58964023305154301, 0.41035976694845688]],
[[0.59266909280647828, 0.40733090719352177],
[0.56974431743975207, 0.43025568256024799]]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_dirichlet_size(self):
# gh-3173
p = np.array([51.72840233779265162, 39.74494232180943953])
assert_equal(np.random.dirichlet(p, np.uint32(1)).shape, (1, 2))
assert_equal(np.random.dirichlet(p, np.uint32(1)).shape, (1, 2))
assert_equal(np.random.dirichlet(p, np.uint32(1)).shape, (1, 2))
assert_equal(np.random.dirichlet(p, [2, 2]).shape, (2, 2, 2))
assert_equal(np.random.dirichlet(p, (2, 2)).shape, (2, 2, 2))
assert_equal(np.random.dirichlet(p, np.array((2, 2))).shape, (2, 2, 2))
assert_raises(TypeError, np.random.dirichlet, p, np.float(1))
def test_exponential(self):
np.random.seed(self.seed)
actual = np.random.exponential(1.1234, size=(3, 2))
desired = np.array([[1.08342649775011624, 1.00607889924557314],
[2.46628830085216721, 2.49668106809923884],
[0.68717433461363442, 1.69175666993575979]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_f(self):
np.random.seed(self.seed)
actual = np.random.f(12, 77, size=(3, 2))
desired = np.array([[1.21975394418575878, 1.75135759791559775],
[1.44803115017146489, 1.22108959480396262],
[1.02176975757740629, 1.34431827623300415]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_gamma(self):
np.random.seed(self.seed)
actual = np.random.gamma(5, 3, size=(3, 2))
desired = np.array([[24.60509188649287182, 28.54993563207210627],
[26.13476110204064184, 12.56988482927716078],
[31.71863275789960568, 33.30143302795922011]])
np.testing.assert_array_almost_equal(actual, desired, decimal=14)
def test_geometric(self):
np.random.seed(self.seed)
actual = np.random.geometric(.123456789, size=(3, 2))
desired = np.array([[8, 7],
[17, 17],
[5, 12]])
np.testing.assert_array_equal(actual, desired)
def test_gumbel(self):
np.random.seed(self.seed)
actual = np.random.gumbel(loc=.123456789, scale=2.0, size=(3, 2))
desired = np.array([[0.19591898743416816, 0.34405539668096674],
[-1.4492522252274278, -1.47374816298446865],
[1.10651090478803416, -0.69535848626236174]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_hypergeometric(self):
np.random.seed(self.seed)
actual = np.random.hypergeometric(10.1, 5.5, 14, size=(3, 2))
desired = np.array([[10, 10],
[10, 10],
[9, 9]])
np.testing.assert_array_equal(actual, desired)
# Test nbad = 0
actual = np.random.hypergeometric(5, 0, 3, size=4)
desired = np.array([3, 3, 3, 3])
np.testing.assert_array_equal(actual, desired)
actual = np.random.hypergeometric(15, 0, 12, size=4)
desired = np.array([12, 12, 12, 12])
np.testing.assert_array_equal(actual, desired)
# Test ngood = 0
actual = np.random.hypergeometric(0, 5, 3, size=4)
desired = np.array([0, 0, 0, 0])
np.testing.assert_array_equal(actual, desired)
actual = np.random.hypergeometric(0, 15, 12, size=4)
desired = np.array([0, 0, 0, 0])
np.testing.assert_array_equal(actual, desired)
def test_laplace(self):
np.random.seed(self.seed)
actual = np.random.laplace(loc=.123456789, scale=2.0, size=(3, 2))
desired = np.array([[0.66599721112760157, 0.52829452552221945],
[3.12791959514407125, 3.18202813572992005],
[-0.05391065675859356, 1.74901336242837324]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_logistic(self):
np.random.seed(self.seed)
actual = np.random.logistic(loc=.123456789, scale=2.0, size=(3, 2))
desired = np.array([[1.09232835305011444, 0.8648196662399954],
[4.27818590694950185, 4.33897006346929714],
[-0.21682183359214885, 2.63373365386060332]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_lognormal(self):
np.random.seed(self.seed)
actual = np.random.lognormal(mean=.123456789, sigma=2.0, size=(3, 2))
desired = np.array([[16.50698631688883822, 36.54846706092654784],
[22.67886599981281748, 0.71617561058995771],
[65.72798501792723869, 86.84341601437161273]])
np.testing.assert_array_almost_equal(actual, desired, decimal=13)
def test_logseries(self):
np.random.seed(self.seed)
actual = np.random.logseries(p=.923456789, size=(3, 2))
desired = np.array([[2, 2],
[6, 17],
[3, 6]])
np.testing.assert_array_equal(actual, desired)
def test_multinomial(self):
np.random.seed(self.seed)
actual = np.random.multinomial(20, [1/6.]*6, size=(3, 2))
desired = np.array([[[4, 3, 5, 4, 2, 2],
[5, 2, 8, 2, 2, 1]],
[[3, 4, 3, 6, 0, 4],
[2, 1, 4, 3, 6, 4]],
[[4, 4, 2, 5, 2, 3],
[4, 3, 4, 2, 3, 4]]])
np.testing.assert_array_equal(actual, desired)
def test_multivariate_normal(self):
np.random.seed(self.seed)
mean = (.123456789, 10)
# Hmm... not even symmetric.
cov = [[1, 0], [1, 0]]
size = (3, 2)
actual = np.random.multivariate_normal(mean, cov, size)
desired = np.array([[[-1.47027513018564449, 10.],
[-1.65915081534845532, 10.]],
[[-2.29186329304599745, 10.],
[-1.77505606019580053, 10.]],
[[-0.54970369430044119, 10.],
[0.29768848031692957, 10.]]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
# Check for default size, was raising deprecation warning
actual = np.random.multivariate_normal(mean, cov)
desired = np.array([-0.79441224511977482, 10.])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
# Check that a non-positive-semidefinite covariance raises a warning
mean = [0, 0]
cov = [[1, 1 + 1e-10], [1 + 1e-10, 1]]
assert_warns(RuntimeWarning, np.random.multivariate_normal, mean, cov)
def test_negative_binomial(self):
np.random.seed(self.seed)
actual = np.random.negative_binomial(n=100, p=.12345, size=(3, 2))
desired = np.array([[848, 841],
[892, 611],
[779, 647]])
np.testing.assert_array_equal(actual, desired)
def test_noncentral_chisquare(self):
np.random.seed(self.seed)
actual = np.random.noncentral_chisquare(df=5, nonc=5, size=(3, 2))
desired = np.array([[23.91905354498517511, 13.35324692733826346],
[31.22452661329736401, 16.60047399466177254],
[5.03461598262724586, 17.94973089023519464]])
np.testing.assert_array_almost_equal(actual, desired, decimal=14)
def test_noncentral_f(self):
np.random.seed(self.seed)
actual = np.random.noncentral_f(dfnum=5, dfden=2, nonc=1,
size=(3, 2))
desired = np.array([[1.40598099674926669, 0.34207973179285761],
[3.57715069265772545, 7.92632662577829805],
[0.43741599463544162, 1.1774208752428319]])
np.testing.assert_array_almost_equal(actual, desired, decimal=14)
def test_normal(self):
np.random.seed(self.seed)
actual = np.random.normal(loc=.123456789, scale=2.0, size=(3, 2))
desired = np.array([[2.80378370443726244, 3.59863924443872163],
[3.121433477601256, -0.33382987590723379],
[4.18552478636557357, 4.46410668111310471]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_pareto(self):
np.random.seed(self.seed)
actual = np.random.pareto(a=.123456789, size=(3, 2))
desired = np.array(
[[2.46852460439034849e+03, 1.41286880810518346e+03],
[5.28287797029485181e+07, 6.57720981047328785e+07],
[1.40840323350391515e+02, 1.98390255135251704e+05]])
# For some reason on 32-bit x86 Ubuntu 12.10 the [1, 0] entry in this
# matrix differs by 24 nulps. Discussion:
# http://mail.scipy.org/pipermail/numpy-discussion/2012-September/063801.html
# Consensus is that this is probably some gcc quirk that affects
# rounding but not in any important way, so we just use a looser
# tolerance on this test:
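# (A nulp is one unit in the last place, i.e. one spacing of the floating
# point grid at that magnitude, so nulp=30 is still a tight relative bound.)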
np.testing.assert_array_almost_equal_nulp(actual, desired, nulp=30)
def test_poisson(self):
np.random.seed(self.seed)
actual = np.random.poisson(lam=.123456789, size=(3, 2))
desired = np.array([[0, 0],
[1, 0],
[0, 0]])
np.testing.assert_array_equal(actual, desired)
def test_poisson_exceptions(self):
lambig = np.iinfo('l').max
lamneg = -1
assert_raises(ValueError, np.random.poisson, lamneg)
assert_raises(ValueError, np.random.poisson, [lamneg]*10)
assert_raises(ValueError, np.random.poisson, lambig)
assert_raises(ValueError, np.random.poisson, [lambig]*10)
def test_power(self):
np.random.seed(self.seed)
actual = np.random.power(a=.123456789, size=(3, 2))
desired = np.array([[0.02048932883240791, 0.01424192241128213],
[0.38446073748535298, 0.39499689943484395],
[0.00177699707563439, 0.13115505880863756]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_rayleigh(self):
np.random.seed(self.seed)
actual = np.random.rayleigh(scale=10, size=(3, 2))
desired = np.array([[13.8882496494248393, 13.383318339044731],
[20.95413364294492098, 21.08285015800712614],
[11.06066537006854311, 17.35468505778271009]])
np.testing.assert_array_almost_equal(actual, desired, decimal=14)
def test_standard_cauchy(self):
np.random.seed(self.seed)
actual = np.random.standard_cauchy(size=(3, 2))
desired = np.array([[0.77127660196445336, -6.55601161955910605],
[0.93582023391158309, -2.07479293013759447],
[-4.74601644297011926, 0.18338989290760804]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_standard_exponential(self):
np.random.seed(self.seed)
actual = np.random.standard_exponential(size=(3, 2))
desired = np.array([[0.96441739162374596, 0.89556604882105506],
[2.1953785836319808, 2.22243285392490542],
[0.6116915921431676, 1.50592546727413201]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_standard_gamma(self):
np.random.seed(self.seed)
actual = np.random.standard_gamma(shape=3, size=(3, 2))
desired = np.array([[5.50841531318455058, 6.62953470301903103],
[5.93988484943779227, 2.31044849402133989],
[7.54838614231317084, 8.012756093271868]])
np.testing.assert_array_almost_equal(actual, desired, decimal=14)
def test_standard_normal(self):
np.random.seed(self.seed)
actual = np.random.standard_normal(size=(3, 2))
desired = np.array([[1.34016345771863121, 1.73759122771936081],
[1.498988344300628, -0.2286433324536169],
[2.031033998682787, 2.17032494605655257]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_standard_t(self):
np.random.seed(self.seed)
actual = np.random.standard_t(df=10, size=(3, 2))
desired = np.array([[0.97140611862659965, -0.08830486548450577],
[1.36311143689505321, -0.55317463909867071],
[-0.18473749069684214, 0.61181537341755321]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_triangular(self):
np.random.seed(self.seed)
actual = np.random.triangular(left=5.12, mode=10.23, right=20.34,
size=(3, 2))
desired = np.array([[12.68117178949215784, 12.4129206149193152],
[16.20131377335158263, 16.25692138747600524],
[11.20400690911820263, 14.4978144835829923]])
np.testing.assert_array_almost_equal(actual, desired, decimal=14)
def test_uniform(self):
np.random.seed(self.seed)
actual = np.random.uniform(low=1.23, high=10.54, size=(3, 2))
desired = np.array([[6.99097932346268003, 6.73801597444323974],
[9.50364421400426274, 9.53130618907631089],
[5.48995325769805476, 8.47493103280052118]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_vonmises(self):
np.random.seed(self.seed)
actual = np.random.vonmises(mu=1.23, kappa=1.54, size=(3, 2))
desired = np.array([[2.28567572673902042, 2.89163838442285037],
[0.38198375564286025, 2.57638023113890746],
[1.19153771588353052, 1.83509849681825354]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_vonmises_small(self):
# check for the infinite loop reported in gh-4720
np.random.seed(self.seed)
r = np.random.vonmises(mu=0., kappa=1.1e-8, size=10**6)
np.testing.assert_(np.isfinite(r).all())
def test_wald(self):
np.random.seed(self.seed)
actual = np.random.wald(mean=1.23, scale=1.54, size=(3, 2))
desired = np.array([[3.82935265715889983, 5.13125249184285526],
[0.35045403618358717, 1.50832396872003538],
[0.24124319895843183, 0.22031101461955038]])
np.testing.assert_array_almost_equal(actual, desired, decimal=14)
def test_weibull(self):
np.random.seed(self.seed)
actual = np.random.weibull(a=1.23, size=(3, 2))
desired = np.array([[0.97097342648766727, 0.91422896443565516],
[1.89517770034962929, 1.91414357960479564],
[0.67057783752390987, 1.39494046635066793]])
np.testing.assert_array_almost_equal(actual, desired, decimal=15)
def test_zipf(self):
np.random.seed(self.seed)
actual = np.random.zipf(a=1.23, size=(3, 2))
desired = np.array([[66, 29],
[1, 1],
[3, 13]])
np.testing.assert_array_equal(actual, desired)
class TestThread(object):
# make sure each state produces the same sequence even in threads
def setUp(self):
self.seeds = range(4)
def check_function(self, function, sz):
from threading import Thread
out1 = np.empty((len(self.seeds),) + sz)
out2 = np.empty((len(self.seeds),) + sz)
# threaded generation
t = [Thread(target=function, args=(np.random.RandomState(s), o))
for s, o in zip(self.seeds, out1)]
[x.start() for x in t]
[x.join() for x in t]
# the same generation, done serially
for s, o in zip(self.seeds, out2):
function(np.random.RandomState(s), o)
# these platforms change x87 fpu precision mode in threads
if (np.intp().dtype.itemsize == 4 and
(sys.platform == "win32" or
sys.platform.startswith("gnukfreebsd"))):
np.testing.assert_array_almost_equal(out1, out2)
else:
np.testing.assert_array_equal(out1, out2)
def test_normal(self):
def gen_random(state, out):
out[...] = state.normal(size=10000)
self.check_function(gen_random, sz=(10000,))
def test_exp(self):
def gen_random(state, out):
out[...] = state.exponential(scale=np.ones((100, 1000)))
self.check_function(gen_random, sz=(100, 1000))
def test_multinomial(self):
def gen_random(state, out):
out[...] = state.multinomial(10, [1/6.]*6, size=10000)
self.check_function(gen_random, sz=(10000,6))
if __name__ == "__main__":
run_module_suite()
| apache-2.0 |
bcoca/ansible | lib/ansible/plugins/lookup/template.py | 15 | 6037 |
# Copyright: (c) 2012, Michael DeHaan <[email protected]>
# Copyright: (c) 2012-17, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
name: template
author: Michael DeHaan
version_added: "0.9"
short_description: retrieve contents of file after templating with Jinja2
description:
- Returns a list of strings; for each template in the list of templates you pass in, returns a string containing the results of processing that template.
options:
_terms:
description: list of files to template
convert_data:
type: bool
description:
- Whether to convert YAML into data. If False, strings that are YAML will be left untouched.
- Mutually exclusive with the jinja2_native option.
variable_start_string:
description: The string marking the beginning of a print statement.
default: '{{'
version_added: '2.8'
type: str
variable_end_string:
description: The string marking the end of a print statement.
default: '}}'
version_added: '2.8'
type: str
jinja2_native:
description:
- Controls whether to use Jinja2 native types.
- It is off by default even if global jinja2_native is True.
- Has no effect if global jinja2_native is False.
- This offers more flexibility than the template module which does not use Jinja2 native types at all.
- Mutually exclusive with the convert_data option.
default: False
version_added: '2.11'
type: bool
template_vars:
description: A dictionary, the keys become additional variables available for templating.
default: {}
version_added: '2.3'
type: dict
"""
EXAMPLES = """
- name: show templating results
debug:
msg: "{{ lookup('template', './some_template.j2') }}"
- name: show templating results with different variable start and end string
debug:
msg: "{{ lookup('template', './some_template.j2', variable_start_string='[%', variable_end_string='%]') }}"
"""
RETURN = """
_raw:
description: file(s) content after templating
type: list
elements: raw
"""
from copy import deepcopy
import os
from ansible.errors import AnsibleError
from ansible.plugins.lookup import LookupBase
from ansible.module_utils._text import to_bytes, to_text
from ansible.template import generate_ansible_template_vars, AnsibleEnvironment, USE_JINJA2_NATIVE
from ansible.utils.display import Display
if USE_JINJA2_NATIVE:
from ansible.utils.native_jinja import NativeJinjaText
display = Display()
class LookupModule(LookupBase):
def run(self, terms, variables, **kwargs):
ret = []
self.set_options(var_options=variables, direct=kwargs)
# capture options
convert_data_p = self.get_option('convert_data')
lookup_template_vars = self.get_option('template_vars')
jinja2_native = self.get_option('jinja2_native')
variable_start_string = self.get_option('variable_start_string')
variable_end_string = self.get_option('variable_end_string')
if USE_JINJA2_NATIVE and not jinja2_native:
templar = self._templar.copy_with_new_env(environment_class=AnsibleEnvironment)
else:
templar = self._templar
for term in terms:
display.debug("File lookup term: %s" % term)
lookupfile = self.find_file_in_search_path(variables, 'templates', term)
display.vvvv("File lookup using %s as file" % lookupfile)
if lookupfile:
b_template_data, show_data = self._loader._get_file_contents(lookupfile)
template_data = to_text(b_template_data, errors='surrogate_or_strict')
# set jinja2 internal search path for includes
searchpath = variables.get('ansible_search_path', [])
if searchpath:
# our search paths aren't actually the proper ones for jinja includes.
# We want to search into the 'templates' subdir of each search path in
# addition to our original search paths.
newsearchpath = []
for p in searchpath:
newsearchpath.append(os.path.join(p, 'templates'))
newsearchpath.append(p)
searchpath = newsearchpath
searchpath.insert(0, os.path.dirname(lookupfile))
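# For example (illustrative paths): a searchpath of ['/play', '/role'] with
# lookupfile '/role/templates/foo.j2' ends up as:
# ['/role/templates', '/play/templates', '/play', '/role/templates', '/role']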
# The template will have access to all existing variables,
# plus some added by ansible (e.g., template_{path,mtime}),
# plus anything passed to the lookup with the template_vars=
# argument.
vars = deepcopy(variables)
vars.update(generate_ansible_template_vars(term, lookupfile))
vars.update(lookup_template_vars)
with templar.set_temporary_context(variable_start_string=variable_start_string,
variable_end_string=variable_end_string,
available_variables=vars, searchpath=searchpath):
res = templar.template(template_data, preserve_trailing_newlines=True,
convert_data=convert_data_p, escape_backslashes=False)
if USE_JINJA2_NATIVE and not jinja2_native:
# jinja2_native is true globally but off for the lookup, we need this text
# not to be processed by literal_eval anywhere in Ansible
res = NativeJinjaText(res)
ret.append(res)
else:
raise AnsibleError("the template file %s could not be found for the lookup" % term)
return ret
| gpl-3.0 |
jbadson/render_controller | python/test/test_controller.py | 1 | 25532 |
#!/usr/bin/env python3
import pytest
from unittest import mock
from importlib import reload
import rendercontroller.controller
from rendercontroller.exceptions import (
JobNotFoundError,
JobStatusError,
NodeNotFoundError,
)
config_test_dict = {
"string_val": "val1",
"int_val": 2,
"float_val": 3.5,
"bool_val": True,
}
test_nodes = ["node1", "node2", "node3", "node4"]
# Simulates RenderServer.renderjobs
renderdata = {
"7f6b127663af400fa43ddc52c8bdeeb1": {
"_id": 4356967728,
"complist": [],
"compstatus": {
"borg1": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"borg2": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"borg3": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"borg4": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"borg5": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"conundrum": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"eldiente": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"grob1": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"grob2": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"grob3": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"grob4": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"grob5": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"grob6": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"hex1": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"hex2": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"hex3": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"lindsey": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"localhost": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"paradox": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
},
"endframe": 4,
"extraframes": [],
"path": "/Users/jim/Downloads/test_render/test_render.blend",
"priority": "Normal",
"progress": 0.0,
"queuetime": 1544986156.003638,
"render_engine": "blend",
"render_params": None,
"startframe": 1,
"starttime": 1544986597.73921,
"status": "Stopped",
"stoptime": 1544986599.43864,
"times": (1.699430227279663, 0.42485755681991577, 0),
"totalframes": [0, 0, 0, 0],
},
"9003067201194900903b257115df33bd": {
"_id": 4357090216,
"complist": [
"localhost",
"hex1",
"hex2",
"hex3",
"borg1",
"borg2",
"borg3",
"borg4",
"borg5",
"grob1",
"grob2",
"grob3",
"grob4",
"grob5",
"grob6",
"eldiente",
"lindsey",
"conundrum",
"paradox",
],
"compstatus": {
"borg1": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"borg2": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"borg3": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"borg4": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"borg5": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"conundrum": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"eldiente": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"grob1": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"grob2": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"grob3": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"grob4": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"grob5": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"grob6": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"hex1": {
"active": False,
"error": "Broken pipe",
"frame": 2,
"pid": None,
"progress": 0.0,
"timer": 1544986615.439296,
},
"hex2": {
"active": False,
"error": "Broken pipe",
"frame": 3,
"pid": None,
"progress": 0.0,
"timer": 1544986615.451592,
},
"hex3": {
"active": True,
"error": None,
"frame": 1,
"pid": None,
"progress": 0.0,
"timer": 1544986647.636271,
},
"lindsey": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"localhost": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
"paradox": {
"active": False,
"error": None,
"frame": None,
"pid": None,
"progress": 0.0,
"timer": None,
},
},
"endframe": 3,
"extraframes": [],
"path": "/Users/jim/Downloads/test_render/test_render_slow.blend",
"priority": "Normal",
"progress": 0.0,
"queuetime": 1544985625.618685,
"render_engine": "blend",
"render_params": {"scene": "TestScene"},
"startframe": 1,
"starttime": 1544986615.412951,
"status": "Stopped",
"stoptime": 1544986647.6129642,
"times": (32.200013160705566, 10.73333772023519, 0),
"totalframes": [0, 0, 0],
},
}
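# Wrap each record in a MagicMock whose get_attrs() returns the raw fixture
# data, mimicking the interface of RenderServer.renderjobs entries.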
renderjobs = {}
for key, data in renderdata.items():
job = mock.MagicMock()
job.get_attrs.return_value = data
renderjobs[key] = job
summary = [
{
"file_path": "/Users/jim/Downloads/test_render/test_render_slow.blend",
"id": "9003067201194900903b257115df33bd",
"progress": 0.0,
"status": "Stopped",
"time_elapsed": 32.200013160705566,
"time_remaining": 0,
"time_created": 1544985625.618685,
},
{
"file_path": "/Users/jim/Downloads/test_render/test_render.blend",
"id": "7f6b127663af400fa43ddc52c8bdeeb1",
"progress": 0.0,
"status": "Stopped",
"time_elapsed": 1.699430227279663,
"time_remaining": 0,
"time_created": 1544986156.003638,
},
]
status_33bd = {
"end_frame": 3,
"file_path": "/Users/jim/Downloads/test_render/test_render_slow.blend",
"id": "9003067201194900903b257115df33bd",
"node_status": {
"borg1": {"enabled": True, "frame": None, "progress": 0.0, "rendering": False},
"borg2": {"enabled": True, "frame": None, "progress": 0.0, "rendering": False},
"borg3": {"enabled": True, "frame": None, "progress": 0.0, "rendering": False},
"borg4": {"enabled": True, "frame": None, "progress": 0.0, "rendering": False},
"borg5": {"enabled": True, "frame": None, "progress": 0.0, "rendering": False},
"conundrum": {
"enabled": True,
"frame": None,
"progress": 0.0,
"rendering": False,
},
"eldiente": {
"enabled": True,
"frame": None,
"progress": 0.0,
"rendering": False,
},
"grob1": {"enabled": True, "frame": None, "progress": 0.0, "rendering": False},
"grob2": {"enabled": True, "frame": None, "progress": 0.0, "rendering": False},
"grob3": {"enabled": True, "frame": None, "progress": 0.0, "rendering": False},
"grob4": {"enabled": True, "frame": None, "progress": 0.0, "rendering": False},
"grob5": {"enabled": True, "frame": None, "progress": 0.0, "rendering": False},
"grob6": {"enabled": True, "frame": None, "progress": 0.0, "rendering": False},
"hex1": {"enabled": True, "frame": 2, "progress": 0.0, "rendering": False},
"hex2": {"enabled": True, "frame": 3, "progress": 0.0, "rendering": False},
"hex3": {"enabled": True, "frame": 1, "progress": 0.0, "rendering": True},
"lindsey": {
"enabled": True,
"frame": None,
"progress": 0.0,
"rendering": False,
},
"localhost": {
"enabled": True,
"frame": None,
"progress": 0.0,
"rendering": False,
},
"paradox": {
"enabled": True,
"frame": None,
"progress": 0.0,
"rendering": False,
},
},
"progress": 0.0,
"render_engine": "blend",
"render_params": {"scene": "TestScene"},
"start_frame": 1,
"status": "Stopped",
"time_avg": 10.73333772023519,
"time_elapsed": 32.200013160705566,
"time_remaining": 0,
"time_created": 1544985625.618685,
}
status_eeb1 = {
"end_frame": 4,
"file_path": "/Users/jim/Downloads/test_render/test_render.blend",
"id": "7f6b127663af400fa43ddc52c8bdeeb1",
"node_status": {
"borg1": {"enabled": False, "frame": None, "progress": 0.0, "rendering": False},
"borg2": {"enabled": False, "frame": None, "progress": 0.0, "rendering": False},
"borg3": {"enabled": False, "frame": None, "progress": 0.0, "rendering": False},
"borg4": {"enabled": False, "frame": None, "progress": 0.0, "rendering": False},
"borg5": {"enabled": False, "frame": None, "progress": 0.0, "rendering": False},
"conundrum": {
"enabled": False,
"frame": None,
"progress": 0.0,
"rendering": False,
},
"eldiente": {
"enabled": False,
"frame": None,
"progress": 0.0,
"rendering": False,
},
"grob1": {"enabled": False, "frame": None, "progress": 0.0, "rendering": False},
"grob2": {"enabled": False, "frame": None, "progress": 0.0, "rendering": False},
"grob3": {"enabled": False, "frame": None, "progress": 0.0, "rendering": False},
"grob4": {"enabled": False, "frame": None, "progress": 0.0, "rendering": False},
"grob5": {"enabled": False, "frame": None, "progress": 0.0, "rendering": False},
"grob6": {"enabled": False, "frame": None, "progress": 0.0, "rendering": False},
"hex1": {"enabled": False, "frame": None, "progress": 0.0, "rendering": False},
"hex2": {"enabled": False, "frame": None, "progress": 0.0, "rendering": False},
"hex3": {"enabled": False, "frame": None, "progress": 0.0, "rendering": False},
"lindsey": {
"enabled": False,
"frame": None,
"progress": 0.0,
"rendering": False,
},
"localhost": {
"enabled": False,
"frame": None,
"progress": 0.0,
"rendering": False,
},
"paradox": {
"enabled": False,
"frame": None,
"progress": 0.0,
"rendering": False,
},
},
"progress": 0.0,
"render_engine": "blend",
"render_params": None,
"start_frame": 1,
"status": "Stopped",
"time_avg": 0.42485755681991577,
"time_elapsed": 1.699430227279663,
"time_remaining": 0,
"time_created": 1544986156.003638,
}
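# Fixture: a RenderController wired to mocked Config and job modules, so the
# tests exercise controller logic without starting a real render server.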
@pytest.fixture(scope="function")
@mock.patch("rendercontroller.controller.job")
@mock.patch("rendercontroller.controller.Config")
def controller_fix(conf, job):
return rendercontroller.controller.RenderController(conf)
def test_config_init():
with pytest.raises(RuntimeError):
# Make sure we can't instantiate
rendercontroller.controller.Config()
def test_config_set_get():
conf = rendercontroller.controller.Config
with pytest.raises(RuntimeError):
conf() # Make sure we can't instantiate
conf.set_all(config_test_dict)
# Test get method
assert conf.get("string_val") == config_test_dict["string_val"]
assert conf.get("int_val") == config_test_dict["int_val"]
assert conf.get("float_val") == config_test_dict["float_val"]
assert conf.get("bool_val") == config_test_dict["bool_val"]
# Test subscript method
assert conf.string_val == config_test_dict["string_val"]
assert conf.int_val == config_test_dict["int_val"]
assert conf.float_val == config_test_dict["float_val"]
assert conf.bool_val == config_test_dict["bool_val"]
# Singleton pollutes the other tests if we don't reload
reload(rendercontroller.controller)
def test_config_bad_key():
conf = rendercontroller.controller.Config
with pytest.raises(AttributeError):
conf.get("bogus_val")
with pytest.raises(AttributeError):
assert conf.bogus_val is True
assert conf.get("bogus_val", default="something") == "something"
@mock.patch("rendercontroller.controller.job")
@mock.patch("rendercontroller.controller.Config")
def test_controller_init(conf, job):
job.RenderServer.assert_not_called()
rendercontroller.controller.RenderController(None)
assert job.CONFIG is None
job.RenderServer.assert_called_once()
@mock.patch("rendercontroller.controller.job")
@mock.patch("rendercontroller.controller.Config")
def test_controller_render_nodes(conf, job):
conf.render_nodes = test_nodes
rc = rendercontroller.controller.RenderController(conf)
assert rc.render_nodes == test_nodes
@mock.patch("rendercontroller.controller.job")
@mock.patch("rendercontroller.controller.Config")
def test_controller_autostart(conf, job):
conf.autostart = True
rc = rendercontroller.controller.RenderController(conf)
assert rc.autostart is True
rc.disable_autostart()
assert rc.autostart is False
rc.enable_autostart()
assert rc.autostart is True
@mock.patch("rendercontroller.controller.uuid4")
def test_controller_new_job_no_params(uuid, controller_fix):
path = "/tmp/testfile.blend"
start = 1
end = 10
engine = "blend"
nodes = test_nodes[0:2]
# Override uuid4 so we can verify calls all the way through
job_id = "2973f9954570424d943afdedee3525b7"
uuid.return_value.hex = job_id
controller_fix.server.enqueue.return_value = job_id
res = controller_fix.new_job(
path=path, start_frame=start, end_frame=end, render_engine=engine, nodes=nodes
)
controller_fix.server.enqueue.assert_called_with(
{
"index": job_id,
"path": path,
"startframe": start,
"endframe": end,
"extraframes": None,
"render_engine": engine,
"complist": nodes,
"render_params": None,
}
)
assert res == job_id
@mock.patch("rendercontroller.controller.uuid4")
def test_controller_new_job_with_params(uuid, controller_fix):
path = "/tmp/testfile.blend"
start = 1
end = 10
engine = "blend"
nodes = test_nodes[0:2]
render_params = {"scene": "NewScene"}
# Override uuid4 so we can verify calls all the way through
job_id = "2973f9954570424d943afdedee3525b7"
uuid.return_value.hex = job_id
controller_fix.server.enqueue.return_value = job_id
res = controller_fix.new_job(
path=path,
start_frame=start,
end_frame=end,
render_engine=engine,
nodes=nodes,
render_params=render_params,
)
controller_fix.server.enqueue.assert_called_with(
{
"index": job_id,
"path": path,
"startframe": start,
"endframe": end,
"extraframes": None,
"render_engine": engine,
"complist": nodes,
"render_params": {"scene": "NewScene"},
}
)
assert res == job_id
def test_controller_start(controller_fix):
controller_fix.server.start_render.assert_not_called()
controller_fix.start("testjob")
controller_fix.server.start_render.assert_called_once()
# Test job not found
controller_fix.server.start_render.side_effect = KeyError("testjob2")
with pytest.raises(JobNotFoundError):
controller_fix.start("testjob2")
def test_controller_stop(controller_fix):
controller_fix.server.kill_render.assert_not_called()
controller_fix.stop("testjob")
controller_fix.server.kill_render.assert_called_once()
# Test default kill arg
controller_fix.server.kill_render.assert_called_with("testjob", True)
# Test kill=False
controller_fix.stop("testjob", kill=False)
controller_fix.server.kill_render.assert_called_with("testjob", False)
# Test job not found
controller_fix.server.kill_render.side_effect = KeyError("testjob2")
with pytest.raises(JobNotFoundError):
controller_fix.stop("testjob2")
def test_controller_enqueue(controller_fix):
controller_fix.server.resume_render.assert_not_called()
controller_fix.enqueue("testjob")
controller_fix.server.resume_render.assert_called_once()
controller_fix.server.resume_render.assert_called_with("testjob", startnow=False)
# Test job not found
controller_fix.server.resume_render.side_effect = KeyError("testjob2")
with pytest.raises(JobNotFoundError):
controller_fix.enqueue("testjob2")
def test_controller_delete(controller_fix):
controller_fix.server.clear_job.assert_not_called()
controller_fix.server.get_status.return_value = "Stopped"
controller_fix.delete("testjob")
controller_fix.server.clear_job.assert_called_once()
# Test rendering job
controller_fix.server.get_status.return_value = "Rendering"
with pytest.raises(JobStatusError):
controller_fix.delete("testjob")
controller_fix.server.get_status.return_value = "Stopped"
# Test job not found
controller_fix.server.clear_job.side_effect = KeyError("testjob2")
with pytest.raises(JobNotFoundError):
controller_fix.delete("testjob2")
@mock.patch("rendercontroller.job.CONFIG")
def test_controller_enable_node(conf, controller_fix):
conf.render_nodes = test_nodes
controller_fix.server.renderjobs = {"testjob": mock.MagicMock()}
controller_fix.server.renderjobs["testjob"].add_computer.assert_not_called()
controller_fix.enable_node("testjob", "node1")
controller_fix.server.renderjobs["testjob"].add_computer.assert_called_with("node1")
# Test node not found
with pytest.raises(NodeNotFoundError):
controller_fix.enable_node("testjob", "node99")
# Test job not found
controller_fix.server.clear_job.side_effect = KeyError("testjob2")
with pytest.raises(JobNotFoundError):
controller_fix.delete("testjob2")
@mock.patch("rendercontroller.job.CONFIG")
def test_controller_disable_node(conf, controller_fix):
conf.render_nodes = test_nodes
controller_fix.server.renderjobs = {"testjob": mock.MagicMock()}
controller_fix.server.renderjobs["testjob"].remove_computer.assert_not_called()
controller_fix.disable_node("testjob", "node1")
controller_fix.server.renderjobs["testjob"].remove_computer.assert_called_with(
"node1"
)
# Test node not found
with pytest.raises(NodeNotFoundError):
controller_fix.disable_node("testjob", "node99")
# Test job not found
controller_fix.server.clear_job.side_effect = KeyError("testjob2")
with pytest.raises(JobNotFoundError):
controller_fix.delete("testjob2")
def test_controller_get_summary(controller_fix):
controller_fix.server.renderjobs = renderjobs
ret = controller_fix.get_summary()
# Messy because dict ordering might be different
for i in range(len(ret)):
assert sorted(ret[i]) == sorted(summary[i])
def test_controller_get_job_status(controller_fix):
controller_fix.server.renderjobs = renderjobs
ret = controller_fix.get_job_status("9003067201194900903b257115df33bd")
assert sorted(ret) == sorted(status_33bd)
def test_controller_get_status(controller_fix):
controller_fix.server.renderjobs = renderjobs
ret = controller_fix.get_status()
statuses = [status_eeb1, status_33bd]
for i in range(len(ret)):
assert sorted(ret[i]) == sorted(statuses[i])
def test_controller_shutdown(controller_fix):
controller_fix.server.shutdown_server.assert_not_called()
controller_fix.shutdown()
controller_fix.server.shutdown_server.assert_called_once()
| gpl-3.0 |
SerhiiStets/Voice-assistant | friday.py | 1 | 3098 |
#!/usr/bin/env python3
# Requires Pygame, gtts, pyttsx
# For windows: Speech Recognition, PyAudio
import os.path
import random
import webbrowser
import socket
import pyttsx
import logging
import speech_recognition as sr
from sys import platform
from time import sleep, strftime
from pygame import mixer, time
from gtts import gTTS
def internet_connection(host="8.8.8.8", port=53, timeout=3):
"""
Host: 8.8.8.8 (google-public-dns-a.google.com)
OpenPort: 53/tcp
Service: domain (DNS/TCP)
"""
try:
socket.setdefaulttimeout(timeout)
socket.socket(socket.AF_INET, socket.SOCK_STREAM).connect((host, port))
return True
except Exception as e:
logging.exception(e)
return False
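# speak() below calls internet_connection() to decide between generating new
# audio with gTTS (requires the network) and the offline pyttsx fallback.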
def greetings():
day_time = int(strftime('%H'))
if random.random() < 0.5:
speak("At your service sir")
else:
if day_time < 12:
speak('Good morning')
elif 12 <= day_time < 18:
speak('Good afternoon')
else:
speak('Good evening')
def speak(audio_string):
voice_path = "lib/voice/"
mixer.pre_init(24000, -16, 1, 512)
def speak_no_internet(audio):
engine = pyttsx.init()
engine.say(audio)
engine.runAndWait()
def new_audio(name, audio):
tts = gTTS(text=audio, lang='en-us', slow=False)
tts.save(voice_path + name)
speak(audio)
audio_name = audio_string.lower().replace(" ", "_") + ".mp3"
if os.path.isfile(voice_path + audio_name):
mixer.init()
mixer.music.load(voice_path + audio_name)
mixer.music.play()
while mixer.music.get_busy():
time.Clock().tick(10)
print(audio_string)
else:
if internet_connection():
new_audio(audio_name, audio_string)
else:
speak_no_internet(audio_string)
def listen(recognizer, audio):
try:
# recognize once and reuse the result; each call hits the Google API
result = recognizer.recognize_google(audio)
print(result.capitalize())
brain(result)
except sr.UnknownValueError:
print("I don't understand")
except sr.RequestError as e:
print("Could not request results from Google Speech Recognition service; {0}".format(e))
def brain(request):
if "where is" in request.lower():
place = request.replace("?", "").split(" ", 2)
if len(request.split()) == 3:
speak("Here's what i found")
webbrowser.open("https://www.google.com/maps/place/" + place[2] + "/&")
else:
speak("Please enter place")
elif "what time is it" in request.lower():
print(strftime('%H:%M'))
def windows():
r = sr.Recognizer()
m = sr.Microphone()
with m as source:
r.adjust_for_ambient_noise(source)
r.listen_in_background(m, listen)
while True:
sleep(0.1)
def linux():
while True:
brain(input())
def main():
sleep(1)
greetings()
if platform.startswith('linux'):
linux()
elif platform.startswith('win32'):
windows()
if __name__ == '__main__':
main()
| mit |
bev-a-tron/pledgeservice | testlib/webob/request.py | 30 | 59995 |
import binascii
import cgi
import io
import os
import re
import sys
import tempfile
import mimetypes
try:
import simplejson as json
except ImportError:
import json
import warnings
from webob.acceptparse import (
AcceptLanguage,
AcceptCharset,
MIMEAccept,
MIMENilAccept,
NoAccept,
accept_property,
)
from webob.cachecontrol import (
CacheControl,
serialize_cache_control,
)
from webob.compat import (
PY3,
bytes_,
integer_types,
native_,
parse_qsl_text,
reraise,
text_type,
url_encode,
url_quote,
url_unquote,
quote_plus,
urlparse,
)
from webob.cookies import RequestCookies
from webob.descriptors import (
CHARSET_RE,
SCHEME_RE,
converter,
converter_date,
environ_getter,
environ_decoder,
parse_auth,
parse_int,
parse_int_safe,
parse_range,
serialize_auth,
serialize_if_range,
serialize_int,
serialize_range,
upath_property,
deprecated_property,
)
from webob.etag import (
IfRange,
AnyETag,
NoETag,
etag_property,
)
from webob.headers import EnvironHeaders
from webob.multidict import (
NestedMultiDict,
MultiDict,
NoVars,
GetDict,
)
from webob.util import warn_deprecation
__all__ = ['BaseRequest', 'Request', 'LegacyRequest']
class _NoDefault:
def __repr__(self):
return '(No Default)'
NoDefault = _NoDefault()
PATH_SAFE = '/:@&+$,'
http_method_probably_has_body = dict.fromkeys(
('GET', 'HEAD', 'DELETE', 'TRACE'), False)
http_method_probably_has_body.update(
dict.fromkeys(('POST', 'PUT', 'PATCH'), True))
_LATIN_ENCODINGS = (
'ascii', 'latin-1', 'latin', 'latin_1', 'l1', 'latin1',
'iso-8859-1', 'iso8859_1', 'iso_8859_1', 'iso8859', '8859',
)
class BaseRequest(object):
## The limit after which request bodies should be stored on disk
## if they are read in (below this limit, the request body is stored
## in memory):
request_body_tempfile_limit = 10*1024
_charset = None
def __init__(self, environ, charset=None, unicode_errors=None,
decode_param_names=None, **kw):
if type(environ) is not dict:
raise TypeError(
"WSGI environ must be a dict; you passed %r" % (environ,))
if unicode_errors is not None:
warnings.warn(
"You unicode_errors=%r to the Request constructor. Passing a "
"``unicode_errors`` value to the Request is no longer "
"supported in WebOb 1.2+. This value has been ignored " % (
unicode_errors,),
DeprecationWarning
)
if decode_param_names is not None:
warnings.warn(
"You passed decode_param_names=%r to the Request constructor. "
"Passing a ``decode_param_names`` value to the Request "
"is no longer supported in WebOb 1.2+. This value has "
"been ignored " % (decode_param_names,),
DeprecationWarning
)
if not _is_utf8(charset):
raise DeprecationWarning(
"You passed charset=%r to the Request constructor. As of "
"WebOb 1.2, if your application needs a non-UTF-8 request "
"charset, please construct the request without a charset or "
"with a charset of 'None', then use ``req = "
"req.decode(charset)``" % charset
)
d = self.__dict__
d['environ'] = environ
if kw:
cls = self.__class__
if 'method' in kw:
# set method first, because .body setters
# depend on it for checks
self.method = kw.pop('method')
for name, value in kw.items():
if not hasattr(cls, name):
raise TypeError(
"Unexpected keyword: %s=%r" % (name, value))
setattr(self, name, value)
if PY3: # pragma: no cover
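# Under PEP 3333, Python 3 WSGI environ values are native strings holding
# bytes decoded as latin-1, so any other request charset has to be recovered
# by re-encoding through latin-1 first (see encget below).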
def encget(self, key, default=NoDefault, encattr=None):
val = self.environ.get(key, default)
if val is NoDefault:
raise KeyError(key)
if val is default:
return default
if not encattr:
return val
encoding = getattr(self, encattr)
if encoding in _LATIN_ENCODINGS: # shortcut
return val
return bytes_(val, 'latin-1').decode(encoding)
else:
def encget(self, key, default=NoDefault, encattr=None):
val = self.environ.get(key, default)
if val is NoDefault:
raise KeyError(key)
if val is default:
return default
if encattr is None:
return val
encoding = getattr(self, encattr)
return val.decode(encoding)
def encset(self, key, val, encattr=None):
if encattr:
encoding = getattr(self, encattr)
else:
encoding = 'ascii'
if PY3: # pragma: no cover
self.environ[key] = bytes_(val, encoding).decode('latin-1')
else:
self.environ[key] = bytes_(val, encoding)
@property
def charset(self):
if self._charset is None:
charset = detect_charset(self._content_type_raw)
if _is_utf8(charset):
charset = 'UTF-8'
self._charset = charset
return self._charset
@charset.setter
def charset(self, charset):
if _is_utf8(charset):
charset = 'UTF-8'
if charset != self.charset:
raise DeprecationWarning("Use req = req.decode(%r)" % charset)
def decode(self, charset=None, errors='strict'):
charset = charset or self.charset
if charset == 'UTF-8':
return self
# cookies and path are always utf-8
t = Transcoder(charset, errors)
new_content_type = CHARSET_RE.sub('; charset="UTF-8"',
self._content_type_raw)
content_type = self.content_type
r = self.__class__(
self.environ.copy(),
query_string=t.transcode_query(self.query_string),
content_type=new_content_type,
)
if content_type == 'application/x-www-form-urlencoded':
r.body = bytes_(t.transcode_query(native_(r.body)))
return r
elif content_type != 'multipart/form-data':
return r
fs_environ = self.environ.copy()
fs_environ.setdefault('CONTENT_LENGTH', '0')
fs_environ['QUERY_STRING'] = ''
if PY3: # pragma: no cover
fs = cgi.FieldStorage(fp=self.body_file,
environ=fs_environ,
keep_blank_values=True,
encoding=charset,
errors=errors)
else:
fs = cgi.FieldStorage(fp=self.body_file,
environ=fs_environ,
keep_blank_values=True)
fout = t.transcode_fs(fs, r._content_type_raw)
# this order is important, because setting body_file
# resets content_length
r.body_file = fout
r.content_length = fout.tell()
fout.seek(0)
return r
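# Illustrative use of decode() (hypothetical request): for a form POST whose
# declared charset is latin-1,
#     req = req.decode('latin-1')
# returns an equivalent request transcoded to UTF-8, so downstream code can
# assume a single charset.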
# this is necessary for correct warnings depth for both
# BaseRequest and Request (due to AdhocAttrMixin.__setattr__)
_setattr_stacklevel = 2
def _body_file__get(self):
"""
Input stream of the request (wsgi.input).
Setting this property resets the content_length and seekable flag
(unlike setting req.body_file_raw).
"""
if not self.is_body_readable:
return io.BytesIO()
r = self.body_file_raw
clen = self.content_length
if not self.is_body_seekable and clen is not None:
# we need to wrap input in LimitedLengthFile
# but we have to cache the instance as well
# otherwise this would stop working
# (.remaining counter would reset between calls):
# req.body_file.read(100)
# req.body_file.read(100)
env = self.environ
wrapped, raw = env.get('webob._body_file', (0,0))
if raw is not r:
wrapped = LimitedLengthFile(r, clen)
wrapped = io.BufferedReader(wrapped)
env['webob._body_file'] = wrapped, r
r = wrapped
return r
def _body_file__set(self, value):
if isinstance(value, bytes):
warn_deprecation(
"Please use req.body = b'bytes' or req.body_file = fileobj",
'1.2',
self._setattr_stacklevel
)
self.content_length = None
self.body_file_raw = value
self.is_body_seekable = False
self.is_body_readable = True
def _body_file__del(self):
self.body = b''
body_file = property(_body_file__get,
_body_file__set,
_body_file__del,
doc=_body_file__get.__doc__)
body_file_raw = environ_getter('wsgi.input')
@property
def body_file_seekable(self):
"""
Get the body of the request (wsgi.input) as a seekable file-like
object. Middleware and routing applications should use this
attribute over .body_file.
If you access this value, CONTENT_LENGTH will also be updated.
"""
if not self.is_body_seekable:
self.make_body_seekable()
return self.body_file_raw
url_encoding = environ_getter('webob.url_encoding', 'UTF-8')
scheme = environ_getter('wsgi.url_scheme')
method = environ_getter('REQUEST_METHOD', 'GET')
http_version = environ_getter('SERVER_PROTOCOL')
content_length = converter(
environ_getter('CONTENT_LENGTH', None, '14.13'),
parse_int_safe, serialize_int, 'int')
remote_user = environ_getter('REMOTE_USER', None)
remote_addr = environ_getter('REMOTE_ADDR', None)
query_string = environ_getter('QUERY_STRING', '')
server_name = environ_getter('SERVER_NAME')
server_port = converter(
environ_getter('SERVER_PORT'),
parse_int, serialize_int, 'int')
script_name = environ_decoder('SCRIPT_NAME', '', encattr='url_encoding')
path_info = environ_decoder('PATH_INFO', encattr='url_encoding')
# bw compat
uscript_name = script_name
upath_info = path_info
_content_type_raw = environ_getter('CONTENT_TYPE', '')
def _content_type__get(self):
"""Return the content type, but leaving off any parameters (like
charset, but also things like the type in ``application/atom+xml;
type=entry``)
If you set this property, you can include parameters, or if
you don't include any parameters in the value then existing
parameters will be preserved.
"""
return self._content_type_raw.split(';', 1)[0]
def _content_type__set(self, value=None):
if value is not None:
value = str(value)
if ';' not in value:
content_type = self._content_type_raw
if ';' in content_type:
value += ';' + content_type.split(';', 1)[1]
self._content_type_raw = value
content_type = property(_content_type__get,
_content_type__set,
_content_type__set,
_content_type__get.__doc__)
_headers = None
def _headers__get(self):
"""
All the request headers as a case-insensitive dictionary-like
object.
"""
if self._headers is None:
self._headers = EnvironHeaders(self.environ)
return self._headers
def _headers__set(self, value):
self.headers.clear()
self.headers.update(value)
headers = property(_headers__get, _headers__set, doc=_headers__get.__doc__)
@property
def client_addr(self):
"""
The effective client IP address as a string. If the
``HTTP_X_FORWARDED_FOR`` header exists in the WSGI environ, this
attribute returns the client IP address present in that header
(e.g. if the header value is ``192.168.1.1, 192.168.1.2``, the value
will be ``192.168.1.1``). If no ``HTTP_X_FORWARDED_FOR`` header is
present in the environ at all, this attribute will return the value
of the ``REMOTE_ADDR`` header. If the ``REMOTE_ADDR`` header is
unset, this attribute will return the value ``None``.
.. warning::
It is possible for user agents to put someone else's IP or just
any string in ``HTTP_X_FORWARDED_FOR`` as it is a normal HTTP
header. Forward proxies can also provide incorrect values (private
IP addresses etc). You cannot "blindly" trust the result of this
method to provide you with valid data unless you're certain that
``HTTP_X_FORWARDED_FOR`` has the correct values. The WSGI server
must be behind a trusted proxy for this to be true.
"""
e = self.environ
xff = e.get('HTTP_X_FORWARDED_FOR')
if xff is not None:
addr = xff.split(',')[0].strip()
else:
addr = e.get('REMOTE_ADDR')
return addr
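    # Illustrative sketch, not part of WebOb itself: how client_addr
    # resolves the address. All names and addresses below are hypothetical.
    #
    #   req = BaseRequest.blank('/')
    #   req.environ['HTTP_X_FORWARDED_FOR'] = '203.0.113.7, 10.0.0.1'
    #   assert req.client_addr == '203.0.113.7'  # first hop in the header
    #   del req.environ['HTTP_X_FORWARDED_FOR']
    #   req.environ['REMOTE_ADDR'] = '192.0.2.9'
    #   assert req.client_addr == '192.0.2.9'    # falls back to REMOTE_ADDR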
@property
def host_port(self):
"""
The effective server port number as a string. If the ``HTTP_HOST``
header exists in the WSGI environ, this attribute returns the port
number present in that header. If the ``HTTP_HOST`` header exists but
contains no explicit port number: if the WSGI url scheme is "https",
this attribute returns "443"; if the WSGI url scheme is "http", this
attribute returns "80". If no ``HTTP_HOST`` header is present in
the environ at all, this attribute will return the value of the
``SERVER_PORT`` header (which is guaranteed to be present).
"""
e = self.environ
host = e.get('HTTP_HOST')
if host is not None:
if ':' in host:
host, port = host.split(':', 1)
else:
url_scheme = e['wsgi.url_scheme']
if url_scheme == 'https':
port = '443'
else:
port = '80'
else:
port = e['SERVER_PORT']
return port
@property
def host_url(self):
"""
The URL through the host (no path)
"""
e = self.environ
scheme = e.get('wsgi.url_scheme')
url = scheme + '://'
host = e.get('HTTP_HOST')
if host is not None:
if ':' in host:
host, port = host.split(':', 1)
else:
port = None
else:
host = e.get('SERVER_NAME')
port = e.get('SERVER_PORT')
if scheme == 'https':
if port == '443':
port = None
elif scheme == 'http':
if port == '80':
port = None
url += host
if port:
url += ':%s' % port
return url
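    # Minimal sketch (hypothetical hosts): host_url elides default ports.
    #
    #   req = BaseRequest.blank('https://example.com:443/a/b?q=1')
    #   assert req.host_url == 'https://example.com'
    #   req = BaseRequest.blank('http://example.com:8080/')
    #   assert req.host_url == 'http://example.com:8080'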
@property
def application_url(self):
"""
The URL including SCRIPT_NAME (no PATH_INFO or query string)
"""
bscript_name = bytes_(self.script_name, self.url_encoding)
return self.host_url + url_quote(bscript_name, PATH_SAFE)
@property
def path_url(self):
"""
The URL including SCRIPT_NAME and PATH_INFO, but not QUERY_STRING
"""
bpath_info = bytes_(self.path_info, self.url_encoding)
return self.application_url + url_quote(bpath_info, PATH_SAFE)
@property
def path(self):
"""
The path of the request, without host or query string
"""
bscript = bytes_(self.script_name, self.url_encoding)
bpath = bytes_(self.path_info, self.url_encoding)
return url_quote(bscript, PATH_SAFE) + url_quote(bpath, PATH_SAFE)
@property
def path_qs(self):
"""
The path of the request, without host but with query string
"""
path = self.path
qs = self.environ.get('QUERY_STRING')
if qs:
path += '?' + qs
return path
@property
def url(self):
"""
The full request URL, including QUERY_STRING
"""
url = self.path_url
qs = self.environ.get('QUERY_STRING')
if qs:
url += '?' + qs
return url
def relative_url(self, other_url, to_application=False):
"""
Resolve other_url relative to the request URL.
If ``to_application`` is True, then resolve it relative to the
URL with only SCRIPT_NAME
"""
if to_application:
url = self.application_url
if not url.endswith('/'):
url += '/'
else:
url = self.path_url
return urlparse.urljoin(url, other_url)
def path_info_pop(self, pattern=None):
"""
'Pops' off the next segment of PATH_INFO, pushing it onto
SCRIPT_NAME, and returning the popped segment. Returns None if
there is nothing left on PATH_INFO.
Does not return ``''`` when there's an empty segment (like
``/path//path``); these segments are just ignored.
The optional ``pattern`` argument is a regexp that the popped
segment must match. If there is no match, no changes are made to
the request and None is returned.
"""
path = self.path_info
if not path:
return None
slashes = ''
while path.startswith('/'):
slashes += '/'
path = path[1:]
idx = path.find('/')
if idx == -1:
idx = len(path)
r = path[:idx]
if pattern is None or re.match(pattern, r):
self.script_name += slashes + r
self.path_info = path[idx:]
return r
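    # Usage sketch (made-up path): each pop moves one segment from
    # PATH_INFO onto SCRIPT_NAME.
    #
    #   req = BaseRequest.blank('/app/users/42')
    #   assert req.path_info_pop() == 'app'    # SCRIPT_NAME == '/app'
    #   assert req.path_info_pop() == 'users'  # SCRIPT_NAME == '/app/users'
    #   assert req.path_info == '/42'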
def path_info_peek(self):
"""
Returns the next segment on PATH_INFO, or None if there is no
next segment. Doesn't modify the environment.
"""
path = self.path_info
if not path:
return None
path = path.lstrip('/')
return path.split('/', 1)[0]
def _urlvars__get(self):
"""
Return any *named* variables matched in the URL.
Takes values from ``environ['wsgiorg.routing_args']``.
Systems like ``routes`` set this value.
"""
if 'paste.urlvars' in self.environ:
return self.environ['paste.urlvars']
elif 'wsgiorg.routing_args' in self.environ:
return self.environ['wsgiorg.routing_args'][1]
else:
result = {}
self.environ['wsgiorg.routing_args'] = ((), result)
return result
def _urlvars__set(self, value):
environ = self.environ
if 'wsgiorg.routing_args' in environ:
environ['wsgiorg.routing_args'] = (
environ['wsgiorg.routing_args'][0], value)
if 'paste.urlvars' in environ:
del environ['paste.urlvars']
elif 'paste.urlvars' in environ:
environ['paste.urlvars'] = value
else:
environ['wsgiorg.routing_args'] = ((), value)
def _urlvars__del(self):
if 'paste.urlvars' in self.environ:
del self.environ['paste.urlvars']
if 'wsgiorg.routing_args' in self.environ:
if not self.environ['wsgiorg.routing_args'][0]:
del self.environ['wsgiorg.routing_args']
else:
self.environ['wsgiorg.routing_args'] = (
self.environ['wsgiorg.routing_args'][0], {})
urlvars = property(_urlvars__get,
_urlvars__set,
_urlvars__del,
doc=_urlvars__get.__doc__)
def _urlargs__get(self):
"""
Return any *positional* variables matched in the URL.
Takes values from ``environ['wsgiorg.routing_args']``.
Systems like ``routes`` set this value.
"""
if 'wsgiorg.routing_args' in self.environ:
return self.environ['wsgiorg.routing_args'][0]
else:
# Since you can't update this value in-place, we don't need
# to set the key in the environment
return ()
def _urlargs__set(self, value):
environ = self.environ
if 'paste.urlvars' in environ:
# Some overlap between this and wsgiorg.routing_args; we need
# wsgiorg.routing_args to make this work
routing_args = (value, environ.pop('paste.urlvars'))
elif 'wsgiorg.routing_args' in environ:
routing_args = (value, environ['wsgiorg.routing_args'][1])
else:
routing_args = (value, {})
environ['wsgiorg.routing_args'] = routing_args
def _urlargs__del(self):
if 'wsgiorg.routing_args' in self.environ:
if not self.environ['wsgiorg.routing_args'][1]:
del self.environ['wsgiorg.routing_args']
else:
self.environ['wsgiorg.routing_args'] = (
(), self.environ['wsgiorg.routing_args'][1])
urlargs = property(_urlargs__get,
_urlargs__set,
_urlargs__del,
_urlargs__get.__doc__)
@property
def is_xhr(self):
"""Is X-Requested-With header present and equal to ``XMLHttpRequest``?
Note: this isn't set by every XMLHttpRequest request; it is
only set if you are using a JavaScript library that sets it
(or you set the header yourself manually). Currently
Prototype and jQuery are known to set this header."""
return self.environ.get('HTTP_X_REQUESTED_WITH', '') == 'XMLHttpRequest'
def _host__get(self):
"""Host name provided in HTTP_HOST, with fall-back to SERVER_NAME"""
if 'HTTP_HOST' in self.environ:
return self.environ['HTTP_HOST']
else:
return '%(SERVER_NAME)s:%(SERVER_PORT)s' % self.environ
def _host__set(self, value):
self.environ['HTTP_HOST'] = value
def _host__del(self):
if 'HTTP_HOST' in self.environ:
del self.environ['HTTP_HOST']
host = property(_host__get, _host__set, _host__del, doc=_host__get.__doc__)
@property
def domain(self):
""" Returns the domain portion of the host value. Equivalent to:
.. code-block:: python
domain = request.host
if ':' in domain:
domain = domain.split(':', 1)[0]
This will be equivalent to the domain portion of the ``HTTP_HOST``
value in the environment if it exists, or the ``SERVER_NAME`` value in
the environment if it doesn't. For example, if the environment
contains an ``HTTP_HOST`` value of ``foo.example.com:8000``,
``request.domain`` will return ``foo.example.com``.
Note that this value cannot be *set* on the request. To set the host
value use :meth:`webob.request.Request.host` instead.
"""
domain = self.host
if ':' in domain:
domain = domain.split(':', 1)[0]
return domain
def _body__get(self):
"""
Return the content of the request body.
"""
if not self.is_body_readable:
return b''
self.make_body_seekable() # we need this to have content_length
r = self.body_file.read(self.content_length)
self.body_file_raw.seek(0)
return r
def _body__set(self, value):
if value is None:
value = b''
if not isinstance(value, bytes):
raise TypeError("You can only set Request.body to bytes (not %r)"
% type(value))
if not http_method_probably_has_body.get(self.method, True):
if not value:
self.content_length = None
self.body_file_raw = io.BytesIO()
return
self.content_length = len(value)
self.body_file_raw = io.BytesIO(value)
self.is_body_seekable = True
def _body__del(self):
self.body = b''
body = property(_body__get, _body__set, _body__del, doc=_body__get.__doc__)
def _json_body__get(self):
"""Access the body of the request as JSON"""
return json.loads(self.body.decode(self.charset))
def _json_body__set(self, value):
self.body = json.dumps(value, separators=(',', ':')).encode(self.charset)
def _json_body__del(self):
del self.body
json = json_body = property(_json_body__get, _json_body__set, _json_body__del)
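    # Round-trip sketch (hypothetical payload): json_body encodes with the
    # request charset and compact separators, so the bytes are predictable.
    #
    #   req = BaseRequest.blank('/api')
    #   req.method = 'POST'
    #   req.json_body = {'name': 'alice'}
    #   assert req.body == b'{"name":"alice"}'
    #   assert req.json_body == {'name': 'alice'}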
def _text__get(self):
"""
Get/set the text value of the body
"""
if not self.charset:
raise AttributeError(
"You cannot access Request.text unless charset is set")
body = self.body
return body.decode(self.charset)
def _text__set(self, value):
if not self.charset:
raise AttributeError(
"You cannot access Response.text unless charset is set")
if not isinstance(value, text_type):
raise TypeError(
"You can only set Request.text to a unicode string "
"(not %s)" % type(value))
self.body = value.encode(self.charset)
def _text__del(self):
del self.body
text = property(_text__get, _text__set, _text__del, doc=_text__get.__doc__)
@property
def POST(self):
"""
Return a MultiDict containing all the variables from a form
request. Returns an empty dict-like object for non-form requests.
Form requests are typically POST requests, however PUT & PATCH requests
with an appropriate Content-Type are also supported.
"""
env = self.environ
if self.method not in ('POST', 'PUT', 'PATCH'):
return NoVars('Not a form request')
if 'webob._parsed_post_vars' in env:
vars, body_file = env['webob._parsed_post_vars']
if body_file is self.body_file_raw:
return vars
content_type = self.content_type
if ((self.method == 'PUT' and not content_type)
or content_type not in
('',
'application/x-www-form-urlencoded',
'multipart/form-data')
):
# Not an HTML form submission
return NoVars('Not an HTML form submission (Content-Type: %s)'
% content_type)
self._check_charset()
if self.is_body_seekable:
self.body_file_raw.seek(0)
fs_environ = env.copy()
# FieldStorage assumes a missing CONTENT_LENGTH, but a
# default of 0 is better:
fs_environ.setdefault('CONTENT_LENGTH', '0')
fs_environ['QUERY_STRING'] = ''
if PY3: # pragma: no cover
fs = cgi.FieldStorage(
fp=self.body_file,
environ=fs_environ,
keep_blank_values=True,
encoding='utf8')
vars = MultiDict.from_fieldstorage(fs)
else:
fs = cgi.FieldStorage(
fp=self.body_file,
environ=fs_environ,
keep_blank_values=True)
vars = MultiDict.from_fieldstorage(fs)
#ctype = self.content_type or 'application/x-www-form-urlencoded'
ctype = self._content_type_raw or 'application/x-www-form-urlencoded'
f = FakeCGIBody(vars, ctype)
self.body_file = io.BufferedReader(f)
env['webob._parsed_post_vars'] = (vars, self.body_file_raw)
return vars
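    # Sketch with made-up form data: the parsed MultiDict is cached in the
    # environ, keyed to the raw input stream, so repeated access is cheap.
    #
    #   req = BaseRequest.blank('/submit', POST={'user': 'alice'})
    #   assert req.POST['user'] == 'alice'
    #   assert req.POST is req.POST    # second access returns the cache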
@property
def GET(self):
"""
Return a MultiDict containing all the variables from the
QUERY_STRING.
"""
env = self.environ
source = env.get('QUERY_STRING', '')
if 'webob._parsed_query_vars' in env:
vars, qs = env['webob._parsed_query_vars']
if qs == source:
return vars
data = []
if source:
# this is disabled because we want to access req.GET
# for text/plain; charset=ascii uploads for example
#self._check_charset()
data = parse_qsl_text(source)
#d = lambda b: b.decode('utf8')
#data = [(d(k), d(v)) for k,v in data]
vars = GetDict(data, env)
env['webob._parsed_query_vars'] = (vars, source)
return vars
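    # Sketch (hypothetical query): repeated keys are preserved, assuming
    # the usual MultiDict getall() interface.
    #
    #   req = BaseRequest.blank('/search?q=cat&q=dog&page=2')
    #   assert req.GET.getall('q') == ['cat', 'dog']
    #   assert req.GET['page'] == '2'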
def _check_charset(self):
if self.charset != 'UTF-8':
raise DeprecationWarning(
"Requests are expected to be submitted in UTF-8, not %s. "
"You can fix this by doing req = req.decode('%s')" % (
self.charset, self.charset)
)
@property
def params(self):
"""
A dictionary-like object containing both the parameters from
the query string and request body.
"""
params = NestedMultiDict(self.GET, self.POST)
return params
@property
def cookies(self):
"""
Return a dictionary of cookies as found in the request.
"""
return RequestCookies(self.environ)
@cookies.setter
def cookies(self, val):
self.environ.pop('HTTP_COOKIE', None)
r = RequestCookies(self.environ)
r.update(val)
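    # Sketch (made-up cookie values): reading, then replacing, the cookies.
    #
    #   req = BaseRequest.blank('/')
    #   req.environ['HTTP_COOKIE'] = 'session=abc123; theme=dark'
    #   assert req.cookies['session'] == 'abc123'
    #   req.cookies = {'session': 'xyz'}    # rewrites HTTP_COOKIE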
def copy(self):
"""
Copy the request and environment object.
This only does a shallow copy, except of wsgi.input
"""
self.make_body_seekable()
env = self.environ.copy()
new_req = self.__class__(env)
new_req.copy_body()
return new_req
def copy_get(self):
"""
Copies the request and environment object, but turning this request
into a GET along the way. If this was a POST request (or any other
verb) then it becomes GET, and the request body is thrown away.
"""
env = self.environ.copy()
return self.__class__(env, method='GET', content_type=None,
body=b'')
# webob.is_body_seekable marks input streams that are seekable
# this way we can have seekable input without testing the .seek() method
is_body_seekable = environ_getter('webob.is_body_seekable', False)
#is_body_readable = environ_getter('webob.is_body_readable', False)
def _is_body_readable__get(self):
"""
webob.is_body_readable is a flag that tells us
that we can read the input stream even though
CONTENT_LENGTH is missing. This allows FakeCGIBody
to work and can be used by servers to support
chunked encoding in requests.
For background see https://bitbucket.org/ianb/webob/issue/6
"""
if http_method_probably_has_body.get(self.method):
# known HTTP method with body
return True
elif self.content_length is not None:
# unknown HTTP method, but the Content-Length
# header is present
return True
else:
# last resort -- rely on the special flag
return self.environ.get('webob.is_body_readable', False)
def _is_body_readable__set(self, flag):
self.environ['webob.is_body_readable'] = bool(flag)
is_body_readable = property(_is_body_readable__get, _is_body_readable__set,
doc=_is_body_readable__get.__doc__
)
def make_body_seekable(self):
"""
This forces ``environ['wsgi.input']`` to be seekable.
That means the content is copied into a BytesIO or temporary
file and flagged as seekable, so that it will not be unnecessarily
copied again.
After calling this method the .body_file is always positioned at
the start of the file and .content_length is not None.
The choice between BytesIO and a temporary file is governed by
``self.request_body_tempfile_limit``
"""
if self.is_body_seekable:
self.body_file_raw.seek(0)
else:
self.copy_body()
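    # Sketch: after make_body_seekable() the input stream can be re-read.
    #
    #   req = BaseRequest.blank('/', POST='a=1')
    #   req.make_body_seekable()
    #   first = req.body_file_raw.read()
    #   req.body_file_raw.seek(0)
    #   assert req.body_file_raw.read() == first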
def copy_body(self):
"""
Copies the body, in cases where it might be shared with
another request object and that is not desired.
This copies the body in-place, either into a BytesIO object
or a temporary file.
"""
if not self.is_body_readable:
# there's no body to copy
self.body = b''
elif self.content_length is None:
# chunked body or FakeCGIBody
self.body = self.body_file_raw.read()
self._copy_body_tempfile()
else:
# try to read body into tempfile
did_copy = self._copy_body_tempfile()
if not did_copy:
# it wasn't necessary, so just read it into memory
self.body = self.body_file.read(self.content_length)
def _copy_body_tempfile(self):
"""
Copy wsgi.input to tempfile if necessary. Returns True if it did.
"""
tempfile_limit = self.request_body_tempfile_limit
todo = self.content_length
assert isinstance(todo, integer_types), todo
if not tempfile_limit or todo <= tempfile_limit:
return False
fileobj = self.make_tempfile()
input = self.body_file
while todo > 0:
data = input.read(min(todo, 65536))
if not data:
# Normally this should not happen, because LimitedLengthFile
# should have raised an exception by now.
# It can happen if the is_body_seekable flag is incorrect.
raise DisconnectionError(
"Client disconnected (%s more bytes were expected)"
% todo
)
fileobj.write(data)
todo -= len(data)
fileobj.seek(0)
self.body_file_raw = fileobj
self.is_body_seekable = True
return True
def make_tempfile(self):
"""
Create a tempfile to store big request body.
This API is not stable yet. A 'size' argument might be added.
"""
return tempfile.TemporaryFile()
def remove_conditional_headers(self,
remove_encoding=True,
remove_range=True,
remove_match=True,
remove_modified=True):
"""
Remove headers that make the request conditional.
These headers can cause the response to be 304 Not Modified,
which in some cases you may not want to be possible.
This does not remove headers like If-Match, which are used for
conflict detection.
"""
check_keys = []
if remove_range:
check_keys += ['HTTP_IF_RANGE', 'HTTP_RANGE']
if remove_match:
check_keys.append('HTTP_IF_NONE_MATCH')
if remove_modified:
check_keys.append('HTTP_IF_MODIFIED_SINCE')
if remove_encoding:
check_keys.append('HTTP_ACCEPT_ENCODING')
for key in check_keys:
if key in self.environ:
del self.environ[key]
accept = accept_property('Accept', '14.1', MIMEAccept, MIMENilAccept)
accept_charset = accept_property('Accept-Charset', '14.2', AcceptCharset)
accept_encoding = accept_property('Accept-Encoding', '14.3',
NilClass=NoAccept)
accept_language = accept_property('Accept-Language', '14.4', AcceptLanguage)
authorization = converter(
environ_getter('HTTP_AUTHORIZATION', None, '14.8'),
parse_auth, serialize_auth,
)
def _cache_control__get(self):
"""
Get/set/modify the Cache-Control header (`HTTP spec section 14.9
<http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.9>`_)
"""
env = self.environ
value = env.get('HTTP_CACHE_CONTROL', '')
cache_header, cache_obj = env.get('webob._cache_control', (None, None))
if cache_obj is not None and cache_header == value:
return cache_obj
cache_obj = CacheControl.parse(value,
updates_to=self._update_cache_control,
type='request')
env['webob._cache_control'] = (value, cache_obj)
return cache_obj
def _cache_control__set(self, value):
env = self.environ
value = value or ''
if isinstance(value, dict):
value = CacheControl(value, type='request')
if isinstance(value, CacheControl):
str_value = str(value)
env['HTTP_CACHE_CONTROL'] = str_value
env['webob._cache_control'] = (str_value, value)
else:
env['HTTP_CACHE_CONTROL'] = str(value)
env['webob._cache_control'] = (None, None)
def _cache_control__del(self):
env = self.environ
if 'HTTP_CACHE_CONTROL' in env:
del env['HTTP_CACHE_CONTROL']
if 'webob._cache_control' in env:
del env['webob._cache_control']
def _update_cache_control(self, prop_dict):
self.environ['HTTP_CACHE_CONTROL'] = serialize_cache_control(prop_dict)
cache_control = property(_cache_control__get,
_cache_control__set,
_cache_control__del,
doc=_cache_control__get.__doc__)
if_match = etag_property('HTTP_IF_MATCH', AnyETag, '14.24')
if_none_match = etag_property('HTTP_IF_NONE_MATCH', NoETag, '14.26',
strong=False)
date = converter_date(environ_getter('HTTP_DATE', None, '14.18'))
if_modified_since = converter_date(
environ_getter('HTTP_IF_MODIFIED_SINCE', None, '14.25'))
if_unmodified_since = converter_date(
environ_getter('HTTP_IF_UNMODIFIED_SINCE', None, '14.28'))
if_range = converter(
environ_getter('HTTP_IF_RANGE', None, '14.27'),
IfRange.parse, serialize_if_range, 'IfRange object')
max_forwards = converter(
environ_getter('HTTP_MAX_FORWARDS', None, '14.31'),
parse_int, serialize_int, 'int')
pragma = environ_getter('HTTP_PRAGMA', None, '14.32')
range = converter(
environ_getter('HTTP_RANGE', None, '14.35'),
parse_range, serialize_range, 'Range object')
referer = environ_getter('HTTP_REFERER', None, '14.36')
referrer = referer
user_agent = environ_getter('HTTP_USER_AGENT', None, '14.43')
def __repr__(self):
try:
name = '%s %s' % (self.method, self.url)
except KeyError:
name = '(invalid WSGI environ)'
msg = '<%s at 0x%x %s>' % (
self.__class__.__name__,
abs(id(self)), name)
return msg
def as_bytes(self, skip_body=False):
"""
Return HTTP bytes representing this request.
If skip_body is True, exclude the body.
If skip_body is an integer larger than 1, the body is skipped
only if its length is bigger than that number.
"""
url = self.url
host = self.host_url
assert url.startswith(host)
url = url[len(host):]
parts = [bytes_('%s %s %s' % (self.method, url, self.http_version))]
#self.headers.setdefault('Host', self.host)
# acquire body before we handle headers so that
# content-length will be set
body = None
if self.method in ('PUT', 'POST'):
if skip_body > 1:
if len(self.body) > skip_body:
body = bytes_('<body skipped (len=%s)>' % len(self.body))
else:
skip_body = False
if not skip_body:
body = self.body
for k, v in sorted(self.headers.items()):
header = bytes_('%s: %s' % (k, v))
parts.append(header)
if body:
parts.extend([b'', body])
# HTTP clearly specifies CRLF
return b'\r\n'.join(parts)
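    # Illustrative output (hypothetical request); note the CRLF separators:
    #
    #   req = BaseRequest.blank('http://example.com/hello?x=1')
    #   req.as_bytes()
    #   # -> b'GET /hello?x=1 HTTP/1.0\r\nHost: example.com:80'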
def as_string(self, skip_body=False):
    # TODO: Remove in 1.4
    warn_deprecation(
        "Please use req.as_bytes",
        '1.3',
        self._setattr_stacklevel
    )
    return self.as_bytes(skip_body=skip_body)
def as_text(self):
bytes = self.as_bytes()
return bytes.decode(self.charset)
__str__ = as_text
@classmethod
def from_bytes(cls, b):
"""
Create a request from HTTP bytes data. If the bytes contain
extra data after the request, raise a ValueError.
"""
f = io.BytesIO(b)
r = cls.from_file(f)
if f.tell() != len(b):
raise ValueError("The string contains more data than expected")
return r
@classmethod
def from_string(cls, b):
    # TODO: Remove in 1.4
    warn_deprecation(
        "Please use req.from_bytes",
        '1.3',
        cls._setattr_stacklevel
    )
    return cls.from_bytes(b)
@classmethod
def from_text(cls, s):
b = bytes_(s, 'utf-8')
return cls.from_bytes(b)
@classmethod
def from_file(cls, fp):
"""Read a request from a file-like object (it must implement
``.read(size)`` and ``.readline()``).
It will read up to the end of the request, not the end of the
file (unless the request is a POST or PUT and has no
Content-Length, in which case the entire file is read).
This reads the request as represented by ``str(req)``; it may
not read every valid HTTP request properly.
"""
start_line = fp.readline()
is_text = isinstance(start_line, text_type)
if is_text:
crlf = '\r\n'
colon = ':'
else:
crlf = b'\r\n'
colon = b':'
try:
header = start_line.rstrip(crlf)
method, resource, http_version = header.split(None, 2)
method = native_(method, 'utf-8')
resource = native_(resource, 'utf-8')
http_version = native_(http_version, 'utf-8')
except ValueError:
raise ValueError('Bad HTTP request line: %r' % start_line)
r = cls(environ_from_url(resource),
http_version=http_version,
method=method.upper()
)
del r.environ['HTTP_HOST']
while 1:
line = fp.readline()
if not line.strip():
# end of headers
break
hname, hval = line.split(colon, 1)
hname = native_(hname, 'utf-8')
hval = native_(hval, 'utf-8').strip()
if hname in r.headers:
hval = r.headers[hname] + ', ' + hval
r.headers[hname] = hval
if r.method in ('PUT', 'POST'):
clen = r.content_length
if clen is None:
body = fp.read()
else:
body = fp.read(clen)
if is_text:
body = bytes_(body, 'utf-8')
r.body = body
return r
def call_application(self, application, catch_exc_info=False):
"""
Call the given WSGI application, returning ``(status_string,
headerlist, app_iter)``
Be sure to call ``app_iter.close()`` if it's there.
If catch_exc_info is true, then returns ``(status_string,
headerlist, app_iter, exc_info)``, where the fourth item may
be None, but won't be if there was an exception. If you don't
do this and there was an exception, the exception will be
raised directly.
"""
if self.is_body_seekable:
self.body_file_raw.seek(0)
captured = []
output = []
def start_response(status, headers, exc_info=None):
if exc_info is not None and not catch_exc_info:
reraise(exc_info)
captured[:] = [status, headers, exc_info]
return output.append
app_iter = application(self.environ, start_response)
if output or not captured:
try:
output.extend(app_iter)
finally:
if hasattr(app_iter, 'close'):
app_iter.close()
app_iter = output
if catch_exc_info:
return (captured[0], captured[1], app_iter, captured[2])
else:
return (captured[0], captured[1], app_iter)
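    # Minimal sketch with a toy WSGI app (everything below is hypothetical):
    #
    #   def app(environ, start_response):
    #       start_response('200 OK', [('Content-Type', 'text/plain')])
    #       return [b'hello']
    #   status, headers, app_iter = BaseRequest.blank('/').call_application(app)
    #   assert status == '200 OK'
    #   assert b''.join(app_iter) == b'hello'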
# Will be filled in later:
ResponseClass = None
def send(self, application=None, catch_exc_info=False):
"""
Like ``.call_application(application)``, except returns a
response object with ``.status``, ``.headers``, and ``.body``
attributes.
This will use ``self.ResponseClass`` to figure out the class
of the response object to return.
If ``application`` is not given, this will send the request to
``self.make_default_send_app()``
"""
if application is None:
application = self.make_default_send_app()
if catch_exc_info:
status, headers, app_iter, exc_info = self.call_application(
application, catch_exc_info=True)
del exc_info
else:
status, headers, app_iter = self.call_application(
application, catch_exc_info=False)
return self.ResponseClass(
status=status, headerlist=list(headers), app_iter=app_iter)
get_response = send
def make_default_send_app(self):
global _client
try:
client = _client
except NameError:
from webob import client
_client = client
return client.send_request_app
@classmethod
def blank(cls, path, environ=None, base_url=None,
headers=None, POST=None, **kw):
"""
Create a blank request environ (and Request wrapper) with the
given path (path should be urlencoded), and any keys from
environ.
The path will become path_info, with any query string split
off and used.
All necessary keys will be added to the environ, but the
values you pass in will take precedence. If you pass in
base_url then wsgi.url_scheme, HTTP_HOST, and SCRIPT_NAME will
be filled in from that value.
Any extra keyword arguments will be passed to ``__init__``.
"""
env = environ_from_url(path)
if base_url:
scheme, netloc, path, query, fragment = urlparse.urlsplit(base_url)
if query or fragment:
raise ValueError(
"base_url (%r) cannot have a query or fragment"
% base_url)
if scheme:
env['wsgi.url_scheme'] = scheme
if netloc:
if ':' not in netloc:
if scheme == 'http':
netloc += ':80'
elif scheme == 'https':
netloc += ':443'
else:
raise ValueError(
"Unknown scheme: %r" % scheme)
host, port = netloc.split(':', 1)
env['SERVER_PORT'] = port
env['SERVER_NAME'] = host
env['HTTP_HOST'] = netloc
if path:
env['SCRIPT_NAME'] = url_unquote(path)
if environ:
env.update(environ)
content_type = kw.get('content_type', env.get('CONTENT_TYPE'))
if headers and 'Content-Type' in headers:
content_type = headers['Content-Type']
if content_type is not None:
kw['content_type'] = content_type
environ_add_POST(env, POST, content_type=content_type)
obj = cls(env, **kw)
if headers is not None:
obj.headers.update(headers)
return obj
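    # Sketch of blank() with a base_url (all values hypothetical):
    #
    #   req = BaseRequest.blank('/resource?x=1',
    #                           base_url='https://api.example.com/v1')
    #   assert req.script_name == '/v1'
    #   assert req.path_info == '/resource'
    #   assert req.host == 'api.example.com:443'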
class LegacyRequest(BaseRequest):
uscript_name = upath_property('SCRIPT_NAME')
upath_info = upath_property('PATH_INFO')
def encget(self, key, default=NoDefault, encattr=None):
val = self.environ.get(key, default)
if val is NoDefault:
raise KeyError(key)
if val is default:
return default
return val
class AdhocAttrMixin(object):
_setattr_stacklevel = 3
def __setattr__(self, attr, value, DEFAULT=object()):
if (getattr(self.__class__, attr, DEFAULT) is not DEFAULT or
attr.startswith('_')):
object.__setattr__(self, attr, value)
else:
self.environ.setdefault('webob.adhoc_attrs', {})[attr] = value
def __getattr__(self, attr, DEFAULT=object()):
try:
return self.environ['webob.adhoc_attrs'][attr]
except KeyError:
raise AttributeError(attr)
def __delattr__(self, attr, DEFAULT=object()):
if getattr(self.__class__, attr, DEFAULT) is not DEFAULT:
return object.__delattr__(self, attr)
try:
del self.environ['webob.adhoc_attrs'][attr]
except KeyError:
raise AttributeError(attr)
class Request(AdhocAttrMixin, BaseRequest):
""" The default request implementation """
def environ_from_url(path):
if SCHEME_RE.search(path):
scheme, netloc, path, qs, fragment = urlparse.urlsplit(path)
if fragment:
raise TypeError("Path cannot contain a fragment (%r)" % fragment)
if qs:
path += '?' + qs
if ':' not in netloc:
if scheme == 'http':
netloc += ':80'
elif scheme == 'https':
netloc += ':443'
else:
raise TypeError("Unknown scheme: %r" % scheme)
else:
scheme = 'http'
netloc = 'localhost:80'
if path and '?' in path:
path_info, query_string = path.split('?', 1)
path_info = url_unquote(path_info)
else:
path_info = url_unquote(path)
query_string = ''
env = {
'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'PATH_INFO': path_info or '',
'QUERY_STRING': query_string,
'SERVER_NAME': netloc.split(':')[0],
'SERVER_PORT': netloc.split(':')[1],
'HTTP_HOST': netloc,
'SERVER_PROTOCOL': 'HTTP/1.0',
'wsgi.version': (1, 0),
'wsgi.url_scheme': scheme,
'wsgi.input': io.BytesIO(),
'wsgi.errors': sys.stderr,
'wsgi.multithread': False,
'wsgi.multiprocess': False,
'wsgi.run_once': False,
#'webob.is_body_seekable': True,
}
return env
def environ_add_POST(env, data, content_type=None):
if data is None:
return
elif isinstance(data, text_type): # pragma: no cover
data = data.encode('ascii')
if env['REQUEST_METHOD'] not in ('POST', 'PUT'):
env['REQUEST_METHOD'] = 'POST'
has_files = False
if hasattr(data, 'items'):
data = list(data.items())
for k, v in data:
if isinstance(v, (tuple, list)):
has_files = True
break
if content_type is None:
if has_files:
content_type = 'multipart/form-data'
else:
content_type = 'application/x-www-form-urlencoded'
if content_type.startswith('multipart/form-data'):
if not isinstance(data, bytes):
content_type, data = _encode_multipart(data, content_type)
elif content_type.startswith('application/x-www-form-urlencoded'):
if has_files:
raise ValueError('Submitting files is not allowed for'
                 ' content type `%s`' % content_type)
if not isinstance(data, bytes):
data = url_encode(data)
else:
if not isinstance(data, bytes):
raise ValueError('Please provide `POST` data as string'
' for content type `%s`' % content_type)
data = bytes_(data, 'utf8')
env['wsgi.input'] = io.BytesIO(data)
env['webob.is_body_seekable'] = True
env['CONTENT_LENGTH'] = str(len(data))
env['CONTENT_TYPE'] = content_type
#########################
## Helper classes and monkeypatching
#########################
class DisconnectionError(IOError):
pass
class LimitedLengthFile(io.RawIOBase):
def __init__(self, file, maxlen):
self.file = file
self.maxlen = maxlen
self.remaining = maxlen
def __repr__(self):
return '<%s(%r, maxlen=%s)>' % (
self.__class__.__name__,
self.file,
self.maxlen
)
def fileno(self):
return self.file.fileno()
@staticmethod
def readable():
return True
def readinto(self, buff):
if not self.remaining:
return 0
sz0 = min(len(buff), self.remaining)
data = self.file.read(sz0)
sz = len(data)
self.remaining -= sz
#if not data:
if sz < sz0 and self.remaining:
raise DisconnectionError(
"The client disconnected while sending the POST/PUT body "
+ "(%d more bytes were expected)" % self.remaining
)
buff[:sz] = data
return sz
def _cgi_FieldStorage__repr__patch(self):
""" monkey patch for FieldStorage.__repr__
Unbelievably, the default __repr__ on FieldStorage reads
the entire file content instead of being sane about it.
This is a simple replacement that doesn't do that
"""
if self.file:
return "FieldStorage(%r, %r)" % (self.name, self.filename)
return "FieldStorage(%r, %r, %r)" % (self.name, self.filename, self.value)
cgi.FieldStorage.__repr__ = _cgi_FieldStorage__repr__patch
class FakeCGIBody(io.RawIOBase):
def __init__(self, vars, content_type):
if content_type.startswith('multipart/form-data'):
if not _get_multipart_boundary(content_type):
raise ValueError('Content-type: %r does not contain boundary'
% content_type)
self.vars = vars
self.content_type = content_type
self.file = None
def __repr__(self):
inner = repr(self.vars)
if len(inner) > 20:
inner = inner[:15] + '...' + inner[-5:]
return '<%s at 0x%x viewing %s>' % (
self.__class__.__name__,
abs(id(self)), inner)
def fileno(self):
return None
@staticmethod
def readable():
return True
def readinto(self, buff):
if self.file is None:
if self.content_type.startswith(
'application/x-www-form-urlencoded'):
data = '&'.join(
'%s=%s' % (quote_plus(bytes_(k, 'utf8')), quote_plus(bytes_(v, 'utf8')))
for k,v in self.vars.items()
)
self.file = io.BytesIO(bytes_(data))
elif self.content_type.startswith('multipart/form-data'):
self.file = _encode_multipart(
self.vars.items(),
self.content_type,
fout=io.BytesIO()
)[1]
self.file.seek(0)
else:
assert 0, ('Bad content type: %r' % self.content_type)
return self.file.readinto(buff)
def _get_multipart_boundary(ctype):
m = re.search(r'boundary=([^ ]+)', ctype, re.I)
if m:
return native_(m.group(1).strip('"'))
def _encode_multipart(vars, content_type, fout=None):
"""Encode a multipart request body into a string"""
f = fout or io.BytesIO()
w = f.write
wt = lambda t: f.write(t.encode('utf8'))
CRLF = b'\r\n'
boundary = _get_multipart_boundary(content_type)
if not boundary:
boundary = native_(binascii.hexlify(os.urandom(10)))
content_type += ('; boundary=%s' % boundary)
for name, value in vars:
w(b'--')
wt(boundary)
w(CRLF)
assert name is not None, 'Value associated with no name: %r' % value
wt('Content-Disposition: form-data; name="%s"' % name)
filename = None
if getattr(value, 'filename', None):
filename = value.filename
elif isinstance(value, (list, tuple)):
filename, value = value
if hasattr(value, 'read'):
value = value.read()
if filename is not None:
wt('; filename="%s"' % filename)
mime_type = mimetypes.guess_type(filename)[0]
else:
mime_type = None
w(CRLF)
# TODO: should handle value.disposition_options
if getattr(value, 'type', None):
wt('Content-type: %s' % value.type)
if value.type_options:
for ct_name, ct_value in sorted(value.type_options.items()):
wt('; %s="%s"' % (ct_name, ct_value))
w(CRLF)
elif mime_type:
wt('Content-type: %s' % mime_type)
w(CRLF)
w(CRLF)
if hasattr(value, 'value'):
value = value.value
if isinstance(value, bytes):
w(value)
else:
wt(value)
w(CRLF)
wt('--%s--' % boundary)
if fout:
return content_type, fout
else:
return content_type, f.getvalue()
def detect_charset(ctype):
m = CHARSET_RE.search(ctype)
if m:
return m.group(1).strip('"').strip()
def _is_utf8(charset):
if not charset:
return True
else:
return charset.lower().replace('-', '') == 'utf8'
class Transcoder(object):
def __init__(self, charset, errors='strict'):
self.charset = charset # source charset
self.errors = errors # unicode errors
self._trans = lambda b: b.decode(charset, errors).encode('utf8')
def transcode_query(self, q):
if PY3: # pragma: no cover
q_orig = q
if '=' not in q:
# this doesn't look like a form submission
return q_orig
q = list(parse_qsl_text(q, self.charset))
return url_encode(q)
else:
q_orig = q
if '=' not in q:
# this doesn't look like a form submission
return q_orig
q = urlparse.parse_qsl(q, self.charset)
t = self._trans
q = [(t(k), t(v)) for k,v in q]
return url_encode(q)
def transcode_fs(self, fs, content_type):
# transcode FieldStorage
if PY3: # pragma: no cover
decode = lambda b: b
else:
decode = lambda b: b.decode(self.charset, self.errors)
data = []
for field in fs.list or ():
field.name = decode(field.name)
if field.filename:
field.filename = decode(field.filename)
data.append((field.name, field))
else:
data.append((field.name, decode(field.value)))
# TODO: transcode big requests to temp file
content_type, fout = _encode_multipart(
data,
content_type,
fout=io.BytesIO()
)
return fout
# TODO: remove in 1.4
for _name in 'GET POST params cookies'.split():
_str_name = 'str_'+_name
_prop = deprecated_property(
None, _str_name,
"disabled starting WebOb 1.2, use %s instead" % _name, '1.2')
setattr(BaseRequest, _str_name, _prop)
| agpl-3.0 |
Exa-Networks/exaproxy | lib/exaproxy/reactor/client/passthrough.py | 2 | 6224 | # encoding: utf-8
"""
server.py
Created by David Farrar on 2011-11-30.
Copyright (c) 2011-2013 Exa Networks. All rights reserved.
"""
import socket
import errno
from exaproxy.network.functions import isipv4
from exaproxy.network.errno_list import errno_block
from exaproxy.util.proxy import ProxyProtocol
class PassthroughClient (object):
proxy_protocol = ProxyProtocol()
__slots__ = ['name', 'ipv4', 'sock', 'accept_addr', 'accept_port', 'peer', 'reader', 'writer', 'w_buffer', 'log']
def __init__(self, name, sock, peer, logger, max_buffer, proxied):
addr, port = sock.getsockname()
self.name = name
self.sock = sock
self.peer = peer
self.accept_addr = addr
self.accept_port = port
self.ipv4 = isipv4(addr)
self.reader = self._read(sock, max_buffer, proxied=proxied)
self.writer = self._write(sock)
self.w_buffer = ''
self.log = logger
# start the _read coroutine
self.reader.next()
def getAcceptAddress (self):
return self.accept_addr, self.accept_port
def _read (self, sock, max_buffer, read_size=64*1024, proxied=False):
"""Coroutine managing data read from the client"""
# yield request, content
# request is the text that forms the request header
# content is any text related to the current request after the headers
yield ''
r_buffer = ''
size = 0
masquerade = None
# mode can be one of : proxy, passthrough
# proxy: we are reading an opening proxy protocol header
# passthrough : read as much as can to be relayed
mode = 'proxy' if proxied else 'passthrough'
data = ''
while True:
try:
while True:
if mode != 'passthrough' or r_buffer == '':
new_data = sock.recv(read_size)
if not new_data:
break # read failed so we abort
r_buffer += new_data
if mode == 'proxy':
masquerade, r_buffer, mode = self.processProxyHeader(r_buffer, mode)
if masquerade is None:
break
elif not masquerade:
continue
self.setPeer(masquerade)
if mode == 'passthrough':
data, r_buffer, mode = self.process(r_buffer, mode, max_buffer)
if data is None:
break
if data:
data_response, data = [data], ''
yield [''], data_response
else:
yield [''], ['']
# break out of the outer loop as soon as we leave the inner loop
# through normal execution
break
except socket.error, e:
if e.args[0] in errno_block:
yield [''], ['']
else:
break
yield [None], [None]
def processProxyHeader (self, r_buffer, mode):
r_buffer = r_buffer.lstrip('\r\n')
for eol in self.eol:
if eol in r_buffer:
client_ip, r_buffer = self.proxy_protocol.parse(r_buffer)
mode = 'passthrough'
break
else:
client_ip, r_buffer = '', r_buffer
return client_ip, r_buffer, mode
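# Rough illustration (header layout assumed from the PROXY protocol spec;
# addresses are made up): a proxied connection starts with a line such as
#
#   PROXY TCP4 203.0.113.7 10.0.0.1 51234 80\r\n
#
# Once a complete line is buffered, ProxyProtocol.parse strips it and
# returns the claimed client IP, which setPeer() then records.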
def process (self, r_buffer, mode, max_buffer):
if mode == 'passthrough':
data, r_buffer, new_mode = r_buffer, '', mode
else:
data, r_buffer, new_mode = None, r_buffer, None
return data, r_buffer, new_mode
def setPeer (self, peer):
"""Set the claimed ip address for this client.
Does not effect the ip address we try sending data to."""
self.peer = peer
def readData (self):
# pop data from lists to free memory held by the coroutine
request_l, content_l = self.reader.send(('passthrough',0))
request = request_l.pop()
content = content_l.pop()
return self.name, self.accept_addr, self.accept_port, self.peer, request, '', content
def readRelated (self, mode, remaining):
# pop data from lists to free memory held by the coroutine
mode = 'passthrough'
request_l, content_l = self.reader.send((mode,remaining))
request = request_l.pop()
content = content_l.pop()
return self.name, self.accept_addr, self.peer, request, '', content
def _write(self, sock):
"""Coroutine managing data sent to the client"""
data = yield None
finished = False
w_buffer = self.w_buffer
while True:
try:
while True:
w_buffer = self.w_buffer
had_buffer = bool(w_buffer)
if data is not None:
w_buffer += data
else:
# We've finished downloading, even if the client hasn't yet
finished = True
if finished:
if not w_buffer:
break # terminate the client connection
elif data:
self.log.error('Tried to send data to client after we told it to close. Dropping it.')
if not had_buffer or data == '':
sent = sock.send(w_buffer)
#if sent:
# self.log.debug(">> [%s]" % w_buffer[:sent].replace('\t','\\t').replace('\r','\\r').replace('\n','\\n'))
w_buffer = w_buffer[sent:]
else:
sent = 0
self.w_buffer = w_buffer
buffered = bool(w_buffer) or finished
data = yield buffered, had_buffer, sent if self.ipv4 else 0, 0 if self.ipv4 else sent
# break out of the outer loop as soon as we leave the inner loop
# through normal execution
yield None
break
except socket.error, e:
self.w_buffer = w_buffer
if e.args[0] in errno_block:
self.log.debug('interrupted when trying to send %d bytes, fine, will retry' % len(data))
self.log.debug('reason: errno %d: %s' % (e.args[0], errno.errorcode.get(e.args[0], '<no errno name>')))
data = yield bool(w_buffer) or finished, had_buffer, 0, 0
else:
self.log.debug('handled an unexpected error writing on socket')
self.log.debug('reason, errno %d: %s' % (e.args[0], errno.errorcode.get(e.args[0], '<no errno name>')))
yield None # stop the client connection
break # and don't come back
yield None
def writeData(self, data):
return self.writer.send(data)
def startData(self, command, data):
# start the _write coroutine
self.writer = self._write(self.sock)
self.writer.next()
if command == 'stream':
res = self.writer.send(data)
elif command == 'close':
self.writer.send(data)
res = self.writer.send(None) # close the connection once the buffer is empty
else:
res = None
# buffered, had_buffer
return self.name, self.peer, res
def shutdown(self):
try:
self.sock.shutdown(socket.SHUT_RDWR)
except socket.error:
pass
finally:
self.sock.close()
self.writer.close()
self.reader.close()
| bsd-2-clause |
louietsai/python-for-android | python-build/python-libs/gdata/src/atom/core.py | 137 | 20292 | #!/usr/bin/env python
#
# Copyright (C) 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
__author__ = '[email protected] (Jeff Scudder)'
import inspect
try:
from xml.etree import cElementTree as ElementTree
except ImportError:
try:
import cElementTree as ElementTree
except ImportError:
try:
from xml.etree import ElementTree
except ImportError:
from elementtree import ElementTree
STRING_ENCODING = 'utf-8'
class XmlElement(object):
"""Represents an element node in an XML document.
The text member is a UTF-8 encoded str or unicode.
"""
_qname = None
_other_elements = None
_other_attributes = None
# The rule set contains mappings for XML qnames to child members and the
# appropriate member classes.
_rule_set = None
_members = None
text = None
def __init__(self, text=None, *args, **kwargs):
if ('_members' not in self.__class__.__dict__
or self.__class__._members is None):
self.__class__._members = tuple(self.__class__._list_xml_members())
for member_name, member_type in self.__class__._members:
if member_name in kwargs:
setattr(self, member_name, kwargs[member_name])
else:
if isinstance(member_type, list):
setattr(self, member_name, [])
else:
setattr(self, member_name, None)
self._other_elements = []
self._other_attributes = {}
if text is not None:
self.text = text
def _list_xml_members(cls):
"""Generator listing all members which are XML elements or attributes.
The following members would be considered XML members:
foo = 'abc' - indicates an XML attribute with the qname abc
foo = SomeElement - indicates an XML child element
foo = [AnElement] - indicates a repeating XML child element, each instance
will be stored in a list in this member
foo = ('att1', '{http://example.com/namespace}att2') - indicates an XML
attribute which has different parsing rules in different versions of
the protocol. Version 1 of the XML parsing rules will look for an
attribute with the qname 'att1' but version 2 of the parsing rules will
look for a namespaced attribute with the local name of 'att2' and an
XML namespace of 'http://example.com/namespace'.
"""
members = []
for pair in inspect.getmembers(cls):
if not pair[0].startswith('_') and pair[0] != 'text':
member_type = pair[1]
if (isinstance(member_type, tuple) or isinstance(member_type, list)
or isinstance(member_type, (str, unicode))
or (inspect.isclass(member_type)
and issubclass(member_type, XmlElement))):
members.append(pair)
return members
_list_xml_members = classmethod(_list_xml_members)
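  # Sketch of the declaration convention _list_xml_members looks for
  # (Entry, Title and Link are made-up classes, not part of this library):
  #
  #   class Entry(XmlElement):
  #     _qname = '{http://example.com/ns}entry'
  #     title = Title         # single child element
  #     links = [Link]        # repeating child element
  #     kind = 'kind'         # XML attribute with qname 'kind'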
def _get_rules(cls, version):
"""Initializes the _rule_set for the class which is used when parsing XML.
This method is used internally for parsing and generating XML for an
XmlElement. It is not recommended that you call this method directly.
Returns:
A tuple containing the XML parsing rules for the appropriate version.
The tuple looks like:
(qname, {sub_element_qname: (member_name, member_class, repeating), ..},
{attribute_qname: member_name})
To give a couple of concrete examples, the atom.data.Control _get_rules
with version of 2 will return:
('{http://www.w3.org/2007/app}control',
{'{http://www.w3.org/2007/app}draft': ('draft',
<class 'atom.data.Draft'>,
False)},
{})
Calling _get_rules with version 1 on gdata.data.FeedLink will produce:
('{http://schemas.google.com/g/2005}feedLink',
{'{http://www.w3.org/2005/Atom}feed': ('feed',
<class 'gdata.data.GDFeed'>,
False)},
{'href': 'href', 'readOnly': 'read_only', 'countHint': 'count_hint',
'rel': 'rel'})
"""
# Initialize the _rule_set to make sure there is a slot available to store
# the parsing rules for this version of the XML schema.
# Look for rule set in the class __dict__ proxy so that only the
# _rule_set for this class will be found. By using the dict proxy
# we avoid finding rule_sets defined in superclasses.
# The four lines below provide support for any number of versions, but it
# runs a bit slower than hard-coding slots for two versions, so I'm using
# the below two lines.
#if '_rule_set' not in cls.__dict__ or cls._rule_set is None:
# cls._rule_set = []
#while len(cls.__dict__['_rule_set']) < version:
# cls._rule_set.append(None)
# If there is no rule set cache in the class, provide slots for two XML
# versions. If and when there is a version 3, this list will need to be
# expanded.
if '_rule_set' not in cls.__dict__ or cls._rule_set is None:
cls._rule_set = [None, None]
# If a version higher than 2 is requested, fall back to version 2 because
# 2 is currently the highest supported version.
if version > 2:
return cls._get_rules(2)
# Check the dict proxy for the rule set to avoid finding any rule sets
# which belong to the superclass. We only want rule sets for this class.
if cls._rule_set[version-1] is None:
# The rule set for each version consists of the qname for this element
# ('{namespace}tag'), a dictionary (elements) for looking up the
# corresponding class member when given a child element's qname, and a
# dictionary (attributes) for looking up the corresponding class member
# when given an XML attribute's qname.
elements = {}
attributes = {}
if ('_members' not in cls.__dict__ or cls._members is None):
cls._members = tuple(cls._list_xml_members())
for member_name, target in cls._members:
if isinstance(target, list):
# This member points to a repeating element.
elements[_get_qname(target[0], version)] = (member_name, target[0],
True)
elif isinstance(target, tuple):
# This member points to a versioned XML attribute.
if version <= len(target):
attributes[target[version-1]] = member_name
else:
attributes[target[-1]] = member_name
elif isinstance(target, (str, unicode)):
# This member points to an XML attribute.
attributes[target] = member_name
elif issubclass(target, XmlElement):
# This member points to a single-occurrence element.
elements[_get_qname(target, version)] = (member_name, target, False)
version_rules = (_get_qname(cls, version), elements, attributes)
cls._rule_set[version-1] = version_rules
return version_rules
else:
return cls._rule_set[version-1]
_get_rules = classmethod(_get_rules)
def get_elements(self, tag=None, namespace=None, version=1):
"""Find all sub elements which match the tag and namespace.
To find all elements in this object, call get_elements with the tag and
namespace both set to None (the default). This method searches through
the object's members and the elements stored in _other_elements which
did not match any of the XML parsing rules for this class.
Args:
tag: str
namespace: str
version: int Specifies the version of the XML rules to be used when
searching for matching elements.
Returns:
A list of the matching XmlElements.
"""
matches = []
ignored1, elements, ignored2 = self.__class__._get_rules(version)
if elements:
for qname, element_def in elements.iteritems():
member = getattr(self, element_def[0])
if member:
if _qname_matches(tag, namespace, qname):
if element_def[2]:
# If this is a repeating element, copy all instances into the
# result list.
matches.extend(member)
else:
matches.append(member)
for element in self._other_elements:
if _qname_matches(tag, namespace, element._qname):
matches.append(element)
return matches
GetElements = get_elements
# FindExtensions and FindChildren are provided for backwards compatibility
# to the atom.AtomBase class.
# However, FindExtensions may return more results than the v1 atom.AtomBase
# method does, because get_elements searches both the expected children
# and the unexpected "other elements". The old AtomBase.FindExtensions
# method searched only "other elements" AKA extension_elements.
FindExtensions = get_elements
FindChildren = get_elements
def get_attributes(self, tag=None, namespace=None, version=1):
"""Find all attributes which match the tag and namespace.
To find all attributes in this object, call get_attributes with the tag
and namespace both set to None (the default). This method searches
through the object's members and the attributes stored in
_other_attributes which did not fit any of the XML parsing rules for this
class.
Args:
tag: str
namespace: str
version: int Specifies the version of the XML rules to be used when
searching for matching attributes.
Returns:
A list of XmlAttribute objects for the matching attributes.
"""
matches = []
ignored1, ignored2, attributes = self.__class__._get_rules(version)
if attributes:
for qname, attribute_def in attributes.iteritems():
member = getattr(self, attribute_def[0])
if member:
if _qname_matches(tag, namespace, qname):
matches.append(XmlAttribute(qname, member))
for qname, value in self._other_attributes.iteritems():
if _qname_matches(tag, namespace, qname):
matches.append(XmlAttribute(qname, value))
return matches
GetAttributes = get_attributes
def _harvest_tree(self, tree, version=1):
"""Populates object members from the data in the tree Element."""
qname, elements, attributes = self.__class__._get_rules(version)
for element in tree:
if elements and element.tag in elements:
definition = elements[element.tag]
# If this is a repeating element, make sure the member is set to a
# list.
if definition[2]:
if getattr(self, definition[0]) is None:
setattr(self, definition[0], [])
getattr(self, definition[0]).append(_xml_element_from_tree(element,
definition[1], version))
else:
setattr(self, definition[0], _xml_element_from_tree(element,
definition[1], version))
else:
self._other_elements.append(_xml_element_from_tree(element, XmlElement,
version))
for attrib, value in tree.attrib.iteritems():
if attributes and attrib in attributes:
setattr(self, attributes[attrib], value)
else:
self._other_attributes[attrib] = value
if tree.text:
self.text = tree.text
def _to_tree(self, version=1, encoding=None):
new_tree = ElementTree.Element(_get_qname(self, version))
self._attach_members(new_tree, version, encoding)
return new_tree
def _attach_members(self, tree, version=1, encoding=None):
"""Convert members to XML elements/attributes and add them to the tree.
Args:
tree: An ElementTree.Element which will be modified. The members of
this object will be added as child elements or attributes
according to the rules described in _expected_elements and
_expected_attributes. The elements and attributes stored in
other_attributes and other_elements are also added a children
of this tree.
version: int Ignored in this method but used by VersionedElement.
encoding: str (optional)
"""
qname, elements, attributes = self.__class__._get_rules(version)
encoding = encoding or STRING_ENCODING
# Add the expected elements and attributes to the tree.
if elements:
for tag, element_def in elements.iteritems():
member = getattr(self, element_def[0])
# If this is a repeating element and there are members in the list.
if member and element_def[2]:
for instance in member:
instance._become_child(tree, version)
elif member:
member._become_child(tree, version)
if attributes:
for attribute_tag, member_name in attributes.iteritems():
value = getattr(self, member_name)
if value:
tree.attrib[attribute_tag] = value
# Add the unexpected (other) elements and attributes to the tree.
for element in self._other_elements:
element._become_child(tree, version)
for key, value in self._other_attributes.iteritems():
# I'm not sure if unicode can be used in the attribute name, so for now
# we assume the encoding is correct for the attribute name.
if not isinstance(value, unicode):
value = value.decode(encoding)
tree.attrib[key] = value
if self.text:
if isinstance(self.text, unicode):
tree.text = self.text
else:
tree.text = self.text.decode(encoding)
def to_string(self, version=1, encoding=None):
"""Converts this object to XML."""
return ElementTree.tostring(self._to_tree(version, encoding))
ToString = to_string
def __str__(self):
return self.to_string()
def _become_child(self, tree, version=1):
"""Adds a child element to tree with the XML data in self."""
new_child = ElementTree.Element('')
tree.append(new_child)
new_child.tag = _get_qname(self, version)
self._attach_members(new_child, version)
def __get_extension_elements(self):
return self._other_elements
def __set_extension_elements(self, elements):
self._other_elements = elements
extension_elements = property(__get_extension_elements,
__set_extension_elements,
"""Provides backwards compatibility for v1 atom.AtomBase classes.""")
def __get_extension_attributes(self):
return self._other_attributes
def __set_extension_attributes(self, attributes):
self._other_attributes = attributes
extension_attributes = property(__get_extension_attributes,
__set_extension_attributes,
"""Provides backwards compatibility for v1 atom.AtomBase classes.""")
def _get_tag(self, version=1):
qname = _get_qname(self, version)
return qname[qname.find('}')+1:]
def _get_namespace(self, version=1):
qname = _get_qname(self, version)
if qname.startswith('{'):
return qname[1:qname.find('}')]
else:
return None
def _set_tag(self, tag):
    if isinstance(self._qname, tuple):
      # A tuple is immutable (and has no copy() in Python 2), so convert it
      # to a list before mutating the version-specific entry.
      self._qname = list(self._qname)
if self._qname[0].startswith('{'):
self._qname[0] = '{%s}%s' % (self._get_namespace(1), tag)
else:
self._qname[0] = tag
else:
if self._qname.startswith('{'):
self._qname = '{%s}%s' % (self._get_namespace(), tag)
else:
self._qname = tag
def _set_namespace(self, namespace):
    if isinstance(self._qname, tuple):
      # As in _set_tag, convert the immutable tuple to a mutable list first.
      self._qname = list(self._qname)
if namespace:
self._qname[0] = '{%s}%s' % (namespace, self._get_tag(1))
else:
self._qname[0] = self._get_tag(1)
else:
if namespace:
self._qname = '{%s}%s' % (namespace, self._get_tag(1))
else:
self._qname = self._get_tag(1)
tag = property(_get_tag, _set_tag,
"""Provides backwards compatibility for v1 atom.AtomBase classes.""")
namespace = property(_get_namespace, _set_namespace,
"""Provides backwards compatibility for v1 atom.AtomBase classes.""")
# Provided for backwards compatibility to atom.ExtensionElement
children = extension_elements
attributes = extension_attributes
def _get_qname(element, version):
if isinstance(element._qname, tuple):
if version <= len(element._qname):
return element._qname[version-1]
else:
return element._qname[-1]
else:
return element._qname
def _qname_matches(tag, namespace, qname):
  """Determines whether a QName matches the desired local tag and namespace.
  This is used in XmlElement.get_elements and XmlElement.get_attributes to
  find matches in the element's members (among all expected-and-unexpected
  elements-and-attributes).
  Args:
    tag: string The desired local tag, or None to match any tag.
    namespace: string The desired namespace, or None to match any namespace.
        An empty string matches only members which have no namespace.
    qname: string in the form '{xml_namespace}localtag' or 'tag' if there is
           no namespace.
  Returns:
    boolean True if the member's tag and namespace fit the expected tag and
    namespace.
  """
  # If the member has no qname, treat its tag and namespace as None so the
  # wildcard checks below apply.
if qname is None:
member_tag = None
member_namespace = None
else:
if qname.startswith('{'):
member_namespace = qname[1:qname.index('}')]
member_tag = qname[qname.index('}') + 1:]
else:
member_namespace = None
member_tag = qname
return ((tag is None and namespace is None)
# If there is a tag, but no namespace, see if the local tag matches.
or (namespace is None and member_tag == tag)
# There was no tag, but there was a namespace so see if the namespaces
# match.
or (tag is None and member_namespace == namespace)
# There was no tag, and the desired elements have no namespace, so check
# to see that the member's namespace is None.
or (tag is None and namespace == ''
and member_namespace is None)
# The tag and the namespace both match.
or (tag == member_tag
and namespace == member_namespace)
# The tag matches, and the expected namespace is the empty namespace,
# check to make sure the member's namespace is None.
or (tag == member_tag and namespace == ''
and member_namespace is None))
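# Illustrative sketch (an addition, not original library code) of the matching
# rules implemented above. The qname values below are hypothetical.
def _example_qname_matching():
  qname = '{http://www.w3.org/2005/Atom}title'
  # A fully specified tag and namespace must both match.
  assert _qname_matches('title', 'http://www.w3.org/2005/Atom', qname)
  # A None tag or None namespace acts as a wildcard for that component.
  assert _qname_matches(None, 'http://www.w3.org/2005/Atom', qname)
  assert _qname_matches('title', None, qname)
  # The empty-string namespace matches only members with no namespace at all.
  assert not _qname_matches('title', '', qname)
  assert _qname_matches('title', '', 'title')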
def parse(xml_string, target_class=None, version=1, encoding=None):
"""Parses the XML string according to the rules for the target_class.
Args:
xml_string: str or unicode
target_class: XmlElement or a subclass. If None is specified, the
XmlElement class is used.
version: int (optional) The version of the schema which should be used when
converting the XML into an object. The default is 1.
encoding: str (optional) The character encoding of the bytes in the
xml_string. Default is 'UTF-8'.
"""
if target_class is None:
target_class = XmlElement
if isinstance(xml_string, unicode):
if encoding is None:
xml_string = xml_string.encode(STRING_ENCODING)
else:
xml_string = xml_string.encode(encoding)
tree = ElementTree.fromstring(xml_string)
return _xml_element_from_tree(tree, target_class, version)
Parse = parse
xml_element_from_string = parse
XmlElementFromString = xml_element_from_string
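# Minimal usage sketch (an addition, not original library code): parse an XML
# string into a generic XmlElement and serialize it back. The XML snippet is
# made up for illustration.
def _example_parse_roundtrip():
  element = parse('<entry xmlns="http://www.w3.org/2005/Atom">hi</entry>')
  # With no target_class, parse() falls back to XmlElement, so the tag,
  # namespace and text all come straight from the document.
  assert element.tag == 'entry'
  assert element.namespace == 'http://www.w3.org/2005/Atom'
  assert element.text == 'hi'
  return element.to_string()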
def _xml_element_from_tree(tree, target_class, version=1):
if target_class._qname is None:
instance = target_class()
instance._qname = tree.tag
instance._harvest_tree(tree, version)
return instance
# TODO handle the namespace-only case
# Namespace only will be used with Google Spreadsheets rows and
# Google Base item attributes.
elif tree.tag == _get_qname(target_class, version):
instance = target_class()
instance._harvest_tree(tree, version)
return instance
return None
class XmlAttribute(object):
def __init__(self, qname, value):
self._qname = qname
self.value = value
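# Usage sketch (an addition, not original library code): reading attributes
# from a parsed element with get_attributes. The XML input is hypothetical.
def _example_get_attributes():
  element = parse('<entry xmlns="http://www.w3.org/2005/Atom" type="text"/>')
  # With tag and namespace left as None, every stored attribute matches; the
  # unexpected 'type' attribute is returned as an XmlAttribute wrapper.
  return [(a._qname, a.value) for a in element.get_attributes()]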
| apache-2.0 |
cloudera/cm_api | python/src/cm_api/endpoints/cms.py | 2 | 20364 | # Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from cm_api.endpoints.types import *
from cm_api.endpoints.services import ApiService
class ApiLicense(BaseApiObject):
"""Model for a CM license."""
_ATTRIBUTES = {
'owner' : ROAttr(),
'uuid' : ROAttr(),
'expiration' : ROAttr(),
}
def __init__(self, resource_root):
BaseApiObject.init(self, resource_root)
class ClouderaManager(BaseApiResource):
"""
The Cloudera Manager instance.
Provides access to CM configuration and services.
"""
def __init__(self, resource_root):
BaseApiObject.init(self, resource_root)
def _path(self):
return '/cm'
def get_commands(self, view=None):
"""
Retrieve a list of running global commands.
@param view: View to materialize ('full' or 'summary')
@return: A list of running commands.
"""
return self._get("commands", ApiCommand, True,
params = view and dict(view=view) or None)
def create_mgmt_service(self, service_setup_info):
"""
    Set up the Cloudera Management Service.
@param service_setup_info: ApiServiceSetupInfo object.
@return: The management service instance.
"""
return self._put("service", ApiService, data=service_setup_info)
def delete_mgmt_service(self):
"""
Delete the Cloudera Management Service.
@return: The deleted management service instance.
"""
return self._delete("service", ApiService, api_version=6)
def get_service(self):
"""
Return the Cloudera Management Services instance.
@return: An ApiService instance.
"""
return self._get("service", ApiService)
def get_license(self):
"""
Return information about the currently installed license.
@return: License information.
"""
return self._get("license", ApiLicense)
def update_license(self, license_text):
"""
Install or update the Cloudera Manager license.
@param license_text: the license in text form
"""
content = (
'--MULTI_BOUNDARY',
'Content-Disposition: form-data; name="license"',
'',
license_text,
'--MULTI_BOUNDARY--',
'')
resp = self._get_resource_root().post('cm/license',
data="\r\n".join(content),
contenttype='multipart/form-data; boundary=MULTI_BOUNDARY')
return ApiLicense.from_json_dict(resp, self._get_resource_root())
def get_config(self, view = None):
"""
Retrieve the Cloudera Manager configuration.
The 'summary' view contains strings as the dictionary values. The full
view contains ApiConfig instances as the values.
@param view: View to materialize ('full' or 'summary')
@return: Dictionary with configuration data.
"""
return self._get_config("config", view)
def update_config(self, config):
"""
Update the CM configuration.
@param config: Dictionary with configuration to update.
@return: Dictionary with updated configuration.
"""
return self._update_config("config", config)
def generate_credentials(self):
"""
Generate credentials for services configured with Kerberos.
@return: Information about the submitted command.
"""
return self._cmd('generateCredentials')
def import_admin_credentials(self, username, password):
"""
Imports the KDC Account Manager credentials needed by Cloudera
Manager to create kerberos principals needed by CDH services.
    @param username: Username of the Account Manager. The full name, including
           the Kerberos realm, must be specified.
    @param password: Password for the Account Manager.
@return: Information about the submitted command.
@since API v7
"""
return self._cmd('importAdminCredentials', params=dict(username=username, password=password))
def get_licensed_feature_usage(self):
"""
Retrieve a summary of licensed feature usage.
This command will return information about what Cloudera Enterprise
licensed features are in use in the clusters being managed by this Cloudera
Manager, as well as totals for usage across all clusters.
The specific features described can vary between different versions of
Cloudera Manager.
Available since API v6.
"""
return self._get('getLicensedFeatureUsage',
ret_type=ApiLicensedFeatureUsage,
ret_is_list=False,
api_version=6)
def inspect_hosts(self):
"""
Runs the host inspector on the configured hosts.
@return: Information about the submitted command.
"""
return self._cmd('inspectHosts')
def collect_diagnostic_data(self, start_datetime, end_datetime, includeInfoLog=False):
"""
This method is deprecated as of CM 4.5.
You should use collect_diagnostic_data_45.
Issue the command to collect diagnostic data.
@param start_datetime: The start of the collection period. Type datetime.
@param end_datetime: The end of the collection period. Type datetime.
@param includeInfoLog: Whether to include INFO level log messages.
"""
args = {
'startTime': start_datetime.isoformat(),
'endTime': end_datetime.isoformat(),
'includeInfoLog': includeInfoLog,
}
# This method is deprecated as of CM API version 3 which was introduced
# in CM 4.5.
return self._cmd('collectDiagnosticData', data=args, api_version=2)
def collect_diagnostic_data_45(self, end_datetime, bundle_size_bytes, cluster_name=None,
roles=None, collect_metrics=False, start_datetime=None):
"""
Issue the command to collect diagnostic data.
If start_datetime is specified, diagnostic data is collected for the entire period between
start_datetime and end_datetime provided that bundle size is less than or equal to
bundle_size_bytes. Diagnostics data collection fails if the bundle size is greater than
bundle_size_bytes.
    If start_datetime is not specified, diagnostic data is collected starting from end_datetime
    and collecting backwards up to a maximum of bundle_size_bytes.
@param end_datetime: The end of the collection period. Type datetime.
@param bundle_size_bytes: The target size for the support bundle in bytes
@param cluster_name: The cluster to collect or None for all clusters
@param roles: Role ids of roles to restrict log and metric collection to. Valid since v10.
@param collect_metrics: Whether to collect metrics for viewing as charts. Valid since v13.
@param start_datetime: The start of the collection period. Type datetime. Valid since v13.
"""
args = {
'endTime': end_datetime.isoformat(),
'bundleSizeBytes': bundle_size_bytes,
'clusterName': cluster_name
}
if self._get_resource_root().version >= 10:
args['roles'] = roles
if self._get_resource_root().version >= 13:
args['enableMonitorMetricsCollection'] = collect_metrics
if start_datetime is not None:
args['startTime'] = start_datetime.isoformat()
return self._cmd('collectDiagnosticData', data=args)
def hosts_decommission(self, host_names):
"""
Decommission the specified hosts by decommissioning the slave roles
and stopping the remaining ones.
@param host_names: List of names of hosts to be decommissioned.
@return: Information about the submitted command.
@since: API v2
"""
return self._cmd('hostsDecommission', data=host_names)
def hosts_recommission(self, host_names):
"""
Recommission the specified hosts by recommissioning the slave roles.
This command doesn't start the roles. Use hosts_start_roles for that.
@param host_names: List of names of hosts to be recommissioned.
@return: Information about the submitted command.
@since: API v2
"""
return self._cmd('hostsRecommission', data=host_names)
def hosts_recommission_with_start(self, host_names):
"""
Recommission the specified hosts by recommissioning the slave roles.
This command will start the roles before recommissioning.
Warning: Evolving. This method may change in the future and does not
offer standard compatibility guarantees.
Do not use without guidance from Cloudera.
Currently, only HDFS DataNodes will be started by this command.
@param host_names: List of names of hosts to be recommissioned.
@return: Information about the submitted command.
@since: API v15
"""
return self._cmd('hostsRecommissionWithStart', data=host_names, api_version=15)
def hosts_start_roles(self, host_names):
"""
Start all the roles on the specified hosts.
@param host_names: List of names of hosts on which to start all roles.
@return: Information about the submitted command.
@since: API v2
"""
return self._cmd('hostsStartRoles', data=host_names)
def create_peer(self, name, url, username, password, peer_type="REPLICATION"):
"""
Create a new peer for replication.
@param name: The name of the peer.
@param url: The url of the peer.
@param username: The admin username to use to setup the remote side of the peer connection.
@param password: The password of the admin user.
@param peer_type: Added in v11. The type of the peer. Defaults to 'REPLICATION'.
@return: The newly created peer.
@since: API v3
"""
if self._get_resource_root().version < 11:
peer_type = None
peer = ApiCmPeer(self._get_resource_root(),
name=name,
url=url,
username=username,
password=password,
type=peer_type)
return self._post("peers", ApiCmPeer, data=peer, api_version=3)
def _get_peer_type_param(self, peer_type):
"""
Checks if the resource_root's API version is >= 11 and construct type param.
"""
params = None
if self._get_resource_root().version >= 11:
params = {
'type': peer_type,
}
return params
def delete_peer(self, name, peer_type="REPLICATION"):
"""
Delete a replication peer.
@param name: The name of the peer.
@param peer_type: Added in v11. The type of the peer. Defaults to 'REPLICATION'.
@return: The deleted peer.
@since: API v3
"""
params = self._get_peer_type_param(peer_type)
return self._delete("peers/" + name, ApiCmPeer, params=params, api_version=3)
def update_peer(self,
current_name,
new_name, new_url, username, password, peer_type="REPLICATION"):
"""
Update a replication peer.
    @param current_name: The name of the peer to be updated.
@param new_name: The new name for the peer.
@param new_url: The new url for the peer.
@param username: The admin username to use to setup the remote side of the peer connection.
@param password: The password of the admin user.
@param peer_type: Added in v11. The type of the peer. Defaults to 'REPLICATION'.
@return: The updated peer.
@since: API v3
"""
if self._get_resource_root().version < 11:
peer_type = None
peer = ApiCmPeer(self._get_resource_root(),
name=new_name,
url=new_url,
username=username,
password=password,
type=peer_type)
return self._put("peers/" + current_name, ApiCmPeer, data=peer, api_version=3)
def get_peers(self):
"""
Retrieve a list of replication peers.
@return: A list of replication peers.
@since: API v3
"""
return self._get("peers", ApiCmPeer, True, api_version=3)
def get_peer(self, name, peer_type="REPLICATION"):
"""
Retrieve a replication peer by name.
@param name: The name of the peer.
@param peer_type: Added in v11. The type of the peer. Defaults to 'REPLICATION'.
@return: The peer.
@since: API v3
"""
params = self._get_peer_type_param(peer_type)
return self._get("peers/" + name, ApiCmPeer, params=params, api_version=3)
def test_peer_connectivity(self, name, peer_type="REPLICATION"):
"""
Test connectivity for a replication peer.
@param name: The name of the peer to test.
@param peer_type: Added in v11. The type of the peer to test. Defaults to 'REPLICATION'.
@return: The command representing the test.
@since: API v3
"""
params = self._get_peer_type_param(peer_type)
return self._post("peers/%s/commands/test" % name, ApiCommand, params=params,
api_version=3)
def get_all_hosts_config(self, view=None):
"""
Retrieve the default configuration for all hosts.
    @param view: View to materialize ('full' or 'summary').
@return: Dictionary with configuration data.
"""
return self._get_config("allHosts/config", view)
def update_all_hosts_config(self, config):
"""
Update the default configuration for all hosts.
@param config: Dictionary with configuration to update.
@return: Dictionary with updated configuration.
"""
return self._update_config("allHosts/config", config)
def auto_assign_roles(self):
"""
Automatically assign roles to hosts and create the roles for the Cloudera
Management Service.
Assignments are done based on number of hosts in the deployment and hardware
specifications. Existing roles will be taken into account and their
    assignments will not be modified. The deployment should not have any
clusters when calling this endpoint. If it does, an exception will be thrown
preventing any role assignments.
@since: API v6
"""
self._put("service/autoAssignRoles", None, api_version=6)
def auto_configure(self):
"""
Automatically configures roles of the Cloudera Management Service.
Overwrites some existing configurations. Only default role config groups
must exist before calling this endpoint. Other role config groups must not
exist. If they do, an exception will be thrown preventing any
configuration. Ignores any clusters (and their services and roles)
colocated with the Cloudera Management Service. To avoid over-committing
the heap on hosts, place the Cloudera Management Service roles on machines
not used by any of the clusters.
@since: API v6
"""
self._put("service/autoConfigure", None, api_version=6)
def host_install(self, user_name, host_names, ssh_port=None, password=None,
private_key=None, passphrase=None, parallel_install_count=None,
cm_repo_url=None, gpg_key_custom_url=None,
java_install_strategy=None, unlimited_jce=None):
"""
Install Cloudera Manager Agent on a set of hosts.
@param user_name: The username used to authenticate with the hosts. Root access
to your hosts is required to install Cloudera packages. The
installer will connect to your hosts via SSH and log in either
directly as root or as another user with password-less sudo
privileges to become root.
@param host_names: List of names of hosts to configure for use with
Cloudera Manager. A host may be specified by a
                       hostname (FQDN) or an IP address.
@param ssh_port: SSH port. If unset, defaults to 22.
@param password: The password used to authenticate with the hosts. Specify
either this or a private key. For password-less login, use
an empty string as password.
@param private_key: The private key to authenticate with the hosts. Specify
either this or a password.
@param passphrase: The passphrase associated with the private key used to
authenticate with the hosts (optional).
@param parallel_install_count: Number of simultaneous installations.
Defaults to 10. Running a large number of
installations at once can consume large amounts
of network bandwidth and other system resources.
@param cm_repo_url: The Cloudera Manager repository URL to use (optional).
Example for SLES, Redhat or other RPM based distributions:
http://archive-primary.cloudera.com/cm5/redhat/6/x86_64/cm/5/
Example for Ubuntu or other Debian based distributions:
"deb http://archive.cloudera.com/cm5/ubuntu/lucid/amd64/cm/ lucid-cm5 contrib"
@param gpg_key_custom_url: The Cloudera Manager public GPG key (optional).
Example for SLES, Redhat or other RPM based distributions:
http://archive-primary.cloudera.com/cm5/redhat/6/x86_64/cm/RPM-GPG-KEY-cloudera
Example for Ubuntu or other Debian based distributions:
http://archive.cloudera.com/debian/archive.key
@param java_install_strategy: Added in v8: Strategy to use for JDK installation. Valid values are 1.
AUTO (default): Cloudera Manager will install the JDK versions that are
required when the "AUTO" option is selected. Cloudera Manager may
overwrite any of the existing JDK installations. 2. NONE: Cloudera
Manager will not install any JDK when "NONE" option is selected. It
should be used if an existing JDK installation has to be used.
    @param unlimited_jce: Added in v8: Flag for unlimited strength JCE policy files installation.
           If unset, defaults to false.
@return: Information about the submitted command.
@since: API v6
"""
host_install_args = {}
if user_name:
host_install_args['userName'] = user_name
if host_names:
host_install_args['hostNames'] = host_names
if ssh_port:
host_install_args['sshPort'] = ssh_port
if password:
host_install_args['password'] = password
if private_key:
host_install_args['privateKey'] = private_key
if passphrase:
host_install_args['passphrase'] = passphrase
if parallel_install_count:
host_install_args['parallelInstallCount'] = parallel_install_count
if cm_repo_url:
host_install_args['cmRepoUrl'] = cm_repo_url
if gpg_key_custom_url:
host_install_args['gpgKeyCustomUrl'] = gpg_key_custom_url
if java_install_strategy is not None:
host_install_args['javaInstallStrategy'] = java_install_strategy
if unlimited_jce:
host_install_args['unlimitedJCE'] = unlimited_jce
return self._cmd('hostInstall', data=host_install_args)
def begin_trial(self):
"""
Begin the trial license for this Cloudera Manager instance.
This allows the user to have enterprise-level features for a 60-day trial
period.
@since: API v6
"""
self._post("trial/begin", None, api_version=6)
def end_trial(self):
"""
End the trial license for this Cloudera Manager instance.
@since: API v6
"""
self._post("trial/end", None, api_version=6)
def import_cluster_template(self, api_cluster_template, add_repositories=False):
"""
Create a cluster according to the provided template
@param api_cluster_template: cluster template to import
@param add_repositories: if true the parcels repositories in the cluster template will be added.
@return: Command handing cluster import
@since: API v12
"""
return self._post("importClusterTemplate", ApiCommand, False, api_cluster_template, params=dict(addRepositories=add_repositories), api_version=12)
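# Usage sketch (an addition, not part of the original module): obtaining a
# ClouderaManager handle via cm_api's documented ApiResource entry point. The
# host name and credentials below are placeholders.
def _example_cm_usage():
  from cm_api.api_client import ApiResource
  api = ApiResource('cm-host.example.com', username='admin', password='admin')
  cm = api.get_cloudera_manager()
  # Read the configuration summary and start a host inspection; the returned
  # ApiCommand can be polled for completion.
  config = cm.get_config(view='summary')
  command = cm.inspect_hosts()
  return config, command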
| apache-2.0 |
ausmeyer/hiv_structural_determinants-paper-now | data/matrix/matrix_structure/distances.py | 1 | 1379 | import numpy as np
from Bio.PDB import *
def calc_distances_aas(model):
count = 1
sites = []
for a in model.get_residues():
if a.get_resname() == 'SIA':
continue
distances = []
for b in model.get_residues():
if b.get_resname() != 'SIA':
distance = b['CA'] - a['CA']
distances.append(distance)
sites.append(distances)
print(count)
count += 1
out_file = open('distances.dat', 'w')
for a in sites:
for b in a:
if b == a[-1]:
out_file.write(str(b))
else:
out_file.write(str(b) + ',')
out_file.write('\n')
out_file.close()
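# Note on the distance computation above: Bio.PDB overloads subtraction on
# Atom objects, so b['CA'] - a['CA'] directly yields the Euclidean distance
# (in Angstroms) between the two alpha-carbon positions, e.g.:
#     d = residue_b['CA'] - residue_a['CA']   # float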
def calc_distances_sia(model):
    # Sum the coordinates of every atom in the SIA (sialic acid) residues to
    # get a reference point, then write the distance from that point to each
    # CA atom. The output filename below is an assumption.
    location = np.zeros(3)
    for a in model.get_residues():
        if a.get_resname() == "SIA":
            for a1 in a:
                location += a1.get_coord()
    distances = open('distances_sia.dat', 'w')
    for a in model.get_residues():
        if a.get_resname() == "SIA":
            for b in model.get_residues():
                if b.get_resname() != 'SIA':
                    distances.write(str(np.sqrt(sum((location - b['CA'].get_coord())**2))) + '\n')
    distances.close()
def main():
parser = PDBParser()
structure = parser.get_structure('temp', '1HIW_monomer.pdb')
calc_distances_aas(structure[0])
if __name__ == '__main__':
    main()
| mit |
AntidoteLabs/Antidote-DM | Antidotes DM/youtube_dl/extractor/slutload.py | 146 | 1390 | from __future__ import unicode_literals
import re
from .common import InfoExtractor
class SlutloadIE(InfoExtractor):
_VALID_URL = r'^https?://(?:\w+\.)?slutload\.com/video/[^/]+/(?P<id>[^/]+)/?$'
_TEST = {
'url': 'http://www.slutload.com/video/virginie-baisee-en-cam/TD73btpBqSxc/',
'md5': '0cf531ae8006b530bd9df947a6a0df77',
'info_dict': {
'id': 'TD73btpBqSxc',
'ext': 'mp4',
"title": "virginie baisee en cam",
"age_limit": 18,
'thumbnail': 're:https?://.*?\.jpg'
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
webpage = self._download_webpage(url, video_id)
video_title = self._html_search_regex(r'<h1><strong>([^<]+)</strong>',
webpage, 'title').strip()
video_url = self._html_search_regex(
r'(?s)<div id="vidPlayer"\s+data-url="([^"]+)"',
webpage, 'video URL')
thumbnail = self._html_search_regex(
r'(?s)<div id="vidPlayer"\s+.*?previewer-file="([^"]+)"',
webpage, 'thumbnail', fatal=False)
return {
'id': video_id,
'url': video_url,
'title': video_title,
'thumbnail': thumbnail,
'age_limit': 18
}
| gpl-2.0 |
joram/sickbeard-orange | sickbeard/searchCurrent.py | 18 | 1154 | # Author: Nic Wolfe <[email protected]>
# URL: http://code.google.com/p/sickbeard/
#
# This file is part of Sick Beard.
#
# Sick Beard is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Sick Beard is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Sick Beard. If not, see <http://www.gnu.org/licenses/>.
from __future__ import with_statement
import sickbeard
from sickbeard import search_queue
import threading
class CurrentSearcher():
def __init__(self):
self.lock = threading.Lock()
self.amActive = False
def run(self):
search_queue_item = search_queue.RSSSearchQueueItem()
sickbeard.searchQueueScheduler.action.add_item(search_queue_item) #@UndefinedVariable
| gpl-3.0 |
MobinRanjbar/hue | apps/about/src/about/views.py | 28 | 2381 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
import logging
from django.conf import settings
from django.utils.translation import ugettext as _
from desktop import appmanager
from desktop.lib.django_util import JsonResponse, render, login_notrequired
from desktop.log.access import access_log_level
from desktop.models import Settings
from desktop.views import collect_usage
@login_notrequired
@access_log_level(logging.DEBUG)
def admin_wizard(request):
if request.user.is_superuser:
apps = appmanager.get_apps(request.user)
else:
apps = []
app_names = [app.name for app in sorted(apps, key=lambda app: app.menu_index)]
tours_and_tutorials = Settings.get_settings().tours_and_tutorials
return render('admin_wizard.mako', request, {
'version': settings.HUE_DESKTOP_VERSION,
'apps': dict([(app.name, app) for app in apps]),
'app_names': app_names,
'tours_and_tutorials': tours_and_tutorials,
'collect_usage': collect_usage(),
})
def update_preferences(request):
response = {'status': -1, 'data': ''}
if request.method == 'POST':
try:
settings = Settings.get_settings()
settings.tours_and_tutorials = request.POST.get('tours_and_tutorials', False)
settings.collect_usage = request.POST.get('collect_usage', False)
settings.save()
response['status'] = 0
response['tours_and_tutorials'] = settings.tours_and_tutorials
response['collect_usage'] = settings.collect_usage
except Exception, e:
response['data'] = str(e)
else:
response['data'] = _('POST request required.')
return JsonResponse(response)
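# Request/response sketch (illustrative, not part of the original view): the
# endpoint above expects a form-encoded POST and answers with JSON, roughly:
#     POST tours_and_tutorials=true&collect_usage=false
#     -> {"status": 0, "data": "", "tours_and_tutorials": "true",
#         "collect_usage": "false"}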
| apache-2.0 |
MattsFleaMarket/python-for-android | python3-alpha/python3-src/Lib/distutils/util.py | 46 | 24178 | """distutils.util
Miscellaneous utility functions -- anything that doesn't fit into
one of the other *util.py modules.
"""
import sys, os, string, re
from distutils.errors import DistutilsPlatformError
from distutils.dep_util import newer
from distutils.spawn import spawn
from distutils import log
from distutils.errors import DistutilsByteCompileError
def get_platform ():
"""Return a string that identifies the current platform. This is used
mainly to distinguish platform-specific build directories and
platform-specific built distributions. Typically includes the OS name
and version and the architecture (as supplied by 'os.uname()'),
although the exact information included depends on the OS; eg. for IRIX
the architecture isn't particularly important (IRIX only runs on SGI
hardware), but for Linux the kernel version isn't particularly
important.
Examples of returned values:
linux-i586
linux-alpha (?)
solaris-2.6-sun4u
irix-5.3
irix64-6.2
Windows will return one of:
win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
win-ia64 (64bit Windows on Itanium)
win32 (all others - specifically, sys.platform is returned)
For other non-POSIX platforms, currently just returns 'sys.platform'.
"""
if os.name == 'nt':
# sniff sys.version for architecture.
prefix = " bit ("
i = sys.version.find(prefix)
if i == -1:
return sys.platform
j = sys.version.find(")", i)
look = sys.version[i+len(prefix):j].lower()
if look == 'amd64':
return 'win-amd64'
if look == 'itanium':
return 'win-ia64'
return sys.platform
if os.name != "posix" or not hasattr(os, 'uname'):
# XXX what about the architecture? NT is Intel or Alpha,
# Mac OS is M68k or PPC, etc.
return sys.platform
# Try to distinguish various flavours of Unix
(osname, host, release, version, machine) = os.uname()
# Convert the OS name to lowercase, remove '/' characters
# (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
osname = osname.lower().replace('/', '')
machine = machine.replace(' ', '_')
machine = machine.replace('/', '-')
if osname[:5] == "linux":
# At least on Linux/Intel, 'machine' is the processor --
# i386, etc.
# XXX what about Alpha, SPARC, etc?
return "%s-%s" % (osname, machine)
elif osname[:5] == "sunos":
if release[0] >= "5": # SunOS 5 == Solaris 2
osname = "solaris"
release = "%d.%s" % (int(release[0]) - 3, release[2:])
# fall through to standard osname-release-machine representation
elif osname[:4] == "irix": # could be "irix64"!
return "%s-%s" % (osname, release)
elif osname[:3] == "aix":
return "%s-%s.%s" % (osname, version, release)
elif osname[:6] == "cygwin":
osname = "cygwin"
rel_re = re.compile (r'[\d.]+', re.ASCII)
m = rel_re.match(release)
if m:
release = m.group()
elif osname[:6] == "darwin":
#
# For our purposes, we'll assume that the system version from
# distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
# to. This makes the compatibility story a bit more sane because the
# machine is going to compile and link as if it were
# MACOSX_DEPLOYMENT_TARGET.
from distutils.sysconfig import get_config_vars
cfgvars = get_config_vars()
macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
if 1:
# Always calculate the release of the running machine,
# needed to determine if we can build fat binaries or not.
macrelease = macver
# Get the system version. Reading this plist is a documented
# way to get the system version (see the documentation for
# the Gestalt Manager)
try:
f = open('/System/Library/CoreServices/SystemVersion.plist')
except IOError:
# We're on a plain darwin box, fall back to the default
# behaviour.
pass
else:
try:
m = re.search(
r'<key>ProductUserVisibleVersion</key>\s*' +
r'<string>(.*?)</string>', f.read())
if m is not None:
macrelease = '.'.join(m.group(1).split('.')[:2])
# else: fall back to the default behaviour
finally:
f.close()
if not macver:
macver = macrelease
if macver:
from distutils.sysconfig import get_config_vars
release = macver
osname = "macosx"
if (macrelease + '.') >= '10.4.' and \
'-arch' in get_config_vars().get('CFLAGS', '').strip():
# The universal build will build fat binaries, but not on
# systems before 10.4
#
# Try to detect 4-way universal builds, those have machine-type
# 'universal' instead of 'fat'.
machine = 'fat'
cflags = get_config_vars().get('CFLAGS')
archs = re.findall('-arch\s+(\S+)', cflags)
archs = tuple(sorted(set(archs)))
if len(archs) == 1:
machine = archs[0]
elif archs == ('i386', 'ppc'):
machine = 'fat'
elif archs == ('i386', 'x86_64'):
machine = 'intel'
elif archs == ('i386', 'ppc', 'x86_64'):
machine = 'fat3'
elif archs == ('ppc64', 'x86_64'):
machine = 'fat64'
elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
machine = 'universal'
else:
raise ValueError(
"Don't know machine value for archs=%r"%(archs,))
elif machine == 'i386':
# On OSX the machine type returned by uname is always the
# 32-bit variant, even if the executable architecture is
# the 64-bit variant
if sys.maxsize >= 2**32:
machine = 'x86_64'
elif machine in ('PowerPC', 'Power_Macintosh'):
# Pick a sane name for the PPC architecture.
machine = 'ppc'
# See 'i386' case
if sys.maxsize >= 2**32:
machine = 'ppc64'
return "%s-%s-%s" % (osname, release, machine)
# get_platform ()
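# Example (illustrative): on a 64-bit Linux host get_platform() returns
# something like 'linux-x86_64', which is typically used to name build
# directories, e.g.:
#     build_dir = os.path.join('build', 'lib.' + get_platform())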
def convert_path (pathname):
"""Return 'pathname' as a name that will work on the native filesystem,
i.e. split it on '/' and put it back together again using the current
directory separator. Needed because filenames in the setup script are
always supplied in Unix style, and have to be converted to the local
convention before we can actually use them in the filesystem. Raises
ValueError on non-Unix-ish systems if 'pathname' either starts or
ends with a slash.
"""
if os.sep == '/':
return pathname
if not pathname:
return pathname
if pathname[0] == '/':
raise ValueError("path '%s' cannot be absolute" % pathname)
if pathname[-1] == '/':
raise ValueError("path '%s' cannot end with '/'" % pathname)
paths = pathname.split('/')
while '.' in paths:
paths.remove('.')
if not paths:
return os.curdir
return os.path.join(*paths)
# convert_path ()
def change_root (new_root, pathname):
"""Return 'pathname' with 'new_root' prepended. If 'pathname' is
relative, this is equivalent to "os.path.join(new_root,pathname)".
Otherwise, it requires making 'pathname' relative and then joining the
two, which is tricky on DOS/Windows and Mac OS.
"""
if os.name == 'posix':
if not os.path.isabs(pathname):
return os.path.join(new_root, pathname)
else:
return os.path.join(new_root, pathname[1:])
elif os.name == 'nt':
(drive, path) = os.path.splitdrive(pathname)
if path[0] == '\\':
path = path[1:]
return os.path.join(new_root, path)
elif os.name == 'os2':
(drive, path) = os.path.splitdrive(pathname)
if path[0] == os.sep:
path = path[1:]
return os.path.join(new_root, path)
else:
raise DistutilsPlatformError("nothing known about platform '%s'" % os.name)
_environ_checked = 0
def check_environ ():
"""Ensure that 'os.environ' has all the environment variables we
guarantee that users can use in config files, command-line options,
etc. Currently this includes:
HOME - user's home directory (Unix only)
PLAT - description of the current platform, including hardware
and OS (see 'get_platform()')
"""
global _environ_checked
if _environ_checked:
return
if os.name == 'posix' and 'HOME' not in os.environ:
import pwd
os.environ['HOME'] = pwd.getpwuid(os.getuid())[5]
if 'PLAT' not in os.environ:
os.environ['PLAT'] = get_platform()
_environ_checked = 1
def subst_vars (s, local_vars):
"""Perform shell/Perl-style variable substitution on 'string'. Every
occurrence of '$' followed by a name is considered a variable, and
variable is substituted by the value found in the 'local_vars'
dictionary, or in 'os.environ' if it's not in 'local_vars'.
'os.environ' is first checked/augmented to guarantee that it contains
certain values: see 'check_environ()'. Raise ValueError for any
variables not found in either 'local_vars' or 'os.environ'.
"""
check_environ()
def _subst (match, local_vars=local_vars):
var_name = match.group(1)
if var_name in local_vars:
return str(local_vars[var_name])
else:
return os.environ[var_name]
try:
return re.sub(r'\$([a-zA-Z_][a-zA-Z_0-9]*)', _subst, s)
except KeyError as var:
raise ValueError("invalid variable '$%s'" % var)
# subst_vars ()
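# Example (illustrative) of the substitution documented above:
#     subst_vars('install into $base/lib', {'base': '/usr/local'})
# returns 'install into /usr/local/lib'; names missing from both the local
# dictionary and os.environ raise ValueError.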
def grok_environment_error (exc, prefix="error: "):
"""Generate a useful error message from an EnvironmentError (IOError or
OSError) exception object. Handles Python 1.5.1 and 1.5.2 styles, and
does what it can to deal with exception objects that don't have a
filename (which happens when the error is due to a two-file operation,
    such as 'rename()' or 'link()'). Returns the error message as a string
prefixed with 'prefix'.
"""
# check for Python 1.5.2-style {IO,OS}Error exception objects
if hasattr(exc, 'filename') and hasattr(exc, 'strerror'):
if exc.filename:
error = prefix + "%s: %s" % (exc.filename, exc.strerror)
else:
# two-argument functions in posix module don't
# include the filename in the exception object!
error = prefix + "%s" % exc.strerror
else:
error = prefix + str(exc.args[-1])
return error
# Needed by 'split_quoted()'
_wordchars_re = _squote_re = _dquote_re = None
def _init_regex():
global _wordchars_re, _squote_re, _dquote_re
_wordchars_re = re.compile(r'[^\\\'\"%s ]*' % string.whitespace)
_squote_re = re.compile(r"'(?:[^'\\]|\\.)*'")
_dquote_re = re.compile(r'"(?:[^"\\]|\\.)*"')
def split_quoted (s):
"""Split a string up according to Unix shell-like rules for quotes and
backslashes. In short: words are delimited by spaces, as long as those
spaces are not escaped by a backslash, or inside a quoted string.
Single and double quotes are equivalent, and the quote characters can
be backslash-escaped. The backslash is stripped from any two-character
escape sequence, leaving only the escaped character. The quote
characters are stripped from any quoted string. Returns a list of
words.
"""
# This is a nice algorithm for splitting up a single string, since it
# doesn't require character-by-character examination. It was a little
# bit of a brain-bender to get it working right, though...
if _wordchars_re is None: _init_regex()
s = s.strip()
words = []
pos = 0
while s:
m = _wordchars_re.match(s, pos)
end = m.end()
if end == len(s):
words.append(s[:end])
break
if s[end] in string.whitespace: # unescaped, unquoted whitespace: now
words.append(s[:end]) # we definitely have a word delimiter
s = s[end:].lstrip()
pos = 0
elif s[end] == '\\': # preserve whatever is being escaped;
# will become part of the current word
s = s[:end] + s[end+1:]
pos = end+1
else:
if s[end] == "'": # slurp singly-quoted string
m = _squote_re.match(s, end)
elif s[end] == '"': # slurp doubly-quoted string
m = _dquote_re.match(s, end)
else:
raise RuntimeError("this can't happen (bad char '%c')" % s[end])
if m is None:
raise ValueError("bad string (mismatched %s quotes?)" % s[end])
(beg, end) = m.span()
s = s[:beg] + s[beg+1:end-1] + s[end:]
pos = m.end() - 2
if pos >= len(s):
words.append(s)
break
return words
# split_quoted ()
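# Example (illustrative) of the quoting rules implemented above:
#     split_quoted('one "two three" four\\ five')
# returns ['one', 'two three', 'four five'] -- quotes group words and an
# escaped space no longer acts as a delimiter.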
def execute (func, args, msg=None, verbose=0, dry_run=0):
"""Perform some action that affects the outside world (eg. by
writing to the filesystem). Such actions are special because they
are disabled by the 'dry_run' flag. This method takes care of all
that bureaucracy for you; all you have to do is supply the
function to call and an argument tuple for it (to embody the
"external action" being performed), and an optional message to
print.
"""
if msg is None:
msg = "%s%r" % (func.__name__, args)
if msg[-2:] == ',)': # correct for singleton tuple
msg = msg[0:-2] + ')'
log.info(msg)
if not dry_run:
func(*args)
def strtobool (val):
"""Convert a string representation of truth to true (1) or false (0).
True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values
are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if
'val' is anything else.
"""
val = val.lower()
if val in ('y', 'yes', 't', 'true', 'on', '1'):
return 1
elif val in ('n', 'no', 'f', 'false', 'off', '0'):
return 0
else:
raise ValueError("invalid truth value %r" % (val,))
def byte_compile (py_files,
optimize=0, force=0,
prefix=None, base_dir=None,
verbose=1, dry_run=0,
direct=None):
"""Byte-compile a collection of Python source files to either .pyc
or .pyo files in the same directory. 'py_files' is a list of files
to compile; any files that don't end in ".py" are silently skipped.
'optimize' must be one of the following:
0 - don't optimize (generate .pyc)
1 - normal optimization (like "python -O")
2 - extra optimization (like "python -OO")
If 'force' is true, all files are recompiled regardless of
timestamps.
The source filename encoded in each bytecode file defaults to the
filenames listed in 'py_files'; you can modify these with 'prefix' and
'basedir'. 'prefix' is a string that will be stripped off of each
source filename, and 'base_dir' is a directory name that will be
prepended (after 'prefix' is stripped). You can supply either or both
(or neither) of 'prefix' and 'base_dir', as you wish.
If 'dry_run' is true, doesn't actually do anything that would
affect the filesystem.
Byte-compilation is either done directly in this interpreter process
with the standard py_compile module, or indirectly by writing a
temporary script and executing it. Normally, you should let
'byte_compile()' figure out to use direct compilation or not (see
the source for details). The 'direct' flag is used by the script
generated in indirect mode; unless you know what you're doing, leave
it set to None.
"""
# nothing is done if sys.dont_write_bytecode is True
if sys.dont_write_bytecode:
raise DistutilsByteCompileError('byte-compiling is disabled.')
# First, if the caller didn't force us into direct or indirect mode,
# figure out which mode we should be in. We take a conservative
# approach: choose direct mode *only* if the current interpreter is
# in debug mode and optimize is 0. If we're not in debug mode (-O
# or -OO), we don't know which level of optimization this
# interpreter is running with, so we can't do direct
# byte-compilation and be certain that it's the right thing. Thus,
# always compile indirectly if the current interpreter is in either
# optimize mode, or if either optimization level was requested by
# the caller.
if direct is None:
direct = (__debug__ and optimize == 0)
# "Indirect" byte-compilation: write a temporary script and then
# run it with the appropriate flags.
if not direct:
try:
from tempfile import mkstemp
(script_fd, script_name) = mkstemp(".py")
except ImportError:
from tempfile import mktemp
(script_fd, script_name) = None, mktemp(".py")
log.info("writing byte-compilation script '%s'", script_name)
if not dry_run:
if script_fd is not None:
script = os.fdopen(script_fd, "w")
else:
script = open(script_name, "w")
script.write("""\
from distutils.util import byte_compile
files = [
""")
# XXX would be nice to write absolute filenames, just for
# safety's sake (script should be more robust in the face of
# chdir'ing before running it). But this requires abspath'ing
# 'prefix' as well, and that breaks the hack in build_lib's
# 'byte_compile()' method that carefully tacks on a trailing
# slash (os.sep really) to make sure the prefix here is "just
# right". This whole prefix business is rather delicate -- the
# problem is that it's really a directory, but I'm treating it
# as a dumb string, so trailing slashes and so forth matter.
#py_files = map(os.path.abspath, py_files)
#if prefix:
# prefix = os.path.abspath(prefix)
script.write(",\n".join(map(repr, py_files)) + "]\n")
script.write("""
byte_compile(files, optimize=%r, force=%r,
prefix=%r, base_dir=%r,
verbose=%r, dry_run=0,
direct=1)
""" % (optimize, force, prefix, base_dir, verbose))
script.close()
cmd = [sys.executable, script_name]
if optimize == 1:
cmd.insert(1, "-O")
elif optimize == 2:
cmd.insert(1, "-OO")
spawn(cmd, dry_run=dry_run)
execute(os.remove, (script_name,), "removing %s" % script_name,
dry_run=dry_run)
# "Direct" byte-compilation: use the py_compile module to compile
# right here, right now. Note that the script generated in indirect
# mode simply calls 'byte_compile()' in direct mode, a weird sort of
# cross-process recursion. Hey, it works!
else:
from py_compile import compile
for file in py_files:
if file[-3:] != ".py":
# This lets us be lazy and not filter filenames in
# the "install_lib" command.
continue
# Terminology from the py_compile module:
# cfile - byte-compiled file
# dfile - purported source filename (same as 'file' by default)
cfile = file + (__debug__ and "c" or "o")
dfile = file
if prefix:
if file[:len(prefix)] != prefix:
raise ValueError("invalid prefix: filename %r doesn't start with %r"
% (file, prefix))
dfile = dfile[len(prefix):]
if base_dir:
dfile = os.path.join(base_dir, dfile)
cfile_base = os.path.basename(cfile)
if direct:
if force or newer(file, cfile):
log.info("byte-compiling %s to %s", file, cfile_base)
if not dry_run:
compile(file, cfile, dfile)
else:
log.debug("skipping byte-compilation of %s to %s",
file, cfile_base)
# byte_compile ()
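# Usage sketch (illustrative): compile a module from the build tree while
# recording an install-time source path, as the install_lib command does:
#     byte_compile(['build/lib/pkg/mod.py'], optimize=0,
#                  prefix='build/lib/', base_dir='/usr/lib/python')
# writes 'build/lib/pkg/mod.pyc' whose recorded source filename is
# '/usr/lib/python/pkg/mod.py'.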
def rfc822_escape (header):
"""Return a version of the string escaped for inclusion in an
    RFC-822 header, by ensuring there are 8 spaces after each newline.
"""
lines = header.split('\n')
sep = '\n' + 8 * ' '
return sep.join(lines)
# 2to3 support
def run_2to3(files, fixer_names=None, options=None, explicit=None):
"""Invoke 2to3 on a list of Python files.
The files should all come from the build area, as the
modification is done in-place. To reduce the build time,
only files modified since the last invocation of this
function should be passed in the files argument."""
if not files:
return
# Make this class local, to delay import of 2to3
from lib2to3.refactor import RefactoringTool, get_fixers_from_package
class DistutilsRefactoringTool(RefactoringTool):
def log_error(self, msg, *args, **kw):
log.error(msg, *args)
def log_message(self, msg, *args):
log.info(msg, *args)
def log_debug(self, msg, *args):
log.debug(msg, *args)
if fixer_names is None:
fixer_names = get_fixers_from_package('lib2to3.fixes')
r = DistutilsRefactoringTool(fixer_names, options=options)
r.refactor(files, write=True)
def copydir_run_2to3(src, dest, template=None, fixer_names=None,
options=None, explicit=None):
"""Recursively copy a directory, only copying new and changed files,
running run_2to3 over all newly copied Python modules afterward.
If you give a template string, it's parsed like a MANIFEST.in.
"""
from distutils.dir_util import mkpath
from distutils.file_util import copy_file
from distutils.filelist import FileList
filelist = FileList()
curdir = os.getcwd()
os.chdir(src)
try:
filelist.findall()
finally:
os.chdir(curdir)
filelist.files[:] = filelist.allfiles
if template:
for line in template.splitlines():
line = line.strip()
if not line: continue
filelist.process_template_line(line)
copied = []
for filename in filelist.files:
outname = os.path.join(dest, filename)
mkpath(os.path.dirname(outname))
res = copy_file(os.path.join(src, filename), outname, update=1)
if res[1]: copied.append(outname)
run_2to3([fn for fn in copied if fn.lower().endswith('.py')],
fixer_names=fixer_names, options=options, explicit=explicit)
return copied
class Mixin2to3:
'''Mixin class for commands that run 2to3.
To configure 2to3, setup scripts may either change
the class variables, or inherit from individual commands
to override how 2to3 is invoked.'''
# provide list of fixers to run;
# defaults to all from lib2to3.fixers
fixer_names = None
# options dictionary
options = None
# list of fixers to invoke even though they are marked as explicit
explicit = None
def run_2to3(self, files):
return run_2to3(files, self.fixer_names, self.options, self.explicit)
| apache-2.0 |
naturalatlas/mapnik | scons/scons-local-3.0.1/SCons/Tool/mslib.py | 5 | 2224 | """SCons.Tool.mslib
Tool-specific initialization for lib (Microsoft library archiver).
There normally shouldn't be any need to import this module directly.
It will usually be imported through the generic SCons.Tool.Tool()
selection method.
"""
#
# Copyright (c) 2001 - 2017 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "src/engine/SCons/Tool/mslib.py 74b2c53bc42290e911b334a6b44f187da698a668 2017/11/14 13:16:53 bdbaddog"
import SCons.Defaults
import SCons.Tool
import SCons.Tool.msvs
import SCons.Tool.msvc
import SCons.Util
from .MSCommon import msvc_exists, msvc_setup_env_once
def generate(env):
"""Add Builders and construction variables for lib to an Environment."""
SCons.Tool.createStaticLibBuilder(env)
# Set-up ms tools paths
msvc_setup_env_once(env)
env['AR'] = 'lib'
env['ARFLAGS'] = SCons.Util.CLVar('/nologo')
env['ARCOM'] = "${TEMPFILE('$AR $ARFLAGS /OUT:$TARGET $SOURCES','$ARCOMSTR')}"
env['LIBPREFIX'] = ''
env['LIBSUFFIX'] = '.lib'
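# Usage sketch (illustrative): an SConstruct that loads this tool explicitly,
# after which StaticLibrary() archives object files with MSVC's lib.exe:
#     env = Environment(tools=['msvc', 'mslib'])
#     env.StaticLibrary('mylib', ['a.obj', 'b.obj'])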
def exists(env):
return msvc_exists()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
| lgpl-2.1 |
pythonlittleboy/python_gentleman_crawler | index/ActorDAO.py | 1 | 3021 | from index import SysConst
import time
def saveActors(actors):
newActors = []
conn = SysConst.getConnect()
for actor in actors:
exist = saveActor(actor, conn)
if not exist:
newActors.append(actor)
conn.commit()
conn.close()
return newActors
def saveActor(actor, conn):
name = actor["name"]
url = actor["url"]
shortName = actor["short_name"]
cursor = conn.cursor()
cursor = cursor.execute("SELECT * from t_actors where short_name=?", [shortName])
if (len(cursor.fetchall()) > 0):
return True
cursor.execute(
"insert into t_actors (name, url, short_name) values (?, ?, ?)",
[name, url, shortName])
return False
def getAllActors():
conn = SysConst.getConnect()
    yesterday = round(time.time() - 24 * 60 * 60)
    # NOTE: despite the name, this cutoff is three days ago, not one week.
    lastweek = round(time.time() - 24 * 60 * 60 * 3)
# print(yesterday)
cursor = conn.execute("SELECT name, url, short_name from t_actors where "
" (favor = 1 and last_read_time < ?) "
" or (favor = 0 and last_read_time < ?)"
" or last_read_time is null"
" order by favor desc, last_read_time desc", [yesterday, lastweek])
results = []
for row in cursor:
url = row[1]
# domain change to nanrenvip.co
#url = url.replace("www.nh87.cn", "nanrenvip.net")
url = url.replace("www.nh87.cn", "nanrenvip.co")
url = url.replace("nanrenvip.net", "nanrenvip.co")
url = url.replace("nanrenvip.co", "nanrenvip.org")
one = {"name": row[0], "url": url, "short_name": row[2]}
results.append(one)
conn.close()
return results
def getAllActorsFully():
conn = SysConst.getConnect()
# print(yesterday)
cursor = conn.execute("SELECT name, url, short_name from t_actors "
" order by favor desc, last_read_time desc", [])
results = []
for row in cursor:
one = {"name": row[0], "url": row[1], "short_name": row[2]}
results.append(one)
conn.close()
return results
def updateLastReadTime(name):
conn = SysConst.getConnect()
now = round(time.time())
cursor = conn.cursor()
cursor.execute(
"update t_actors set last_read_time = ? where name = ?",
[now, name])
conn.commit()
conn.close()
def getFavorActors():
conn = SysConst.getConnect()
cursor = conn.execute("SELECT * from t_actors where favor = 1")
results = []
for row in cursor:
one = {"name": row[0], "url": row[1]}
results.append(one)
conn.close()
return results
# print(len(getAllActors()))
# print(time.ctime(round(time.time()) - 24 * 60 * 60))
# updateLastReadTime("三浦惠理子")
#print(getFavorActors())
| apache-2.0 |
nexiles/odoo | addons/account/test/test_parent_structure.py | 432 | 2108 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
#
# TODO: move this in a YAML test with !python tag
#
import xmlrpclib
DB = 'training3'
USERID = 1
USERPASS = 'admin'
sock = xmlrpclib.ServerProxy('http://%s:%s/xmlrpc/object' % ('localhost',8069))
ids = sock.execute(DB, USERID, USERPASS, 'account.account', 'search', [], {})
account_lists = sock.execute(DB, USERID, USERPASS, 'account.account', 'read', ids, ['parent_id','parent_left','parent_right'])
accounts = dict(map(lambda x: (x['id'],x), account_lists))
for a in account_lists:
if a['parent_id']:
assert a['parent_left'] > accounts[a['parent_id'][0]]['parent_left']
assert a['parent_right'] < accounts[a['parent_id'][0]]['parent_right']
assert a['parent_left'] < a['parent_right']
for a2 in account_lists:
assert not ((a2['parent_right']>a['parent_left']) and
(a2['parent_left']<a['parent_left']) and
(a2['parent_right']<a['parent_right']))
if a2['parent_id']==a['id']:
assert (a2['parent_left']>a['parent_left']) and (a2['parent_right']<a['parent_right'])
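# The assertions above check the "nested set" invariants behind the
# parent_left/parent_right columns: each node's interval nests strictly
# inside its parent's and never partially overlaps another node's. For
# example, a root stored as (1, 6) with children (2, 3) and (4, 5) passes
# every check.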
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
evilp/android_kernel_hp_phobos | scripts/rt-tester/rt-tester.py | 11005 | 5307 | #!/usr/bin/python
#
# rt-mutex tester
#
# (C) 2006 Thomas Gleixner <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
import os
import sys
import getopt
import shutil
import string
# Globals
quiet = 0
test = 0
comments = 0
sysfsprefix = "/sys/devices/system/rttest/rttest"
statusfile = "/status"
commandfile = "/command"
# Command opcodes
cmd_opcodes = {
"schedother" : "1",
"schedfifo" : "2",
"lock" : "3",
"locknowait" : "4",
"lockint" : "5",
"lockintnowait" : "6",
"lockcont" : "7",
"unlock" : "8",
"signal" : "11",
"resetevent" : "98",
"reset" : "99",
}
test_opcodes = {
"prioeq" : ["P" , "eq" , None],
"priolt" : ["P" , "lt" , None],
"priogt" : ["P" , "gt" , None],
"nprioeq" : ["N" , "eq" , None],
"npriolt" : ["N" , "lt" , None],
"npriogt" : ["N" , "gt" , None],
"unlocked" : ["M" , "eq" , 0],
"trylock" : ["M" , "eq" , 1],
"blocked" : ["M" , "eq" , 2],
"blockedwake" : ["M" , "eq" , 3],
"locked" : ["M" , "eq" , 4],
"opcodeeq" : ["O" , "eq" , None],
"opcodelt" : ["O" , "lt" , None],
"opcodegt" : ["O" , "gt" , None],
"eventeq" : ["E" , "eq" , None],
"eventlt" : ["E" , "lt" , None],
"eventgt" : ["E" , "gt" , None],
}
# Print usage information
def usage():
print "rt-tester.py <-c -h -q -t> <testfile>"
print " -c display comments after first command"
print " -h help"
print " -q quiet mode"
print " -t test mode (syntax check)"
print " testfile: read test specification from testfile"
print " otherwise from stdin"
return
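# Editorial note (inferred from the parser below, not from original kernel
# documentation): each non-comment line of a test file has the form
#
#   cmd: opcode: threadid: data
#
# for example:
#
#   C: schedfifo: 0: 95     -> command: make thread 0 SCHED_FIFO, prio 95
#   C: locknowait: 0: 0     -> command: thread 0 takes lock 0 without waiting
#   W: locked: 0: 0         -> wait until thread 0 reports lock 0 as locked
#   T: prioeq: 0: 95        -> test that thread 0's priority equals 95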
# Print progress when not in quiet mode
def progress(msg):
    if not quiet:
        print msg
# Analyse a status value
def analyse(val, top, arg):
intval = int(val)
if top[0] == "M":
intval = intval / (10 ** int(arg))
intval = intval % 10
argval = top[2]
elif top[0] == "O":
argval = int(cmd_opcodes.get(arg, arg))
else:
argval = int(arg)
# progress("%d %s %d" %(intval, top[1], argval))
if top[1] == "eq" and intval == argval:
return 1
if top[1] == "lt" and intval < argval:
return 1
if top[1] == "gt" and intval > argval:
return 1
return 0
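# Editorial sketch (not part of the original script): for "M" opcodes the
# status value is treated as a decimal digit string indexed from the right,
# so analyse("40321", ["M", "eq", 3], "2") returns 1 because digit 2
# (counting from the right, zero-based) of 40321 is 3.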
# Parse the commandline
try:
(options, arguments) = getopt.getopt(sys.argv[1:],'chqt')
except getopt.GetoptError, ex:
usage()
sys.exit(1)
# Parse commandline options
for option, value in options:
if option == "-c":
comments = 1
elif option == "-q":
quiet = 1
elif option == "-t":
test = 1
elif option == '-h':
usage()
sys.exit(0)
# Select the input source
if arguments:
try:
fd = open(arguments[0])
except Exception,ex:
sys.stderr.write("File not found %s\n" %(arguments[0]))
sys.exit(1)
else:
fd = sys.stdin
linenr = 0
# Read the test patterns
while 1:
linenr = linenr + 1
line = fd.readline()
if not len(line):
break
line = line.strip()
parts = line.split(":")
if not parts or len(parts) < 1:
continue
if len(parts[0]) == 0:
continue
if parts[0].startswith("#"):
if comments > 1:
progress(line)
continue
if comments == 1:
comments = 2
progress(line)
cmd = parts[0].strip().lower()
opc = parts[1].strip().lower()
tid = parts[2].strip()
dat = parts[3].strip()
try:
# Test or wait for a status value
if cmd == "t" or cmd == "w":
testop = test_opcodes[opc]
fname = "%s%s%s" %(sysfsprefix, tid, statusfile)
if test:
print fname
continue
while 1:
query = 1
fsta = open(fname, 'r')
status = fsta.readline().strip()
fsta.close()
stat = status.split(",")
for s in stat:
s = s.strip()
if s.startswith(testop[0]):
# Separate status value
val = s[2:].strip()
query = analyse(val, testop, dat)
break
if query or cmd == "t":
break
progress(" " + status)
if not query:
sys.stderr.write("Test failed in line %d\n" %(linenr))
sys.exit(1)
# Issue a command to the tester
elif cmd == "c":
cmdnr = cmd_opcodes[opc]
# Build command string and sys filename
cmdstr = "%s:%s" %(cmdnr, dat)
fname = "%s%s%s" %(sysfsprefix, tid, commandfile)
if test:
print fname
continue
fcmd = open(fname, 'w')
fcmd.write(cmdstr)
fcmd.close()
except Exception,ex:
sys.stderr.write(str(ex))
sys.stderr.write("\nSyntax error in line %d\n" %(linenr))
if not test:
fd.close()
sys.exit(1)
# Normal exit pass
print "Pass"
sys.exit(0)
| gpl-2.0 |
samuelshaner/openmc | tests/test_mgxs_library_ce_to_mg/test_mgxs_library_ce_to_mg.py | 1 | 3449 | #!/usr/bin/env python
import os
import sys
import glob
import hashlib
sys.path.insert(0, os.pardir)
from testing_harness import PyAPITestHarness
from input_set import PinCellInputSet
import openmc
import openmc.mgxs
class MGXSTestHarness(PyAPITestHarness):
def _build_inputs(self):
# Set the input set to use the pincell model
self._input_set = PinCellInputSet()
# Generate inputs using parent class routine
super(MGXSTestHarness, self)._build_inputs()
# Initialize a two-group structure
energy_groups = openmc.mgxs.EnergyGroups(group_edges=[0, 0.625, 20.e6])
# Initialize MGXS Library for a few cross section types
self.mgxs_lib = openmc.mgxs.Library(self._input_set.geometry)
self.mgxs_lib.by_nuclide = False
self.mgxs_lib.mgxs_types = ['total', 'absorption', 'nu-fission matrix',
'nu-scatter matrix', 'multiplicity matrix']
self.mgxs_lib.energy_groups = energy_groups
self.mgxs_lib.correction = None
self.mgxs_lib.legendre_order = 3
self.mgxs_lib.domain_type = 'material'
self.mgxs_lib.build_library()
# Initialize a tallies file
self._input_set.tallies = openmc.Tallies()
self.mgxs_lib.add_to_tallies_file(self._input_set.tallies, merge=False)
self._input_set.tallies.export_to_xml()
def _run_openmc(self):
# Initial run
if self._opts.mpi_exec is not None:
returncode = openmc.run(mpi_procs=self._opts.mpi_np,
openmc_exec=self._opts.exe,
mpi_exec=self._opts.mpi_exec)
else:
returncode = openmc.run(openmc_exec=self._opts.exe)
        assert returncode == 0, 'CE OpenMC calculation did not exit ' \
            'successfully.'
# Build MG Inputs
# Get data needed to execute Library calculations.
statepoint = glob.glob(os.path.join(os.getcwd(), self._sp_name))[0]
sp = openmc.StatePoint(statepoint)
self.mgxs_lib.load_from_statepoint(sp)
self._input_set.mgxs_file, self._input_set.materials, \
self._input_set.geometry = self.mgxs_lib.create_mg_mode()
# Modify materials and settings so we can run in MG mode
self._input_set.materials.cross_sections = './mgxs.h5'
self._input_set.settings.energy_mode = 'multi-group'
# Write modified input files
self._input_set.settings.export_to_xml()
self._input_set.geometry.export_to_xml()
self._input_set.materials.export_to_xml()
self._input_set.mgxs_file.export_to_hdf5()
        # Don't need tallies.xml, so remove the file
if os.path.exists('./tallies.xml'):
os.remove('./tallies.xml')
# Re-run MG mode.
if self._opts.mpi_exec is not None:
returncode = openmc.run(mpi_procs=self._opts.mpi_np,
openmc_exec=self._opts.exe,
mpi_exec=self._opts.mpi_exec)
else:
returncode = openmc.run(openmc_exec=self._opts.exe)
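    # Editorial summary (not part of the original test): _run_openmc performs
    # a continuous-energy run, collapses its tallies into the two-group MGXS
    # library built in _build_inputs, rewrites the inputs for multi-group
    # mode, and runs OpenMC again on the generated mgxs.h5 data.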
def _cleanup(self):
super(MGXSTestHarness, self)._cleanup()
f = os.path.join(os.getcwd(), 'mgxs.h5')
if os.path.exists(f):
os.remove(f)
if __name__ == '__main__':
harness = MGXSTestHarness('statepoint.10.*', False)
harness.main()
| mit |
DelazJ/QGIS | python/plugins/db_manager/db_plugins/oracle/sql_dictionary.py | 30 | 14745 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : DB Manager
Description : Database manager plugin for QGIS (Oracle)
Date : Aug 27, 2014
copyright : (C) 2014 by Médéric RIBREUX
email : [email protected]
The content of this file is based on
- PG_Manager by Martin Dobias <[email protected]> (GPLv2 license)
- DB Manager by Giuseppe Sucameli <[email protected]> (GPLv2 license)
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
__author__ = 'Médéric RIBREUX'
__date__ = 'August 2014'
__copyright__ = '(C) 2014, Médéric RIBREUX'
# keywords
keywords = [
# From:
# http://docs.oracle.com/cd/B19306_01/server.102/b14200/ap_keywd.htm
"ACCESS", "ADD", "ALL", "ALTER", "AND", "ANY", "AS", "ASC",
"AUDIT", "BETWEEN", "BY", "CHAR", "CHECK", "CLUSTER", "COLUMN",
"COMMENT", "COMPRESS", "CONNECT", "CREATE", "CURRENT", "DATE",
"DECIMAL", "DEFAULT", "DELETE", "DESC", "DISTINCT", "DROP",
"ELSE", "EXCLUSIVE", "EXISTS", "FILE", "FLOAT", "FOR", "FROM",
"GRANT", "GROUP", "HAVING", "IDENTIFIED", "IMMEDIATE", "IN",
"INCREMENT", "INDEX", "INITIAL", "INSERT", "INTEGER", "INTERSECT",
"INTO", "IS", "LEVEL", "LIKE", "LOCK", "LONG", "MAXEXTENTS",
"MINUS", "MLSLABEL", "MODE", "MODIFY", "NOAUDIT", "NOCOMPRESS",
"NOT", "NOWAIT", "NULL", "NUMBER", "OF", "OFFLINE", "ON",
"ONLINE", "OPTION", "OR", "ORDER", "PCTFREE", "PRIOR",
"PRIVILEGES", "PUBLIC", "RAW", "RENAME", "RESOURCE", "REVOKE",
"ROW", "ROWID", "ROWNUM", "ROWS", "SELECT", "SESSION", "SET",
"SHARE", "SIZE", "SMALLINT", "START", "SUCCESSFUL", "SYNONYM",
"SYSDATE", "TABLE", "THEN", "TO", "TRIGGER", "UID", "UNION",
"UNIQUE", "UPDATE", "USER", "VALIDATE", "VALUES", "VARCHAR",
"VARCHAR2", "VIEW", "WHENEVER", "WHERE", "WITH",
# From http://docs.oracle.com/cd/B13789_01/appdev.101/a42525/apb.htm
"ADMIN", "CURSOR", "FOUND", "MOUNT", "AFTER", "CYCLE", "FUNCTION",
"NEXT", "ALLOCATE", "DATABASE", "GO", "NEW", "ANALYZE",
"DATAFILE", "GOTO", "NOARCHIVELOG", "ARCHIVE", "DBA", "GROUPS",
"NOCACHE", "ARCHIVELOG", "DEC", "INCLUDING", "NOCYCLE",
"AUTHORIZATION", "DECLARE", "INDICATOR", "NOMAXVALUE", "AVG",
"DISABLE", "INITRANS", "NOMINVALUE", "BACKUP", "DISMOUNT",
"INSTANCE", "NONE", "BEGIN", "DOUBLE", "INT", "NOORDER", "BECOME",
"DUMP", "KEY", "NORESETLOGS", "BEFORE", "EACH", "LANGUAGE",
"NORMAL", "BLOCK", "ENABLE", "LAYER", "NOSORT", "BODY", "END",
"LINK", "NUMERIC", "CACHE", "ESCAPE", "LISTS", "OFF", "CANCEL",
"EVENTS", "LOGFILE", "OLD", "CASCADE", "EXCEPT", "MANAGE", "ONLY",
"CHANGE", "EXCEPTIONS", "MANUAL", "OPEN", "CHARACTER", "EXEC",
"MAX", "OPTIMAL", "CHECKPOINT", "EXPLAIN", "MAXDATAFILES", "OWN",
"CLOSE", "EXECUTE", "MAXINSTANCES", "PACKAGE", "COBOL", "EXTENT",
"MAXLOGFILES", "PARALLEL", "COMMIT", "EXTERNALLY",
"MAXLOGHISTORY", "PCTINCREASE", "COMPILE", "FETCH",
"MAXLOGMEMBERS", "PCTUSED", "CONSTRAINT", "FLUSH", "MAXTRANS",
"PLAN", "CONSTRAINTS", "FREELIST", "MAXVALUE", "PLI", "CONTENTS",
"FREELISTS", "MIN", "PRECISION", "CONTINUE", "FORCE",
"MINEXTENTS", "PRIMARY", "CONTROLFILE", "FOREIGN", "MINVALUE",
"PRIVATE", "COUNT", "FORTRAN", "MODULE", "PROCEDURE", "PROFILE",
"SAVEPOINT", "SQLSTATE", "TRACING", "QUOTA", "SCHEMA",
"STATEMENT_ID", "TRANSACTION", "READ", "SCN", "STATISTICS",
"TRIGGERS", "REAL", "SECTION", "STOP", "TRUNCATE", "RECOVER",
"SEGMENT", "STORAGE", "UNDER", "REFERENCES", "SEQUENCE", "SUM",
"UNLIMITED", "REFERENCING", "SHARED", "SWITCH", "UNTIL",
"RESETLOGS", "SNAPSHOT", "SYSTEM", "USE", "RESTRICTED", "SOME",
"TABLES", "USING", "REUSE", "SORT", "TABLESPACE", "WHEN", "ROLE",
"SQL", "TEMPORARY", "WRITE", "ROLES", "SQLCODE", "THREAD", "WORK",
"ROLLBACK", "SQLERROR", "TIME", "ABORT", "BETWEEN", "CRASH",
"DIGITS", "ACCEPT", "BINARY_INTEGER", "CREATE", "DISPOSE",
"ACCESS", "BODY", "CURRENT", "DISTINCT", "ADD", "BOOLEAN",
"CURRVAL", "DO", "ALL", "BY", "CURSOR", "DROP", "ALTER", "CASE",
"DATABASE", "ELSE", "AND", "CHAR", "DATA_BASE", "ELSIF", "ANY",
"CHAR_BASE", "DATE", "END", "ARRAY", "CHECK", "DBA", "ENTRY",
"ARRAYLEN", "CLOSE", "DEBUGOFF", "EXCEPTION", "AS", "CLUSTER",
"DEBUGON", "EXCEPTION_INIT", "ASC", "CLUSTERS", "DECLARE",
"EXISTS", "ASSERT", "COLAUTH", "DECIMAL", "EXIT", "ASSIGN",
"COLUMNS", "DEFAULT", "FALSE", "AT", "COMMIT", "DEFINITION",
"FETCH", "AUTHORIZATION", "COMPRESS", "DELAY", "FLOAT", "AVG",
"CONNECT", "DELETE", "FOR", "BASE_TABLE", "CONSTANT", "DELTA",
"FORM", "BEGIN", "COUNT", "DESC", "FROM", "FUNCTION", "NEW",
"RELEASE", "SUM", "GENERIC", "NEXTVAL", "REMR", "TABAUTH", "GOTO",
"NOCOMPRESS", "RENAME", "TABLE", "GRANT", "NOT", "RESOURCE",
"TABLES", "GROUP", "NULL", "RETURN", "TASK", "HAVING", "NUMBER",
"REVERSE", "TERMINATE", "IDENTIFIED", "NUMBER_BASE", "REVOKE",
"THEN", "IF", "OF", "ROLLBACK", "TO", "IN", "ON", "ROWID", "TRUE",
"INDEX", "OPEN", "ROWLABEL", "TYPE", "INDEXES", "OPTION",
"ROWNUM", "UNION", "INDICATOR", "OR", "ROWTYPE", "UNIQUE",
"INSERT", "ORDER", "RUN", "UPDATE", "INTEGER", "OTHERS",
"SAVEPOINT", "USE", "INTERSECT", "OUT", "SCHEMA", "VALUES",
"INTO", "PACKAGE", "SELECT", "VARCHAR", "IS", "PARTITION",
"SEPARATE", "VARCHAR2", "LEVEL", "PCTFREE", "SET", "VARIANCE",
"LIKE", "POSITIVE", "SIZE", "VIEW", "LIMITED", "PRAGMA",
"SMALLINT", "VIEWS", "LOOP", "PRIOR", "SPACE", "WHEN", "MAX",
"PRIVATE", "SQL", "WHERE", "MIN", "PROCEDURE", "SQLCODE", "WHILE",
"MINUS", "PUBLIC", "SQLERRM", "WITH", "MLSLABEL", "RAISE",
"START", "WORK", "MOD", "RANGE", "STATEMENT", "XOR", "MODE",
"REAL", "STDDEV", "NATURAL", "RECORD", "SUBTYPE"
]
oracle_spatial_keywords = []
# SQL functions
# other than math/string/aggregate/date/conversion/xml/data mining
functions = [
# FROM
# https://docs.oracle.com/cd/B19306_01/server.102/b14200/functions001.htm
"CAST", "COALESCE", "DECODE", "GREATEST", "LEAST", "LNNVL",
"NULLIF", "NVL", "NVL2", "SET", "UID", "USER", "USERENV"
]
# SQL math functions
math_functions = [
'ABS', 'ACOS', 'ASIN', 'ATAN', 'ATAN2', 'BITAND', 'CEIL', 'COS',
'COSH', 'EXP', 'FLOOR', 'LN', 'LOG', 'MOD', 'NANVL', 'POWER',
'REMAINDER', 'ROUND', 'SIGN', 'SIN', 'SINH', 'SQRT', 'TAN',
'TANH', 'TRUNC', 'WIDTH_BUCKET'
]
# Strings functions
string_functions = [
'CHR', 'CONCAT', 'INITCAP', 'LOWER', 'LPAD', 'LTRIM', 'NLS_INITCAP',
'NLS_LOWER', 'NLSSORT', 'NLS_UPPER', 'REGEXP_REPLACE', 'REGEXP_SUBSTR',
'REPLACE', 'RPAD', 'RTRIM', 'SOUNDEX', 'SUBSTR', 'TRANSLATE', 'TREAT',
'TRIM', 'UPPER', 'ASCII', 'INSTR', 'LENGTH', 'REGEXP_INSTR'
]
# Aggregate functions
aggregate_functions = [
'AVG', 'COLLECT', 'CORR', 'COUNT', 'COVAR_POP', 'COVAR_SAMP', 'CUME_DIST',
'DENSE_RANK', 'FIRST', 'GROUP_ID', 'GROUPING', 'GROUPING_ID',
'LAST', 'MAX', 'MEDIAN', 'MIN', 'PERCENTILE_CONT',
'PERCENTILE_DISC', 'PERCENT_RANK', 'RANK',
'STATS_BINOMIAL_TEST', 'STATS_CROSSTAB', 'STATS_F_TEST',
'STATS_KS_TEST', 'STATS_MODE', 'STATS_MW_TEST',
'STATS_ONE_WAY_ANOVA', 'STATS_WSR_TEST', 'STDDEV',
'STDDEV_POP', 'STDDEV_SAMP', 'SUM', 'SYS_XMLAGG', 'VAR_POP',
'VAR_SAMP', 'VARIANCE', 'XMLAGG'
]
oracle_spatial_functions = [
# From http://docs.oracle.com/cd/B19306_01/appdev.102/b14255/toc.htm
# Spatial operators
"SDO_ANYINTERACT", "SDO_CONTAINS", "SDO_COVEREDBY", "SDO_COVERS",
"SDO_EQUAL", "SDO_FILTER", "SDO_INSIDE", "SDO_JOIN", "SDO_NN",
"SDO_NN_DISTANCE", "SDO_ON", "SDO_OVERLAPBDYDISJOINT",
"SDO_OVERLAPBDYINTERSECT", "SDO_OVERLAPS", "SDO_RELATE",
"SDO_TOUCH", "SDO_WITHIN_DISTANCE",
# SPATIAL AGGREGATE FUNCTIONS
"SDO_AGGR_CENTROID", "SDO_AGGR_CONCAT_LINES",
"SDO_AGGR_CONVEXHULL", "SDO_AGGR_LRS_CONCAT", "SDO_AGGR_MBR",
"SDO_AGGR_UNION",
# COORDINATE SYSTEM TRANSFORMATION (SDO_CS)
"SDO_CS.ADD_PREFERENCE_FOR_OP", "SDO_CS.CONVERT_NADCON_TO_XML",
"SDO_CS.CONVERT_NTV2_TO_XML", "SDO_CS.CONVERT_XML_TO_NADCON",
"SDO_CS.CONVERT_XML_TO_NTV2", "SDO_CS.CREATE_CONCATENATED_OP",
"SDO_CS.CREATE_OBVIOUS_EPSG_RULES",
"SDO_CS.CREATE_PREF_CONCATENATED_OP",
"SDO_CS.DELETE_ALL_EPSG_RULES", "SDO_CS.DELETE_OP",
"SDO_CS.DETERMINE_CHAIN", "SDO_CS.DETERMINE_DEFAULT_CHAIN",
"SDO_CS.FIND_GEOG_CRS", "SDO_CS.FIND_PROJ_CRS",
"SDO_CS.FROM_OGC_SIMPLEFEATURE_SRS", "SDO_CS.FROM_USNG",
"SDO_CS.MAP_EPSG_SRID_TO_ORACLE",
"SDO_CS.MAP_ORACLE_SRID_TO_EPSG",
"SDO_CS.REVOKE_PREFERENCE_FOR_OP",
"SDO_CS.TO_OGC_SIMPLEFEATURE_SRS", "SDO_CS.TO_USNG",
"SDO_CS.TRANSFORM", "SDO_CS.TRANSFORM_LAYER",
"SDO_CS.UPDATE_WKTS_FOR_ALL_EPSG_CRS",
"SDO_CS.UPDATE_WKTS_FOR_EPSG_CRS",
"SDO_CS.UPDATE_WKTS_FOR_EPSG_DATUM",
"SDO_CS.UPDATE_WKTS_FOR_EPSG_ELLIPS",
"SDO_CS.UPDATE_WKTS_FOR_EPSG_OP",
"SDO_CS.UPDATE_WKTS_FOR_EPSG_PARAM",
"SDO_CS.UPDATE_WKTS_FOR_EPSG_PM", "SDO_CS.VALIDATE_WKT",
"SDO_CS.VIEWPORT_TRANSFORM",
# GEOCODING (SDO_GCDR)
"SDO_GCDR.GEOCODE", "SDO_GCDR.GEOCODE_ADDR",
"SDO_GCDR.GEOCODE_ADDR_ALL", "SDO_GCDR.GEOCODE_ALL",
"SDO_GCDR.GEOCODE_AS_GEOMETRY", "SDO_GCDR.REVERSE_GEOCODE",
# GEOMETRY (SDO_GEOM)
"SDO_GEOM.RELATE", "SDO_GEOM.SDO_ARC_DENSIFY",
"SDO_GEOM.SDO_AREA", "SDO_GEOM.SDO_BUFFER",
"SDO_GEOM.SDO_CENTROID", "SDO_GEOM.SDO_CONVEXHULL",
"SDO_GEOM.SDO_DIFFERENCE", "SDO_GEOM.SDO_DISTANCE",
"SDO_GEOM.SDO_INTERSECTION", "SDO_GEOM.SDO_LENGTH",
"SDO_GEOM.SDO_MAX_MBR_ORDINATE", "SDO_GEOM.SDO_MBR",
"SDO_GEOM.SDO_MIN_MBR_ORDINATE", "SDO_GEOM.SDO_POINTONSURFACE",
"SDO_GEOM.SDO_UNION", "SDO_GEOM.SDO_XOR",
"SDO_GEOM.VALIDATE_GEOMETRY_WITH_CONTEXT",
"SDO_GEOM.VALIDATE_LAYER_WITH_CONTEXT",
"SDO_GEOM.WITHIN_DISTANCE",
# LINEAR REFERENCING SYSTEM (SDO_LRS)
"SDO_LRS.CLIP_GEOM_SEGMENT", "SDO_LRS.CONCATENATE_GEOM_SEGMENTS",
"SDO_LRS.CONNECTED_GEOM_SEGMENTS",
"SDO_LRS.CONVERT_TO_LRS_DIM_ARRAY", "SDO_LRS.CONVERT_TO_LRS_GEOM",
"SDO_LRS.CONVERT_TO_LRS_LAYER",
"SDO_LRS.CONVERT_TO_STD_DIM_ARRAY", "SDO_LRS.CONVERT_TO_STD_GEOM",
"SDO_LRS.CONVERT_TO_STD_LAYER", "SDO_LRS.DEFINE_GEOM_SEGMENT",
"SDO_LRS.DYNAMIC_SEGMENT", "SDO_LRS.FIND_LRS_DIM_POS",
"SDO_LRS.FIND_MEASURE", "SDO_LRS.FIND_OFFSET",
"SDO_LRS.GEOM_SEGMENT_END_MEASURE", "SDO_LRS.GEOM_SEGMENT_END_PT",
"SDO_LRS.GEOM_SEGMENT_LENGTH",
"SDO_LRS.GEOM_SEGMENT_START_MEASURE",
"SDO_LRS.GEOM_SEGMENT_START_PT", "SDO_LRS.GET_MEASURE",
"SDO_LRS.GET_NEXT_SHAPE_PT", "SDO_LRS.GET_NEXT_SHAPE_PT_MEASURE",
"SDO_LRS.GET_PREV_SHAPE_PT", "SDO_LRS.GET_PREV_SHAPE_PT_MEASURE",
"SDO_LRS.IS_GEOM_SEGMENT_DEFINED",
"SDO_LRS.IS_MEASURE_DECREASING", "SDO_LRS.IS_MEASURE_INCREASING",
"SDO_LRS.IS_SHAPE_PT_MEASURE", "SDO_LRS.LOCATE_PT",
"SDO_LRS.LRS_INTERSECTION", "SDO_LRS.MEASURE_RANGE",
"SDO_LRS.MEASURE_TO_PERCENTAGE", "SDO_LRS.OFFSET_GEOM_SEGMENT",
"SDO_LRS.PERCENTAGE_TO_MEASURE", "SDO_LRS.PROJECT_PT",
"SDO_LRS.REDEFINE_GEOM_SEGMENT", "SDO_LRS.RESET_MEASURE",
"SDO_LRS.REVERSE_GEOMETRY", "SDO_LRS.REVERSE_MEASURE",
"SDO_LRS.SET_PT_MEASURE", "SDO_LRS.SPLIT_GEOM_SEGMENT",
"SDO_LRS.TRANSLATE_MEASURE", "SDO_LRS.VALID_GEOM_SEGMENT",
"SDO_LRS.VALID_LRS_PT", "SDO_LRS.VALID_MEASURE",
"SDO_LRS.VALIDATE_LRS_GEOMETRY",
# SDO_MIGRATE
"SDO_MIGRATE.TO_CURRENT",
# SPATIAL ANALYSIS AND MINING (SDO_SAM)
"SDO_SAM.AGGREGATES_FOR_GEOMETRY", "SDO_SAM.AGGREGATES_FOR_LAYER",
"SDO_SAM.BIN_GEOMETRY", "SDO_SAM.BIN_LAYER",
"SDO_SAM.COLOCATED_REFERENCE_FEATURES",
"SDO_SAM.SIMPLIFY_GEOMETRY", "SDO_SAM.SIMPLIFY_LAYER",
"SDO_SAM.SPATIAL_CLUSTERS", "SDO_SAM.TILED_AGGREGATES",
"SDO_SAM.TILED_BINS",
# TUNING (SDO_TUNE)
"SDO_TUNE.AVERAGE_MBR", "SDO_TUNE.ESTIMATE_RTREE_INDEX_SIZE",
"SDO_TUNE.EXTENT_OF", "SDO_TUNE.MIX_INFO",
"SDO_TUNE.QUALITY_DEGRADATION",
# UTILITY (SDO_UTIL)
"SDO_UTIL.APPEND", "SDO_UTIL.CIRCLE_POLYGON",
"SDO_UTIL.CONCAT_LINES", "SDO_UTIL.CONVERT_UNIT",
"SDO_UTIL.ELLIPSE_POLYGON", "SDO_UTIL.EXTRACT",
"SDO_UTIL.FROM_WKBGEOMETRY", "SDO_UTIL.FROM_WKTGEOMETRY",
"SDO_UTIL.GETNUMELEM", "SDO_UTIL.GETNUMVERTICES",
"SDO_UTIL.GETVERTICES", "SDO_UTIL.INITIALIZE_INDEXES_FOR_TTS",
"SDO_UTIL.POINT_AT_BEARING", "SDO_UTIL.POLYGONTOLINE",
"SDO_UTIL.PREPARE_FOR_TTS", "SDO_UTIL.RECTIFY_GEOMETRY",
"SDO_UTIL.REMOVE_DUPLICATE_VERTICES",
"SDO_UTIL.REVERSE_LINESTRING", "SDO_UTIL.SIMPLIFY",
"SDO_UTIL.TO_GMLGEOMETRY", "SDO_UTIL.TO_WKBGEOMETRY",
"SDO_UTIL.TO_WKTGEOMETRY", "SDO_UTIL.VALIDATE_WKBGEOMETRY",
"SDO_UTIL.VALIDATE_WKTGEOMETRY"
]
# Oracle Operators
operators = [
' AND ', ' OR ', '||', ' < ', ' <= ', ' > ', ' >= ', ' = ',
' <> ', '!=', '^=', ' IS ', ' IS NOT ', ' IN ', ' ANY ', ' SOME ',
' NOT IN ', ' LIKE ', ' GLOB ', ' MATCH ', ' REGEXP ',
' BETWEEN x AND y ', ' NOT BETWEEN x AND y ', ' EXISTS ',
' IS NULL ', ' IS NOT NULL', ' ALL ', ' NOT ',
' CASE {column} WHEN {value} THEN {value} '
]
# constants
constants = ["null", "false", "true"]
oracle_spatial_constants = []
def getSqlDictionary(spatial=True):
k, c, f = list(keywords), list(constants), list(functions)
if spatial:
k += oracle_spatial_keywords
f += oracle_spatial_functions
c += oracle_spatial_constants
return {'keyword': k, 'constant': c, 'function': f}
def getQueryBuilderDictionary():
    # Helpers: drop entries flagged with a leading '*' and append an opening
    # parenthesis so function names can be inserted ready to call.
    def ff(l):
        return [s for s in l if s[0] != '*']
    def add_paren(l):
        return [s + "(" for s in l]
foo = sorted(
add_paren(
ff(
list(
set.union(set(functions),
set(oracle_spatial_functions))))))
m = sorted(add_paren(ff(math_functions)))
agg = sorted(add_paren(ff(aggregate_functions)))
op = ff(operators)
s = sorted(add_paren(ff(string_functions)))
return {'function': foo, 'math': m, 'aggregate': agg,
'operator': op, 'string': s}
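# Editorial sketch (not part of the original module): both helpers return
# plain dicts of word lists for the SQL editor's completer/highlighter.
def _example_dictionaries():
    d = getSqlDictionary(spatial=True)
    assert set(d) == {'keyword', 'constant', 'function'}
    qb = getQueryBuilderDictionary()
    assert set(qb) == {'function', 'math', 'aggregate', 'operator', 'string'}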
| gpl-2.0 |
ogrisel/scipy | scipy/cluster/hierarchy.py | 5 | 93671 | """
========================================================
Hierarchical clustering (:mod:`scipy.cluster.hierarchy`)
========================================================
.. currentmodule:: scipy.cluster.hierarchy
These functions cut hierarchical clusterings into flat clusterings
or find the roots of the forest formed by a cut by providing the flat
cluster ids of each observation.
.. autosummary::
:toctree: generated/
fcluster
fclusterdata
leaders
These are routines for agglomerative clustering.
.. autosummary::
:toctree: generated/
linkage
single
complete
average
weighted
centroid
median
ward
These routines compute statistics on hierarchies.
.. autosummary::
:toctree: generated/
cophenet
from_mlab_linkage
inconsistent
maxinconsts
maxdists
maxRstat
to_mlab_linkage
Routines for visualizing flat clusters.
.. autosummary::
:toctree: generated/
dendrogram
These are data structures and routines for representing hierarchies as
tree objects.
.. autosummary::
:toctree: generated/
ClusterNode
leaves_list
to_tree
These are predicates for checking the validity of linkage and
inconsistency matrices as well as for checking isomorphism of two
flat cluster assignments.
.. autosummary::
:toctree: generated/
is_valid_im
is_valid_linkage
is_isomorphic
is_monotonic
correspond
num_obs_linkage
Utility routines for plotting:
.. autosummary::
:toctree: generated/
set_link_color_palette
References
----------
.. [1] "Statistics toolbox." API Reference Documentation. The MathWorks.
http://www.mathworks.com/access/helpdesk/help/toolbox/stats/.
Accessed October 1, 2007.
.. [2] "Hierarchical clustering." API Reference Documentation.
The Wolfram Research, Inc.
http://reference.wolfram.com/mathematica/HierarchicalClustering/tutorial/
HierarchicalClustering.html.
Accessed October 1, 2007.
.. [3] Gower, JC and Ross, GJS. "Minimum Spanning Trees and Single Linkage
Cluster Analysis." Applied Statistics. 18(1): pp. 54--64. 1969.
.. [4] Ward Jr, JH. "Hierarchical grouping to optimize an objective
function." Journal of the American Statistical Association. 58(301):
pp. 236--44. 1963.
.. [5] Johnson, SC. "Hierarchical clustering schemes." Psychometrika.
32(2): pp. 241--54. 1966.
.. [6] Sneath, PH and Sokal, RR. "Numerical taxonomy." Nature. 193: pp.
855--60. 1962.
.. [7] Batagelj, V. "Comparing resemblance measures." Journal of
Classification. 12: pp. 73--90. 1995.
.. [8] Sokal, RR and Michener, CD. "A statistical method for evaluating
systematic relationships." Scientific Bulletins. 38(22):
pp. 1409--38. 1958.
.. [9] Edelbrock, C. "Mixture model tests of hierarchical clustering
algorithms: the problem of classifying everybody." Multivariate
Behavioral Research. 14: pp. 367--84. 1979.
.. [10] Jain, A., and Dubes, R., "Algorithms for Clustering Data."
Prentice-Hall. Englewood Cliffs, NJ. 1988.
.. [11] Fisher, RA "The use of multiple measurements in taxonomic
problems." Annals of Eugenics, 7(2): 179-188. 1936
* MATLAB and MathWorks are registered trademarks of The MathWorks, Inc.
* Mathematica is a registered trademark of The Wolfram Research, Inc.
"""
from __future__ import division, print_function, absolute_import
# Copyright (C) Damian Eads, 2007-2008. New BSD License.
# hierarchy.py (derived from cluster.py, http://scipy-cluster.googlecode.com)
#
# Author: Damian Eads
# Date: September 22, 2007
#
# Copyright (c) 2007, 2008, Damian Eads
#
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# - Redistributions of source code must retain the above
# copyright notice, this list of conditions and the
# following disclaimer.
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# - Neither the name of the author nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import warnings
import numpy as np
from . import _hierarchy_wrap
import scipy.spatial.distance as distance
from scipy.lib.six import string_types
from scipy.lib.six import xrange
_cpy_non_euclid_methods = {'single': 0, 'complete': 1, 'average': 2,
'weighted': 6}
_cpy_euclid_methods = {'centroid': 3, 'median': 4, 'ward': 5}
_cpy_linkage_methods = set(_cpy_non_euclid_methods.keys()).union(
set(_cpy_euclid_methods.keys()))
__all__ = ['ClusterNode', 'average', 'centroid', 'complete', 'cophenet',
'correspond', 'dendrogram', 'fcluster', 'fclusterdata',
'from_mlab_linkage', 'inconsistent', 'is_isomorphic',
'is_monotonic', 'is_valid_im', 'is_valid_linkage', 'leaders',
'leaves_list', 'linkage', 'maxRstat', 'maxdists', 'maxinconsts',
'median', 'num_obs_linkage', 'set_link_color_palette', 'single',
'to_mlab_linkage', 'to_tree', 'ward', 'weighted', 'distance']
def _warning(s):
warnings.warn('scipy.cluster: %s' % s, stacklevel=3)
def _copy_array_if_base_present(a):
"""
Copies the array if its base points to a parent array.
"""
if a.base is not None:
return a.copy()
elif np.issubsctype(a, np.float32):
return np.array(a, dtype=np.double)
else:
return a
def _copy_arrays_if_base_present(T):
"""
Accepts a tuple of arrays T. Copies the array T[i] if its base array
points to an actual array. Otherwise, the reference is just copied.
This is useful if the arrays are being passed to a C function that
does not do proper striding.
"""
l = [_copy_array_if_base_present(a) for a in T]
return l
def _randdm(pnts):
""" Generates a random distance matrix stored in condensed form. A
pnts * (pnts - 1) / 2 sized vector is returned.
"""
if pnts >= 2:
        # Integer division: "/" is true division in this module
        # ("from __future__ import division"), so "//" keeps the size integral.
        D = np.random.rand(pnts * (pnts - 1) // 2)
else:
raise ValueError("The number of points in the distance matrix "
"must be at least 2.")
return D
def single(y):
"""
Performs single/min/nearest linkage on the condensed distance matrix ``y``
Parameters
----------
y : ndarray
The upper triangular of the distance matrix. The result of
``pdist`` is returned in this form.
Returns
-------
Z : ndarray
The linkage matrix.
See Also
--------
linkage: for advanced creation of hierarchical clusterings.
"""
return linkage(y, method='single', metric='euclidean')
def complete(y):
"""
Performs complete/max/farthest point linkage on a condensed distance matrix
Parameters
----------
y : ndarray
The upper triangular of the distance matrix. The result of
``pdist`` is returned in this form.
Returns
-------
Z : ndarray
A linkage matrix containing the hierarchical clustering. See
the ``linkage`` function documentation for more information
on its structure.
See Also
--------
linkage
"""
return linkage(y, method='complete', metric='euclidean')
def average(y):
"""
Performs average/UPGMA linkage on a condensed distance matrix
Parameters
----------
y : ndarray
The upper triangular of the distance matrix. The result of
``pdist`` is returned in this form.
Returns
-------
Z : ndarray
A linkage matrix containing the hierarchical clustering. See
the ``linkage`` function documentation for more information
on its structure.
See Also
--------
linkage: for advanced creation of hierarchical clusterings.
"""
return linkage(y, method='average', metric='euclidean')
def weighted(y):
"""
Performs weighted/WPGMA linkage on the condensed distance matrix.
See ``linkage`` for more information on the return
structure and algorithm.
Parameters
----------
y : ndarray
The upper triangular of the distance matrix. The result of
``pdist`` is returned in this form.
Returns
-------
Z : ndarray
A linkage matrix containing the hierarchical clustering. See
the ``linkage`` function documentation for more information
on its structure.
See Also
--------
linkage : for advanced creation of hierarchical clusterings.
"""
return linkage(y, method='weighted', metric='euclidean')
def centroid(y):
"""
Performs centroid/UPGMC linkage.
See ``linkage`` for more information on the return structure
and algorithm.
The following are common calling conventions:
1. ``Z = centroid(y)``
Performs centroid/UPGMC linkage on the condensed distance
matrix ``y``. See ``linkage`` for more information on the return
structure and algorithm.
2. ``Z = centroid(X)``
Performs centroid/UPGMC linkage on the observation matrix ``X``
using Euclidean distance as the distance metric. See ``linkage``
for more information on the return structure and algorithm.
Parameters
----------
    y : ndarray
        A condensed or redundant distance matrix. A condensed
        distance matrix is a flat array containing the upper
        triangular of the distance matrix. This is the form that
        ``pdist`` returns. Alternatively, a collection of
        m observation vectors in n dimensions may be passed as
        an m by n array.
Returns
-------
Z : ndarray
A linkage matrix containing the hierarchical clustering. See
the ``linkage`` function documentation for more information
on its structure.
See Also
--------
linkage: for advanced creation of hierarchical clusterings.
"""
return linkage(y, method='centroid', metric='euclidean')
def median(y):
"""
Performs median/WPGMC linkage.
See ``linkage`` for more information on the return structure
and algorithm.
The following are common calling conventions:
1. ``Z = median(y)``
Performs median/WPGMC linkage on the condensed distance matrix
``y``. See ``linkage`` for more information on the return
structure and algorithm.
2. ``Z = median(X)``
Performs median/WPGMC linkage on the observation matrix ``X``
using Euclidean distance as the distance metric. See linkage
for more information on the return structure and algorithm.
Parameters
----------
    y : ndarray
        A condensed or redundant distance matrix. A condensed
        distance matrix is a flat array containing the upper
        triangular of the distance matrix. This is the form that
        ``pdist`` returns. Alternatively, a collection of
        m observation vectors in n dimensions may be passed as
        an m by n array.
Returns
-------
Z : ndarray
The hierarchical clustering encoded as a linkage matrix.
See Also
--------
linkage: for advanced creation of hierarchical clusterings.
"""
return linkage(y, method='median', metric='euclidean')
def ward(y):
"""
Performs Ward's linkage on a condensed or redundant distance matrix.
See linkage for more information on the return structure
and algorithm.
The following are common calling conventions:
1. ``Z = ward(y)``
Performs Ward's linkage on the condensed distance matrix ``Z``. See
linkage for more information on the return structure and
algorithm.
2. ``Z = ward(X)``
Performs Ward's linkage on the observation matrix ``X`` using
Euclidean distance as the distance metric. See linkage for more
information on the return structure and algorithm.
Parameters
----------
    y : ndarray
        A condensed or redundant distance matrix. A condensed
        distance matrix is a flat array containing the upper
        triangular of the distance matrix. This is the form that
        ``pdist`` returns. Alternatively, a collection of
        m observation vectors in n dimensions may be passed as
        an m by n array.
Returns
-------
Z : ndarray
The hierarchical clustering encoded as a linkage matrix.
See Also
--------
linkage: for advanced creation of hierarchical clusterings.
"""
return linkage(y, method='ward', metric='euclidean')
def linkage(y, method='single', metric='euclidean'):
"""
Performs hierarchical/agglomerative clustering on the condensed
distance matrix y.
y must be a :math:`{n \\choose 2}` sized
vector where n is the number of original observations paired
in the distance matrix. The behavior of this function is very
similar to the MATLAB linkage function.
A 4 by :math:`(n-1)` matrix ``Z`` is returned. At the
:math:`i`-th iteration, clusters with indices ``Z[i, 0]`` and
``Z[i, 1]`` are combined to form cluster :math:`n + i`. A
cluster with an index less than :math:`n` corresponds to one of
the :math:`n` original observations. The distance between
clusters ``Z[i, 0]`` and ``Z[i, 1]`` is given by ``Z[i, 2]``. The
fourth value ``Z[i, 3]`` represents the number of original
observations in the newly formed cluster.
The following linkage methods are used to compute the distance
:math:`d(s, t)` between two clusters :math:`s` and
:math:`t`. The algorithm begins with a forest of clusters that
have yet to be used in the hierarchy being formed. When two
clusters :math:`s` and :math:`t` from this forest are combined
into a single cluster :math:`u`, :math:`s` and :math:`t` are
removed from the forest, and :math:`u` is added to the
forest. When only one cluster remains in the forest, the algorithm
stops, and this cluster becomes the root.
A distance matrix is maintained at each iteration. The ``d[i,j]``
entry corresponds to the distance between cluster :math:`i` and
:math:`j` in the original forest.
At each iteration, the algorithm must update the distance matrix
to reflect the distance of the newly formed cluster u with the
remaining clusters in the forest.
Suppose there are :math:`|u|` original observations
:math:`u[0], \\ldots, u[|u|-1]` in cluster :math:`u` and
:math:`|v|` original objects :math:`v[0], \\ldots, v[|v|-1]` in
cluster :math:`v`. Recall :math:`s` and :math:`t` are
combined to form cluster :math:`u`. Let :math:`v` be any
remaining cluster in the forest that is not :math:`u`.
The following are methods for calculating the distance between the
newly formed cluster :math:`u` and each :math:`v`.
* method='single' assigns
.. math::
d(u,v) = \\min(dist(u[i],v[j]))
for all points :math:`i` in cluster :math:`u` and
:math:`j` in cluster :math:`v`. This is also known as the
Nearest Point Algorithm.
* method='complete' assigns
.. math::
d(u, v) = \\max(dist(u[i],v[j]))
for all points :math:`i` in cluster u and :math:`j` in
        cluster :math:`v`. This is also known as the Farthest Point
Algorithm or Voor Hees Algorithm.
* method='average' assigns
.. math::
d(u,v) = \\sum_{ij} \\frac{d(u[i], v[j])}
{(|u|*|v|)}
for all points :math:`i` and :math:`j` where :math:`|u|`
and :math:`|v|` are the cardinalities of clusters :math:`u`
and :math:`v`, respectively. This is also called the UPGMA
algorithm. This is called UPGMA.
* method='weighted' assigns
.. math::
d(u,v) = (dist(s,v) + dist(t,v))/2
where cluster u was formed with cluster s and t and v
is a remaining cluster in the forest. (also called WPGMA)
* method='centroid' assigns
.. math::
dist(s,t) = ||c_s-c_t||_2
where :math:`c_s` and :math:`c_t` are the centroids of
clusters :math:`s` and :math:`t`, respectively. When two
clusters :math:`s` and :math:`t` are combined into a new
cluster :math:`u`, the new centroid is computed over all the
original objects in clusters :math:`s` and :math:`t`. The
distance then becomes the Euclidean distance between the
centroid of :math:`u` and the centroid of a remaining cluster
:math:`v` in the forest. This is also known as the UPGMC
algorithm.
      * method='median' assigns :math:`d(s,t)` like the ``centroid``
method. When two clusters :math:`s` and :math:`t` are combined
into a new cluster :math:`u`, the average of centroids s and t
give the new centroid :math:`u`. This is also known as the
WPGMC algorithm.
* method='ward' uses the Ward variance minimization algorithm.
The new entry :math:`d(u,v)` is computed as follows,
.. math::
d(u,v) = \\sqrt{\\frac{|v|+|s|}
{T}d(v,s)^2
+ \\frac{|v|+|t|}
{T}d(v,t)^2
+ \\frac{|v|}
{T}d(s,t)^2}
where :math:`u` is the newly joined cluster consisting of
clusters :math:`s` and :math:`t`, :math:`v` is an unused
cluster in the forest, :math:`T=|v|+|s|+|t|`, and
:math:`|*|` is the cardinality of its argument. This is also
known as the incremental algorithm.
Warning: When the minimum distance pair in the forest is chosen, there
may be two or more pairs with the same minimum distance. This
    implementation may choose a different minimum than the MATLAB
version.
Parameters
----------
y : ndarray
A condensed or redundant distance matrix. A condensed distance matrix
is a flat array containing the upper triangular of the distance matrix.
This is the form that ``pdist`` returns. Alternatively, a collection of
:math:`m` observation vectors in n dimensions may be passed as an
:math:`m` by :math:`n` array.
method : str, optional
The linkage algorithm to use. See the ``Linkage Methods`` section below
for full descriptions.
metric : str, optional
The distance metric to use. See the ``distance.pdist`` function for a
list of valid distance metrics.
Returns
-------
Z : ndarray
The hierarchical clustering encoded as a linkage matrix.
"""
if not isinstance(method, string_types):
raise TypeError("Argument 'method' must be a string.")
y = _convert_to_double(np.asarray(y, order='c'))
s = y.shape
if len(s) == 1:
distance.is_valid_y(y, throw=True, name='y')
d = distance.num_obs_y(y)
if method not in _cpy_non_euclid_methods:
raise ValueError("Valid methods when the raw observations are "
"omitted are 'single', 'complete', 'weighted', "
"and 'average'.")
        # The C code does not support strided arrays, so copy if needed.
[y] = _copy_arrays_if_base_present([y])
Z = np.zeros((d - 1, 4))
_hierarchy_wrap.linkage_wrap(y, Z, int(d),
int(_cpy_non_euclid_methods[method]))
elif len(s) == 2:
X = y
n = s[0]
m = s[1]
if method not in _cpy_linkage_methods:
raise ValueError('Invalid method: %s' % method)
if method in _cpy_non_euclid_methods:
dm = distance.pdist(X, metric)
Z = np.zeros((n - 1, 4))
_hierarchy_wrap.linkage_wrap(dm, Z, n,
int(_cpy_non_euclid_methods[method]))
elif method in _cpy_euclid_methods:
if metric != 'euclidean':
raise ValueError(("Method '%s' requires the distance metric "
"to be euclidean") % method)
dm = distance.pdist(X, metric)
Z = np.zeros((n - 1, 4))
_hierarchy_wrap.linkage_euclid_wrap(dm, Z, X, m, n,
int(_cpy_euclid_methods[method]))
return Z
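# Editorial sketch (not part of the original module): linkage on m
# observations yields an (m - 1) by 4 matrix, one row per merge.
def _example_linkage():
    X = np.array([[0., 0.], [0., 1.], [5., 5.], [5., 6.]])
    Z = linkage(X, method='single', metric='euclidean')
    assert Z.shape == (3, 4)
    return Z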
class ClusterNode:
"""
A tree node class for representing a cluster.
Leaf nodes correspond to original observations, while non-leaf nodes
correspond to non-singleton clusters.
The to_tree function converts a matrix returned by the linkage
function into an easy-to-use tree representation.
See Also
--------
to_tree : for converting a linkage matrix ``Z`` into a tree object.
"""
def __init__(self, id, left=None, right=None, dist=0, count=1):
if id < 0:
raise ValueError('The id must be non-negative.')
if dist < 0:
raise ValueError('The distance must be non-negative.')
if (left is None and right is not None) or \
(left is not None and right is None):
raise ValueError('Only full or proper binary trees are permitted.'
' This node has one child.')
if count < 1:
raise ValueError('A cluster must contain at least one original '
'observation.')
self.id = id
self.left = left
self.right = right
self.dist = dist
if self.left is None:
self.count = count
else:
self.count = left.count + right.count
def get_id(self):
"""
The identifier of the target node.
For ``0 <= i < n``, `i` corresponds to original observation i.
For ``n <= i < 2n-1``, `i` corresponds to non-singleton cluster formed
at iteration ``i-n``.
Returns
-------
id : int
The identifier of the target node.
"""
return self.id
def get_count(self):
"""
The number of leaf nodes (original observations) belonging to
the cluster node nd. If the target node is a leaf, 1 is
returned.
Returns
-------
get_count : int
The number of leaf nodes below the target node.
"""
return self.count
def get_left(self):
"""
Return a reference to the left child tree object.
Returns
-------
left : ClusterNode
The left child of the target node. If the node is a leaf,
None is returned.
"""
return self.left
def get_right(self):
"""
Returns a reference to the right child tree object.
Returns
-------
right : ClusterNode
            The right child of the target node. If the node is a leaf,
None is returned.
"""
return self.right
def is_leaf(self):
"""
Returns True if the target node is a leaf.
Returns
-------
leafness : bool
True if the target node is a leaf node.
"""
return self.left is None
def pre_order(self, func=(lambda x: x.id)):
"""
Performs pre-order traversal without recursive function calls.
When a leaf node is first encountered, ``func`` is called with
the leaf node as its argument, and its result is appended to
the list.
For example, the statement::
ids = root.pre_order(lambda x: x.id)
returns a list of the node ids corresponding to the leaf nodes
of the tree as they appear from left to right.
Parameters
----------
func : function
Applied to each leaf ClusterNode object in the pre-order traversal.
            Given the i'th leaf node in the pre-order traversal ``n[i]``, the
result of func(n[i]) is stored in L[i]. If not provided, the index
of the original observation to which the node corresponds is used.
Returns
-------
L : list
The pre-order traversal.
"""
# Do a preorder traversal, caching the result. To avoid having to do
# recursion, we'll store the previous index we've visited in a vector.
n = self.count
curNode = [None] * (2 * n)
lvisited = set()
rvisited = set()
curNode[0] = self
k = 0
preorder = []
while k >= 0:
nd = curNode[k]
ndid = nd.id
if nd.is_leaf():
preorder.append(func(nd))
k = k - 1
else:
if ndid not in lvisited:
curNode[k + 1] = nd.left
lvisited.add(ndid)
k = k + 1
elif ndid not in rvisited:
curNode[k + 1] = nd.right
rvisited.add(ndid)
k = k + 1
# If we've visited the left and right of this non-leaf
# node already, go up in the tree.
else:
k = k - 1
return preorder
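# Editorial sketch (not part of the original module): a two-leaf tree built
# by hand; the iterative traversal lists leaf ids from left to right.
def _example_pre_order():
    root = ClusterNode(2, ClusterNode(0), ClusterNode(1), dist=1.0)
    assert root.pre_order() == [0, 1]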
_cnode_bare = ClusterNode(0)
_cnode_type = type(ClusterNode)
def to_tree(Z, rd=False):
"""
Converts a hierarchical clustering encoded in the matrix ``Z`` (by
linkage) into an easy-to-use tree object.
The reference r to the root ClusterNode object is returned.
Each ClusterNode object has a left, right, dist, id, and count
attribute. The left and right attributes point to ClusterNode objects
that were combined to generate the cluster. If both are None then
the ClusterNode object is a leaf node, its count must be 1, and its
distance is meaningless but set to 0.
Note: This function is provided for the convenience of the library
user. ClusterNodes are not used as input to any of the functions in this
library.
Parameters
----------
Z : ndarray
The linkage matrix in proper form (see the ``linkage``
function documentation).
rd : bool, optional
When False, a reference to the root ClusterNode object is
returned. Otherwise, a tuple (r,d) is returned. ``r`` is a
reference to the root node while ``d`` is a dictionary
mapping cluster ids to ClusterNode references. If a cluster id is
less than n, then it corresponds to a singleton cluster
(leaf node). See ``linkage`` for more information on the
assignment of cluster ids to clusters.
    Returns
    -------
    r : ClusterNode
        The root of the tree. When ``rd`` is True, the tuple ``(r, d)``
        described above is returned instead.
"""
Z = np.asarray(Z, order='c')
is_valid_linkage(Z, throw=True, name='Z')
    # The number of original objects is equal to the number of rows plus 1.
n = Z.shape[0] + 1
# Create a list full of None's to store the node objects
d = [None] * (n * 2 - 1)
# Create the nodes corresponding to the n original objects.
for i in xrange(0, n):
d[i] = ClusterNode(i)
nd = None
for i in xrange(0, n - 1):
fi = int(Z[i, 0])
fj = int(Z[i, 1])
if fi > i + n:
raise ValueError(('Corrupt matrix Z. Index to derivative cluster '
'is used before it is formed. See row %d, '
'column 0') % fi)
if fj > i + n:
raise ValueError(('Corrupt matrix Z. Index to derivative cluster '
'is used before it is formed. See row %d, '
'column 1') % fj)
nd = ClusterNode(i + n, d[fi], d[fj], Z[i, 2])
# ^ id ^ left ^ right ^ dist
if Z[i, 3] != nd.count:
raise ValueError(('Corrupt matrix Z. The count Z[%d,3] is '
'incorrect.') % i)
d[n + i] = nd
if rd:
return (nd, d)
else:
return nd
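# Editorial sketch (not part of the original module): the smallest valid
# linkage has one merge row; its tree is a single root over two leaves.
def _example_to_tree():
    Z = np.array([[0., 1., 1.0, 2.]])
    root = to_tree(Z)
    assert root.get_id() == 2 and root.get_count() == 2
    assert root.pre_order() == [0, 1]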
def _convert_to_bool(X):
if X.dtype != np.bool:
X = np.bool_(X)
if not X.flags.contiguous:
X = X.copy()
return X
def _convert_to_double(X):
if X.dtype != np.double:
X = np.double(X)
if not X.flags.contiguous:
X = X.copy()
return X
def cophenet(Z, Y=None):
"""
Calculates the cophenetic distances between each observation in
the hierarchical clustering defined by the linkage ``Z``.
Suppose ``p`` and ``q`` are original observations in
disjoint clusters ``s`` and ``t``, respectively and
``s`` and ``t`` are joined by a direct parent cluster
``u``. The cophenetic distance between observations
``i`` and ``j`` is simply the distance between
clusters ``s`` and ``t``.
Parameters
----------
Z : ndarray
The hierarchical clustering encoded as an array
(see ``linkage`` function).
Y : ndarray (optional)
Calculates the cophenetic correlation coefficient ``c`` of a
hierarchical clustering defined by the linkage matrix `Z`
of a set of :math:`n` observations in :math:`m`
dimensions. `Y` is the condensed distance matrix from which
`Z` was generated.
Returns
-------
c : ndarray
        The cophenetic correlation coefficient (if ``Y`` is passed).
d : ndarray
The cophenetic distance matrix in condensed form. The
:math:`ij` th entry is the cophenetic distance between
original observations :math:`i` and :math:`j`.
"""
Z = np.asarray(Z, order='c')
is_valid_linkage(Z, throw=True, name='Z')
Zs = Z.shape
n = Zs[0] + 1
zz = np.zeros((n * (n - 1)) // 2, dtype=np.double)
    # The C code does not support strided arrays; the dimensions are
    # passed explicitly instead.
Z = _convert_to_double(Z)
_hierarchy_wrap.cophenetic_distances_wrap(Z, zz, int(n))
if Y is None:
return zz
Y = np.asarray(Y, order='c')
distance.is_valid_y(Y, throw=True, name='Y')
z = zz.mean()
y = Y.mean()
Yy = Y - y
Zz = zz - z
numerator = (Yy * Zz)
denomA = Yy ** 2
denomB = Zz ** 2
c = numerator.sum() / np.sqrt((denomA.sum() * denomB.sum()))
return (c, zz)
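# Editorial sketch (not part of the original module): with a single merge at
# height 1.0, the only cophenetic distance (between observations 0 and 1)
# equals that merge height.
def _example_cophenet():
    Z = np.array([[0., 1., 1.0, 2.]])
    d = cophenet(Z)          # condensed form, length n * (n - 1) / 2 == 1
    assert d.shape == (1,)   # expected entry: 1.0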
def inconsistent(Z, d=2):
"""
Calculates inconsistency statistics on a linkage.
Note: This function behaves similarly to the MATLAB(TM)
inconsistent function.
Parameters
----------
Z : ndarray
The :math:`(n-1)` by 4 matrix encoding the linkage
(hierarchical clustering). See ``linkage`` documentation
for more information on its form.
d : int, optional
The number of links up to `d` levels below each
non-singleton cluster.
Returns
-------
R : ndarray
        A :math:`(n-1)` by 4 matrix where the ``i``'th row
contains the link statistics for the non-singleton cluster
``i``. The link statistics are computed over the link
heights for links :math:`d` levels below the cluster
``i``. ``R[i,0]`` and ``R[i,1]`` are the mean and standard
deviation of the link heights, respectively; ``R[i,2]`` is
the number of links included in the calculation; and
``R[i,3]`` is the inconsistency coefficient,
.. math:: \\frac{\\mathtt{Z[i,2]}-\\mathtt{R[i,0]}} {R[i,1]}
"""
Z = np.asarray(Z, order='c')
Zs = Z.shape
is_valid_linkage(Z, throw=True, name='Z')
if (not d == np.floor(d)) or d < 0:
raise ValueError('The second argument d must be a nonnegative '
'integer value.')
    # The C code does not support strided arrays; the dimensions are
    # passed explicitly instead.
[Z] = _copy_arrays_if_base_present([Z])
n = Zs[0] + 1
R = np.zeros((n - 1, 4), dtype=np.double)
_hierarchy_wrap.inconsistent_wrap(Z, R, int(n), int(d))
return R
def from_mlab_linkage(Z):
"""
Converts a linkage matrix generated by MATLAB(TM) to a new
linkage matrix compatible with this module.
The conversion does two things:
* the indices are converted from ``1..N`` to ``0..(N-1)`` form,
and
    * a fourth column Z[:,3] is added where Z[i,3] represents the
number of original observations (leaves) in the non-singleton
cluster i.
This function is useful when loading in linkages from legacy data
files generated by MATLAB.
Parameters
----------
Z : ndarray
A linkage matrix generated by MATLAB(TM).
Returns
-------
ZS : ndarray
A linkage matrix compatible with this library.
"""
Z = np.asarray(Z, dtype=np.double, order='c')
Zs = Z.shape
# If it's empty, return it.
if len(Zs) == 0 or (len(Zs) == 1 and Zs[0] == 0):
return Z.copy()
if len(Zs) != 2:
raise ValueError("The linkage array must be rectangular.")
# If it contains no rows, return it.
if Zs[0] == 0:
return Z.copy()
Zpart = Z.copy()
if Zpart[:, 0:2].min() != 1.0 and Zpart[:, 0:2].max() != 2 * Zs[0]:
raise ValueError('The format of the indices is not 1..N')
Zpart[:, 0:2] -= 1.0
CS = np.zeros((Zs[0],), dtype=np.double)
_hierarchy_wrap.calculate_cluster_sizes_wrap(Zpart, CS, int(Zs[0]) + 1)
return np.hstack([Zpart, CS.reshape(Zs[0], 1)])
def to_mlab_linkage(Z):
"""
Converts a linkage matrix to a MATLAB(TM) compatible one.
Converts a linkage matrix ``Z`` generated by the linkage function
of this module to a MATLAB(TM) compatible one. The return linkage
matrix has the last column removed and the cluster indices are
converted to ``1..N`` indexing.
Parameters
----------
Z : ndarray
A linkage matrix generated by this library.
Returns
-------
to_mlab_linkage : ndarray
A linkage matrix compatible with MATLAB(TM)'s hierarchical
clustering functions.
The return linkage matrix has the last column removed
and the cluster indices are converted to ``1..N`` indexing.
"""
Z = np.asarray(Z, order='c', dtype=np.double)
Zs = Z.shape
if len(Zs) == 0 or (len(Zs) == 1 and Zs[0] == 0):
return Z.copy()
is_valid_linkage(Z, throw=True, name='Z')
ZP = Z[:, 0:3].copy()
ZP[:, 0:2] += 1.0
return ZP
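# Editorial sketch (not part of the original module): the conversion drops
# the count column and shifts cluster indices to 1-based.
def _example_to_mlab_linkage():
    Z = np.array([[0., 1., 1.0, 2.]])
    assert to_mlab_linkage(Z).tolist() == [[1.0, 2.0, 1.0]]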
def is_monotonic(Z):
"""
Returns True if the linkage passed is monotonic.
The linkage is monotonic if for every cluster :math:`s` and :math:`t`
joined, the distance between them is no less than the distance
between any previously joined clusters.
Parameters
----------
Z : ndarray
The linkage matrix to check for monotonicity.
Returns
-------
b : bool
A boolean indicating whether the linkage is monotonic.
"""
Z = np.asarray(Z, order='c')
is_valid_linkage(Z, throw=True, name='Z')
    # We expect each merge distance to be no less than its predecessor.
return (Z[1:, 2] >= Z[:-1, 2]).all()
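# Editorial sketch (not part of the original module): merge heights must be
# non-decreasing for the linkage to be monotonic.
def _example_is_monotonic():
    assert is_monotonic(np.array([[0., 1., 0.4, 2.],
                                  [2., 3., 0.5, 3.]]))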
def is_valid_im(R, warning=False, throw=False, name=None):
"""Returns True if the inconsistency matrix passed is valid.
    It must be an :math:`n` by 4 numpy array of doubles. The standard
deviations ``R[:,1]`` must be nonnegative. The link counts
``R[:,2]`` must be positive and no greater than :math:`n-1`.
Parameters
----------
R : ndarray
The inconsistency matrix to check for validity.
warning : bool, optional
When True, issues a Python warning if the linkage
matrix passed is invalid.
throw : bool, optional
When True, throws a Python exception if the linkage
matrix passed is invalid.
name : str, optional
This string refers to the variable name of the invalid
linkage matrix.
Returns
-------
b : bool
True if the inconsistency matrix is valid.
"""
R = np.asarray(R, order='c')
valid = True
try:
if type(R) != np.ndarray:
if name:
raise TypeError(('Variable \'%s\' passed as inconsistency '
'matrix is not a numpy array.') % name)
else:
raise TypeError('Variable passed as inconsistency matrix '
'is not a numpy array.')
if R.dtype != np.double:
if name:
raise TypeError(('Inconsistency matrix \'%s\' must contain '
'doubles (double).') % name)
else:
raise TypeError('Inconsistency matrix must contain doubles '
'(double).')
if len(R.shape) != 2:
if name:
raise ValueError(('Inconsistency matrix \'%s\' must have '
'shape=2 (i.e. be two-dimensional).') % name)
else:
raise ValueError('Inconsistency matrix must have shape=2 '
'(i.e. be two-dimensional).')
if R.shape[1] != 4:
if name:
raise ValueError(('Inconsistency matrix \'%s\' must have 4 '
'columns.') % name)
else:
raise ValueError('Inconsistency matrix must have 4 columns.')
if R.shape[0] < 1:
if name:
raise ValueError(('Inconsistency matrix \'%s\' must have at '
'least one row.') % name)
else:
raise ValueError('Inconsistency matrix must have at least '
'one row.')
if (R[:, 0] < 0).any():
if name:
raise ValueError(('Inconsistency matrix \'%s\' contains '
'negative link height means.') % name)
else:
raise ValueError('Inconsistency matrix contains negative '
'link height means.')
if (R[:, 1] < 0).any():
if name:
raise ValueError(('Inconsistency matrix \'%s\' contains '
'negative link height standard '
'deviations.') % name)
else:
raise ValueError('Inconsistency matrix contains negative '
'link height standard deviations.')
if (R[:, 2] < 0).any():
if name:
raise ValueError(('Inconsistency matrix \'%s\' contains '
'negative link counts.') % name)
else:
raise ValueError('Inconsistency matrix contains negative '
'link counts.')
except Exception as e:
if throw:
raise
if warning:
_warning(str(e))
valid = False
return valid
def is_valid_linkage(Z, warning=False, throw=False, name=None):
"""
Checks the validity of a linkage matrix.
A linkage matrix is valid if it is a two dimensional
ndarray (type double) with :math:`n`
rows and 4 columns. The first two columns must contain indices
between 0 and :math:`2n-1`. For a given row ``i``,
:math:`0 \\leq \\mathtt{Z[i,0]} \\leq i+n-1`
and :math:`0 \\leq Z[i,1] \\leq i+n-1`
(i.e. a cluster cannot join another cluster unless the cluster
being joined has been generated.)
Parameters
----------
Z : array_like
Linkage matrix.
warning : bool, optional
When True, issues a Python warning if the linkage
matrix passed is invalid.
throw : bool, optional
When True, throws a Python exception if the linkage
matrix passed is invalid.
name : str, optional
This string refers to the variable name of the invalid
linkage matrix.
Returns
-------
b : bool
        True iff the linkage matrix is valid.
"""
Z = np.asarray(Z, order='c')
valid = True
try:
if type(Z) != np.ndarray:
if name:
raise TypeError(('\'%s\' passed as a linkage is not a valid '
'array.') % name)
else:
raise TypeError('Variable is not a valid array.')
if Z.dtype != np.double:
if name:
raise TypeError('Linkage matrix \'%s\' must contain doubles.'
% name)
else:
raise TypeError('Linkage matrix must contain doubles.')
if len(Z.shape) != 2:
if name:
raise ValueError(('Linkage matrix \'%s\' must have shape=2 '
'(i.e. be two-dimensional).') % name)
else:
raise ValueError('Linkage matrix must have shape=2 '
'(i.e. be two-dimensional).')
if Z.shape[1] != 4:
if name:
raise ValueError('Linkage matrix \'%s\' must have 4 columns.'
% name)
else:
raise ValueError('Linkage matrix must have 4 columns.')
if Z.shape[0] == 0:
raise ValueError('Linkage must be computed on at least two '
'observations.')
n = Z.shape[0]
if n > 1:
if ((Z[:, 0] < 0).any() or
(Z[:, 1] < 0).any()):
if name:
raise ValueError(('Linkage \'%s\' contains negative '
'indices.') % name)
else:
raise ValueError('Linkage contains negative indices.')
if (Z[:, 2] < 0).any():
if name:
raise ValueError(('Linkage \'%s\' contains negative '
'distances.') % name)
else:
raise ValueError('Linkage contains negative distances.')
if (Z[:, 3] < 0).any():
if name:
raise ValueError('Linkage \'%s\' contains negative counts.'
% name)
else:
raise ValueError('Linkage contains negative counts.')
if _check_hierarchy_uses_cluster_before_formed(Z):
if name:
                    raise ValueError(("Linkage '%s' uses non-singleton cluster "
                                      "before it's formed.") % name)
else:
raise ValueError("Linkage uses non-singleton cluster before "
"it's formed.")
if _check_hierarchy_uses_cluster_more_than_once(Z):
if name:
raise ValueError(('Linkage \'%s\' uses the same cluster more '
'than once.') % name)
else:
raise ValueError('Linkage uses the same cluster more than '
'once.')
except Exception as e:
if throw:
raise
if warning:
_warning(str(e))
valid = False
return valid
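# Editorial sketch (not part of the original module): a well-formed one-merge
# linkage passes; an array with only three columns does not.
def _example_is_valid_linkage():
    assert is_valid_linkage(np.array([[0., 1., 1.0, 2.]]))
    assert not is_valid_linkage(np.array([[0., 1., 1.0]]))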
def _check_hierarchy_uses_cluster_before_formed(Z):
n = Z.shape[0] + 1
for i in xrange(0, n - 1):
if Z[i, 0] >= n + i or Z[i, 1] >= n + i:
return True
return False
def _check_hierarchy_uses_cluster_more_than_once(Z):
n = Z.shape[0] + 1
chosen = set([])
for i in xrange(0, n - 1):
if (Z[i, 0] in chosen) or (Z[i, 1] in chosen) or Z[i, 0] == Z[i, 1]:
return True
chosen.add(Z[i, 0])
chosen.add(Z[i, 1])
return False
def _check_hierarchy_not_all_clusters_used(Z):
n = Z.shape[0] + 1
chosen = set([])
for i in xrange(0, n - 1):
chosen.add(int(Z[i, 0]))
chosen.add(int(Z[i, 1]))
must_chosen = set(range(0, 2 * n - 2))
return len(must_chosen.difference(chosen)) > 0
def num_obs_linkage(Z):
"""
Returns the number of original observations of the linkage matrix
passed.
Parameters
----------
Z : ndarray
The linkage matrix on which to perform the operation.
Returns
-------
n : int
The number of original observations in the linkage.
"""
Z = np.asarray(Z, order='c')
is_valid_linkage(Z, throw=True, name='Z')
return (Z.shape[0] + 1)
def correspond(Z, Y):
"""
Checks for correspondence between linkage and condensed distance matrices
They must have the same number of original observations for
the check to succeed.
This function is useful as a sanity check in algorithms that make
extensive use of linkage and distance matrices that must
correspond to the same set of original observations.
Parameters
----------
Z : array_like
The linkage matrix to check for correspondence.
Y : array_like
The condensed distance matrix to check for correspondence.
Returns
-------
b : bool
A boolean indicating whether the linkage matrix and distance
matrix could possibly correspond to one another.
"""
is_valid_linkage(Z, throw=True)
distance.is_valid_y(Y, throw=True)
Z = np.asarray(Z, order='c')
Y = np.asarray(Y, order='c')
return distance.num_obs_y(Y) == num_obs_linkage(Z)
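# A minimal usage sketch for `correspond`, using names defined/imported
# earlier in this module; the data itself is hypothetical:
#
#   >>> X = np.random.rand(6, 2)    # 6 observations in 2 dimensions
#   >>> Y = distance.pdist(X)       # condensed distances, length 15
#   >>> Z = linkage(Y)              # linkage over the same 6 observations
#   >>> correspond(Z, Y)
#   True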
def fcluster(Z, t, criterion='inconsistent', depth=2, R=None, monocrit=None):
"""
Forms flat clusters from the hierarchical clustering defined by
the linkage matrix ``Z``.
Parameters
----------
Z : ndarray
The hierarchical clustering encoded with the matrix returned
by the `linkage` function.
t : float
The threshold to apply when forming flat clusters.
criterion : str, optional
The criterion to use in forming flat clusters. This can
be any of the following values:
``inconsistent`` : If a cluster node and all its
descendants have an inconsistent value less than or equal
to `t` then all its leaf descendants belong to the
same flat cluster. When no non-singleton cluster meets
this criterion, every node is assigned to its own
cluster. (Default)
        ``distance`` : Forms flat clusters so that the original
            observations in each flat cluster have a cophenetic
            distance no greater than `t`.
``maxclust`` : Finds a minimum threshold ``r`` so that
the cophenetic distance between any two original
observations in the same flat cluster is no more than
``r`` and no more than `t` flat clusters are formed.
        ``monocrit`` : Forms a flat cluster from a cluster node c
            with index i when ``monocrit[i] <= t``.
For example, to threshold on the maximum mean distance
as computed in the inconsistency matrix R with a
threshold of 0.8 do:
MR = maxRstat(Z, R, 3)
                fcluster(Z, t=0.8, criterion='monocrit', monocrit=MR)
``maxclust_monocrit`` : Forms a flat cluster from a
non-singleton cluster node ``c`` when ``monocrit[i] <=
r`` for all cluster indices ``i`` below and including
``c``. ``r`` is minimized such that no more than ``t``
flat clusters are formed. monocrit must be
monotonic. For example, to minimize the threshold t on
maximum inconsistency values so that no more than 3 flat
clusters are formed, do:
MI = maxinconsts(Z, R)
                fcluster(Z, t=3, criterion='maxclust_monocrit', monocrit=MI)
depth : int, optional
The maximum depth to perform the inconsistency calculation.
It has no meaning for the other criteria. Default is 2.
R : ndarray, optional
The inconsistency matrix to use for the 'inconsistent'
criterion. This matrix is computed if not provided.
monocrit : ndarray, optional
        An array of length n-1. `monocrit[i]` is the
        statistic upon which non-singleton i is thresholded. The
monocrit vector must be monotonic, i.e. given a node c with
index i, for all node indices j corresponding to nodes
below c, `monocrit[i] >= monocrit[j]`.
Returns
-------
fcluster : ndarray
An array of length n. T[i] is the flat cluster number to
which original observation i belongs.
"""
Z = np.asarray(Z, order='c')
is_valid_linkage(Z, throw=True, name='Z')
n = Z.shape[0] + 1
T = np.zeros((n,), dtype='i')
    # Since the C code does not support striding using strides,
    # the dimensions are used instead.
[Z] = _copy_arrays_if_base_present([Z])
if criterion == 'inconsistent':
if R is None:
R = inconsistent(Z, depth)
else:
R = np.asarray(R, order='c')
is_valid_im(R, throw=True, name='R')
            # Since the C code does not support striding using strides,
            # the dimensions are used instead.
[R] = _copy_arrays_if_base_present([R])
_hierarchy_wrap.cluster_in_wrap(Z, R, T, float(t), int(n))
elif criterion == 'distance':
_hierarchy_wrap.cluster_dist_wrap(Z, T, float(t), int(n))
elif criterion == 'maxclust':
_hierarchy_wrap.cluster_maxclust_dist_wrap(Z, T, int(n), int(t))
elif criterion == 'monocrit':
[monocrit] = _copy_arrays_if_base_present([monocrit])
_hierarchy_wrap.cluster_monocrit_wrap(Z, monocrit, T, float(t), int(n))
elif criterion == 'maxclust_monocrit':
[monocrit] = _copy_arrays_if_base_present([monocrit])
_hierarchy_wrap.cluster_maxclust_monocrit_wrap(Z, monocrit, T,
int(n), int(t))
else:
raise ValueError('Invalid cluster formation criterion: %s'
% str(criterion))
return T
def fclusterdata(X, t, criterion='inconsistent',
metric='euclidean', depth=2, method='single', R=None):
"""
Cluster observation data using a given metric.
    Clusters the original observations in the n-by-m data
    matrix X (n observations in m dimensions), using `metric` (Euclidean
    by default) to calculate distances between original observations,
    performs hierarchical clustering using the linkage method given by
    `method` (single by default), and forms flat clusters using the
    inconsistency method with `t` as the cut-off threshold.
A one-dimensional array T of length n is returned. T[i] is the index
of the flat cluster to which the original observation i belongs.
Parameters
----------
X : (N, M) ndarray
N by M data matrix with N observations in M dimensions.
t : float
The threshold to apply when forming flat clusters.
criterion : str, optional
Specifies the criterion for forming flat clusters. Valid
values are 'inconsistent' (default), 'distance', or 'maxclust'
cluster formation algorithms. See `fcluster` for descriptions.
metric : str, optional
The distance metric for calculating pairwise distances. See
`distance.pdist` for descriptions and linkage to verify
compatibility with the linkage method.
depth : int, optional
The maximum depth for the inconsistency calculation. See
`inconsistent` for more information.
method : str, optional
        The linkage method to use (single, complete, average,
        weighted, median, centroid, ward). See `linkage` for more
information. Default is "single".
R : ndarray, optional
The inconsistency matrix. It will be computed if necessary
if it is not passed.
Returns
-------
fclusterdata : ndarray
A vector of length n. T[i] is the flat cluster number to
which original observation i belongs.
Notes
-----
This function is similar to the MATLAB function clusterdata.
"""
X = np.asarray(X, order='c', dtype=np.double)
if type(X) != np.ndarray or len(X.shape) != 2:
raise TypeError('The observation matrix X must be an n by m numpy '
'array.')
Y = distance.pdist(X, metric=metric)
Z = linkage(Y, method=method)
if R is None:
R = inconsistent(Z, d=depth)
else:
R = np.asarray(R, order='c')
T = fcluster(Z, criterion=criterion, depth=depth, R=R, t=t)
return T
def leaves_list(Z):
"""
    Returns a list of leaf node ids.
The return corresponds to the observation vector index as it appears
in the tree from left to right. Z is a linkage matrix.
Parameters
----------
Z : ndarray
The hierarchical clustering encoded as a matrix. `Z` is
a linkage matrix. See ``linkage`` for more information.
Returns
-------
leaves_list : ndarray
The list of leaf node ids.
"""
Z = np.asarray(Z, order='c')
is_valid_linkage(Z, throw=True, name='Z')
n = Z.shape[0] + 1
ML = np.zeros((n,), dtype='i')
[Z] = _copy_arrays_if_base_present([Z])
_hierarchy_wrap.prelist_wrap(Z, ML, int(n))
return ML
# Maps number of leaves to text size.
#
# p <= 20, size="12"
# 20 < p <= 30, size="10"
# 30 < p <= 50, size="8"
# 50 < p <= 85, size="6"
# 85 < p,      size="5"
_dtextsizes = {20: 12, 30: 10, 50: 8, 85: 6, np.inf: 5}
_drotation = {20: 0, 40: 45, np.inf: 90}
_dtextsortedkeys = list(_dtextsizes.keys())
_dtextsortedkeys.sort()
_drotationsortedkeys = list(_drotation.keys())
_drotationsortedkeys.sort()
def _remove_dups(L):
"""
Removes duplicates AND preserves the original order of the elements.
The set class is not guaranteed to do this.
"""
seen_before = set([])
L2 = []
for i in L:
if i not in seen_before:
seen_before.add(i)
L2.append(i)
return L2
def _get_tick_text_size(p):
for k in _dtextsortedkeys:
if p <= k:
return _dtextsizes[k]
def _get_tick_rotation(p):
for k in _drotationsortedkeys:
if p <= k:
return _drotation[k]
def _plot_dendrogram(icoords, dcoords, ivl, p, n, mh, orientation,
no_labels, color_list, leaf_font_size=None,
leaf_rotation=None, contraction_marks=None,
ax=None):
# Import matplotlib here so that it's not imported unless dendrograms
# are plotted. Raise an informative error if importing fails.
try:
# if an axis is provided, don't use pylab at all
if ax is None:
import matplotlib.pylab
import matplotlib.patches
import matplotlib.collections
except ImportError:
raise ImportError("You must install the matplotlib library to plot the dendrogram. Use no_plot=True to calculate the dendrogram without plotting.")
if ax is None:
ax = matplotlib.pylab.gca()
# if we're using pylab, we want to trigger a draw at the end
trigger_redraw = True
else:
trigger_redraw = False
# Independent variable plot width
ivw = len(ivl) * 10
    # Dependent variable plot height
dvw = mh + mh * 0.05
ivticks = np.arange(5, len(ivl) * 10 + 5, 10)
if orientation == 'top':
ax.set_ylim([0, dvw])
ax.set_xlim([0, ivw])
xlines = icoords
ylines = dcoords
if no_labels:
ax.set_xticks([])
ax.set_xticklabels([])
else:
ax.set_xticks(ivticks)
ax.set_xticklabels(ivl)
ax.xaxis.set_ticks_position('bottom')
lbls = ax.get_xticklabels()
if leaf_rotation:
map(lambda lbl: lbl.set_rotation(leaf_rotation), lbls)
else:
leaf_rot = float(_get_tick_rotation(len(ivl)))
map(lambda lbl: lbl.set_rotation(leaf_rot), lbls)
if leaf_font_size:
map(lambda lbl: lbl.set_size(leaf_font_size), lbls)
else:
leaf_fs = float(_get_tick_text_size(len(ivl)))
                map(lambda lbl: lbl.set_size(leaf_fs), lbls)
# Make the tick marks invisible because they cover up the links
for line in ax.get_xticklines():
line.set_visible(False)
elif orientation == 'bottom':
ax.set_ylim([dvw, 0])
ax.set_xlim([0, ivw])
xlines = icoords
ylines = dcoords
if no_labels:
ax.set_xticks([])
ax.set_xticklabels([])
else:
ax.set_xticks(ivticks)
ax.set_xticklabels(ivl)
lbls = ax.get_xticklabels()
if leaf_rotation:
map(lambda lbl: lbl.set_rotation(leaf_rotation), lbls)
else:
leaf_rot = float(_get_tick_rotation(p))
map(lambda lbl: lbl.set_rotation(leaf_rot), lbls)
if leaf_font_size:
map(lambda lbl: lbl.set_size(leaf_font_size), lbls)
else:
leaf_fs = float(_get_tick_text_size(p))
                map(lambda lbl: lbl.set_size(leaf_fs), lbls)
ax.xaxis.set_ticks_position('top')
# Make the tick marks invisible because they cover up the links
for line in ax.get_xticklines():
line.set_visible(False)
elif orientation == 'left':
ax.set_xlim([0, dvw])
ax.set_ylim([0, ivw])
xlines = dcoords
ylines = icoords
if no_labels:
ax.set_yticks([])
ax.set_yticklabels([])
else:
ax.set_yticks(ivticks)
ax.set_yticklabels(ivl)
lbls = ax.get_yticklabels()
if leaf_rotation:
map(lambda lbl: lbl.set_rotation(leaf_rotation), lbls)
if leaf_font_size:
map(lambda lbl: lbl.set_size(leaf_font_size), lbls)
ax.yaxis.set_ticks_position('left')
# Make the tick marks invisible because they cover up the
# links
for line in ax.get_yticklines():
line.set_visible(False)
elif orientation == 'right':
ax.set_xlim([dvw, 0])
ax.set_ylim([0, ivw])
xlines = dcoords
ylines = icoords
if no_labels:
ax.set_yticks([])
ax.set_yticklabels([])
else:
ax.set_yticks(ivticks)
ax.set_yticklabels(ivl)
lbls = ax.get_yticklabels()
if leaf_rotation:
map(lambda lbl: lbl.set_rotation(leaf_rotation), lbls)
if leaf_font_size:
map(lambda lbl: lbl.set_size(leaf_font_size), lbls)
ax.yaxis.set_ticks_position('right')
# Make the tick marks invisible because they cover up the links
for line in ax.get_yticklines():
line.set_visible(False)
# Let's use collections instead. This way there is a separate legend
# item for each tree grouping, rather than stupidly one for each line
# segment.
colors_used = _remove_dups(color_list)
color_to_lines = {}
for color in colors_used:
color_to_lines[color] = []
for (xline, yline, color) in zip(xlines, ylines, color_list):
color_to_lines[color].append(list(zip(xline, yline)))
colors_to_collections = {}
# Construct the collections.
for color in colors_used:
coll = matplotlib.collections.LineCollection(color_to_lines[color],
colors=(color,))
colors_to_collections[color] = coll
# Add all the non-blue link groupings, i.e. those groupings below the
# color threshold.
for color in colors_used:
if color != 'b':
ax.add_collection(colors_to_collections[color])
# If there is a blue grouping (i.e., links above the color threshold),
# it should go last.
if 'b' in colors_to_collections:
ax.add_collection(colors_to_collections['b'])
if contraction_marks is not None:
if orientation in ('left', 'right'):
for (x, y) in contraction_marks:
e = matplotlib.patches.Ellipse((y, x),
width=dvw / 100, height=1.0)
ax.add_artist(e)
e.set_clip_box(ax.bbox)
e.set_alpha(0.5)
e.set_facecolor('k')
if orientation in ('top', 'bottom'):
for (x, y) in contraction_marks:
e = matplotlib.patches.Ellipse((x, y),
width=1.0, height=dvw / 100)
ax.add_artist(e)
e.set_clip_box(ax.bbox)
e.set_alpha(0.5)
e.set_facecolor('k')
if trigger_redraw:
matplotlib.pylab.draw_if_interactive()
_link_line_colors = ['g', 'r', 'c', 'm', 'y', 'k']
def set_link_color_palette(palette):
"""
Set list of matplotlib color codes for dendrogram color_threshold.
Parameters
----------
palette : list
A list of matplotlib color codes. The order of
the color codes is the order in which the colors are cycled
through when color thresholding in the dendrogram.
"""
if type(palette) not in (list, tuple):
raise TypeError("palette must be a list or tuple")
_ptypes = [isinstance(p, string_types) for p in palette]
if False in _ptypes:
raise TypeError("all palette list elements must be color strings")
for i in list(_link_line_colors):
_link_line_colors.remove(i)
_link_line_colors.extend(list(palette))
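# Hedged usage sketch: swap in a custom cycling palette before plotting
# (the single-letter codes are ordinary matplotlib colors; the choice is
# arbitrary):
#
#   >>> set_link_color_palette(['m', 'c', 'y', 'k'])
#   >>> dendrogram(Z, color_threshold=0.7)   # Z: a precomputed linkage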
def dendrogram(Z, p=30, truncate_mode=None, color_threshold=None,
get_leaves=True, orientation='top', labels=None,
count_sort=False, distance_sort=False, show_leaf_counts=True,
no_plot=False, no_labels=False, color_list=None,
leaf_font_size=None, leaf_rotation=None, leaf_label_func=None,
no_leaves=False, show_contracted=False,
link_color_func=None, ax=None):
"""
Plots the hierarchical clustering as a dendrogram.
The dendrogram illustrates how each cluster is
composed by drawing a U-shaped link between a non-singleton
cluster and its children. The height of the top of the U-link is
the distance between its children clusters. It is also the
cophenetic distance between original observations in the two
children clusters. It is expected that the distances in Z[:,2] be
monotonic, otherwise crossings appear in the dendrogram.
Parameters
----------
Z : ndarray
The linkage matrix encoding the hierarchical clustering to
render as a dendrogram. See the ``linkage`` function for more
information on the format of ``Z``.
p : int, optional
The ``p`` parameter for ``truncate_mode``.
truncate_mode : str, optional
The dendrogram can be hard to read when the original
observation matrix from which the linkage is derived is
large. Truncation is used to condense the dendrogram. There
are several modes:
``None/'none'``
No truncation is performed (Default).
``'lastp'``
        The last ``p`` non-singleton clusters formed in the linkage are the only
non-leaf nodes in the linkage; they correspond to rows
``Z[n-p-2:end]`` in ``Z``. All other non-singleton clusters are
contracted into leaf nodes.
``'mlab'``
This corresponds to MATLAB(TM) behavior. (not implemented yet)
``'level'/'mtica'``
No more than ``p`` levels of the dendrogram tree are displayed.
This corresponds to Mathematica(TM) behavior.
color_threshold : double, optional
For brevity, let :math:`t` be the ``color_threshold``.
Colors all the descendent links below a cluster node
:math:`k` the same color if :math:`k` is the first node below
the cut threshold :math:`t`. All links connecting nodes with
distances greater than or equal to the threshold are colored
blue. If :math:`t` is less than or equal to zero, all nodes
are colored blue. If ``color_threshold`` is None or
'default', corresponding with MATLAB(TM) behavior, the
threshold is set to ``0.7*max(Z[:,2])``.
get_leaves : bool, optional
Includes a list ``R['leaves']=H`` in the result
dictionary. For each :math:`i`, ``H[i] == j``, cluster node
``j`` appears in position ``i`` in the left-to-right traversal
of the leaves, where :math:`j < 2n-1` and :math:`i < n`.
orientation : str, optional
The direction to plot the dendrogram, which can be any
of the following strings:
``'top'``
Plots the root at the top, and plot descendent links going downwards.
(default).
``'bottom'``
Plots the root at the bottom, and plot descendent links going
upwards.
``'left'``
Plots the root at the left, and plot descendent links going right.
``'right'``
Plots the root at the right, and plot descendent links going left.
labels : ndarray, optional
By default ``labels`` is None so the index of the original observation
is used to label the leaf nodes. Otherwise, this is an :math:`n`
-sized list (or tuple). The ``labels[i]`` value is the text to put
under the :math:`i` th leaf node only if it corresponds to an original
observation and not a non-singleton cluster.
count_sort : str or bool, optional
        For each node n, the order (visually, from left-to-right) in
        which n's two descendent links are plotted is determined by this
parameter, which can be any of the following values:
``False``
Nothing is done.
``'ascending'`` or ``True``
The child with the minimum number of original objects in its cluster
is plotted first.
        ``'descending'``
The child with the maximum number of original objects in its cluster
is plotted first.
Note ``distance_sort`` and ``count_sort`` cannot both be True.
distance_sort : str or bool, optional
        For each node n, the order (visually, from left-to-right) in
        which n's two descendent links are plotted is determined by this
parameter, which can be any of the following values:
``False``
Nothing is done.
``'ascending'`` or ``True``
The child with the minimum distance between its direct descendents is
plotted first.
``'descending'``
The child with the maximum distance between its direct descendents is
plotted first.
Note ``distance_sort`` and ``count_sort`` cannot both be True.
show_leaf_counts : bool, optional
When True, leaf nodes representing :math:`k>1` original
        observations are labeled with the number of observations they
contain in parentheses.
no_plot : bool, optional
When True, the final rendering is not performed. This is
useful if only the data structures computed for the rendering
are needed or if matplotlib is not available.
no_labels : bool, optional
When True, no labels appear next to the leaf nodes in the
rendering of the dendrogram.
    leaf_rotation : double, optional
        Specifies the angle (in degrees) to rotate the leaf
        labels. When unspecified, the rotation is based on the number of
        nodes in the dendrogram (default is 0).
leaf_font_size : int, optional
Specifies the font size (in points) of the leaf labels. When
        unspecified, the size is based on the number of nodes in the
dendrogram.
leaf_label_func : lambda or function, optional
        When leaf_label_func is a callable function, it is called with
        each leaf's cluster index :math:`k < 2n-1` and is expected to
        return a string with the label for that leaf.
Indices :math:`k < n` correspond to original observations
while indices :math:`k \\geq n` correspond to non-singleton
clusters.
For example, to label singletons with their node id and
non-singletons with their id, count, and inconsistency
coefficient, simply do:
>>> # First define the leaf label function.
>>> def llf(id):
... if id < n:
... return str(id)
... else:
        ...         return '[%d %d %1.2f]' % (id, count, R[n-id,3])
>>>
>>> # The text for the leaf nodes is going to be big so force
>>> # a rotation of 90 degrees.
>>> dendrogram(Z, leaf_label_func=llf, leaf_rotation=90)
show_contracted : bool, optional
When True the heights of non-singleton nodes contracted
into a leaf node are plotted as crosses along the link
connecting that leaf node. This really is only useful when
truncation is used (see ``truncate_mode`` parameter).
link_color_func : callable, optional
        If given, `link_color_func` is called with each non-singleton id
corresponding to each U-shaped link it will paint. The function is
expected to return the color to paint the link, encoded as a matplotlib
color string code. For example:
>>> dendrogram(Z, link_color_func=lambda k: colors[k])
colors the direct links below each untruncated non-singleton node
``k`` using ``colors[k]``.
ax : matplotlib Axes instance, optional
If None and `no_plot` is not True, the dendrogram will be plotted
on the current axes. Otherwise if `no_plot` is not True the
dendrogram will be plotted on the given ``Axes`` instance. This can be
useful if the dendrogram is part of a more complex figure.
Returns
-------
R : dict
A dictionary of data structures computed to render the
        dendrogram. It has the following keys:
``'icoords'``
A list of lists ``[I1, I2, ..., Ip]`` where ``Ik`` is a list of 4
independent variable coordinates corresponding to the line that
represents the k'th link painted.
``'dcoords'``
            A list of lists ``[I1, I2, ..., Ip]`` where ``Ik`` is a list of 4
            dependent variable coordinates corresponding to the line that
            represents the k'th link painted.
``'ivl'``
A list of labels corresponding to the leaf nodes.
``'leaves'``
For each i, ``H[i] == j``, cluster node ``j`` appears in position
``i`` in the left-to-right traversal of the leaves, where
:math:`j < 2n-1` and :math:`i < n`. If ``j`` is less than ``n``, the
``i``-th leaf node corresponds to an original observation.
Otherwise, it corresponds to a non-singleton cluster.
"""
# Features under consideration.
#
# ... = dendrogram(..., leaves_order=None)
#
# Plots the leaves in the order specified by a vector of
# original observation indices. If the vector contains duplicates
# or results in a crossing, an exception will be thrown. Passing
# None orders leaf nodes based on the order they appear in the
# pre-order traversal.
Z = np.asarray(Z, order='c')
is_valid_linkage(Z, throw=True, name='Z')
Zs = Z.shape
n = Zs[0] + 1
if type(p) in (int, float):
p = int(p)
else:
raise TypeError('The second argument must be a number')
if truncate_mode not in ('lastp', 'mlab', 'mtica', 'level', 'none', None):
raise ValueError('Invalid truncation mode.')
if truncate_mode == 'lastp' or truncate_mode == 'mlab':
if p > n or p == 0:
p = n
if truncate_mode == 'mtica' or truncate_mode == 'level':
if p <= 0:
p = np.inf
if get_leaves:
lvs = []
else:
lvs = None
icoord_list = []
dcoord_list = []
color_list = []
current_color = [0]
currently_below_threshold = [False]
if no_leaves:
ivl = None
else:
ivl = []
if color_threshold is None or \
(isinstance(color_threshold, string_types) and
color_threshold == 'default'):
color_threshold = max(Z[:, 2]) * 0.7
R = {'icoord': icoord_list, 'dcoord': dcoord_list, 'ivl': ivl,
'leaves': lvs, 'color_list': color_list}
if show_contracted:
contraction_marks = []
else:
contraction_marks = None
_dendrogram_calculate_info(
Z=Z, p=p,
truncate_mode=truncate_mode,
color_threshold=color_threshold,
get_leaves=get_leaves,
orientation=orientation,
labels=labels,
count_sort=count_sort,
distance_sort=distance_sort,
show_leaf_counts=show_leaf_counts,
i=2 * n - 2, iv=0.0, ivl=ivl, n=n,
icoord_list=icoord_list,
dcoord_list=dcoord_list, lvs=lvs,
current_color=current_color,
color_list=color_list,
currently_below_threshold=currently_below_threshold,
leaf_label_func=leaf_label_func,
contraction_marks=contraction_marks,
link_color_func=link_color_func)
if not no_plot:
mh = max(Z[:, 2])
_plot_dendrogram(icoord_list, dcoord_list, ivl, p, n, mh, orientation,
no_labels, color_list, leaf_font_size=leaf_font_size,
leaf_rotation=leaf_rotation,
contraction_marks=contraction_marks,
ax=ax)
return R
def _append_singleton_leaf_node(Z, p, n, level, lvs, ivl, leaf_label_func,
i, labels):
# If the leaf id structure is not None and is a list then the caller
# to dendrogram has indicated that cluster id's corresponding to the
# leaf nodes should be recorded.
if lvs is not None:
lvs.append(int(i))
# If leaf node labels are to be displayed...
if ivl is not None:
# If a leaf_label_func has been provided, the label comes from the
# string returned from the leaf_label_func, which is a function
# passed to dendrogram.
if leaf_label_func:
ivl.append(leaf_label_func(int(i)))
else:
# Otherwise, if the dendrogram caller has passed a labels list
# for the leaf nodes, use it.
if labels is not None:
ivl.append(labels[int(i - n)])
else:
                # Otherwise, use the id as the label for the leaf.
ivl.append(str(int(i)))
def _append_nonsingleton_leaf_node(Z, p, n, level, lvs, ivl, leaf_label_func,
i, labels, show_leaf_counts):
# If the leaf id structure is not None and is a list then the caller
# to dendrogram has indicated that cluster id's corresponding to the
# leaf nodes should be recorded.
if lvs is not None:
lvs.append(int(i))
if ivl is not None:
if leaf_label_func:
ivl.append(leaf_label_func(int(i)))
else:
if show_leaf_counts:
ivl.append("(" + str(int(Z[i - n, 3])) + ")")
else:
ivl.append("")
def _append_contraction_marks(Z, iv, i, n, contraction_marks):
_append_contraction_marks_sub(Z, iv, Z[i - n, 0], n, contraction_marks)
_append_contraction_marks_sub(Z, iv, Z[i - n, 1], n, contraction_marks)
def _append_contraction_marks_sub(Z, iv, i, n, contraction_marks):
if i >= n:
contraction_marks.append((iv, Z[i - n, 2]))
_append_contraction_marks_sub(Z, iv, Z[i - n, 0], n, contraction_marks)
_append_contraction_marks_sub(Z, iv, Z[i - n, 1], n, contraction_marks)
def _dendrogram_calculate_info(Z, p, truncate_mode,
color_threshold=np.inf, get_leaves=True,
orientation='top', labels=None,
count_sort=False, distance_sort=False,
show_leaf_counts=False, i=-1, iv=0.0,
ivl=[], n=0, icoord_list=[], dcoord_list=[],
lvs=None, mhr=False,
current_color=[], color_list=[],
currently_below_threshold=[],
leaf_label_func=None, level=0,
contraction_marks=None,
link_color_func=None):
"""
    Calculates the endpoints of the links as well as the labels for
    the dendrogram rooted at the node with index i. iv is the independent
variable value to plot the left-most leaf node below the root node i
(if orientation='top', this would be the left-most x value where the
plotting of this root node i and its descendents should begin).
ivl is a list to store the labels of the leaf nodes. The leaf_label_func
is called whenever ivl != None, labels == None, and
leaf_label_func != None. When ivl != None and labels != None, the
labels list is used only for labeling the leaf nodes. When
ivl == None, no labels are generated for leaf nodes.
When get_leaves==True, a list of leaves is built as they are visited
in the dendrogram.
    Returns a tuple with l being the independent variable coordinate that
    corresponds to the midpoint of the subtree rooted at cluster i if
    i is non-singleton, otherwise the independent coordinate of the leaf
    node if i is a leaf node.
Returns
-------
A tuple (left, w, h, md), where:
      * left is the independent variable coordinate of the center of
        the U of the subtree
* w is the amount of space used for the subtree (in independent
variable units)
* h is the height of the subtree in dependent variable units
* md is the max(Z[*,2]) for all nodes * below and including
the target node.
"""
if n == 0:
raise ValueError("Invalid singleton cluster count n.")
if i == -1:
raise ValueError("Invalid root cluster index i.")
if truncate_mode == 'lastp':
        # If the node is a leaf node but corresponds to a non-singleton
        # cluster, its label is either the empty string or the number of
        # original observations belonging to cluster i.
if i < 2 * n - p and i >= n:
d = Z[i - n, 2]
_append_nonsingleton_leaf_node(Z, p, n, level, lvs, ivl,
leaf_label_func, i, labels,
show_leaf_counts)
if contraction_marks is not None:
_append_contraction_marks(Z, iv + 5.0, i, n, contraction_marks)
return (iv + 5.0, 10.0, 0.0, d)
elif i < n:
_append_singleton_leaf_node(Z, p, n, level, lvs, ivl,
leaf_label_func, i, labels)
return (iv + 5.0, 10.0, 0.0, 0.0)
elif truncate_mode in ('mtica', 'level'):
if i > n and level > p:
d = Z[i - n, 2]
_append_nonsingleton_leaf_node(Z, p, n, level, lvs, ivl,
leaf_label_func, i, labels,
show_leaf_counts)
if contraction_marks is not None:
_append_contraction_marks(Z, iv + 5.0, i, n, contraction_marks)
return (iv + 5.0, 10.0, 0.0, d)
elif i < n:
_append_singleton_leaf_node(Z, p, n, level, lvs, ivl,
leaf_label_func, i, labels)
return (iv + 5.0, 10.0, 0.0, 0.0)
elif truncate_mode in ('mlab',):
pass
# Otherwise, only truncate if we have a leaf node.
#
# If the truncate_mode is mlab, the linkage has been modified
# with the truncated tree.
#
# Only place leaves if they correspond to original observations.
if i < n:
_append_singleton_leaf_node(Z, p, n, level, lvs, ivl,
leaf_label_func, i, labels)
return (iv + 5.0, 10.0, 0.0, 0.0)
# !!! Otherwise, we don't have a leaf node, so work on plotting a
# non-leaf node.
# Actual indices of a and b
aa = int(Z[i - n, 0])
ab = int(Z[i - n, 1])
    if aa >= n:
# The number of singletons below cluster a
na = Z[aa - n, 3]
# The distance between a's two direct children.
da = Z[aa - n, 2]
else:
na = 1
da = 0.0
    if ab >= n:
nb = Z[ab - n, 3]
db = Z[ab - n, 2]
else:
nb = 1
db = 0.0
if count_sort == 'ascending' or count_sort == True:
# If a has a count greater than b, it and its descendents should
# be drawn to the right. Otherwise, to the left.
if na > nb:
# The cluster index to draw to the left (ua) will be ab
# and the one to draw to the right (ub) will be aa
ua = ab
ub = aa
else:
ua = aa
ub = ab
elif count_sort == 'descending':
# If a has a count less than or equal to b, it and its
# descendents should be drawn to the left. Otherwise, to
# the right.
if na > nb:
ua = aa
ub = ab
else:
ua = ab
ub = aa
elif distance_sort == 'ascending' or distance_sort == True:
# If a has a distance greater than b, it and its descendents should
# be drawn to the right. Otherwise, to the left.
if da > db:
ua = ab
ub = aa
else:
ua = aa
ub = ab
elif distance_sort == 'descending':
# If a has a distance less than or equal to b, it and its
# descendents should be drawn to the left. Otherwise, to
# the right.
if da > db:
ua = aa
ub = ab
else:
ua = ab
ub = aa
else:
ua = aa
ub = ab
# Updated iv variable and the amount of space used.
(uiva, uwa, uah, uamd) = \
_dendrogram_calculate_info(
Z=Z, p=p,
truncate_mode=truncate_mode,
color_threshold=color_threshold,
get_leaves=get_leaves,
orientation=orientation,
labels=labels,
count_sort=count_sort,
distance_sort=distance_sort,
show_leaf_counts=show_leaf_counts,
i=ua, iv=iv, ivl=ivl, n=n,
icoord_list=icoord_list,
dcoord_list=dcoord_list, lvs=lvs,
current_color=current_color,
color_list=color_list,
currently_below_threshold=currently_below_threshold,
leaf_label_func=leaf_label_func,
level=level + 1, contraction_marks=contraction_marks,
link_color_func=link_color_func)
h = Z[i - n, 2]
if h >= color_threshold or color_threshold <= 0:
c = 'b'
if currently_below_threshold[0]:
current_color[0] = (current_color[0] + 1) % len(_link_line_colors)
currently_below_threshold[0] = False
else:
currently_below_threshold[0] = True
c = _link_line_colors[current_color[0]]
(uivb, uwb, ubh, ubmd) = \
_dendrogram_calculate_info(
Z=Z, p=p,
truncate_mode=truncate_mode,
color_threshold=color_threshold,
get_leaves=get_leaves,
orientation=orientation,
labels=labels,
count_sort=count_sort,
distance_sort=distance_sort,
show_leaf_counts=show_leaf_counts,
i=ub, iv=iv + uwa, ivl=ivl, n=n,
icoord_list=icoord_list,
dcoord_list=dcoord_list, lvs=lvs,
current_color=current_color,
color_list=color_list,
currently_below_threshold=currently_below_threshold,
leaf_label_func=leaf_label_func,
level=level + 1, contraction_marks=contraction_marks,
link_color_func=link_color_func)
max_dist = max(uamd, ubmd, h)
icoord_list.append([uiva, uiva, uivb, uivb])
dcoord_list.append([uah, h, h, ubh])
if link_color_func is not None:
v = link_color_func(int(i))
if not isinstance(v, string_types):
raise TypeError("link_color_func must return a matplotlib "
"color string!")
color_list.append(v)
else:
color_list.append(c)
return (((uiva + uivb) / 2), uwa + uwb, h, max_dist)
def is_isomorphic(T1, T2):
"""
Determines if two different cluster assignments are equivalent.
Parameters
----------
T1 : array_like
An assignment of singleton cluster ids to flat cluster ids.
T2 : array_like
An assignment of singleton cluster ids to flat cluster ids.
Returns
-------
b : bool
Whether the flat cluster assignments `T1` and `T2` are
equivalent.
"""
T1 = np.asarray(T1, order='c')
T2 = np.asarray(T2, order='c')
if type(T1) != np.ndarray:
raise TypeError('T1 must be a numpy array.')
if type(T2) != np.ndarray:
raise TypeError('T2 must be a numpy array.')
T1S = T1.shape
T2S = T2.shape
if len(T1S) != 1:
raise ValueError('T1 must be one-dimensional.')
if len(T2S) != 1:
raise ValueError('T2 must be one-dimensional.')
if T1S[0] != T2S[0]:
raise ValueError('T1 and T2 must have the same number of elements.')
n = T1S[0]
d = {}
for i in xrange(0, n):
if T1[i] in d:
if d[T1[i]] != T2[i]:
return False
else:
d[T1[i]] = T2[i]
return True
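# A small sketch of `is_isomorphic` on hand-written assignments: the two
# vectors below use different label sets but induce the same partition,
# so the first call returns True; the second pair disagrees on which
# observations share a cluster, so it returns False.
#
#   >>> is_isomorphic([1, 1, 2, 2, 3], [5, 5, 9, 9, 7])
#   True
#   >>> is_isomorphic([1, 1, 2], [5, 9, 9])
#   False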
def maxdists(Z):
"""
    Returns the maximum distance for each non-singleton cluster and
    its descendents.
Parameters
----------
Z : ndarray
The hierarchical clustering encoded as a matrix. See
``linkage`` for more information.
Returns
-------
maxdists : ndarray
A ``(n-1)`` sized numpy array of doubles; ``MD[i]`` represents
the maximum distance between any cluster (including
singletons) below and including the node with index i. More
specifically, ``MD[i] = Z[Q(i)-n, 2].max()`` where ``Q(i)`` is the
set of all node indices below and including node i.
"""
Z = np.asarray(Z, order='c', dtype=np.double)
is_valid_linkage(Z, throw=True, name='Z')
n = Z.shape[0] + 1
MD = np.zeros((n - 1,))
[Z] = _copy_arrays_if_base_present([Z])
_hierarchy_wrap.get_max_dist_for_each_cluster_wrap(Z, MD, int(n))
return MD
def maxinconsts(Z, R):
"""
Returns the maximum inconsistency coefficient for each
non-singleton cluster and its descendents.
Parameters
----------
Z : ndarray
The hierarchical clustering encoded as a matrix. See
``linkage`` for more information.
R : ndarray
The inconsistency matrix.
Returns
-------
MI : ndarray
A monotonic ``(n-1)``-sized numpy array of doubles.
"""
Z = np.asarray(Z, order='c')
R = np.asarray(R, order='c')
is_valid_linkage(Z, throw=True, name='Z')
is_valid_im(R, throw=True, name='R')
n = Z.shape[0] + 1
if Z.shape[0] != R.shape[0]:
raise ValueError("The inconsistency matrix and linkage matrix each "
"have a different number of rows.")
MI = np.zeros((n - 1,))
[Z, R] = _copy_arrays_if_base_present([Z, R])
_hierarchy_wrap.get_max_Rfield_for_each_cluster_wrap(Z, R, MI, int(n), 3)
return MI
def maxRstat(Z, R, i):
"""
Returns the maximum statistic for each non-singleton cluster and
its descendents.
Parameters
----------
Z : array_like
The hierarchical clustering encoded as a matrix. See
``linkage`` for more information.
R : array_like
The inconsistency matrix.
i : int
The column of `R` to use as the statistic.
Returns
-------
MR : ndarray
Calculates the maximum statistic for the i'th column of the
inconsistency matrix `R` for each non-singleton cluster
node. ``MR[j]`` is the maximum over ``R[Q(j)-n, i]`` where
``Q(j)`` the set of all node ids corresponding to nodes below
and including ``j``.
"""
Z = np.asarray(Z, order='c')
R = np.asarray(R, order='c')
is_valid_linkage(Z, throw=True, name='Z')
is_valid_im(R, throw=True, name='R')
if type(i) is not int:
raise TypeError('The third argument must be an integer.')
if i < 0 or i > 3:
raise ValueError('i must be an integer between 0 and 3 inclusive.')
if Z.shape[0] != R.shape[0]:
raise ValueError("The inconsistency matrix and linkage matrix each "
"have a different number of rows.")
n = Z.shape[0] + 1
MR = np.zeros((n - 1,))
[Z, R] = _copy_arrays_if_base_present([Z, R])
_hierarchy_wrap.get_max_Rfield_for_each_cluster_wrap(Z, R, MR, int(n), i)
return MR
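# Hedged sketch tying `maxRstat` to fcluster's 'monocrit' criterion
# (mirrors the example in the fcluster docstring; Z and R are a
# precomputed linkage and inconsistency matrix):
#
#   >>> MR = maxRstat(Z, R, 3)   # column 3: the inconsistency coefficient
#   >>> T = fcluster(Z, t=0.8, criterion='monocrit', monocrit=MR)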
def leaders(Z, T):
"""
Returns the root nodes in a hierarchical clustering.
Returns the root nodes in a hierarchical clustering corresponding
to a cut defined by a flat cluster assignment vector ``T``. See
the ``fcluster`` function for more information on the format of ``T``.
For each flat cluster :math:`j` of the :math:`k` flat clusters
represented in the n-sized flat cluster assignment vector ``T``,
this function finds the lowest cluster node :math:`i` in the linkage
tree Z such that:
* leaf descendents belong only to flat cluster j
(i.e. ``T[p]==j`` for all :math:`p` in :math:`S(i)` where
:math:`S(i)` is the set of leaf ids of leaf nodes descendent
with cluster node :math:`i`)
* there does not exist a leaf that is not descendent with
:math:`i` that also belongs to cluster :math:`j`
(i.e. ``T[q]!=j`` for all :math:`q` not in :math:`S(i)`). If
this condition is violated, ``T`` is not a valid cluster
assignment vector, and an exception will be thrown.
Parameters
----------
Z : ndarray
The hierarchical clustering encoded as a matrix. See
``linkage`` for more information.
T : ndarray
The flat cluster assignment vector.
Returns
-------
L : ndarray
The leader linkage node id's stored as a k-element 1-D array
where ``k`` is the number of flat clusters found in ``T``.
``L[j]=i`` is the linkage cluster node id that is the
leader of flat cluster with id M[j]. If ``i < n``, ``i``
corresponds to an original observation, otherwise it
corresponds to a non-singleton cluster.
For example: if ``L[3]=2`` and ``M[3]=8``, the flat cluster with
id 8's leader is linkage node 2.
M : ndarray
        The flat cluster ids stored as a k-element 1-D array where
        ``k`` is the number of flat clusters found in ``T``. This allows the
        set of flat cluster ids to be any arbitrary set of ``k`` integers.
"""
Z = np.asarray(Z, order='c')
T = np.asarray(T, order='c')
if type(T) != np.ndarray or T.dtype != 'i':
raise TypeError('T must be a one-dimensional numpy array of integers.')
is_valid_linkage(Z, throw=True, name='Z')
if len(T) != Z.shape[0] + 1:
raise ValueError('Mismatch: len(T)!=Z.shape[0] + 1.')
Cl = np.unique(T)
kk = len(Cl)
L = np.zeros((kk,), dtype='i')
M = np.zeros((kk,), dtype='i')
n = Z.shape[0] + 1
[Z, T] = _copy_arrays_if_base_present([Z, T])
s = _hierarchy_wrap.leaders_wrap(Z, T, L, M, int(kk), int(n))
if s >= 0:
raise ValueError(('T is not a valid assignment vector. Error found '
'when examining linkage node %d (< 2n-1).') % s)
return (L, M)
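# A usage sketch for `leaders`, assuming T came from `fcluster` on the
# same linkage; the relationship shown is the one documented above:
#
#   >>> T = fcluster(Z, t=3, criterion='maxclust')
#   >>> L, M = leaders(Z, T)   # L[j] is the leader of flat cluster id M[j]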
# These are test functions to help me test the leaders function.
def _leaders_test(Z, T):
tr = to_tree(Z)
_leaders_test_recurs_mark(tr, T)
return tr
def _leader_identify(tr, T):
if tr.is_leaf():
return T[tr.id]
else:
left = tr.get_left()
right = tr.get_right()
lfid = _leader_identify(left, T)
rfid = _leader_identify(right, T)
print('ndid: %d lid: %d lfid: %d rid: %d rfid: %d'
% (tr.get_id(), left.get_id(), lfid, right.get_id(), rfid))
if lfid != rfid:
if lfid != -1:
print('leader: %d with tag %d' % (left.id, lfid))
if rfid != -1:
print('leader: %d with tag %d' % (right.id, rfid))
return -1
else:
return lfid
def _leaders_test_recurs_mark(tr, T):
if tr.is_leaf():
tr.asgn = T[tr.id]
else:
tr.asgn = -1
_leaders_test_recurs_mark(tr.left, T)
_leaders_test_recurs_mark(tr.right, T)
| bsd-3-clause |
tomkralidis/pyowssc | pyowssc/model.py | 1 | 5172 | # -*- coding: ISO-8859-15 -*-
# =================================================================
#
# Authors: Tom Kralidis <[email protected]>
#
# Copyright (c) 2011 Tom Kralidis
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
# =================================================================
import socket
import StringIO
import urllib2
import urlparse
import datetime
from lxml import etree
from pyowssc import util
class Service(object):
''' Base service class '''
def __init__(self, type, url):
''' initialize '''
self.type = type
self.url = url
self.date = util.datetime2iso(datetime.datetime.now())
self.tests = {}
def test(self):
''' run the test '''
test = {}
test['startTime'] = datetime.datetime.now()
test['input'] = {}
test['output'] = {}
test['input']['type'] = 'URL'
# test http ping
u = urlparse.urlsplit(self.url)
if u.port is None:
port = 80
test['url'] = 'http://%s' % u.netloc
else:
port = u.port
test['url'] = 'http://%s:%s' % (u.netloc, u.port)
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((u.netloc, port))
except socket.error, msg:
test['output']['success'] = '0'
test['output']['type'] = 'error'
test['output']['message'] = str(msg)
test['currentSpeed'] = '-99.99'
test['currentScore'] = '-99.99'
test['endTime'] = datetime.datetime.now()
else:
test['output']['success'] = '1'
test['output']['type'] = 'success'
test['endTime'] = datetime.datetime.now()
delta = test['endTime'] - test['startTime']
deltafmt = '%s.%s' % (delta.seconds, delta.microseconds)
test['currentSpeed'] = deltafmt
test['currentScore'] = '100.00'
self.tests['httpServer'] = test
# test GetCapabilities
test = {}
test['startTime'] = datetime.datetime.now()
test['input'] = {}
test['output'] = {}
test['url'] = '%s%sversion=1.1.1&service=WMS&request=GetCapabilities' % (self.url, util.bindURL(self.url))
try:
response = urllib2.urlopen(test['url'])
except urllib2.URLError, e: # HTTP error
test['output']['success'] = '0'
test['output']['type'] = 'error'
if hasattr(e, 'code'):
test['output']['message'] = str(e.code)
elif hasattr(e, 'reason'):
test['output']['message'] = str(e.reason)
test['currentSpeed'] = '-99.99'
test['currentScore'] = '-99.99'
else:
# test if it's an actual XML document
try:
content = etree.parse(StringIO.StringIO(response.read()))
except etree.XMLSyntaxError, e:
test['output']['success'] = '0'
test['output']['type'] = 'error'
test['output']['message'] = str(e)
test['currentSpeed'] = '-99.99'
test['currentScore'] = '-99.99'
else:
root = content.getroot().tag
test['output']['type'] = 'success'
# test that it's Capabilities XML
if root == 'WMT_MS_Capabilities' or root == 'WMS_Capabilities':
test['output']['success'] = '1'
else:
test['output']['success'] = '0'
if content.find('ServiceException') is not None:
test['output']['message'] = content.find('ServiceException').text
else:
test['output']['message'] = 'Unrecognized Capabilities XML'
test['currentScore'] = '100.00'
test['endTime'] = datetime.datetime.now()
delta = test['endTime'] - test['startTime']
deltafmt = '%s.%s' % (delta.seconds, delta.microseconds)
test['currentSpeed'] = deltafmt
self.tests['GetCapabilities'] = test
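    # Hedged usage sketch for this checker; the endpoint is hypothetical:
    #
    #   >>> svc = Service('WMS', 'http://example.org/wms')
    #   >>> svc.test()
    #   >>> sorted(svc.tests.keys())
    #   ['GetCapabilities', 'httpServer']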
| mit |
takis/django | tests/utils_tests/test_decorators.py | 319 | 4870 | from django.http import HttpResponse
from django.template import engines
from django.template.response import TemplateResponse
from django.test import RequestFactory, SimpleTestCase
from django.utils.decorators import classproperty, decorator_from_middleware
class ProcessViewMiddleware(object):
def process_view(self, request, view_func, view_args, view_kwargs):
pass
process_view_dec = decorator_from_middleware(ProcessViewMiddleware)
@process_view_dec
def process_view(request):
return HttpResponse()
class ClassProcessView(object):
def __call__(self, request):
return HttpResponse()
class_process_view = process_view_dec(ClassProcessView())
class FullMiddleware(object):
def process_request(self, request):
request.process_request_reached = True
def process_view(self, request, view_func, view_args, view_kwargs):
request.process_view_reached = True
def process_template_response(self, request, response):
request.process_template_response_reached = True
return response
def process_response(self, request, response):
# This should never receive unrendered content.
request.process_response_content = response.content
request.process_response_reached = True
return response
full_dec = decorator_from_middleware(FullMiddleware)
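# Illustrative sketch (not part of the test suite): decorator_from_middleware
# turns a middleware class into a per-view decorator, so FullMiddleware's
# hooks run around a single decorated view. `my_view` is a hypothetical name:
#
#   @full_dec
#   def my_view(request):
#       return HttpResponse('ok')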
class DecoratorFromMiddlewareTests(SimpleTestCase):
"""
Tests for view decorators created using
``django.utils.decorators.decorator_from_middleware``.
"""
rf = RequestFactory()
def test_process_view_middleware(self):
"""
Test a middleware that implements process_view.
"""
process_view(self.rf.get('/'))
def test_callable_process_view_middleware(self):
"""
Test a middleware that implements process_view, operating on a callable class.
"""
class_process_view(self.rf.get('/'))
def test_full_dec_normal(self):
"""
Test that all methods of middleware are called for normal HttpResponses
"""
@full_dec
def normal_view(request):
template = engines['django'].from_string("Hello world")
return HttpResponse(template.render())
request = self.rf.get('/')
normal_view(request)
self.assertTrue(getattr(request, 'process_request_reached', False))
self.assertTrue(getattr(request, 'process_view_reached', False))
# process_template_response must not be called for HttpResponse
self.assertFalse(getattr(request, 'process_template_response_reached', False))
self.assertTrue(getattr(request, 'process_response_reached', False))
def test_full_dec_templateresponse(self):
"""
Test that all methods of middleware are called for TemplateResponses in
the right sequence.
"""
@full_dec
def template_response_view(request):
template = engines['django'].from_string("Hello world")
return TemplateResponse(request, template)
request = self.rf.get('/')
response = template_response_view(request)
self.assertTrue(getattr(request, 'process_request_reached', False))
self.assertTrue(getattr(request, 'process_view_reached', False))
self.assertTrue(getattr(request, 'process_template_response_reached', False))
# response must not be rendered yet.
self.assertFalse(response._is_rendered)
# process_response must not be called until after response is rendered,
# otherwise some decorators like csrf_protect and gzip_page will not
# work correctly. See #16004
self.assertFalse(getattr(request, 'process_response_reached', False))
response.render()
self.assertTrue(getattr(request, 'process_response_reached', False))
# Check that process_response saw the rendered content
self.assertEqual(request.process_response_content, b"Hello world")
class ClassPropertyTest(SimpleTestCase):
def test_getter(self):
class Foo(object):
foo_attr = 123
def __init__(self):
self.foo_attr = 456
@classproperty
def foo(cls):
return cls.foo_attr
class Bar(object):
bar = classproperty()
@bar.getter
def bar(cls):
return 123
self.assertEqual(Foo.foo, 123)
self.assertEqual(Foo().foo, 123)
self.assertEqual(Bar.bar, 123)
self.assertEqual(Bar().bar, 123)
def test_override_getter(self):
class Foo(object):
@classproperty
def foo(cls):
return 123
@foo.getter
def foo(cls):
return 456
self.assertEqual(Foo.foo, 456)
self.assertEqual(Foo().foo, 456)
| bsd-3-clause |
janocat/odoo | addons/account_voucher/report/account_voucher_sales_receipt.py | 326 | 5808 | ##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp import tools
class sale_receipt_report(osv.osv):
_name = "sale.receipt.report"
_description = "Sales Receipt Statistics"
_auto = False
_rec_name = 'date'
_columns = {
'date': fields.date('Date', readonly=True),
'currency_id': fields.many2one('res.currency', 'Currency', readonly=True),
'journal_id': fields.many2one('account.journal', 'Journal', readonly=True),
'partner_id': fields.many2one('res.partner', 'Partner', readonly=True),
'company_id': fields.many2one('res.company', 'Company', readonly=True),
'user_id': fields.many2one('res.users', 'Salesperson', readonly=True),
'price_total': fields.float('Total Without Tax', readonly=True),
'price_total_tax': fields.float('Total With Tax', readonly=True),
'nbr':fields.integer('# of Voucher Lines', readonly=True),
'type': fields.selection([
('sale','Sale'),
('purchase','Purchase'),
('payment','Payment'),
('receipt','Receipt'),
],'Type', readonly=True),
'state': fields.selection([
('draft','Draft'),
('proforma','Pro-forma'),
('posted','Posted'),
('cancel','Cancelled')
], 'Voucher Status', readonly=True),
'pay_now':fields.selection([
('pay_now','Pay Directly'),
('pay_later','Pay Later or Group Funds'),
],'Payment', readonly=True),
'date_due': fields.date('Due Date', readonly=True),
'account_id': fields.many2one('account.account', 'Account',readonly=True),
'delay_to_pay': fields.float('Avg. Delay To Pay', readonly=True, group_operator="avg"),
'due_delay': fields.float('Avg. Due Delay', readonly=True, group_operator="avg")
}
_order = 'date desc'
def init(self, cr):
tools.drop_view_if_exists(cr, 'sale_receipt_report')
cr.execute("""
create or replace view sale_receipt_report as (
select min(avl.id) as id,
av.date as date,
av.partner_id as partner_id,
aj.currency as currency_id,
av.journal_id as journal_id,
rp.user_id as user_id,
av.company_id as company_id,
count(avl.*) as nbr,
av.type as type,
av.state,
av.pay_now,
av.date_due as date_due,
av.account_id as account_id,
sum(av.amount-av.tax_amount)/(select count(l.id) from account_voucher_line as l
left join account_voucher as a ON (a.id=l.voucher_id)
where a.id=av.id) as price_total,
sum(av.amount)/(select count(l.id) from account_voucher_line as l
left join account_voucher as a ON (a.id=l.voucher_id)
where a.id=av.id) as price_total_tax,
sum((select extract(epoch from avg(date_trunc('day',aml.date_created)-date_trunc('day',l.create_date)))/(24*60*60)::decimal(16,2)
from account_move_line as aml
left join account_voucher as a ON (a.move_id=aml.move_id)
left join account_voucher_line as l ON (a.id=l.voucher_id)
where a.id=av.id)) as delay_to_pay,
sum((select extract(epoch from avg(date_trunc('day',a.date_due)-date_trunc('day',a.date)))/(24*60*60)::decimal(16,2)
from account_move_line as aml
left join account_voucher as a ON (a.move_id=aml.move_id)
left join account_voucher_line as l ON (a.id=l.voucher_id)
where a.id=av.id)) as due_delay
from account_voucher_line as avl
left join account_voucher as av on (av.id=avl.voucher_id)
left join res_partner as rp ON (rp.id=av.partner_id)
left join account_journal as aj ON (aj.id=av.journal_id)
where av.type='sale' and aj.type in ('sale','sale_refund')
group by
av.date,
av.id,
av.partner_id,
aj.currency,
av.journal_id,
rp.user_id,
av.company_id,
av.type,
av.state,
av.date_due,
av.account_id,
av.tax_amount,
av.amount,
av.tax_amount,
av.pay_now
)
""")
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
mandeepdhami/neutron | neutron/tests/unit/extensions/foxinsocks.py | 6 | 3536 | # Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import abc
from oslo_serialization import jsonutils
from neutron.api import extensions
from neutron import wsgi
class FoxInSocksController(wsgi.Controller):
def index(self, request):
return "Try to say this Mr. Knox, sir..."
class FoxInSocksPluginInterface(extensions.PluginInterface):
@abc.abstractmethod
def method_to_support_foxnsox_extension(self):
pass
class Foxinsocks(object):
def __init__(self):
pass
def get_plugin_interface(self):
return FoxInSocksPluginInterface
def get_name(self):
return "Fox In Socks"
def get_alias(self):
return "FOXNSOX"
def get_description(self):
return "The Fox In Socks Extension"
def get_updated(self):
return "2011-01-22T13:25:27-06:00"
def get_resources(self):
resources = []
resource = extensions.ResourceExtension('foxnsocks',
FoxInSocksController())
resources.append(resource)
return resources
def get_actions(self):
return [extensions.ActionExtension('dummy_resources',
'FOXNSOX:add_tweedle',
self._add_tweedle_handler),
extensions.ActionExtension('dummy_resources',
'FOXNSOX:delete_tweedle',
self._delete_tweedle_handler)]
def get_request_extensions(self):
request_exts = []
def _goose_handler(req, res):
#NOTE: This only handles JSON responses.
# You can use content type header to test for XML.
data = jsonutils.loads(res.body)
data['FOXNSOX:googoose'] = req.GET.get('chewing')
res.body = jsonutils.dumps(data)
return res
req_ext1 = extensions.RequestExtension('GET', '/dummy_resources/:(id)',
_goose_handler)
request_exts.append(req_ext1)
def _bands_handler(req, res):
#NOTE: This only handles JSON responses.
# You can use content type header to test for XML.
data = jsonutils.loads(res.body)
data['FOXNSOX:big_bands'] = 'Pig Bands!'
res.body = jsonutils.dumps(data)
return res
req_ext2 = extensions.RequestExtension('GET', '/dummy_resources/:(id)',
_bands_handler)
request_exts.append(req_ext2)
return request_exts
def _add_tweedle_handler(self, input_dict, req, id):
return "Tweedle {0} Added.".format(
input_dict['FOXNSOX:add_tweedle']['name'])
def _delete_tweedle_handler(self, input_dict, req, id):
return "Tweedle {0} Deleted.".format(
input_dict['FOXNSOX:delete_tweedle']['name'])
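    # Illustrative payload shape for the action handlers above (inferred
    # from the dictionary keys they read; `req` and `id` are unused
    # placeholders here):
    #
    #   Foxinsocks()._add_tweedle_handler(
    #       {'FOXNSOX:add_tweedle': {'name': 'Beetle'}}, req, id)
    #   # -> "Tweedle Beetle Added."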
| apache-2.0 |
knifenomad/django | tests/template_tests/syntax_tests/test_comment.py | 521 | 3667 | from django.test import SimpleTestCase
from ..utils import setup
class CommentSyntaxTests(SimpleTestCase):
@setup({'comment-syntax01': '{# this is hidden #}hello'})
def test_comment_syntax01(self):
output = self.engine.render_to_string('comment-syntax01')
self.assertEqual(output, 'hello')
@setup({'comment-syntax02': '{# this is hidden #}hello{# foo #}'})
def test_comment_syntax02(self):
output = self.engine.render_to_string('comment-syntax02')
self.assertEqual(output, 'hello')
@setup({'comment-syntax03': 'foo{# {% if %} #}'})
def test_comment_syntax03(self):
output = self.engine.render_to_string('comment-syntax03')
self.assertEqual(output, 'foo')
@setup({'comment-syntax04': 'foo{# {% endblock %} #}'})
def test_comment_syntax04(self):
output = self.engine.render_to_string('comment-syntax04')
self.assertEqual(output, 'foo')
@setup({'comment-syntax05': 'foo{# {% somerandomtag %} #}'})
def test_comment_syntax05(self):
output = self.engine.render_to_string('comment-syntax05')
self.assertEqual(output, 'foo')
@setup({'comment-syntax06': 'foo{# {% #}'})
def test_comment_syntax06(self):
output = self.engine.render_to_string('comment-syntax06')
self.assertEqual(output, 'foo')
@setup({'comment-syntax07': 'foo{# %} #}'})
def test_comment_syntax07(self):
output = self.engine.render_to_string('comment-syntax07')
self.assertEqual(output, 'foo')
@setup({'comment-syntax08': 'foo{# %} #}bar'})
def test_comment_syntax08(self):
output = self.engine.render_to_string('comment-syntax08')
self.assertEqual(output, 'foobar')
@setup({'comment-syntax09': 'foo{# {{ #}'})
def test_comment_syntax09(self):
output = self.engine.render_to_string('comment-syntax09')
self.assertEqual(output, 'foo')
@setup({'comment-syntax10': 'foo{# }} #}'})
def test_comment_syntax10(self):
output = self.engine.render_to_string('comment-syntax10')
self.assertEqual(output, 'foo')
@setup({'comment-syntax11': 'foo{# { #}'})
def test_comment_syntax11(self):
output = self.engine.render_to_string('comment-syntax11')
self.assertEqual(output, 'foo')
@setup({'comment-syntax12': 'foo{# } #}'})
def test_comment_syntax12(self):
output = self.engine.render_to_string('comment-syntax12')
self.assertEqual(output, 'foo')
@setup({'comment-tag01': '{% comment %}this is hidden{% endcomment %}hello'})
def test_comment_tag01(self):
output = self.engine.render_to_string('comment-tag01')
self.assertEqual(output, 'hello')
@setup({'comment-tag02': '{% comment %}this is hidden{% endcomment %}'
'hello{% comment %}foo{% endcomment %}'})
def test_comment_tag02(self):
output = self.engine.render_to_string('comment-tag02')
self.assertEqual(output, 'hello')
@setup({'comment-tag03': 'foo{% comment %} {% if %} {% endcomment %}'})
def test_comment_tag03(self):
output = self.engine.render_to_string('comment-tag03')
self.assertEqual(output, 'foo')
@setup({'comment-tag04': 'foo{% comment %} {% endblock %} {% endcomment %}'})
def test_comment_tag04(self):
output = self.engine.render_to_string('comment-tag04')
self.assertEqual(output, 'foo')
@setup({'comment-tag05': 'foo{% comment %} {% somerandomtag %} {% endcomment %}'})
def test_comment_tag05(self):
output = self.engine.render_to_string('comment-tag05')
self.assertEqual(output, 'foo')
| bsd-3-clause |
maestrano/openerp | openerp/addons/auth_signup/controllers/main.py | 51 | 3502 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2012-today OpenERP SA (<http://www.openerp.com>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
##############################################################################
import logging
import openerp
from openerp.modules.registry import RegistryManager
from ..res_users import SignupError
_logger = logging.getLogger(__name__)
class Controller(openerp.addons.web.http.Controller):
_cp_path = '/auth_signup'
@openerp.addons.web.http.jsonrequest
def get_config(self, req, dbname):
""" retrieve the module config (which features are enabled) for the login page """
registry = RegistryManager.get(dbname)
with registry.cursor() as cr:
icp = registry.get('ir.config_parameter')
config = {
'signup': icp.get_param(cr, openerp.SUPERUSER_ID, 'auth_signup.allow_uninvited') == 'True',
'reset_password': icp.get_param(cr, openerp.SUPERUSER_ID, 'auth_signup.reset_password') == 'True',
}
return config
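    # Illustrative return value of get_config() (hypothetical database
    # settings; the keys are exactly the ones built above):
    #   {'signup': True, 'reset_password': False}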
@openerp.addons.web.http.jsonrequest
def retrieve(self, req, dbname, token):
""" retrieve the user info (name, login or email) corresponding to a signup token """
registry = RegistryManager.get(dbname)
with registry.cursor() as cr:
res_partner = registry.get('res.partner')
user_info = res_partner.signup_retrieve_info(cr, openerp.SUPERUSER_ID, token)
return user_info
@openerp.addons.web.http.jsonrequest
def signup(self, req, dbname, token, **values):
""" sign up a user (new or existing)"""
try:
self._signup_with_values(req, dbname, token, values)
except SignupError, e:
return {'error': openerp.tools.exception_to_unicode(e)}
return {}
def _signup_with_values(self, req, dbname, token, values):
registry = RegistryManager.get(dbname)
with registry.cursor() as cr:
res_users = registry.get('res.users')
res_users.signup(cr, openerp.SUPERUSER_ID, values, token)
@openerp.addons.web.http.jsonrequest
def reset_password(self, req, dbname, login):
""" retrieve user, and perform reset password """
registry = RegistryManager.get(dbname)
with registry.cursor() as cr:
try:
res_users = registry.get('res.users')
res_users.reset_password(cr, openerp.SUPERUSER_ID, login)
cr.commit()
except Exception as e:
# signup error
_logger.exception('error when resetting password')
raise(e)
return True
# vim:expandtab:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
JVenberg/PokemonGo-Bot-Desktop | pywin/Lib/encodings/iso2022_kr.py | 816 | 1053 | #
# iso2022_kr.py: Python Unicode Codec for ISO2022_KR
#
# Written by Hye-Shik Chang <[email protected]>
#
import _codecs_iso2022, codecs
import _multibytecodec as mbc
codec = _codecs_iso2022.getcodec('iso2022_kr')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(
name='iso2022_kr',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
| mit |
ametheus/berlin | bin/berlin/network_config.py | 1 | 23565 | #!/usr/bin/env python
"""
Copyright (C) 2011 Thijs van Dijk
This file is part of berlin.
Berlin is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Berlin is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
file "COPYING" for details.
"""
import termios, sys, os
import subprocess
from collections import *
from getpass import getuser
def open_file( filename, mode ):
"""Does the same as open(), only transparently checks multiple locations."""
if filename[0] == '/':
# This is an absolute path, so just open that file.
return open( filename, mode )
locations = ['/etc/berlin/','/etc/vuurmuur/','/etc/firewall.d/config/']
for L in locations:
fn = L + filename
try:
f = open(fn,mode)
return f
except IOError:
pass
raise IOError("File not found, or permission denied.")
def list_directory( dir ):
"""Does the same as os.listdir, only it transparently checks multiple
locations."""
if dir[0] == '/':
try:
return os.listdir(dir)
except OSError:
return []
locations = ['/etc/berlin/','/etc/vuurmuur/','/etc/firewall.d/config/']
for L in locations:
dn = L + dir
try:
return os.listdir(dn)
except OSError:
pass
return []
def file_put_contents( filename, data ):
"""Write str({data}) to the file {filename}."""
f = open_file( filename, 'w' )
f.write( data )
f.close()
def file_put_data( filename, data ):
"""Perform the opposite of parse_file().
Create a file at {filename} parsable by parse_file(), using {data}.
Examples:
>>> D = dict({'test': ['a', 'b', 'c']})
>>> file_put_data('/tmp/doctest_file_put_data',D)
>>> E = parse_file('/tmp/doctest_file_put_data')
>>> E['test']
['a', 'b', 'c']
>>> E['test another value']
['undefined']"""
file_put_contents( filename, unparse_file( data ) )
def parse_file( filename ):
"""Parse a simple config file.
Parse a file in the form
key: val1 val2 val3 val4
    to a defaultdict in the form
dict({'key': ['val1','val2','val3','val4']})
Examples:
>>> P = parse_file('/dev/null')
>>> P['key']
['undefined']
>>> Q = dict({'key': ['val1','val2','val3','val4']})
>>> file_put_data('/tmp/doctest_parse_file',Q)
>>> R = parse_file('/tmp/doctest_parse_file')
>>> R['key']
['val1', 'val2', 'val3', 'val4']
"""
rv = defaultdict(lambda:['undefined'])
try:
f = open_file( filename, 'r' )
except(OSError, IOError):
pass
else:
s = f.read()
l = [ t.split(':',1) for t in s.splitlines() ]
for i in l:
if len(i) < 2: continue
rv[i[0].strip()] = [t.strip() for t in i[1].split(None)]
f.close()
return rv
def unparse_file( data ):
"""String-format a configuration file.
Re-casts a dict as returned by parse_file() to a string.
Examples:
>>> unparse_file(dict({'key':['val'],'key1':['val1','val2']}))
'key1: val1 val2\\nkey: val\\n\\n'
>>> unparse_file(None)
Traceback (most recent call last):
...
AttributeError: 'NoneType' object has no attribute 'items'
>>> unparse_file(dict())
'\\n\\n'
"""
return "\n".join(
[ ': '.join(r) for r in
[ (t[0], ' '.join(t[1]) ) for t in data.items() ]]
) + "\n\n"
def null_callback(s,o):
"""An empty callback function for a Display() method."""
print s
class Config:
"""Complete network configuration for berlin."""
Interfaces = None
ifconfig = None
local_services = []
network_services = []
def __init__( self, network_devices=None ):
"""Parses the config directories into a Config class."""
self.Interfaces = []
self.ifconfig = parse_file("if-config")
if network_devices == None:
#print " detecting network devices..."
P = subprocess.Popen( ['/bin/sh', '-c',
'/sbin/ifconfig -a -s 2>/dev/null | grep -v Iface '
'| cut -d\ -f1 | cut -d: -f1 | grep -v lo | sort | uniq'],
stdout=subprocess.PIPE )
s = P.stdout.read()
else:
s = network_devices
# Parse local services
if not 'undefined' in self.ifconfig['local services']:
self.local_services += [ int(p) for p in self.ifconfig['local services'] ]
for portfile in list_directory('ports'):
self.open_port(portfile)
# Get a list of subnets
nets = list_directory('networks')
gb = None
for ifx in s.splitlines():
I = Iface( ifx.strip(), self.ifconfig, nets )
if gb is None and I.enabled and not I.wan_interface:
gb = I
self.Interfaces.append( I )
for N in nets:
gb.subnets.append(Subnet(N))
self.tion = ( self.Interfaces[0], None, None )
def open_port(self, portfile):
"""Parse an 'open port' file.
Parse an 'open port' file, and modify local_services and
network_services accordingly. 'Open port' files are located in the
        ports/ directory in the configuration file root, and are named after
the port in question. They can either be empty, indicating the router
should accept traffic at said port itself, or contain an IP address
and/or port in the form 'p.q.r.s:t', indicating that all traffic should
be forwarded to that address and port.
"""
try:
f = open_file("ports/" + portfile,'r')
c = f.read().strip()
f.close()
port = int(portfile)
        except (IOError, ValueError):
print "Error parsing portfile {0}".format(portfile)
return
if len(c) > 0:
# TODO: Some form of validation. Probably a regex
self.network_services.append( (port, c) )
else:
self.local_services.append( port )
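    # Illustrative examples of the 'open port' convention described in the
    # docstring above (hypothetical file names and contents, not files
    # shipped with berlin):
    #
    #   ports/22  (empty file)             -> the router accepts SSH itself
    #   ports/80  containing "10.0.0.5:80" -> traffic on port 80 is
    #                                         forwarded to that internal host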
def Export( self ):
"""Writes back all config files into /tmp/firewall/."""
ifaces_file = self.interfaces_file()
subprocess.call([
'rm', '-rf', '/tmp/firewall'
])
os.mkdir( '/tmp/firewall' )
# Export special configuration files
file_put_contents( '/tmp/firewall/if-config', self.if_config_file() )
file_put_contents( '/tmp/firewall/interfaces', self.interfaces_file() )
file_put_contents( '/tmp/firewall/dhcpd.conf', self.dhcp_conf() )
# Export all networks
os.mkdir( '/tmp/firewall/networks' )
for I in self.Interfaces:
I.Export( '/tmp/firewall/networks' )
# Export all open or forwarded ports
os.mkdir( '/tmp/firewall/ports' )
for port in self.local_services:
file_put_contents( '/tmp/firewall/ports/' + str(int(port)), '' )
for port,host in self.network_services:
file_put_contents( '/tmp/firewall/ports/' + str(int(port)), host )
def if_config_file( self ):
"""Creates a new if-config file based on current settings.
Returns the contents of the new if-config file in a string."""
rv = dict()
rv['wan address'] = \
[ d.wan_address for d in self.Interfaces if d.wan_interface and d.enabled ]
rv['external address'] = \
[ d.address for d in self.Interfaces if d.wan_interface and d.enabled ]
rv['external interface'] = \
[ d.name for d in self.Interfaces if d.wan_interface and d.enabled ]
rv['internal interface'] = \
[ d.name for d in self.Interfaces if not d.wan_interface and d.enabled ]
return unparse_file( rv )
def interfaces_file( self ):
"""Creates a new /etc/network/interfaces file.
Returns the contents of the /etc/network/interfaces file in a string."""
s = "# This file describes the network interfaces available on your system\n" + \
"# and how to activate them. For more information, see interfaces(5).\n" + \
"\n" + \
"# The loopback network interface\n" + \
"auto lo\n" + \
"iface lo inet loopback\n\n"
return s + "\n\n".join(
[ t.interfaces_file() for t in self.Interfaces ]
)
def dhcp_conf( self ):
"""Creates a new /etc/dhcp3/dhcpd.conf file.
Returns the contents of the /etc/dhcp3/dhcpd.conf file in a string."""
rv = "ddns-updates off;" + "\n" + \
"ddns-update-style interim;" + "\n" + \
"authoritative;" + "\n" + \
"shared-network local" + "\n" + \
"{" + "\n" + \
"\t" + "" + "\n"
for I in self.Interfaces:
rv += I.subnet_decl()
rv += "\t\n\t\n"
for I in self.Interfaces:
rv += I.host_decl()
return rv + "\t\n\t\n}\n\n"
@staticmethod
def sort_iface( a, b ):
"""A sorting function for Iface's"""
if ( not a.enabled ) and ( b.enabled ):
return -1
if ( a.enabled ) and ( not b.enabled ):
return 1
if ( not a.wan_interface ) and ( b.wan_interface ):
return -1
if ( a.wan_interface ) and ( not b.wan_interface ):
return 1
return 0
def Display( self, cb=null_callback ):
"""Print a graphic representation of the network topology to stdout.
Examples:
>>> C = Config()
>>> C.Interfaces = [Iface()]
>>> C.local_services = [1,2,3]
>>> print '.', C.Display()
. \\
/ Open ports: [1, 2, 3]
\\
I / -- ## -- ()
N \\
T /
E \\
None
"""
cnt = [0]
def pr(s,o):
"""Display the jagged line and the word 'INTERNET.' """
S = ' {intc} {intsep}{string}'.format(
intc = (' INTERNET'[cnt[0]] if cnt[0] < 11 else ' '),
intsep = ( s[0:2] if s[0:2].strip() != '' else
(' \\' if cnt[0] % 2 == 0 else ' /')),
string = s[2:]
)
cb(S,o)
cnt[0] = cnt[0] + 1
pr('',self)
self.Interfaces.sort( Config.sort_iface )
self.local_services.sort()
pr(" Open ports: {0}".format(str(self.local_services)),self)
pr('',self)
for i in self.Interfaces:
i.Display( pr )
pr('',self)
pr('',self)
class Iface:
"""Network interface"""
name = '##'
wan_interface = False
wan_address = '0.0.0.0'
address = '0.0.0.0'
dhcp = False
enabled = False
subnets = None
def __init__( self, name='##', ifconfig=defaultdict(lambda:'undefined'), nets=[] ):
"""Create a new Iface object.
Create a new Iface object, adding all appropriate subnets to its own
collection."""
self.name = name
self.subnets = []
if name in ifconfig['external interface']:
self.enabled = True
self.wan_interface = True
self.wan_address = ' '.join(ifconfig['wan address'])
elif name in ifconfig['internal interface']:
self.enabled = True
            for net in nets[:]:  # iterate over a copy: matched nets are removed below
N = Subnet( net )
if N.interface != self.name: continue
self.subnets.append( N )
nets.remove( net )
ifc = subprocess.Popen( ['/sbin/ifconfig', name], stdout = subprocess.PIPE, stderr = subprocess.PIPE )
ifc.stderr.close()
pipe = subprocess.Popen( [ '/bin/sh', '-c',
'grep -oP "inet addr:(([0-9]+\.){3}[0-9]+)" | cut -d: -f2'
], stdin = ifc.stdout, stdout = subprocess.PIPE )
ifc.stdout.close()
self.address = pipe.stdout.read().strip()
def __repr__( self ):
return "ifx{{name}}".format( name=self.name )
def Display( self, cb ):
"""Create a graphic representation.
Create a graphic representation, calling cb(S) for each line S to
be printed."""
cb( '{intsep}{og}{iface:^10s}{fg} ({addr}){dhcp}'.format(
intsep = '====' if self.wan_interface else ' ',
iface = self.name,
og = '[[' if self.enabled else '--',
fg = ']]' if self.enabled else '--',
addr = self.address,
dhcp = 'D' if self.dhcp else ' '
), self )
if self.enabled and self.wan_interface:
cb( ' <{wan}>'.format(
wan = self.wan_address
), self )
if self.enabled and not self.wan_interface:
prn = lambda s, o: cb( ' || ' + s, o )
else:
prn = lambda s, o: cb( ' ' + s, o )
for n in self.subnets:
n.Display( prn )
prn('', self)
def new_subnet( self, identifier ):
try:
sn = str(int(identifier))
except ValueError:
print "Error: Network identifier must be an integer between 1 and 255."
return None
SN = Subnet(sn)
self.subnets.append(SN)
return SN
def Export( self, dir ):
"""For each subnet, write config files to disk."""
if self.wan_interface or not self.enabled:
return
for S in self.subnets:
S.Export( dir, self.name )
def interfaces_file( self ):
"""Represent itself in a /etc/network/interfaces file."""
if not self.enabled:
return ""
if self.wan_interface:
if self.dhcp:
s = "auto {name}\n"
s += "iface {name} inet dhcp\n"
s += "name External network interface\n"
s += "\n"
return s.format( name = self.name )
s = "auto {name}\n"
s += "iface {name} inet static\n"
s += "name External network interface\n"
s += "address {addr}\n"
s += "gateway {pref}.1\n"
s += "network {pref}.0\n"
s += "netmask 255.255.255.0\n"
s += "broadcast {pref}.255\n"
s += "\n"
return s.format(
name = self.name,
addr = self.address,
pref = '.'.join(self.address.split('.')[0:3])
)
else:
rv = "\n\n# Interface {name}\n\n".format(name=self.name)
c = -1
for S in self.subnets:
d = self.name if c == -1 else "{a}:{b}".format(a = self.name, b = c)
rv += S.interfaces_file( d )
c += 1
return rv
def subnet_decl( self ):
"""For each subnet, create a representation for the DHCP config file."""
if self.wan_interface or not self.enabled:
return ""
return "".join(
[ t.subnet_decl() for t in self.subnets ]
)
def host_decl( self ):
"""For each host in each subnet, create a representation for the DHCP
config file."""
return "\t\n".join(
[ t.host_decl() for t in self.subnets ]
)
class Subnet:
"""Network subnet"""
name = '##'
address = '0'
interface = None
public = False
showhosts = False
hosts = None
policies = None
services = [] # TODO: implement
def __init__(self, net):
nc = parse_file( 'networks/{net}/netconf'.format( net=net ) )
self.name = ' '.join(nc['friendly name'])
self.address = net
self.policies = nc['policies']
while 'undefined' in self.policies:
self.policies.remove('undefined')
hf = list_directory( 'networks/{net}/hosts'.format( net=net ) )
self.hosts = [Host(t,net) for t in hf]
self.interface = ' '.join(nc['interface'])
def net( self ):
"""Return the subnet address in the form 192.168.x.0/24."""
return '192.168.{net}.0/24'.format(net=self.address)
def gw( self ):
"""Return the default gateway, in the form 192.168.x.1."""
return '192.168.{net}.1'.format(net=self.address)
def Export( self, dir, iface ):
"""Write the appropriate config files into {dir}.
        Write the appropriate config files into {dir}, using {iface} as its
parent Iface object."""
os.mkdir( dir + "/" + self.address )
os.mkdir( dir + "/" + self.address + '/hosts' )
rv = dict()
rv['friendly name'] = [self.name]
rv['interface'] = [iface]
rv['policies'] = self.policies
file_put_data( dir + "/" + self.address + '/netconf', rv )
for H in self.hosts:
H.Export( dir + "/" + self.address + '/hosts' )
@staticmethod
def sort_host( a, b ):
"""Sort function for Host's"""
return a.addr - b.addr
def toggle_policy( self, policy ):
"""Add or remove {policy} from the policy list."""
if policy in self.policies:
i = self.policies.index( policy )
self.policies.pop(i)
return False
else:
self.policies.append( policy )
return True
def new_host( self, hostname, mac, ip ):
try:
ip = int(ip)
except ValueError:
return None
H = Host( hostname, self )
H.addr = ip
H.mac = mac
        self.hosts.append(H)
self.showhosts = True
return H
def Display( self, cb ):
"""Create a graphic representation.
Create a graphic representation, calling cb(S) for each line S to
be printed."""
cb( '{{{{ {name:<30s} ({addr:>3s}) }}}}'.format(
name = self.name,
addr = self.address
), self )
cb( '{{{{ {pub:<7s}: {pol:<27s} }}}}'.format(
pub = 'public' if self.public else 'private',
pol = ', '.join(self.policies)
), self )
call = lambda s, o: cb( ' ' + s, o )
self.hosts.sort( Subnet.sort_host )
if self.showhosts:
for h in self.hosts:
h.Display( call )
cb( '', self )
def interfaces_file( self, iface ):
"""Return an entry for /etc/network/interfaces.
Return an entry for /etc/network/interfaces as a string, using {iface}
as the network interface name."""
s = "auto {iface}\n"
s += "iface {iface} inet static\n"
s += "name {desc}\n"
s += "address 192.168.{net}.1\n"
s += "network 192.168.{net}.0\n"
s += "netmask 255.255.255.0\n"
s += "broadcast 192.168.{net}.255\n"
s += "\n"
return s.format(
iface = iface,
desc = self.name,
net = self.address
)
def subnet_decl( self ):
"""Return an entry in /etc/dhcp3/dhcpd.conf as a string."""
rv = "\t" + "# Subnet '{desc}'" + "\n" + \
"\t" + "subnet 192.168.{net}.0 netmask 255.255.255.0 {{" + "\n" + \
"\t\t" + "range 192.168.{net}.100 192.168.{net}.200;" + "\n" + \
"\t\t" + "option routers 192.168.{net}.1;" + "\n" + \
"\t\t" + "option subnet-mask 255.255.255.0;" + "\n" + \
"\t\t" + "option broadcast-address 192.168.{net}.255;" + "\n" + \
"\t\t" + "option domain-name \"{sd}\";" + "\n" + \
"\t\t" + "option domain-name-servers 192.168.{net}.1;" + "\n" + \
"\t\t" + "" + "\n" + \
"\t\t" + "{allow} unknown-clients;" + "\n" + \
"\t" + "}}" + "\n" + \
"\t" + "" + "\n"
return rv.format(
desc = self.name,
net = self.address,
sd = "todo.inurbanus.nl",
allow = 'allow' if self.public else 'deny'
)
def host_decl( self ):
"""Return a string of all host declarations."""
return "".join(
[ t.host_decl( self.address ) for t in self.hosts ]
)
class Host:
"""A single network host."""
    mac = ''
    addr = ''
    name = ''
    comment = ''
def __init__( self, name, net ):
self.name = name
fn = 'networks/{net}/hosts/{name}'.format(
net=net, name=name
)
hf = parse_file( fn )
self.mac = ' '.join(hf['hardware ethernet'])
ipaddr = ' '.join(hf['fixed address']).strip().split('.')
if ipaddr[0] == 'undefined':
sys.stderr.write( "Invalid file {fn}\n".format(fn=fn) )
else:
self.addr = int( ipaddr[3] if len(ipaddr) > 3 else ipaddr[0] )
cc = ' '.join(hf['comment'])
self.comment = cc if cc != 'undefined' else ''
def Display( self, cb ):
"""Create a graphic representation.
Create a graphic representation, calling cb(S) for each line S to
be printed."""
cb( '* ({ext:>3d}): {mac} {name}'.format(
ext = self.addr,
mac = self.mac,
name = self.name
), self )
#if len(self.comment) > 0:
# cb( ' "{comment}"'.format( comment = self.comment ), self )
def Export( self, dir ):
"""Export a host file to {dir}."""
rv = dict()
rv['comment'] = [self.comment]
rv['hardware ethernet'] = [self.mac]
rv['fixed address'] = [str(self.addr)]
file_put_data( dir + "/" + self.name, rv )
def host_decl( self, net ):
"""Export a host declaration for dhcpd.conf"""
rv = \
"\t" + "# {desc}" + "\n" + \
"\t" + "host {name} {{" + "\n" + \
"\t\t" + "hardware ethernet {mac};" + "\n" + \
"\t\t" + "fixed-address 192.168.{net}.{adr};" + "\n" + \
"\t" + "}}" + "\n"
return rv.format(
desc = self.comment,
name = self.name,
mac = self.mac,
net = net,
adr = self.addr
)
if __name__ == '__main__':
import doctest
fail, total = doctest.testmod( optionflags = doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE )
sys.exit( fail )
| gpl-3.0 |
eeshangarg/zulip | zerver/tests/test_slack_message_conversion.py | 3 | 4897 | import os
from typing import Any, Dict, List, Tuple
import orjson
from zerver.data_import.slack_message_conversion import (
convert_to_zulip_markdown,
get_user_full_name,
)
from zerver.lib import mdiff
from zerver.lib.test_classes import ZulipTestCase
class SlackMessageConversion(ZulipTestCase):
def assertEqual(self, first: Any, second: Any, msg: str = "") -> None:
if isinstance(first, str) and isinstance(second, str):
if first != second:
raise AssertionError(
"Actual and expected outputs do not match; showing diff.\n"
+ mdiff.diff_strings(first, second)
+ msg
)
else:
super().assertEqual(first, second)
def load_slack_message_conversion_tests(self) -> Dict[Any, Any]:
test_fixtures = {}
with open(
os.path.join(os.path.dirname(__file__), "fixtures/slack_message_conversion.json"), "rb"
) as f:
data = orjson.loads(f.read())
for test in data["regular_tests"]:
test_fixtures[test["name"]] = test
return test_fixtures
def test_message_conversion_fixtures(self) -> None:
format_tests = self.load_slack_message_conversion_tests()
valid_keys = {"name", "input", "conversion_output"}
for name, test in format_tests.items():
# Check that there aren't any unexpected keys as those are often typos
self.assert_length(set(test.keys()) - valid_keys, 0)
slack_user_map: Dict[str, int] = {}
users: List[Dict[str, Any]] = [{}]
channel_map: Dict[str, Tuple[str, int]] = {}
converted = convert_to_zulip_markdown(test["input"], users, channel_map, slack_user_map)
converted_text = converted[0]
with self.subTest(slack_message_conversion=name):
self.assertEqual(converted_text, test["conversion_output"])
def test_mentioned_data(self) -> None:
slack_user_map = {"U08RGD1RD": 540, "U0CBK5KAT": 554, "U09TYF5SK": 571}
# For this test, only relevant keys are 'id', 'name', 'deleted'
# and 'real_name'
users = [
{
"id": "U0CBK5KAT",
"name": "aaron.anzalone",
"deleted": False,
"is_mirror_dummy": False,
"real_name": "",
},
{
"id": "U08RGD1RD",
"name": "john",
"deleted": False,
"is_mirror_dummy": False,
"real_name": "John Doe",
},
{
"id": "U09TYF5Sk",
"name": "Jane",
"is_mirror_dummy": False,
"deleted": True, # Deleted users don't have 'real_name' key in Slack
},
]
channel_map = {"general": ("C5Z73A7RA", 137)}
message = "Hi <@U08RGD1RD|john>: How are you? <#C5Z73A7RA|general>"
text, mentioned_users, has_link = convert_to_zulip_markdown(
message, users, channel_map, slack_user_map
)
full_name = get_user_full_name(users[1])
self.assertEqual(full_name, "John Doe")
self.assertEqual(get_user_full_name(users[2]), "Jane")
self.assertEqual(text, f"Hi @**{full_name}**: How are you? #**general**")
self.assertEqual(mentioned_users, [540])
# multiple mentioning
message = "Hi <@U08RGD1RD|john>: How are you?<@U0CBK5KAT> asked."
text, mentioned_users, has_link = convert_to_zulip_markdown(
message, users, channel_map, slack_user_map
)
self.assertEqual(text, "Hi @**John Doe**: How are you?@**aaron.anzalone** asked.")
self.assertEqual(mentioned_users, [540, 554])
# Check wrong mentioning
message = "Hi <@U08RGD1RD|jon>: How are you?"
text, mentioned_users, has_link = convert_to_zulip_markdown(
message, users, channel_map, slack_user_map
)
self.assertEqual(text, message)
self.assertEqual(mentioned_users, [])
def test_has_link(self) -> None:
slack_user_map: Dict[str, int] = {}
message = "<http://journals.plos.org/plosone/article>"
text, mentioned_users, has_link = convert_to_zulip_markdown(message, [], {}, slack_user_map)
self.assertEqual(text, "http://journals.plos.org/plosone/article")
self.assertEqual(has_link, True)
message = "<mailto:[email protected]>"
text, mentioned_users, has_link = convert_to_zulip_markdown(message, [], {}, slack_user_map)
self.assertEqual(text, "mailto:[email protected]")
self.assertEqual(has_link, True)
message = "random message"
text, mentioned_users, has_link = convert_to_zulip_markdown(message, [], {}, slack_user_map)
self.assertEqual(has_link, False)
| apache-2.0 |
appleseedhq/gaffer | python/GafferImageTest/ImageSamplerTest.py | 8 | 3744 | ##########################################################################
#
# Copyright (c) 2013, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above
# copyright notice, this list of conditions and the following
# disclaimer.
#
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided with
# the distribution.
#
# * Neither the name of John Haddon nor the names of
# any other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import imath
import IECore
import IECoreImage
import Gaffer
import GafferTest
import GafferImage
import GafferImageTest
class ImageSamplerTest( GafferImageTest.ImageTestCase ) :
def test( self ) :
xRamp = GafferImage.Ramp()
xRamp["format"].setValue( GafferImage.Format( 75, 75, 1.000 ) )
xRamp["endPosition"].setValue( imath.V2f( 75, 0 ) )
xRamp["ramp"]["p1"]["y"].setValue( imath.Color4f( 75, 0, 0, 0 ) )
yRamp = GafferImage.Ramp()
yRamp["format"].setValue( GafferImage.Format( 75, 75, 1.000 ) )
yRamp["endPosition"].setValue( imath.V2f( 0, 75 ) )
yRamp["ramp"]["p1"]["y"].setValue( imath.Color4f( 0, 75, 0, 0 ) )
rampMerge = GafferImage.Merge()
rampMerge["operation"].setValue( GafferImage.Merge.Operation.Add )
rampMerge["in"]["in0"].setInput( xRamp["out"] )
rampMerge["in"]["in1"].setInput( yRamp["out"] )
sampler = GafferImage.ImageSampler()
sampler["image"].setInput( rampMerge["out"] )
hashes = set()
for x in range( 0, 75 ) :
for y in range( 0, 75 ) :
sampler["pixel"].setValue( imath.V2f( x + 0.5, y + 0.5 ) )
c = sampler["color"].getValue()
for i in range( 4 ):
self.assertAlmostEqual( c[i], [ x + 0.5, y + 0.5, 0, 0 ][i], places = 4 )
hashes.add( str( sampler["color"].hash() ) )
self.assertEqual( len( hashes ), 75 * 75 )
def testChannelsPlug( self ) :
constant = GafferImage.Constant()
constant["layer"].setValue( "diffuse" )
constant["color"].setValue( imath.Color4f( 1, 0.5, 0.25, 1 ) )
sampler = GafferImage.ImageSampler()
sampler["image"].setInput( constant["out"] )
sampler["pixel"].setValue( imath.V2f( 10.5 ) )
self.assertEqual( sampler["color"].getValue(), imath.Color4f( 0, 0, 0, 0 ) )
sampler["channels"].setValue( IECore.StringVectorData( [ "diffuse.R", "diffuse.G", "diffuse.B", "diffuse.A" ] ) )
self.assertEqual( sampler["color"].getValue(), imath.Color4f( 1, 0.5, 0.25, 1 ) )
if __name__ == "__main__":
unittest.main()
| bsd-3-clause |
lepture/pythondotorg | users/tests/test_models.py | 7 | 1764 | import datetime
from django.contrib.auth import get_user_model
from django.test import TestCase
from django.utils import timezone
from ..factories import UserFactory, MembershipFactory
from ..models import Membership
User = get_user_model()
class UsersModelsTestCase(TestCase):
def test_create_superuser(self):
user = User.objects.create_superuser(
username='username',
password='password',
email='[email protected]'
)
self.assertNotEqual(user, None)
self.assertTrue(user.is_active)
self.assertTrue(user.is_superuser)
self.assertTrue(user.is_staff)
kwargs = {
'username': '',
'password': 'password',
}
self.assertRaises(ValueError, User.objects.create_user, **kwargs)
def test_membership(self):
plain_user = UserFactory()
self.assertFalse(plain_user.has_membership)
member = MembershipFactory()
self.assertTrue(member.creator.has_membership)
def test_higher_level_member(self):
member1 = MembershipFactory()
member2 = MembershipFactory(membership_type=Membership.SPONSOR)
self.assertFalse(member1.higher_level_member)
self.assertTrue(member2.higher_level_member)
def test_needs_vote_affirmation(self):
member1 = MembershipFactory()
self.assertFalse(member1.needs_vote_affirmation)
member2 = MembershipFactory(votes=True)
self.assertFalse(member2.needs_vote_affirmation)
last_year = timezone.now() - datetime.timedelta(days=366)
member3 = MembershipFactory(
votes=True,
last_vote_affirmation=last_year,
)
self.assertTrue(member3.needs_vote_affirmation)
| apache-2.0 |
matplotlib/freetypy | docstrings/charmap.py | 1 | 5602 | # -*- coding: utf-8 -*-
# Copyright (c) 2015, Michael Droettboom All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
# The views and conclusions contained in the software and
# documentation are those of the authors and should not be interpreted
# as representing official policies, either expressed or implied, of
# the FreeBSD Project.
from __future__ import print_function, unicode_literals, absolute_import
CharMap__init__ = """
A mapping from code points to glyph identifiers.
Some font formats may provide several `CharMap` instances per font.
Each face object owns zero or more charmaps, but only one of them can
be ‘active’ and used by `Face.get_char_index` or `Face.load_char`.
The list of available charmaps in a face is available through
`Face.charmaps`.
The currently active charmap is available as `Face.charmap`.
"""
CharMap_encoding = """
An `ENCODING` tag identifying the charmap.
Despite the name, this value specifies a specific character
repertory (i.e., charset), and not a text encoding method (e.g.,
UTF-8, UTF-16, etc.).
"""
CharMap_encoding_id = """
A platform-specific encoding number. This also comes from the TrueType
specification and should be emulated similarly.
Depending on `platform_id`, this will be from `TT_APPLE_ID`,
`TT_MAC_ID`, `TT_MS_ID` or `TT_ADOBE_ID`.
"""
CharMap_face = """
The parent `Face` object.
"""
CharMap_get_format = """
Get the TrueType/sfnt specific cmap format.
Returns
-------
format : int
The format of ‘charmap’. If ‘charmap’ doesn't belong to a
TrueType/sfnt face, raises a `ValueError`.
"""
CharMap_get_language_id = """
Get the TrueType/sfnt specific cmap language ID.
It will be one of the constants in `TT_MAC_LANGID` or `TT_MS_LANGID`.
Returns
-------
language_id : int
The language ID of the `CharMap`. If it doesn't belong to a
TrueType/sfnt face, raises a `ValueError`.
For a format 14 cmap (to access Unicode IVS), the return value is
0xFFFFFFFF.
"""
CharMap_platform_id = """
An `TT_PLATFORM` id describing the platform for the following encoding
ID. This comes directly from the TrueType specification and should be
emulated for other formats.
"""
ENCODING = """
A tag identifying a `CharMap` type.
- `NONE`: The encoding value 0 is reserved.
- `UNICODE`: Corresponds to the Unicode character set. This value
covers all versions of the Unicode repertoire, including ASCII and
Latin-1. Most fonts include a Unicode charmap, but not all of them.
For example, if you want to access Unicode value U+1F028 (and the
font contains it), use value 0x1F028 as the input value for
`Face.get_char_index`.
- `MS_SYMBOL`: Corresponds to the Microsoft Symbol encoding, used to
encode mathematical symbols in the 32..255 character code range. For
more information, see `<http://www.ceviz.net/symbol.htm>`_.
- `SJIS`: Corresponds to Japanese SJIS encoding. More info at
`<http://langsupport.japanreference.com/encoding.shtml>`_. See note
on multi-byte encodings below.
- `GB2312`: Corresponds to an encoding system for Simplified Chinese
  as used in mainland China.
- `BIG5`: Corresponds to an encoding system for Traditional Chinese as
used in Taiwan and Hong Kong.
- `WANSUNG`: Corresponds to the Korean encoding system known as
Wansung. For more information see
`<http://www.microsoft.com/typography/unicode/949.txt>`_.
- `JOHAB`: The Korean standard character set (KS C 5601-1992), which
corresponds to MS Windows code page 1361. This character set
includes all possible Hangeul character combinations.
- `ADOBE_LATIN_1`: Corresponds to a Latin-1 encoding as defined in a
Type 1 PostScript font. It is limited to 256 character codes.
- `ADOBE_STANDARD`: Corresponds to the Adobe Standard encoding, as
found in Type 1, CFF, and OpenType/CFF fonts. It is limited to 256
character codes.
- `ADOBE_EXPERT`: Corresponds to the Adobe Expert encoding, as found
in Type 1, CFF, and OpenType/CFF fonts. It is limited to 256
character codes.
- `ADOBE_CUSTOM`: Corresponds to a custom encoding, as found in Type
1, CFF, and OpenType/CFF fonts. It is limited to 256 character
codes.
- `APPLE_ROMAN`: Corresponds to the 8-bit Apple roman encoding. Many
TrueType and OpenType fonts contain a charmap for this encoding,
since older versions of Mac OS are able to use it.
"""
| bsd-2-clause |
alianmohammad/pd-gem5 | tests/configs/realview64-minor-dual.py | 33 | 2393 | # Copyright (c) 2012 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Andreas Sandberg
from m5.objects import *
from arm_generic import *
root = LinuxArmFSSystem(machine_type='VExpress_EMM64',
mem_mode='timing',
mem_class=DDR3_1600_x64,
cpu_class=MinorCPU,
num_cpus=2).create_root()
| bsd-3-clause |
jaor/python | bigml/api_handlers/batchcentroidhandler.py | 2 | 4344 | # -*- coding: utf-8 -*-
#
# Copyright 2014-2021 BigML
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Base class for batch centroids' REST calls
https://bigml.com/api/batchcentroids
"""
try:
import simplejson as json
except ImportError:
import json
from bigml.api_handlers.resourcehandler import ResourceHandlerMixin
from bigml.api_handlers.resourcehandler import check_resource_type
from bigml.constants import BATCH_CENTROID_PATH, CLUSTER_PATH
class BatchCentroidHandlerMixin(ResourceHandlerMixin):
"""This class is used by the BigML class as
a mixin that provides the REST calls models. It should not
be instantiated independently.
"""
def __init__(self):
"""Initializes the BatchCentroidHandler. This class is intended to be
used as a mixin on ResourceHandler, that inherits its
attributes and basic method from BigMLConnection, and must not be
instantiated independently.
"""
self.batch_centroid_url = self.prediction_base_url \
+ BATCH_CENTROID_PATH
def create_batch_centroid(self, cluster, dataset,
args=None, wait_time=3, retries=10):
"""Creates a new batch centroid.
"""
create_args = {}
if args is not None:
create_args.update(args)
origin_resources_checked = self.check_origins(
dataset, cluster, create_args, model_types=[CLUSTER_PATH],
wait_time=wait_time, retries=retries)
if origin_resources_checked:
body = json.dumps(create_args)
return self._create(self.batch_centroid_url, body)
def get_batch_centroid(self, batch_centroid, query_string=''):
"""Retrieves a batch centroid.
The batch_centroid parameter should be a string containing the
batch_centroid id or the dict returned by create_batch_centroid.
As batch_centroid is an evolving object that is processed
until it reaches the FINISHED or FAULTY state, the function will
return a dict that encloses the batch_centroid values and state
info available at the time it is called.
"""
check_resource_type(batch_centroid, BATCH_CENTROID_PATH,
message="A batch centroid id is needed.")
return self.get_resource(batch_centroid, query_string=query_string)
def download_batch_centroid(self, batch_centroid, filename=None,
retries=10):
"""Retrieves the batch centroid file.
Downloads centroids, that are stored in a remote CSV file. If
a path is given in filename, the contents of the file are downloaded
and saved locally. A file-like object is returned otherwise.
"""
check_resource_type(batch_centroid, BATCH_CENTROID_PATH,
message="A batch centroid id is needed.")
return self._download_resource(batch_centroid, filename,
retries=retries)
def list_batch_centroids(self, query_string=''):
"""Lists all your batch centroids.
"""
return self._list(self.batch_centroid_url, query_string)
def update_batch_centroid(self, batch_centroid, changes):
"""Updates a batch centroid.
"""
check_resource_type(batch_centroid, BATCH_CENTROID_PATH,
message="A batch centroid id is needed.")
return self.update_resource(batch_centroid, changes)
def delete_batch_centroid(self, batch_centroid):
"""Deletes a batch centroid.
"""
check_resource_type(batch_centroid, BATCH_CENTROID_PATH,
message="A batch centroid id is needed.")
return self.delete_resource(batch_centroid)
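# Illustrative usage sketch (not part of the original module). Assumes a
# BigML ``api`` connection object that mixes in this handler, plus
# hypothetical cluster/dataset ids:
#
#   batch_centroid = api.create_batch_centroid(
#       "cluster/5126965515526876630001b2",
#       "dataset/5126965515526876630001b1",
#       {"name": "my batch centroid"})
#   api.download_batch_centroid(batch_centroid, filename="centroids.csv")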
| apache-2.0 |
mscherer/ansible-modules-core | utilities/logic/async_wrapper.py | 10 | 7027 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2012, Michael DeHaan <[email protected]>, and others
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
try:
import json
except ImportError:
import simplejson as json
import shlex
import os
import subprocess
import sys
import traceback
import signal
import time
import syslog
syslog.openlog('ansible-%s' % os.path.basename(__file__))
syslog.syslog(syslog.LOG_NOTICE, 'Invoked with %s' % " ".join(sys.argv[1:]))
def notice(msg):
syslog.syslog(syslog.LOG_NOTICE, msg)
def daemonize_self():
# daemonizing code: http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/66012
try:
pid = os.fork()
if pid > 0:
# exit first parent
sys.exit(0)
except OSError:
e = sys.exc_info()[1]
sys.exit("fork #1 failed: %d (%s)\n" % (e.errno, e.strerror))
# decouple from parent environment
os.chdir("/")
os.setsid()
os.umask(int('022', 8))
# do second fork
try:
pid = os.fork()
if pid > 0:
# print "Daemon PID %d" % pid
sys.exit(0)
except OSError:
e = sys.exc_info()[1]
sys.exit("fork #2 failed: %d (%s)\n" % (e.errno, e.strerror))
dev_null = file('/dev/null','rw')
os.dup2(dev_null.fileno(), sys.stdin.fileno())
os.dup2(dev_null.fileno(), sys.stdout.fileno())
os.dup2(dev_null.fileno(), sys.stderr.fileno())
def _run_module(wrapped_cmd, jid, job_path):
tmp_job_path = job_path + ".tmp"
jobfile = open(tmp_job_path, "w")
jobfile.write(json.dumps({ "started" : 1, "finished" : 0, "ansible_job_id" : jid }))
jobfile.close()
os.rename(tmp_job_path, job_path)
jobfile = open(tmp_job_path, "w")
result = {}
outdata = ''
try:
cmd = shlex.split(wrapped_cmd)
script = subprocess.Popen(cmd, shell=False, stdin=None, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(outdata, stderr) = script.communicate()
result = json.loads(outdata)
if stderr:
result['stderr'] = stderr
jobfile.write(json.dumps(result))
except (OSError, IOError):
e = sys.exc_info()[1]
result = {
"failed": 1,
"cmd" : wrapped_cmd,
"msg": str(e),
}
result['ansible_job_id'] = jid
jobfile.write(json.dumps(result))
except:
result = {
"failed" : 1,
"cmd" : wrapped_cmd,
"data" : outdata, # temporary notice only
"msg" : traceback.format_exc()
}
result['ansible_job_id'] = jid
jobfile.write(json.dumps(result))
jobfile.close()
os.rename(tmp_job_path, job_path)
####################
## main ##
####################
if __name__ == '__main__':
if len(sys.argv) < 3:
print(json.dumps({
"failed" : True,
"msg" : "usage: async_wrapper <jid> <time_limit> <modulescript> <argsfile>. Humans, do not call directly!"
}))
sys.exit(1)
jid = "%s.%d" % (sys.argv[1], os.getpid())
time_limit = sys.argv[2]
wrapped_module = sys.argv[3]
if len(sys.argv) >= 5:
argsfile = sys.argv[4]
cmd = "%s %s" % (wrapped_module, argsfile)
else:
cmd = wrapped_module
step = 5
# setup job output directory
jobdir = os.path.expanduser("~/.ansible_async")
job_path = os.path.join(jobdir, jid)
if not os.path.exists(jobdir):
try:
os.makedirs(jobdir)
except:
print(json.dumps({
"failed" : 1,
"msg" : "could not create: %s" % jobdir
}))
# immediately exit this process, leaving an orphaned process
# running which immediately forks a supervisory timing process
try:
pid = os.fork()
if pid:
# Notify the overlord that the async process started
            # we need to not return immediately such that the launched command has an attempt
# to initialize PRIOR to ansible trying to clean up the launch directory (and argsfile)
# this probably could be done with some IPC later. Modules should always read
# the argsfile at the very first start of their execution anyway
notice("Return async_wrapper task started.")
print(json.dumps({ "started" : 1, "finished" : 0, "ansible_job_id" : jid, "results_file" : job_path }))
sys.stdout.flush()
time.sleep(1)
sys.exit(0)
else:
# The actual wrapper process
# Daemonize, so we keep on running
daemonize_self()
# we are now daemonized, create a supervisory process
notice("Starting module and watcher")
sub_pid = os.fork()
if sub_pid:
# the parent stops the process after the time limit
remaining = int(time_limit)
# set the child process group id to kill all children
os.setpgid(sub_pid, sub_pid)
notice("Start watching %s (%s)"%(sub_pid, remaining))
time.sleep(step)
while os.waitpid(sub_pid, os.WNOHANG) == (0, 0):
notice("%s still running (%s)"%(sub_pid, remaining))
time.sleep(step)
remaining = remaining - step
if remaining <= 0:
notice("Now killing %s"%(sub_pid))
os.killpg(sub_pid, signal.SIGKILL)
notice("Sent kill to group %s"%sub_pid)
time.sleep(1)
sys.exit(0)
notice("Done in kid B.")
sys.exit(0)
else:
# the child process runs the actual module
notice("Start module (%s)"%os.getpid())
_run_module(cmd, jid, job_path)
notice("Module complete (%s)"%os.getpid())
sys.exit(0)
except SystemExit:
# On python2.4, SystemExit is a subclass of Exception.
# This block makes python2.4 behave the same as python2.5+
raise
except Exception:
e = sys.exc_info()[1]
notice("error: %s"%(e))
print(json.dumps({
"failed" : True,
"msg" : "FATAL ERROR: %s" % str(e)
}))
sys.exit(1)
| gpl-3.0 |
torhve/FrameworkBenchmarks | frameworks/Go/beego/setup.py | 6 | 1099 |
import subprocess
import sys
import os
def start(args, logfile, errfile):
if os.name == 'nt':
subprocess.call("set GOPATH=C:\\FrameworkBenchmarks\\beego&&go get ./...", shell=True, cwd="beego", stderr=errfile, stdout=logfile)
subprocess.Popen("setup.bat", shell=True, cwd="beego", stderr=errfile, stdout=logfile)
return 0
os.environ["GOPATH"] = os.path.expanduser('~/FrameworkBenchmarks/beego')
subprocess.call("go get ./...", shell=True, cwd="beego", stderr=errfile, stdout=logfile)
subprocess.Popen("go run src/hello/hello.go".rsplit(" "), cwd="beego", stderr=errfile, stdout=logfile)
return 0
def stop(logfile, errfile):
if os.name == 'nt':
subprocess.call("taskkill /f /im go.exe > NUL", shell=True, stderr=errfile, stdout=logfile)
subprocess.call("taskkill /f /im hello.exe > NUL", shell=True, stderr=errfile, stdout=logfile)
return 0
p = subprocess.Popen(['ps', 'aux'], stdout=subprocess.PIPE)
out, err = p.communicate()
for line in out.splitlines():
if 'hello' in line:
pid = int(line.split(None, 2)[1])
os.kill(pid, 15)
return 0
| bsd-3-clause |
torchingloom/edx-platform | common/lib/chem/chem/chemcalc.py | 67 | 14752 | from __future__ import division
from fractions import Fraction
from pyparsing import (Literal, StringEnd, OneOrMore, ParseException)
import nltk
from nltk.tree import Tree
ARROWS = ('<->', '->')
## Defines a simple pyparsing tokenizer for chemical equations
elements = ['Ac', 'Ag', 'Al', 'Am', 'Ar', 'As', 'At', 'Au', 'B', 'Ba', 'Be',
'Bh', 'Bi', 'Bk', 'Br', 'C', 'Ca', 'Cd', 'Ce', 'Cf', 'Cl', 'Cm',
'Cn', 'Co', 'Cr', 'Cs', 'Cu', 'Db', 'Ds', 'Dy', 'Er', 'Es', 'Eu',
'F', 'Fe', 'Fl', 'Fm', 'Fr', 'Ga', 'Gd', 'Ge', 'H', 'He', 'Hf',
'Hg', 'Ho', 'Hs', 'I', 'In', 'Ir', 'K', 'Kr', 'La', 'Li', 'Lr',
'Lu', 'Lv', 'Md', 'Mg', 'Mn', 'Mo', 'Mt', 'N', 'Na', 'Nb', 'Nd',
'Ne', 'Ni', 'No', 'Np', 'O', 'Os', 'P', 'Pa', 'Pb', 'Pd', 'Pm',
'Po', 'Pr', 'Pt', 'Pu', 'Ra', 'Rb', 'Re', 'Rf', 'Rg', 'Rh', 'Rn',
'Ru', 'S', 'Sb', 'Sc', 'Se', 'Sg', 'Si', 'Sm', 'Sn', 'Sr', 'Ta',
'Tb', 'Tc', 'Te', 'Th', 'Ti', 'Tl', 'Tm', 'U', 'Uuo', 'Uup',
'Uus', 'Uut', 'V', 'W', 'Xe', 'Y', 'Yb', 'Zn', 'Zr']
digits = map(str, range(10))
symbols = list("[](){}^+-/")
phases = ["(s)", "(l)", "(g)", "(aq)"]
tokens = reduce(lambda a, b: a ^ b, map(Literal, elements + digits + symbols + phases))
tokenizer = OneOrMore(tokens) + StringEnd()
def _orjoin(l):
return "'" + "' | '".join(l) + "'"
## Defines an NLTK parser for tokenized expressions
grammar = """
S -> multimolecule | multimolecule '+' S
multimolecule -> count molecule | molecule
count -> number | number '/' number
molecule -> unphased | unphased phase
unphased -> group | paren_group_round | paren_group_square
element -> """ + _orjoin(elements) + """
digit -> """ + _orjoin(digits) + """
phase -> """ + _orjoin(phases) + """
number -> digit | digit number
group -> suffixed | suffixed group
paren_group_round -> '(' group ')'
paren_group_square -> '[' group ']'
plus_minus -> '+' | '-'
number_suffix -> number
ion_suffix -> '^' number plus_minus | '^' plus_minus
suffix -> number_suffix | number_suffix ion_suffix | ion_suffix
unsuffixed -> element | paren_group_round | paren_group_square
suffixed -> unsuffixed | unsuffixed suffix
"""
parser = nltk.ChartParser(nltk.parse_cfg(grammar))
def _clean_parse_tree(tree):
''' The parse tree contains a lot of redundant
nodes. E.g. paren_groups have groups as children, etc. This will
clean up the tree.
'''
def unparse_number(n):
''' Go from a number parse tree to a number '''
if len(n) == 1:
rv = n[0][0]
else:
rv = n[0][0] + unparse_number(n[1])
return rv
def null_tag(n):
''' Remove a tag '''
return n[0]
def ion_suffix(n):
'''1. "if" part handles special case
2. "else" part is general behaviour '''
if n[1:][0].node == 'number' and n[1:][0][0][0] == '1':
# if suffix is explicitly 1, like ^1-
# strip 1, leave only sign: ^-
return nltk.tree.Tree(n.node, n[2:])
else:
return nltk.tree.Tree(n.node, n[1:])
dispatch = {'number': lambda x: nltk.tree.Tree("number", [unparse_number(x)]),
'unphased': null_tag,
'unsuffixed': null_tag,
'number_suffix': lambda x: nltk.tree.Tree('number_suffix', [unparse_number(x[0])]),
'suffixed': lambda x: len(x) > 1 and x or x[0],
'ion_suffix': ion_suffix,
'paren_group_square': lambda x: nltk.tree.Tree(x.node, x[1]),
'paren_group_round': lambda x: nltk.tree.Tree(x.node, x[1])}
if type(tree) == str:
return tree
old_node = None
## This loop means that if a node is processed, and returns a child,
## the child will be processed.
while tree.node in dispatch and tree.node != old_node:
old_node = tree.node
tree = dispatch[tree.node](tree)
children = []
for child in tree:
child = _clean_parse_tree(child)
children.append(child)
tree = nltk.tree.Tree(tree.node, children)
return tree
def _merge_children(tree, tags):
''' nltk, by documentation, cannot do arbitrary length
groups. Instead of:
(group 1 2 3 4)
It has to handle this recursively:
(group 1 (group 2 (group 3 (group 4))))
We do the cleanup of converting from the latter to the former.
'''
if tree is None:
# There was a problem--shouldn't have empty trees (NOTE: see this with input e.g. 'H2O(', or 'Xe+').
# Haven't grokked the code to tell if this is indeed the right thing to do.
raise ParseException("Shouldn't have empty trees")
if type(tree) == str:
return tree
merged_children = []
done = False
#print '00000', tree
## Merge current tag
while not done:
done = True
for child in tree:
if type(child) == nltk.tree.Tree and child.node == tree.node and tree.node in tags:
merged_children = merged_children + list(child)
done = False
else:
merged_children = merged_children + [child]
tree = nltk.tree.Tree(tree.node, merged_children)
merged_children = []
#print '======',tree
# And recurse
children = []
for child in tree:
children.append(_merge_children(child, tags))
#return tree
return nltk.tree.Tree(tree.node, children)
def _render_to_html(tree):
''' Renders a cleaned tree to HTML '''
def molecule_count(tree, children):
# If an integer, return that integer
if len(tree) == 1:
return tree[0][0]
# If a fraction, return the fraction
if len(tree) == 3:
return " <sup>{num}</sup>⁄<sub>{den}</sub> ".format(num=tree[0][0], den=tree[2][0])
return "Error"
def subscript(tree, children):
return "<sub>{sub}</sub>".format(sub=children)
def superscript(tree, children):
return "<sup>{sup}</sup>".format(sup=children)
def round_brackets(tree, children):
return "({insider})".format(insider=children)
def square_brackets(tree, children):
return "[{insider}]".format(insider=children)
dispatch = {'count': molecule_count,
'number_suffix': subscript,
'ion_suffix': superscript,
'paren_group_round': round_brackets,
'paren_group_square': square_brackets}
if type(tree) == str:
return tree
else:
children = "".join(map(_render_to_html, tree))
if tree.node in dispatch:
return dispatch[tree.node](tree, children)
else:
return children.replace(' ', '')
def render_to_html(eq):
'''
Render a chemical equation string to html.
Renders each molecule separately, and returns invalid input wrapped in a <span>.
'''
def err(s):
"Render as an error span"
return '<span class="inline-error inline">{0}</span>'.format(s)
def render_arrow(arrow):
"""Turn text arrows into pretty ones"""
if arrow == '->':
return u'\u2192'
if arrow == '<->':
return u'\u2194'
# this won't be reached unless we add more arrow types, but keep it to avoid explosions when
# that happens.
return arrow
def render_expression(ex):
"""
Render a chemical expression--no arrows.
"""
try:
return _render_to_html(_get_final_tree(ex))
except ParseException:
return err(ex)
def spanify(s):
return u'<span class="math">{0}</span>'.format(s)
left, arrow, right = split_on_arrow(eq)
if arrow == '':
# only one side
return spanify(render_expression(left))
return spanify(render_expression(left) + render_arrow(arrow) + render_expression(right))
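# Illustrative example (the exact markup is inferred from the helpers above,
# not taken from module documentation):
#
#   render_to_html('H2O + CO2 -> H2CO3')
#   # -> u'<span class="math">H<sub>2</sub>O+CO<sub>2</sub>\u2192H<sub>2</sub>CO<sub>3</sub></span>'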
def _get_final_tree(s):
'''
Return final tree after merge and clean.
Raises pyparsing.ParseException if s is invalid.
'''
tokenized = tokenizer.parseString(s)
parsed = parser.parse(tokenized)
merged = _merge_children(parsed, {'S', 'group'})
final = _clean_parse_tree(merged)
return final
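# A minimal usage sketch (hypothetical input; the exact tree shape depends
# on the grammar defined earlier in this module):
#
#   tree = _get_final_tree('2H2O')   # parse, merge and clean '2 H2O'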
def _check_equality(tuple1, tuple2):
''' return True if tuples of multimolecules are equal '''
list1 = list(tuple1)
list2 = list(tuple2)
# Hypothesis: trees whose levels mix count+molecule with bare molecule
# nodes cannot be sorted properly (observed in test_complex_additivity).
# Without factors and phases, however, sorting seems to work, and it
# also works fine for lists of multimolecules stripped of factors and
# phases.
list1.sort()
list2.sort()
return list1 == list2
def compare_chemical_expression(s1, s2, ignore_state=False):
''' Compare two chemical expressions for equivalence.
Delegates to divide_chemical_expression and checks whether the ratio is 1.
'''
return divide_chemical_expression(s1, s2, ignore_state) == 1
def divide_chemical_expression(s1, s2, ignore_state=False):
'''Compare two chemical expressions for equivalence up to a multiplicative factor:
- If they are not the same chemicals, returns False.
- If they are the same, "divide" s1 by s2 to return a factor x such that s1 / s2 == x, as a Fraction object.
- If ignore_state is True, phases are ignored during the comparison.
Examples:
divide_chemical_expression("H2O", "3H2O") -> Fraction(1,3)
divide_chemical_expression("3H2O", "H2O") -> 3 # actually Fraction(3, 1), but compares == to 3.
divide_chemical_expression("2H2O(s) + 2CO2", "H2O(s)+CO2") -> 2
divide_chemical_expression("H2O(s) + CO2", "3H2O(s)+2CO2") -> False
Implementation sketch:
- extract factors and phases to standalone lists,
- compare expressions without factors and phases,
- divide lists of factors for each other and check
for equality of every element in list,
- return result of factor division
'''
# parsed final trees
treedic = {}
treedic['1'] = _get_final_tree(s1)
treedic['2'] = _get_final_tree(s2)
# strip phases and factors
# collect factors in list
for i in ('1', '2'):
treedic[i + ' cleaned_mm_list'] = []
treedic[i + ' factors'] = []
treedic[i + ' phases'] = []
for el in treedic[i].subtrees(filter=lambda t: t.node == 'multimolecule'):
count_subtree = [t for t in el.subtrees() if t.node == 'count']
group_subtree = [t for t in el.subtrees() if t.node == 'group']
phase_subtree = [t for t in el.subtrees() if t.node == 'phase']
if count_subtree:
if len(count_subtree[0]) > 1:
treedic[i + ' factors'].append(
int(count_subtree[0][0][0]) /
float(count_subtree[0][2][0]))  # float() guards against integer floor division on Python 2
else:
treedic[i + ' factors'].append(int(count_subtree[0][0][0]))
else:
treedic[i + ' factors'].append(1.0)
if phase_subtree:
treedic[i + ' phases'].append(phase_subtree[0][0])
else:
treedic[i + ' phases'].append(' ')
treedic[i + ' cleaned_mm_list'].append(
Tree('multimolecule', [Tree('molecule', group_subtree)]))
# order of factors and phases must mirror the order of multimolecules,
# use 'decorate, sort, undecorate' pattern
treedic['1 cleaned_mm_list'], treedic['1 factors'], treedic['1 phases'] = zip(
*sorted(zip(treedic['1 cleaned_mm_list'], treedic['1 factors'], treedic['1 phases'])))
treedic['2 cleaned_mm_list'], treedic['2 factors'], treedic['2 phases'] = zip(
*sorted(zip(treedic['2 cleaned_mm_list'], treedic['2 factors'], treedic['2 phases'])))
# check if expressions are correct without factors
if not _check_equality(treedic['1 cleaned_mm_list'], treedic['2 cleaned_mm_list']):
return False
# phases are governed by the ignore_state flag
if not ignore_state: # phases matter
if treedic['1 phases'] != treedic['2 phases']:
return False
if any(map(lambda x, y: x / y - treedic['1 factors'][0] / treedic['2 factors'][0],
treedic['1 factors'], treedic['2 factors'])):
# factors are not proportional
return False
else:
# return ratio
return Fraction(treedic['1 factors'][0] / treedic['2 factors'][0])
def split_on_arrow(eq):
"""
Split a string on an arrow and return (left, arrow, right). If there is
no arrow, return the entire eq as left, with '' for arrow and right.
"""
# order matters -- need to try <-> first
for arrow in ARROWS:
left, a, right = eq.partition(arrow)
if a != '':
return left, a, right
return eq, '', ''
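# A minimal usage sketch (hypothetical inputs; relies on ARROWS listing
# '<->' before '->', as noted above):
#
#   split_on_arrow('2H2 + O2 -> 2H2O')  # ('2H2 + O2 ', '->', ' 2H2O')
#   split_on_arrow('H2O')               # ('H2O', '', '')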
def chemical_equations_equal(eq1, eq2, exact=False):
"""
Check whether two chemical equations (i.e. expressions containing arrows) are the same.
If exact is False, then they are considered equal if they differ by a
constant factor.
arrows matter: -> and <-> are different.
e.g.
chemical_equations_equal('H2 + O2 -> H2O2', 'O2 + H2 -> H2O2') -> True
chemical_equations_equal('H2 + O2 -> H2O2', 'O2 + 2H2 -> H2O2') -> False
chemical_equations_equal('H2 + O2 -> H2O2', 'O2 + H2 <-> H2O2') -> False
chemical_equations_equal('H2 + O2 -> H2O2', '2 H2 + 2 O2 -> 2 H2O2') -> True
chemical_equations_equal('H2 + O2 -> H2O2', '2 H2 + 2 O2 -> 2 H2O2', exact=True) -> False
If there's a syntax error, we return False.
"""
left1, arrow1, right1 = split_on_arrow(eq1)
left2, arrow2, right2 = split_on_arrow(eq2)
if arrow1 == '' or arrow2 == '':
return False
# TODO: may want to be able to give student helpful feedback about why things didn't work.
if arrow1 != arrow2:
# arrows don't match
return False
try:
factor_left = divide_chemical_expression(left1, left2)
if not factor_left:
# left sides don't match
return False
factor_right = divide_chemical_expression(right1, right2)
if not factor_right:
# right sides don't match
return False
if factor_left != factor_right:
# factors don't match (molecule counts don't add up)
return False
if exact and factor_left != 1:
# want an exact match.
return False
return True
except ParseException:
# Don't want external users to have to deal with parsing exceptions. Just return False.
return False
| agpl-3.0 |
bearstech/ansible | lib/ansible/module_utils/manageiq.py | 6 | 4239 | #
# Copyright (c) 2017, Daniel Korn <[email protected]>
#
# This code is part of Ansible, but is an independent component.
# This particular file snippet, and this file snippet only, is BSD licensed.
# Modules you write using this snippet, which is embedded dynamically by Ansible
# still belong to the author of the module, and may assign their own license
# to the complete work.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import os
try:
from manageiq_client.api import ManageIQClient
HAS_CLIENT = True
except ImportError:
HAS_CLIENT = False
def manageiq_argument_spec():
return dict(
url=dict(default=os.environ.get('MIQ_URL', None)),
username=dict(default=os.environ.get('MIQ_USERNAME', None)),
password=dict(default=os.environ.get('MIQ_PASSWORD', None), no_log=True),
verify_ssl=dict(default=True, type='bool'),
ca_bundle_path=dict(required=False, default=None),
)
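# A minimal sketch of how a module might embed this spec (illustration
# only; ManageIQ.__init__ below expects the options nested under a
# 'manageiq_connection' key):
#
#   argument_spec = dict(
#       manageiq_connection=dict(type='dict',
#                                options=manageiq_argument_spec()),
#   )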
def check_client(module):
if not HAS_CLIENT:
module.fail_json(msg='manageiq_client.api is required for this module')
class ManageIQ(object):
"""
class encapsulating ManageIQ API client.
"""
def __init__(self, module):
# handle import errors
check_client(module)
params = module.params['manageiq_connection']
# check for required arguments
for arg in ['url', 'username', 'password']:
if params[arg] in (None, ''):
module.fail_json(msg="missing required argument: manageiq_connection[{}]".format(arg))
url = params['url']
username = params['username']
password = params['password']
verify_ssl = params['verify_ssl']
ca_bundle_path = params['ca_bundle_path']
self._module = module
self._api_url = url + '/api'
self._client = ManageIQClient(self._api_url, (username, password), verify_ssl=verify_ssl, ca_bundle_path=ca_bundle_path)
@property
def module(self):
""" Ansible module module
Returns:
the ansible module
"""
return self._module
@property
def api_url(self):
""" Base ManageIQ API
Returns:
the base ManageIQ API
"""
return self._api_url
@property
def client(self):
""" ManageIQ client
Returns:
the ManageIQ client
"""
return self._client
def find_collection_resource_by(self, collection_name, **params):
""" Searches the collection resource by the collection name and the param passed.
Returns:
the resource as an object if it exists in manageiq, None otherwise.
"""
try:
entity = getattr(self.client.collections, collection_name).get(**params)
except ValueError:
return None
except Exception as e:
self.module.fail_json(msg="failed to find resource {error}".format(error=e))
return vars(entity)
| gpl-3.0 |
MikeLing/shogun | examples/undocumented/python/multiclass_randomforest.py | 6 | 1175 | #!/usr/bin/env python
from numpy import array
traindat = '../data/fm_train_real.dat'
testdat = '../data/fm_test_real.dat'
label_traindat = '../data/label_train_multiclass.dat'
# set both input attributes as not nominal (ie. continuous)
feattypes = array([False, False])
parameter_list = [[traindat,testdat,label_traindat,feattypes]]
def multiclass_randomforest(train=traindat,test=testdat,labels=label_traindat,ft=feattypes):
try:
from shogun import RealFeatures, MulticlassLabels, CSVFile, RandomForest, MajorityVote
except ImportError:
print("Could not import Shogun modules")
return
# wrap features and labels into Shogun objects
feats_train=RealFeatures(CSVFile(train))
feats_test=RealFeatures(CSVFile(test))
train_labels=MulticlassLabels(CSVFile(labels))
# Random Forest formation
rand_forest=RandomForest(feats_train,train_labels,20,1)
rand_forest.set_feature_types(ft)
rand_forest.set_combination_rule(MajorityVote())
rand_forest.train()
# Classify test data
output=rand_forest.apply_multiclass(feats_test).get_labels()
return rand_forest,output
if __name__=='__main__':
print('RandomForest')
multiclass_randomforest(*parameter_list[0])
| gpl-3.0 |
kenwith/cs561 | cs561-as2-kenwith/pox_module/cs561/ofhandler.py | 1 | 5315 | # Copyright 2011 James McCauley
#
# This file is part of POX.
#
# POX is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# POX is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with POX. If not, see <http://www.gnu.org/licenses/>.
"""
This is an L2 learning switch written directly against the OpenFlow library.
It is derived from one written live for an SDN crash course.
"""
from pox.core import core
import pox.openflow.libopenflow_01 as of
from pox.lib.revent import *
from pox.lib.util import dpidToStr
from pox.lib.util import str_to_bool
from pox.lib.packet.ethernet import ethernet
from pox.lib.packet.ipv4 import ipv4
import pox.lib.packet.icmp as icmp
from pox.lib.packet.arp import arp
from pox.lib.packet.udp import udp
from pox.lib.packet.dns import dns
from pox.lib.addresses import IPAddr, EthAddr
import time
import code
import os
import struct
import sys
log = core.getLogger()
FLOOD_DELAY = 5
#default location /home/networks/cs561-as2/IP_CONFIG
IPCONFIG_FILE = './IP_CONFIG'
IP_SETTING={}
RTABLE = []
ROUTER_IP={}
#Topology is fixed
#sw0-eth1:server1-eth0 sw0-eth2:server2-eth0 sw0-eth3:client
class RouterInfo(Event):
'''Event to raise upon the information about an openflow router is ready'''
def __init__(self, info, rtable):
Event.__init__(self)
self.info = info
self.rtable = rtable
class OFHandler (EventMixin):
def __init__ (self, connection, transparent):
# Switch we'll be adding L2 learning switch capabilities to
self.connection = connection
self.transparent = transparent
self.sw_info = {}
self.connection.send(of.ofp_switch_config(miss_send_len = 20000))
# self.connection.send(of.ofp_switch_config(miss_send_len = 65535))
for port in connection.features.ports:
intf_name = port.name.split('-')
if(len(intf_name) < 2):
continue
else:
intf_name = intf_name[1]
if intf_name in ROUTER_IP.keys():
self.sw_info[intf_name] = (ROUTER_IP[intf_name], port.hw_addr.toStr(), '10Gbps', port.port_no)
self.rtable = RTABLE
# We want to hear Openflow PacketIn messages, so we listen
self.listenTo(connection)
self.listenTo(core.cs561_srhandler)
core.cs561_ofhandler.raiseEvent(RouterInfo(self.sw_info, self.rtable))
def _handle_PacketIn (self, event):
"""
Handles packet_in messages from the switch by forwarding them to the SR handler.
"""
pkt = event.parse()
raw_packet = pkt.raw
core.cs561_ofhandler.raiseEvent(SRPacketIn(raw_packet, event.port))
msg = of.ofp_packet_out()
msg.buffer_id = event.ofp.buffer_id
msg.in_port = event.port
self.connection.send(msg)
def _handle_SRPacketOut(self, event):
msg = of.ofp_packet_out()
new_packet = event.pkt
msg.actions.append(of.ofp_action_output(port=event.port))
msg.buffer_id = -1
msg.in_port = of.OFPP_NONE
msg.data = new_packet
self.connection.send(msg)
class SRPacketIn(Event):
'''Event to raise upon a receive a packet_in from openflow'''
def __init__(self, packet, port):
Event.__init__(self)
self.pkt = packet
self.port = port
class cs561_ofhandler (EventMixin):
"""
Waits for OpenFlow switches to connect and makes them learning switches.
"""
_eventMixin_events = set([SRPacketIn, RouterInfo])
def __init__ (self, transparent):
EventMixin.__init__(self)
self.listenTo(core.openflow)
self.transparent = transparent
def _handle_ConnectionUp (self, event):
log.debug("Connection %s" % (event.connection,))
OFHandler(event.connection, self.transparent)
def get_ip_setting():
if (not os.path.isfile(IPCONFIG_FILE)):
return -1
f = open(IPCONFIG_FILE, 'r')
for line in f:
if(len(line.split()) == 0):
break
name, ip = line.split()
if ip == "<ELASTIC_IP>":
log.info("ip configuration is not set, please put your Elastic IP addresses into %s" % IPCONFIG_FILE)
sys.exit(2)
#print name, ip
IP_SETTING[name] = ip
RTABLE.append( ('%s' % IP_SETTING['client'], '%s' % IP_SETTING['client'], '255.255.255.255', 'eth3') )
RTABLE.append( ('%s' % IP_SETTING['server1'], '%s' % IP_SETTING['server1'], '255.255.255.255', 'eth1') )
RTABLE.append( ('%s' % IP_SETTING['server2'], '%s' % IP_SETTING['server2'], '255.255.255.255', 'eth2') )
ROUTER_IP['eth1'] = '%s' % IP_SETTING['sw0-eth1']
ROUTER_IP['eth2'] = '%s' % IP_SETTING['sw0-eth2']
ROUTER_IP['eth3'] = '%s' % IP_SETTING['sw0-eth3']
return 0
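# The IP_CONFIG file is expected to hold one "<name> <ip>" pair per line,
# e.g. (hypothetical addresses):
#
#   client 54.0.0.1
#   server1 54.0.0.2
#   server2 54.0.0.3
#   sw0-eth1 54.0.0.4
#   sw0-eth2 54.0.0.5
#   sw0-eth3 54.0.0.6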
def launch (transparent=False):
"""
Starts a simple router topology handler.
"""
core.registerNew(cs561_ofhandler, str_to_bool(transparent))
r = get_ip_setting()
if r == -1:
log.debug("Couldn't load config file for ip addresses, check whether %s exists" % IPCONFIG_FILE)
sys.exit(2)
else:
log.debug('*** ofhandler: Successfully loaded ip settings for hosts\n %s\n' % IP_SETTING)
| gpl-3.0 |
BigDataforYou/movie_recommendation_workshop_1 | big_data_4_you_demo_1/venv/lib/python2.7/site-packages/pip/commands/__init__.py | 143 | 2145 | """
Package containing all pip commands
"""
from __future__ import absolute_import
from pip.commands.completion import CompletionCommand
from pip.commands.download import DownloadCommand
from pip.commands.freeze import FreezeCommand
from pip.commands.hash import HashCommand
from pip.commands.help import HelpCommand
from pip.commands.list import ListCommand
from pip.commands.search import SearchCommand
from pip.commands.show import ShowCommand
from pip.commands.install import InstallCommand
from pip.commands.uninstall import UninstallCommand
from pip.commands.wheel import WheelCommand
commands_dict = {
CompletionCommand.name: CompletionCommand,
FreezeCommand.name: FreezeCommand,
HashCommand.name: HashCommand,
HelpCommand.name: HelpCommand,
SearchCommand.name: SearchCommand,
ShowCommand.name: ShowCommand,
InstallCommand.name: InstallCommand,
UninstallCommand.name: UninstallCommand,
DownloadCommand.name: DownloadCommand,
ListCommand.name: ListCommand,
WheelCommand.name: WheelCommand,
}
commands_order = [
InstallCommand,
DownloadCommand,
UninstallCommand,
FreezeCommand,
ListCommand,
ShowCommand,
SearchCommand,
WheelCommand,
HashCommand,
CompletionCommand,
HelpCommand,
]
def get_summaries(ordered=True):
"""Yields sorted (command name, command summary) tuples."""
if ordered:
cmditems = _sort_commands(commands_dict, commands_order)
else:
cmditems = commands_dict.items()
for name, command_class in cmditems:
yield (name, command_class.summary)
def get_similar_commands(name):
"""Command name auto-correct."""
from difflib import get_close_matches
name = name.lower()
close_commands = get_close_matches(name, commands_dict.keys())
if close_commands:
return close_commands[0]
else:
return False
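# For example, get_similar_commands('instal') would typically return
# 'install', while a name with no close match returns False.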
def _sort_commands(cmddict, order):
def keyfn(key):
try:
return order.index(key[1])
except ValueError:
# unordered items should come last
return 0xff
return sorted(cmddict.items(), key=keyfn)
| mit |
programadorjc/django | tests/i18n/test_percents.py | 222 | 6847 | # -*- encoding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.template import Context, Template
from django.test import SimpleTestCase, override_settings
from django.utils._os import upath
from django.utils.encoding import force_text
from django.utils.translation import activate, get_language, trans_real
from .test_extraction import ExtractorTests
SAMPLEPROJECT_DIR = os.path.join(os.path.dirname(os.path.abspath(upath(__file__))), 'sampleproject')
SAMPLEPROJECT_LOCALE = os.path.join(SAMPLEPROJECT_DIR, 'locale')
@override_settings(LOCALE_PATHS=[SAMPLEPROJECT_LOCALE])
class FrenchTestCase(SimpleTestCase):
"""Tests using the French translations of the sampleproject."""
PO_FILE = os.path.join(SAMPLEPROJECT_LOCALE, 'fr', 'LC_MESSAGES', 'django.po')
def setUp(self):
self._language = get_language()
self._translations = trans_real._translations
activate('fr')
def tearDown(self):
trans_real._translations = self._translations
activate(self._language)
class ExtractingStringsWithPercentSigns(FrenchTestCase, ExtractorTests):
"""
Tests the extracted strings found in the gettext catalog.
Ensures that percent signs are python formatted.
Each of these tests should have an analogous translation test below, ensuring
the python formatting does not persist through to a rendered template.
"""
def setUp(self):
super(ExtractingStringsWithPercentSigns, self).setUp()
with open(self.PO_FILE, 'r') as fp:
self.po_contents = force_text(fp.read())
def test_trans_tag_with_percent_symbol_at_the_end(self):
self.assertMsgId('Literal with a percent symbol at the end %%', self.po_contents)
def test_trans_tag_with_percent_symbol_in_the_middle(self):
self.assertMsgId('Literal with a percent %% symbol in the middle', self.po_contents)
self.assertMsgId('It is 100%%', self.po_contents)
def test_trans_tag_with_string_that_look_like_fmt_spec(self):
self.assertMsgId('Looks like a str fmt spec %%s but should not be interpreted as such', self.po_contents)
self.assertMsgId('Looks like a str fmt spec %% o but should not be interpreted as such', self.po_contents)
def test_adds_python_format_to_all_percent_signs(self):
self.assertMsgId('1 percent sign %%, 2 percent signs %%%%, 3 percent signs %%%%%%', self.po_contents)
self.assertMsgId('%(name)s says: 1 percent sign %%, 2 percent signs %%%%', self.po_contents)
class RenderingTemplatesWithPercentSigns(FrenchTestCase):
"""
Test rendering of templates that use percent signs.
Ensures both trans and blocktrans tags behave consistently.
Refs #11240, #11966, #24257
"""
def test_translates_with_a_percent_symbol_at_the_end(self):
expected = 'Littérale avec un symbole de pour cent à la fin %'
trans_tpl = Template('{% load i18n %}{% trans "Literal with a percent symbol at the end %" %}')
self.assertEqual(trans_tpl.render(Context({})), expected)
block_tpl = Template(
'{% load i18n %}{% blocktrans %}Literal with a percent symbol at '
'the end %{% endblocktrans %}'
)
self.assertEqual(block_tpl.render(Context({})), expected)
def test_translates_with_percent_symbol_in_the_middle(self):
expected = 'Pour cent littérale % avec un symbole au milieu'
trans_tpl = Template('{% load i18n %}{% trans "Literal with a percent % symbol in the middle" %}')
self.assertEqual(trans_tpl.render(Context({})), expected)
block_tpl = Template(
'{% load i18n %}{% blocktrans %}Literal with a percent % symbol '
'in the middle{% endblocktrans %}'
)
self.assertEqual(block_tpl.render(Context({})), expected)
def test_translates_with_percent_symbol_using_context(self):
trans_tpl = Template('{% load i18n %}{% trans "It is 100%" %}')
self.assertEqual(trans_tpl.render(Context({})), 'Il est de 100%')
trans_tpl = Template('{% load i18n %}{% trans "It is 100%" context "female" %}')
self.assertEqual(trans_tpl.render(Context({})), 'Elle est de 100%')
block_tpl = Template('{% load i18n %}{% blocktrans %}It is 100%{% endblocktrans %}')
self.assertEqual(block_tpl.render(Context({})), 'Il est de 100%')
block_tpl = Template('{% load i18n %}{% blocktrans context "female" %}It is 100%{% endblocktrans %}')
self.assertEqual(block_tpl.render(Context({})), 'Elle est de 100%')
def test_translates_with_string_that_look_like_fmt_spec_with_trans(self):
# tests "%s"
expected = ('On dirait un spec str fmt %s mais ne devrait pas être interprété comme plus disponible')
trans_tpl = Template(
'{% load i18n %}{% trans "Looks like a str fmt spec %s but '
'should not be interpreted as such" %}'
)
self.assertEqual(trans_tpl.render(Context({})), expected)
block_tpl = Template(
'{% load i18n %}{% blocktrans %}Looks like a str fmt spec %s but '
'should not be interpreted as such{% endblocktrans %}'
)
self.assertEqual(block_tpl.render(Context({})), expected)
# tests "% o"
expected = ('On dirait un spec str fmt % o mais ne devrait pas être interprété comme plus disponible')
trans_tpl = Template(
'{% load i18n %}{% trans "Looks like a str fmt spec % o but should not be '
'interpreted as such" %}'
)
self.assertEqual(trans_tpl.render(Context({})), expected)
block_tpl = Template(
'{% load i18n %}{% blocktrans %}Looks like a str fmt spec % o but should not be '
'interpreted as such{% endblocktrans %}'
)
self.assertEqual(block_tpl.render(Context({})), expected)
def test_translates_multiple_percent_signs(self):
expected = ('1 % signe pour cent, signes %% 2 pour cent, trois signes de pourcentage %%%')
trans_tpl = Template(
'{% load i18n %}{% trans "1 percent sign %, 2 percent signs %%, '
'3 percent signs %%%" %}'
)
self.assertEqual(trans_tpl.render(Context({})), expected)
block_tpl = Template(
'{% load i18n %}{% blocktrans %}1 percent sign %, 2 percent signs '
'%%, 3 percent signs %%%{% endblocktrans %}'
)
self.assertEqual(block_tpl.render(Context({})), expected)
block_tpl = Template(
'{% load i18n %}{% blocktrans %}{{name}} says: 1 percent sign %, '
'2 percent signs %%{% endblocktrans %}'
)
self.assertEqual(
block_tpl.render(Context({"name": "Django"})),
'Django dit: 1 pour cent signe %, deux signes de pourcentage %%'
)
| bsd-3-clause |
suiyuan2009/tensorflow | tensorflow/contrib/framework/python/ops/ops.py | 94 | 2599 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Classes and functions used to construct graphs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
__all__ = ['get_graph_from_inputs',
'get_name_scope']
def get_graph_from_inputs(op_input_list, graph=None):
"""Returns the appropriate graph to use for the given inputs.
1. If `graph` is provided, we validate that all inputs in `op_input_list` are
from the same graph.
2. Otherwise, we attempt to select a graph from the first Operation- or
Tensor-valued input in `op_input_list`, and validate that all other
such inputs are in the same graph.
3. If the graph was not specified and it could not be inferred from
`op_input_list`, we attempt to use the default graph.
Args:
op_input_list: A list of inputs to an operation, which may include `Tensor`,
`Operation`, and other objects that may be converted to a graph element.
graph: (Optional) The explicit graph to use.
Raises:
TypeError: If `op_input_list` is not a list or tuple, or if graph is not a
Graph.
ValueError: If a graph is explicitly passed and not all inputs are from it,
or if the inputs are from multiple graphs, or we could not find a graph
and there was no default graph.
Returns:
The appropriate graph to use for the given inputs.
"""
# pylint: disable=protected-access
return ops._get_graph_from_inputs(op_input_list, graph)
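# A minimal usage sketch (hypothetical tensors; illustration only):
#
#   a = tf.constant(1.0)
#   b = tf.constant(2.0)
#   g = get_graph_from_inputs([a, b])  # infers the graph shared by a and b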
def get_name_scope():
"""Returns the current name scope of the default graph.
For example:
```python
with tf.name_scope('scope1'):
with tf.name_scope('scope2'):
print(tf.contrib.framework.get_name_scope())
```
would print the string `scope1/scope2`.
Returns:
A string representing the current name scope.
"""
return ops.get_default_graph().get_name_scope()
| apache-2.0 |
dstufft/sqlalchemy | examples/postgis/__init__.py | 30 | 1159 | """A naive example illustrating techniques to help
embed PostGIS functionality.
This example was originally developed in the hopes that it would be
extrapolated into a comprehensive PostGIS integration layer. We are
pleased to announce that this has come to fruition as `GeoAlchemy
<http://www.geoalchemy.org/>`_.
The example illustrates:
* a DDL extension which allows CREATE/DROP to work in
conjunction with AddGeometryColumn/DropGeometryColumn
* a Geometry type, as well as a few subtypes, which
convert result row values to a GIS-aware object,
and also integrates with the DDL extension.
* a GIS-aware object which stores a raw geometry value
and provides a factory for functions such as AsText().
* an ORM comparator which can override standard column
methods on mapped objects to produce GIS operators.
* an attribute event listener that intercepts strings
and converts to GeomFromText().
* a standalone operator example.
The implementation is limited to only public, well known
and simple to use extension points.
E.g.::
print session.query(Road).filter(Road.road_geom.intersects(r1.road_geom)).all()
.. autosource::
"""
| mit |
tastynoodle/django | docs/_ext/literals_to_xrefs.py | 92 | 4869 | """
Runs through a reST file looking for old-style literals, and helps replace them
with new-style references.
"""
import re
import sys
import shelve
refre = re.compile(r'``([^`\s]+?)``')
ROLES = (
'attr',
'class',
"djadmin",
'data',
'exc',
'file',
'func',
'lookup',
'meth',
'mod',
"djadminopt",
"ref",
"setting",
"term",
"tfilter",
"ttag",
# special
"skip"
)
ALWAYS_SKIP = [
"NULL",
"True",
"False",
]
def fixliterals(fname):
with open(fname) as fp:
data = fp.read()
last = 0
new = []
storage = shelve.open("/tmp/literals_to_xref.shelve")
lastvalues = storage.get("lastvalues", {})
for m in refre.finditer(data):
new.append(data[last:m.start()])
last = m.end()
line_start = data.rfind("\n", 0, m.start())
line_end = data.find("\n", m.end())
prev_start = data.rfind("\n", 0, line_start)
next_end = data.find("\n", line_end + 1)
# Skip always-skip stuff
if m.group(1) in ALWAYS_SKIP:
new.append(m.group(0))
continue
# skip when the next line is a title
next_line = data[m.end():next_end].strip()
if next_line[0] in "!-/:-@[-`{-~" and all(c == next_line[0] for c in next_line):
new.append(m.group(0))
continue
sys.stdout.write("\n"+"-"*80+"\n")
sys.stdout.write(data[prev_start+1:m.start()])
sys.stdout.write(colorize(m.group(0), fg="red"))
sys.stdout.write(data[m.end():next_end])
sys.stdout.write("\n\n")
replace_type = None
while replace_type is None:
replace_type = raw_input(
colorize("Replace role: ", fg="yellow")
).strip().lower()
if replace_type and replace_type not in ROLES:
replace_type = None
if replace_type == "":
new.append(m.group(0))
continue
if replace_type == "skip":
new.append(m.group(0))
ALWAYS_SKIP.append(m.group(1))
continue
default = lastvalues.get(m.group(1), m.group(1))
if default.endswith("()") and replace_type in ("class", "func", "meth"):
default = default[:-2]
replace_value = raw_input(
colorize("Text <target> [", fg="yellow") + default + colorize("]: ", fg="yellow")
).strip()
if not replace_value:
replace_value = default
new.append(":%s:`%s`" % (replace_type, replace_value))
lastvalues[m.group(1)] = replace_value
new.append(data[last:])
with open(fname, "w") as fp:
fp.write("".join(new))
storage["lastvalues"] = lastvalues
storage.close()
#
# The following is taken from django.utils.termcolors and is copied here to
# avoid the dependency.
#
def colorize(text='', opts=(), **kwargs):
"""
Returns your text, enclosed in ANSI graphics codes.
Depends on the keyword arguments 'fg' and 'bg', and the contents of
the opts tuple/list.
Returns the RESET code if no parameters are given.
Valid colors:
'black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white'
Valid options:
'bold'
'underscore'
'blink'
'reverse'
'conceal'
'noreset' - string will not be auto-terminated with the RESET code
Examples:
colorize('hello', fg='red', bg='blue', opts=('blink',))
colorize()
colorize('goodbye', opts=('underscore',))
print(colorize('first line', fg='red', opts=('noreset',)))
print('this should be red too')
print(colorize('and so should this'))
print('this should not be red')
"""
color_names = ('black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan', 'white')
foreground = dict([(color_names[x], '3%s' % x) for x in range(8)])
background = dict([(color_names[x], '4%s' % x) for x in range(8)])
RESET = '0'
opt_dict = {'bold': '1', 'underscore': '4', 'blink': '5', 'reverse': '7', 'conceal': '8'}
text = str(text)
code_list = []
if text == '' and len(opts) == 1 and opts[0] == 'reset':
return '\x1b[%sm' % RESET
for k, v in kwargs.iteritems():
if k == 'fg':
code_list.append(foreground[v])
elif k == 'bg':
code_list.append(background[v])
for o in opts:
if o in opt_dict:
code_list.append(opt_dict[o])
if 'noreset' not in opts:
text = text + '\x1b[%sm' % RESET
return ('\x1b[%sm' % ';'.join(code_list)) + text
if __name__ == '__main__':
try:
fixliterals(sys.argv[1])
except (KeyboardInterrupt, SystemExit):
print('')
| bsd-3-clause |
meirwah/st2contrib | packs/puppet/actions/lib/puppet_client.py | 15 | 4657 | import json
import copy
import httplib
import requests
__all__ = [
'PuppetHTTPAPIClient'
]
class PuppetHTTPAPIClient(object):
BASE_GET_HEADERS = {
'Accept': 'text/pson'
}
BASE_POST_HEADERS = {
'Content-Type': 'text/pson'
}
def __init__(self, master_hostname, master_port, client_cert_path,
client_cert_key_path, ca_cert_path=None):
"""
:param master_hostname: Puppet master hostname or IP address.
:type master_hostname: ``str``
:param master_port: Puppet master port.
:type master_port: ``int``
:param client_cert_path: Path to the client certificate which is used
for authentication.
:type client_cert_path: ``str``
:param client_cert_key_path: Path to the private key for the client
certificate.
:type client_cert_key_path: ``str``
:param ca_cert_path: Path to the CA certificate file. Note: If path to
CA certificate file is not specified, no cert
verification is performed.
:type ca_cert_path: ``str``
"""
self._master_hostname = master_hostname
self._master_port = master_port
self._client_cert_path = client_cert_path
self._client_cert_key_path = client_cert_key_path
self._ca_cert_path = ca_cert_path
self._base_url = 'https://%s:%s' % (self._master_hostname,
self._master_port)
def cert_sign(self, environment, host):
"""
Sign a certificate.
:param environment: Environment to operate on.
:type environment: ``str``
:param host: Host to sign the certificate for.
:type host: ``str``
:rtype: ``bool``
"""
path = '/%s/certificate_status/%s/' % (environment, host)
method = 'PUT'
payload = {'desired_state': 'signed'}
response = self._request(path=path, method=method, payload=payload)
return response.status_code in [httplib.OK]
def cert_revoke(self, environment, host):
"""
Revoke a certificate.
:param environment: Environment to operate on.
:type environment: ``str``
:param host: Host to revoke the certificate for.
:type host: ``str``
"""
path = '/%s/certificate_status/%s/' % (environment, host)
method = 'PUT'
payload = {'desired_state': 'revoked'}
response = self._request(path=path, method=method, payload=payload)
return response.status_code in [httplib.OK]
def cert_clean(self, environment, host):
"""
Clean a certificate.
:param environment: Environment to operate on.
:type environment: ``str``
:param host: Host to clean the certificate for.
:type host: ``str``
"""
status1 = self.cert_revoke(environment=environment, host=host)
status2 = self.cert_discard_info(environment=environment, host=host)
return status1 and status2
def cert_discard_info(self, environment, host):
"""
Cause the certificate authority to discard all SSL information
regarding a host (including any certificates, certificate requests,
and keys). This does not revoke the certificate if one is present.
:param environment: Environment to operate on.
:type environment: ``str``
:param host: Host to discard the certificate info for.
:type host: ``str``
"""
path = '/%s/certificate_status/%s/' % (environment, host)
method = 'DELETE'
response = self._request(path=path, method=method)
return response.status_code in [httplib.OK]
def _request(self, path, method='GET', headers=None, payload=None):
url = self._base_url + path
request_headers = copy.deepcopy(self.BASE_GET_HEADERS)
if method.upper() in ['POST', 'PUT']:  # call upper(); comparing the unbound method itself would never match
request_headers.update(self.BASE_POST_HEADERS)
if headers:
request_headers.update(headers)
if payload:
data = json.dumps(payload)
else:
data = None
cert = (self._client_cert_path, self._client_cert_key_path)
if self._ca_cert_path:
verify = self._ca_cert_path
else:
verify = False
response = requests.request(url=url, method=method,
headers=request_headers, data=data,
cert=cert, verify=verify)
return response
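# A minimal usage sketch (hypothetical host names and certificate paths;
# illustration only):
#
#   client = PuppetHTTPAPIClient(master_hostname='puppet.example.com',
#                                master_port=8140,
#                                client_cert_path='/etc/ssl/client.pem',
#                                client_cert_key_path='/etc/ssl/client.key')
#   client.cert_sign(environment='production', host='node1.example.com')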
| apache-2.0 |
mattt416/neutron | neutron/extensions/providernet.py | 29 | 3511 | # Copyright (c) 2012 OpenStack Foundation.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron.api import extensions
from neutron.api.v2 import attributes
from neutron.common import exceptions as n_exc
NETWORK_TYPE = 'provider:network_type'
PHYSICAL_NETWORK = 'provider:physical_network'
SEGMENTATION_ID = 'provider:segmentation_id'
ATTRIBUTES = (NETWORK_TYPE, PHYSICAL_NETWORK, SEGMENTATION_ID)
# Common definitions for maximum string field length
NETWORK_TYPE_MAX_LEN = 32
PHYSICAL_NETWORK_MAX_LEN = 64
EXTENDED_ATTRIBUTES_2_0 = {
'networks': {
NETWORK_TYPE: {'allow_post': True, 'allow_put': True,
'validate': {'type:string': NETWORK_TYPE_MAX_LEN},
'default': attributes.ATTR_NOT_SPECIFIED,
'enforce_policy': True,
'is_visible': True},
PHYSICAL_NETWORK: {'allow_post': True, 'allow_put': True,
'validate': {'type:string':
PHYSICAL_NETWORK_MAX_LEN},
'default': attributes.ATTR_NOT_SPECIFIED,
'enforce_policy': True,
'is_visible': True},
SEGMENTATION_ID: {'allow_post': True, 'allow_put': True,
'convert_to': attributes.convert_to_int,
'enforce_policy': True,
'default': attributes.ATTR_NOT_SPECIFIED,
'is_visible': True},
}
}
def _raise_if_updates_provider_attributes(attrs):
"""Raise exception if provider attributes are present.
This method is used for plugins that do not support
updating provider networks.
"""
if any(attributes.is_attr_set(attrs.get(a)) for a in ATTRIBUTES):
msg = _("Plugin does not support updating provider attributes")
raise n_exc.InvalidInput(error_message=msg)
class Providernet(extensions.ExtensionDescriptor):
"""Extension class supporting provider networks.
This class is used by neutron's extension framework to make
metadata about the provider network extension available to
clients. No new resources are defined by this extension. Instead,
the existing network resource's request and response messages are
extended with attributes in the provider namespace.
With admin rights, network dictionaries returned will also include
provider attributes.
"""
@classmethod
def get_name(cls):
return "Provider Network"
@classmethod
def get_alias(cls):
return "provider"
@classmethod
def get_description(cls):
return "Expose mapping of virtual networks to physical networks"
@classmethod
def get_updated(cls):
return "2012-09-07T10:00:00-00:00"
def get_extended_resources(self, version):
if version == "2.0":
return EXTENDED_ATTRIBUTES_2_0
else:
return {}
| apache-2.0 |
xiaoxiamii/scikit-learn | sklearn/linear_model/base.py | 66 | 16933 | """
Generalized Linear models.
"""
# Author: Alexandre Gramfort <[email protected]>
# Fabian Pedregosa <[email protected]>
# Olivier Grisel <[email protected]>
# Vincent Michel <[email protected]>
# Peter Prettenhofer <[email protected]>
# Mathieu Blondel <[email protected]>
# Lars Buitinck <[email protected]>
#
# License: BSD 3 clause
from __future__ import division
from abc import ABCMeta, abstractmethod
import numbers
import warnings
import numpy as np
import scipy.sparse as sp
from scipy import linalg
from scipy import sparse
from ..externals import six
from ..externals.joblib import Parallel, delayed
from ..base import BaseEstimator, ClassifierMixin, RegressorMixin
from ..utils import as_float_array, check_array, check_X_y, deprecated
from ..utils import check_random_state, column_or_1d
from ..utils.extmath import safe_sparse_dot
from ..utils.sparsefuncs import mean_variance_axis, inplace_column_scale
from ..utils.fixes import sparse_lsqr
from ..utils.validation import NotFittedError, check_is_fitted
from ..utils.seq_dataset import ArrayDataset, CSRDataset
###
### TODO: intercept for all models
### We should define a common function to center data instead of
### repeating the same code inside each fit method.
### TODO: bayesian_ridge_regression and bayesian_regression_ard
### should be squashed into its respective objects.
SPARSE_INTERCEPT_DECAY = 0.01
# For sparse data intercept updates are scaled by this decay factor to avoid
# intercept oscillation.
def make_dataset(X, y, sample_weight, random_state=None):
"""Create ``Dataset`` abstraction for sparse and dense inputs.
This also returns the ``intercept_decay`` which is different
for sparse datasets.
"""
rng = check_random_state(random_state)
# seed should never be 0 in SequentialDataset
seed = rng.randint(1, np.iinfo(np.int32).max)
if sp.issparse(X):
dataset = CSRDataset(X.data, X.indptr, X.indices,
y, sample_weight, seed=seed)
intercept_decay = SPARSE_INTERCEPT_DECAY
else:
dataset = ArrayDataset(X, y, sample_weight, seed=seed)
intercept_decay = 1.0
return dataset, intercept_decay
def sparse_center_data(X, y, fit_intercept, normalize=False):
"""
Compute information needed to center data to have mean zero along
axis 0. Be aware that X will not be centered, since that would break
its sparsity, but it will be normalized if requested.
"""
if fit_intercept:
# we might require not to change the csr matrix sometimes
# store a copy if normalize is True.
# Change dtype to float64 since mean_variance_axis accepts
# it that way.
if sp.isspmatrix(X) and X.getformat() == 'csr':
X = sp.csr_matrix(X, copy=normalize, dtype=np.float64)
else:
X = sp.csc_matrix(X, copy=normalize, dtype=np.float64)
X_mean, X_var = mean_variance_axis(X, axis=0)
if normalize:
# transform variance to std in-place
# XXX: currently scaled to variance=n_samples to match center_data
X_var *= X.shape[0]
X_std = np.sqrt(X_var, X_var)
del X_var
X_std[X_std == 0] = 1
inplace_column_scale(X, 1. / X_std)
else:
X_std = np.ones(X.shape[1])
y_mean = y.mean(axis=0)
y = y - y_mean
else:
X_mean = np.zeros(X.shape[1])
X_std = np.ones(X.shape[1])
y_mean = 0. if y.ndim == 1 else np.zeros(y.shape[1], dtype=X.dtype)
return X, y, X_mean, y_mean, X_std
def center_data(X, y, fit_intercept, normalize=False, copy=True,
sample_weight=None):
"""
Centers data to have mean zero along axis 0. This is here because
nearly all linear models will want their data to be centered.
If sample_weight is not None, then the weighted mean of X and y
is zero, and not the mean itself
"""
X = as_float_array(X, copy)
if fit_intercept:
if isinstance(sample_weight, numbers.Number):
sample_weight = None
if sp.issparse(X):
X_mean = np.zeros(X.shape[1])
X_std = np.ones(X.shape[1])
else:
X_mean = np.average(X, axis=0, weights=sample_weight)
X -= X_mean
if normalize:
# XXX: currently scaled to variance=n_samples
X_std = np.sqrt(np.sum(X ** 2, axis=0))
X_std[X_std == 0] = 1
X /= X_std
else:
X_std = np.ones(X.shape[1])
y_mean = np.average(y, axis=0, weights=sample_weight)
y = y - y_mean
else:
X_mean = np.zeros(X.shape[1])
X_std = np.ones(X.shape[1])
y_mean = 0. if y.ndim == 1 else np.zeros(y.shape[1], dtype=X.dtype)
return X, y, X_mean, y_mean, X_std
def _rescale_data(X, y, sample_weight):
"""Rescale data so as to support sample_weight"""
n_samples = X.shape[0]
sample_weight = sample_weight * np.ones(n_samples)
sample_weight = np.sqrt(sample_weight)
sw_matrix = sparse.dia_matrix((sample_weight, 0),
shape=(n_samples, n_samples))
X = safe_sparse_dot(sw_matrix, X)
y = safe_sparse_dot(sw_matrix, y)
return X, y
class LinearModel(six.with_metaclass(ABCMeta, BaseEstimator)):
"""Base class for Linear Models"""
@abstractmethod
def fit(self, X, y):
"""Fit model."""
@deprecated(" and will be removed in 0.19.")
def decision_function(self, X):
"""Decision function of the linear model.
Parameters
----------
X : {array-like, sparse matrix}, shape = (n_samples, n_features)
Samples.
Returns
-------
C : array, shape = (n_samples,)
Returns predicted values.
"""
return self._decision_function(X)
def _decision_function(self, X):
check_is_fitted(self, "coef_")
X = check_array(X, accept_sparse=['csr', 'csc', 'coo'])
return safe_sparse_dot(X, self.coef_.T,
dense_output=True) + self.intercept_
def predict(self, X):
"""Predict using the linear model
Parameters
----------
X : {array-like, sparse matrix}, shape = (n_samples, n_features)
Samples.
Returns
-------
C : array, shape = (n_samples,)
Returns predicted values.
"""
return self._decision_function(X)
_center_data = staticmethod(center_data)
def _set_intercept(self, X_mean, y_mean, X_std):
"""Set the intercept_
"""
if self.fit_intercept:
self.coef_ = self.coef_ / X_std
self.intercept_ = y_mean - np.dot(X_mean, self.coef_.T)
else:
self.intercept_ = 0.
# XXX Should this derive from LinearModel? It should be a mixin, not an ABC.
# Maybe the n_features checking can be moved to LinearModel.
class LinearClassifierMixin(ClassifierMixin):
"""Mixin for linear classifiers.
Handles prediction for sparse and dense X.
"""
def decision_function(self, X):
"""Predict confidence scores for samples.
The confidence score for a sample is the signed distance of that
sample to the hyperplane.
Parameters
----------
X : {array-like, sparse matrix}, shape = (n_samples, n_features)
Samples.
Returns
-------
array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes)
Confidence scores per (sample, class) combination. In the binary
case, confidence score for self.classes_[1] where >0 means this
class would be predicted.
"""
if not hasattr(self, 'coef_') or self.coef_ is None:
raise NotFittedError("This %(name)s instance is not fitted "
"yet" % {'name': type(self).__name__})
X = check_array(X, accept_sparse='csr')
n_features = self.coef_.shape[1]
if X.shape[1] != n_features:
raise ValueError("X has %d features per sample; expecting %d"
% (X.shape[1], n_features))
scores = safe_sparse_dot(X, self.coef_.T,
dense_output=True) + self.intercept_
return scores.ravel() if scores.shape[1] == 1 else scores
def predict(self, X):
"""Predict class labels for samples in X.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Samples.
Returns
-------
C : array, shape = [n_samples]
Predicted class label per sample.
"""
scores = self.decision_function(X)
if len(scores.shape) == 1:
indices = (scores > 0).astype(np.int)
else:
indices = scores.argmax(axis=1)
return self.classes_[indices]
def _predict_proba_lr(self, X):
"""Probability estimation for OvR logistic regression.
Positive class probabilities are computed as
1. / (1. + np.exp(-self.decision_function(X)));
multiclass is handled by normalizing that over all classes.
"""
prob = self.decision_function(X)
prob *= -1
np.exp(prob, prob)
prob += 1
np.reciprocal(prob, prob)
if prob.ndim == 1:
return np.vstack([1 - prob, prob]).T
else:
# OvR normalization, like LibLinear's predict_probability
prob /= prob.sum(axis=1).reshape((prob.shape[0], -1))
return prob
class SparseCoefMixin(object):
"""Mixin for converting coef_ to and from CSR format.
L1-regularizing estimators should inherit this.
"""
def densify(self):
"""Convert coefficient matrix to dense array format.
Converts the ``coef_`` member (back) to a numpy.ndarray. This is the
default format of ``coef_`` and is required for fitting, so calling
this method is only required on models that have previously been
sparsified; otherwise, it is a no-op.
Returns
-------
self: estimator
"""
msg = "Estimator, %(name)s, must be fitted before densifying."
check_is_fitted(self, "coef_", msg=msg)
if sp.issparse(self.coef_):
self.coef_ = self.coef_.toarray()
return self
def sparsify(self):
"""Convert coefficient matrix to sparse format.
Converts the ``coef_`` member to a scipy.sparse matrix, which for
L1-regularized models can be much more memory- and storage-efficient
than the usual numpy.ndarray representation.
The ``intercept_`` member is not converted.
Notes
-----
For non-sparse models, i.e. when there are not many zeros in ``coef_``,
this may actually *increase* memory usage, so use this method with
care. A rule of thumb is that the number of zero elements, which can
be computed with ``(coef_ == 0).sum()``, must be more than 50% for this
to provide significant benefits.
After calling this method, further fitting with the partial_fit
method (if any) will not work until you call densify.
Returns
-------
self: estimator
"""
msg = "Estimator, %(name)s, must be fitted before sparsifying."
check_is_fitted(self, "coef_", msg=msg)
self.coef_ = sp.csr_matrix(self.coef_)
return self
class LinearRegression(LinearModel, RegressorMixin):
"""
Ordinary least squares Linear Regression.
Parameters
----------
fit_intercept : boolean, optional
whether to calculate the intercept for this model. If set
to false, no intercept will be used in calculations
(e.g. data is expected to be already centered).
normalize : boolean, optional, default False
If True, the regressors X will be normalized before regression.
copy_X : boolean, optional, default True
If True, X will be copied; else, it may be overwritten.
n_jobs : int, optional, default 1
The number of jobs to use for the computation.
If -1 all CPUs are used. This will only provide speedup for
n_targets > 1 and sufficient large problems.
Attributes
----------
coef_ : array, shape (n_features, ) or (n_targets, n_features)
Estimated coefficients for the linear regression problem.
If multiple targets are passed during the fit (y 2D), this
is a 2D array of shape (n_targets, n_features), while if only
one target is passed, this is a 1D array of length n_features.
intercept_ : array
Independent term in the linear model.
Notes
-----
From the implementation point of view, this is just plain Ordinary
Least Squares (scipy.linalg.lstsq) wrapped as a predictor object.
"""
def __init__(self, fit_intercept=True, normalize=False, copy_X=True,
n_jobs=1):
self.fit_intercept = fit_intercept
self.normalize = normalize
self.copy_X = copy_X
self.n_jobs = n_jobs
def fit(self, X, y, sample_weight=None):
"""
Fit linear model.
Parameters
----------
X : numpy array or sparse matrix of shape [n_samples,n_features]
Training data
y : numpy array of shape [n_samples, n_targets]
Target values
sample_weight : numpy array of shape [n_samples]
Individual weights for each sample
Returns
-------
self : returns an instance of self.
"""
n_jobs_ = self.n_jobs
X, y = check_X_y(X, y, accept_sparse=['csr', 'csc', 'coo'],
y_numeric=True, multi_output=True)
if ((sample_weight is not None) and np.atleast_1d(sample_weight).ndim > 1):
sample_weight = column_or_1d(sample_weight, warn=True)
X, y, X_mean, y_mean, X_std = self._center_data(
X, y, self.fit_intercept, self.normalize, self.copy_X,
sample_weight=sample_weight)
if sample_weight is not None:
# Sample weight can be implemented via a simple rescaling.
X, y = _rescale_data(X, y, sample_weight)
if sp.issparse(X):
if y.ndim < 2:
out = sparse_lsqr(X, y)
self.coef_ = out[0]
self.residues_ = out[3]
else:
# sparse_lstsq cannot handle y with shape (M, K)
outs = Parallel(n_jobs=n_jobs_)(
delayed(sparse_lsqr)(X, y[:, j].ravel())
for j in range(y.shape[1]))
self.coef_ = np.vstack(out[0] for out in outs)
self.residues_ = np.vstack(out[3] for out in outs)
else:
self.coef_, self.residues_, self.rank_, self.singular_ = \
linalg.lstsq(X, y)
self.coef_ = self.coef_.T
if y.ndim == 1:
self.coef_ = np.ravel(self.coef_)
self._set_intercept(X_mean, y_mean, X_std)
return self
def _pre_fit(X, y, Xy, precompute, normalize, fit_intercept, copy,
Xy_precompute_order=None):
"""Aux function used at beginning of fit in linear models"""
n_samples, n_features = X.shape
if sparse.isspmatrix(X):
precompute = False
X, y, X_mean, y_mean, X_std = sparse_center_data(
X, y, fit_intercept, normalize)
else:
# copy was done in fit if necessary
X, y, X_mean, y_mean, X_std = center_data(
X, y, fit_intercept, normalize, copy=copy)
if hasattr(precompute, '__array__') and (
fit_intercept and not np.allclose(X_mean, np.zeros(n_features))
or normalize and not np.allclose(X_std, np.ones(n_features))):
warnings.warn("Gram matrix was provided but X was centered"
" to fit intercept, "
"or X was normalized : recomputing Gram matrix.",
UserWarning)
# recompute Gram
precompute = 'auto'
Xy = None
# precompute if n_samples > n_features
if precompute == 'auto':
precompute = (n_samples > n_features)
if precompute is True:
precompute = np.dot(X.T, X)
if Xy_precompute_order == 'F':
precompute = np.dot(X.T, X).T
if not hasattr(precompute, '__array__'):
Xy = None # cannot use Xy if precompute is not Gram
if hasattr(precompute, '__array__') and Xy is None:
if Xy_precompute_order == 'F':
Xy = np.dot(y.T, X).T
else:
Xy = np.dot(X.T, y)
return X, y, X_mean, y_mean, X_std, precompute, Xy
| bsd-3-clause |
gauribhoite/personfinder | app/pytz/zoneinfo/America/Juneau.py | 9 | 6439 | '''tzinfo timezone information for America/Juneau.'''
from pytz.tzinfo import DstTzInfo
from pytz.tzinfo import memorized_datetime as d
from pytz.tzinfo import memorized_ttinfo as i
class Juneau(DstTzInfo):
'''America/Juneau timezone definition. See datetime.tzinfo for details'''
zone = 'America/Juneau'
_utc_transition_times = [
d(1,1,1,0,0,0),
d(1942,2,9,10,0,0),
d(1945,8,14,23,0,0),
d(1945,9,30,9,0,0),
d(1969,4,27,10,0,0),
d(1969,10,26,9,0,0),
d(1970,4,26,10,0,0),
d(1970,10,25,9,0,0),
d(1971,4,25,10,0,0),
d(1971,10,31,9,0,0),
d(1972,4,30,10,0,0),
d(1972,10,29,9,0,0),
d(1973,4,29,10,0,0),
d(1973,10,28,9,0,0),
d(1974,1,6,10,0,0),
d(1974,10,27,9,0,0),
d(1975,2,23,10,0,0),
d(1975,10,26,9,0,0),
d(1976,4,25,10,0,0),
d(1976,10,31,9,0,0),
d(1977,4,24,10,0,0),
d(1977,10,30,9,0,0),
d(1978,4,30,10,0,0),
d(1978,10,29,9,0,0),
d(1979,4,29,10,0,0),
d(1979,10,28,9,0,0),
d(1980,4,27,10,0,0),
d(1980,10,26,9,0,0),
d(1981,4,26,10,0,0),
d(1981,10,25,9,0,0),
d(1982,4,25,10,0,0),
d(1982,10,31,9,0,0),
d(1983,4,24,10,0,0),
d(1983,10,30,9,0,0),
d(1983,11,30,9,0,0),
d(1984,4,29,11,0,0),
d(1984,10,28,10,0,0),
d(1985,4,28,11,0,0),
d(1985,10,27,10,0,0),
d(1986,4,27,11,0,0),
d(1986,10,26,10,0,0),
d(1987,4,5,11,0,0),
d(1987,10,25,10,0,0),
d(1988,4,3,11,0,0),
d(1988,10,30,10,0,0),
d(1989,4,2,11,0,0),
d(1989,10,29,10,0,0),
d(1990,4,1,11,0,0),
d(1990,10,28,10,0,0),
d(1991,4,7,11,0,0),
d(1991,10,27,10,0,0),
d(1992,4,5,11,0,0),
d(1992,10,25,10,0,0),
d(1993,4,4,11,0,0),
d(1993,10,31,10,0,0),
d(1994,4,3,11,0,0),
d(1994,10,30,10,0,0),
d(1995,4,2,11,0,0),
d(1995,10,29,10,0,0),
d(1996,4,7,11,0,0),
d(1996,10,27,10,0,0),
d(1997,4,6,11,0,0),
d(1997,10,26,10,0,0),
d(1998,4,5,11,0,0),
d(1998,10,25,10,0,0),
d(1999,4,4,11,0,0),
d(1999,10,31,10,0,0),
d(2000,4,2,11,0,0),
d(2000,10,29,10,0,0),
d(2001,4,1,11,0,0),
d(2001,10,28,10,0,0),
d(2002,4,7,11,0,0),
d(2002,10,27,10,0,0),
d(2003,4,6,11,0,0),
d(2003,10,26,10,0,0),
d(2004,4,4,11,0,0),
d(2004,10,31,10,0,0),
d(2005,4,3,11,0,0),
d(2005,10,30,10,0,0),
d(2006,4,2,11,0,0),
d(2006,10,29,10,0,0),
d(2007,3,11,11,0,0),
d(2007,11,4,10,0,0),
d(2008,3,9,11,0,0),
d(2008,11,2,10,0,0),
d(2009,3,8,11,0,0),
d(2009,11,1,10,0,0),
d(2010,3,14,11,0,0),
d(2010,11,7,10,0,0),
d(2011,3,13,11,0,0),
d(2011,11,6,10,0,0),
d(2012,3,11,11,0,0),
d(2012,11,4,10,0,0),
d(2013,3,10,11,0,0),
d(2013,11,3,10,0,0),
d(2014,3,9,11,0,0),
d(2014,11,2,10,0,0),
d(2015,3,8,11,0,0),
d(2015,11,1,10,0,0),
d(2016,3,13,11,0,0),
d(2016,11,6,10,0,0),
d(2017,3,12,11,0,0),
d(2017,11,5,10,0,0),
d(2018,3,11,11,0,0),
d(2018,11,4,10,0,0),
d(2019,3,10,11,0,0),
d(2019,11,3,10,0,0),
d(2020,3,8,11,0,0),
d(2020,11,1,10,0,0),
d(2021,3,14,11,0,0),
d(2021,11,7,10,0,0),
d(2022,3,13,11,0,0),
d(2022,11,6,10,0,0),
d(2023,3,12,11,0,0),
d(2023,11,5,10,0,0),
d(2024,3,10,11,0,0),
d(2024,11,3,10,0,0),
d(2025,3,9,11,0,0),
d(2025,11,2,10,0,0),
d(2026,3,8,11,0,0),
d(2026,11,1,10,0,0),
d(2027,3,14,11,0,0),
d(2027,11,7,10,0,0),
d(2028,3,12,11,0,0),
d(2028,11,5,10,0,0),
d(2029,3,11,11,0,0),
d(2029,11,4,10,0,0),
d(2030,3,10,11,0,0),
d(2030,11,3,10,0,0),
d(2031,3,9,11,0,0),
d(2031,11,2,10,0,0),
d(2032,3,14,11,0,0),
d(2032,11,7,10,0,0),
d(2033,3,13,11,0,0),
d(2033,11,6,10,0,0),
d(2034,3,12,11,0,0),
d(2034,11,5,10,0,0),
d(2035,3,11,11,0,0),
d(2035,11,4,10,0,0),
d(2036,3,9,11,0,0),
d(2036,11,2,10,0,0),
d(2037,3,8,11,0,0),
d(2037,11,1,10,0,0),
]
_transition_info = [
i(-28800,0,'PST'),
i(-25200,3600,'PWT'),
i(-25200,3600,'PPT'),
i(-28800,0,'PST'),
i(-25200,3600,'PDT'),
i(-28800,0,'PST'),
i(-25200,3600,'PDT'),
i(-28800,0,'PST'),
i(-25200,3600,'PDT'),
i(-28800,0,'PST'),
i(-25200,3600,'PDT'),
i(-28800,0,'PST'),
i(-25200,3600,'PDT'),
i(-28800,0,'PST'),
i(-25200,3600,'PDT'),
i(-28800,0,'PST'),
i(-25200,3600,'PDT'),
i(-28800,0,'PST'),
i(-25200,3600,'PDT'),
i(-28800,0,'PST'),
i(-25200,3600,'PDT'),
i(-28800,0,'PST'),
i(-25200,3600,'PDT'),
i(-28800,0,'PST'),
i(-25200,3600,'PDT'),
i(-28800,0,'PST'),
i(-25200,3600,'PDT'),
i(-28800,0,'PST'),
i(-25200,3600,'PDT'),
i(-28800,0,'PST'),
i(-25200,3600,'PDT'),
i(-28800,0,'PST'),
i(-25200,3600,'PDT'),
i(-32400,0,'YST'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
i(-28800,3600,'AKDT'),
i(-32400,0,'AKST'),
]
Juneau = Juneau()
| apache-2.0 |
aninternetof/bremen | bremenenv/lib/python3.5/site-packages/pip/_vendor/requests/packages/urllib3/packages/ssl_match_hostname/_implementation.py | 2360 | 3778 | """The match_hostname() function from Python 3.3.3, essential when using SSL."""
# Note: This file is under the PSF license as the code comes from the python
# stdlib. http://docs.python.org/3/license.html
import re
__version__ = '3.4.0.2'
class CertificateError(ValueError):
pass
def _dnsname_match(dn, hostname, max_wildcards=1):
"""Matching according to RFC 6125, section 6.4.3
http://tools.ietf.org/html/rfc6125#section-6.4.3
"""
pats = []
if not dn:
return False
# Ported from python3-syntax:
# leftmost, *remainder = dn.split(r'.')
parts = dn.split(r'.')
leftmost = parts[0]
remainder = parts[1:]
wildcards = leftmost.count('*')
if wildcards > max_wildcards:
# Issue #17980: avoid denials of service by refusing more
# than one wildcard per fragment. A survey of established
# policy among SSL implementations showed it to be a
# reasonable choice.
raise CertificateError(
"too many wildcards in certificate DNS name: " + repr(dn))
# speed up common case w/o wildcards
if not wildcards:
return dn.lower() == hostname.lower()
# RFC 6125, section 6.4.3, subitem 1.
# The client SHOULD NOT attempt to match a presented identifier in which
# the wildcard character comprises a label other than the left-most label.
if leftmost == '*':
# When '*' is a fragment by itself, it matches a non-empty dotless
# fragment.
pats.append('[^.]+')
elif leftmost.startswith('xn--') or hostname.startswith('xn--'):
# RFC 6125, section 6.4.3, subitem 3.
# The client SHOULD NOT attempt to match a presented identifier
# where the wildcard character is embedded within an A-label or
# U-label of an internationalized domain name.
pats.append(re.escape(leftmost))
else:
# Otherwise, '*' matches any dotless string, e.g. www*
pats.append(re.escape(leftmost).replace(r'\*', '[^.]*'))
# add the remaining fragments, ignore any wildcards
for frag in remainder:
pats.append(re.escape(frag))
pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
return pat.match(hostname)
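# A minimal sketch of how _dnsname_match applies the RFC 6125 wildcard rules
# above (illustrative calls only, not part of the vendored module):
# _dnsname_match('*.example.com', 'www.example.com')    # truthy: '*' matches one label
# _dnsname_match('*.example.com', 'a.b.example.com')    # falsy: a wildcard cannot span labels
# _dnsname_match('x*.example.com', 'xyz.example.com')   # truthy: partial-label wildcard
# _dnsname_match('example.com', 'EXAMPLE.COM')          # truthy: matching is case-insensitive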
def match_hostname(cert, hostname):
"""Verify that *cert* (in decoded format as returned by
SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125
rules are followed, but IP addresses are not accepted for *hostname*.
CertificateError is raised on failure. On success, the function
returns nothing.
"""
if not cert:
raise ValueError("empty or no certificate")
dnsnames = []
san = cert.get('subjectAltName', ())
for key, value in san:
if key == 'DNS':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if not dnsnames:
# The subject is only checked when there is no dNSName entry
# in subjectAltName
for sub in cert.get('subject', ()):
for key, value in sub:
# XXX according to RFC 2818, the most specific Common Name
# must be used.
if key == 'commonName':
if _dnsname_match(value, hostname):
return
dnsnames.append(value)
if len(dnsnames) > 1:
raise CertificateError("hostname %r "
"doesn't match either of %s"
% (hostname, ', '.join(map(repr, dnsnames))))
elif len(dnsnames) == 1:
raise CertificateError("hostname %r "
"doesn't match %r"
% (hostname, dnsnames[0]))
else:
raise CertificateError("no appropriate commonName or "
"subjectAltName fields were found")
| mit |
antonve/s4-project-mooc | common/lib/xmodule/xmodule/tests/test_capa_module.py | 12 | 81450 | # -*- coding: utf-8 -*-
"""
Tests of the Capa XModule
"""
# pylint: disable=missing-docstring
# pylint: disable=invalid-name
import datetime
import json
import random
import os
import textwrap
import unittest
import ddt
from mock import Mock, patch, DEFAULT
import webob
from webob.multidict import MultiDict
import xmodule
from xmodule.tests import DATA_DIR
from capa import responsetypes
from capa.responsetypes import (StudentInputError, LoncapaProblemError,
ResponseError)
from capa.xqueue_interface import XQueueInterface
from xmodule.capa_module import CapaModule, CapaDescriptor, ComplexEncoder
from opaque_keys.edx.locations import Location
from xblock.field_data import DictFieldData
from xblock.fields import ScopeIds
from . import get_test_system
from pytz import UTC
from capa.correctmap import CorrectMap
from ..capa_base_constants import RANDOMIZATION
class CapaFactory(object):
"""
A helper class to create problem modules with various parameters for testing.
"""
sample_problem_xml = textwrap.dedent("""\
<?xml version="1.0"?>
<problem>
<text>
<p>What is pi, to two decimal places?</p>
</text>
<numericalresponse answer="3.14">
<textline math="1" size="30"/>
</numericalresponse>
</problem>
""")
num = 0
@classmethod
def next_num(cls):
cls.num += 1
return cls.num
@classmethod
def input_key(cls, response_num=2, input_num=1):
"""
Return the input key to use when passing GET parameters
"""
return ("input_" + cls.answer_key(response_num, input_num))
@classmethod
def answer_key(cls, response_num=2, input_num=1):
"""
Return the key stored in the capa problem answer dict
"""
return (
"%s_%d_%d" % (
"-".join(['i4x', 'edX', 'capa_test', 'problem', 'SampleProblem%d' % cls.num]),
response_num,
input_num
)
)
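# For example, for the first factory instance (cls.num == 1) with the default
# response_num=2 and input_num=1, answer_key() yields
# "i4x-edX-capa_test-problem-SampleProblem1_2_1", and input_key() is the same
# string with an "input_" prefix.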
@classmethod
def create(cls,
attempts=None,
problem_state=None,
correct=False,
xml=None,
override_get_score=True,
**kwargs
):
"""
All parameters are optional, and are added to the created problem if specified.
Arguments:
graceperiod:
due:
max_attempts:
showanswer:
force_save_button:
rerandomize: all strings, as specified in the policy for the problem
problem_state: a dict to be serialized into the instance_state of the
module.
attempts: also added to instance state. Will be converted to an int.
"""
location = Location(
"edX",
"capa_test",
"2012_Fall",
"problem",
"SampleProblem{0}".format(cls.next_num()),
None
)
if xml is None:
xml = cls.sample_problem_xml
field_data = {'data': xml}
field_data.update(kwargs)
descriptor = Mock(weight="1")
if problem_state is not None:
field_data.update(problem_state)
if attempts is not None:
# converting to int here because I keep putting "0" and "1" in the tests
# since everything else is a string.
field_data['attempts'] = int(attempts)
system = get_test_system()
system.render_template = Mock(return_value="<div>Test Template HTML</div>")
module = CapaModule(
descriptor,
system,
DictFieldData(field_data),
ScopeIds(None, None, location, location),
)
if override_get_score:
if correct:
# TODO: probably better to actually set the internal state properly, but...
module.get_score = lambda: {'score': 1, 'total': 1}
else:
module.get_score = lambda: {'score': 0, 'total': 1}
return module
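# A minimal usage sketch of the factory, mirroring the calls in the tests below:
# module = CapaFactory.create(attempts=1, max_attempts="2", rerandomize=RANDOMIZATION.NEVER)
# result = module.check_problem({CapaFactory.input_key(): '3.14'})
# assert result['success'] in ('correct', 'incorrect')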
class CapaFactoryWithFiles(CapaFactory):
"""
A factory for creating a Capa problem with files attached.
"""
sample_problem_xml = textwrap.dedent("""\
<problem>
<coderesponse queuename="BerkeleyX-cs188x">
<!-- actual filenames here don't matter for server-side tests,
they are only acted upon in the browser. -->
<filesubmission
points="25"
allowed_files="prog1.py prog2.py prog3.py"
required_files="prog1.py prog2.py prog3.py"
/>
<codeparam>
<answer_display>
If you're having trouble with this Project,
please refer to the Lecture Slides and attend office hours.
</answer_display>
<grader_payload>{"project": "p3"}</grader_payload>
</codeparam>
</coderesponse>
<customresponse>
<text>
If you worked with a partner, enter their username or email address. If you
worked alone, enter None.
</text>
<textline points="0" size="40" correct_answer="Your partner's username or 'None'"/>
<answer type="loncapa/python">
correct=['correct']
s = str(submission[0]).strip()
if submission[0] == '':
correct[0] = 'incorrect'
</answer>
</customresponse>
</problem>
""")
@ddt.ddt
class CapaModuleTest(unittest.TestCase):
def setUp(self):
super(CapaModuleTest, self).setUp()
now = datetime.datetime.now(UTC)
day_delta = datetime.timedelta(days=1)
self.yesterday_str = str(now - day_delta)
self.today_str = str(now)
self.tomorrow_str = str(now + day_delta)
# in the capa grace period format, not in time delta format
self.two_day_delta_str = "2 days"
def test_import(self):
module = CapaFactory.create()
self.assertEqual(module.get_score()['score'], 0)
other_module = CapaFactory.create()
self.assertEqual(other_module.get_score()['score'], 0)
self.assertNotEqual(module.url_name, other_module.url_name,
"Factory should be creating unique names for each problem")
def test_correct(self):
"""
Check that the factory creates correct and incorrect problems properly.
"""
module = CapaFactory.create()
self.assertEqual(module.get_score()['score'], 0)
other_module = CapaFactory.create(correct=True)
self.assertEqual(other_module.get_score()['score'], 1)
def test_get_score(self):
"""
Do 1 test where the internals of get_score are properly set
@jbau Note: this obviously depends on a particular implementation of get_score, but I think this is actually
useful as unit-code coverage for this current implementation. I don't see a layer where LoncapaProblem
is tested directly
"""
from capa.correctmap import CorrectMap
student_answers = {'1_2_1': 'abcd'}
correct_map = CorrectMap(answer_id='1_2_1', correctness="correct", npoints=0.9)
module = CapaFactory.create(correct=True, override_get_score=False)
module.lcp.correct_map = correct_map
module.lcp.student_answers = student_answers
self.assertEqual(module.get_score()['score'], 0.9)
other_correct_map = CorrectMap(answer_id='1_2_1', correctness="incorrect", npoints=0.1)
other_module = CapaFactory.create(correct=False, override_get_score=False)
other_module.lcp.correct_map = other_correct_map
other_module.lcp.student_answers = student_answers
self.assertEqual(other_module.get_score()['score'], 0.1)
def test_showanswer_default(self):
"""
Make sure the show answer logic does the right thing.
"""
# default, no due date, showanswer 'closed', so problem is open, and show_answer
# not visible.
problem = CapaFactory.create()
self.assertFalse(problem.answer_available())
def test_showanswer_attempted(self):
problem = CapaFactory.create(showanswer='attempted')
self.assertFalse(problem.answer_available())
problem.attempts = 1
self.assertTrue(problem.answer_available())
def test_showanswer_closed(self):
# can see after attempts used up, even with due date in the future
used_all_attempts = CapaFactory.create(showanswer='closed',
max_attempts="1",
attempts="1",
due=self.tomorrow_str)
self.assertTrue(used_all_attempts.answer_available())
# can see after due date
after_due_date = CapaFactory.create(showanswer='closed',
max_attempts="1",
attempts="0",
due=self.yesterday_str)
self.assertTrue(after_due_date.answer_available())
# can't see because attempts left
attempts_left_open = CapaFactory.create(showanswer='closed',
max_attempts="1",
attempts="0",
due=self.tomorrow_str)
self.assertFalse(attempts_left_open.answer_available())
# Can't see because grace period hasn't expired
still_in_grace = CapaFactory.create(showanswer='closed',
max_attempts="1",
attempts="0",
due=self.yesterday_str,
graceperiod=self.two_day_delta_str)
self.assertFalse(still_in_grace.answer_available())
def test_showanswer_correct_or_past_due(self):
"""
With showanswer="correct_or_past_due" should show answer after the answer is correct
or after the problem is closed for everyone--e.g. after due date + grace period.
"""
# can see because answer is correct, even with due date in the future
answer_correct = CapaFactory.create(showanswer='correct_or_past_due',
max_attempts="1",
attempts="0",
due=self.tomorrow_str,
correct=True)
self.assertTrue(answer_correct.answer_available())
# can see after due date, even when answer isn't correct
past_due_date = CapaFactory.create(showanswer='correct_or_past_due',
max_attempts="1",
attempts="0",
due=self.yesterday_str)
self.assertTrue(past_due_date.answer_available())
# can also see after due date when answer _is_ correct
past_due_date_correct = CapaFactory.create(showanswer='correct_or_past_due',
max_attempts="1",
attempts="0",
due=self.yesterday_str,
correct=True)
self.assertTrue(past_due_date_correct.answer_available())
# Can't see because grace period hasn't expired and answer isn't correct
still_in_grace = CapaFactory.create(showanswer='correct_or_past_due',
max_attempts="1",
attempts="1",
due=self.yesterday_str,
graceperiod=self.two_day_delta_str)
self.assertFalse(still_in_grace.answer_available())
def test_showanswer_past_due(self):
"""
With showanswer="past_due" should only show answer after the problem is closed
for everyone--e.g. after due date + grace period.
"""
# can't see after attempts used up, even with due date in the future
used_all_attempts = CapaFactory.create(showanswer='past_due',
max_attempts="1",
attempts="1",
due=self.tomorrow_str)
self.assertFalse(used_all_attempts.answer_available())
# can see after due date
past_due_date = CapaFactory.create(showanswer='past_due',
max_attempts="1",
attempts="0",
due=self.yesterday_str)
self.assertTrue(past_due_date.answer_available())
# can't see because attempts left
attempts_left_open = CapaFactory.create(showanswer='past_due',
max_attempts="1",
attempts="0",
due=self.tomorrow_str)
self.assertFalse(attempts_left_open.answer_available())
# Can't see because grace period hasn't expired, even though have no more
# attempts.
still_in_grace = CapaFactory.create(showanswer='past_due',
max_attempts="1",
attempts="1",
due=self.yesterday_str,
graceperiod=self.two_day_delta_str)
self.assertFalse(still_in_grace.answer_available())
def test_showanswer_finished(self):
"""
With showanswer="finished" should show answer after the problem is closed,
or after the answer is correct.
"""
# can see after attempts used up, even with due date in the future
used_all_attempts = CapaFactory.create(showanswer='finished',
max_attempts="1",
attempts="1",
due=self.tomorrow_str)
self.assertTrue(used_all_attempts.answer_available())
# can see after due date
past_due_date = CapaFactory.create(showanswer='finished',
max_attempts="1",
attempts="0",
due=self.yesterday_str)
self.assertTrue(past_due_date.answer_available())
# can't see because attempts left and wrong
attempts_left_open = CapaFactory.create(showanswer='finished',
max_attempts="1",
attempts="0",
due=self.tomorrow_str)
self.assertFalse(attempts_left_open.answer_available())
# _can_ see because attempts left and right
correct_ans = CapaFactory.create(showanswer='finished',
max_attempts="1",
attempts="0",
due=self.tomorrow_str,
correct=True)
self.assertTrue(correct_ans.answer_available())
# Can see even though grace period hasn't expired, because have no more
# attempts.
still_in_grace = CapaFactory.create(showanswer='finished',
max_attempts="1",
attempts="1",
due=self.yesterday_str,
graceperiod=self.two_day_delta_str)
self.assertTrue(still_in_grace.answer_available())
def test_closed(self):
# Attempts < Max attempts --> NOT closed
module = CapaFactory.create(max_attempts="1", attempts="0")
self.assertFalse(module.closed())
# Attempts < Max attempts --> NOT closed
module = CapaFactory.create(max_attempts="2", attempts="1")
self.assertFalse(module.closed())
# Attempts = Max attempts --> closed
module = CapaFactory.create(max_attempts="1", attempts="1")
self.assertTrue(module.closed())
# Attempts > Max attempts --> closed
module = CapaFactory.create(max_attempts="1", attempts="2")
self.assertTrue(module.closed())
# Max attempts = 0 --> closed
module = CapaFactory.create(max_attempts="0", attempts="2")
self.assertTrue(module.closed())
# Past due --> closed
module = CapaFactory.create(max_attempts="1", attempts="0",
due=self.yesterday_str)
self.assertTrue(module.closed())
def test_parse_get_params(self):
# Valid GET param dict
# 'input_5' intentionally left unset,
valid_get_dict = MultiDict({
'input_1': 'test',
'input_1_2': 'test',
'input_1_2_3': 'test',
'input_[]_3': 'test',
'input_4': None,
'input_6': 5
})
result = CapaModule.make_dict_of_responses(valid_get_dict)
# Expect that we get a dict with "input" stripped from key names
# and that we get the same values back
for key in result.keys():
original_key = "input_" + key
self.assertTrue(original_key in valid_get_dict,
"Output dict should have key %s" % original_key)
self.assertEqual(valid_get_dict[original_key], result[key])
# Valid GET param dict with list keys
# Each tuple represents a single parameter in the query string
valid_get_dict = MultiDict((('input_2[]', 'test1'), ('input_2[]', 'test2')))
result = CapaModule.make_dict_of_responses(valid_get_dict)
self.assertTrue('2' in result)
self.assertEqual(['test1', 'test2'], result['2'])
# If we use [] at the end of a key name, we should always
# get a list, even if there's just one value
valid_get_dict = MultiDict({'input_1[]': 'test'})
result = CapaModule.make_dict_of_responses(valid_get_dict)
self.assertEqual(result['1'], ['test'])
# If we have no underscores in the name, then the key is invalid
invalid_get_dict = MultiDict({'input': 'test'})
with self.assertRaises(ValueError):
result = CapaModule.make_dict_of_responses(invalid_get_dict)
# Two equivalent names (one list, one non-list)
# One of the values would overwrite the other, so detect this
# and raise an exception
invalid_get_dict = MultiDict({'input_1[]': 'test 1',
'input_1': 'test 2'})
with self.assertRaises(ValueError):
result = CapaModule.make_dict_of_responses(invalid_get_dict)
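# A compact illustration of the convention exercised above (hypothetical values):
# MultiDict({'input_1': 'a', 'input_2[]': 'b'}) -> {'1': 'a', '2': ['b']}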
def test_check_problem_correct(self):
module = CapaFactory.create(attempts=1)
# Simulate that all answers are marked correct, no matter
# what the input is, by patching CorrectMap.is_correct()
# Also simulate rendering the HTML
# TODO: pep8 thinks the following line has invalid syntax
with patch('capa.correctmap.CorrectMap.is_correct') as mock_is_correct, \
patch('xmodule.capa_module.CapaModule.get_problem_html') as mock_html:
mock_is_correct.return_value = True
mock_html.return_value = "Test HTML"
# Check the problem
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.check_problem(get_request_dict)
# Expect that the problem is marked correct
self.assertEqual(result['success'], 'correct')
# Expect that we get the (mocked) HTML
self.assertEqual(result['contents'], 'Test HTML')
# Expect that the number of attempts is incremented by 1
self.assertEqual(module.attempts, 2)
def test_check_problem_incorrect(self):
module = CapaFactory.create(attempts=0)
# Simulate marking the input incorrect
with patch('capa.correctmap.CorrectMap.is_correct') as mock_is_correct:
mock_is_correct.return_value = False
# Check the problem
get_request_dict = {CapaFactory.input_key(): '0'}
result = module.check_problem(get_request_dict)
# Expect that the problem is marked incorrect
self.assertEqual(result['success'], 'incorrect')
# Expect that the number of attempts is incremented by 1
self.assertEqual(module.attempts, 1)
def test_check_problem_closed(self):
module = CapaFactory.create(attempts=3)
# Problem closed -- cannot submit
# Simulate that CapaModule.closed() always returns True
with patch('xmodule.capa_module.CapaModule.closed') as mock_closed:
mock_closed.return_value = True
with self.assertRaises(xmodule.exceptions.NotFoundError):
get_request_dict = {CapaFactory.input_key(): '3.14'}
module.check_problem(get_request_dict)
# Expect that number of attempts NOT incremented
self.assertEqual(module.attempts, 3)
@ddt.data(
RANDOMIZATION.ALWAYS,
'true'
)
def test_check_problem_resubmitted_with_randomize(self, rerandomize):
# Randomize turned on
module = CapaFactory.create(rerandomize=rerandomize, attempts=0)
# Simulate that the problem is completed
module.done = True
# Expect that we cannot submit
with self.assertRaises(xmodule.exceptions.NotFoundError):
get_request_dict = {CapaFactory.input_key(): '3.14'}
module.check_problem(get_request_dict)
# Expect that number of attempts NOT incremented
self.assertEqual(module.attempts, 0)
@ddt.data(
RANDOMIZATION.NEVER,
'false',
RANDOMIZATION.PER_STUDENT
)
def test_check_problem_resubmitted_no_randomize(self, rerandomize):
# Randomize turned off
module = CapaFactory.create(rerandomize=rerandomize, attempts=0, done=True)
# Expect that we can submit successfully
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.check_problem(get_request_dict)
self.assertEqual(result['success'], 'correct')
# Expect that number of attempts IS incremented
self.assertEqual(module.attempts, 1)
def test_check_problem_queued(self):
module = CapaFactory.create(attempts=1)
# Simulate that the problem is queued
multipatch = patch.multiple(
'capa.capa_problem.LoncapaProblem',
is_queued=DEFAULT,
get_recentmost_queuetime=DEFAULT
)
with multipatch as values:
values['is_queued'].return_value = True
values['get_recentmost_queuetime'].return_value = datetime.datetime.now(UTC)
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.check_problem(get_request_dict)
# Expect an AJAX alert message in 'success'
self.assertIn('You must wait', result['success'])
# Expect that the number of attempts is NOT incremented
self.assertEqual(module.attempts, 1)
def test_check_problem_with_files(self):
# Check a problem with uploaded files, using the check_problem API.
# pylint: disable=protected-access
# The files we'll be uploading.
fnames = ["prog1.py", "prog2.py", "prog3.py"]
fpaths = [os.path.join(DATA_DIR, "capa", fname) for fname in fnames]
fileobjs = [open(fpath) for fpath in fpaths]
for fileobj in fileobjs:
self.addCleanup(fileobj.close)
module = CapaFactoryWithFiles.create()
# Mock the XQueueInterface.
xqueue_interface = XQueueInterface("http://example.com/xqueue", Mock())
xqueue_interface._http_post = Mock(return_value=(0, "ok"))
module.system.xqueue['interface'] = xqueue_interface
# Create a request dictionary for check_problem.
get_request_dict = {
CapaFactoryWithFiles.input_key(response_num=2): fileobjs,
CapaFactoryWithFiles.input_key(response_num=3): 'None',
}
module.check_problem(get_request_dict)
# _http_post is called like this:
# _http_post(
# 'http://example.com/xqueue/xqueue/submit/',
# {
# 'xqueue_header': '{"lms_key": "df34fb702620d7ae892866ba57572491", "lms_callback_url": "/", "queue_name": "BerkeleyX-cs188x"}',
# 'xqueue_body': '{"student_info": "{\\"anonymous_student_id\\": \\"student\\", \\"submission_time\\": \\"20131117183318\\"}", "grader_payload": "{\\"project\\": \\"p3\\"}", "student_response": ""}',
# },
# files={
# path(u'/home/ned/edx/edx-platform/common/test/data/uploads/asset.html'):
# <open file u'/home/ned/edx/edx-platform/common/test/data/uploads/asset.html', mode 'r' at 0x49c5f60>,
# path(u'/home/ned/edx/edx-platform/common/test/data/uploads/image.jpg'):
# <open file u'/home/ned/edx/edx-platform/common/test/data/uploads/image.jpg', mode 'r' at 0x49c56f0>,
# path(u'/home/ned/edx/edx-platform/common/test/data/uploads/textbook.pdf'):
# <open file u'/home/ned/edx/edx-platform/common/test/data/uploads/textbook.pdf', mode 'r' at 0x49c5a50>,
# },
# )
self.assertEqual(xqueue_interface._http_post.call_count, 1)
_, kwargs = xqueue_interface._http_post.call_args
self.assertItemsEqual(fpaths, kwargs['files'].keys())
for fpath, fileobj in kwargs['files'].iteritems():
self.assertEqual(fpath, fileobj.name)
def test_check_problem_with_files_as_xblock(self):
# Check a problem with uploaded files, using the XBlock API.
# pylint: disable=protected-access
# The files we'll be uploading.
fnames = ["prog1.py", "prog2.py", "prog3.py"]
fpaths = [os.path.join(DATA_DIR, "capa", fname) for fname in fnames]
fileobjs = [open(fpath) for fpath in fpaths]
for fileobj in fileobjs:
self.addCleanup(fileobj.close)
module = CapaFactoryWithFiles.create()
# Mock the XQueueInterface.
xqueue_interface = XQueueInterface("http://example.com/xqueue", Mock())
xqueue_interface._http_post = Mock(return_value=(0, "ok"))
module.system.xqueue['interface'] = xqueue_interface
# Create a webob Request with the files uploaded.
post_data = []
for fname, fileobj in zip(fnames, fileobjs):
post_data.append((CapaFactoryWithFiles.input_key(response_num=2), (fname, fileobj)))
post_data.append((CapaFactoryWithFiles.input_key(response_num=3), 'None'))
request = webob.Request.blank("/some/fake/url", POST=post_data, content_type='multipart/form-data')
module.handle('xmodule_handler', request, 'problem_check')
self.assertEqual(xqueue_interface._http_post.call_count, 1)
_, kwargs = xqueue_interface._http_post.call_args
self.assertItemsEqual(fnames, kwargs['files'].keys())
for fpath, fileobj in kwargs['files'].iteritems():
self.assertEqual(fpath, fileobj.name)
def test_check_problem_error(self):
# Try each exception that capa_module should handle
exception_classes = [StudentInputError,
LoncapaProblemError,
ResponseError]
for exception_class in exception_classes:
# Create the module
module = CapaFactory.create(attempts=1)
# Ensure that the user is NOT staff
module.system.user_is_staff = False
# Simulate answering a problem that raises the exception
with patch('capa.capa_problem.LoncapaProblem.grade_answers') as mock_grade:
mock_grade.side_effect = exception_class('test error')
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.check_problem(get_request_dict)
# Expect an AJAX alert message in 'success'
expected_msg = 'Error: test error'
self.assertEqual(expected_msg, result['success'])
# Expect that the number of attempts is NOT incremented
self.assertEqual(module.attempts, 1)
def test_check_problem_other_errors(self):
"""
Test that errors other than the expected kinds give an appropriate message.
See also `test_check_problem_error` for the "expected kinds" or errors.
"""
# Create the module
module = CapaFactory.create(attempts=1)
# Ensure that the user is NOT staff
module.system.user_is_staff = False
# Ensure that DEBUG is on
module.system.DEBUG = True
# Simulate answering a problem that raises the exception
with patch('capa.capa_problem.LoncapaProblem.grade_answers') as mock_grade:
error_msg = u"Superterrible error happened: ☠"
mock_grade.side_effect = Exception(error_msg)
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.check_problem(get_request_dict)
# Expect an AJAX alert message in 'success'
self.assertTrue(error_msg in result['success'])
def test_check_problem_error_nonascii(self):
# Try each exception that capa_module should handle
exception_classes = [StudentInputError,
LoncapaProblemError,
ResponseError]
for exception_class in exception_classes:
# Create the module
module = CapaFactory.create(attempts=1)
# Ensure that the user is NOT staff
module.system.user_is_staff = False
# Simulate answering a problem that raises the exception
with patch('capa.capa_problem.LoncapaProblem.grade_answers') as mock_grade:
mock_grade.side_effect = exception_class(u"ȧƈƈḗƞŧḗḓ ŧḗẋŧ ƒǿř ŧḗşŧīƞɠ")
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.check_problem(get_request_dict)
# Expect an AJAX alert message in 'success'
expected_msg = u'Error: ȧƈƈḗƞŧḗḓ ŧḗẋŧ ƒǿř ŧḗşŧīƞɠ'
self.assertEqual(expected_msg, result['success'])
# Expect that the number of attempts is NOT incremented
self.assertEqual(module.attempts, 1)
def test_check_problem_error_with_staff_user(self):
# Try each exception that capa module should handle
for exception_class in [StudentInputError,
LoncapaProblemError,
ResponseError]:
# Create the module
module = CapaFactory.create(attempts=1)
# Ensure that the user IS staff
module.system.user_is_staff = True
# Simulate answering a problem that raises an exception
with patch('capa.capa_problem.LoncapaProblem.grade_answers') as mock_grade:
mock_grade.side_effect = exception_class('test error')
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.check_problem(get_request_dict)
# Expect an AJAX alert message in 'success'
self.assertTrue('test error' in result['success'])
# We DO include traceback information for staff users
self.assertTrue('Traceback' in result['success'])
# Expect that the number of attempts is NOT incremented
self.assertEqual(module.attempts, 1)
def test_reset_problem(self):
module = CapaFactory.create(done=True)
module.new_lcp = Mock(wraps=module.new_lcp)
module.choose_new_seed = Mock(wraps=module.choose_new_seed)
# Stub out HTML rendering
with patch('xmodule.capa_module.CapaModule.get_problem_html') as mock_html:
mock_html.return_value = "<div>Test HTML</div>"
# Reset the problem
get_request_dict = {}
result = module.reset_problem(get_request_dict)
# Expect that the request was successful
self.assertTrue('success' in result and result['success'])
# Expect that the problem HTML is retrieved
self.assertTrue('html' in result)
self.assertEqual(result['html'], "<div>Test HTML</div>")
# Expect that the problem was reset
module.new_lcp.assert_called_once_with(None)
def test_reset_problem_closed(self):
# pre studio default
module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS)
# Simulate that the problem is closed
with patch('xmodule.capa_module.CapaModule.closed') as mock_closed:
mock_closed.return_value = True
# Try to reset the problem
get_request_dict = {}
result = module.reset_problem(get_request_dict)
# Expect that the problem was NOT reset
self.assertTrue('success' in result and not result['success'])
def test_reset_problem_not_done(self):
# Simulate that the problem is NOT done
module = CapaFactory.create(done=False)
# Try to reset the problem
get_request_dict = {}
result = module.reset_problem(get_request_dict)
# Expect that the problem was NOT reset
self.assertTrue('success' in result and not result['success'])
def test_rescore_problem_correct(self):
module = CapaFactory.create(attempts=1, done=True)
# Simulate that all answers are marked correct, no matter
# what the input is, by patching LoncapaResponse.evaluate_answers()
with patch('capa.responsetypes.LoncapaResponse.evaluate_answers') as mock_evaluate_answers:
mock_evaluate_answers.return_value = CorrectMap(CapaFactory.answer_key(), 'correct')
result = module.rescore_problem()
# Expect that the problem is marked correct
self.assertEqual(result['success'], 'correct')
# Expect that we get no HTML
self.assertFalse('contents' in result)
# Expect that the number of attempts is not incremented
self.assertEqual(module.attempts, 1)
def test_rescore_problem_incorrect(self):
# make sure it also works when attempts have been reset,
# so add this to the test:
module = CapaFactory.create(attempts=0, done=True)
# Simulate that all answers are marked incorrect, no matter
# what the input is, by patching LoncapaResponse.evaluate_answers()
with patch('capa.responsetypes.LoncapaResponse.evaluate_answers') as mock_evaluate_answers:
mock_evaluate_answers.return_value = CorrectMap(CapaFactory.answer_key(), 'incorrect')
result = module.rescore_problem()
# Expect that the problem is marked incorrect
self.assertEqual(result['success'], 'incorrect')
# Expect that the number of attempts is not incremented
self.assertEqual(module.attempts, 0)
def test_rescore_problem_not_done(self):
# Simulate that the problem is NOT done
module = CapaFactory.create(done=False)
# Try to rescore the problem, and get exception
with self.assertRaises(xmodule.exceptions.NotFoundError):
module.rescore_problem()
def test_rescore_problem_not_supported(self):
module = CapaFactory.create(done=True)
# Try to rescore the problem, and get exception
with patch('capa.capa_problem.LoncapaProblem.supports_rescoring') as mock_supports_rescoring:
mock_supports_rescoring.return_value = False
with self.assertRaises(NotImplementedError):
module.rescore_problem()
def _rescore_problem_error_helper(self, exception_class):
"""Helper to allow testing all errors that rescoring might return."""
# Create the module
module = CapaFactory.create(attempts=1, done=True)
# Simulate answering a problem that raises the exception
with patch('capa.capa_problem.LoncapaProblem.rescore_existing_answers') as mock_rescore:
mock_rescore.side_effect = exception_class(u'test error \u03a9')
result = module.rescore_problem()
# Expect an AJAX alert message in 'success'
expected_msg = u'Error: test error \u03a9'
self.assertEqual(result['success'], expected_msg)
# Expect that the number of attempts is NOT incremented
self.assertEqual(module.attempts, 1)
def test_rescore_problem_student_input_error(self):
self._rescore_problem_error_helper(StudentInputError)
def test_rescore_problem_problem_error(self):
self._rescore_problem_error_helper(LoncapaProblemError)
def test_rescore_problem_response_error(self):
self._rescore_problem_error_helper(ResponseError)
def test_save_problem(self):
module = CapaFactory.create(done=False)
# Save the problem
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.save_problem(get_request_dict)
# Expect that answers are saved to the problem
expected_answers = {CapaFactory.answer_key(): '3.14'}
self.assertEqual(module.lcp.student_answers, expected_answers)
# Expect that the result is success
self.assertTrue('success' in result and result['success'])
def test_save_problem_closed(self):
module = CapaFactory.create(done=False)
# Simulate that the problem is closed
with patch('xmodule.capa_module.CapaModule.closed') as mock_closed:
mock_closed.return_value = True
# Try to save the problem
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.save_problem(get_request_dict)
# Expect that the result is failure
self.assertTrue('success' in result and not result['success'])
@ddt.data(
RANDOMIZATION.ALWAYS,
'true'
)
def test_save_problem_submitted_with_randomize(self, rerandomize):
# Capa XModule treats 'always' and 'true' equivalently
module = CapaFactory.create(rerandomize=rerandomize, done=True)
# Try to save
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.save_problem(get_request_dict)
# Expect that we cannot save
self.assertTrue('success' in result and not result['success'])
@ddt.data(
RANDOMIZATION.NEVER,
'false',
RANDOMIZATION.PER_STUDENT
)
def test_save_problem_submitted_no_randomize(self, rerandomize):
# Capa XModule treats 'false' and 'per_student' equivalently
module = CapaFactory.create(rerandomize=rerandomize, done=True)
# Try to save
get_request_dict = {CapaFactory.input_key(): '3.14'}
result = module.save_problem(get_request_dict)
# Expect that we succeed
self.assertTrue('success' in result and result['success'])
def test_check_button_name(self):
# If last attempt, button name changes to "Final Check"
# Just in case, we also check what happens if we have
# more attempts than allowed.
attempts = random.randint(1, 10)
module = CapaFactory.create(attempts=attempts - 1, max_attempts=attempts)
self.assertEqual(module.check_button_name(), "Final Check")
module = CapaFactory.create(attempts=attempts, max_attempts=attempts)
self.assertEqual(module.check_button_name(), "Final Check")
module = CapaFactory.create(attempts=attempts + 1, max_attempts=attempts)
self.assertEqual(module.check_button_name(), "Final Check")
# Otherwise, button name is "Check"
module = CapaFactory.create(attempts=attempts - 2, max_attempts=attempts)
self.assertEqual(module.check_button_name(), "Check")
module = CapaFactory.create(attempts=attempts - 3, max_attempts=attempts)
self.assertEqual(module.check_button_name(), "Check")
# If no limit on attempts, then always show "Check"
module = CapaFactory.create(attempts=attempts - 3)
self.assertEqual(module.check_button_name(), "Check")
module = CapaFactory.create(attempts=0)
self.assertEqual(module.check_button_name(), "Check")
def test_check_button_checking_name(self):
module = CapaFactory.create(attempts=1, max_attempts=10)
self.assertEqual(module.check_button_checking_name(), "Checking...")
module = CapaFactory.create(attempts=10, max_attempts=10)
self.assertEqual(module.check_button_checking_name(), "Checking...")
def test_check_button_name_customization(self):
module = CapaFactory.create(
attempts=1,
max_attempts=10,
text_customization={"custom_check": "Submit", "custom_final_check": "Final Submit"}
)
self.assertEqual(module.check_button_name(), "Submit")
module = CapaFactory.create(attempts=9,
max_attempts=10,
text_customization={"custom_check": "Submit", "custom_final_check": "Final Submit"}
)
self.assertEqual(module.check_button_name(), "Final Submit")
def test_check_button_checking_name_customization(self):
module = CapaFactory.create(
attempts=1,
max_attempts=10,
text_customization={
"custom_check": "Submit",
"custom_final_check": "Final Submit",
"custom_checking": "Checking..."
}
)
self.assertEqual(module.check_button_checking_name(), "Checking...")
module = CapaFactory.create(
attempts=9,
max_attempts=10,
text_customization={
"custom_check": "Submit",
"custom_final_check": "Final Submit",
"custom_checking": "Checking..."
}
)
self.assertEqual(module.check_button_checking_name(), "Checking...")
def test_should_show_check_button(self):
attempts = random.randint(1, 10)
# If we're after the deadline, do NOT show check button
module = CapaFactory.create(due=self.yesterday_str)
self.assertFalse(module.should_show_check_button())
# If user is out of attempts, do NOT show the check button
module = CapaFactory.create(attempts=attempts, max_attempts=attempts)
self.assertFalse(module.should_show_check_button())
# If survey question (max_attempts = 0), do NOT show the check button
module = CapaFactory.create(max_attempts=0)
self.assertFalse(module.should_show_check_button())
# If user submitted a problem but hasn't reset,
# do NOT show the check button
# Note: we can only reset when rerandomize="always" or "true"
module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, done=True)
self.assertFalse(module.should_show_check_button())
module = CapaFactory.create(rerandomize="true", done=True)
self.assertFalse(module.should_show_check_button())
# Otherwise, DO show the check button
module = CapaFactory.create()
self.assertTrue(module.should_show_check_button())
# If the user has submitted the problem
# and we do NOT have a reset button, then we can show the check button
# Setting rerandomize to "never" or "false" ensures that the reset button
# is not shown
module = CapaFactory.create(rerandomize=RANDOMIZATION.NEVER, done=True)
self.assertTrue(module.should_show_check_button())
module = CapaFactory.create(rerandomize="false", done=True)
self.assertTrue(module.should_show_check_button())
module = CapaFactory.create(rerandomize=RANDOMIZATION.PER_STUDENT, done=True)
self.assertTrue(module.should_show_check_button())
def test_should_show_reset_button(self):
attempts = random.randint(1, 10)
# If we're after the deadline, do NOT show the reset button
module = CapaFactory.create(due=self.yesterday_str, done=True)
self.assertFalse(module.should_show_reset_button())
# If the user is out of attempts, do NOT show the reset button
module = CapaFactory.create(attempts=attempts, max_attempts=attempts, done=True)
self.assertFalse(module.should_show_reset_button())
# pre studio default value, DO show the reset button
module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, done=True)
self.assertTrue(module.should_show_reset_button())
# If survey question for capa (max_attempts = 0),
# DO show the reset button
module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, max_attempts=0, done=True)
self.assertTrue(module.should_show_reset_button())
# If the question is not correct
# DO show the reset button
module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, max_attempts=0, done=True, correct=False)
self.assertTrue(module.should_show_reset_button())
# If the question is correct and randomization is never
# DO not show the reset button
module = CapaFactory.create(rerandomize=RANDOMIZATION.NEVER, max_attempts=0, done=True, correct=True)
self.assertFalse(module.should_show_reset_button())
# If the question is correct and randomization is always
# Show the reset button
module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, max_attempts=0, done=True, correct=True)
self.assertTrue(module.should_show_reset_button())
# Don't show reset button if randomization is turned on and the question is not done
module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, show_reset_button=False, done=False)
self.assertFalse(module.should_show_reset_button())
# Show reset button if randomization is turned on and the problem is done
module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, show_reset_button=False, done=True)
self.assertTrue(module.should_show_reset_button())
def test_should_show_save_button(self):
attempts = random.randint(1, 10)
# If we're after the deadline, do NOT show the save button
module = CapaFactory.create(due=self.yesterday_str, done=True)
self.assertFalse(module.should_show_save_button())
# If the user is out of attempts, do NOT show the save button
module = CapaFactory.create(attempts=attempts, max_attempts=attempts, done=True)
self.assertFalse(module.should_show_save_button())
# If user submitted a problem but hasn't reset, do NOT show the save button
module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, done=True)
self.assertFalse(module.should_show_save_button())
module = CapaFactory.create(rerandomize="true", done=True)
self.assertFalse(module.should_show_save_button())
# If the user has unlimited attempts and we are not randomizing,
# then do NOT show a save button
# because they can keep using "Check"
module = CapaFactory.create(max_attempts=None, rerandomize=RANDOMIZATION.NEVER, done=False)
self.assertFalse(module.should_show_save_button())
module = CapaFactory.create(max_attempts=None, rerandomize="false", done=True)
self.assertFalse(module.should_show_save_button())
module = CapaFactory.create(max_attempts=None, rerandomize=RANDOMIZATION.PER_STUDENT, done=True)
self.assertFalse(module.should_show_save_button())
# pre-studio default, DO show the save button
module = CapaFactory.create(rerandomize=RANDOMIZATION.ALWAYS, done=False)
self.assertTrue(module.should_show_save_button())
# If we're not randomizing and we have limited attempts, then we can save
module = CapaFactory.create(rerandomize=RANDOMIZATION.NEVER, max_attempts=2, done=True)
self.assertTrue(module.should_show_save_button())
module = CapaFactory.create(rerandomize="false", max_attempts=2, done=True)
self.assertTrue(module.should_show_save_button())
module = CapaFactory.create(rerandomize=RANDOMIZATION.PER_STUDENT, max_attempts=2, done=True)
self.assertTrue(module.should_show_save_button())
# If survey question for capa (max_attempts = 0),
# DO show the save button
module = CapaFactory.create(max_attempts=0, done=False)
self.assertTrue(module.should_show_save_button())
def test_should_show_save_button_force_save_button(self):
# If we're after the deadline, do NOT show the save button
# even though we're forcing a save
module = CapaFactory.create(due=self.yesterday_str,
force_save_button="true",
done=True)
self.assertFalse(module.should_show_save_button())
# If the user is out of attempts, do NOT show the save button
attempts = random.randint(1, 10)
module = CapaFactory.create(attempts=attempts,
max_attempts=attempts,
force_save_button="true",
done=True)
self.assertFalse(module.should_show_save_button())
# Otherwise, if we force the save button,
# then show it even if we would ordinarily
# require a reset first
module = CapaFactory.create(force_save_button="true",
rerandomize=RANDOMIZATION.ALWAYS,
done=True)
self.assertTrue(module.should_show_save_button())
module = CapaFactory.create(force_save_button="true",
rerandomize="true",
done=True)
self.assertTrue(module.should_show_save_button())
def test_no_max_attempts(self):
module = CapaFactory.create(max_attempts='')
html = module.get_problem_html()
self.assertTrue(html is not None)
# assert that we got here without exploding
def test_get_problem_html(self):
module = CapaFactory.create()
# We've tested the show/hide button logic in other tests,
# so here we hard-wire the values
show_check_button = bool(random.randint(0, 1) % 2)
show_reset_button = bool(random.randint(0, 1) % 2)
show_save_button = bool(random.randint(0, 1) % 2)
module.should_show_check_button = Mock(return_value=show_check_button)
module.should_show_reset_button = Mock(return_value=show_reset_button)
module.should_show_save_button = Mock(return_value=show_save_button)
# Mock the system rendering function
module.system.render_template = Mock(return_value="<div>Test Template HTML</div>")
# Patch the capa problem's HTML rendering
with patch('capa.capa_problem.LoncapaProblem.get_html') as mock_html:
mock_html.return_value = "<div>Test Problem HTML</div>"
# Render the problem HTML
html = module.get_problem_html(encapsulate=False)
# Also render the problem encapsulated in a <div>
html_encapsulated = module.get_problem_html(encapsulate=True)
# Expect that we get the rendered template back
self.assertEqual(html, "<div>Test Template HTML</div>")
# Check the rendering context
render_args, _ = module.system.render_template.call_args
self.assertEqual(len(render_args), 2)
template_name = render_args[0]
self.assertEqual(template_name, "problem.html")
context = render_args[1]
self.assertEqual(context['problem']['html'], "<div>Test Problem HTML</div>")
self.assertEqual(bool(context['check_button']), show_check_button)
self.assertEqual(bool(context['reset_button']), show_reset_button)
self.assertEqual(bool(context['save_button']), show_save_button)
# Assert that the encapsulated html contains the original html
self.assertTrue(html in html_encapsulated)
def test_input_state_consistency(self):
module1 = CapaFactory.create()
module2 = CapaFactory.create()
# check to make sure that the input_state and the keys have the same values
module1.set_state_from_lcp()
self.assertEqual(module1.lcp.inputs.keys(), module1.input_state.keys())
module2.set_state_from_lcp()
intersection = set(module2.input_state.keys()).intersection(set(module1.input_state.keys()))
self.assertEqual(len(intersection), 0)
def test_get_problem_html_error(self):
"""
In production, when an error occurs with the problem HTML
rendering, a "dummy" problem is created with an error
message to display to the user.
"""
module = CapaFactory.create()
# Save the original problem so we can compare it later
original_problem = module.lcp
# Simulate throwing an exception when the capa problem
# is asked to render itself as HTML
module.lcp.get_html = Mock(side_effect=Exception("Test"))
# Stub out the get_test_system rendering function
module.system.render_template = Mock(return_value="<div>Test Template HTML</div>")
# Turn off DEBUG
module.system.DEBUG = False
# Try to render the module with DEBUG turned off
html = module.get_problem_html()
self.assertTrue(html is not None)
# Check the rendering context
render_args, _ = module.system.render_template.call_args
context = render_args[1]
self.assertTrue("error" in context['problem']['html'])
# Expect that the module has created a new dummy problem with the error
self.assertNotEqual(original_problem, module.lcp)
def test_get_problem_html_error_w_debug(self):
"""
Test the html response when an error occurs with DEBUG on
"""
module = CapaFactory.create()
# Simulate throwing an exception when the capa problem
# is asked to render itself as HTML
error_msg = u"Superterrible error happened: ☠"
module.lcp.get_html = Mock(side_effect=Exception(error_msg))
# Stub out the get_test_system rendering function
module.system.render_template = Mock(return_value="<div>Test Template HTML</div>")
# Make sure DEBUG is on
module.system.DEBUG = True
# Try to render the module with DEBUG turned on
html = module.get_problem_html()
self.assertTrue(html is not None)
# Check the rendering context
render_args, _ = module.system.render_template.call_args
context = render_args[1]
self.assertTrue(error_msg in context['problem']['html'])
@ddt.data(
'false',
'true',
RANDOMIZATION.NEVER,
RANDOMIZATION.PER_STUDENT,
RANDOMIZATION.ALWAYS,
RANDOMIZATION.ONRESET
)
def test_random_seed_no_change(self, rerandomize):
# Run the test for each possible rerandomize value
module = CapaFactory.create(rerandomize=rerandomize)
# Get the seed
# By this point, the module should have persisted the seed
seed = module.seed
self.assertTrue(seed is not None)
# If we're not rerandomizing, the seed is always set
# to the same value (1)
if rerandomize == RANDOMIZATION.NEVER:
self.assertEqual(seed, 1,
msg="Seed should always be 1 when rerandomize='%s'" % rerandomize)
# Check the problem
get_request_dict = {CapaFactory.input_key(): '3.14'}
module.check_problem(get_request_dict)
# Expect that the seed is the same
self.assertEqual(seed, module.seed)
# Save the problem
module.save_problem(get_request_dict)
# Expect that the seed is the same
self.assertEqual(seed, module.seed)
@ddt.data(
'false',
'true',
RANDOMIZATION.NEVER,
RANDOMIZATION.PER_STUDENT,
RANDOMIZATION.ALWAYS,
RANDOMIZATION.ONRESET
)
def test_random_seed_with_reset(self, rerandomize):
"""
Run the test for each possible rerandomize value
"""
def _reset_and_get_seed(module):
"""
Reset the XModule and return the module's seed
"""
# Simulate submitting an attempt
# We need to do this, or reset_problem() will
# fail because it won't re-randomize until the problem
# has been submitted.
module.done = True
# Reset the problem
module.reset_problem({})
# Return the seed
return module.seed
def _retry_and_check(num_tries, test_func):
'''
Returns True if *test_func* was successful
(returned True) within *num_tries* attempts
*test_func* must be a function
of the form test_func() -> bool
'''
success = False
for i in range(num_tries):
if test_func() is True:
success = True
break
return success
module = CapaFactory.create(rerandomize=rerandomize, done=True)
# Get the seed
# By this point, the module should have persisted the seed
seed = module.seed
self.assertTrue(seed is not None)
# We do NOT want the seed to reset if rerandomize
# is set to 'never' -- it should still be 1
# The seed also stays the same if we're randomizing
# 'per_student': the same student should see the same problem
if rerandomize in [RANDOMIZATION.NEVER,
'false',
RANDOMIZATION.PER_STUDENT]:
self.assertEqual(seed, _reset_and_get_seed(module))
# Otherwise, we expect the seed to change
# to another valid seed
else:
# Since there's a small chance we might get the
# same seed again, give it 5 chances
# to generate a different seed
success = _retry_and_check(5, lambda: _reset_and_get_seed(module) != seed)
self.assertTrue(module.seed is not None)
msg = 'Could not get a new seed from reset after 5 tries'
self.assertTrue(success, msg)
@ddt.data(
'false',
'true',
RANDOMIZATION.NEVER,
RANDOMIZATION.PER_STUDENT,
RANDOMIZATION.ALWAYS,
RANDOMIZATION.ONRESET
)
def test_random_seed_with_reset_question_unsubmitted(self, rerandomize):
"""
Run the test for each possible rerandomize value
"""
def _reset_and_get_seed(module):
"""
Reset the XModule and return the module's seed
"""
# Reset the problem
# By default, the problem is instantiated as unsubmitted
module.reset_problem({})
# Return the seed
return module.seed
module = CapaFactory.create(rerandomize=rerandomize, done=False)
# Get the seed
# By this point, the module should have persisted the seed
seed = module.seed
self.assertTrue(seed is not None)
# The seed should never change because the student hasn't finished the problem.
self.assertEqual(seed, _reset_and_get_seed(module))
@ddt.data(
RANDOMIZATION.ALWAYS,
RANDOMIZATION.PER_STUDENT,
'true',
RANDOMIZATION.ONRESET
)
def test_random_seed_bins(self, rerandomize):
# Assert that we are limiting the number of possible seeds.
# Get a bunch of seeds, they should all be in 0-999.
i = 200
while i > 0:
module = CapaFactory.create(rerandomize=rerandomize)
assert 0 <= module.seed < 1000
i -= 1
@patch('xmodule.capa_base.log')
@patch('xmodule.capa_base.Progress')
def test_get_progress_error(self, mock_progress, mock_log):
"""
Check that an exception given in `Progress` produces a `log.exception` call.
"""
error_types = [TypeError, ValueError]
for error_type in error_types:
mock_progress.side_effect = error_type
module = CapaFactory.create()
self.assertIsNone(module.get_progress())
mock_log.exception.assert_called_once_with('Got bad progress')
mock_log.reset_mock()
@patch('xmodule.capa_base.Progress')
def test_get_progress_no_error_if_weight_zero(self, mock_progress):
"""
Check that if the weight is 0 get_progress does not try to create a Progress object.
"""
mock_progress.return_value = True
module = CapaFactory.create()
module.weight = 0
progress = module.get_progress()
self.assertIsNone(progress)
self.assertFalse(mock_progress.called)
@patch('xmodule.capa_base.Progress')
def test_get_progress_calculate_progress_fraction(self, mock_progress):
"""
Check that score and total are calculated correctly for the progress fraction.
"""
module = CapaFactory.create()
module.weight = 1
module.get_progress()
mock_progress.assert_called_with(0, 1)
other_module = CapaFactory.create(correct=True)
other_module.weight = 1
other_module.get_progress()
mock_progress.assert_called_with(1, 1)
def test_get_html(self):
"""
Check that get_html() calls get_progress() with no arguments.
"""
module = CapaFactory.create()
module.get_progress = Mock(wraps=module.get_progress)
module.get_html()
module.get_progress.assert_called_once_with()
def test_get_problem(self):
"""
Check that get_problem() returns the expected dictionary.
"""
module = CapaFactory.create()
self.assertEquals(module.get_problem("data"), {'html': module.get_problem_html(encapsulate=False)})
# Standard question with shuffle="true" used by a few tests
common_shuffle_xml = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" shuffle="true">
<choice correct="false">Apple</choice>
<choice correct="false">Banana</choice>
<choice correct="false">Chocolate</choice>
<choice correct ="true">Donut</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
def test_check_unmask(self):
"""
Check that shuffle unmasking is plumbed through: when check_problem is called,
unmasked names should appear in the track_function event_info.
"""
module = CapaFactory.create(xml=self.common_shuffle_xml)
with patch.object(module.runtime, 'track_function') as mock_track_function:
get_request_dict = {CapaFactory.input_key(): 'choice_3'} # the correct choice
module.check_problem(get_request_dict)
mock_call = mock_track_function.mock_calls[0]
event_info = mock_call[1][1]
self.assertEqual(event_info['answers'][CapaFactory.answer_key()], 'choice_3')
# 'permutation' key added to record how problem was shown
self.assertEquals(event_info['permutation'][CapaFactory.answer_key()],
('shuffle', ['choice_3', 'choice_1', 'choice_2', 'choice_0']))
self.assertEquals(event_info['success'], 'correct')
@unittest.skip("masking temporarily disabled")
def test_save_unmask(self):
"""On problem save, unmasked data should appear on track_function."""
module = CapaFactory.create(xml=self.common_shuffle_xml)
with patch.object(module.runtime, 'track_function') as mock_track_function:
get_request_dict = {CapaFactory.input_key(): 'mask_0'}
module.save_problem(get_request_dict)
mock_call = mock_track_function.mock_calls[0]
event_info = mock_call[1][1]
self.assertEquals(event_info['answers'][CapaFactory.answer_key()], 'choice_2')
self.assertIsNotNone(event_info['permutation'][CapaFactory.answer_key()])
@unittest.skip("masking temporarily disabled")
def test_reset_unmask(self):
"""On problem reset, unmask names should appear track_function."""
module = CapaFactory.create(xml=self.common_shuffle_xml)
get_request_dict = {CapaFactory.input_key(): 'mask_0'}
module.check_problem(get_request_dict)
# On reset, 'old_state' should use unmasked names
with patch.object(module.runtime, 'track_function') as mock_track_function:
module.reset_problem(None)
mock_call = mock_track_function.mock_calls[0]
event_info = mock_call[1][1]
self.assertEquals(mock_call[1][0], 'reset_problem')
self.assertEquals(event_info['old_state']['student_answers'][CapaFactory.answer_key()], 'choice_2')
self.assertIsNotNone(event_info['permutation'][CapaFactory.answer_key()])
@unittest.skip("masking temporarily disabled")
def test_rescore_unmask(self):
"""On problem rescore, unmasked names should appear on track_function."""
module = CapaFactory.create(xml=self.common_shuffle_xml)
get_request_dict = {CapaFactory.input_key(): 'mask_0'}
module.check_problem(get_request_dict)
# On rescore, state/student_answers should use unmasked names
with patch.object(module.runtime, 'track_function') as mock_track_function:
module.rescore_problem()
mock_call = mock_track_function.mock_calls[0]
event_info = mock_call[1][1]
self.assertEquals(mock_call[1][0], 'problem_rescore')
self.assertEquals(event_info['state']['student_answers'][CapaFactory.answer_key()], 'choice_2')
self.assertIsNotNone(event_info['permutation'][CapaFactory.answer_key()])
def test_check_unmask_answerpool(self):
"""Check answer-pool question track_function uses unmasked names"""
xml = textwrap.dedent("""
<problem>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" answer-pool="4">
<choice correct="false">Apple</choice>
<choice correct="false">Banana</choice>
<choice correct="false">Chocolate</choice>
<choice correct ="true">Donut</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
module = CapaFactory.create(xml=xml)
with patch.object(module.runtime, 'track_function') as mock_track_function:
get_request_dict = {CapaFactory.input_key(): 'choice_2'} # mask_X form when masking enabled
module.check_problem(get_request_dict)
mock_call = mock_track_function.mock_calls[0]
event_info = mock_call[1][1]
self.assertEqual(event_info['answers'][CapaFactory.answer_key()], 'choice_2')
# 'permutation' key added to record how problem was shown
self.assertEquals(event_info['permutation'][CapaFactory.answer_key()],
('answerpool', ['choice_1', 'choice_3', 'choice_2', 'choice_0']))
self.assertEquals(event_info['success'], 'incorrect')
@ddt.ddt
class CapaDescriptorTest(unittest.TestCase):
def _create_descriptor(self, xml, name=None):
""" Creates a CapaDescriptor to run test against """
descriptor = CapaDescriptor(get_test_system(), scope_ids=1)
descriptor.data = xml
if name:
descriptor.display_name = name
return descriptor
@ddt.data(*responsetypes.registry.registered_tags())
def test_all_response_types(self, response_tag):
""" Tests that every registered response tag is correctly returned """
xml = "<problem><{response_tag}></{response_tag}></problem>".format(response_tag=response_tag)
name = "Some Capa Problem"
descriptor = self._create_descriptor(xml, name=name)
self.assertEquals(descriptor.problem_types, {response_tag})
self.assertEquals(descriptor.index_dictionary(), {
'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
'display_name': name,
'problem_types': [response_tag]
})
def test_response_types_ignores_non_response_tags(self):
xml = textwrap.dedent("""
<problem>
<p>Label</p>
<div>Some comment</div>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" answer-pool="4">
<choice correct="false">Apple</choice>
<choice correct="false">Banana</choice>
<choice correct="false">Chocolate</choice>
<choice correct ="true">Donut</choice>
</choicegroup>
</multiplechoiceresponse>
</problem>
""")
name = "Test Capa Problem"
descriptor = self._create_descriptor(xml, name=name)
self.assertEquals(descriptor.problem_types, {"multiplechoiceresponse"})
self.assertEquals(descriptor.index_dictionary(), {
'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
'display_name': name,
'problem_types': ["multiplechoiceresponse"]
})
def test_response_types_multiple_tags(self):
xml = textwrap.dedent("""
<problem>
<p>Label</p>
<div>Some comment</div>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" answer-pool="1">
<choice correct ="true">Donut</choice>
</choicegroup>
</multiplechoiceresponse>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice" answer-pool="1">
<choice correct ="true">Buggy</choice>
</choicegroup>
</multiplechoiceresponse>
<optionresponse>
<optioninput label="Option" options="('1','2')" correct="2"></optioninput>
</optionresponse>
</problem>
""")
name = "Other Test Capa Problem"
descriptor = self._create_descriptor(xml, name=name)
self.assertEquals(descriptor.problem_types, {"multiplechoiceresponse", "optionresponse"})
self.assertEquals(
descriptor.index_dictionary(), {
'content_type': CapaDescriptor.INDEX_CONTENT_TYPE,
'display_name': name,
'problem_types': ["optionresponse", "multiplechoiceresponse"]
}
)
class ComplexEncoderTest(unittest.TestCase):
def test_default(self):
"""
Check that complex numbers can be encoded into JSON.
"""
complex_num = 1 - 1j
expected_str = '1-1*j'
json_str = json.dumps(complex_num, cls=ComplexEncoder)
self.assertEqual(expected_str, json_str[1:-1]) # ignore quotes
class TestProblemCheckTracking(unittest.TestCase):
"""
Ensure correct tracking information is included in events emitted during problem checks.
"""
def setUp(self):
super(TestProblemCheckTracking, self).setUp()
self.maxDiff = None
def test_choice_answer_text(self):
xml = """\
<problem display_name="Multiple Choice Questions">
<p>What color is the open ocean on a sunny day?</p>
<optionresponse>
<optioninput options="('yellow','blue','green')" correct="blue" label="What color is the open ocean on a sunny day?"/>
</optionresponse>
<p>Which piece of furniture is built for sitting?</p>
<multiplechoiceresponse>
<choicegroup type="MultipleChoice">
<choice correct="false"><text>a table</text></choice>
<choice correct="false"><text>a desk</text></choice>
<choice correct="true"><text>a chair</text></choice>
<choice correct="false"><text>a bookshelf</text></choice>
</choicegroup>
</multiplechoiceresponse>
<p>Which of the following are musical instruments?</p>
<choiceresponse>
<checkboxgroup direction="vertical" label="Which of the following are musical instruments?">
<choice correct="true">a piano</choice>
<choice correct="false">a tree</choice>
<choice correct="true">a guitar</choice>
<choice correct="false">a window</choice>
</checkboxgroup>
</choiceresponse>
</problem>
"""
# Whitespace screws up comparisons
xml = ''.join(line.strip() for line in xml.split('\n'))
factory = self.capa_factory_for_problem_xml(xml)
module = factory.create()
answer_input_dict = {
factory.input_key(2): 'blue',
factory.input_key(3): 'choice_0',
factory.input_key(4): ['choice_0', 'choice_1'],
}
event = self.get_event_for_answers(module, answer_input_dict)
self.assertEquals(event['submission'], {
factory.answer_key(2): {
'question': 'What color is the open ocean on a sunny day?',
'answer': 'blue',
'response_type': 'optionresponse',
'input_type': 'optioninput',
'correct': True,
'variant': '',
},
factory.answer_key(3): {
'question': '',
'answer': u'<text>a table</text>',
'response_type': 'multiplechoiceresponse',
'input_type': 'choicegroup',
'correct': False,
'variant': '',
},
factory.answer_key(4): {
'question': 'Which of the following are musical instruments?',
'answer': [u'a piano', u'a tree'],
'response_type': 'choiceresponse',
'input_type': 'checkboxgroup',
'correct': False,
'variant': '',
},
})
def capa_factory_for_problem_xml(self, xml):
class CustomCapaFactory(CapaFactory):
"""
A factory for creating a Capa problem with arbitrary xml.
"""
sample_problem_xml = textwrap.dedent(xml)
return CustomCapaFactory
def get_event_for_answers(self, module, answer_input_dict):
with patch.object(module.runtime, 'track_function') as mock_track_function:
module.check_problem(answer_input_dict)
self.assertEquals(len(mock_track_function.mock_calls), 1)
mock_call = mock_track_function.mock_calls[0]
event = mock_call[1][1]
return event
def test_numerical_textline(self):
factory = CapaFactory
module = factory.create()
answer_input_dict = {
factory.input_key(2): '3.14'
}
event = self.get_event_for_answers(module, answer_input_dict)
self.assertEquals(event['submission'], {
factory.answer_key(2): {
'question': '',
'answer': '3.14',
'response_type': 'numericalresponse',
'input_type': 'textline',
'correct': True,
'variant': '',
}
})
def test_multiple_inputs(self):
factory = self.capa_factory_for_problem_xml("""\
<problem display_name="Multiple Inputs">
<p>Choose the correct color</p>
<optionresponse>
<p>What color is the sky?</p>
<optioninput options="('yellow','blue','green')" correct="blue"/>
<p>What color are pine needles?</p>
<optioninput options="('yellow','blue','green')" correct="green"/>
</optionresponse>
</problem>
""")
module = factory.create()
answer_input_dict = {
factory.input_key(2, 1): 'blue',
factory.input_key(2, 2): 'yellow',
}
event = self.get_event_for_answers(module, answer_input_dict)
self.assertEquals(event['submission'], {
factory.answer_key(2, 1): {
'question': '',
'answer': 'blue',
'response_type': 'optionresponse',
'input_type': 'optioninput',
'correct': True,
'variant': '',
},
factory.answer_key(2, 2): {
'question': '',
'answer': 'yellow',
'response_type': 'optionresponse',
'input_type': 'optioninput',
'correct': False,
'variant': '',
},
})
def test_rerandomized_inputs(self):
factory = CapaFactory
module = factory.create(rerandomize=RANDOMIZATION.ALWAYS)
answer_input_dict = {
factory.input_key(2): '3.14'
}
event = self.get_event_for_answers(module, answer_input_dict)
self.assertEquals(event['submission'], {
factory.answer_key(2): {
'question': '',
'answer': '3.14',
'response_type': 'numericalresponse',
'input_type': 'textline',
'correct': True,
'variant': module.seed,
}
})
def test_file_inputs(self):
fnames = ["prog1.py", "prog2.py", "prog3.py"]
fpaths = [os.path.join(DATA_DIR, "capa", fname) for fname in fnames]
fileobjs = [open(fpath) for fpath in fpaths]
for fileobj in fileobjs:
self.addCleanup(fileobj.close)
factory = CapaFactoryWithFiles
module = factory.create()
# Mock the XQueueInterface.
xqueue_interface = XQueueInterface("http://example.com/xqueue", Mock())
xqueue_interface._http_post = Mock(return_value=(0, "ok")) # pylint: disable=protected-access
module.system.xqueue['interface'] = xqueue_interface
answer_input_dict = {
CapaFactoryWithFiles.input_key(response_num=2): fileobjs,
CapaFactoryWithFiles.input_key(response_num=3): 'None',
}
event = self.get_event_for_answers(module, answer_input_dict)
self.assertEquals(event['submission'], {
factory.answer_key(2): {
'question': '',
'answer': fpaths,
'response_type': 'coderesponse',
'input_type': 'filesubmission',
'correct': False,
'variant': '',
},
factory.answer_key(3): {
'answer': 'None',
'correct': True,
'question': '',
'response_type': 'customresponse',
'input_type': 'textline',
'variant': ''
}
})
def test_get_answer_with_jump_to_id_urls(self):
"""
Make sure replace_jump_to_id_urls() is called in get_answer.
"""
problem_xml = textwrap.dedent("""
<problem>
<p>What is 1+4?</p>
<numericalresponse answer="5">
<formulaequationinput />
</numericalresponse>
<solution>
<div class="detailed-solution">
<p>Explanation</p>
<a href="/jump_to_id/c0f8d54964bc44a4a1deb8ecce561ecd">here's the same link to the hint page.</a>
</div>
</solution>
</problem>
""")
data = dict()
problem = CapaFactory.create(showanswer='always', xml=problem_xml)
problem.runtime.replace_jump_to_id_urls = Mock()
problem.get_answer(data)
self.assertTrue(problem.runtime.replace_jump_to_id_urls.called)
| agpl-3.0 |
SebastianMerz/calalert | Server/venv/lib/python2.7/site-packages/werkzeug/contrib/securecookie.py | 318 | 12204 | # -*- coding: utf-8 -*-
r"""
werkzeug.contrib.securecookie
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module implements a cookie that is not alterable from the client
because it adds a checksum the server checks for. You can use it as
a session replacement if all you have is a user id or something to mark
a logged-in user.
Keep in mind that the data is still readable from the client as a
normal cookie is. However, you don't have to store and flush the
sessions you keep on the server.
Example usage:
>>> from werkzeug.contrib.securecookie import SecureCookie
>>> x = SecureCookie({"foo": 42, "baz": (1, 2, 3)}, "deadbeef")
Dumping into a string so that one can store it in a cookie:
>>> value = x.serialize()
Loading from that string again:
>>> x = SecureCookie.unserialize(value, "deadbeef")
>>> x["baz"]
(1, 2, 3)
If someone modifies the cookie and the checksum is wrong the unserialize
method will fail silently and return a new empty `SecureCookie` object.
Keep in mind that the values will be visible in the cookie so do not
store data in a cookie you don't want the user to see.
Application Integration
=======================
If you are using the werkzeug request objects you could integrate the
secure cookie into your application like this::
from werkzeug.utils import cached_property
from werkzeug.wrappers import BaseRequest
from werkzeug.contrib.securecookie import SecureCookie
# don't use this key but a different one; you could just use
# os.urandom(20) to get something random
SECRET_KEY = '\xfa\xdd\xb8z\xae\xe0}4\x8b\xea'
class Request(BaseRequest):
@cached_property
def client_session(self):
data = self.cookies.get('session_data')
if not data:
return SecureCookie(secret_key=SECRET_KEY)
return SecureCookie.unserialize(data, SECRET_KEY)
def application(environ, start_response):
request = Request(environ, start_response)
# get a response object here
response = ...
if request.client_session.should_save:
session_data = request.client_session.serialize()
response.set_cookie('session_data', session_data,
httponly=True)
return response(environ, start_response)
A less verbose integration can be achieved by using shorthand methods::
class Request(BaseRequest):
@cached_property
def client_session(self):
return SecureCookie.load_cookie(self, secret_key=COOKIE_SECRET)
def application(environ, start_response):
request = Request(environ, start_response)
# get a response object here
response = ...
request.client_session.save_cookie(response)
return response(environ, start_response)
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import pickle
import base64
from hmac import new as hmac
from time import time
from hashlib import sha1 as _default_hash
from werkzeug._compat import iteritems, text_type
from werkzeug.urls import url_quote_plus, url_unquote_plus
from werkzeug._internal import _date_to_unix
from werkzeug.contrib.sessions import ModificationTrackingDict
from werkzeug.security import safe_str_cmp
from werkzeug._compat import to_native
class UnquoteError(Exception):
"""Internal exception used to signal failures on quoting."""
class SecureCookie(ModificationTrackingDict):
"""Represents a secure cookie. You can subclass this class and provide
an alternative mac method. The important thing is that the mac method
is a function with an interface similar to hashlib's. Required
methods are update() and digest().
Example usage:
>>> x = SecureCookie({"foo": 42, "baz": (1, 2, 3)}, "deadbeef")
>>> x["foo"]
42
>>> x["baz"]
(1, 2, 3)
>>> x["blafasel"] = 23
>>> x.should_save
True
:param data: the initial data. Either a dict, list of tuples or `None`.
:param secret_key: the secret key. If `None` or not specified,
it has to be set before :meth:`serialize` is called.
:param new: The initial value of the `new` flag.
"""
#: The hash method to use. This has to be a module with a new function
#: or a function that creates a hashlib object. Such as `hashlib.md5`
#: Subclasses can override this attribute. The default hash is sha1.
#: Make sure to wrap this in staticmethod() if you store an arbitrary
#: function there such as hashlib.sha1 which might be implemented
#: as a function.
hash_method = staticmethod(_default_hash)
#: the module used for serialization. Unless overridden by subclasses
#: the standard pickle module is used.
serialization_method = pickle
#: if the contents should be base64 quoted. This can be disabled if the
#: serialization process returns cookie safe strings only.
quote_base64 = True
def __init__(self, data=None, secret_key=None, new=True):
ModificationTrackingDict.__init__(self, data or ())
# explicitly convert it into a bytestring because python 2.6
# no longer performs an implicit string conversion on hmac
if secret_key is not None:
secret_key = bytes(secret_key)
self.secret_key = secret_key
self.new = new
def __repr__(self):
return '<%s %s%s>' % (
self.__class__.__name__,
dict.__repr__(self),
self.should_save and '*' or ''
)
@property
def should_save(self):
"""True if the session should be saved. By default this is only true
for :attr:`modified` cookies, not :attr:`new`.
"""
return self.modified
@classmethod
def quote(cls, value):
"""Quote the value for the cookie. This can be any object supported
by :attr:`serialization_method`.
:param value: the value to quote.
"""
if cls.serialization_method is not None:
value = cls.serialization_method.dumps(value)
if cls.quote_base64:
value = b''.join(base64.b64encode(value).splitlines()).strip()
return value
@classmethod
def unquote(cls, value):
"""Unquote the value for the cookie. If unquoting does not work a
:exc:`UnquoteError` is raised.
:param value: the value to unquote.
"""
try:
if cls.quote_base64:
value = base64.b64decode(value)
if cls.serialization_method is not None:
value = cls.serialization_method.loads(value)
return value
except Exception:
# unfortunately pickle and other serialization modules can
# cause pretty much every error here. if we get one we catch it
# and convert it into an UnquoteError
raise UnquoteError()
def serialize(self, expires=None):
"""Serialize the secure cookie into a string.
If expires is provided, the session will be automatically invalidated
after expiration when you unserialize it. This provides better
protection against session cookie theft.
:param expires: an optional expiration date for the cookie (a
:class:`datetime.datetime` object)
"""
if self.secret_key is None:
raise RuntimeError('no secret key defined')
if expires:
self['_expires'] = _date_to_unix(expires)
result = []
mac = hmac(self.secret_key, None, self.hash_method)
for key, value in sorted(self.items()):
result.append(('%s=%s' % (
url_quote_plus(key),
self.quote(value).decode('ascii')
)).encode('ascii'))
mac.update(b'|' + result[-1])
return b'?'.join([
base64.b64encode(mac.digest()).strip(),
b'&'.join(result)
])
@classmethod
def unserialize(cls, string, secret_key):
"""Load the secure cookie from a serialized string.
:param string: the cookie value to unserialize.
:param secret_key: the secret key used to serialize the cookie.
:return: a new :class:`SecureCookie`.
"""
if isinstance(string, text_type):
string = string.encode('utf-8', 'replace')
if isinstance(secret_key, text_type):
secret_key = secret_key.encode('utf-8', 'replace')
try:
base64_hash, data = string.split(b'?', 1)
except (ValueError, IndexError):
items = ()
else:
items = {}
mac = hmac(secret_key, None, cls.hash_method)
for item in data.split(b'&'):
mac.update(b'|' + item)
if not b'=' in item:
items = None
break
key, value = item.split(b'=', 1)
# try to make the key a string
key = url_unquote_plus(key.decode('ascii'))
try:
key = to_native(key)
except UnicodeError:
pass
items[key] = value
# no parsing error and the mac looks okay, we can now
# securely unpickle our cookie.
try:
client_hash = base64.b64decode(base64_hash)
except TypeError:
items = client_hash = None
if items is not None and safe_str_cmp(client_hash, mac.digest()):
try:
for key, value in iteritems(items):
items[key] = cls.unquote(value)
except UnquoteError:
items = ()
else:
if '_expires' in items:
if time() > items['_expires']:
items = ()
else:
del items['_expires']
else:
items = ()
return cls(items, secret_key, False)
@classmethod
def load_cookie(cls, request, key='session', secret_key=None):
"""Loads a :class:`SecureCookie` from a cookie in request. If the
cookie is not set, a new :class:`SecureCookie` instance is
returned.
:param request: a request object that has a `cookies` attribute
which is a dict of all cookie values.
:param key: the name of the cookie.
:param secret_key: the secret key used to unquote the cookie.
Always provide the value even though it has
no default!
"""
data = request.cookies.get(key)
if not data:
return cls(secret_key=secret_key)
return cls.unserialize(data, secret_key)
def save_cookie(self, response, key='session', expires=None,
session_expires=None, max_age=None, path='/', domain=None,
secure=None, httponly=False, force=False):
"""Saves the SecureCookie in a cookie on response object. All
parameters that are not described here are forwarded directly
to :meth:`~BaseResponse.set_cookie`.
:param response: a response object that has a
:meth:`~BaseResponse.set_cookie` method.
:param key: the name of the cookie.
:param session_expires: the expiration date of the secure cookie
stored information. If this is not provided
the cookie `expires` date is used instead.
"""
if force or self.should_save:
data = self.serialize(session_expires or expires)
response.set_cookie(key, data, expires=expires, max_age=max_age,
path=path, domain=domain, secure=secure,
httponly=httponly)
| gpl-2.0 |
AKS1996/VOCOWA | merge_images.py | 1 | 2513 | from operator import add
from numpy import subtract
from PIL import Image, ImageMath
def merge(name_img1, name_img2, position_wrt_img1=(0, 0)):
"""
Merges two images. Currently works for negative positions too.
:param name_img1: Image 1
:param name_img2: Image 2
:param position_wrt_img1: merging offset w.r.t. the top-left corner of image 1
:return: the blended PIL Image
"""
if position_wrt_img1 > (0, 0):
img1 = Image.open(name_img1)
img2 = Image.open(name_img2)
else:
position_wrt_img1 = tuple(subtract((0, 0), position_wrt_img1))
img2 = Image.open(name_img1)
img1 = Image.open(name_img2)
w, h = map(max, map(add, img2.size, position_wrt_img1), img1.size)
# pasting img1 on img2
_img1 = Image.new('RGB', size=(w, h), color=0)
_img1.paste(img1, (0, 0))
# pasting opposite way
_img2 = Image.new('RGB', size=(w, h), color=0)
_img2.paste(img2, position_wrt_img1)
return Image.blend(_img1, _img2, alpha=0.5)
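# A usage sketch (hedged): the file names are hypothetical; a positive
# offset pastes img2 below and to the right of img1's top-left corner
# before blending.
#
# blended = merge('map_a.png', 'map_b.png', position_wrt_img1=(40, 10))
# blended.save('merged.png')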
def merge_BW(name_img1, name_img2, PWSB):
"""
Merges two BW images
Calculates Top Left Corner of img2 in expected (resultant) image
:param name_img1: Image 1
:param name_img2: Image 2
:param PWSB: Point Where Scan Began in img1
:return: updated PWSB in the resultant map
"""
img1 = Image.open(name_img1)
img2 = Image.open(name_img2)
# TODO Negative Corner Issue
# PWSB will be positive; the corner may not be
corner = tuple(subtract(PWSB, (img2.size[0]/2, img2.size[1]/2)))
w = max(img2.size[0] + corner[0], img1.size[0]) - min(0, corner[0])
h = max(img2.size[1] + corner[1], img1.size[1]) - min(0, corner[1])
# Create 2 canvas
_img1 = Image.new('L', size=(w, h), color=0)
_img2 = Image.new('L', size=(w, h), color=0)
if corner[0] >= 0 and corner[1] >= 0:
_img1.paste(img1, (0, 0))
_img2.paste(img2, corner)
elif corner[0] <= 0 and corner[1] <= 0:
_img1.paste(img1, tuple(subtract((0, 0), corner)))
_img2.paste(img2, (0, 0))
elif corner[0] <= 0 and corner[1] >= 0:
_img1.paste(img1, (w - img1.size[0], 0))
_img2.paste(img2, (0, h - img2.size[1]))
else:
_img1.paste(img1, (0, h - img1.size[1]))
_img2.paste(img2, (w - img2.size[0], 0))
i = ImageMath.eval("a|b", a=_img1, b=_img2)
i.convert('L').save('result.jpg')
for i in range(2):
if img2.size[i] / 2 > PWSB[i]: # PWSB will be positive
PWSB[i] = img2.size[i] / 2
return PWSB
| mit |
jhdulaney/fulla | fulla/droplet.py | 1 | 5978 | #
# fulla -- work with Digital Ocean
#
# Copyright (C) 2015 John H. Dulaney <[email protected]>
#
# Licensed under the GNU General Public License Version 2
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
#
"""Interact with Digital Ocean account"""
try:
from io import BytesIO
except ImportError:
from StringIO import StringIO as BytesIO
import pycurl
import json
from fulla import settings
Null = json.dumps(None)
def get_info(location):
"""Retreive Droplet data from Digital Ocean"""
buff = BytesIO()
auth = 'Authorization: Bearer ' + settings.token
curler = pycurl.Curl()
curler.setopt(curler.URL, settings.api_url + location)
curler.setopt(curler.HTTPHEADER, [auth])
curler.setopt(curler.WRITEDATA, buff)
try:
curler.perform()
except:
raise
curler.close()
results = buff.getvalue()
results = results.decode('iso-8859-1')
results = json.loads(results)
return results
def send_request(location, request):
location = settings.api_url + location
class _Buffer(object):
def __init__(self):
self.data = ''
def incoming(self, buff):
self.data += buff.decode('iso-8859-1')
auth = 'Authorization: Bearer ' + settings.token
post_request = json.dumps(request)
try:
buff = _Buffer()
curler = pycurl.Curl()
curler.setopt(curler.HTTPHEADER, [auth, "Content-type: application/json"])
curler.setopt(curler.URL, location)
curler.setopt(curler.POSTFIELDS, post_request)
curler.setopt(curler.WRITEFUNCTION, buff.incoming)
curler.perform()
curler.close()
return buff.data
except:
raise
def send_delete(location):
location = settings.api_url + location
buff = BytesIO()
auth = 'Authorization: Bearer ' + settings.token
try:
curler = pycurl.Curl()
curler.setopt(curler.HTTPHEADER, [auth, "Content-type: application/json"])
curler.setopt(curler.URL, location)
curler.setopt(curler.CUSTOMREQUEST, "DELETE")
curler.setopt(curler.WRITEDATA, buff)
curler.perform()
curler.close()
result = json.loads(buff.getvalue().decode('iso-8859-1'))
return result
except:
raise
class Account(object):
"""Digital Ocean Account object"""
def __init__(self):
self.droplet_limit = 0
self.email = ''
self.uuid = ''
self.email_verified = None
self.status = ''
self.status_message = ''
def get_data(self):
"""Retreive user data from Digital Ocean"""
results = get_info('account')
try:
results = results['account']
self.droplet_limit = results['droplet_limit']
self.email = results['email']
self.uuid = results['uuid']
self.email_verified = results['email_verified']
self.status = results['status']
self.status_message = results['status_message']
except:
print(results['id'], results['message'])
raise
return 0
def get_droplets():
"""Retreive Droplet data from Digital Ocean"""
results = get_info('droplets')
try:
droplets = results['droplets']
num_droplets = results['meta']['total']
except:
print(results['id'], results['message'])
raise
return droplets, num_droplets
def get_imagelist():
"""Get list of available images"""
results = get_info('images?page=1')
try:
num_pages = int(results['links']['pages']['last'].rsplit('=', 1)[1])
except:
print(results['id'], results['message'])
raise
image_list = results['images']
for page in range(2, num_pages + 1):
results = get_info('images?page=' + str(page))
image_list += results['images']
return image_list
def get_keys():
results = get_info('account/keys')
try:
num_keys = int(results['meta']['total'])
keys = results['ssh_keys']
except:
print(results['id'], results['message'])
raise
return keys, num_keys
def create_droplet(name, region, size, image_slug, ssh_keys, user_data=Null, private_networking=Null, ipv6=Null, backups=Null):
"""Create new droplet
Note: ssh_keys *must* be a list
"""
images = get_imagelist()
droplet = None
for image in images:
if (image_slug == image['slug'] or image_slug == image['id']):
droplet = {"name": name, "region": region, "size": size, "image": image_slug,
"ssh_keys": ssh_keys, "backups": backups, "ipv6": ipv6,
"user_data": user_data, "private_networking": private_networking}
if droplet is not None:
result = send_request('droplets', droplet)
try:
result = json.loads(result)
except:
print(result['id'], result['message'])
raise
return result
else:
print("Image does not exist")
raise
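# A usage sketch (hedged): every value below is a placeholder, not a real
# Digital Ocean slug or SSH key id; note that ssh_keys must be a list.
#
# result = create_droplet('web-1', 'nyc3', '512mb', 'ubuntu-14-04-x64', [12345])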
def delete_droplet(droplet_id):
send_delete('droplets/' + str(droplet_id))
return 0
def reboot_droplet(droplet_id):
"""Reboot droplet"""
request = 'droplets/' + str(droplet_id) + '/actions'
result = send_request(request, '{"type":"reboot"}')
return result
| gpl-3.0 |
PythonCharmers/bokeh | bokeh/charts/utils.py | 42 | 7607 | """ This is the utils module that collects convenience functions and code that are
useful for the charts ecosystem.
"""
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2014, Continuum Analytics, Inc. All rights reserved.
#
# Powered by the Bokeh Development Team.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
from collections import OrderedDict
import itertools
from math import cos, sin
from ..browserlib import view
from ..document import Document
from ..embed import file_html
from ..models import GlyphRenderer
from ..models.glyphs import (
Asterisk, Circle, CircleCross, CircleX, Cross, Diamond, DiamondCross,
InvertedTriangle, Square, SquareCross, SquareX, Triangle, X)
from ..resources import INLINE
from ..session import Session
from ..util.notebook import publish_display_data
#-----------------------------------------------------------------------------
# Classes and functions
#-----------------------------------------------------------------------------
# TODO: (bev) this should go in a plotting utils one level up
_default_cycle_palette = [
"#f22c40", "#5ab738", "#407ee7", "#df5320", "#00ad9c", "#c33ff3"
]
def cycle_colors(chunk, palette=_default_cycle_palette):
""" Build a color list just cycling through a given palette.
Args:
chunk (seq): the chunk of elements used to generate the color list
palette (seq[color]) : a palette of colors to cycle through
Returns:
colors
"""
colors = []
g = itertools.cycle(palette)
for i in range(len(chunk)):
colors.append(next(g))
return colors
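# A doctest-style sketch (hedged, hypothetical input) of the cycling
# behaviour with the default palette:
#
# >>> cycle_colors(['a', 'b', 'c'])
# ['#f22c40', '#5ab738', '#407ee7']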
# TODO: (bev) this should go in a plotting utils one level up
def make_scatter(source, x, y, markertype, color, line_color=None,
size=10, fill_alpha=0.2, line_alpha=1.0):
"""Create a marker glyph and appends it to the renderers list.
Args:
source (obj): datasource object containing markers references.
x (str or list[float]) : values or field names of line ``x`` coordinates
y (str or list[float]) : values or field names of line ``y`` coordinates
markertype (int or str): Marker type to use (e.g., 2, 'circle', etc.)
color (str): color of the points
size (int) : size of the scatter marker
fill_alpha(float) : alpha value of the fill color
line_alpha(float) : alpha value of the line color
Return:
scatter: Marker Glyph instance
"""
if line_color is None:
line_color = color
_marker_types = OrderedDict(
[
("circle", Circle),
("square", Square),
("triangle", Triangle),
("diamond", Diamond),
("inverted_triangle", InvertedTriangle),
("asterisk", Asterisk),
("cross", Cross),
("x", X),
("circle_cross", CircleCross),
("circle_x", CircleX),
("square_x", SquareX),
("square_cross", SquareCross),
("diamond_cross", DiamondCross),
]
)
g = itertools.cycle(_marker_types.keys())
if isinstance(markertype, int):
for i in range(markertype):
shape = next(g)
else:
shape = markertype
glyph = _marker_types[shape](
x=x, y=y, size=size, fill_color=color, fill_alpha=fill_alpha,
line_color=line_color, line_alpha=line_alpha
)
return GlyphRenderer(data_source=source, glyph=glyph)
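# A usage sketch (hedged): 'source' stands for a hypothetical
# ColumnDataSource with 'x' and 'y' columns; an integer markertype cycles
# through _marker_types in the order defined above.
#
# renderer = make_scatter(source, 'x', 'y', 'circle', '#407ee7', size=8)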
def chunk(l, n):
"""Yield successive n-sized chunks from l.
Args:
l (list): the incoming list to be chunked
n (int): length of your chunks
"""
for i in range(0, len(l), n):
yield l[i:i + n]
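# A doctest-style sketch (hedged, hypothetical data); chunk is a generator,
# so wrap it in list() to realise the chunks:
#
# >>> list(chunk([1, 2, 3, 4, 5], 2))
# [[1, 2], [3, 4], [5]]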
def polar_to_cartesian(r, start_angles, end_angles):
"""Translate polar coordinates to cartesian.
Args:
r (float): radial coordinate
start_angles (list(float)): list of start angles
end_angles (list(float)): list of end_angles angles
Returns:
x, y points
"""
cartesian = lambda r, alpha: (r*cos(alpha), r*sin(alpha))
points = []
for start, end in zip(start_angles, end_angles):
points.append(cartesian(r, (end + start)/2))
return zip(*points)
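# A worked sketch (hedged): for a single wedge spanning 0..pi at r=1 the
# midpoint angle is pi/2, so the returned point is approximately (0.0, 1.0):
#
# >>> from math import pi
# >>> polar_to_cartesian(1.0, [0.0], [pi])
# [(6.1...e-17,), (1.0,)]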
# TODO: Experimental implementation. This should really be a shared
# pattern between plotting/charts and other bokeh interfaces.
# This will probably be part of the future charts re-design
# to make them inherit from plot (or at least be closer to).
# In this was both charts and plotting could share figure,
# show, save, push methods as well as VBox, etc...
class Figure(object):
def __init__(self, *charts, **kwargs):
self.filename = kwargs.pop('filename', None)
self.server = kwargs.pop('server', None)
self.notebook = kwargs.pop('notebook', None)
self.title = kwargs.pop('title', '')
self.children = kwargs.pop('children', None)
self.charts = charts
self.doc = Document()
self.doc.hold(True)
self._plots = []
if self.server:
self.session = Session()
self.session.use_doc(self.server)
self.session.load_document(self.doc)
if self.children:
from bokeh.models import VBox
self.doc.add(VBox(children=self.children))
self.plot = None
for i, chart in enumerate(self.charts):
chart.doc = self.doc
if self.server:
chart.session = self.session
# Force the chart to create the underlying plot
chart._setup_show()
chart._prepare_show()
chart._show_teardown()
if not self.title:
self.title = chart.chart.title
self._plots += chart.chart._plots
# reset the plot title with the one set for the Figure
self.doc._current_plot.title = self.title
def show(self):
"""Main show function.
It shows the Figure in file, server and notebook outputs.
"""
show(self, self.title, self.filename, self.server, self.notebook)
def show(obj, title='test', filename=False, server=False, notebook=False, **kws):
""" 'shows' a plot object, by auto-raising the window or tab
displaying the current plot (for file/server output modes) or displaying
it in an output cell (IPython notebook).
Args:
obj (Widget/Plot object, optional): it accepts a plot object and just shows it.
"""
if filename:
if filename is True:
filename = "untitled"
with open(filename, "w") as f:
f.write(file_html(obj.doc, INLINE, title))
print("Wrote %s" % filename)
view(filename)
elif filename is False and server is False and notebook is False:
print("You have to provide a filename (filename='foo.html' or"
" .filename('foo.html')) to save your plot.")
if server:
obj.session.store_document(obj.doc)
link = obj.session.object_link(obj.doc.context)
view(link)
if notebook:
from bokeh.embed import notebook_div
for plot in obj._plots:
publish_display_data({'text/html': notebook_div(plot)})
| bsd-3-clause |
openfun/edx-platform | lms/djangoapps/verify_student/tests/fake_software_secure.py | 29 | 1657 | """
Fake Software Secure page for use in acceptance tests.
"""
from django.conf import settings
from django.contrib.auth.decorators import login_required
from django.core.urlresolvers import reverse
from django.utils.decorators import method_decorator
from django.views.generic.base import View
from edxmako.shortcuts import render_to_response
from verify_student.models import SoftwareSecurePhotoVerification
class SoftwareSecureFakeView(View):
"""
Fake SoftwareSecure view for testing different photo verification statuses
and email functionality.
"""
@method_decorator(login_required)
def get(self, request):
"""
Render a fake Software Secure page that will pick the most recent
attempt for a given user and pass it to the html page.
"""
context_dict = self.response_post_params(request.user)
return render_to_response("verify_student/test/fake_softwaresecure_response.html", context_dict)
@classmethod
def response_post_params(cls, user):
"""
Calculate the POST params we want to send back to the client.
"""
access_key = settings.VERIFY_STUDENT["SOFTWARE_SECURE"]["API_ACCESS_KEY"]
context = {
'receipt_id': None,
'authorization_code': 'SIS {}:0000'.format(access_key),
'results_callback': reverse('verify_student_results_callback')
}
try:
most_recent = SoftwareSecurePhotoVerification.original_verification(user)
context["receipt_id"] = most_recent.receipt_id
except: # pylint: disable=bare-except
pass
return context
| agpl-3.0 |
liorvh/phantomjs | src/qt/qtwebkit/Source/WebKit2/Scripts/generate-messages-header.py | 145 | 1809 | #!/usr/bin/env python
#
# Copyright (C) 2010 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import with_statement
import sys
import webkit2.messages
def main(argv=None):
if not argv:
argv = sys.argv
input_path = argv[1]
with open(input_path) as input_file:
# Python 3, change to: print(webkit2.messages.generate_messages_header(input_file), end='')
sys.stdout.write(webkit2.messages.generate_messages_header(input_file))
return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause |
abhilashnta/edx-platform | common/test/acceptance/pages/studio/settings_group_configurations.py | 74 | 10062 | """
Course Group Configurations page.
"""
from bok_choy.promise import EmptyPromise
from .course_page import CoursePage
from .utils import confirm_prompt
class GroupConfigurationsPage(CoursePage):
"""
Course Group Configurations page.
"""
url_path = "group_configurations"
experiment_groups_css = ".experiment-groups"
content_groups_css = ".content-groups"
def is_browser_on_page(self):
"""
Verify that the browser is on the page and it is not still loading.
"""
EmptyPromise(
lambda: self.q(css='body.view-group-configurations').present,
'On the group configuration page'
).fulfill()
EmptyPromise(
lambda: not self.q(css='span.spin').visible,
'Group Configurations are finished loading'
).fulfill()
return True
@property
def experiment_group_configurations(self):
"""
Return list of the experiment group configurations for the course.
"""
return self._get_groups(self.experiment_groups_css)
@property
def content_groups(self):
"""
Return list of the content groups for the course.
"""
return self._get_groups(self.content_groups_css)
def _get_groups(self, prefix):
"""
Return the list of group-configurations-list-items of the specified type for the course.
"""
css = prefix + ' .wrapper-collection'
return [GroupConfiguration(self, prefix, index) for index in xrange(len(self.q(css=css)))]
def create_experiment_group_configuration(self):
"""
Creates new group configuration.
"""
self.q(css=self.experiment_groups_css + " .new-button").first.click()
def create_first_content_group(self):
"""
Creates new content group when there are none initially defined.
"""
self.q(css=self.content_groups_css + " .new-button").first.click()
def add_content_group(self):
"""
Creates new content group when at least one already exists
"""
self.q(css=self.content_groups_css + " .action-add").first.click()
@property
def no_experiment_groups_message_is_present(self):
return self._no_content_message(self.experiment_groups_css).present
@property
def no_content_groups_message_is_present(self):
return self._no_content_message(self.content_groups_css).present
@property
def no_experiment_groups_message_text(self):
return self._no_content_message(self.experiment_groups_css).text[0]
@property
def no_content_groups_message_text(self):
return self._no_content_message(self.content_groups_css).text[0]
def _no_content_message(self, prefix):
"""
Returns the message about "no content" for the specified type.
"""
return self.q(css='.wrapper-content ' + prefix + ' .no-content')
@property
def experiment_group_sections_present(self):
"""
Returns whether or not anything related to content experiments is present.
"""
return self.q(css=self.experiment_groups_css).present or self.q(css=".experiment-groups-doc").present
class GroupConfiguration(object):
"""
Group Configuration wrapper.
"""
def __init__(self, page, prefix, index):
self.page = page
self.SELECTOR = prefix + ' .wrapper-collection-{}'.format(index)
self.index = index
def get_selector(self, css=''):
return ' '.join([self.SELECTOR, css])
def find_css(self, selector):
"""
Find elements as defined by css locator.
"""
return self.page.q(css=self.get_selector(css=selector))
def toggle(self):
"""
Expand/collapse group configuration.
"""
self.find_css('a.group-toggle').first.click()
@property
def is_expanded(self):
"""
Group configuration usage information is expanded.
"""
return self.find_css('a.group-toggle.hide-groups').present
def add_group(self):
"""
Add new group.
"""
self.find_css('button.action-add-group').first.click()
def get_text(self, css):
"""
Return text for the defined by css locator.
"""
return self.find_css(css).first.text[0]
def click_outline_anchor(self):
"""
Click on the `Course Outline` link.
"""
self.find_css('p.group-configuration-usage-text a').first.click()
def click_unit_anchor(self, index=0):
"""
Click on the link to the unit.
"""
self.find_css('li.group-configuration-usage-unit a').nth(index).click()
def edit(self):
"""
Open editing view for the group configuration.
"""
self.find_css('.action-edit .edit').first.click()
@property
def delete_button_is_disabled(self):
return self.find_css('.actions .delete.is-disabled').present
@property
def delete_button_is_present(self):
"""
Returns whether or not the delete icon is present.
"""
return self.find_css('.actions .delete').present
def delete(self):
"""
Delete the group configuration.
"""
self.find_css('.actions .delete').first.click()
confirm_prompt(self.page)
def save(self):
"""
Save group configuration.
"""
self.find_css('.action-primary').first.click()
self.page.wait_for_ajax()
def cancel(self):
"""
Cancel group configuration.
"""
self.find_css('.action-secondary').first.click()
@property
def mode(self):
"""
Return group configuration mode.
"""
if self.find_css('.collection-edit').present:
return 'edit'
elif self.find_css('.collection').present:
return 'details'
@property
def id(self):
"""
Return group configuration id.
"""
return self.get_text('.group-configuration-id .group-configuration-value')
@property
def validation_message(self):
"""
Return validation message.
"""
return self.get_text('.message-status.error')
@property
def usages(self):
"""
Return list of usages.
"""
css = '.group-configuration-usage-unit'
return self.find_css(css).text
@property
def name(self):
"""
Return group configuration name.
"""
return self.get_text('.title')
@name.setter
def name(self, value):
"""
Set group configuration name.
"""
self.find_css('.collection-name-input').first.fill(value)
@property
def description(self):
"""
Return group configuration description.
"""
return self.get_text('.group-configuration-description')
@description.setter
def description(self, value):
"""
Set group configuration description.
"""
self.find_css('.group-configuration-description-input').first.fill(value)
@property
def groups(self):
"""
Return list of groups.
"""
def group_selector(group_index):
return self.get_selector('.group-{} '.format(group_index))
return [Group(self.page, group_selector(index)) for index, element in enumerate(self.find_css('.group'))]
@property
def delete_note(self):
"""
Return delete note for the group configuration.
"""
return self.find_css('.wrapper-delete-button').first.attrs('data-tooltip')[0]
@property
def details_error_icon_is_present(self):
return self.find_css('.wrapper-group-configuration-usages .fa-exclamation-circle').present
@property
def details_warning_icon_is_present(self):
return self.find_css('.wrapper-group-configuration-usages .fa-warning').present
@property
def details_message_is_present(self):
return self.find_css('.wrapper-group-configuration-usages .group-configuration-validation-message').present
@property
def details_message_text(self):
return self.find_css('.wrapper-group-configuration-usages .group-configuration-validation-message').text[0]
@property
def edit_warning_icon_is_present(self):
return self.find_css('.wrapper-group-configuration-validation .fa-warning').present
@property
def edit_warning_message_is_present(self):
return self.find_css('.wrapper-group-configuration-validation .group-configuration-validation-text').present
@property
def edit_warning_message_text(self):
return self.find_css('.wrapper-group-configuration-validation .group-configuration-validation-text').text[0]
def __repr__(self):
return "<{}:{}>".format(self.__class__.__name__, self.name)
class Group(object):
"""
Group wrapper.
"""
def __init__(self, page, prefix_selector):
self.page = page
self.prefix = prefix_selector
def find_css(self, selector):
"""
Find elements as defined by css locator.
"""
return self.page.q(css=self.prefix + selector)
@property
def name(self):
"""
Return the name of the group .
"""
css = '.group-name'
return self.find_css(css).first.text[0]
@name.setter
def name(self, value):
"""
Set the name for the group.
"""
css = '.group-name'
self.find_css(css).first.fill(value)
@property
def allocation(self):
"""
Return allocation for the group.
"""
css = '.group-allocation'
return self.find_css(css).first.text[0]
def remove(self):
"""
Remove the group.
"""
css = '.action-close'
return self.find_css(css).first.click()
def __repr__(self):
return "<{}:{}>".format(self.__class__.__name__, self.name)
| agpl-3.0 |
PowerDNS/exabgp | lib/exabgp/bgp/message/update/nlri/flow.py | 1 | 16373 | # encoding: utf-8
"""
flow.py
Created by Thomas Mangin on 2010-01-14.
Copyright (c) 2009-2013 Exa Networks. All rights reserved.
"""
# Do not use __slots__ here, we never create enough of them to be worth it
# And it really breaks complex inheritance
from struct import pack
from struct import unpack
from exabgp.protocol.ip import IP
from exabgp.protocol.ip import NoIP
from exabgp.protocol.family import AFI
from exabgp.protocol.family import SAFI
from exabgp.bgp.message import OUT
from exabgp.bgp.message.notification import Notify
from exabgp.bgp.message.update.nlri.cidr import CIDR
from exabgp.protocol import Protocol
from exabgp.protocol import NamedProtocol
from exabgp.protocol.ip.icmp import ICMPType
from exabgp.protocol.ip.icmp import ICMPCode
from exabgp.protocol.ip.icmp import NamedICMPType
from exabgp.protocol.ip.icmp import NamedICMPCode
from exabgp.protocol.ip.fragment import Fragment
from exabgp.protocol.ip.fragment import NamedFragment
from exabgp.protocol.ip.tcp.flag import TCPFlag
from exabgp.protocol.ip.tcp.flag import NamedTCPFlag
from exabgp.bgp.message.update.nlri.nlri import NLRI
from exabgp.bgp.message.update.nlri.qualifier.rd import RouteDistinguisher
# =================================================================== Flow Components
class IComponent (object):
# all components have an ID
# a serialisation interface should be defined and put here
pass
class CommonOperator (object):
# power(2, x) is the same as 1 << x, which is how the RFC says the len is encoded
power = {0:1, 1:2, 2:4, 3:8,}
rewop = {1:0, 2:1, 4:2, 8:3,}
len_position = 0x30
EOL = 0x80 # 0b10000000
AND = 0x40 # 0b01000000
LEN = 0x30 # 0b00110000
NOP = 0x00
OPERATOR = 0xFF ^ (EOL | LEN)
@staticmethod
def eol (data):
return data & CommonOperator.EOL
@staticmethod
def operator (data):
return data & CommonOperator.OPERATOR
@staticmethod
def length (data):
return 1 << ((data & CommonOperator.LEN) >> 4)
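# A worked example (hedged, illustrative values): an operator byte of 0x91
# has EOL set, LEN bits 01 and the EQ bit set, so CommonOperator.eol(0x91)
# is truthy, CommonOperator.length(0x91) == 2 (a two-byte operand) and
# CommonOperator.operator(0x91) == 0x01, i.e. the EQ operation.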
class NumericOperator (CommonOperator):
# reserved = 0x08 # 0b00001000
LT = 0x04 # 0b00000100
GT = 0x02 # 0b00000010
EQ = 0x01 # 0b00000001
class BinaryOperator (CommonOperator):
# reserved = 0x0C # 0b00001100
NOT = 0x02 # 0b00000010
MATCH = 0x01 # 0b00000001
def _len_to_bit (value):
return NumericOperator.rewop[value] << 4
def _bit_to_len (value):
return NumericOperator.power[(value & CommonOperator.len_position) >> 4]
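# A sanity sketch (hedged): the two helpers are inverses over the four
# operand lengths the encoding allows, e.g. _len_to_bit(4) == 0x20 and
# _bit_to_len(0x20) == 4.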
def _number (string):
value = 0
for c in string:
value = (value << 8) + ord(c)
return value
# def short (value):
# return (ord(value[0]) << 8) + ord(value[1])
# Interface ..................
class IPv4 (object):
afi = AFI.ipv4
class IPv6 (object):
afi = AFI.ipv6
class IPrefix (object):
pass
# Prococol
class IPrefix4 (IPrefix,IComponent,IPv4):
# not used, just present for simplifying the nlri generation
operations = 0x0
# NAME
def __init__ (self,raw,netmask):
self.nlri = CIDR(raw,netmask)
def pack (self):
raw = self.nlri.pack()
return "%s%s" % (chr(self.ID),raw)
def __str__ (self):
return str(self.nlri)
class IPrefix6 (IPrefix,IComponent,IPv6):
# not used, just present for simplifying the nlri generation
operations = 0x0
# NAME
def __init__ (self,raw,netmask,offset):
self.nlri = CIDR(raw,netmask)
self.offset = offset
def pack (self):
raw = self.nlri.packed_ip()
return "%s%s%s%s" % (chr(self.ID),chr(self.nlri.mask),chr(self.offset),raw)
def __str__ (self):
return "%s/%s" % (self.nlri,self.offset)
class IOperation (IComponent):
# need to implement encode which encode the value of the operator
def __init__ (self,operations,value):
self.operations = operations
self.value = value
self.first = None # handled by pack/str
def pack (self):
l,v = self.encode(self.value)
op = self.operations | _len_to_bit(l)
return "%s%s" % (chr(op),v)
def encode (self,value):
raise NotImplementedError('this method must be implemented by subclasses')
def decode (self,value):
raise NotImplementedError('this method must be implemented by subclasses')
#class IOperationIPv4 (IOperation):
# def encode (self,value):
# return 4, socket.pton(socket.AF_INET,value)
class IOperationByte (IOperation):
def encode (self,value):
return 1,chr(value)
def decode (self,bgp):
return ord(bgp[0]),bgp[1:]
class IOperationByteShort (IOperation):
def encode (self,value):
if value < (1<<8):
return 1,chr(value)
return 2,pack('!H',value)
def decode (self,bgp):
return unpack('!H',bgp[:2])[0],bgp[2:]
# String representation for Numeric and Binary Tests
class NumericString (object):
_string = {
NumericOperator.LT : '<',
NumericOperator.GT : '>',
NumericOperator.EQ : '=',
NumericOperator.LT|NumericOperator.EQ : '<=',
NumericOperator.GT|NumericOperator.EQ : '>=',
NumericOperator.AND|NumericOperator.LT : '&<',
NumericOperator.AND|NumericOperator.GT : '&>',
NumericOperator.AND|NumericOperator.EQ : '&=',
NumericOperator.AND|NumericOperator.LT|NumericOperator.EQ : '&<=',
NumericOperator.AND|NumericOperator.GT|NumericOperator.EQ : '&>=',
}
def __str__ (self):
return "%s%s" % (self._string[self.operations & (CommonOperator.EOL ^ 0xFF)], self.value)
class BinaryString (object):
_string = {
BinaryOperator.NOT : '!',
BinaryOperator.MATCH : '=',
BinaryOperator.AND|BinaryOperator.NOT : '&!',
BinaryOperator.AND|BinaryOperator.MATCH : '&=',
}
def __str__ (self):
return "%s%s" % (self._string[self.operations & (CommonOperator.EOL ^ 0xFF)], self.value)
# Components ..............................
def converter (function,klass=int):
def _integer (value):
try:
return klass(value)
except ValueError:
return function(value)
return _integer
def decoder (function,klass=int):
def _inner (value):
return klass(function(value))
return _inner
def PacketLength (data):
_str_bad_length = "cloudflare already found that invalid max-packet length for for you .."
number = int(data)
if number > 0xFFFF:
raise ValueError(_str_bad_length)
return number
def PortValue (data):
_str_bad_port = "you tried to set an invalid port number .."
number = int(data)
if number < 0 or number > 0xFFFF:
raise ValueError(_str_bad_port)
return number
def DSCPValue (data):
_str_bad_dscp = "you tried to filter a flow using an invalid dscp for a component .."
number = int(data)
if number < 0 or number > 0xFFFF:
raise ValueError(_str_bad_dscp)
return number
def ClassValue (data):
_str_bad_class = "you tried to filter a flow using an invalid traffic class for a component .."
number = int(data)
if number < 0 or number > 0xFFFF:
raise ValueError(_str_bad_class)
return number
def LabelValue (data):
_str_bad_label = "you tried to filter a flow using an invalid traffic label for a component .."
number = int(data)
if number < 0 or number > 0xFFFFF: # 20 bits (5 hex digits)
raise ValueError(_str_bad_label)
return number
# Protocol Shared
class FlowDestination (object):
ID = 0x01
NAME = 'destination'
class FlowSource (object):
ID = 0x02
NAME = 'source'
# Prefix
class Flow4Destination (IPrefix4,FlowDestination):
pass
# Prefix
class Flow4Source (IPrefix4,FlowSource):
pass
# Prefix
class Flow6Destination (IPrefix6,FlowDestination):
pass
# Prefix
class Flow6Source (IPrefix6,FlowSource):
pass
class FlowIPProtocol (IOperationByte,NumericString,IPv4):
ID = 0x03
NAME = 'protocol'
converter = staticmethod(converter(NamedProtocol,Protocol))
decoder = staticmethod(decoder(ord,Protocol))
class FlowNextHeader (IOperationByte,NumericString,IPv6):
ID = 0x03
NAME = 'next-header'
converter = staticmethod(converter(NamedProtocol,Protocol))
decoder = staticmethod(decoder(ord,Protocol))
class FlowAnyPort (IOperationByteShort,NumericString,IPv4,IPv6):
ID = 0x04
NAME = 'port'
converter = staticmethod(converter(PortValue))
decoder = staticmethod(_number)
class FlowDestinationPort (IOperationByteShort,NumericString,IPv4,IPv6):
ID = 0x05
NAME = 'destination-port'
converter = staticmethod(converter(PortValue))
decoder = staticmethod(_number)
class FlowSourcePort (IOperationByteShort,NumericString,IPv4,IPv6):
ID = 0x06
NAME = 'source-port'
converter = staticmethod(converter(PortValue))
decoder = staticmethod(_number)
class FlowICMPType (IOperationByte,BinaryString,IPv4,IPv6):
ID = 0x07
NAME = 'icmp-type'
converter = staticmethod(converter(NamedICMPType))
decoder = staticmethod(decoder(_number,ICMPType))
class FlowICMPCode (IOperationByte,BinaryString,IPv4,IPv6):
ID = 0x08
NAME = 'icmp-code'
converter = staticmethod(converter(NamedICMPCode))
decoder = staticmethod(decoder(_number,ICMPCode))
class FlowTCPFlag (IOperationByte,BinaryString,IPv4,IPv6):
ID = 0x09
NAME = 'tcp-flags'
converter = staticmethod(converter(NamedTCPFlag))
decoder = staticmethod(decoder(ord,TCPFlag))
class FlowPacketLength (IOperationByteShort,NumericString,IPv4,IPv6):
ID = 0x0A
NAME = 'packet-length'
converter = staticmethod(converter(PacketLength))
decoder = staticmethod(_number)
# RFC2474
class FlowDSCP (IOperationByteShort,NumericString,IPv4):
ID = 0x0B
NAME = 'dscp'
converter = staticmethod(converter(DSCPValue))
decoder = staticmethod(_number)
# RFC2460
class FlowTrafficClass (IOperationByte,NumericString,IPv6):
ID = 0x0B
NAME = 'traffic-class'
converter = staticmethod(converter(ClassValue))
decoder = staticmethod(_number)
# BinaryOperator
class FlowFragment (IOperationByteShort,NumericString,IPv4):
ID = 0x0C
NAME = 'fragment'
converter = staticmethod(converter(NamedFragment))
decoder = staticmethod(decoder(ord,Fragment))
# draft-raszuk-idr-flow-spec-v6-01
class FlowFlowLabel (IOperationByteShort,NumericString,IPv6):
ID = 0x0D
NAME = 'flow-label'
converter = staticmethod(converter(LabelValue))
decoder = staticmethod(_number)
# ..........................................................
decode = {AFI.ipv4: {}, AFI.ipv6: {}}
factory = {AFI.ipv4: {}, AFI.ipv6: {}}
for content in dir():
klass = globals().get(content,None)
if not isinstance(klass,type(IComponent)):
continue
if not issubclass(klass,IComponent):
continue
if issubclass(klass,IPv4):
afi = AFI.ipv4
elif issubclass(klass,IPv6):
afi = AFI.ipv6
else:
continue
ID = getattr(klass,'ID',None)
if not ID:
continue
factory[afi][ID] = klass
name = getattr(klass,'NAME')
if issubclass(klass, IOperation):
if issubclass(klass, BinaryString):
decode[afi][ID] = 'binary'
elif issubclass(klass, NumericString):
decode[afi][ID] = 'numeric'
else:
raise RuntimeError('invalid class defined (string)')
elif issubclass(klass, IPrefix):
decode[afi][ID] = 'prefix'
else:
raise RuntimeError('invalid class defined (type)')
# ..........................................................
def _unique ():
value = 0
while True:
yield value
value += 1
unique = _unique()
class Flow (NLRI):
def __init__ (self,afi=AFI.ipv4,safi=SAFI.flow_ip,nexthop=None,rd=None):
NLRI.__init__(self,afi,safi)
self.rules = {}
self.action = OUT.announce
self.nexthop = IP.unpack(nexthop) if nexthop else NoIP
self.rd = rd
self.unique = unique.next()
def __len__ (self):
return len(self.pack())
def add (self,rule):
ID = rule.ID
if ID in (FlowDestination.ID,FlowSource.ID):
if ID in self.rules:
return False
if ID == FlowDestination.ID:
pair = self.rules.get(FlowSource.ID,[])
else:
pair = self.rules.get(FlowDestination.ID,[])
if pair:
if rule.afi != pair[0].afi:
return False
self.rules.setdefault(ID,[]).append(rule)
return True
# The API requires addpath, but it is irrelevant here.
def pack (self,addpath=None):
ordered_rules = []
# the order is an RFC requirement
for ID in sorted(self.rules.keys()):
rules = self.rules[ID]
# for each component get all the operations to do
# the format used does not prevent two opposing rules, meaning that no packet can ever match
for rule in rules:
rule.operations &= (CommonOperator.EOL ^ 0xFF)
rules[-1].operations |= CommonOperator.EOL
# and add it to the last rule
if ID not in (FlowDestination.ID,FlowSource.ID):
ordered_rules.append(chr(ID))
ordered_rules.append(''.join(rule.pack() for rule in rules))
components = ''.join(ordered_rules)
if self.safi == SAFI.flow_vpn:
components = self.rd.pack() + components
l = len(components)
if l < 0xF0:
data = "%s%s" % (chr(l),components)
elif l < 0x0FFF:
data = "%s%s" % (pack('!H',l | 0xF000),components)
else:
raise Notify(3,0,"rule too big for NLRI - how to handle this - does this work ?")
# data = "%s" % chr(0)
return data
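# A minimal sketch of the length prefix used above (hypothetical helpers,
# not exabgp API): one byte below 0xF0, otherwise two bytes whose high
# nibble is 0xF, leaving 12 bits for the length itself:
#
#   def _encode_length (l):
#       return chr(l) if l < 0xF0 else pack('!H',l | 0xF000)
#
#   def _decode_length (data):
#       first = ord(data[0])
#       if first & 0xF0 == 0xF0:
#           return ((first & 0x0F) << 8) + ord(data[1]), data[2:]
#       return first, data[1:]
#
#   _decode_length(_encode_length(0x123))  # -> (0x123, '')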
def extensive (self):
string = []
for index in sorted(self.rules):
rules = self.rules[index]
s = []
for idx,rule in enumerate(rules):
# only add ' ' after the first element
if idx and not rule.operations & NumericOperator.AND:
s.append(' ')
s.append(rule)
string.append(' %s %s' % (rules[0].NAME,''.join(str(_) for _ in s)))
nexthop = ' next-hop %s' % self.nexthop if self.nexthop is not NoIP else ''
rd = str(self.rd) if self.rd else ''
return 'flow' + rd + ''.join(string) + nexthop
def __str__ (self):
return self.extensive()
def _json (self):
string = []
for index in sorted(self.rules):
rules = self.rules[index]
s = []
for idx,rule in enumerate(rules):
# only add ' ' after the first element
if idx and not rule.operations & NumericOperator.AND:
s.append(', ')
s.append('"%s"' % rule)
string.append(' "%s": [ %s ]' % (rules[0].NAME,''.join(str(_) for _ in s)))
nexthop = ', "next-hop": "%s"' % self.nexthop if self.nexthop is not NoIP else ''
rd = ', %s' % self.rd.json() if self.rd else ''
compatibility = ', "string": "%s"' % self.extensive()
return '{' + rd + ','.join(string) + nexthop + compatibility +' }'
def json (self):
# this is a stop gap so flow route parsing does not crash exabgp
# delete unique when this is fixed
return '"flow-%d": %s' % (self.unique,self._json())
def index (self):
return self.pack()
@classmethod
def unpack (cls,afi,safi,bgp,has_multiple_path,nexthop,action):
total = len(bgp)
length,bgp = ord(bgp[0]),bgp[1:]
if length & 0xF0 == 0xF0: # bigger than 240
extra,bgp = ord(bgp[0]),bgp[1:]
length = ((length & 0x0F) << 16) + extra
if length > len(bgp):
raise Notify(3,10,'invalid length at the start of the flow')
bgp = bgp[:length]
nlri = Flow(afi,safi,nexthop)
nlri.action = action
if safi == SAFI.flow_vpn:
nlri.rd = RouteDistinguisher(bgp[:8])
bgp = bgp[8:]
seen = []
while bgp:
what,bgp = ord(bgp[0]),bgp[1:]
if what not in decode.get(afi,{}):
raise Notify(3,10,'unknown flowspec component %d received for this address family' % what)
seen.append(what)
if sorted(seen) != seen:
raise Notify(3,10,'components are not sent in the right order %s' % seen)
decoder = decode[afi][what]
klass = factory[afi][what]
if decoder == 'prefix':
if afi == AFI.ipv4:
_,rd,_,mask,size,prefix,left = NLRI._nlri(afi,safi,bgp,action,False)
adding = klass(prefix,mask)
if not nlri.add(adding):
raise Notify(3,10,'components are incompatible (two sources, two destinations, mix ipv4/ipv6) %s' % seen)
# logger.parser(LazyFormat("added flow %s (%s) payload " % (klass.NAME,adding),od,bgp[:-len(left)]))
bgp = left
else:
byte,bgp = bgp[1],bgp[0]+bgp[2:]
offset = ord(byte)
_,rd,_,mask,size,prefix,left = NLRI._nlri(afi,safi,bgp,action,False)
adding = klass(prefix,mask,offset)
if not nlri.add(adding):
raise Notify(3,10,'components are incompatible (two sources, two destinations, mix ipv4/ipv6) %s' % seen)
# logger.parser(LazyFormat("added flow %s (%s) payload " % (klass.NAME,adding),od,bgp[:-len(left)]))
bgp = left
else:
end = False
while not end:
byte,bgp = ord(bgp[0]),bgp[1:]
end = CommonOperator.eol(byte)
operator = CommonOperator.operator(byte)
length = CommonOperator.length(byte)
value,bgp = bgp[:length],bgp[length:]
adding = klass.decoder(value)
nlri.add(klass(operator,adding))
# logger.parser(LazyFormat("added flow %s (%s) operator %d len %d payload " % (klass.NAME,adding,byte,length),od,value))
return total-len(bgp),nlri
for safi in (SAFI.flow_ip,SAFI.flow_vpn):
for afi in (AFI.ipv4, AFI.ipv6):
Flow.register_nlri(afi,safi)
| bsd-3-clause |
GetSomeBlocks/ServerStatus | resources/lib/twisted/twisted/conch/test/test_knownhosts.py | 60 | 34426 | # Copyright (c) 2008 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.conch.client.knownhosts}.
"""
import os
from binascii import Error as BinasciiError, b2a_base64, a2b_base64
try:
import Crypto
import pyasn1
except ImportError:
skip = "PyCrypto and PyASN1 required for twisted.conch.knownhosts."
else:
from twisted.conch.ssh.keys import Key, BadKeyError
from twisted.conch.client.knownhosts import \
PlainEntry, HashedEntry, KnownHostsFile, UnparsedEntry, ConsoleUI
from twisted.conch.client import default
from zope.interface.verify import verifyObject
from twisted.python.filepath import FilePath
from twisted.trial.unittest import TestCase
from twisted.internet.defer import Deferred
from twisted.conch.interfaces import IKnownHostEntry
from twisted.conch.error import HostKeyChanged, UserRejectedKey, InvalidEntry
sampleEncodedKey = (
'AAAAB3NzaC1yc2EAAAABIwAAAQEAsV0VMRbGmzhqxxayLRHmvnFvtyNqgbNKV46dU1bVFB+3y'
'tNvue4Riqv/SVkPRNwMb7eWH29SviXaBxUhYyzKkDoNUq3rTNnH1Vnif6d6X4JCrUb5d3W+Dm'
'YClyJrZ5HgD/hUpdSkTRqdbQ2TrvSAxRacj+vHHT4F4dm1bJSewm3B2D8HVOoi/CbVh3dsIiC'
'dp8VltdZx4qYVfYe2LwVINCbAa3d3tj9ma7RVfw3OH2Mfb+toLd1N5tBQFb7oqTt2nC6I/6Bd'
'4JwPUld+IEitw/suElq/AIJVQXXujeyiZlea90HE65U2mF1ytr17HTAIT2ySokJWyuBANGACk'
'6iIaw==')
otherSampleEncodedKey = (
'AAAAB3NzaC1yc2EAAAABIwAAAIEAwaeCZd3UCuPXhX39+/p9qO028jTF76DMVd9mPvYVDVXuf'
'WckKZauF7+0b7qm+ChT7kan6BzRVo4++gCVNfAlMzLysSt3ylmOR48tFpAfygg9UCX3DjHz0E'
'lOOUKh3iifc9aUShD0OPaK3pR5JJ8jfiBfzSYWt/hDi/iZ4igsSs8=')
thirdSampleEncodedKey = (
'AAAAB3NzaC1yc2EAAAABIwAAAQEAl/TQakPkePlnwCBRPitIVUTg6Z8VzN1en+DGkyo/evkmLw'
'7o4NWR5qbysk9A9jXW332nxnEuAnbcCam9SHe1su1liVfyIK0+3bdn0YRB0sXIbNEtMs2LtCho'
'/aV3cXPS+Cf1yut3wvIpaRnAzXxuKPCTXQ7/y0IXa8TwkRBH58OJa3RqfQ/NsSp5SAfdsrHyH2'
'aitiVKm2jfbTKzSEqOQG/zq4J9GXTkq61gZugory/Tvl5/yPgSnOR6C9jVOMHf27ZPoRtyj9SY'
'343Hd2QHiIE0KPZJEgCynKeWoKz8v6eTSK8n4rBnaqWdp8MnGZK1WGy05MguXbyCDuTC8AmJXQ'
'==')
sampleKey = a2b_base64(sampleEncodedKey)
otherSampleKey = a2b_base64(otherSampleEncodedKey)
thirdSampleKey = a2b_base64(thirdSampleEncodedKey)
samplePlaintextLine = (
"www.twistedmatrix.com ssh-rsa " + sampleEncodedKey + "\n")
otherSamplePlaintextLine = (
"divmod.com ssh-rsa " + otherSampleEncodedKey + "\n")
sampleHostIPLine = (
"www.twistedmatrix.com,198.49.126.131 ssh-rsa " + sampleEncodedKey + "\n")
sampleHashedLine = (
"|1|gJbSEPBG9ZSBoZpHNtZBD1bHKBA=|bQv+0Xa0dByrwkA1EB0E7Xop/Fo= ssh-rsa " +
sampleEncodedKey + "\n")
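# An illustrative aside (comments only): a hashed line has the layout
# "|1|b64(salt)|b64(hash)" where, assuming the standard OpenSSH scheme,
# hash = HMAC-SHA1(key=salt, msg=hostname). The sample above can then be
# checked with the stdlib alone:
#
#   import hmac, hashlib
#   salt, hostHash = map(a2b_base64, sampleHashedLine.split()[0][3:].split('|'))
#   assert hmac.new(salt, 'www.twistedmatrix.com', hashlib.sha1).digest() == hostHash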
class EntryTestsMixin:
"""
Tests for implementations of L{IKnownHostEntry}. Subclasses must set the
'entry' attribute to a provider of that interface, the implementation of
that interface under test.
@ivar entry: a provider of L{IKnownHostEntry} with a hostname of
www.twistedmatrix.com and an RSA key of sampleKey.
"""
def test_providesInterface(self):
"""
The given entry should provide IKnownHostEntry.
"""
verifyObject(IKnownHostEntry, self.entry)
def test_fromString(self):
"""
Constructing a plain text entry from an unhashed known_hosts entry will
result in an L{IKnownHostEntry} provider with 'keyString', 'hostname',
and 'keyType' attributes. While outside the interface in question,
these attributes are held in common by L{PlainEntry} and L{HashedEntry}
implementations; other implementations should override this method in
subclasses.
"""
entry = self.entry
self.assertEqual(entry.publicKey, Key.fromString(sampleKey))
self.assertEqual(entry.keyType, "ssh-rsa")
def test_matchesKey(self):
"""
L{IKnownHostEntry.matchesKey} checks to see if an entry matches a given
SSH key.
"""
twistedmatrixDotCom = Key.fromString(sampleKey)
divmodDotCom = Key.fromString(otherSampleKey)
self.assertEqual(
True,
self.entry.matchesKey(twistedmatrixDotCom))
self.assertEqual(
False,
self.entry.matchesKey(divmodDotCom))
def test_matchesHost(self):
"""
L{IKnownHostEntry.matchesHost} checks to see if an entry matches a
given hostname.
"""
self.assertEqual(True, self.entry.matchesHost(
"www.twistedmatrix.com"))
self.assertEqual(False, self.entry.matchesHost(
"www.divmod.com"))
class PlainEntryTests(EntryTestsMixin, TestCase):
"""
Test cases for L{PlainEntry}.
"""
plaintextLine = samplePlaintextLine
hostIPLine = sampleHostIPLine
def setUp(self):
"""
Set 'entry' to a sample plain-text entry with sampleKey as its key.
"""
self.entry = PlainEntry.fromString(self.plaintextLine)
def test_matchesHostIP(self):
"""
A "hostname,ip" formatted line will match both the host and the IP.
"""
self.entry = PlainEntry.fromString(self.hostIPLine)
self.assertEqual(True, self.entry.matchesHost("198.49.126.131"))
self.test_matchesHost()
def test_toString(self):
"""
L{PlainEntry.toString} generates the serialized OpenSSH format string
for the entry, sans newline.
"""
self.assertEqual(self.entry.toString(), self.plaintextLine.rstrip("\n"))
multiHostEntry = PlainEntry.fromString(self.hostIPLine)
self.assertEqual(multiHostEntry.toString(), self.hostIPLine.rstrip("\n"))
class PlainTextWithCommentTests(PlainEntryTests):
"""
Test cases for L{PlainEntry} when parsed from a line with a comment.
"""
plaintextLine = samplePlaintextLine[:-1] + " plain text comment.\n"
hostIPLine = sampleHostIPLine[:-1] + " text following host/IP line\n"
class HashedEntryTests(EntryTestsMixin, TestCase):
"""
Tests for L{HashedEntry}.
This suite doesn't include any tests for host/IP pairs because hashed
entries store IP addresses the same way as hostnames and do not support
comma-separated lists. (If you hash the IP and host together you can't
tell if you've got the key already for one or the other.)
"""
hashedLine = sampleHashedLine
def setUp(self):
"""
Set 'entry' to a sample hashed entry for twistedmatrix.com with
sampleKey as its key.
"""
self.entry = HashedEntry.fromString(self.hashedLine)
def test_toString(self):
"""
L{HashedEntry.toString} generates the serialized OpenSSH format string
for the entry, sans the newline.
"""
self.assertEqual(self.entry.toString(), self.hashedLine.rstrip("\n"))
class HashedEntryWithCommentTests(HashedEntryTests):
"""
Test cases for L{PlainEntry} when parsed from a line with a comment.
"""
hashedLine = sampleHashedLine[:-1] + " plain text comment.\n"
class UnparsedEntryTests(TestCase, EntryTestsMixin):
"""
Tests for L{UnparsedEntry}
"""
def setUp(self):
"""
Set up the 'entry' to be an unparsed entry for some random text.
"""
self.entry = UnparsedEntry(" This is a bogus entry. \n")
def test_fromString(self):
"""
Creating an L{UnparsedEntry} should simply record the string it was
passed.
"""
self.assertEqual(" This is a bogus entry. \n",
self.entry._string)
def test_matchesHost(self):
"""
An unparsed entry can't match any hosts.
"""
self.assertEqual(False, self.entry.matchesHost("www.twistedmatrix.com"))
def test_matchesKey(self):
"""
An unparsed entry can't match any keys.
"""
self.assertEqual(False, self.entry.matchesKey(Key.fromString(sampleKey)))
def test_toString(self):
"""
L{UnparsedEntry.toString} returns its input string, sans trailing newline.
"""
self.assertEqual(" This is a bogus entry. ", self.entry.toString())
class ParseErrorTests(TestCase):
"""
L{HashedEntry.fromString} and L{PlainEntry.fromString} can raise a variety
of errors depending on misformattings of certain strings. These tests make
sure those errors are caught. Since many of the ways that this can go
wrong are in the lower-level APIs being invoked by the parsing logic,
several of these are integration tests with the L{base64} and
L{twisted.conch.ssh.keys} modules.
"""
def invalidEntryTest(self, cls):
"""
If there are fewer than three elements, C{fromString} should raise
L{InvalidEntry}.
"""
self.assertRaises(InvalidEntry, cls.fromString, "invalid")
def notBase64Test(self, cls):
"""
If the key is not base64, C{fromString} should raise L{BinasciiError}.
"""
self.assertRaises(BinasciiError, cls.fromString, "x x x")
def badKeyTest(self, cls, prefix):
"""
If the key portion of the entry is valid base64, but is not actually an
SSH key, C{fromString} should raise L{BadKeyError}.
"""
self.assertRaises(BadKeyError, cls.fromString, ' '.join(
[prefix, "ssh-rsa", b2a_base64(
"Hey, this isn't an SSH key!").strip()]))
def test_invalidPlainEntry(self):
"""
If there are fewer than three whitespace-separated elements in an
entry, L{PlainEntry.fromString} should raise L{InvalidEntry}.
"""
self.invalidEntryTest(PlainEntry)
def test_invalidHashedEntry(self):
"""
If there are fewer than three whitespace-separated elements in an
entry, or the hostname salt/hash portion has more than two elements,
L{HashedEntry.fromString} should raise L{InvalidEntry}.
"""
self.invalidEntryTest(HashedEntry)
a, b, c = sampleHashedLine.split()
self.assertRaises(InvalidEntry, HashedEntry.fromString, ' '.join(
[a + "||", b, c]))
def test_plainNotBase64(self):
"""
If the key portion of a plain entry is not decodable as base64,
C{fromString} should raise L{BinasciiError}.
"""
self.notBase64Test(PlainEntry)
def test_hashedNotBase64(self):
"""
If the key, host salt, or host hash portion of a hashed entry is not
valid base64, it will raise L{BinasciiError}.
"""
self.notBase64Test(HashedEntry)
a, b, c = sampleHashedLine.split()
# Salt not valid base64.
self.assertRaises(
BinasciiError, HashedEntry.fromString,
' '.join(["|1|x|" + b2a_base64("stuff").strip(), b, c]))
# Host hash not valid base64.
self.assertRaises(
BinasciiError, HashedEntry.fromString,
' '.join([HashedEntry.MAGIC + b2a_base64("stuff").strip() + "|x", b, c]))
# Neither salt nor hash valid base64.
self.assertRaises(
BinasciiError, HashedEntry.fromString,
' '.join(["|1|x|x", b, c]))
def test_hashedBadKey(self):
"""
If the key portion of the entry is valid base64, but is not actually an
SSH key, C{HashedEntry.fromString} should raise L{BadKeyError}.
"""
a, b, c = sampleHashedLine.split()
self.badKeyTest(HashedEntry, a)
def test_plainBadKey(self):
"""
If the key portion of the entry is valid base64, but is not actually an
SSH key, C{PlainEntry.fromString} should raise L{BadKeyError}.
"""
self.badKeyTest(PlainEntry, "hostname")
class KnownHostsDatabaseTests(TestCase):
"""
Tests for L{KnownHostsFile}.
"""
def pathWithContent(self, content):
"""
Return a FilePath with the given initial content.
"""
fp = FilePath(self.mktemp())
fp.setContent(content)
return fp
def loadSampleHostsFile(self, content=(
sampleHashedLine + otherSamplePlaintextLine +
"\n# That was a blank line.\n"
"This is just unparseable.\n"
"This also unparseable.\n")):
"""
Return a sample hosts file, with keys for www.twistedmatrix.com and
divmod.com present.
"""
return KnownHostsFile.fromPath(self.pathWithContent(content))
def test_loadFromPath(self):
"""
Loading a L{KnownHostsFile} from a path with six entries in it will
result in a L{KnownHostsFile} object with six L{IKnownHostEntry}
providers in it, each of the appropriate type.
"""
hostsFile = self.loadSampleHostsFile()
self.assertEqual(len(hostsFile._entries), 6)
self.assertIsInstance(hostsFile._entries[0], HashedEntry)
self.assertEqual(True, hostsFile._entries[0].matchesHost(
"www.twistedmatrix.com"))
self.assertIsInstance(hostsFile._entries[1], PlainEntry)
self.assertEqual(True, hostsFile._entries[1].matchesHost(
"divmod.com"))
self.assertIsInstance(hostsFile._entries[2], UnparsedEntry)
self.assertEqual(hostsFile._entries[2].toString(), "")
self.assertIsInstance(hostsFile._entries[3], UnparsedEntry)
self.assertEqual(hostsFile._entries[3].toString(),
"# That was a blank line.")
self.assertIsInstance(hostsFile._entries[4], UnparsedEntry)
self.assertEqual(hostsFile._entries[4].toString(),
"This is just unparseable.")
self.assertIsInstance(hostsFile._entries[5], UnparsedEntry)
self.assertEqual(hostsFile._entries[5].toString(),
"This also unparseable.")
def test_loadNonExistent(self):
"""
Loading a L{KnownHostsFile} from a path that does not exist should
result in an empty L{KnownHostsFile} that will save back to that path.
"""
pn = self.mktemp()
knownHostsFile = KnownHostsFile.fromPath(FilePath(pn))
self.assertEqual([], list(knownHostsFile._entries))
self.assertEqual(False, FilePath(pn).exists())
knownHostsFile.save()
self.assertEqual(True, FilePath(pn).exists())
def test_loadNonExistentParent(self):
"""
Loading a L{KnownHostsFile} from a path whose parent directory does not
exist should result in an empty L{KnownHostsFile} that will save back
to that path, creating its parent directory(ies) in the process.
"""
thePath = FilePath(self.mktemp())
knownHostsPath = thePath.child("foo").child("known_hosts")
knownHostsFile = KnownHostsFile.fromPath(knownHostsPath)
knownHostsFile.save()
knownHostsPath.restat(False)
self.assertEqual(True, knownHostsPath.exists())
def test_savingAddsEntry(self):
"""
L{KnownHostsFile.save()} will write out a new file with any entries
that have been added.
"""
path = self.pathWithContent(sampleHashedLine +
otherSamplePlaintextLine)
knownHostsFile = KnownHostsFile.fromPath(path)
newEntry = knownHostsFile.addHostKey("some.example.com", Key.fromString(thirdSampleKey))
expectedContent = (
sampleHashedLine +
otherSamplePlaintextLine + HashedEntry.MAGIC +
b2a_base64(newEntry._hostSalt).strip() + "|" +
b2a_base64(newEntry._hostHash).strip() + " ssh-rsa " +
thirdSampleEncodedKey + "\n")
# Sanity check, let's make sure the base64 API being used for the test
# isn't inserting spurious newlines.
self.assertEqual(3, expectedContent.count("\n"))
knownHostsFile.save()
self.assertEqual(expectedContent, path.getContent())
def test_hasPresentKey(self):
"""
L{KnownHostsFile.hasHostKey} returns C{True} when a key for the given
hostname is present and matches the expected key.
"""
hostsFile = self.loadSampleHostsFile()
self.assertEqual(True, hostsFile.hasHostKey(
"www.twistedmatrix.com", Key.fromString(sampleKey)))
def test_hasNonPresentKey(self):
"""
L{KnownHostsFile.hasHostKey} returns C{False} when a key for the given
hostname is not present.
"""
hostsFile = self.loadSampleHostsFile()
self.assertEqual(False, hostsFile.hasHostKey(
"non-existent.example.com", Key.fromString(sampleKey)))
def test_hasKeyMismatch(self):
"""
L{KnownHostsFile.hasHostKey} raises L{HostKeyChanged} if the host key
is present, but different from the expected one. The resulting
exception should have an offendingEntry indicating the given entry.
"""
hostsFile = self.loadSampleHostsFile()
exception = self.assertRaises(
HostKeyChanged, hostsFile.hasHostKey,
"www.twistedmatrix.com", Key.fromString(otherSampleKey))
self.assertEqual(exception.offendingEntry, hostsFile._entries[0])
self.assertEqual(exception.lineno, 1)
self.assertEqual(exception.path, hostsFile._savePath)
def test_addHostKey(self):
"""
L{KnownHostsFile.addHostKey} adds a new L{HashedEntry} to the host
file, and returns it.
"""
hostsFile = self.loadSampleHostsFile()
aKey = Key.fromString(thirdSampleKey)
self.assertEqual(False,
hostsFile.hasHostKey("somewhere.example.com", aKey))
newEntry = hostsFile.addHostKey("somewhere.example.com", aKey)
# The code in OpenSSH requires host salts to be 20 characters long.
# This is the required length of a SHA-1 HMAC hash, so it's just a
# sanity check.
self.assertEqual(20, len(newEntry._hostSalt))
self.assertEqual(True,
newEntry.matchesHost("somewhere.example.com"))
self.assertEqual(newEntry.keyType, "ssh-rsa")
self.assertEqual(aKey, newEntry.publicKey)
self.assertEqual(True,
hostsFile.hasHostKey("somewhere.example.com", aKey))
def test_randomSalts(self):
"""
L{KnownHostsFile.addHostKey} generates a random salt for each new key,
so subsequent salts will be different.
"""
hostsFile = self.loadSampleHostsFile()
aKey = Key.fromString(thirdSampleKey)
self.assertNotEqual(
hostsFile.addHostKey("somewhere.example.com", aKey)._hostSalt,
hostsFile.addHostKey("somewhere-else.example.com", aKey)._hostSalt)
def test_verifyValidKey(self):
"""
Verifying a valid key should return a L{Deferred} which fires with
True.
"""
hostsFile = self.loadSampleHostsFile()
hostsFile.addHostKey("1.2.3.4", Key.fromString(sampleKey))
ui = FakeUI()
d = hostsFile.verifyHostKey(ui, "www.twistedmatrix.com", "1.2.3.4",
Key.fromString(sampleKey))
l = []
d.addCallback(l.append)
self.assertEqual(l, [True])
def test_verifyInvalidKey(self):
"""
Verifying an invalid key should return a L{Deferred} which fires with a
L{HostKeyChanged} failure.
"""
hostsFile = self.loadSampleHostsFile()
wrongKey = Key.fromString(thirdSampleKey)
ui = FakeUI()
hostsFile.addHostKey("1.2.3.4", Key.fromString(sampleKey))
d = hostsFile.verifyHostKey(
ui, "www.twistedmatrix.com", "1.2.3.4", wrongKey)
return self.assertFailure(d, HostKeyChanged)
def verifyNonPresentKey(self):
"""
Set up a test to verify a key that isn't present. Return a 3-tuple of
the UI, a list set up to collect the result of the verifyHostKey call,
and the sample L{KnownHostsFile} being used.
This utility method avoids returning a L{Deferred}, and records results
in the returned list instead, because the events which get generated
here are pre-recorded in the 'ui' object. If the L{Deferred} in
question does not fire, the test will fail quickly with an empty list.
"""
hostsFile = self.loadSampleHostsFile()
absentKey = Key.fromString(thirdSampleKey)
ui = FakeUI()
l = []
d = hostsFile.verifyHostKey(
ui, "sample-host.example.com", "4.3.2.1", absentKey)
d.addBoth(l.append)
self.assertEqual([], l)
self.assertEqual(
ui.promptText,
"The authenticity of host 'sample-host.example.com (4.3.2.1)' "
"can't be established.\n"
"RSA key fingerprint is "
"89:4e:cc:8c:57:83:96:48:ef:63:ad:ee:99:00:4c:8f.\n"
"Are you sure you want to continue connecting (yes/no)? ")
return ui, l, hostsFile
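# An aside (comments only): the fingerprint quoted in the prompt above is
# the colon-separated MD5 digest of the raw key blob, so - assuming the
# blob is exactly the base64-decoded payload - it can be reproduced as:
#
#   import hashlib
#   digest = hashlib.md5(thirdSampleKey).hexdigest()
#   ':'.join(digest[i:i + 2] for i in range(0, len(digest), 2))
#   # -> '89:4e:cc:8c:57:83:96:48:ef:63:ad:ee:99:00:4c:8f'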
def test_verifyNonPresentKey_Yes(self):
"""
Verifying a key where neither the hostname nor the IP are present
should result in the UI being prompted with a message explaining as
much. If the UI says yes, the Deferred should fire with True.
"""
ui, l, knownHostsFile = self.verifyNonPresentKey()
ui.promptDeferred.callback(True)
self.assertEqual([True], l)
reloaded = KnownHostsFile.fromPath(knownHostsFile._savePath)
self.assertEqual(
True,
reloaded.hasHostKey("4.3.2.1", Key.fromString(thirdSampleKey)))
self.assertEqual(
True,
reloaded.hasHostKey("sample-host.example.com",
Key.fromString(thirdSampleKey)))
def test_verifyNonPresentKey_No(self):
"""
Verifying a key where neither the hostname nor the IP are present
should result in the UI being prompted with a message explaining as
much. If the UI says no, the Deferred should fail with
UserRejectedKey.
"""
ui, l, knownHostsFile = self.verifyNonPresentKey()
ui.promptDeferred.callback(False)
l[0].trap(UserRejectedKey)
def test_verifyHostIPMismatch(self):
"""
Verifying a key where the host is present (and correct), but the IP is
present and different, should result in the deferred firing with a
HostKeyChanged failure.
"""
hostsFile = self.loadSampleHostsFile()
wrongKey = Key.fromString(thirdSampleKey)
ui = FakeUI()
d = hostsFile.verifyHostKey(
ui, "www.twistedmatrix.com", "4.3.2.1", wrongKey)
return self.assertFailure(d, HostKeyChanged)
def test_verifyKeyForHostAndIP(self):
"""
Verifying a key where the hostname is present but the IP is not should
result in the key being added for the IP and the user being warned
about the change.
"""
ui = FakeUI()
hostsFile = self.loadSampleHostsFile()
expectedKey = Key.fromString(sampleKey)
hostsFile.verifyHostKey(
ui, "www.twistedmatrix.com", "5.4.3.2", expectedKey)
self.assertEqual(
True, KnownHostsFile.fromPath(hostsFile._savePath).hasHostKey(
"5.4.3.2", expectedKey))
self.assertEqual(
["Warning: Permanently added the RSA host key for IP address "
"'5.4.3.2' to the list of known hosts."],
ui.userWarnings)
class FakeFile(object):
"""
A fake file-like object that acts enough like a file for
L{ConsoleUI.prompt}.
"""
def __init__(self):
self.inlines = []
self.outchunks = []
self.closed = False
def readline(self):
"""
Return a line from the 'inlines' list.
"""
return self.inlines.pop(0)
def write(self, chunk):
"""
Append the given item to the 'outchunks' list.
"""
if self.closed:
raise IOError("the file was closed")
self.outchunks.append(chunk)
def close(self):
"""
Set the 'closed' flag to True, explicitly marking that it has been
closed.
"""
self.closed = True
class ConsoleUITests(TestCase):
"""
Test cases for L{ConsoleUI}.
"""
def setUp(self):
"""
Create a L{ConsoleUI} pointed at a L{FakeFile}.
"""
self.fakeFile = FakeFile()
self.ui = ConsoleUI(self.openFile)
def openFile(self):
"""
Return the current fake file.
"""
return self.fakeFile
def newFile(self, lines):
"""
Create a new fake file (the next file that self.ui will open) with the
given list of lines to be returned from readline().
"""
self.fakeFile = FakeFile()
self.fakeFile.inlines = lines
def test_promptYes(self):
"""
L{ConsoleUI.prompt} writes a message to the console, then reads a line.
If that line is 'yes', then it returns a L{Deferred} that fires with
True.
"""
for okYes in ['yes', 'Yes', 'yes\n']:
self.newFile([okYes])
l = []
self.ui.prompt("Hello, world!").addCallback(l.append)
self.assertEqual(["Hello, world!"], self.fakeFile.outchunks)
self.assertEqual([True], l)
self.assertEqual(True, self.fakeFile.closed)
def test_promptNo(self):
"""
L{ConsoleUI.prompt} writes a message to the console, then reads a line.
If that line is 'no', then it returns a L{Deferred} that fires with
False.
"""
for okNo in ['no', 'No', 'no\n']:
self.newFile([okNo])
l = []
self.ui.prompt("Goodbye, world!").addCallback(l.append)
self.assertEqual(["Goodbye, world!"], self.fakeFile.outchunks)
self.assertEqual([False], l)
self.assertEqual(True, self.fakeFile.closed)
def test_promptRepeatedly(self):
"""
L{ConsoleUI.prompt} writes a message to the console, then reads a line.
If that line is neither 'yes' nor 'no', then it says "Please enter
'yes' or 'no'" until it gets a 'yes' or a 'no', at which point it
returns a Deferred that answers either True or False.
"""
self.newFile(['what', 'uh', 'okay', 'yes'])
l = []
self.ui.prompt("Please say something useful.").addCallback(l.append)
self.assertEqual([True], l)
self.assertEqual(self.fakeFile.outchunks,
["Please say something useful."] +
["Please type 'yes' or 'no': "] * 3)
self.assertEqual(True, self.fakeFile.closed)
self.newFile(['blah', 'stuff', 'feh', 'no'])
l = []
self.ui.prompt("Please say something negative.").addCallback(l.append)
self.assertEqual([False], l)
self.assertEqual(self.fakeFile.outchunks,
["Please say something negative."] +
["Please type 'yes' or 'no': "] * 3)
self.assertEqual(True, self.fakeFile.closed)
def test_promptOpenFailed(self):
"""
If the C{opener} passed to L{ConsoleUI} raises an exception, that
exception will fail the L{Deferred} returned from L{ConsoleUI.prompt}.
"""
def raiseIt():
raise IOError()
ui = ConsoleUI(raiseIt)
d = ui.prompt("This is a test.")
return self.assertFailure(d, IOError)
def test_warn(self):
"""
L{ConsoleUI.warn} should output a message to the console object.
"""
self.ui.warn("Test message.")
self.assertEqual(["Test message."], self.fakeFile.outchunks)
self.assertEqual(True, self.fakeFile.closed)
def test_warnOpenFailed(self):
"""
L{ConsoleUI.warn} should log a traceback if the output can't be opened.
"""
def raiseIt():
1 / 0
ui = ConsoleUI(raiseIt)
ui.warn("This message never makes it.")
self.assertEqual(len(self.flushLoggedErrors(ZeroDivisionError)), 1)
class FakeUI(object):
"""
A fake UI object, adhering to the interface expected by
L{KnownHostsFile.verifyHostKey}
@ivar userWarnings: inputs provided to 'warn'.
@ivar promptDeferred: last result returned from 'prompt'.
@ivar promptText: the last input provided to 'prompt'.
"""
def __init__(self):
self.userWarnings = []
self.promptDeferred = None
self.promptText = None
def prompt(self, text):
"""
Issue the user an interactive prompt, which they can accept or deny.
"""
self.promptText = text
self.promptDeferred = Deferred()
return self.promptDeferred
def warn(self, text):
"""
Issue a non-interactive warning to the user.
"""
self.userWarnings.append(text)
class FakeObject(object):
"""
A fake object that can have some attributes. Used to fake
L{SSHClientTransport} and L{SSHClientFactory}.
"""
class DefaultAPITests(TestCase):
"""
The API in L{twisted.conch.client.default.verifyHostKey} is the integration
point between the code in the rest of conch and L{KnownHostsFile}.
"""
def patchedOpen(self, fname, mode):
"""
The patched version of 'open'; this returns a L{FakeFile} that the
instantiated L{ConsoleUI} can use.
"""
self.assertEqual(fname, "/dev/tty")
self.assertEqual(mode, "r+b")
return self.fakeFile
def setUp(self):
"""
Patch 'open' in verifyHostKey.
"""
self.fakeFile = FakeFile()
self.patch(default, "_open", self.patchedOpen)
self.hostsOption = self.mktemp()
knownHostsFile = KnownHostsFile(FilePath(self.hostsOption))
knownHostsFile.addHostKey("exists.example.com", Key.fromString(sampleKey))
knownHostsFile.addHostKey("4.3.2.1", Key.fromString(sampleKey))
knownHostsFile.save()
self.fakeTransport = FakeObject()
self.fakeTransport.factory = FakeObject()
self.options = self.fakeTransport.factory.options = {
'host': "exists.example.com",
'known-hosts': self.hostsOption
}
def test_verifyOKKey(self):
"""
L{default.verifyHostKey} should return a L{Deferred} which fires with
C{1} when passed a host, IP, and key which already match the
known_hosts file it is supposed to check.
"""
l = []
default.verifyHostKey(self.fakeTransport, "4.3.2.1", sampleKey,
"I don't care.").addCallback(l.append)
self.assertEqual([1], l)
def replaceHome(self, tempHome):
"""
Replace the HOME environment variable until the end of the current
test, with the given new home-directory, so that L{os.path.expanduser}
will yield controllable, predictable results.
@param tempHome: the pathname to replace the HOME variable with.
@type tempHome: L{str}
"""
oldHome = os.environ.get('HOME')
def cleanupHome():
if oldHome is None:
del os.environ['HOME']
else:
os.environ['HOME'] = oldHome
self.addCleanup(cleanupHome)
os.environ['HOME'] = tempHome
def test_noKnownHostsOption(self):
"""
L{default.verifyHostKey} should find your known_hosts file in
~/.ssh/known_hosts if you don't specify one explicitly on the command
line.
"""
l = []
tmpdir = self.mktemp()
oldHostsOption = self.hostsOption
hostsNonOption = FilePath(tmpdir).child(".ssh").child("known_hosts")
hostsNonOption.parent().makedirs()
FilePath(oldHostsOption).moveTo(hostsNonOption)
self.replaceHome(tmpdir)
self.options['known-hosts'] = None
default.verifyHostKey(self.fakeTransport, "4.3.2.1", sampleKey,
"I don't care.").addCallback(l.append)
self.assertEqual([1], l)
def test_verifyHostButNotIP(self):
"""
L{default.verifyHostKey} should return a L{Deferred} which fires with
C{1} when passed a host which matches, with an IP which is not present in its
known_hosts file, and should also warn the user that it has added the
IP address.
"""
l = []
default.verifyHostKey(self.fakeTransport, "8.7.6.5", sampleKey,
"Fingerprint not required.").addCallback(l.append)
self.assertEqual(
["Warning: Permanently added the RSA host key for IP address "
"'8.7.6.5' to the list of known hosts."],
self.fakeFile.outchunks)
self.assertEqual([1], l)
knownHostsFile = KnownHostsFile.fromPath(FilePath(self.hostsOption))
self.assertEqual(True, knownHostsFile.hasHostKey("8.7.6.5",
Key.fromString(sampleKey)))
def test_verifyQuestion(self):
"""
L{default.verifyHostKey} should return a L{Deferred} which fires with
C{0} when passed an unknown host that the user refuses to acknowledge.
"""
self.fakeTransport.factory.options['host'] = 'fake.example.com'
self.fakeFile.inlines.append("no")
d = default.verifyHostKey(
self.fakeTransport, "9.8.7.6", otherSampleKey, "No fingerprint!")
self.assertEqual(
["The authenticity of host 'fake.example.com (9.8.7.6)' "
"can't be established.\n"
"RSA key fingerprint is "
"57:a1:c2:a1:07:a0:2b:f4:ce:b5:e5:b7:ae:cc:e1:99.\n"
"Are you sure you want to continue connecting (yes/no)? "],
self.fakeFile.outchunks)
return self.assertFailure(d, UserRejectedKey)
def test_verifyBadKey(self):
"""
L{default.verifyHostKey} should return a L{Deferred} which fails with
L{HostKeyChanged} if the host key is incorrect.
"""
d = default.verifyHostKey(
self.fakeTransport, "4.3.2.1", otherSampleKey,
"Again, not required.")
return self.assertFailure(d, HostKeyChanged)
| mit |
momennaas/kalam-lp | constants.py | 1 | 15769 | # -*- encoding: utf-8 -*-
##############################################################
## Author: Abdulmumen Naas
## Description: Arabic Natural Language Processor (Kalam-lp)
## Version: 0.0.1
## Copyright (c) 2014 Abdulmumen Naas
#############################################################
from alefba import *
#Constantns
#DEMOSTRATIVES
DEM = (ARABIC_LETTER_THAL+ARABIC_LETTER_ALEF,
ARABIC_LETTER_HEH+ARABIC_LETTER_THAL+ARABIC_LETTER_ALEF,
ARABIC_LETTER_HEH+ARABIC_LETTER_THAL+ARABIC_LETTER_HEH,
ARABIC_LETTER_HEH+ARABIC_LETTER_THAL+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
ARABIC_LETTER_HEH+ARABIC_LETTER_THAL+ARABIC_LETTER_YEH+ARABIC_LETTER_NOON,
ARABIC_LETTER_HEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_TEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
ARABIC_LETTER_HEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_TEH+ARABIC_LETTER_YEH+ARABIC_LETTER_NOON,
ARABIC_LETTER_HEH+ARABIC_LETTER_WAW_WITH_HAMZA_ABOVE+ARABIC_LETTER_LAM+ARABIC_LETTER_ALEF+ARABIC_LETTER_HAMZA,
)
DEMLOC = (ARABIC_LETTER_HEH+ARABIC_LETTER_NOON+ARABIC_LETTER_ALEF,
ARABIC_LETTER_HEH+ARABIC_LETTER_NOON+ARABIC_LETTER_ALEF+ARABIC_LETTER_KAF,
ARABIC_LETTER_HEH+ARABIC_LETTER_NOON+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_KAF)
#Relative nouns
REL = (ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_THAL+ARABIC_LETTER_YEH,
ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_TEH+ARABIC_LETTER_YEH,
ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_THAL+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_LAM+ARABIC_LETTER_TEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_LAM+ARABIC_LETTER_TEH+ARABIC_LETTER_YEH+ARABIC_LETTER_NOON,
ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_LAM+ARABIC_LETTER_THAL+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_LAM+ARABIC_LETTER_ALEF+ARABIC_LETTER_TEH+ARABIC_LETTER_YEH,
ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_LAM+ARABIC_LETTER_ALEF+ARABIC_LETTER_YEH_WITH_HAMZA_ABOVE+ARABIC_LETTER_YEH,)
#Personal Pronouns
PPRON1 = (ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON+ARABIC_LETTER_ALEF,
ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_NOON+ARABIC_LETTER_ALEF,
ARABIC_LETTER_NOON+ARABIC_LETTER_HAH+ARABIC_LETTER_NOON)
PPRON2 = (ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_NOON+ARABIC_LETTER_TEH,
ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON+ARABIC_LETTER_TEH,
ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_NOON+ARABIC_LETTER_TEH+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON+ARABIC_LETTER_TEH+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_NOON+ARABIC_LETTER_TEH+ARABIC_LETTER_MEEM,
ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON+ARABIC_LETTER_TEH+ARABIC_LETTER_MEEM,)
PPRON3 = (ARABIC_LETTER_HEH+ARABIC_LETTER_WAW,
ARABIC_LETTER_HEH+ARABIC_LETTER_YEH,
ARABIC_LETTER_HEH+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
ARABIC_LETTER_HEH+ARABIC_LETTER_MEEM,
ARABIC_LETTER_HEH+ARABIC_LETTER_NOON,)
#Possessive Pronouns
POSPRON = (ARABIC_LETTER_YEH,
ARABIC_LETTER_KAF,
ARABIC_LETTER_HEH,
ARABIC_LETTER_HEH+ARABIC_LETTER_ALEF,
ARABIC_LETTER_NOON+ARABIC_LETTER_ALEF,
ARABIC_LETTER_KAF+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
ARABIC_LETTER_HEH+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
ARABIC_LETTER_KAF+ARABIC_LETTER_MEEM,
ARABIC_LETTER_KAF+ARABIC_LETTER_NOON,
ARABIC_LETTER_HEH+ARABIC_LETTER_NOON,
ARABIC_LETTER_HEH+ARABIC_LETTER_MEEM)
POSPRON1 = (ARABIC_LETTER_YEH,
ARABIC_LETTER_NOON+ARABIC_LETTER_ALEF)
POSPRON2 = (ARABIC_LETTER_KAF+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
ARABIC_LETTER_KAF+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
ARABIC_LETTER_KAF+ARABIC_LETTER_MEEM,
ARABIC_LETTER_KAF+ARABIC_LETTER_NOON,)
POSPRON3 = (ARABIC_LETTER_HEH,
ARABIC_LETTER_HEH+ARABIC_LETTER_ALEF,
ARABIC_LETTER_HEH+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
ARABIC_LETTER_HEH+ARABIC_LETTER_NOON,
ARABIC_LETTER_HEH+ARABIC_LETTER_MEEM)
CE_SF = (ARABIC_LETTER_TEH_MARBUTA,)
CE_DM = (ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,)
CE_DF = (ARABIC_LETTER_TEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,)
CE_PM = (ARABIC_LETTER_YEH+ARABIC_LETTER_NOON,)
CE_PF = (ARABIC_LETTER_ALEF+ARABIC_LETTER_TEH,)
#Prepositions
PREP = (ARABIC_PREP_MEN,
ARABIC_PREP_ELA,
ARABIC_PREP_HATTA,
ARABIC_PREP_KHALA,
ARABIC_PREP_HASHA,
ARABIC_PREP_ADA,
ARABIC_PREP_FE,
ARABIC_PREP_AN,
ARABIC_PREP_ALA,
ARABIC_PREP_MUTH,
ARABIC_PREP_MUNTHO,
ARABIC_PREP_KAY,
ARABIC_PREP_WAW,
ARABIC_PREP_TA,
ARABIC_PREP_KAF,
ARABIC_PREP_BA,
ARABIC_PREP_LALLA,
ARABIC_PREP_MATA)
PVSOLO = (ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE,
ARABIC_LETTER_ALEF,
ARABIC_LETTER_TEH,
ARABIC_LETTER_NOON,
ARABIC_LETTER_YEH,)
#Kana and sisters
KANA = (ARABIC_LETTER_KAF+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
ARABIC_LETTER_LAM+ARABIC_LETTER_YEH+ARABIC_LETTER_SEEN,
ARABIC_LETTER_SAD+ARABIC_LETTER_ALEF+ARABIC_LETTER_REH,
ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_SAD+ARABIC_LETTER_BEH+ARABIC_LETTER_HAH,
ARABIC_LETTER_ALEF+ARABIC_LETTER_SAD+ARABIC_LETTER_BEH+ARABIC_LETTER_HAH,
ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_DAD+ARABIC_LETTER_HAH+ARABIC_LETTER_ALEF_MAKSURA,
ARABIC_LETTER_ALEF+ARABIC_LETTER_DAD+ARABIC_LETTER_HEH+ARABIC_LETTER_ALEF_MAKSURA,
ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_MEEM+ARABIC_LETTER_SEEN+ARABIC_LETTER_ALEF_MAKSURA,
ARABIC_LETTER_ALEF+ARABIC_LETTER_MEEM+ARABIC_LETTER_SEEN+ARABIC_LETTER_ALEF_MAKSURA,
ARABIC_LETTER_ZAH+ARABIC_LETTER_LAM,
ARABIC_LETTER_BEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_TEH,)
#Conjunctions
CONJ = (ARABIC_LETTER_WAW,
ARABIC_LETTER_THEH+ARABIC_LETTER_MEEM,
ARABIC_LETTER_ALEF+ARABIC_LETTER_WAW,
ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_WAW,
ARABIC_LETTER_FEH)
#Accusative(INNA)
ACC = (ARABIC_LETTER_ALEF_WITH_HAMZA_BELOW+ARABIC_LETTER_NOON,
ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_NOON,
ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
ARABIC_PREP_LALLA,
ARABIC_LETTER_LAM+ARABIC_LETTER_KAF+ARABIC_LETTER_NOON,
ARABIC_LETTER_KAF+ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_NOON,
ARABIC_LETTER_LAM+ARABIC_LETTER_YEH+ARABIC_LETTER_TEH)
#Exceptions
EXP = (ARABIC_LETTER_ALEF_WITH_HAMZA_BELOW+ARABIC_LETTER_LAM+ARABIC_LETTER_ALEF,
ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM+ARABIC_LETTER_ALEF,
ARABIC_LETTER_GHAIN+ARABIC_LETTER_YEH+ARABIC_LETTER_REH,
ARABIC_PREP_KHALA,
ARABIC_PREP_ADA,
ARABIC_PREP_HASHA,
ARABIC_LETTER_LAM+ARABIC_LETTER_YEH+ARABIC_LETTER_SEEN,)
#Interrogative
INTG = ()
#Negative
NEG = (
ARABIC_LETTER_LAM+ARABIC_LETTER_MEEM,
ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,)
#Conditional
COND = (ARABIC_LETTER_LAM+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
ARABIC_LETTER_ALEF_WITH_HAMZA_BELOW+ARABIC_LETTER_NOON,
ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
ARABIC_LETTER_MEEM+ARABIC_LETTER_NOON,
ARABIC_LETTER_MEEM+ARABIC_LETTER_HEH+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
ARABIC_PREP_MATA,
ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_YEH+ARABIC_LETTER_NOON,
ARABIC_LETTER_ALEF+ARABIC_LETTER_YEH+ARABIC_LETTER_NOON,
ARABIC_LETTER_KAF+ARABIC_LETTER_YEH+ARABIC_LETTER_FEH,
ARABIC_LETTER_HAH+ARABIC_LETTER_YEH+ARABIC_LETTER_THEH+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
ARABIC_LETTER_ALEF_WITH_HAMZA_BELOW+ARABIC_LETTER_THAL+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
ARABIC_LETTER_ALEF+ARABIC_LETTER_THAL+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF,
ARABIC_LETTER_ALEF+ARABIC_LETTER_THAL+ARABIC_LETTER_ALEF,
ARABIC_LETTER_ALEF_WITH_HAMZA_BELOW+ARABIC_LETTER_THAL+ARABIC_LETTER_ALEF,
ARABIC_LETTER_ALEF+ARABIC_LETTER_THAL,
ARABIC_LETTER_ALEF_WITH_HAMZA_BELOW+ARABIC_LETTER_THAL,
ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_YEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
ARABIC_LETTER_ALEF+ARABIC_LETTER_YEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
ARABIC_LETTER_ALEF+ARABIC_LETTER_YEH+ARABIC_LETTER_YEH+ARABIC_LETTER_NOON,
ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_YEH+ARABIC_LETTER_YEH+ARABIC_LETTER_NOON,
ARABIC_LETTER_ALEF+ARABIC_LETTER_YEH+ARABIC_LETTER_YEH,
ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_YEH+ARABIC_LETTER_YEH
)
#Vocative particles
VOC = (ARABIC_LETTER_ALEF_WITH_MADDA_ABOVE,
ARABIC_LETTER_YEH+ARABIC_LETTER_ALEF)
TENN = (ARABIC_LETTER_ALEF+ARABIC_LETTER_SEEN+ARABIC_LETTER_MEEM,
ARABIC_LETTER_ALEF+ARABIC_LETTER_SEEN+ARABIC_LETTER_TEH,
ARABIC_LETTER_ALEF+ARABIC_LETTER_BEH+ARABIC_LETTER_NOON,
ARABIC_LETTER_ALEF+ARABIC_LETTER_BEH+ARABIC_LETTER_NOON+ARABIC_LETTER_TEH_MARBUTA,
ARABIC_LETTER_ALEF+ARABIC_LETTER_BEH+ARABIC_LETTER_NOON+ARABIC_LETTER_MEEM,
ARABIC_LETTER_ALEF+ARABIC_LETTER_MEEM+ARABIC_LETTER_WAW_WITH_HAMZA_ABOVE,
ARABIC_LETTER_ALEF+ARABIC_LETTER_MEEM+ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE+ARABIC_LETTER_TEH_MARBUTA,
ARABIC_LETTER_ALEF+ARABIC_LETTER_THEH+ARABIC_LETTER_NOON+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON+ARABIC_LETTER_TEH+ARABIC_LETTER_TEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_NOON,
ARABIC_LETTER_ALEF+ARABIC_LETTER_YEH+ARABIC_LETTER_MEEM+ARABIC_LETTER_NOON+" "+ARABIC_WORD_ALLAH)
DEM_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in DEM]
DEMLOC_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in DEMLOC]
REL_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in REL]
PPRON1_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in PPRON1]
PPRON2_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in PPRON2]
PPRON3_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in PPRON3]
POSPRON_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in POSPRON]
PREP_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in PREP]
#PSOLO_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in PSOLO]
PVSOLO_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in PVSOLO]
CONJ_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in CONJ]
ACC_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in ACC]
NEG_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in NEG]
COND_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in COND]
EXP_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in EXP]
VOC_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in VOC]
TENN_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in TENN]
KANA_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in KANA]
#Noun-specific Prefixes
#CONJ+P+AL
PRFX_CONJPAL = (
ARABIC_LETTER_WAW+ARABIC_LETTER_BEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,
ARABIC_LETTER_WAW+ARABIC_LETTER_KAF+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,
ARABIC_LETTER_WAW+ARABIC_LETTER_LAM+ARABIC_LETTER_LAM,
ARABIC_LETTER_FEH+ARABIC_LETTER_BEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,
ARABIC_LETTER_FEH+ARABIC_LETTER_KAF+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,
ARABIC_LETTER_FEH+ARABIC_LETTER_LAM+ARABIC_LETTER_LAM,)
#CONJ+AL
PRFX_CONJAL = (
ARABIC_LETTER_WAW+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,
ARABIC_LETTER_FEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,
)
#P+AL
PRFX_PAL = (ARABIC_LETTER_BEH+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,
ARABIC_LETTER_KAF+ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,
ARABIC_LETTER_LAM+ARABIC_LETTER_LAM,)
#CONJ+P
PRFX_CONJP = (ARABIC_LETTER_WAW+ARABIC_LETTER_BEH,
ARABIC_LETTER_WAW+ARABIC_LETTER_KAF,
ARABIC_LETTER_WAW+ARABIC_LETTER_LAM,
ARABIC_LETTER_WAW+ARABIC_LETTER_FEH,
ARABIC_LETTER_FEH+ARABIC_LETTER_BEH,
ARABIC_LETTER_FEH+ARABIC_LETTER_KAF,
ARABIC_LETTER_FEH+ARABIC_LETTER_LAM,
ARABIC_LETTER_FEH+ARABIC_LETTER_FEH,)
#AL
PRFX_AL = (ARABIC_LETTER_ALEF+ARABIC_LETTER_LAM,)
#CONJ
PRFX_CONJ = (ARABIC_LETTER_WAW,ARABIC_LETTER_FEH,)
#P
PRFX_P = (ARABIC_LETTER_BEH,
ARABIC_LETTER_KAF,
ARABIC_LETTER_LAM,)
##Verb-specific Prefixes
#PV+SEEN+CONJ
PRFX_CONJSPV = (ARABIC_LETTER_WAW+ARABIC_LETTER_SEEN+ARABIC_LETTER_YEH,
ARABIC_LETTER_WAW+ARABIC_LETTER_SEEN+ARABIC_LETTER_NOON,
ARABIC_LETTER_WAW+ARABIC_LETTER_SEEN+ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE,
ARABIC_LETTER_WAW+ARABIC_LETTER_SEEN+ARABIC_LETTER_TEH,
ARABIC_LETTER_FEH+ARABIC_LETTER_SEEN+ARABIC_LETTER_YEH,
ARABIC_LETTER_FEH+ARABIC_LETTER_SEEN+ARABIC_LETTER_NOON,
ARABIC_LETTER_FEH+ARABIC_LETTER_SEEN+ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE,
ARABIC_LETTER_FEH+ARABIC_LETTER_SEEN+ARABIC_LETTER_TEH)
#PV+CONJ
PRFX_CONJPV = (ARABIC_LETTER_WAW+ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE,
ARABIC_LETTER_WAW+ARABIC_LETTER_TEH,
ARABIC_LETTER_WAW+ARABIC_LETTER_NOON,
ARABIC_LETTER_WAW+ARABIC_LETTER_YEH,
ARABIC_LETTER_FEH+ARABIC_LETTER_ALEF,
ARABIC_LETTER_FEH+ARABIC_LETTER_TEH,
ARABIC_LETTER_FEH+ARABIC_LETTER_NOON,
ARABIC_LETTER_FEH+ARABIC_LETTER_YEH,)
#PV+SEEN
PRFX_SEENPV = (ARABIC_LETTER_SEEN+ARABIC_LETTER_NOON,
ARABIC_LETTER_SEEN+ARABIC_LETTER_YEH,
ARABIC_LETTER_SEEN+ARABIC_LETTER_TEH,
ARABIC_LETTER_SEEN+ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE,)
#SEEN
PRFX_SEEN = (ARABIC_LETTER_SEEN,)
#PV
PRFX_PV = (ARABIC_LETTER_TEH,
ARABIC_LETTER_YEH,
ARABIC_LETTER_NOON,
ARABIC_LETTER_ALEF_WITH_HAMZA_ABOVE,)
#SAWFA
PRFX_SAWFA = (ARABIC_LETTER_SEEN+ARABIC_LETTER_WAW+ARABIC_LETTER_FEH,)
#------------Prefixes Regex----------------------
###Noun-Specific###
CONJPAL_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_CONJPAL]
CONJAL_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_CONJAL]
AL_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_AL]
CONJP_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_CONJP]
PAL_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_PAL]
CONJ_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_CONJ]
P_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_P]
###Verb-Specific###
CONJSPV_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_CONJSPV]
CONJPV_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_CONJPV]
SEENPV_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_SEENPV]
PV_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_PV]
SEEN_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_SEEN]
SAWFA_REGX = [(len(prefix), re.compile(u"%s" % prefix, re.UNICODE)) for prefix in PRFX_SAWFA]
#------------Suffixes Regex--------------------------
###Possessives Pronouns###
POSPRON1_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in POSPRON1]
POSPRON2_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in POSPRON2]
POSPRON3_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in POSPRON3]
###Case Ending###
CE_SF_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in CE_SF]
CE_DM_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in CE_DM]
CE_DF_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in CE_DF]
CE_PM_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in CE_PM]
CE_PF_REGX = [(len(p), re.compile(u"%s" % p, re.UNICODE)) for p in CE_PF]
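# A hypothetical helper (an illustrative sketch, not part of this module):
# the (length, compiled-pattern) pairs above make longest-match prefix
# stripping straightforward, e.g. strip_prefix(word, CONJPAL_REGX):
def strip_prefix(word, regx_list):
    # try longer prefixes first so e.g. CONJ+P+AL wins over CONJ alone
    for length, regx in sorted(regx_list, key=lambda pair: pair[0], reverse=True):
        if regx.match(word):  # re.match anchors at the start of the word
            return word[length:]
    return word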
| mit |
srinathv/vispy | examples/tutorial/gloo/lighted_cube.py | 18 | 5014 | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2015, Vispy Development Team. All Rights Reserved.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
# Author: Nicolas P .Rougier
# Date: 04/03/2014
# -----------------------------------------------------------------------------
import numpy as np
from vispy import gloo, app
from vispy.gloo import Program, VertexBuffer, IndexBuffer
from vispy.util.transforms import perspective, translate, rotate
from vispy.geometry import create_cube
vertex = """
uniform mat4 u_model;
uniform mat4 u_view;
uniform mat4 u_projection;
uniform vec4 u_color;
attribute vec3 position;
attribute vec2 texcoord;
attribute vec3 normal;
attribute vec4 color;
varying vec3 v_position;
varying vec3 v_normal;
varying vec4 v_color;
void main()
{
v_normal = normal;
v_position = position;
v_color = color * u_color;
gl_Position = u_projection * u_view * u_model * vec4(position,1.0);
}
"""
fragment = """
uniform mat4 u_model;
uniform mat4 u_view;
uniform mat4 u_normal;
uniform vec3 u_light_intensity;
uniform vec3 u_light_position;
varying vec3 v_position;
varying vec3 v_normal;
varying vec4 v_color;
void main()
{
// Calculate normal in world coordinates
vec3 normal = normalize(u_normal * vec4(v_normal,1.0)).xyz;
// Calculate the location of this fragment (pixel) in world coordinates
vec3 position = vec3(u_view*u_model * vec4(v_position, 1));
// Calculate the vector from this pixel's surface to the light source
vec3 surfaceToLight = u_light_position - position;
// Calculate the cosine of the angle of incidence (brightness)
float brightness = dot(normal, surfaceToLight) /
(length(surfaceToLight) * length(normal));
brightness = max(min(brightness,1.0),0.0);
// Calculate final color of the pixel, based on:
// 1. The angle of incidence: brightness
// 2. The color/intensities of the light: light.intensities
// 3. The texture and texture coord: texture(tex, fragTexCoord)
gl_FragColor = v_color * brightness * vec4(u_light_intensity, 1);
}
"""
class Canvas(app.Canvas):
def __init__(self):
app.Canvas.__init__(self, size=(512, 512), title='Lighted cube',
keys='interactive')
self.timer = app.Timer('auto', self.on_timer)
# Build cube data
V, F, O = create_cube()
vertices = VertexBuffer(V)
self.faces = IndexBuffer(F)
self.outline = IndexBuffer(O)
# Build view, model, projection & normal
# --------------------------------------
self.view = translate((0, 0, -5))
model = np.eye(4, dtype=np.float32)
normal = np.array(np.matrix(np.dot(self.view, model)).I.T)
# Build program
# --------------------------------------
self.program = Program(vertex, fragment)
self.program.bind(vertices)
self.program["u_light_position"] = 2, 2, 2
self.program["u_light_intensity"] = 1, 1, 1
self.program["u_model"] = model
self.program["u_view"] = self.view
self.program["u_normal"] = normal
self.phi, self.theta = 0, 0
self.activate_zoom()
# OpenGL initialization
# --------------------------------------
gloo.set_state(clear_color=(0.30, 0.30, 0.35, 1.00), depth_test=True,
polygon_offset=(1, 1),
blend_func=('src_alpha', 'one_minus_src_alpha'),
line_width=0.75)
self.timer.start()
self.show()
def on_draw(self, event):
gloo.clear(color=True, depth=True)
# program.draw(gl.GL_TRIANGLES, indices)
# Filled cube
gloo.set_state(blend=False, depth_test=True, polygon_offset_fill=True)
self.program['u_color'] = 1, 1, 1, 1
self.program.draw('triangles', self.faces)
# Outlined cube
gloo.set_state(polygon_offset_fill=False, blend=True, depth_mask=False)
self.program['u_color'] = 0, 0, 0, 1
self.program.draw('lines', self.outline)
gloo.set_state(depth_mask=True)
def on_resize(self, event):
self.activate_zoom()
def activate_zoom(self):
gloo.set_viewport(0, 0, *self.physical_size)
projection = perspective(45.0, self.size[0] / float(self.size[1]),
2.0, 10.0)
self.program['u_projection'] = projection
def on_timer(self, event):
self.theta += .5
self.phi += .5
model = np.dot(rotate(self.theta, (0, 0, 1)),
rotate(self.phi, (0, 1, 0)))
normal = np.linalg.inv(np.dot(self.view, model)).T
self.program['u_model'] = model
self.program['u_normal'] = normal
self.update()
if __name__ == '__main__':
c = Canvas()
app.run()
| bsd-3-clause |
40423217/2016fallcadp_hw | plugin/liquid_tags/test_audio.py | 273 | 1456 | from . import audio
import pytest
import re
@pytest.mark.parametrize('input,expected', [
('http://foo.bar https://bar.foo',
('http://foo.bar', 'https://bar.foo', None)),
('http://test.foo',
('http://test.foo', None, None)),
('https://test.foo',
('https://test.foo', None, None)),
('http://foo.foo https://bar.bar http://zonk.zonk',
('http://foo.foo', 'https://bar.bar', 'http://zonk.zonk'))
])
def test_regex(input, expected):
assert re.match(audio.AUDIO, input).groups() == expected
@pytest.mark.parametrize('input,expected', [
('http://foo.foo/foo.mp3',
('<audio controls>'
'<source src="http://foo.foo/foo.mp3" type="audio/mpeg">'
'Your browser does not support the audio element.</audio>')),
('https://foo.foo/foo.ogg http://bar.bar/bar.opus',
('<audio controls>'
'<source src="https://foo.foo/foo.ogg" type="audio/ogg">'
'<source src="http://bar.bar/bar.opus" type="audio/ogg">'
'Your browser does not support the audio element.</audio>')),
('http://1.de/1.wav http://2.de/2.mp4 http://3.de/3.ogg',
('<audio controls>'
'<source src="http://1.de/1.wav" type="audio/wav">'
'<source src="http://2.de/2.mp4" type="audio/mp4">'
'<source src="http://3.de/3.ogg" type="audio/ogg">'
'Your browser does not support the audio element.</audio>'))
])
def test_create_html(input, expected):
assert audio.create_html(input) == expected
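# The expectations above imply a simple extension-to-MIME-type table; a
# minimal sketch of such a mapping (the real table lives in audio.py and
# may differ in detail):
AUDIO_MIME_TYPES = {'mp3': 'audio/mpeg', 'ogg': 'audio/ogg',
                    'opus': 'audio/ogg', 'wav': 'audio/wav',
                    'mp4': 'audio/mp4'}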
| agpl-3.0 |
Zanzibar82/streamondemand.test | servers/vk.py | 67 | 9258 | # -*- coding: iso-8859-1 -*-
#------------------------------------------------------------
# pelisalacarta - XBMC Plugin
# Connector for VK Server
# http://blog.tvalacarta.info/plugin-xbmc/pelisalacarta/
#------------------------------------------------------------
import urlparse,urllib2,urllib,re
import os
from core import scrapertools
from core import logger
from core import config
def test_video_exists( page_url ):
logger.info("[uploadedto.py] test_video_exists(page_url='%s')" % page_url)
data = scrapertools.cache_page(page_url)
if "This video has been removed from public access" in data:
return False,"El archivo ya no esta disponible<br/>en VK (ha sido borrado)"
else:
return True,""
# Returns an array of possible video url's from the page_url
def get_video_url( page_url , premium = False , user="" , password="", video_password="" ):
logger.info("[vk.py] get_video_url(page_url='%s')" % page_url)
# Read the page and extract the video ID
data = scrapertools.cache_page(page_url.replace("amp;",""))
videourl = ""
regexp = re.compile(r'vkid=([^\&]+)\&')
match = regexp.search(data)
vkid = ""
if match is not None:
vkid = match.group(1)
else:
data2 = data.replace("\\","")
patron = '"vkid":"([^"]+)"'
matches = re.compile(patron,re.DOTALL).findall(data2)
if len(matches)>0:
vkid = matches[0]
else:
logger.info("no encontro vkid")
logger.info("vkid="+vkid)
# Extract the video parameters and add the available qualities to the list
patron = "var video_host = '([^']+)'.*?"
patron += "var video_uid = '([^']+)'.*?"
patron += "var video_vtag = '([^']+)'.*?"
patron += "var video_no_flv = ([^;]+);.*?"
patron += "var video_max_hd = '([^']+)'"
matches = re.compile(patron,re.DOTALL).findall(data)
scrapertools.printMatches(matches)
if len(matches)>0:
#01:44:52 T:2957156352 NOTICE: video_host=http://cs509601.vk.com/, video_uid=149623387, video_vtag=1108941f4c, video_no_flv=1, video_max_hd=1
video_host = matches[0][0]
video_uid = matches[0][1]
video_vtag = matches[0][2]
video_no_flv = matches[0][3]
video_max_hd = matches[0][4]
else:
#{"uid":"97482389","vid":"161509127\",\"oid\":\"97482389\","host":"507214",\"vtag\":\"99bca9d028\",\"ltag\":\"l_26f55018\",\"vkid\":\"161509127\",\"md_title\":\"El Libro de La Selva - 1967 - tetelx - spanish\",\"md_author\":\"Tetelx Tete\",\"hd\":1,\"no_flv\":1,\"hd_def\":-1,\"dbg_on\":0,\"t\":\"\",\"thumb\":\"http:\\\/\\\/cs507214.vkontakte.ru\\\/u97482389\\\/video\\\/l_26f55018.jpg\",\"hash\":\"3a576695e9f0bfe3093eb21239bd322f\",\"hash2\":\"be750b8971933dd6\",\"is_vk\":\"1\",\"is_ext\":\"0\",\"lang_add\":\"Add to My Videos\",\"lang_share\":\"Share\",\"lang_like\":\"Like\",\"lang_volume_on\":\"Unmute\",\"lang_volume_off\":\"Mute\",\"lang_volume\":\"Volume\",\"lang_hdsd\":\"Change Video Quality\",\"lang_fullscreen\":\"Full Screen\",\"lang_window\":\"Minimize\",\"lang_rotate\":\"Rotate\",\"video_play_hd\":\"Watch in HD\",\"video_stop_loading\":\"Stop Download\",\"video_player_version\":\"VK Video Player\",\"video_player_author\":\"Author - Alexey Kharkov\",\"goto_orig_video\":\"Go to Video\",\"video_get_video_code\":\"Copy vdeo code\",\"video_load_error\":\"The video has not uploaded yet or the server is not available\",\"video_get_current_url\":\"Copy frame link\",\"nologo\":1,\"liked\":0,\"add_hash\":\"67cd39a080ad6e0ad7\",\"added\":1,\"use_p2p\":0,\"p2p_group_id\":\"fb2d8cfdcbea4f3c\"}
#01:46:05 T:2955558912 NOTICE: video_host=507214, video_uid=97482389, video_vtag=99bca9d028, video_no_flv=1, video_max_hd=1
data2 = data.replace("\\","")
video_host = scrapertools.get_match(data2,'"host":"([^"]+)"')
video_uid = scrapertools.get_match(data2,'"uid":"([^"]+)"')
video_vtag = scrapertools.get_match(data2,'"vtag":"([^"]+)"')
video_no_flv = scrapertools.get_match(data2,'"no_flv":([0-9]+)')
video_max_hd = scrapertools.get_match(data2,'"hd":([0-9]+)')
if not video_host.startswith("http://"):
video_host = "http://cs"+video_host+".vk.com/"
logger.info("video_host="+video_host+", video_uid="+video_uid+", video_vtag="+video_vtag+", video_no_flv="+video_no_flv+", video_max_hd="+video_max_hd)
video_urls = []
if video_no_flv.strip() == "0" and video_uid != "0":
tipo = "flv"
if "http://" in video_host:
videourl = "%s/u%s/video/%s.%s" % (video_host,video_uid,video_vtag,tipo)
else:
videourl = "http://%s/u%s/video/%s.%s" % (video_host,video_uid,video_vtag,tipo)
        # Add it to the list
video_urls.append( ["FLV [vk]",videourl])
elif video_uid== "0" and vkid != "": #http://447.gt3.vkadre.ru/assets/videos/2638f17ddd39-75081019.vk.flv
tipo = "flv"
if "http://" in video_host:
videourl = "%s/assets/videos/%s%s.vk.%s" % (video_host,video_vtag,vkid,tipo)
else:
videourl = "http://%s/assets/videos/%s%s.vk.%s" % (video_host,video_vtag,vkid,tipo)
        # Add it to the list
video_urls.append( ["FLV [vk]",videourl])
else: #http://cs12385.vkontakte.ru/u88260894/video/d09802a95b.360.mp4
        # If the quality selected in the settings is HD it will play at 480 or 720; otherwise only 360. This check exists for the Xbox.
if video_max_hd=="0":
video_urls.append( ["240p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"240.mp4")])
elif video_max_hd=="1":
video_urls.append( ["240p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"240.mp4")])
video_urls.append( ["360p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"360.mp4")])
elif video_max_hd=="2":
video_urls.append( ["240p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"240.mp4")])
video_urls.append( ["360p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"360.mp4")])
video_urls.append( ["480p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"480.mp4")])
elif video_max_hd=="3":
video_urls.append( ["240p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"240.mp4")])
video_urls.append( ["360p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"360.mp4")])
video_urls.append( ["480p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"480.mp4")])
video_urls.append( ["720p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"720.mp4")])
else:
video_urls.append( ["240p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"240.mp4")])
video_urls.append( ["360p [vk]",get_mp4_video_link(video_host,video_uid,video_vtag,"360.mp4")])
for video_url in video_urls:
logger.info("[vk.py] %s - %s" % (video_url[0],video_url[1]))
return video_urls
def get_mp4_video_link(match0,match1,match2,tipo):
if match0.endswith("/"):
videourl = "%su%s/videos/%s.%s" % (match0,match1,match2,tipo)
else:
videourl = "%s/u%s/videos/%s.%s" % (match0,match1,match2,tipo)
return videourl
def find_videos(data):
encontrados = set()
devuelve = []
#http://vkontakte.ru/video_ext.php?oid=95855298&id=162902512&hash=4f0d023887f3648e
#http://vk.com/video_ext.php?oid=70712020&id=159787030&hash=88899d94685174af&hd=3"
#http://vk.com/video_ext.php?oid=161288347&id=162474656&hash=3b4e73a2c282f9b4&sd
#http://vk.com/video_ext.php?oid=146263567&id=163818182&hash=2dafe3b87a4da653&sd
#http://vk.com/video_ext.php?oid=146263567&id=163818182&hash=2dafe3b87a4da653
#http://vk.com/video_ext.php?oid=-34450039&id=161977144&hash=0305047ffe3c55a8&hd=3
data = data.replace("&","&")
data = data.replace("&","&")
patronvideos = '(/video_ext.php\?oid=[^&]+&id=[^&]+&hash=[a-z0-9]+)'
logger.info("[vk.py] find_videos #"+patronvideos+"#")
matches = re.compile(patronvideos).findall(data)
for match in matches:
titulo = "[vk]"
url = "http://vk.com"+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append( [ titulo , url , 'vk' ] )
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
# http://vk.com/video97482389_161509127?section=all
patronvideos = '(vk\.[a-z]+\/video[0-9]+_[0-9]+)'
logger.info("[vk.py] find_videos #"+patronvideos+"#")
matches = re.compile(patronvideos,re.DOTALL).findall(data)
#print data
for match in matches:
titulo = "[vk]"
url = "http://"+match
if url not in encontrados:
logger.info(" url="+url)
devuelve.append( [ titulo , url , 'vk' ] )
encontrados.add(url)
else:
logger.info(" url duplicada="+url)
return devuelve
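# A quick sketch of what find_videos returns (added; the URL values are hypothetical):
#   find_videos('<iframe src="http://vk.com/video_ext.php?oid=1&id=2&hash=abcdef01">')
# would yield [["[vk]", "http://vk.com/video_ext.php?oid=1&id=2&hash=abcdef01", "vk"]].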
def test():
video_urls = get_video_url("http://vk.com/video_ext.php?oid=190230445&id=164616513&hash=ef16fcd83b58b192&hd=1")
return len(video_urls)>0 | gpl-3.0 |
rkashapov/buildbot | master/buildbot/scheduler.py | 11 | 1352 | # This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
from __future__ import absolute_import
from __future__ import print_function
from buildbot.schedulers.basic import AnyBranchScheduler
from buildbot.schedulers.basic import Scheduler
from buildbot.schedulers.dependent import Dependent
from buildbot.schedulers.timed import Nightly
from buildbot.schedulers.timed import Periodic
from buildbot.schedulers.triggerable import Triggerable
from buildbot.schedulers.trysched import Try_Jobdir
from buildbot.schedulers.trysched import Try_Userpass
_hush_pyflakes = [Scheduler, AnyBranchScheduler, Dependent,
Periodic, Nightly, Triggerable, Try_Jobdir, Try_Userpass]
del _hush_pyflakes
| gpl-2.0 |
alsrgv/tensorflow | tensorflow/contrib/predictor/__init__.py | 93 | 1229 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Modules for `Predictor`s.
@@from_contrib_estimator
@@from_estimator
@@from_saved_model
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.contrib.predictor.predictor_factories import from_contrib_estimator
from tensorflow.contrib.predictor.predictor_factories import from_estimator
from tensorflow.contrib.predictor.predictor_factories import from_saved_model
from tensorflow.python.util.all_util import remove_undocumented
remove_undocumented(__name__)
| apache-2.0 |
mpimenov/omim | tools/python/airmaps/instruments/utils.py | 4 | 1380 | import os
import shutil
from datetime import datetime
from typing import Iterable
from airflow.operators.python_operator import PythonOperator
from maps_generator.generator.env import Env
from maps_generator.generator.stages import Stage
from maps_generator.generator.stages import get_stage_name
from maps_generator.maps_generator import run_generation
def put_current_date_in_filename(filename):
path, name = os.path.split(filename)
parts = name.split(".", maxsplit=1)
parts[0] += f"__{datetime.today().strftime('%Y_%m_%d')}"
return os.path.join(path, ".".join(parts))
def get_latest_filename(filename, prefix=""):
path, name = os.path.split(filename)
parts = name.split(".", maxsplit=1)
assert len(parts) != 0, parts
parts[0] = f"{prefix}latest"
return os.path.join(path, ".".join(parts))
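# Illustrative examples (added; the date shown is hypothetical):
#   put_current_date_in_filename("/data/maps.planet.txt") -> "/data/maps__2024_05_01.planet.txt"
#   get_latest_filename("/data/maps.planet.txt", prefix="beta_") -> "/data/beta_latest.planet.txt"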
def rm_build(**kwargs):
build_name = kwargs["ti"].xcom_pull(key="build_name")
env = Env(build_name=build_name)
shutil.rmtree(env.build_path)
def make_rm_build_task(dag):
return PythonOperator(
task_id="Rm_build_task",
provide_context=True,
python_callable=rm_build,
dag=dag,
)
def run_generation_from_first_stage(
env: Env, stages: Iterable[Stage], build_lock: bool = True
):
from_stage = get_stage_name(next(iter(stages)))
run_generation(env, stages, from_stage, build_lock)
| apache-2.0 |
chapmanb/bcbio-nextgen | bcbio/variation/pisces.py | 4 | 4024 | """Tumor only somatic calling with Pisces.
https://github.com/Illumina/Pisces
"""
import os
import shutil
import pysam
from bcbio import utils
from bcbio.distributed.transaction import file_transaction
from bcbio.pipeline import shared
from bcbio.pipeline import datadict as dd
from bcbio.provenance import do
from bcbio.variation import bedutils, ploidy, vcfutils
def run(align_bams, items, ref_file, assoc_files, region=None, out_file=None):
"""Run tumor only pisces calling
Handles bgzipping output file and fixing VCF sample naming to match BAM sample.
"""
paired = vcfutils.get_paired_bams(align_bams, items)
assert paired and not paired.normal_bam, ("Pisces supports tumor-only variant calling: %s" %
(",".join([dd.get_sample_name(d) for d in items])))
vrs = bedutils.population_variant_regions(items)
target = shared.subset_variant_regions(vrs, region,
out_file, items=items, do_merge=True)
min_af = float(dd.get_min_allele_fraction(paired.tumor_data)) / 100.0
if not utils.file_exists(out_file):
base_out_name = utils.splitext_plus(os.path.basename(paired.tumor_bam))[0]
raw_file = "%s.vcf" % utils.splitext_plus(out_file)[0]
with file_transaction(paired.tumor_data, raw_file) as tx_out_file:
ref_dir = _prep_genome(os.path.dirname(tx_out_file), paired.tumor_data)
out_dir = os.path.dirname(tx_out_file)
cores = dd.get_num_cores(paired.tumor_data)
emit_min_af = min_af / 10.0
cmd = ("pisces --bampaths {paired.tumor_bam} --genomepaths {ref_dir} --intervalpaths {target} "
"--maxthreads {cores} --minvf {emit_min_af} --vffilter {min_af} "
"--ploidy somatic --gvcf false -o {out_dir}")
# Recommended filtering for low frequency indels
# https://github.com/bcbio/bcbio-nextgen/commit/49d0cbb1f6dcbea629c63749e2f9813bd06dcee3#commitcomment-29765373
cmd += " -RMxNFilter 5,9,0.35"
# For low frequency UMI tagged variants, set higher variant thresholds
# https://github.com/Illumina/Pisces/issues/14#issuecomment-399756862
if min_af < (1.0 / 100.0):
cmd += " --minbasecallquality 30"
do.run(cmd.format(**locals()), "Pisces tumor-only somatic calling")
shutil.move(os.path.join(out_dir, "%s.vcf" % base_out_name),
tx_out_file)
vcfutils.bgzip_and_index(raw_file, paired.tumor_data["config"],
prep_cmd="sed 's#%s.bam#%s#' | %s" %
(base_out_name, dd.get_sample_name(paired.tumor_data),
vcfutils.add_contig_to_header_cl(dd.get_ref_file(paired.tumor_data), out_file)))
return vcfutils.bgzip_and_index(out_file, paired.tumor_data["config"])
def _prep_genome(out_dir, data):
"""Create prepped reference directory for pisces.
Requires a custom GenomeSize.xml file present.
"""
genome_name = utils.splitext_plus(os.path.basename(dd.get_ref_file(data)))[0]
out_dir = utils.safe_makedir(os.path.join(out_dir, genome_name))
ref_file = dd.get_ref_file(data)
utils.symlink_plus(ref_file, os.path.join(out_dir, os.path.basename(ref_file)))
with open(os.path.join(out_dir, "GenomeSize.xml"), "w") as out_handle:
out_handle.write('<sequenceSizes genomeName="%s">' % genome_name)
for c in pysam.AlignmentFile("%s.dict" % utils.splitext_plus(ref_file)[0]).header["SQ"]:
cur_ploidy = ploidy.get_ploidy([data], region=[c["SN"]])
out_handle.write('<chromosome fileName="%s" contigName="%s" totalBases="%s" knownBases="%s" '
'isCircular="false" ploidy="%s" md5="%s"/>' %
(os.path.basename(ref_file), c["SN"], c["LN"], c["LN"], cur_ploidy, c["M5"]))
out_handle.write('</sequenceSizes>')
return out_dir
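# For reference, a sketch of the GenomeSize.xml written above (added; all values hypothetical):
#   <sequenceSizes genomeName="GRCh37"><chromosome fileName="GRCh37.fa" contigName="1"
#    totalBases="249250621" knownBases="249250621" isCircular="false" ploidy="2"
#    md5="..."/></sequenceSizes>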
| mit |
kangkot/arangodb | 3rdParty/V8-4.3.61/build/gyp/test/standalone-static-library/gyptest-standalone-static-library.py | 186 | 1891 | #!/usr/bin/env python
# Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies build of a static_library with the standalone_static_library flag set.
"""
import os
import subprocess
import sys
import TestGyp
# standalone_static_library currently means two things: a specific output
# location for the built target and non-thin archive files. The Android gyp
# generator leaves both decisions to the Android build system, so this test
# doesn't work for that format.
test = TestGyp.TestGyp(formats=['!android'])
# Verify that types other than static_library cause a failure.
test.run_gyp('invalid.gyp', status=1, stderr=None)
target_str = 'invalid.gyp:bad#target'
err = ['gyp: Target %s has type executable but standalone_static_library flag '
'is only valid for static_library type.' % target_str]
test.must_contain_all_lines(test.stderr(), err)
# Build a valid standalone_static_library.
test.run_gyp('mylib.gyp')
test.build('mylib.gyp', target='prog')
# Verify that the static library is copied to the correct location.
# We expect the library to be copied to $PRODUCT_DIR.
standalone_static_library_dir = test.EXECUTABLE
path_to_lib = os.path.split(
test.built_file_path('mylib', type=standalone_static_library_dir))[0]
lib_name = test.built_file_basename('mylib', type=test.STATIC_LIB)
path = os.path.join(path_to_lib, lib_name)
test.must_exist(path)
# Verify that the program runs properly.
expect = 'hello from mylib.c\n'
test.run_built_executable('prog', stdout=expect)
# Verify that libmylib.a contains symbols. "ar -x" fails on a 'thin' archive.
supports_thick = ('make', 'ninja', 'cmake')
if test.format in supports_thick and sys.platform.startswith('linux'):
retcode = subprocess.call(['ar', '-x', path])
assert retcode == 0
test.pass_test()
| apache-2.0 |
jaffee/pigeon | pigeon/views.py | 1 | 2258 | from pigeon import app
from copy import deepcopy
import re
import requests
from flask import request
import json
JIRA_PROJECTS = app.config['JIRA_PROJECTS']
JIRA_URL = app.config['JIRA_URL']
USER = app.config['JIRA_USER']
PASS = app.config['JIRA_PASS']
ISSUE_COMMENT_URL = JIRA_URL + "/rest/api/2/issue/{issueIdOrKey}/comment"
COMMENT_TEMPLATE = """{author_name} referenced this issue in a commit.
{message}
{url}
"""
comment_json = {
"body": None,
}
@app.route('/push', methods=["POST"])
def new_push():
data = json.loads(request.data)
for commit in data.get('commits', []):
# skip merge commits
if commit.get('message', "").startswith('Merge branch'):
continue
issues = set(get_message_issues(JIRA_PROJECTS, commit.get('message', "")))
post_comment(data, commit, issues)
return ""
def build_comment(data, commit):
commit["author_name"] = commit["author"]["name"]
commit["author_email"] = commit["author"]["email"]
return COMMENT_TEMPLATE.format(**commit)
def post_comment(data, commit, issues):
for issue in issues:
existing_comments = get_existing_comments(issue)
for comment_body in existing_comments:
if commit['message'] in comment_body:
print "skipping commit: %s" % commit
                return  # Don't post this commit, as it (or one with the same message) has already been posted
        body = build_comment(data, commit)
        payload = deepcopy(comment_json)  # separate name so the webhook payload in `data` is not clobbered on later iterations
        payload["body"] = body
        resp = requests.post(ISSUE_COMMENT_URL.format(issueIdOrKey=issue),
                             data=json.dumps(payload),
auth=(USER, PASS),
headers={'content-type': 'application/json'})
print resp
# TODO: check response and email someone or something if it fails
def get_existing_comments(issue):
r = requests.get(ISSUE_COMMENT_URL.format(issueIdOrKey=issue), auth=(USER, PASS))
comment_list = r.json()['comments']
return map(lambda c: c['body'], comment_list)
def get_message_issues(projects, msg):
proj_search_str = "(?:"+"|".join(projects)+")"
regex = "%s-[0-9]+" % proj_search_str
return re.findall(regex, msg)
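# A minimal sketch of the pattern this builds (added; the project keys are hypothetical):
#   get_message_issues(["PROJ", "OPS"], "Fix PROJ-12, refs OPS-3")
# compiles to "(?:PROJ|OPS)-[0-9]+" and returns ["PROJ-12", "OPS-3"].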
| mit |
SCOAP3/invenio | invenio/ext/logging/backends/fs.py | 18 | 2491 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""Rotating file log handler for writing logs to the file system.
**Configuration**
======================== ======================================================
`LOGGING_FS_BACKUPCOUNT` Number of files to keep. **Default:** ``5``.
`LOGGING_FS_MAXBYTES` Max file size in bytes. **Default:** ``104857600``
(100 MB).
`LOGGING_FS_LEVEL` Log level threshold for handler. **Default:**
``WARNING``.
======================== ======================================================
"""
from __future__ import absolute_import
import os
import logging
from logging.handlers import RotatingFileHandler
def setup_app(app):
"""Filesystem logging handler."""
app.config.setdefault('LOGGING_FS_BACKUPCOUNT', 5)
app.config.setdefault('LOGGING_FS_MAXBYTES', 104857600) # 100mb
app.config.setdefault(
'LOGGING_FS_LEVEL',
'DEBUG' if app.debug else 'WARNING'
)
# Create log directory if it does not exists
try:
os.makedirs(
os.path.join(app.instance_path, app.config.get('CFG_LOGDIR', ''))
)
except Exception:
pass
handler = RotatingFileHandler(
os.path.join(
app.instance_path,
app.config.get('CFG_LOGDIR', ''),
app.logger_name + '.log'
),
backupCount=app.config['LOGGING_FS_BACKUPCOUNT'],
maxBytes=app.config['LOGGING_FS_MAXBYTES']
)
handler.setFormatter(logging.Formatter(
'%(asctime)s %(levelname)s: %(message)s '
'[in %(pathname)s:%(lineno)d]'
))
handler.setLevel(app.config['LOGGING_FS_LEVEL'])
# Add handler to application logger
app.logger.addHandler(handler)
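# A hedged usage sketch (added): wiring the handler into a bare Flask app.
#   from flask import Flask
#   app = Flask(__name__)
#   app.config['LOGGING_FS_LEVEL'] = 'INFO'  # hypothetical override
#   setup_app(app)  # logs then rotate under <instance_path>/<CFG_LOGDIR>/<logger name>.log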
| gpl-2.0 |
sgoudelis/mybitbank | mybitbank/middleware/switcher.py | 1 | 3010 | """
The MIT License (MIT)
Copyright (c) 2016 Stratos Goudelis
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import datetime
from django.utils.timezone import utc
from mybitbank.libs.connections import connector
from mybitbank.libs.jsonrpc import ServiceProxy
class CurrencyEnabler():
'''
Re-enable currency service after time has elapsed. This is a Django middleware.
'''
def process_request(self, request):
'''
Before the view is compiled, re-enabled disabled currency services.
'''
# just re-enable the service
for provider_id in connector.config.keys():
if isinstance(connector.config[provider_id].get('enabled', None), datetime.datetime):
# assume the value is datetime in the future... or is it ?
if datetime.datetime.utcnow().replace(tzinfo=utc) >= connector.config[provider_id]['enabled']:
# re-enable currency service
connector.services[provider_id] = ServiceProxy("http://%s:%s@%s:%s" %
(connector.config[provider_id]['rpcusername'],
connector.config[provider_id]['rpcpassword'],
connector.config[provider_id]['rpchost'],
connector.config[provider_id]['rpcport']))
connector.config[provider_id]['enabled'] = True
connector.alerts['currencybackend'][:] = [alert for alert in connector.alerts['currencybackend'] if alert.get('provider_id') != provider_id]
return None
def process_response(self, request, response):
'''
Clear connector errors after the view has been compiled
'''
connector.errors = []
return response
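# Activation sketch (added; this is old-style Django middleware, so it is registered via
# MIDDLEWARE_CLASSES in settings.py — the dotted path below is an assumption):
#   MIDDLEWARE_CLASSES += ('mybitbank.middleware.switcher.CurrencyEnabler',)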
| mit |
cpknowles/BRG | black_rapids_forward_model/Test_black_rapids_forward_model.py | 1 | 15674 | from pylab import *
from scipy.interpolate import RectBivariateSpline
from scipy import ndimage as nd
import numpy as np
import gdal
###############################################################################
#HOW DOES H0 SET INITIAL THICKNESS?? SOMEHOW S_spline of x and y minus the interpolated B vector(?) sets thickness. WE NEED A z!?!?!?!?!?!?
#Looks like H, which is used to define dsdx,dsdy,dsdz comes from split(Function(V)) where V is a mixed functionspace([FunctionSpace(mesh,"CG",1)]*5) What does that bracket imply?
#S_array = data.ReadAsArray()[::-1,:] is called in what looks to be the 'z' position for S_spline, as S_array.T What does that .T imply?
###############################################################################
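# Notes on the questions above (added; my reading of the code, not authoritative):
# - [FunctionSpace(mesh,"CG",1)]*5 is plain Python list repetition: five references to the
#   same scalar CG1 space, which MixedFunctionSpace combines into a 5-component space
#   (ubar, vbar, udef, vdef, H), so H is one component of U = Function(V).
# - S_array.T is the numpy transpose: RectBivariateSpline(x, y, z) expects z with shape
#   (len(x), len(y)), while ReadAsArray() yields (rows = y, cols = x), hence the .T.
# - The initial thickness is set explicitly below: H0 = S_obs - B (surface minus bed).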
def fill(data, invalid=None):
"""
Replace the value of invalid 'data' cells (indicated by 'invalid')
by the value of the nearest valid data cell
Input:
data: numpy array of any dimension
invalid: a binary array of same shape as 'data'. True cells set where data
value should be replaced.
If None (default), use: invalid = np.isnan(data)
Output:
Return a filled array.
"""
#import numpy as np
#import scipy.ndimage as nd
if invalid is None: invalid = np.isnan(data)
ind = nd.distance_transform_edt(invalid, return_distances=False, return_indices=True)
return data[tuple(ind)]
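# Quick illustration (added): for np.array([[1., np.nan], [np.nan, 4.]]), fill() replaces
# each NaN with the value of the nearest valid cell (nearest by Euclidean distance, via
# distance_transform_edt's index map), so the returned array contains no NaNs.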
### DATA ###
#Digital Elevation Model needed to find slopes of glacier, should have something created for that
data = gdal.Open('input_data_bed_v2/DEM_2010/ifsar_2010.tif')
S_array = data.ReadAsArray()[::-1,:]
ncols = data.RasterXSize
nrows = data.RasterYSize
transf = data.GetGeoTransform()
x = arange(transf[0],transf[0]+transf[1]*data.RasterXSize,transf[1])
y = arange(transf[3],transf[3]+transf[5]*data.RasterYSize,transf[5])[::-1]
S_spline = RectBivariateSpline(x,y,S_array.T,kx=1,ky=1,s=0)
#This is my bed data, it looks like it only reads in x and y data?
data = gdal.Open('input_data_bed_v2/BED_MC/bed.tif')
B_array = data.ReadAsArray()[::-1,:]
ncols = data.RasterXSize
nrows = data.RasterYSize
transf = data.GetGeoTransform()
x = arange(transf[0],transf[0]+transf[1]*data.RasterXSize,transf[1])
y = arange(transf[3],transf[3]+transf[5]*data.RasterYSize,transf[5])[::-1]
B_spline = RectBivariateSpline(x,y,B_array.T,kx=1,ky=1,s=0)
# #SMB can be just a uniform parameter? I would need accum and abbla zones I guess in the long run.
# data = gdal.Open('input_data_bed_v2/SMB_2010_2013/mb_field_25.tif')
# adot_array = data.ReadAsArray()[::-1,:]
# adot_array = fill(adot_array,adot_array==adot_array.min())
# ncols = data.RasterXSize
# nrows = data.RasterYSize
# transf = data.GetGeoTransform()
# x = arange(transf[0],transf[0]+transf[1]*data.RasterXSize,transf[1])
# y = arange(transf[3],transf[3]+transf[5]*data.RasterYSize,transf[5])[::-1]
# adot_spline = RectBivariateSpline(x,y,adot_array.T,kx=1,ky=1,s=0)
# #Dhdt combined with SMB gives me my adot, so currently not super needed
# data = gdal.Open('input_data_bed_v2/DH_2010_2013/dhdt_weq_lower.tif')
# dhdt_array = data.ReadAsArray()[::-1,:]
# dhdt_array[dhdt_array<-1000] = 0
# dhdt_array = fill(dhdt_array,dhdt_array==dhdt_array.min())
# ncols = data.RasterXSize
# nrows = data.RasterYSize
# transf = data.GetGeoTransform()
# x = arange(transf[0],transf[0]+transf[1]*data.RasterXSize,transf[1])
# y = arange(transf[3],transf[3]+transf[5]*data.RasterYSize,transf[5])[::-1]
# dhdt_spline = RectBivariateSpline(x,y,dhdt_array.T,kx=1,ky=1,s=0)
from dolfin import *
from ice_model_functions import *
##########################################################
################# SET PETSC OPTIONS ####################
##########################################################
PETScOptions.set("ksp_type","preonly")
PETScOptions.set("pc_type","lu")
PETScOptions.set("pc_factor_mat_solver_package","mumps")
PETScOptions.set("mat_mumps_icntl_14","1000")
PETScOptions.set("ksp_final_residual","0")
##########################################################
################# SET FENICS OPTIONS ###################
##########################################################
parameters['form_compiler']['quadrature_degree'] = 2
parameters['form_compiler']['cpp_optimize'] = True
parameters['form_compiler']['representation'] = 'quadrature'
#parameters['form_compiler']['precision'] = 30
parameters['allow_extrapolation'] = True
ffc_options = {"optimize": True, \
"eliminate_zeros": True, \
"precompute_basis_const": True, \
"precompute_ip_const": True}
##########################################################
#################### CONSTANTS #########################
##########################################################
# TIME
minute = 60.0
hour = 60*minute
day = 24*hour
year = 365*day
# CONSTANTS
rho = 917.
g = 9.81
# RHEOLOGICAL CONSTANTS
rho_i = 910.
n = 3.0
Bc = 3.61e-13*year
Bw = 1.73e3*year
Qc = 6e4
Qw = 13.9e4
Rc = 8.314
gamma = 8.7e-4
eps_reg = Constant(1e-10)
# THERMAL CONTANTS
k = 2.1*year
Cp = 2009.
kappa = k/(rho_i*Cp)
q_geo = 0.042*year
# ADJOINT REG
theta = Constant(1e-10)
# MASS
thklim = 10.
dt = Constant(0.001)
###################################################
########### GEOMETRY AND INPUT DATA ##############
###################################################
##### BOUNDARY DATA #####
class Beta2(Expression):
def eval(self,values,x):
values[0] = 11000.
class S_exp(Expression):
def eval(self,values,x):
values[0] = S_spline(x[0],x[1])
class B_exp(Expression):
def eval(self,values,x):
values[0] = B_spline(x[0],x[1])
class Adot_exp(Expression):
def eval(self,values,x):
values[0] = 1000.0/910.0*adot_spline(x[0],x[1])/3. # Christian provides these fields as mwe/3a, hence correction.
class Dhdt_exp(Expression):
def eval(self,values,x):
values[0] = 1000.0/910.0*dhdt_spline(x[0],x[1])/3.
mesh = Mesh('outline.xml')
# FUNCTION SPACES
Q = FunctionSpace(mesh,"CG",1) # SCALAR
Q2 = MixedFunctionSpace([Q,]*2)
V = MixedFunctionSpace([Q]*5) # VELOCITY + MASS
beta2 = interpolate(Beta2(),Q)
#### !!!!!! #### Note the distinction between effective and normal mass balance !
#adot = interpolate(Adot_exp(),Q) - interpolate(Dhdt_exp(),Q) # Effective mass balance (near steady initially)
#adot = interpolate(Adot_exp(),Q) # True mass balance (way imbalanced)
B = interpolate(B_exp(),Q)
S_obs = interpolate(S_exp(),Q)
S0 = interpolate(S_exp(),Q)
H0 = Function(Q)
H0.vector()[:] = S_obs.vector()[:] - B.vector()[:] # Set initial thickness
# FUNCTIONS
U = Function(V)
Lamda = Function(V)
Phi = TestFunction(V)
dU = TrialFunction(V)
gamma = TestFunction(Q)
ubar,vbar,udef,vdef,H = split(U)
phibar,psibar,phidef,psidef,xsi = split(Lamda)
S = B+H
# METRICS FOR COORDINATE TRANSFORM
def dsdx(s):
return 1./H*(S.dx(0) - s*H.dx(0))
def dsdy(s):
return 1./H*(S.dx(1) - s*H.dx(1))
def dsdz(s):
return -1./H
p = 4
# TEST FUNCTION COEFFICIENTS
coef = [lambda s:1.0, lambda s:1./p*((p+1)*s**p - 1)]
dcoef = [lambda s:0, lambda s:(p+1)*s**(p-1)]
u_ = [ubar,udef]
v_ = [vbar,vdef]
phi_ = [phibar,phidef]
psi_ = [psibar,psidef]
u = VerticalBasis(u_,coef,dcoef)
v = VerticalBasis(v_,coef,dcoef)
phi = VerticalBasis(phi_,coef,dcoef)
psi = VerticalBasis(psi_,coef,dcoef)
# TERMWISE STRESSES AND NONLINEARITIES
def A_v():
return Constant(1e-16)
# 2nd INVARIANT STRAIN RATE
def epsilon_dot(s):
return ((u.dx(s,0) + u.ds(s)*dsdx(s))**2 \
+(v.dx(s,1) + v.ds(s)*dsdy(s))**2 \
+(u.dx(s,0) + u.ds(s)*dsdx(s))*(v.dx(s,1) + v.ds(s)*dsdy(s)) \
+0.25*((u.ds(s)*dsdz(s))**2 + (v.ds(s)*dsdz(s))**2 \
+ ((u.dx(s,1) + u.ds(s)*dsdy(s)) + (v.dx(s,0) + v.ds(s)*dsdx(s)))**2) \
+ eps_reg)
# VISCOSITY
def eta_v(s):
return A_v()**(-1./n)/2.*epsilon_dot(s)**((1.-n)/(2*n))
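# (Added note) This is Glen's-law viscosity, eta = (1/2) A^{-1/n} epsilon_dot^{(1-n)/(2n)}
# with n = 3; eps_reg regularizes the strain-rate invariant so eta stays finite at zero
# strain rate.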
# MEMBRANE STRESSES
E = Constant(1.0)
def membrane_xx(s):
return (phi.dx(s,0) + phi.ds(s)*dsdx(s))*H*(E*eta_v(s))*(4*(u.dx(s,0) + u.ds(s)*dsdx(s)) + 2*(v.dx(s,1) + v.ds(s)*dsdy(s)))
def membrane_xy(s):
return (phi.dx(s,1) + phi.ds(s)*dsdy(s))*H*(E*eta_v(s))*((u.dx(s,1) + u.ds(s)*dsdy(s)) + (v.dx(s,0) + v.ds(s)*dsdx(s)))
def membrane_yx(s):
return (psi.dx(s,0) + psi.ds(s)*dsdx(s))*H*(E*eta_v(s))*((u.dx(s,1) + u.ds(s)*dsdy(s)) + (v.dx(s,0) + v.ds(s)*dsdx(s)))
def membrane_yy(s):
return (psi.dx(s,1) + psi.ds(s)*dsdy(s))*H*(E*eta_v(s))*(2*(u.dx(s,0) + u.ds(s)*dsdx(s)) + 4*(v.dx(s,1) + v.ds(s)*dsdy(s)))
# SHEAR STRESSES
def shear_xz(s):
return dsdz(s)**2*phi.ds(s)*H*eta_v(s)*u.ds(s)
def shear_yz(s):
return dsdz(s)**2*psi.ds(s)*H*eta_v(s)*v.ds(s)
# DRIVING STRESSES
def tau_dx():
return rho*g*H*S.dx(0)*Lamda[0]
def tau_dy():
return rho*g*H*S.dx(1)*Lamda[1]
def boundary_membrane_xx(s):
return phi(s)*H*(E*eta_v(s))*(4*(u.dx(s,0) + u.ds(s)*dsdx(s)) + 2*(v.dx(s,1) + v.ds(s)*dsdy(s)))
def boundary_membrane_xy(s):
return phi(s)*H*(E*eta_v(s))*((u.dx(s,1) + u.ds(s)*dsdy(s)) + (v.dx(s,0) + v.ds(s)*dsdx(s)))
def boundary_membrane_yx(s):
return psi(s)*H*(E*eta_v(s))*((u.dx(s,1) + u.ds(s)*dsdy(s)) + (v.dx(s,0) + v.ds(s)*dsdx(s)))
def boundary_membrane_yy(s):
return psi(s)*H*(E*eta_v(s))*(2*(u.dx(s,0) + u.ds(s)*dsdx(s)) + 4*(v.dx(s,1) + v.ds(s)*dsdy(s)))
N = FacetNormal(mesh)
# GET QUADRATURE POINTS (THIS SHOULD BE ODD: WILL GENERATE THE GAUSS-LEGENDRE RULE
# POINTS AND WEIGHTS OF O(n), BUT ONLY THE POINTS IN [0,1] ARE KEPT< DUE TO SYMMETRY.
points,weights = half_quad(11)
# INSTANTIATE VERTICAL INTEGRATOR
vi = VerticalIntegrator(points,weights)
# FIRST ORDER EQUATIONS
I_x = - vi.intz(membrane_xx) - vi.intz(membrane_xy) - vi.intz(shear_xz) - phi(1)*beta2*u(1) - tau_dx()
I_y = - vi.intz(membrane_yx) - vi.intz(membrane_yy) - vi.intz(shear_yz) - psi(1)*beta2*v(1) - tau_dy()
I = (I_x + I_y)*dx
### MASS BALANCE ###
# SUPG PARAMETERS
h = CellSize(mesh)
tau = h/(2.0*sqrt(U[0]**2 + U[1]**2 + 25.0))
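# (Added note) tau is the usual SUPG stabilization parameter ~ h/(2|u|); the +25 under the
# sqrt acts as a 5 m/a floor on the advective speed, keeping tau bounded on stagnant ice.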
Hmid = 0.5*H + 0.5*H0
xsihat = tau*(U[0]*xsi.dx(0) + U[1]*xsi.dx(1))
# STABILIZED CONTINUITY EQUATION
#I += ((H - H0)/dt*xsi - (xsi.dx(0)*U[0]*Hmid + xsi.dx(1)*U[1]*Hmid) + xsihat*(U[0]*Hmid.dx(0) + U[1]*Hmid.dx(1) + Hmid*(U[0].dx(0) + U[1].dx(1)))#- (adot)*(xsi + xsihat))*dx# + xsi*(U[0]*Hmid*N[0] + U[1]*Hmid*N[1])*ds(1)
I += ((H - H0)/dt*xsi - (xsi.dx(0)*U[0]*Hmid + xsi.dx(1)*U[1]*Hmid) + xsihat*(U[0]*Hmid.dx(0) + U[1]*Hmid.dx(1) + Hmid*(U[0].dx(0) + U[1].dx(1))))*dx# + xsi*(U[0]*Hmid*N[0] + U[1]*Hmid*N[1])*ds(1)
I_misfit = theta*dot(grad(beta2),grad(beta2))*dx
I += I_misfit
# JACOBIAN FOR COUPLED MASS + MOMENTUM SOLVE
R = derivative(I,Lamda,Phi)
J = derivative(R,U,dU)
# Adjoint forms, if so desired
R_adj = derivative(I,U,Phi)
J_adj = derivative(R_adj,Lamda,dU)
G = derivative(I,beta2,gamma)
#####################################################################
######################### I/O Functions ###########################
#####################################################################
# For moving data between vector functions and scalar functions
assigner_inv = FunctionAssigner([Q,Q,Q,Q,Q],V)
assigner = FunctionAssigner(V,[Q,Q,Q,Q,Q])
assigner_vec = FunctionAssigner(Q2,[Q,Q])
#####################################################################
###################### Variational Solvers ########################
#####################################################################
# Positivity constraints and zero-flux boundary conditions don't play well together, so I
# enforce the former through a non-slip Dirichlet boundary condition on velocity. This is a
# little weird in the context of glaciers, but it's the only condition that will uphold mass
# conservation (there is still a fictitious momentum flux across the boundary, aka a non-real
# stress, but that's more acceptable to me).
bcs = [DirichletBC(V.sub(i),0,lambda x,on:on) for i in range(4)]
bc_2 = DirichletBC(V.sub(4),thklim,lambda x,o:(o and x[0]>393092) or (o and (x[1]>1.5273e6 and x[0]<372129 and x[0]>368953)))
mass_problem = NonlinearVariationalProblem(R,U,J=J,bcs=bcs+[bc_2],form_compiler_parameters=ffc_options)
mass_solver = NonlinearVariationalSolver(mass_problem)
mass_solver.parameters['nonlinear_solver'] = 'snes'
mass_solver.parameters['snes_solver']['method'] = 'vinewtonrsls'
mass_solver.parameters['snes_solver']['relative_tolerance'] = 1e-6
mass_solver.parameters['snes_solver']['absolute_tolerance'] = 1e-6
mass_solver.parameters['snes_solver']['maximum_iterations'] = 10
mass_solver.parameters['snes_solver']['error_on_nonconvergence'] = False
mass_solver.parameters['snes_solver']['linear_solver'] = 'mumps'
bc_adj_1 = DirichletBC(V,[0.0,0.0,0.0,0.0,0.0],lambda x,on:on)
bc_adj_2 = DirichletBC(V.sub(4),0.0,lambda x,on:on)
adj_problem = NonlinearVariationalProblem(R_adj,Lamda,J=J_adj,bcs=[bc_adj_1,bc_adj_2],form_compiler_parameters=ffc_options)
adj_solver = NonlinearVariationalSolver(adj_problem)
adj_solver.parameters['newton_solver']['relative_tolerance'] = 1e-3
adj_solver.parameters['newton_solver']['absolute_tolerance'] = 1e-3
adj_solver.parameters['newton_solver']['maximum_iterations'] = 3
adj_solver.parameters['newton_solver']['error_on_nonconvergence'] = False
adj_solver.parameters['newton_solver']['linear_solver'] = 'mumps'
#####################################################################
################## INITIAL CONDITIONS AND BOUNDS ##################
#####################################################################
l_thick_bound = project(Constant(thklim),Q)
u_thick_bound = project(Constant(1e4),Q)
l_v_bound = project(-10000.0,Q)
u_v_bound = project(10000.0,Q)
l_bound = Function(V)
u_bound = Function(V)
un = Function(Q)
u2n = Function(Q)
vn = Function(Q)
v2n = Function(Q)
lx = Function(Q)
l2x = Function(Q)
mx = Function(Q)
m2x = Function(Q)
p0 = Function(Q)
assigner.assign(U,[un,vn,u2n,v2n,H0])
assigner.assign(l_bound,[l_v_bound]*4+[l_thick_bound])
assigner.assign(u_bound,[u_v_bound]*4+[u_thick_bound])
results_dir = './results/'
Hfile_ptc = File(results_dir + 'H.pvd')
Ufile_ptc = File(results_dir + 'Us.pvd')
bfile_ptc = File(results_dir + 'beta2.pvd')
opt_dir = './results_opt/'
Ufile_opt = File(opt_dir + 'Us.pvd')
bfile_opt = File(opt_dir + 'beta2.pvd')
Us = project(as_vector([u(0),v(0)]))
assigner_inv.assign([lx,l2x,mx,m2x,p0],Lamda)
# Uncomment if you want to start from the end of the last run
#File(results_dir + 'U.xml') >> U
#H0_temp = project(H)
#H0.vector()[:] = H0_temp.vector()[:]
t = 2016.75
#Start slow for convergence. Due to an oddity in topography, this model will not converge for the first 5 or so iterations as it fills a hole, then will work fine after.
dt_schedule = [0.00001]*5 + [0.01]*10 + [0.1]*5 + [0.5]*100 + [1.0]*100
#Time stepping
solve(R==0, U, bcs=bcs+[bc_2])
assigner_inv.assign([un,vn,u2n,v2n,H0],U)
#H0_temp = project(H)
#H0.vector()[:] = H0_temp.vector()[:]
Us_temp = project(as_vector([u(0),v(0)]))
Us.vector()[:] = Us_temp.vector()[:]
S_temp = project(S)
S0.vector()[:] = S_temp.vector()[:]
Hfile_ptc << (H0,t)
Ufile_ptc << (Us,t)
bfile_ptc << (S0,t)
"""
for dts in dt_schedule:
dt.assign(dts)
t += dts
#mass_solver.solve(l_bound,u_bound)
solve(R==0, U, bcs=bcs+[bc_2])
assigner_inv.assign([un,vn,u2n,v2n,H0],U)
#H0_temp = project(H)
#H0.vector()[:] = H0_temp.vector()[:]
Us_temp = project(as_vector([u(0),v(0)]))
Us.vector()[:] = Us_temp.vector()[:]
S_temp = project(S)
S0.vector()[:] = S_temp.vector()[:]
Hfile_ptc << (H0,t)
Ufile_ptc << (Us,t)
bfile_ptc << (S0,t)
"""
File(results_dir + 'Ustar.xml') << U
| mit |
saidimu/POSTMan-Chrome-Extension | tests/selenium/pmtests/postman_tests_requests.py | 104 | 23191 | from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support.select import Select
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.common.keys import Keys
import selenium.webdriver.chrome.service as service
import traceback
import inspect
import time
from postman_tests import PostmanTests
class PostmanTestsRequests(PostmanTests):
def test_1_get_basic(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/get")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("get") > 0:
return True
else:
return False
def test_2_get_only_key(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/get?start")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("/get?start") > 0:
return True
else:
return False
def test_3_delete_basic(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/delete")
method_select = self.browser.find_element_by_id("request-method-selector")
Select(method_select).select_by_value("DELETE")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("delete") > 0:
return True
else:
return False
def test_4_head_basic(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/html")
method_select = self.browser.find_element_by_id("request-method-selector")
Select(method_select).select_by_value("HEAD")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("div") > 0:
return True
else:
return False
def test_5_options_basic(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/html")
method_select = self.browser.find_element_by_id("request-method-selector")
Select(method_select).select_by_value("OPTIONS")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("div") > 0:
return True
else:
return False
def test_6_post_basic(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/post")
method_select = self.browser.find_element_by_id("request-method-selector")
Select(method_select).select_by_value("POST")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("post") > 0:
return True
else:
return False
def test_7_put_basic(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/put")
method_select = self.browser.find_element_by_id("request-method-selector")
Select(method_select).select_by_value("PUT")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("put") > 0:
return True
else:
return False
def test_8_init_environment(self):
environment_selector = self.browser.find_element_by_id("environment-selector")
environment_selector.click()
time.sleep(0.1)
manage_env_link = self.browser.find_element_by_css_selector("#environment-selector .dropdown-menu li:last-child a")
manage_env_link.click()
time.sleep(1)
add_env_button = self.browser.find_element_by_css_selector("#environments-list-wrapper .toolbar .environments-actions-add")
add_env_button.click()
time.sleep(0.3)
environment_name = self.browser.find_element_by_id("environment-editor-name")
environment_name.clear()
environment_name.send_keys("Requests environment")
first_key = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:first-child .keyvalueeditor-key")
first_key.clear()
first_key.send_keys("path_get")
first_val = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:first-child .keyvalueeditor-value")
first_val.clear()
first_val.send_keys("get?start=something")
second_key = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:nth-of-type(2) .keyvalueeditor-key")
second_key.clear()
second_key.send_keys("path_post")
second_val = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:nth-of-type(2) .keyvalueeditor-value")
second_val.clear()
second_val.send_keys("post")
third_key = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:nth-of-type(3) .keyvalueeditor-key")
third_key.clear()
third_key.send_keys("Foo")
third_val = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:nth-of-type(3) .keyvalueeditor-value")
third_val.clear()
third_val.send_keys("Bar")
fourth_key = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:nth-of-type(4) .keyvalueeditor-key")
fourth_key.clear()
fourth_key.send_keys("Name")
fourth_val = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:nth-of-type(4) .keyvalueeditor-value")
fourth_val.clear()
fourth_val.send_keys("John Appleseed")
fifth_key = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:nth-of-type(5) .keyvalueeditor-key")
fifth_key.clear()
fifth_key.send_keys("nonce")
fifth_val = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:nth-of-type(5) .keyvalueeditor-value")
fifth_val.clear()
fifth_val.send_keys("kllo9940pd9333jh")
sixth_key = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:nth-of-type(6) .keyvalueeditor-key")
sixth_key.clear()
sixth_key.send_keys("timestamp")
sixth_val = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:nth-of-type(6) .keyvalueeditor-value")
sixth_val.clear()
sixth_val.send_keys("1191242096")
seventh_key = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:nth-of-type(7) .keyvalueeditor-key")
seventh_key.clear()
seventh_key.send_keys("url")
seventh_val = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:nth-of-type(7) .keyvalueeditor-value")
seventh_val.clear()
seventh_val.send_keys("http://photos.example.net/photos")
eigth_key = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:nth-of-type(8) .keyvalueeditor-key")
eigth_key.clear()
eigth_key.send_keys("file")
eigth_val = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:nth-of-type(8) .keyvalueeditor-value")
eigth_val.clear()
eigth_val.send_keys("vacation.jpg")
ninth_key = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:nth-of-type(9) .keyvalueeditor-key")
ninth_key.clear()
ninth_key.send_keys("size")
ninth_val = self.browser.find_element_by_css_selector("#environment-keyvaleditor .keyvalueeditor-row:nth-of-type(9) .keyvalueeditor-value")
ninth_val.clear()
ninth_val.send_keys("original")
submit_button = self.browser.find_element_by_css_selector("#modal-environments .environments-actions-add-submit")
submit_button.click()
time.sleep(0.3)
close_button = self.browser.find_element_by_css_selector("#modal-environments .modal-header .close")
close_button.click()
time.sleep(1)
environment_selector = self.browser.find_element_by_id("environment-selector")
environment_selector.click()
# Select the environment
manage_env_link = self.browser.find_element_by_css_selector("#environment-selector .dropdown-menu li:nth-of-type(1) a")
manage_env_link.click()
return True
def test_9_get_environment(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/{{path_get}}")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("get?start=something") > 0:
return True
else:
return False
def test_10_post_formdata_environment(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/{{path_post}}")
method_select = self.browser.find_element_by_id("request-method-selector")
Select(method_select).select_by_value("POST")
first_formdata_key = self.browser.find_element_by_css_selector("#formdata-keyvaleditor .keyvalueeditor-row:nth-of-type(1) .keyvalueeditor-key")
first_formdata_key.clear()
first_formdata_key.send_keys("size")
first_formdata_value = self.browser.find_element_by_css_selector("#formdata-keyvaleditor .keyvalueeditor-row:nth-of-type(1) .keyvalueeditor-value")
first_formdata_value.clear()
first_formdata_value.send_keys("{{size}}")
second_formdata_key = self.browser.find_element_by_css_selector("#formdata-keyvaleditor .keyvalueeditor-row:nth-of-type(2) .keyvalueeditor-key")
second_formdata_key.clear()
second_formdata_key.send_keys("file")
second_formdata_value = self.browser.find_element_by_css_selector("#formdata-keyvaleditor .keyvalueeditor-row:nth-of-type(2) .keyvalueeditor-value")
second_formdata_value.clear()
second_formdata_value.send_keys("{{file}}")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("original") > 0:
return True
else:
return False
def test_11_post_urlencoded_environment(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/{{path_post}}")
method_select = self.browser.find_element_by_id("request-method-selector")
Select(method_select).select_by_value("POST")
# Select urlencoded
self.browser.find_element_by_css_selector("#data-mode-selector a:nth-of-type(2)").click()
first_formdata_key = self.browser.find_element_by_css_selector("#urlencoded-keyvaleditor .keyvalueeditor-row:nth-of-type(1) .keyvalueeditor-key")
first_formdata_key.clear()
first_formdata_key.send_keys("size")
first_formdata_value = self.browser.find_element_by_css_selector("#urlencoded-keyvaleditor .keyvalueeditor-row:nth-of-type(1) .keyvalueeditor-value")
first_formdata_value.clear()
first_formdata_value.send_keys("{{size}}")
second_formdata_key = self.browser.find_element_by_css_selector("#urlencoded-keyvaleditor .keyvalueeditor-row:nth-of-type(2) .keyvalueeditor-key")
second_formdata_key.clear()
second_formdata_key.send_keys("file")
second_formdata_value = self.browser.find_element_by_css_selector("#urlencoded-keyvaleditor .keyvalueeditor-row:nth-of-type(2) .keyvalueeditor-value")
second_formdata_value.clear()
second_formdata_value.send_keys("{{file}}")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("original") > 0:
return True
else:
return False
def test_12_post_raw_environment(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/{{path_post}}")
method_select = self.browser.find_element_by_id("request-method-selector")
Select(method_select).select_by_value("POST")
# Select urlencoded
self.browser.find_element_by_css_selector("#data-mode-selector a:nth-of-type(3)").click()
self.set_code_mirror_raw_value("{{Foo}}={{Name}}")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("John Appleseed") > 0:
return True
else:
return False
def test_13_post_raw_json_environment(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/{{path_post}}")
method_select = self.browser.find_element_by_id("request-method-selector")
Select(method_select).select_by_value("POST")
self.browser.find_element_by_css_selector("#data-mode-selector a:nth-of-type(3)").click()
self.set_code_mirror_raw_value("{\"{{Foo}}\":\"{{Name}}\"")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("John Appleseed") > 0:
return True
else:
return False
# https://github.com/a85/POSTMan-Chrome-Extension/issues/174
def test_14_url_with_semicolon(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/get?some=start;val")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("/get?some=start;val") > 0:
return True
else:
return False
# https://github.com/a85/POSTMan-Chrome-Extension/issues/165
def test_15_odata_url(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/Resource(code1='1',code2='1')")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("Not Found") > 0:
first_history_item = self.browser.find_element_by_css_selector("#history-items li:nth-of-type(1) .request .request-name")
value = self.browser.execute_script("return arguments[0].innerHTML", first_history_item)
if value.find("http://localhost:5000/Resource(code1='1'<br>,code2='1')") > 0:
return True
else:
return False
else:
return False
def test_16_with_no_cache(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/get")
settings_button = self.browser.find_element_by_css_selector(".preferences a:nth-of-type(2)")
settings_button.click()
time.sleep(1)
no_cache_select = self.browser.find_element_by_id("send-no-cache-header")
Select(no_cache_select).select_by_value("true")
close_button = self.browser.find_element_by_css_selector("#modal-settings .modal-header .close")
close_button.click()
time.sleep(1)
self.set_url_field(self.browser, "http://localhost:5000/get")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("no-cache") > 0:
return True
else:
return False
def test_17_without_no_cache(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/get")
settings_button = self.browser.find_element_by_css_selector(".preferences a:nth-of-type(2)")
settings_button.click()
time.sleep(1)
no_cache_select = self.browser.find_element_by_id("send-no-cache-header")
Select(no_cache_select).select_by_value("false")
close_button = self.browser.find_element_by_css_selector("#modal-settings .modal-header .close")
close_button.click()
time.sleep(1)
self.set_url_field(self.browser, "http://localhost:5000/get")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("no-cache") < 0:
return True
else:
return False
def test_18_raw_json_type(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/post")
self.browser.find_element_by_id("headers-keyvaleditor-actions-open").click()
time.sleep(0.1)
first_key = self.browser.find_element_by_css_selector("#headers-keyvaleditor .keyvalueeditor-row:first-child .keyvalueeditor-key")
first_key.clear()
first_key.send_keys("Content-Type")
first_val = self.browser.find_element_by_css_selector("#headers-keyvaleditor .keyvalueeditor-row:first-child .keyvalueeditor-value")
first_val.clear()
first_val.send_keys("text/json")
method_select = self.browser.find_element_by_id("request-method-selector")
Select(method_select).select_by_value("POST")
self.browser.find_element_by_css_selector("#data-mode-selector a:nth-of-type(3)").click()
self.set_code_mirror_raw_value("{\"{{Foo}}\":\"{{Name}}\"")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("text/json") > 0:
self.reset_request();
first_history_item = self.browser.find_element_by_css_selector("#history-items li:nth-of-type(1) .request")
first_history_item.click()
try:
w = WebDriverWait(self.browser, 10)
w.until(lambda browser: self.browser.find_element_by_id("url").get_attribute("value") == "http://localhost:5000/post")
selected_mode_element = self.browser.find_element_by_id("body-editor-mode-item-selected")
selected_mode_element_value = self.browser.execute_script("return arguments[0].innerHTML", selected_mode_element)
if selected_mode_element_value.find("JSON") == 0:
return True
else:
return False
except:
return False
else:
return False
def test_19_raw_xml_type(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/post")
self.browser.find_element_by_id("headers-keyvaleditor-actions-open").click()
time.sleep(0.1)
self.browser.find_element_by_id("headers-keyvaleditor-actions-open").click()
time.sleep(0.1)
first_key = self.browser.find_element_by_css_selector("#headers-keyvaleditor .keyvalueeditor-row:first-child .keyvalueeditor-key")
first_key.clear()
first_key.send_keys("Content-Type")
first_val = self.browser.find_element_by_css_selector("#headers-keyvaleditor .keyvalueeditor-row:first-child .keyvalueeditor-value")
first_val.clear()
first_val.send_keys("text/xml")
method_select = self.browser.find_element_by_id("request-method-selector")
Select(method_select).select_by_value("POST")
self.browser.find_element_by_css_selector("#data-mode-selector a:nth-of-type(3)").click()
self.set_code_mirror_raw_value("{\"{{Foo}}\":\"{{Name}}\"")
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("text/xml") > 0:
self.reset_request();
first_history_item = self.browser.find_element_by_css_selector("#history-items li:nth-of-type(1) .request")
first_history_item.click()
try:
w = WebDriverWait(self.browser, 10)
w.until(lambda browser: self.browser.find_element_by_id("url").get_attribute("value") == "http://localhost:5000/post")
selected_mode_element = self.browser.find_element_by_id("body-editor-mode-item-selected")
selected_mode_element_value = self.browser.execute_script("return arguments[0].innerHTML", selected_mode_element)
if selected_mode_element_value.find("XML") == 0:
return True
else:
return False
except:
return False
else:
return False
def na_test_20_raw_large_request(self):
self.reset_request()
self.set_url_field(self.browser, "http://localhost:5000/post")
method_select = self.browser.find_element_by_id("request-method-selector")
Select(method_select).select_by_value("POST")
self.browser.find_element_by_css_selector("#data-mode-selector a:nth-of-type(3)").click()
try:
raw_json = open("large_json.json").read()
self.set_code_mirror_raw_value(raw_json)
send_button = self.browser.find_element_by_id("submit-request")
send_button.click()
code_data_value = self.get_codemirror_value(self.browser)
if code_data_value.find("images/user_1.png") > 0:
return True
else:
return False
except:
print traceback.format_exc()
return False
PostmanTestsRequests().run()
| apache-2.0 |
h3llrais3r/Auto-Subliminal | lib/pushbullet/listener.py | 3 | 2841 | __author__ = 'Igor Maculan <[email protected]>'
import logging
import time
import json
from threading import Thread
import requests
import websocket
log = logging.getLogger('pushbullet.Listener')
WEBSOCKET_URL = 'wss://stream.pushbullet.com/websocket/'
class Listener(Thread, websocket.WebSocketApp):
def __init__(self, account,
on_push=None,
on_error=None,
http_proxy_host=None,
http_proxy_port=None):
"""
:param api_key: pushbullet Key
:param on_push: function that get's called on all pushes
:param http_proxy_host: host proxy (ie localhost)
:param http_proxy_port: host port (ie 3128)
"""
self._account = account
self._api_key = self._account.api_key
self.on_error = on_error
Thread.__init__(self)
websocket.WebSocketApp.__init__(self, WEBSOCKET_URL + self._api_key,
on_open=self.on_open,
on_error=self.on_error,
on_message=self.on_message,
on_close=self.on_close)
self.connected = False
self.last_update = time.time()
self.on_push = on_push
# History
self.history = None
self.clean_history()
# proxy configuration
self.http_proxy_host = http_proxy_host
self.http_proxy_port = http_proxy_port
self.proxies = None
        if http_proxy_host is not None and http_proxy_port is not None:
self.proxies = {
"http": "http://" + http_proxy_host + ":" + str(http_proxy_port),
"https": "http://" + http_proxy_host + ":" + str(http_proxy_port),
}
def clean_history(self):
self.history = []
def on_open(self, ws):
self.connected = True
self.last_update = time.time()
def on_close(self, ws):
log.debug('Listener closed')
self.connected = False
def on_message(self, ws, message):
log.debug('Message received:' + message)
try:
json_message = json.loads(message)
if json_message["type"] != "nop":
self.on_push(json_message)
except Exception as e:
log.exception(e)
def run_forever(self, sockopt=None, sslopt=None, ping_interval=0, ping_timeout=None):
websocket.WebSocketApp.run_forever(self, sockopt=sockopt, sslopt=sslopt, ping_interval=ping_interval,
ping_timeout=ping_timeout,
http_proxy_host=self.http_proxy_host,
http_proxy_port=self.http_proxy_port)
def run(self):
self.run_forever()
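# A minimal usage sketch, added for illustration only; it is not part of the
# upstream module. It assumes a `pushbullet.Pushbullet` account object that
# exposes an `api_key` attribute, which is all that Listener reads above.
if __name__ == '__main__':
    from pushbullet import Pushbullet

    def print_push(push):
        # `push` is the decoded JSON dict forwarded by on_message()
        print 'push received: %r' % (push,)

    account = Pushbullet('YOUR_API_KEY')  # placeholder key
    listener = Listener(account, on_push=print_push)
    listener.start()  # Thread.start() -> run() -> run_forever()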
| gpl-3.0 |
jeffersonvenancio/BarzingaNow | python/user/controller.py | 1 | 4306 | import os
import json
import cloudstorage as gcs
import datetime
from flask import Blueprint, request, session
from google.appengine.api import search
from google.appengine.api import app_identity
from credit.model import Credit
from transaction.model import Transaction
from user.model import User
user = Blueprint('user', __name__)
@user.route('/', methods=['GET'])
def get_all():
users = [u.to_dict() for u in User.query().fetch()]
return json.dumps(users)
@user.route('/<int:user_id>', methods=['GET'])
def get_by_id(user_id):
user = User.get_by_id(user_id).to_dict()
return json.dumps(user)
@user.route('/email/<string:email>', methods=['GET'])
def get_by_email(email):
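# Normalize whatever domain was supplied to the canonical @dextra-sw.com
# address before looking the user up.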
email = email.split('@')[0] + '@dextra-sw.com'
userClient = User.query().filter(User.email == email).get()
return json.dumps(userClient.to_dict())
@user.route('/logged', methods=['GET'])
def get_logged():
user_json = session['barzinga_user']
user = User.query().filter(User.email == user_json["email"]).get()
return json.dumps(user.to_dict())
@user.route('/filter', methods=['POST'])
def filter():
name = request.form['name']
index = search.Index(name='user')
users = [[f.__dict__ for f in user.fields] for user in index.search(name)]
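# Each hit is flattened to its ordered field list; add() below indexes
# documents with field 0 = 'name' and field 1 = 'email', which is what the
# positional access u[0]/u[1] relies on.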
user_list = []
for u in users:
user_list.append({
'name': u[0]['_value'],
'email': u[1]['_value']
})
return json.dumps(user_list)
@user.route('/', methods=['POST'], strict_slashes=False)
def add():
name = request.form['name']
email = request.form['email']
rfid = request.form['rfid']
user = User(name=name, email=email, admin=False, photo_url='', money=0, rfid=rfid, active = True)
user.put()
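# Mirror the new user into the 'user' full-text index as well, so the
# /filter route can find it by name.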
user_document = search.Document(
fields=[
search.TextField(name='name', value=user.name),
search.TextField(name='email', value=user.email)
])
search.Index(name='user').put(user_document)
return '', 204
@user.route('/pin', methods=['POST'], strict_slashes=False)
def put_pin():
user_json = session['barzinga_user']
user = User.query().filter(User.email == user_json["email"]).get()
pin = request.form['pin']
if user:
user.pin = pin
user.put()
return '', 204
@user.route('/rfid', methods=['PUT'], strict_slashes=False)
def put_rfid():
user = User.query().filter(User.email == request.form['email']).get()
rfid = request.form['rfid']
name = request.form['name']
if user:
user.rfid = rfid
user.name = name
user.put()
return '', 204
return '', 404
@user.route('/rfid/<string:rfid>', methods=['GET'], strict_slashes=False)
def get_by_rfid(rfid):
user = User.query().filter(User.rfid == rfid).get()
if user:
user_json = {
'name' : user.name,
'email' : user.email,
'money' : user.money,
'photo_url' : user.photo_url,
'id' : user.key.id()
}
return json.dumps(user_json)
return '', 404
@user.route('/deactivate', methods=['PUT'], strict_slashes=False)
def deactivate():
user = User.query().filter(User.email == request.form['email']).get()
if user:
user.active = False
user.put()
return '', 204
return '', 404
# @user.route('/cron/gerarodejulho', methods=['GET'], strict_slashes=False)
# def gerarodejulho():
# users = User.query().fetch()
#
# from_date = datetime.datetime(year=2019, month=8, day=1)
# to_date = datetime.datetime(year=2019, month=8, day=6)
#
# transactions = Transaction.query().filter(Transaction.date <= to_date, Transaction.date >= from_date).fetch()
# credits = Credit.query().filter(Credit.date <= to_date, Credit.date >= from_date).fetch()
#
# usersCSV = 'email;valor \n'
#
# for u in users:
# saldo = u.money
#
# for t in transactions:
# if t.user.get().email == u.email:
# saldo+=t.value
#
# for c in credits:
# if c.user_email == u.email:
# saldo-=c.value
#
# usersCSV += str(u.email)+';'+str("%.2f" % round(saldo,2))+' \n'
#
# make_blob_public(usersCSV, 'monthly/', 'credit_balance_01_08_2019')
#
# return json.dumps(usersCSV)
| apache-2.0 |
bjzhang/xen_arm_pv | tools/python/xen/xm/main.py | 3 | 37535 | # (C) Copyright IBM Corp. 2005
# Copyright (C) 2004 Mike Wray
# Copyright (c) 2005-2006 XenSource Ltd.
#
# Authors:
# Sean Dague <sean at dague dot net>
# Mike Wray <mike dot wray at hp dot com>
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#============================================================================
# Some codes in this file was added
# for supporting ARM processor by Samsung Electronics.
#============================================================================
"""Grand unified management application for Xen.
"""
import os
import os.path
import sys
import re
import getopt
import socket
import warnings
warnings.filterwarnings('ignore', category=FutureWarning)
import xmlrpclib
import xen.xend.XendProtocol
from xen.xend import PrettyPrint
from xen.xend import sxp
from xen.xm.opts import *
import console
import xen.xend.XendClient
from xen.xend.XendClient import server
# getopt.gnu_getopt is better, but only exists in Python 2.3+. Use
# getopt.getopt if gnu_getopt is not available. This will mean that options
# may only be specified before positional arguments.
if not hasattr(getopt, 'gnu_getopt'):
getopt.gnu_getopt = getopt.getopt
# Strings for shorthelp
console_help = "console <DomId> Attach to domain DomId's console."
create_help = """create [-c] <ConfigFile>
[Name=Value].. Create a domain based on Config File"""
destroy_help = "destroy <DomId> Terminate a domain immediately"
help_help = "help Display this message"
list_help = "list [--long] [DomId, ...] List information about domains"
mem_max_help = "mem-max <DomId> <Mem> Set maximum memory reservation for a domain"
mem_set_help = "mem-set <DomId> <Mem> Adjust the current memory usage for a domain"
migrate_help = "migrate <DomId> <Host> Migrate a domain to another machine"
pause_help = "pause <DomId> Pause execution of a domain"
reboot_help = "reboot <DomId> [-w][-a] Reboot a domain"
restore_help = "restore <File> Create a domain from a saved state file"
save_help = "save <DomId> <File> Save domain state (and config) to file"
shutdown_help ="shutdown <DomId> [-w][-a][-R|-H] Shutdown a domain"
top_help = "top Monitor system and domains in real-time"
unpause_help = "unpause <DomId> Unpause a paused domain"
help_spacer = """
"""
# Strings for longhelp
sysrq_help = "sysrq <DomId> <letter> Send a sysrq to a domain"
domid_help = "domid <DomName> Converts a domain name to a domain id"
domname_help = "domname <DomId> Convert a domain id to a domain name"
vcpu_set_help = """vcpu-set <DomId> <VCPUs> Set the number of VCPUs for a domain"""
vcpu_list_help = "vcpu-list <DomId> List the VCPUs for a domain (or all domains)"
vcpu_pin_help = "vcpu-pin <DomId> <VCPU> <CPUs> Set which cpus a VCPU can use"
dmesg_help = "dmesg [-c|--clear] Read or clear Xen's message buffer"
info_help = "info Get information about the xen host"
rename_help = "rename <DomId> <New Name> Rename a domain"
log_help = "log Print the xend log"
sched_bvt_help = """sched-bvt <Parameters> Set Borrowed Virtual Time scheduler
parameters"""
sched_bvt_ctxallow_help = """sched-bvt-ctxallow <Allow> Set the BVT scheduler context switch
allowance"""
sched_sedf_help = "sched-sedf [DOM] [OPTIONS] Show|Set simple EDF parameters\n" + \
" -p, --period Relative deadline(ms).\n\
-s, --slice Worst-case execution time(ms)\n\
(slice < period).\n\
-l, --latency scaled period(ms) in case the domain\n\
is doing heavy I/O.\n\
-e, --extra flag (0/1) which controls whether the\n\
domain can run in extra-time\n\
-w, --weight mutually exclusive with period/slice and\n\
specifies another way of setting a domain's\n\
cpu period/slice."
block_attach_help = """block-attach <DomId> <BackDev> <FrontDev> <Mode>
[BackDomId] Create a new virtual block device"""
block_detach_help = """block-detach <DomId> <DevId> Destroy a domain's virtual block device,
where <DevId> may either be the device ID
or the device name as mounted in the guest"""
block_list_help = "block-list <DomId> [--long] List virtual block devices for a domain"
network_attach_help = """network-attach <DomID> [script=<script>] [ip=<ip>] [mac=<mac>]
[bridge=<bridge>] [backend=<backDomID>]
Create a new virtual network device """
network_detach_help = """network-detach <DomId> <DevId> Destroy a domain's virtual network
device, where <DevId> is the device ID."""
network_list_help = "network-list <DomId> [--long] List virtual network interfaces for a domain"
vkpp_list_help = "vkpp-list <DomId> [--long] list virtual keypad devices"
vkpp_attach_help = """vkpp-attach <DomId> <BackDev> <FrontDev> <Mode> [BackDomId]
Create a new virtual keypad device"""
vfb_attach_help = """vfb-attach <DomId> [BackDomId]
Create a new virtual frame buffer device"""
vmtd_attach_help = """vmtd-attach <DomId> <BackDev> <FrontDev> <Mode>
[BackDomId] Create a new virtual vmtd device"""
vmtd_detach_help = """vmtd-detach <DomId> <DevId> Destroy a domain's virtual vmtd device,
where <DevId> may either be the device ID
or the device name as mounted in the guest"""
vmtd_list_help = "vmtd-list <DomId> [--long] List virtual vmtd devices for a domain"
vnet_list_help = "vnet-list [-l|--long] list vnets"
vnet_create_help = "vnet-create <config> create a vnet from a config file"
vnet_delete_help = "vnet-delete <vnetid> delete a vnet"
vtpm_list_help = "vtpm-list <DomId> [--long] list virtual TPM devices"
vs_attach_help = """vs-attach <DomId> [BackDomId] Create a new virtual usb serial device"""
vs_detach_help = """vs-detach <DomId> <DevId> Destroy a domain's virtual usb serial device. DevId=977"""
vbfs_attach_help = """vbfs-attach <DomId> <luns="<LunDomID>:0/1:<FilePath>;[<LunDomID>:0/1:<FilePath>];...> [BackDomId]
Create a new virtual usb backed file storage device,
where <LunDomId> is <DomId> or <BackDomId>. Max 8 luns. 0 - RW, 1 - RO"""
vbfs_detach_help = """vbfs-detach <DomId> <DevId> Destroy a domain's virtual usb backed file storage device. DevID=1"""
short_command_list = [
"console",
"create",
"destroy",
"help",
"list",
"mem-set",
"migrate",
"pause",
"reboot",
"restore",
"save",
"shutdown",
"top",
"unpause",
"vcpu-set",
]
domain_commands = [
"console",
"create",
"destroy",
"domid",
"domname",
"list",
"mem-max",
"mem-set",
"migrate",
"pause",
"reboot",
"rename",
"restore",
"save",
"shutdown",
"sysrq",
"top",
"unpause",
"vcpu-list",
"vcpu-pin",
"vcpu-set",
]
host_commands = [
"dmesg",
"info",
"log"
]
scheduler_commands = [
"sched-bvt",
"sched-bvt-ctxallow",
"sched-sedf",
]
device_commands = [
"block-attach",
"block-detach",
"block-list",
"network-attach",
"network-detach",
"network-list",
"vkpp-attach",
"vfb-attach",
"vmtd-attach",
"vmtd-detach",
"vmtd-list",
"vtpm-list",
"vs-attach",
"vs-detach",
"vbfs-attach",
"vbfs-detach",
]
vnet_commands = [
"vnet-list",
"vnet-create",
"vnet-delete",
]
all_commands = (domain_commands + host_commands + scheduler_commands +
device_commands + vnet_commands)
def commandToHelp(cmd):
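    # e.g. "mem-set" resolves to the module-level string mem_set_help via eval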
return eval(cmd.replace("-", "_") + "_help")
shorthelp = """Usage: xm <subcommand> [args]
Control, list, and manipulate Xen guest instances
xm common subcommands:
""" + help_spacer.join(map(commandToHelp, short_command_list)) + """
<DomName> can be substituted for <DomId> in xm subcommands.
For a complete list of subcommands run 'xm help --long'
For more help on xm see the xm(1) man page
For more help on xm create, see the xmdomain.cfg(5) man page"""
longhelp = """Usage: xm <subcommand> [args]
Control, list, and manipulate Xen guest instances
xm full list of subcommands:
Domain Commands:
""" + help_spacer.join(map(commandToHelp, domain_commands)) + """
Xen Host Commands:
""" + help_spacer.join(map(commandToHelp, host_commands)) + """
Scheduler Commands:
""" + help_spacer.join(map(commandToHelp, scheduler_commands)) + """
Virtual Device Commands:
""" + help_spacer.join(map(commandToHelp, device_commands)) + """
Vnet commands:
""" + help_spacer.join(map(commandToHelp, vnet_commands)) + """
<DomName> can be substituted for <DomId> in xm subcommands.
For a short list of subcommands run 'xm help'
For more help on xm see the xm(1) man page
For more help on xm create, see the xmdomain.cfg(5) man page"""
# array for xm help <command>
help = {
"--long": longhelp
}
for command in all_commands:
# create is handled specially
if (command != 'create'):
help[command] = commandToHelp(command)
####################################################################
#
# Utility functions
#
####################################################################
def arg_check(args, name, lo, hi = -1):
n = len(args)
if hi == -1:
if n != lo:
err("'xm %s' requires %d argument%s.\n" % (name, lo,
lo > 1 and 's' or ''))
usage(name)
else:
if n < lo or n > hi:
err("'xm %s' requires between %d and %d arguments.\n" %
(name, lo, hi))
usage(name)
def unit(c):
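    # Map a K/M/G suffix to its byte multiplier; returns 0 when c is not a
    # letter (no unit present) and 1 for an unrecognised letter.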
if not c.isalpha():
return 0
base = 1
if c == 'G' or c == 'g': base = 1024 * 1024 * 1024
elif c == 'M' or c == 'm': base = 1024 * 1024
elif c == 'K' or c == 'k': base = 1024
else:
print 'ignoring unknown unit'
return base
def int_unit(str, dest):
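    # Convert a value with an optional K/M/G suffix into the destination unit,
    # e.g. int_unit("1G", 'm') -> 1024 and int_unit("512", 'm') -> 512.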
base = unit(str[-1])
if not base:
return int(str)
value = int(str[:-1])
dst_base = unit(dest)
if dst_base == 0:
dst_base = 1
if dst_base > base:
return value / (dst_base / base)
else:
return value * (base / dst_base)
def err(msg):
print >>sys.stderr, "Error:", msg
#########################################################################
#
# Main xm functions
#
#########################################################################
def xm_save(args):
arg_check(args, "save", 2)
dom = args[0] # TODO: should check if this exists
savefile = os.path.abspath(args[1])
if not os.access(os.path.dirname(savefile), os.W_OK):
err("xm save: Unable to create file %s" % savefile)
sys.exit(1)
server.xend.domain.save(dom, savefile)
def xm_restore(args):
arg_check(args, "restore", 1)
savefile = os.path.abspath(args[0])
if not os.access(savefile, os.R_OK):
err("xm restore: Unable to read file %s" % savefile)
sys.exit(1)
server.xend.domain.restore(savefile)
def getDomains(domain_names):
if domain_names:
return map(server.xend.domain, domain_names)
else:
return server.xend.domains(1)
def xm_list(args):
use_long = 0
show_vcpus = 0
try:
(options, params) = getopt.gnu_getopt(args, 'lv', ['long','vcpus'])
except getopt.GetoptError, opterr:
err(opterr)
sys.exit(1)
for (k, v) in options:
if k in ['-l', '--long']:
use_long = 1
if k in ['-v', '--vcpus']:
show_vcpus = 1
if show_vcpus:
print >>sys.stderr, (
"xm list -v is deprecated. Please use xm vcpu-list.")
xm_vcpu_list(params)
return
doms = getDomains(params)
if use_long:
map(PrettyPrint.prettyprint, doms)
else:
xm_brief_list(doms)
def parse_doms_info(info):
def get_info(n, t, d):
return t(sxp.child_value(info, n, d))
return {
'dom' : get_info('domid', int, -1),
'name' : get_info('name', str, '??'),
'mem' : get_info('memory', int, 0),
'vcpus' : get_info('online_vcpus', int, 0),
'state' : get_info('state', str, '??'),
'cpu_time' : get_info('cpu_time', float, 0),
'ssidref' : get_info('ssidref', int, 0),
}
def parse_sedf_info(info):
def get_info(n, t, d):
return t(sxp.child_value(info, n, d))
return {
'dom' : get_info('domain', int, -1),
'period' : get_info('period', int, -1),
'slice' : get_info('slice', int, -1),
'latency' : get_info('latency', int, -1),
'extratime': get_info('extratime', int, -1),
'weight' : get_info('weight', int, -1),
}
def xm_brief_list(doms):
print 'Name ID Mem(MiB) VCPUs State Time(s)'
for dom in doms:
d = parse_doms_info(dom)
if (d['ssidref'] != 0):
d['ssidstr'] = (" s:%04x/p:%04x" %
((d['ssidref'] >> 16) & 0xffff,
d['ssidref'] & 0xffff))
else:
d['ssidstr'] = ""
print ("%(name)-32s %(dom)3d %(mem)8d %(vcpus)5d %(state)5s %(cpu_time)7.1f%(ssidstr)s" % d)
def xm_vcpu_list(args):
if args:
dominfo = map(server.xend.domain.getVCPUInfo, args)
else:
doms = server.xend.domains(False)
dominfo = map(server.xend.domain.getVCPUInfo, doms)
print 'Name ID VCPU CPU State Time(s) CPU Affinity'
for dom in dominfo:
def get_info(n):
return sxp.child_value(dom, n)
#
# convert a list of integers into a list of pairs indicating
# continuous sequences in the list:
#
# [0,1,2,3] -> [(0,3)]
# [1,2,4,5] -> [(1,2),(4,5)]
# [0] -> [(0,0)]
# [0,1,4,6,7] -> [(0,1),(4,4),(6,7)]
#
def list_to_rangepairs(cmap):
cmap.sort()
pairs = []
x = y = 0
for i in range(0,len(cmap)):
try:
if ((cmap[y+1] - cmap[i]) > 1):
pairs.append((cmap[x],cmap[y]))
x = y = i+1
else:
y = y + 1
# if we go off the end, then just add x to y
except IndexError:
pairs.append((cmap[x],cmap[y]))
return pairs
#
# Convert pairs to range string, e.g: [(1,2),(3,3),(5,7)] -> 1-2,3,5-7
#
def format_pairs(pairs):
if not pairs:
return "no cpus"
out = ""
for f,s in pairs:
if (f==s):
out += '%d'%f
else:
out += '%d-%d'%(f,s)
out += ','
# trim trailing ','
return out[:-1]
def format_cpumap(cpumap):
cpumap = map(lambda x: int(x), cpumap)
cpumap.sort()
for x in server.xend.node.info()[1:]:
if len(x) > 1 and x[0] == 'nr_cpus':
nr_cpus = int(x[1])
# normalize cpumap by modulus nr_cpus, and drop duplicates
cpumap = dict.fromkeys(
map(lambda x: x % nr_cpus, cpumap)).keys()
if len(cpumap) == nr_cpus:
return "any cpu"
break
return format_pairs(list_to_rangepairs(cpumap))
name = get_info('name')
domid = int(get_info('domid'))
for vcpu in sxp.children(dom, 'vcpu'):
def vinfo(n, t):
return t(sxp.child_value(vcpu, n))
number = vinfo('number', int)
cpu = vinfo('cpu', int)
cpumap = format_cpumap(vinfo('cpumap', list))
online = vinfo('online', int)
cpu_time = vinfo('cpu_time', float)
running = vinfo('running', int)
blocked = vinfo('blocked', int)
if online:
c = str(cpu)
if running:
s = 'r'
else:
s = '-'
if blocked:
s += 'b'
else:
s += '-'
s += '-'
else:
c = "-"
s = "--p"
print (
"%(name)-32s %(domid)3d %(number)4d %(c)3s %(s)-3s %(cpu_time)7.1f %(cpumap)s" %
locals())
def xm_reboot(args):
arg_check(args, "reboot", 1, 4)
from xen.xm import shutdown
shutdown.main(["shutdown", "-R"] + args)
def xm_pause(args):
arg_check(args, "pause", 1)
dom = args[0]
server.xend.domain.pause(dom)
def xm_unpause(args):
arg_check(args, "unpause", 1)
dom = args[0]
server.xend.domain.unpause(dom)
def xm_rename(args):
arg_check(args, "rename", 2)
server.xend.domain.setName(args[0], args[1])
def xm_subcommand(command, args):
cmd = __import__(command, globals(), locals(), 'xen.xm')
cmd.main([command] + args)
#############################################################
def cpu_make_map(cpulist):
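    # Expand a cpu list specification such as "0-3,5" into a sorted list of
    # ints, e.g. cpu_make_map("0-3,5") -> [0, 1, 2, 3, 5].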
cpus = []
for c in cpulist.split(','):
if c.find('-') != -1:
(x,y) = c.split('-')
for i in range(int(x),int(y)+1):
cpus.append(int(i))
else:
cpus.append(int(c))
cpus.sort()
return cpus
def xm_vcpu_pin(args):
arg_check(args, "vcpu-pin", 3)
dom = args[0]
vcpu = int(args[1])
cpumap = cpu_make_map(args[2])
server.xend.domain.pincpu(dom, vcpu, cpumap)
def xm_mem_max(args):
arg_check(args, "mem-max", 2)
dom = args[0]
mem = int_unit(args[1], 'm')
server.xend.domain.maxmem_set(dom, mem)
def xm_mem_set(args):
arg_check(args, "mem-set", 2)
dom = args[0]
mem_target = int_unit(args[1], 'm')
server.xend.domain.setMemoryTarget(dom, mem_target)
def xm_vcpu_set(args):
arg_check(args, "vcpu-set", 2)
server.xend.domain.setVCpuCount(args[0], int(args[1]))
def xm_destroy(args):
arg_check(args, "destroy", 1)
server.xend.domain.destroy(args[0])
def xm_domid(args):
arg_check(args, "domid", 1)
name = args[0]
dom = server.xend.domain(name)
print sxp.child_value(dom, 'domid')
def xm_domname(args):
arg_check(args, "domname", 1)
name = args[0]
dom = server.xend.domain(name)
print sxp.child_value(dom, 'name')
def xm_sched_bvt(args):
arg_check(args, "sched-bvt", 6)
dom = args[0]
v = map(long, args[1:6])
server.xend.domain.cpu_bvt_set(dom, *v)
def xm_sched_bvt_ctxallow(args):
arg_check(args, "sched-bvt-ctxallow", 1)
slice = int(args[0])
server.xend.node.cpu_bvt_slice_set(slice)
def xm_sched_sedf(args):
def ns_to_ms(val):
return float(val) * 0.000001
def ms_to_ns(val):
return (float(val) / 0.000001)
def print_sedf(info):
info['period'] = ns_to_ms(info['period'])
info['slice'] = ns_to_ms(info['slice'])
info['latency'] = ns_to_ms(info['latency'])
print( ("%(name)-32s %(dom)3d %(period)9.1f %(slice)9.1f" +
" %(latency)7.1f %(extratime)6d %(weight)6d") % info)
def domid_match(domid, info):
return domid is None or domid == info['name'] or domid == str(info['dom'])
# we want to just display current info if no parameters are passed
if len(args) == 0:
domid = None
else:
# we expect at least a domain id (name or number)
# and at most a domid up to 5 options with values
arg_check(args, "sched-sedf", 1, 11)
domid = args[0]
# drop domid from args since get_opt doesn't recognize it
args = args[1:]
opts = {}
try:
(options, params) = getopt.gnu_getopt(args, 'p:s:l:e:w:',
['period=', 'slice=', 'latency=', 'extratime=', 'weight='])
except getopt.GetoptError, opterr:
err(opterr)
sys.exit(1)
# convert to nanoseconds if needed
for (k, v) in options:
if k in ['-p', '--period']:
opts['period'] = ms_to_ns(v)
elif k in ['-s', '--slice']:
opts['slice'] = ms_to_ns(v)
elif k in ['-l', '--latency']:
opts['latency'] = ms_to_ns(v)
elif k in ['-e', '--extratime']:
opts['extratime'] = v
elif k in ['-w', '--weight']:
opts['weight'] = v
# print header if we aren't setting any parameters
if len(opts.keys()) == 0:
print '%-33s %-2s %-4s %-4s %-7s %-5s %-6s'%('Name','ID','Period(ms)',
'Slice(ms)', 'Lat(ms)',
'Extra','Weight')
doms = filter(lambda x : domid_match(domid, x),
[parse_doms_info(dom) for dom in getDomains("")])
for d in doms:
# fetch current values so as not to clobber them
sedf_info = \
parse_sedf_info(server.xend.domain.cpu_sedf_get(d['dom']))
sedf_info['name'] = d['name']
# update values in case of call to set
if len(opts.keys()) > 0:
for k in opts.keys():
sedf_info[k]=opts[k]
# send the update, converting user input
v = map(int, [sedf_info['period'], sedf_info['slice'],
sedf_info['latency'],sedf_info['extratime'],
sedf_info['weight']])
rv = server.xend.domain.cpu_sedf_set(d['dom'], *v)
if int(rv) != 0:
err("Failed to set sedf parameters (rv=%d)."%(rv))
# not setting values, display info
else:
print_sedf(sedf_info)
def xm_info(args):
arg_check(args, "info", 0)
info = server.xend.node.info()
for x in info[1:]:
if len(x) < 2:
print "%-23s: (none)" % x[0]
else:
print "%-23s:" % x[0], x[1]
def xm_console(args):
arg_check(args, "console", 1)
dom = args[0]
info = server.xend.domain(dom)
domid = int(sxp.child_value(info, 'domid', '-1'))
console.execConsole(domid)
def xm_top(args):
arg_check(args, "top", 0)
os.execvp('xentop', ['xentop'])
def xm_dmesg(args):
arg_check(args, "dmesg", 0)
gopts = Opts(use="""[-c|--clear]
Read Xen's message buffer (boot output, warning and error messages) or clear
its contents if the [-c|--clear] flag is specified.
""")
gopts.opt('clear', short='c',
fn=set_true, default=0,
use="Clear the contents of the Xen message buffer.")
# Work around for gopts
myargs = args
myargs.insert(0, 'dmesg')
gopts.parse(myargs)
if not (1 <= len(myargs) <= 2):
err('Invalid arguments: ' + str(myargs))
if not gopts.vals.clear:
print server.xend.node.dmesg.info()
else:
server.xend.node.dmesg.clear()
def xm_log(args):
arg_check(args, "log", 0)
print server.xend.node.log()
def parse_dev_info(info):
def get_info(n, t, d):
i = 0
while i < len(info):
if (info[i][0] == n):
return t(info[i][1])
i = i + 1
return t(d)
return {
#common
'backend-id' : get_info('backend-id', int, -1),
'handle' : get_info('handle', int, 0),
'state' : get_info('state', int, -1),
'be-path' : get_info('backend', str, '??'),
'event-ch' : get_info('event-channel',int, -1),
#network specific
'virtual-device' : get_info('virtual-device', str, '??'),
'tx-ring-ref': get_info('tx-ring-ref', int, -1),
'rx-ring-ref': get_info('rx-ring-ref', int, -1),
'mac' : get_info('mac', str, '??'),
#block-device specific
'ring-ref' : get_info('ring-ref', int, -1),
}
def has_long_option(args):
use_long = 0
try:
(options, params) = getopt.gnu_getopt(args, 'l', ['long'])
except getopt.GetoptError, opterr:
err(opterr)
sys.exit(1)
for (k, v) in options:
if k in ['-l', '--long']:
use_long = 1
return (use_long, params)
def xm_network_list(args):
arg_check(args, "network-list", 1, 2)
(use_long, params) = has_long_option(args)
if len(params) == 0:
print 'No domain parameter given'
sys.exit(1)
dom = params[0]
if use_long:
devs = server.xend.domain.getDeviceSxprs(dom, 'vif')
map(PrettyPrint.prettyprint, devs)
else:
hdr = 0
for x in server.xend.domain.getDeviceSxprs(dom, 'vif'):
if hdr == 0:
print 'Idx BE MAC Addr. handle state evt-ch tx-/rx-ring-ref BE-path'
hdr = 1
ni = parse_dev_info(x[1])
ni['idx'] = int(x[0])
print ("%(idx)-3d "
"%(backend-id)-3d"
"%(mac)-17s "
"%(handle)-3d "
"%(state)-3d "
"%(event-ch)-3d "
"%(tx-ring-ref)-5d/%(rx-ring-ref)-5d "
"%(be-path)-30s "
% ni)
def xm_block_list(args):
arg_check(args, "block-list", 1, 2)
(use_long, params) = has_long_option(args)
if len(params) == 0:
print 'No domain parameter given'
sys.exit(1)
dom = params[0]
if use_long:
devs = server.xend.domain.getDeviceSxprs(dom, 'vbd')
map(PrettyPrint.prettyprint, devs)
else:
hdr = 0
for x in server.xend.domain.getDeviceSxprs(dom, 'vbd'):
if hdr == 0:
print 'Vdev BE handle state evt-ch ring-ref BE-path'
hdr = 1
ni = parse_dev_info(x[1])
ni['idx'] = int(x[0])
print ("%(idx)-3d "
"%(backend-id)-3d "
"%(handle)-3d "
"%(state)-3d "
"%(event-ch)-3d "
"%(ring-ref)-5d "
"%(be-path)-30s "
% ni)
def xm_vmtd_list(args):
arg_check(args, "vmtd-list", 1, 2)
(use_long, params) = has_long_option(args)
if len(params) == 0:
print 'No domain parameter given'
sys.exit(1)
dom = params[0]
if use_long:
devs = server.xend.domain.getDeviceSxprs(dom, 'vmtd')
map(PrettyPrint.prettyprint, devs)
else:
hdr = 0
for x in server.xend.domain.getDeviceSxprs(dom, 'vmtd'):
if hdr == 0:
print 'Vdev BE handle state evt-ch ring-ref BE-path'
hdr = 1
ni = parse_dev_info(x[1])
ni['idx'] = int(x[0])
print ("%(idx)-3d "
"%(backend-id)-3d "
"%(handle)-3d "
"%(state)-3d "
"%(event-ch)-3d "
"%(ring-ref)-5d "
"%(be-path)-30s "
% ni)
def xm_vtpm_list(args):
arg_check(args, "vtpm-list", 1, 2)
(use_long, params) = has_long_option(args)
if len(params) == 0:
print 'No domain parameter given'
sys.exit(1)
dom = params[0]
if use_long:
devs = server.xend.domain.getDeviceSxprs(dom, 'vtpm')
map(PrettyPrint.prettyprint, devs)
else:
hdr = 0
for x in server.xend.domain.getDeviceSxprs(dom, 'vtpm'):
if hdr == 0:
print 'Idx BE handle state evt-ch ring-ref BE-path'
hdr = 1
ni = parse_dev_info(x[1])
ni['idx'] = int(x[0])
print ("%(idx)-3d "
"%(backend-id)-3d "
"%(handle)-3d "
"%(state)-3d "
"%(event-ch)-3d "
"%(ring-ref)-5d "
"%(be-path)-30s "
% ni)
def xm_block_attach(args):
arg_check(args, 'block-attach', 4, 5)
dom = args[0]
vbd = ['vbd',
['uname', args[1]],
['dev', args[2]],
['mode', args[3]]]
if len(args) == 5:
vbd.append(['backend', args[4]])
server.xend.domain.device_create(dom, vbd)
def xm_network_attach(args):
arg_check(args, 'network-attach', 1, 10000)
dom = args[0]
vif = ['vif']
for a in args[1:]:
vif.append(a.split("="))
server.xend.domain.device_create(dom, vif)
def xm_vkpp_attach(args):
arg_check(args, 'vkpp-attach', 1, 2)
dom = args[0]
vkpp = ['vkpp']
if len(args) == 2:
vkpp.append(['backend', args[1]])
server.xend.domain.device_create(dom, vkpp)
def xm_vfb_attach(args):
arg_check(args, 'vfb-attach', 1, 2)
dom = args[0]
vfb= ['vfb']
if len(args) == 2:
vfb.append(['backend', args[1]])
server.xend.domain.device_create(dom, vfb)
def xm_vmtd_attach(args):
arg_check(args, 'vmtd-attach', 4, 5)
dom = args[0]
vmtd = ['vmtd',
['uname', args[1]],
['dev', args[2]],
['mode', args[3]]]
if len(args) == 5:
vmtd.append(['backend', args[4]])
server.xend.domain.device_create(dom, vmtd)
def xm_vs_attach(args):
arg_check(args, 'vs-attach', 1, 2)
dom = args[0]
vs = ['vs']
if len(args) == 2:
vs.append(['backend', args[1]])
server.xend.domain.device_create(dom, vs)
def xm_vbfs_attach(args):
arg_check(args, 'vbfs-attach', 2, 3)
dom = args[0]
vbfs = ['vbfs']
if len(args) == 3:
vbfs.append(['backend', args[2]])
vbfs.append(args[1].split("="))
server.xend.domain.device_create(dom, vbfs)
def detach(args, command, deviceClass):
arg_check(args, command, 2)
dom = args[0]
dev = args[1]
server.xend.domain.destroyDevice(dom, deviceClass, dev)
def xm_block_detach(args):
detach(args, 'block-detach', 'vbd')
def xm_network_detach(args):
detach(args, 'network-detach', 'vif')
def xm_vmtd_detach(args):
detach(args, 'vmtd-detach', 'vmtd')
def xm_vs_detach(args):
detach(args, 'vs-detach', 'vs')
def xm_vbfs_detach(args):
detach(args, 'vbfs-detach', 'vbfs')
def xm_vnet_list(args):
try:
(options, params) = getopt.gnu_getopt(args, 'l', ['long'])
except getopt.GetoptError, opterr:
err(opterr)
sys.exit(1)
use_long = 0
for (k, v) in options:
if k in ['-l', '--long']:
use_long = 1
if params:
use_long = 1
vnets = params
else:
vnets = server.xend_vnets()
for vnet in vnets:
try:
if use_long:
info = server.xend_vnet(vnet)
PrettyPrint.prettyprint(info)
else:
print vnet
except Exception, ex:
print vnet, ex
def xm_vnet_create(args):
arg_check(args, "vnet-create", 1)
conf = args[0]
if not os.access(conf, os.R_OK):
print "File not found: %s" % conf
sys.exit(1)
server.xend_vnet_create(conf)
def xm_vnet_delete(args):
arg_check(args, "vnet-delete", 1)
vnet = args[0]
server.xend_vnet_delete(vnet)
commands = {
# console commands
"console": xm_console,
# xenstat commands
"top": xm_top,
# domain commands
"destroy": xm_destroy,
"domid": xm_domid,
"domname": xm_domname,
"rename": xm_rename,
"restore": xm_restore,
"save": xm_save,
"reboot": xm_reboot,
"list": xm_list,
# memory commands
"mem-max": xm_mem_max,
"mem-set": xm_mem_set,
# cpu commands
"vcpu-pin": xm_vcpu_pin,
"vcpu-list": xm_vcpu_list,
"vcpu-set": xm_vcpu_set,
# special
"pause": xm_pause,
"unpause": xm_unpause,
# host commands
"dmesg": xm_dmesg,
"info": xm_info,
"log": xm_log,
# scheduler
"sched-bvt": xm_sched_bvt,
"sched-bvt-ctxallow": xm_sched_bvt_ctxallow,
"sched-sedf": xm_sched_sedf,
# block
"block-attach": xm_block_attach,
"block-detach": xm_block_detach,
"block-list": xm_block_list,
# network
"network-attach": xm_network_attach,
"network-detach": xm_network_detach,
"network-list": xm_network_list,
# vkpp
"vkpp-attach": xm_vkpp_attach,
# vfb
"vfb-attach": xm_vfb_attach,
# vmtd
"vmtd-attach": xm_vmtd_attach,
"vmtd-detach": xm_vmtd_detach,
"vmtd-list": xm_vmtd_list,
# vnet
"vnet-list": xm_vnet_list,
"vnet-create": xm_vnet_create,
"vnet-delete": xm_vnet_delete,
# vtpm
"vtpm-list": xm_vtpm_list,
# vs
"vs-attach": xm_vs_attach,
"vs-detach": xm_vs_detach,
# vbfs
"vbfs-attach": xm_vbfs_attach,
"vbfs-detach": xm_vbfs_detach,
}
## The commands supported by a separate argument parser in xend.xm.
subcommands = [
'create',
'migrate',
'sysrq',
'shutdown'
]
for c in subcommands:
commands[c] = eval('lambda args: xm_subcommand("%s", args)' % c)
aliases = {
"balloon": "mem-set",
"set-vcpus": "vcpu-set",
"vif-list": "network-list",
"vbd-create": "block-attach",
"vbd-destroy": "block-detach",
"vbd-list": "block-list",
}
def xm_lookup_cmd(cmd):
if commands.has_key(cmd):
return commands[cmd]
elif aliases.has_key(cmd):
deprecated(cmd,aliases[cmd])
return commands[aliases[cmd]]
else:
if len( cmd ) > 1:
matched_commands = filter( lambda (command, func): command[ 0:len(cmd) ] == cmd, commands.iteritems() )
if len( matched_commands ) == 1:
return matched_commands[0][1]
err('Sub Command %s not found!' % cmd)
usage()
def deprecated(old,new):
print >>sys.stderr, (
"Command %s is deprecated. Please use xm %s instead." % (old, new))
def usage(cmd=None):
if cmd == 'create':
mycmd = xm_lookup_cmd(cmd)
mycmd( ['--help'] )
sys.exit(1)
if help.has_key(cmd):
print " " + help[cmd]
else:
print shorthelp
sys.exit(1)
def main(argv=sys.argv):
if len(argv) < 2:
usage()
if re.compile('-*help').match(argv[1]):
if len(argv) > 2:
usage(argv[2])
else:
usage()
sys.exit(0)
cmd = xm_lookup_cmd(argv[1])
# strip off prog name and subcmd
args = argv[2:]
if cmd:
try:
rc = cmd(args)
if rc:
usage()
except socket.error, ex:
if os.geteuid() != 0:
err("Most commands need root access. Please try again as root.")
else:
err("Error connecting to xend: %s. Is xend running?" % ex[1])
sys.exit(1)
except KeyboardInterrupt:
print "Interrupted."
sys.exit(1)
except IOError, ex:
if os.geteuid() != 0:
err("Most commands need root access. Please try again as root.")
else:
err("Error connecting to xend: %s." % ex[1])
sys.exit(1)
except SystemExit:
sys.exit(1)
except xmlrpclib.Fault, ex:
if ex.faultCode == xen.xend.XendClient.ERROR_INVALID_DOMAIN:
print >>sys.stderr, (
"Error: the domain '%s' does not exist." % ex.faultString)
else:
print >>sys.stderr, "Error: %s" % ex.faultString
sys.exit(1)
except:
print "Unexpected error:", sys.exc_info()[0]
print
print "Please report to [email protected]"
raise
else:
usage()
if __name__ == "__main__":
main()
| gpl-2.0 |
goliveirab/odoo | addons/account_analytic_analysis/account_analytic_analysis.py | 96 | 48446 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from dateutil.relativedelta import relativedelta
import datetime
import logging
import time
from openerp.osv import osv, fields
import openerp.tools
from openerp.tools.translate import _
from openerp.addons.decimal_precision import decimal_precision as dp
_logger = logging.getLogger(__name__)
class account_analytic_invoice_line(osv.osv):
_name = "account.analytic.invoice.line"
def _amount_line(self, cr, uid, ids, prop, unknow_none, unknow_dict, context=None):
res = {}
for line in self.browse(cr, uid, ids, context=context):
res[line.id] = line.quantity * line.price_unit
if line.analytic_account_id.pricelist_id:
cur = line.analytic_account_id.pricelist_id.currency_id
res[line.id] = self.pool.get('res.currency').round(cr, uid, cur, res[line.id])
return res
_columns = {
'product_id': fields.many2one('product.product','Product',required=True),
'analytic_account_id': fields.many2one('account.analytic.account', 'Analytic Account', ondelete='cascade'),
'name': fields.text('Description', required=True),
'quantity': fields.float('Quantity', required=True),
'uom_id': fields.many2one('product.uom', 'Unit of Measure',required=True),
'price_unit': fields.float('Unit Price', required=True),
'price_subtotal': fields.function(_amount_line, string='Sub Total', type="float",digits_compute= dp.get_precision('Account')),
}
_defaults = {
'quantity' : 1,
}
def product_id_change(self, cr, uid, ids, product, uom_id, qty=0, name='', partner_id=False, price_unit=False, pricelist_id=False, company_id=None, context=None):
context = context or {}
uom_obj = self.pool.get('product.uom')
company_id = company_id or False
local_context = dict(context, company_id=company_id, force_company=company_id, pricelist=pricelist_id)
if not product:
return {'value': {'price_unit': 0.0}, 'domain':{'product_uom':[]}}
if partner_id:
part = self.pool.get('res.partner').browse(cr, uid, partner_id, context=local_context)
if part.lang:
local_context.update({'lang': part.lang})
result = {}
res = self.pool.get('product.product').browse(cr, uid, product, context=local_context)
price = False
if price_unit is not False:
price = price_unit
elif pricelist_id:
price = res.price
if price is False:
price = res.list_price
if not name:
name = self.pool.get('product.product').name_get(cr, uid, [res.id], context=local_context)[0][1]
if res.description_sale:
name += '\n'+res.description_sale
result.update({'name': name or False,'uom_id': uom_id or res.uom_id.id or False, 'price_unit': price})
res_final = {'value':result}
if result['uom_id'] != res.uom_id.id:
selected_uom = uom_obj.browse(cr, uid, result['uom_id'], context=local_context)
new_price = uom_obj._compute_price(cr, uid, res.uom_id.id, res_final['value']['price_unit'], result['uom_id'])
res_final['value']['price_unit'] = new_price
return res_final
class account_analytic_account(osv.osv):
_name = "account.analytic.account"
_inherit = "account.analytic.account"
def _analysis_all(self, cr, uid, ids, fields, arg, context=None):
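        # Computes all requested function fields in one pass: `fields` lists
        # the names asked for and each branch below fills res[id][field].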
dp = 2
res = dict([(i, {}) for i in ids])
parent_ids = tuple(ids)  # We don't want consolidation for each of these fields because these complex computations are resource-greedy.
accounts = self.browse(cr, uid, ids, context=context)
for f in fields:
if f == 'user_ids':
cr.execute('SELECT MAX(id) FROM res_users')
max_user = cr.fetchone()[0]
if parent_ids:
cr.execute('SELECT DISTINCT("user") FROM account_analytic_analysis_summary_user ' \
'WHERE account_id IN %s AND unit_amount <> 0.0', (parent_ids,))
result = cr.fetchall()
else:
result = []
for id in ids:
res[id][f] = [int((id * max_user) + x[0]) for x in result]
elif f == 'month_ids':
if parent_ids:
cr.execute('SELECT DISTINCT(month_id) FROM account_analytic_analysis_summary_month ' \
'WHERE account_id IN %s AND unit_amount <> 0.0', (parent_ids,))
result = cr.fetchall()
else:
result = []
for id in ids:
res[id][f] = [int(id * 1000000 + int(x[0])) for x in result]
elif f == 'last_worked_invoiced_date':
for id in ids:
res[id][f] = False
if parent_ids:
cr.execute("SELECT account_analytic_line.account_id, MAX(date) \
FROM account_analytic_line \
WHERE account_id IN %s \
AND invoice_id IS NOT NULL \
GROUP BY account_analytic_line.account_id;", (parent_ids,))
for account_id, sum in cr.fetchall():
if account_id not in res:
res[account_id] = {}
res[account_id][f] = sum
elif f == 'ca_to_invoice':
for id in ids:
res[id][f] = 0.0
res2 = {}
for account in accounts:
cr.execute("""
SELECT product_id, sum(amount), user_id, to_invoice, sum(unit_amount), product_uom_id, line.name
FROM account_analytic_line line
LEFT JOIN account_analytic_journal journal ON (journal.id = line.journal_id)
WHERE account_id = %s
AND journal.type != 'purchase'
AND invoice_id IS NULL
AND to_invoice IS NOT NULL
GROUP BY product_id, user_id, to_invoice, product_uom_id, line.name""", (account.id,))
res[account.id][f] = 0.0
for product_id, price, user_id, factor_id, qty, uom, line_name in cr.fetchall():
price = -price
if product_id:
price = self.pool.get('account.analytic.line')._get_invoice_price(cr, uid, account, product_id, user_id, qty, context)
factor = self.pool.get('hr_timesheet_invoice.factor').browse(cr, uid, factor_id, context=context)
res[account.id][f] += price * qty * (100-factor.factor or 0.0) / 100.0
# sum both result on account_id
for id in ids:
res[id][f] = round(res.get(id, {}).get(f, 0.0), dp) + round(res2.get(id, 0.0), 2)
elif f == 'last_invoice_date':
for id in ids:
res[id][f] = False
if parent_ids:
cr.execute ("SELECT account_analytic_line.account_id, \
DATE(MAX(account_invoice.date_invoice)) \
FROM account_analytic_line \
JOIN account_invoice \
ON account_analytic_line.invoice_id = account_invoice.id \
WHERE account_analytic_line.account_id IN %s \
AND account_analytic_line.invoice_id IS NOT NULL \
GROUP BY account_analytic_line.account_id",(parent_ids,))
for account_id, lid in cr.fetchall():
res[account_id][f] = lid
elif f == 'last_worked_date':
for id in ids:
res[id][f] = False
if parent_ids:
cr.execute("SELECT account_analytic_line.account_id, MAX(date) \
FROM account_analytic_line \
WHERE account_id IN %s \
AND invoice_id IS NULL \
GROUP BY account_analytic_line.account_id",(parent_ids,))
for account_id, lwd in cr.fetchall():
if account_id not in res:
res[account_id] = {}
res[account_id][f] = lwd
elif f == 'hours_qtt_non_invoiced':
for id in ids:
res[id][f] = 0.0
if parent_ids:
cr.execute("SELECT account_analytic_line.account_id, COALESCE(SUM(unit_amount), 0.0) \
FROM account_analytic_line \
JOIN account_analytic_journal \
ON account_analytic_line.journal_id = account_analytic_journal.id \
WHERE account_analytic_line.account_id IN %s \
AND account_analytic_journal.type='general' \
AND invoice_id IS NULL \
AND to_invoice IS NOT NULL \
GROUP BY account_analytic_line.account_id;",(parent_ids,))
for account_id, sua in cr.fetchall():
if account_id not in res:
res[account_id] = {}
res[account_id][f] = round(sua, dp)
for id in ids:
res[id][f] = round(res[id][f], dp)
elif f == 'hours_quantity':
for id in ids:
res[id][f] = 0.0
if parent_ids:
cr.execute("SELECT account_analytic_line.account_id, COALESCE(SUM(unit_amount), 0.0) \
FROM account_analytic_line \
JOIN account_analytic_journal \
ON account_analytic_line.journal_id = account_analytic_journal.id \
WHERE account_analytic_line.account_id IN %s \
AND account_analytic_journal.type='general' \
GROUP BY account_analytic_line.account_id",(parent_ids,))
ff = cr.fetchall()
for account_id, hq in ff:
if account_id not in res:
res[account_id] = {}
res[account_id][f] = round(hq, dp)
for id in ids:
res[id][f] = round(res[id][f], dp)
elif f == 'ca_theorical':
# TODO Take care of pricelist and purchase !
for id in ids:
res[id][f] = 0.0
# Warning
# This computation doesn't take care of pricelist !
# Just consider list_price
if parent_ids:
cr.execute("""SELECT account_analytic_line.account_id AS account_id, \
COALESCE(SUM((account_analytic_line.unit_amount * pt.list_price) \
- (account_analytic_line.unit_amount * pt.list_price \
* hr.factor)), 0.0) AS somme
FROM account_analytic_line \
LEFT JOIN account_analytic_journal \
ON (account_analytic_line.journal_id = account_analytic_journal.id) \
JOIN product_product pp \
ON (account_analytic_line.product_id = pp.id) \
JOIN product_template pt \
ON (pp.product_tmpl_id = pt.id) \
JOIN account_analytic_account a \
ON (a.id=account_analytic_line.account_id) \
JOIN hr_timesheet_invoice_factor hr \
ON (hr.id=a.to_invoice) \
WHERE account_analytic_line.account_id IN %s \
AND a.to_invoice IS NOT NULL \
AND account_analytic_journal.type IN ('purchase', 'general')
GROUP BY account_analytic_line.account_id""",(parent_ids,))
for account_id, sum in cr.fetchall():
res[account_id][f] = round(sum, dp)
return res
def _ca_invoiced_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
res_final = {}
child_ids = tuple(ids)  # We don't want consolidation for each of these fields because these complex computations are resource-greedy.
for i in child_ids:
res[i] = 0.0
if not child_ids:
return res
if child_ids:
#Search all invoice lines not in cancelled state that refer to this analytic account
inv_line_obj = self.pool.get("account.invoice.line")
inv_lines = inv_line_obj.search(cr, uid, ['&', ('account_analytic_id', 'in', child_ids), ('invoice_id.state', 'not in', ['draft', 'cancel']), ('invoice_id.type', 'in', ['out_invoice', 'out_refund'])], context=context)
for line in inv_line_obj.browse(cr, uid, inv_lines, context=context):
if line.invoice_id.type == 'out_refund':
res[line.account_analytic_id.id] -= line.price_subtotal
else:
res[line.account_analytic_id.id] += line.price_subtotal
for acc in self.browse(cr, uid, res.keys(), context=context):
res[acc.id] = res[acc.id] - (acc.timesheet_ca_invoiced or 0.0)
res_final = res
return res_final
def _total_cost_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
res_final = {}
child_ids = tuple(ids)  # We don't want consolidation for each of these fields because these complex computations are resource-greedy.
for i in child_ids:
res[i] = 0.0
if not child_ids:
return res
if child_ids:
cr.execute("""SELECT account_analytic_line.account_id, COALESCE(SUM(amount), 0.0) \
FROM account_analytic_line \
JOIN account_analytic_journal \
ON account_analytic_line.journal_id = account_analytic_journal.id \
WHERE account_analytic_line.account_id IN %s \
AND amount<0 \
GROUP BY account_analytic_line.account_id""",(child_ids,))
for account_id, sum in cr.fetchall():
res[account_id] = round(sum,2)
res_final = res
return res_final
def _remaining_hours_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
for account in self.browse(cr, uid, ids, context=context):
if account.quantity_max != 0:
res[account.id] = account.quantity_max - account.hours_quantity
else:
res[account.id] = 0.0
for id in ids:
res[id] = round(res.get(id, 0.0),2)
return res
def _remaining_hours_to_invoice_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
for account in self.browse(cr, uid, ids, context=context):
res[account.id] = max(account.hours_qtt_est - account.timesheet_ca_invoiced, account.ca_to_invoice)
return res
def _hours_qtt_invoiced_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
for account in self.browse(cr, uid, ids, context=context):
res[account.id] = account.hours_quantity - account.hours_qtt_non_invoiced
if res[account.id] < 0:
res[account.id] = 0.0
for id in ids:
res[id] = round(res.get(id, 0.0),2)
return res
def _revenue_per_hour_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
for account in self.browse(cr, uid, ids, context=context):
if account.hours_qtt_invoiced == 0:
res[account.id]=0.0
else:
res[account.id] = account.ca_invoiced / account.hours_qtt_invoiced
for id in ids:
res[id] = round(res.get(id, 0.0),2)
return res
def _real_margin_rate_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
for account in self.browse(cr, uid, ids, context=context):
if account.ca_invoiced == 0:
res[account.id]=0.0
elif account.total_cost != 0.0:
res[account.id] = -(account.real_margin / account.total_cost) * 100
else:
res[account.id] = 0.0
for id in ids:
res[id] = round(res.get(id, 0.0),2)
return res
def _fix_price_to_invoice_calc(self, cr, uid, ids, name, arg, context=None):
sale_obj = self.pool.get('sale.order')
res = {}
for account in self.browse(cr, uid, ids, context=context):
res[account.id] = 0.0
sale_ids = sale_obj.search(cr, uid, [('project_id','=', account.id), ('state', '=', 'manual')], context=context)
for sale in sale_obj.browse(cr, uid, sale_ids, context=context):
res[account.id] += sale.amount_untaxed
for invoice in sale.invoice_ids:
if invoice.state != 'cancel':
res[account.id] -= invoice.amount_untaxed
return res
def _timesheet_ca_invoiced_calc(self, cr, uid, ids, name, arg, context=None):
lines_obj = self.pool.get('account.analytic.line')
res = {}
inv_ids = []
for account in self.browse(cr, uid, ids, context=context):
res[account.id] = 0.0
line_ids = lines_obj.search(cr, uid, [('account_id','=', account.id), ('invoice_id','!=',False), ('invoice_id.state', 'not in', ['draft', 'cancel']), ('to_invoice','!=', False), ('journal_id.type', '=', 'general'), ('invoice_id.type', 'in', ['out_invoice', 'out_refund'])], context=context)
for line in lines_obj.browse(cr, uid, line_ids, context=context):
if line.invoice_id not in inv_ids:
inv_ids.append(line.invoice_id)
if line.invoice_id.type == 'out_refund':
res[account.id] -= line.invoice_id.amount_untaxed
else:
res[account.id] += line.invoice_id.amount_untaxed
return res
def _remaining_ca_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
for account in self.browse(cr, uid, ids, context=context):
res[account.id] = max(account.amount_max - account.ca_invoiced, account.fix_price_to_invoice)
return res
def _real_margin_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
for account in self.browse(cr, uid, ids, context=context):
res[account.id] = account.ca_invoiced + account.total_cost
for id in ids:
res[id] = round(res.get(id, 0.0),2)
return res
def _theorical_margin_calc(self, cr, uid, ids, name, arg, context=None):
res = {}
for account in self.browse(cr, uid, ids, context=context):
res[account.id] = account.ca_theorical + account.total_cost
for id in ids:
res[id] = round(res.get(id, 0.0),2)
return res
def _is_overdue_quantity(self, cr, uid, ids, fieldnames, args, context=None):
result = dict.fromkeys(ids, 0)
for record in self.browse(cr, uid, ids, context=context):
if record.quantity_max > 0.0:
result[record.id] = int(record.hours_quantity > record.quantity_max)
else:
result[record.id] = 0
return result
def _get_analytic_account(self, cr, uid, ids, context=None):
result = set()
for line in self.pool.get('account.analytic.line').browse(cr, uid, ids, context=context):
result.add(line.account_id.id)
return list(result)
def _get_total_estimation(self, account):
tot_est = 0.0
if account.fix_price_invoices:
tot_est += account.amount_max
if account.invoice_on_timesheets:
tot_est += account.hours_qtt_est
return tot_est
def _get_total_invoiced(self, account):
total_invoiced = 0.0
if account.fix_price_invoices:
total_invoiced += account.ca_invoiced
if account.invoice_on_timesheets:
total_invoiced += account.timesheet_ca_invoiced
return total_invoiced
def _get_total_remaining(self, account):
total_remaining = 0.0
if account.fix_price_invoices:
total_remaining += account.remaining_ca
if account.invoice_on_timesheets:
total_remaining += account.remaining_hours_to_invoice
return total_remaining
def _get_total_toinvoice(self, account):
total_toinvoice = 0.0
if account.fix_price_invoices:
total_toinvoice += account.fix_price_to_invoice
if account.invoice_on_timesheets:
total_toinvoice += account.ca_to_invoice
return total_toinvoice
def _sum_of_fields(self, cr, uid, ids, name, arg, context=None):
res = dict([(i, {}) for i in ids])
for account in self.browse(cr, uid, ids, context=context):
res[account.id]['est_total'] = self._get_total_estimation(account)
res[account.id]['invoiced_total'] = self._get_total_invoiced(account)
res[account.id]['remaining_total'] = self._get_total_remaining(account)
res[account.id]['toinvoice_total'] = self._get_total_toinvoice(account)
return res
_columns = {
'is_overdue_quantity' : fields.function(_is_overdue_quantity, method=True, type='boolean', string='Overdue Quantity',
store={
'account.analytic.line' : (_get_analytic_account, None, 20),
'account.analytic.account': (lambda self, cr, uid, ids, c=None: ids, ['quantity_max'], 10),
}),
'ca_invoiced': fields.function(_ca_invoiced_calc, type='float', string='Invoiced Amount',
help="Total customer invoiced amount for this account.",
digits_compute=dp.get_precision('Account')),
'total_cost': fields.function(_total_cost_calc, type='float', string='Total Costs',
help="Total of costs for this account. It includes real costs (from invoices) and indirect costs, like time spent on timesheets.",
digits_compute=dp.get_precision('Account')),
'ca_to_invoice': fields.function(_analysis_all, multi='analytic_analysis', type='float', string='Uninvoiced Amount',
help="If invoice from analytic account, the remaining amount you can invoice to the customer based on the total costs.",
digits_compute=dp.get_precision('Account')),
'ca_theorical': fields.function(_analysis_all, multi='analytic_analysis', type='float', string='Theoretical Revenue',
help="Based on the costs you had on the project, what would have been the revenue if all these costs have been invoiced at the normal sale price provided by the pricelist.",
digits_compute=dp.get_precision('Account')),
'hours_quantity': fields.function(_analysis_all, multi='analytic_analysis', type='float', string='Total Worked Time',
help="Number of time you spent on the analytic account (from timesheet). It computes quantities on all journal of type 'general'."),
'last_invoice_date': fields.function(_analysis_all, multi='analytic_analysis', type='date', string='Last Invoice Date',
help="If invoice from the costs, this is the date of the latest invoiced."),
'last_worked_invoiced_date': fields.function(_analysis_all, multi='analytic_analysis', type='date', string='Date of Last Invoiced Cost',
help="If invoice from the costs, this is the date of the latest work or cost that have been invoiced."),
'last_worked_date': fields.function(_analysis_all, multi='analytic_analysis', type='date', string='Date of Last Cost/Work',
help="Date of the latest work done on this account."),
'hours_qtt_non_invoiced': fields.function(_analysis_all, multi='analytic_analysis', type='float', string='Uninvoiced Time',
help="Number of time (hours/days) (from journal of type 'general') that can be invoiced if you invoice based on analytic account."),
'hours_qtt_invoiced': fields.function(_hours_qtt_invoiced_calc, type='float', string='Invoiced Time',
help="Number of time (hours/days) that can be invoiced plus those that already have been invoiced."),
'remaining_hours': fields.function(_remaining_hours_calc, type='float', string='Remaining Time',
help="Computed using the formula: Maximum Time - Total Worked Time"),
'remaining_hours_to_invoice': fields.function(_remaining_hours_to_invoice_calc, type='float', string='Remaining Time',
help="Computed using the formula: Expected on timesheets - Total invoiced on timesheets"),
'fix_price_to_invoice': fields.function(_fix_price_to_invoice_calc, type='float', string='Remaining Time',
help="Sum of quotations for this contract."),
'timesheet_ca_invoiced': fields.function(_timesheet_ca_invoiced_calc, type='float', string='Remaining Time',
help="Sum of timesheet lines invoiced for this contract."),
'remaining_ca': fields.function(_remaining_ca_calc, type='float', string='Remaining Revenue',
help="Computed using the formula: Max Invoice Price - Invoiced Amount.",
digits_compute=dp.get_precision('Account')),
'revenue_per_hour': fields.function(_revenue_per_hour_calc, type='float', string='Revenue per Time (real)',
help="Computed using the formula: Invoiced Amount / Total Time",
digits_compute=dp.get_precision('Account')),
'real_margin': fields.function(_real_margin_calc, type='float', string='Real Margin',
help="Computed using the formula: Invoiced Amount - Total Costs.",
digits_compute=dp.get_precision('Account')),
'theorical_margin': fields.function(_theorical_margin_calc, type='float', string='Theoretical Margin',
help="Computed using the formula: Theoretical Revenue - Total Costs",
digits_compute=dp.get_precision('Account')),
'real_margin_rate': fields.function(_real_margin_rate_calc, type='float', string='Real Margin Rate (%)',
help="Computes using the formula: (Real Margin / Total Costs) * 100.",
digits_compute=dp.get_precision('Account')),
'fix_price_invoices' : fields.boolean('Fixed Price'),
'invoice_on_timesheets' : fields.boolean("On Timesheets"),
'month_ids': fields.function(_analysis_all, multi='analytic_analysis', type='many2many', relation='account_analytic_analysis.summary.month', string='Month'),
'user_ids': fields.function(_analysis_all, multi='analytic_analysis', type="many2many", relation='account_analytic_analysis.summary.user', string='User'),
'hours_qtt_est': fields.float('Estimation of Hours to Invoice'),
'est_total' : fields.function(_sum_of_fields, type="float",multi="sum_of_all", string="Total Estimation"),
'invoiced_total' : fields.function(_sum_of_fields, type="float",multi="sum_of_all", string="Total Invoiced"),
'remaining_total' : fields.function(_sum_of_fields, type="float",multi="sum_of_all", string="Total Remaining", help="Expectation of remaining income for this contract. Computed as the sum of remaining subtotals which, in turn, are computed as the maximum between '(Estimation - Invoiced)' and 'To Invoice' amounts"),
'toinvoice_total' : fields.function(_sum_of_fields, type="float",multi="sum_of_all", string="Total to Invoice", help=" Sum of everything that could be invoiced for this contract."),
'recurring_invoice_line_ids': fields.one2many('account.analytic.invoice.line', 'analytic_account_id', 'Invoice Lines', copy=True),
'recurring_invoices' : fields.boolean('Generate recurring invoices automatically'),
'recurring_rule_type': fields.selection([
('daily', 'Day(s)'),
('weekly', 'Week(s)'),
('monthly', 'Month(s)'),
('yearly', 'Year(s)'),
            ], 'Recurrency', help="Invoices are automatically generated at the specified interval."),
        'recurring_interval': fields.integer('Repeat Every', help="Repeat every (Days/Weeks/Months/Years)"),
'recurring_next_date': fields.date('Date of Next Invoice'),
}
_defaults = {
'recurring_interval': 1,
'recurring_next_date': lambda *a: time.strftime('%Y-%m-%d'),
        'recurring_rule_type': 'monthly'
}
def open_sale_order_lines(self,cr,uid,ids,context=None):
if context is None:
context = {}
sale_ids = self.pool.get('sale.order').search(cr,uid,[('project_id','=',context.get('search_default_project_id',False)),('partner_id','in',context.get('search_default_partner_id',False))])
names = [record.name for record in self.browse(cr, uid, ids, context=context)]
name = _('Sales Order Lines to Invoice of %s') % ','.join(names)
return {
'type': 'ir.actions.act_window',
'name': name,
'view_type': 'form',
'view_mode': 'tree,form',
'context': context,
'domain' : [('order_id','in',sale_ids)],
'res_model': 'sale.order.line',
'nodestroy': True,
}
def on_change_template(self, cr, uid, ids, template_id, date_start=False, context=None):
if not template_id:
return {}
res = super(account_analytic_account, self).on_change_template(cr, uid, ids, template_id, date_start=date_start, context=context)
template = self.browse(cr, uid, template_id, context=context)
if not ids:
res['value']['fix_price_invoices'] = template.fix_price_invoices
res['value']['amount_max'] = template.amount_max
if not ids:
res['value']['invoice_on_timesheets'] = template.invoice_on_timesheets
res['value']['hours_qtt_est'] = template.hours_qtt_est
if template.to_invoice.id:
res['value']['to_invoice'] = template.to_invoice.id
if template.pricelist_id.id:
res['value']['pricelist_id'] = template.pricelist_id.id
if not ids:
invoice_line_ids = []
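            # ORM note: each (0, 0, vals) triple appended below is the
            # one2many command that creates a new linked record from
            # ``vals`` when the change is saved.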
for x in template.recurring_invoice_line_ids:
invoice_line_ids.append((0, 0, {
'product_id': x.product_id.id,
'uom_id': x.uom_id.id,
'name': x.name,
'quantity': x.quantity,
'price_unit': x.price_unit,
'analytic_account_id': x.analytic_account_id and x.analytic_account_id.id or False,
}))
res['value']['recurring_invoices'] = template.recurring_invoices
res['value']['recurring_interval'] = template.recurring_interval
res['value']['recurring_rule_type'] = template.recurring_rule_type
res['value']['recurring_invoice_line_ids'] = invoice_line_ids
return res
def onchange_recurring_invoices(self, cr, uid, ids, recurring_invoices, date_start=False, context=None):
value = {}
if date_start and recurring_invoices:
value = {'value': {'recurring_next_date': date_start}}
return value
def cron_account_analytic_account(self, cr, uid, context=None):
context = dict(context or {})
remind = {}
def fill_remind(key, domain, write_pending=False):
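            # Collects matching accounts into a nested mapping:
            # remind[manager_id][key][partner] -> [account, ...]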
base_domain = [
('type', '=', 'contract'),
('partner_id', '!=', False),
('manager_id', '!=', False),
('manager_id.email', '!=', False),
]
base_domain.extend(domain)
accounts_ids = self.search(cr, uid, base_domain, context=context, order='name asc')
accounts = self.browse(cr, uid, accounts_ids, context=context)
for account in accounts:
if write_pending:
account.write({'state' : 'pending'})
remind_user = remind.setdefault(account.manager_id.id, {})
remind_type = remind_user.setdefault(key, {})
                remind_type.setdefault(account.partner_id, []).append(account)
# Already expired
fill_remind("old", [('state', 'in', ['pending'])])
# Expires now
fill_remind("new", [('state', 'in', ['draft', 'open']), '|', '&', ('date', '!=', False), ('date', '<=', time.strftime('%Y-%m-%d')), ('is_overdue_quantity', '=', True)], True)
# Expires in less than 30 days
fill_remind("future", [('state', 'in', ['draft', 'open']), ('date', '!=', False), ('date', '<', (datetime.datetime.now() + datetime.timedelta(30)).strftime("%Y-%m-%d"))])
context['base_url'] = self.pool.get('ir.config_parameter').get_param(cr, uid, 'web.base.url')
context['action_id'] = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account_analytic_analysis', 'action_account_analytic_overdue_all')[1]
template_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account_analytic_analysis', 'account_analytic_cron_email_template')[1]
for user_id, data in remind.items():
context["data"] = data
_logger.debug("Sending reminder to uid %s", user_id)
self.pool.get('email.template').send_mail(cr, uid, template_id, user_id, force_send=True, context=context)
return True
def onchange_invoice_on_timesheets(self, cr, uid, ids, invoice_on_timesheets, context=None):
if not invoice_on_timesheets:
return {'value': {'to_invoice': False}}
result = {'value': {'use_timesheets': True}}
try:
to_invoice = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'hr_timesheet_invoice', 'timesheet_invoice_factor1')
result['value']['to_invoice'] = to_invoice[1]
except ValueError:
pass
return result
def hr_to_invoice_timesheets(self, cr, uid, ids, context=None):
domain = [('invoice_id','=',False),('to_invoice','!=',False), ('journal_id.type', '=', 'general'), ('account_id', 'in', ids)]
names = [record.name for record in self.browse(cr, uid, ids, context=context)]
name = _('Timesheets to Invoice of %s') % ','.join(names)
return {
'type': 'ir.actions.act_window',
'name': name,
'view_type': 'form',
'view_mode': 'tree,form',
'domain' : domain,
'res_model': 'account.analytic.line',
'nodestroy': True,
}
def _prepare_invoice_data(self, cr, uid, contract, context=None):
context = context or {}
journal_obj = self.pool.get('account.journal')
fpos_obj = self.pool['account.fiscal.position']
partner = contract.partner_id
if not partner:
raise osv.except_osv(_('No Customer Defined!'),_("You must first select a Customer for Contract %s!") % contract.name )
fpos_id = fpos_obj.get_fiscal_position(cr, uid, partner.company_id.id, partner.id, context=context)
journal_ids = journal_obj.search(cr, uid, [('type', '=','sale'),('company_id', '=', contract.company_id.id or False)], limit=1)
if not journal_ids:
raise osv.except_osv(_('Error!'),
_('Please define a sale journal for the company "%s".') % (contract.company_id.name or '', ))
partner_payment_term = partner.property_payment_term and partner.property_payment_term.id or False
currency_id = False
if contract.pricelist_id:
currency_id = contract.pricelist_id.currency_id.id
elif partner.property_product_pricelist:
currency_id = partner.property_product_pricelist.currency_id.id
elif contract.company_id:
currency_id = contract.company_id.currency_id.id
invoice = {
'account_id': partner.property_account_receivable.id,
'type': 'out_invoice',
'partner_id': partner.id,
'currency_id': currency_id,
'journal_id': len(journal_ids) and journal_ids[0] or False,
'date_invoice': contract.recurring_next_date,
'origin': contract.code,
'fiscal_position': fpos_id,
'payment_term': partner_payment_term,
'company_id': contract.company_id.id or False,
'user_id': contract.manager_id.id or uid,
}
return invoice
def _prepare_invoice_line(self, cr, uid, line, fiscal_position, context=None):
fpos_obj = self.pool.get('account.fiscal.position')
res = line.product_id
account_id = res.property_account_income.id
if not account_id:
account_id = res.categ_id.property_account_income_categ.id
account_id = fpos_obj.map_account(cr, uid, fiscal_position, account_id)
taxes = res.taxes_id or False
tax_id = fpos_obj.map_tax(cr, uid, fiscal_position, taxes)
values = {
'name': line.name,
'account_id': account_id,
'account_analytic_id': line.analytic_account_id.id,
'price_unit': line.price_unit or 0.0,
'quantity': line.quantity,
'uos_id': line.uom_id.id or False,
'product_id': line.product_id.id or False,
'invoice_line_tax_id': [(6, 0, tax_id)],
}
return values
def _prepare_invoice_lines(self, cr, uid, contract, fiscal_position_id, context=None):
fpos_obj = self.pool.get('account.fiscal.position')
fiscal_position = None
if fiscal_position_id:
fiscal_position = fpos_obj.browse(cr, uid, fiscal_position_id, context=context)
invoice_lines = []
for line in contract.recurring_invoice_line_ids:
values = self._prepare_invoice_line(cr, uid, line, fiscal_position, context=context)
invoice_lines.append((0, 0, values))
return invoice_lines
def _prepare_invoice(self, cr, uid, contract, context=None):
invoice = self._prepare_invoice_data(cr, uid, contract, context=context)
invoice['invoice_line'] = self._prepare_invoice_lines(cr, uid, contract, invoice['fiscal_position'], context=context)
return invoice
def recurring_create_invoice(self, cr, uid, ids, context=None):
return self._recurring_create_invoice(cr, uid, ids, context=context)
def _cron_recurring_create_invoice(self, cr, uid, context=None):
return self._recurring_create_invoice(cr, uid, [], automatic=True, context=context)
def _recurring_create_invoice(self, cr, uid, ids, automatic=False, context=None):
context = context or {}
invoice_ids = []
current_date = time.strftime('%Y-%m-%d')
if ids:
contract_ids = ids
else:
contract_ids = self.search(cr, uid, [('recurring_next_date','<=', current_date), ('state','=', 'open'), ('recurring_invoices','=', True), ('type', '=', 'contract')])
if contract_ids:
cr.execute('SELECT company_id, array_agg(id) as ids FROM account_analytic_account WHERE id IN %s GROUP BY company_id', (tuple(contract_ids),))
for company_id, ids in cr.fetchall():
for contract in self.browse(cr, uid, ids, context=dict(context, company_id=company_id, force_company=company_id)):
try:
invoice_values = self._prepare_invoice(cr, uid, contract, context=context)
invoice_ids.append(self.pool['account.invoice'].create(cr, uid, invoice_values, context=context))
next_date = datetime.datetime.strptime(contract.recurring_next_date or current_date, "%Y-%m-%d")
interval = contract.recurring_interval
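                        # relativedelta steps calendar-aware: e.g. a monthly
                        # contract last invoiced on 2015-01-31 with interval 1
                        # rolls to 2015-02-28 instead of overflowing the month.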
if contract.recurring_rule_type == 'daily':
new_date = next_date+relativedelta(days=+interval)
elif contract.recurring_rule_type == 'weekly':
new_date = next_date+relativedelta(weeks=+interval)
elif contract.recurring_rule_type == 'monthly':
new_date = next_date+relativedelta(months=+interval)
else:
new_date = next_date+relativedelta(years=+interval)
self.write(cr, uid, [contract.id], {'recurring_next_date': new_date.strftime('%Y-%m-%d')}, context=context)
if automatic:
cr.commit()
except Exception:
if automatic:
cr.rollback()
                            _logger.exception('Failed to create recurring invoice for contract %s', contract.code)
else:
raise
return invoice_ids
class account_analytic_account_summary_user(osv.osv):
_name = "account_analytic_analysis.summary.user"
_description = "Hours Summary by User"
_order='user'
_auto = False
_rec_name = 'user'
def _unit_amount(self, cr, uid, ids, name, arg, context=None):
res = {}
account_obj = self.pool.get('account.analytic.account')
cr.execute('SELECT MAX(id) FROM res_users')
max_user = cr.fetchone()[0]
account_ids = [int(str(x/max_user - (x%max_user == 0 and 1 or 0))) for x in ids]
user_ids = [int(str(x-((x/max_user - (x%max_user == 0 and 1 or 0)) *max_user))) for x in ids]
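        # The summary view packs (account_id, user_id) into a single id as
        # id = account_id * max_user + user_id, assuming 1 <= user_id <=
        # max_user; e.g. with max_user == 10, id 42 -> account 4 / user 2,
        # and id 40 -> account 3 / user 10 (hence the "x % max_user == 0"
        # correction above).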
        parent_ids = tuple(account_ids)  # We don't want consolidation for each of these fields because such complex computations are resource-greedy.
if parent_ids:
cr.execute('SELECT id, unit_amount ' \
'FROM account_analytic_analysis_summary_user ' \
'WHERE account_id IN %s ' \
'AND "user" IN %s',(parent_ids, tuple(user_ids),))
for sum_id, unit_amount in cr.fetchall():
res[sum_id] = unit_amount
for id in ids:
res[id] = round(res.get(id, 0.0), 2)
return res
_columns = {
'account_id': fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
'unit_amount': fields.float('Total Time'),
'user': fields.many2one('res.users', 'User'),
}
_depends = {
'res.users': ['id'],
'account.analytic.line': ['account_id', 'journal_id', 'unit_amount', 'user_id'],
'account.analytic.journal': ['type'],
}
def init(self, cr):
openerp.tools.sql.drop_view_if_exists(cr, 'account_analytic_analysis_summary_user')
cr.execute('''CREATE OR REPLACE VIEW account_analytic_analysis_summary_user AS (
with mu as
(select max(id) as max_user from res_users)
, lu AS
(SELECT
l.account_id AS account_id,
coalesce(l.user_id, 0) AS user_id,
SUM(l.unit_amount) AS unit_amount
FROM account_analytic_line AS l,
account_analytic_journal AS j
WHERE (j.type = 'general' ) and (j.id=l.journal_id)
GROUP BY l.account_id, l.user_id
)
select (lu.account_id * mu.max_user) + lu.user_id as id,
lu.account_id as account_id,
lu.user_id as "user",
unit_amount
from lu, mu)''')
class account_analytic_account_summary_month(osv.osv):
_name = "account_analytic_analysis.summary.month"
_description = "Hours summary by month"
_auto = False
_rec_name = 'month'
_columns = {
'account_id': fields.many2one('account.analytic.account', 'Analytic Account', readonly=True),
'unit_amount': fields.float('Total Time'),
'month': fields.char('Month', size=32, readonly=True),
}
_depends = {
'account.analytic.line': ['account_id', 'date', 'journal_id', 'unit_amount'],
'account.analytic.journal': ['type'],
}
def init(self, cr):
openerp.tools.sql.drop_view_if_exists(cr, 'account_analytic_analysis_summary_month')
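        # The view synthesizes one row id per (account, month) as
        # account_id * 1000000 + YYYYMM; e.g. account 7 in March 2015
        # yields id 7201503.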
cr.execute('CREATE VIEW account_analytic_analysis_summary_month AS (' \
'SELECT ' \
'(TO_NUMBER(TO_CHAR(d.month, \'YYYYMM\'), \'999999\') + (d.account_id * 1000000::bigint))::bigint AS id, ' \
'd.account_id AS account_id, ' \
'TO_CHAR(d.month, \'Mon YYYY\') AS month, ' \
'TO_NUMBER(TO_CHAR(d.month, \'YYYYMM\'), \'999999\') AS month_id, ' \
'COALESCE(SUM(l.unit_amount), 0.0) AS unit_amount ' \
'FROM ' \
'(SELECT ' \
'd2.account_id, ' \
'd2.month ' \
'FROM ' \
'(SELECT ' \
'a.id AS account_id, ' \
'l.month AS month ' \
'FROM ' \
'(SELECT ' \
'DATE_TRUNC(\'month\', l.date) AS month ' \
'FROM account_analytic_line AS l, ' \
'account_analytic_journal AS j ' \
'WHERE j.type = \'general\' ' \
'GROUP BY DATE_TRUNC(\'month\', l.date) ' \
') AS l, ' \
'account_analytic_account AS a ' \
'GROUP BY l.month, a.id ' \
') AS d2 ' \
'GROUP BY d2.account_id, d2.month ' \
') AS d ' \
'LEFT JOIN ' \
'(SELECT ' \
'l.account_id AS account_id, ' \
'DATE_TRUNC(\'month\', l.date) AS month, ' \
'SUM(l.unit_amount) AS unit_amount ' \
'FROM account_analytic_line AS l, ' \
'account_analytic_journal AS j ' \
'WHERE (j.type = \'general\') and (j.id=l.journal_id) ' \
'GROUP BY l.account_id, DATE_TRUNC(\'month\', l.date) ' \
') AS l '
'ON (' \
'd.account_id = l.account_id ' \
'AND d.month = l.month' \
') ' \
'GROUP BY d.month, d.account_id ' \
')')
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
cgvarela/vitess | py/vtproto/mysqlctl_pb2.py | 6 | 9108 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: mysqlctl.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='mysqlctl.proto',
package='mysqlctl',
serialized_pb=_b('\n\x0emysqlctl.proto\x12\x08mysqlctl\"\x0e\n\x0cStartRequest\"\x0f\n\rStartResponse\"*\n\x0fShutdownRequest\x12\x17\n\x0fwait_for_mysqld\x18\x01 \x01(\x08\"\x12\n\x10ShutdownResponse\"\x18\n\x16RunMysqlUpgradeRequest\"\x19\n\x17RunMysqlUpgradeResponse2\xe5\x01\n\x08MysqlCtl\x12:\n\x05Start\x12\x16.mysqlctl.StartRequest\x1a\x17.mysqlctl.StartResponse\"\x00\x12\x43\n\x08Shutdown\x12\x19.mysqlctl.ShutdownRequest\x1a\x1a.mysqlctl.ShutdownResponse\"\x00\x12X\n\x0fRunMysqlUpgrade\x12 .mysqlctl.RunMysqlUpgradeRequest\x1a!.mysqlctl.RunMysqlUpgradeResponse\"\x00\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_STARTREQUEST = _descriptor.Descriptor(
name='StartRequest',
full_name='mysqlctl.StartRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=28,
serialized_end=42,
)
_STARTRESPONSE = _descriptor.Descriptor(
name='StartResponse',
full_name='mysqlctl.StartResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=44,
serialized_end=59,
)
_SHUTDOWNREQUEST = _descriptor.Descriptor(
name='ShutdownRequest',
full_name='mysqlctl.ShutdownRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='wait_for_mysqld', full_name='mysqlctl.ShutdownRequest.wait_for_mysqld', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=61,
serialized_end=103,
)
_SHUTDOWNRESPONSE = _descriptor.Descriptor(
name='ShutdownResponse',
full_name='mysqlctl.ShutdownResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=105,
serialized_end=123,
)
_RUNMYSQLUPGRADEREQUEST = _descriptor.Descriptor(
name='RunMysqlUpgradeRequest',
full_name='mysqlctl.RunMysqlUpgradeRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=125,
serialized_end=149,
)
_RUNMYSQLUPGRADERESPONSE = _descriptor.Descriptor(
name='RunMysqlUpgradeResponse',
full_name='mysqlctl.RunMysqlUpgradeResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
extension_ranges=[],
oneofs=[
],
serialized_start=151,
serialized_end=176,
)
DESCRIPTOR.message_types_by_name['StartRequest'] = _STARTREQUEST
DESCRIPTOR.message_types_by_name['StartResponse'] = _STARTRESPONSE
DESCRIPTOR.message_types_by_name['ShutdownRequest'] = _SHUTDOWNREQUEST
DESCRIPTOR.message_types_by_name['ShutdownResponse'] = _SHUTDOWNRESPONSE
DESCRIPTOR.message_types_by_name['RunMysqlUpgradeRequest'] = _RUNMYSQLUPGRADEREQUEST
DESCRIPTOR.message_types_by_name['RunMysqlUpgradeResponse'] = _RUNMYSQLUPGRADERESPONSE
StartRequest = _reflection.GeneratedProtocolMessageType('StartRequest', (_message.Message,), dict(
DESCRIPTOR = _STARTREQUEST,
__module__ = 'mysqlctl_pb2'
# @@protoc_insertion_point(class_scope:mysqlctl.StartRequest)
))
_sym_db.RegisterMessage(StartRequest)
StartResponse = _reflection.GeneratedProtocolMessageType('StartResponse', (_message.Message,), dict(
DESCRIPTOR = _STARTRESPONSE,
__module__ = 'mysqlctl_pb2'
# @@protoc_insertion_point(class_scope:mysqlctl.StartResponse)
))
_sym_db.RegisterMessage(StartResponse)
ShutdownRequest = _reflection.GeneratedProtocolMessageType('ShutdownRequest', (_message.Message,), dict(
DESCRIPTOR = _SHUTDOWNREQUEST,
__module__ = 'mysqlctl_pb2'
# @@protoc_insertion_point(class_scope:mysqlctl.ShutdownRequest)
))
_sym_db.RegisterMessage(ShutdownRequest)
ShutdownResponse = _reflection.GeneratedProtocolMessageType('ShutdownResponse', (_message.Message,), dict(
DESCRIPTOR = _SHUTDOWNRESPONSE,
__module__ = 'mysqlctl_pb2'
# @@protoc_insertion_point(class_scope:mysqlctl.ShutdownResponse)
))
_sym_db.RegisterMessage(ShutdownResponse)
RunMysqlUpgradeRequest = _reflection.GeneratedProtocolMessageType('RunMysqlUpgradeRequest', (_message.Message,), dict(
DESCRIPTOR = _RUNMYSQLUPGRADEREQUEST,
__module__ = 'mysqlctl_pb2'
# @@protoc_insertion_point(class_scope:mysqlctl.RunMysqlUpgradeRequest)
))
_sym_db.RegisterMessage(RunMysqlUpgradeRequest)
RunMysqlUpgradeResponse = _reflection.GeneratedProtocolMessageType('RunMysqlUpgradeResponse', (_message.Message,), dict(
DESCRIPTOR = _RUNMYSQLUPGRADERESPONSE,
__module__ = 'mysqlctl_pb2'
# @@protoc_insertion_point(class_scope:mysqlctl.RunMysqlUpgradeResponse)
))
_sym_db.RegisterMessage(RunMysqlUpgradeResponse)
import abc
from grpc.early_adopter import implementations
from grpc.framework.alpha import utilities
class EarlyAdopterMysqlCtlServicer(object):
"""<fill me in later!>"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def Start(self, request, context):
raise NotImplementedError()
@abc.abstractmethod
def Shutdown(self, request, context):
raise NotImplementedError()
@abc.abstractmethod
def RunMysqlUpgrade(self, request, context):
raise NotImplementedError()
class EarlyAdopterMysqlCtlServer(object):
"""<fill me in later!>"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def start(self):
raise NotImplementedError()
@abc.abstractmethod
def stop(self):
raise NotImplementedError()
class EarlyAdopterMysqlCtlStub(object):
"""<fill me in later!>"""
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def Start(self, request):
raise NotImplementedError()
Start.async = None
@abc.abstractmethod
def Shutdown(self, request):
raise NotImplementedError()
Shutdown.async = None
@abc.abstractmethod
def RunMysqlUpgrade(self, request):
raise NotImplementedError()
RunMysqlUpgrade.async = None
def early_adopter_create_MysqlCtl_server(servicer, port, root_certificates, key_chain_pairs):
  import mysqlctl_pb2
method_service_descriptions = {
"RunMysqlUpgrade": utilities.unary_unary_service_description(
servicer.RunMysqlUpgrade,
mysqlctl_pb2.RunMysqlUpgradeRequest.FromString,
mysqlctl_pb2.RunMysqlUpgradeResponse.SerializeToString,
),
"Shutdown": utilities.unary_unary_service_description(
servicer.Shutdown,
mysqlctl_pb2.ShutdownRequest.FromString,
mysqlctl_pb2.ShutdownResponse.SerializeToString,
),
"Start": utilities.unary_unary_service_description(
servicer.Start,
mysqlctl_pb2.StartRequest.FromString,
mysqlctl_pb2.StartResponse.SerializeToString,
),
}
return implementations.secure_server("mysqlctl.MysqlCtl", method_service_descriptions, port, root_certificates, key_chain_pairs)
def early_adopter_create_MysqlCtl_stub(host, port):
  import mysqlctl_pb2
method_invocation_descriptions = {
"RunMysqlUpgrade": utilities.unary_unary_invocation_description(
mysqlctl_pb2.RunMysqlUpgradeRequest.SerializeToString,
mysqlctl_pb2.RunMysqlUpgradeResponse.FromString,
),
"Shutdown": utilities.unary_unary_invocation_description(
mysqlctl_pb2.ShutdownRequest.SerializeToString,
mysqlctl_pb2.ShutdownResponse.FromString,
),
"Start": utilities.unary_unary_invocation_description(
mysqlctl_pb2.StartRequest.SerializeToString,
mysqlctl_pb2.StartResponse.FromString,
),
}
return implementations.insecure_stub("mysqlctl.MysqlCtl", method_invocation_descriptions, host, port)
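# Usage sketch, not part of the generated output (hypothetical host/port; the
# grpc "early adopter" API consumed here is long obsolete, and the exact call
# signature below is an assumption):
#
#     stub = early_adopter_create_MysqlCtl_stub('localhost', 15999)
#     with stub:
#         response = stub.Start(mysqlctl_pb2.StartRequest(), 10)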
# @@protoc_insertion_point(module_scope)
| bsd-3-clause |
quodlibet/mutagen | tests/test_asf.py | 1 | 23454 |
import os
import warnings
from io import BytesIO
from mutagen.asf import ASF, ASFHeaderError, ASFValue, UNICODE, DWORD, QWORD
from mutagen.asf import BOOL, WORD, BYTEARRAY, GUID
from mutagen.asf._util import guid2bytes, bytes2guid
from mutagen.asf._objects import ContentDescriptionObject, \
ExtendedContentDescriptionObject, HeaderExtensionObject, \
MetadataObject, MetadataLibraryObject, CodecListObject, PaddingObject, \
HeaderObject
from mutagen.asf import ASFUnicodeAttribute, ASFError, ASFByteArrayAttribute, \
ASFBoolAttribute, ASFDWordAttribute, ASFQWordAttribute, ASFWordAttribute, \
ASFGUIDAttribute
from tests import TestCase, DATA_DIR, get_temp_copy
class TASFFile(TestCase):
def test_not_my_file(self):
self.failUnlessRaises(
ASFHeaderError, ASF,
os.path.join(DATA_DIR, "empty.ogg"))
self.failUnlessRaises(
ASFHeaderError, ASF,
os.path.join(DATA_DIR, "click.mpc"))
class TASFMisc(TestCase):
def test_guid(self):
ex = "75B22633-668E-11CF-A6D9-00AA0062CE6C"
b = guid2bytes(ex)
self.assertEqual(len(b), 16)
self.assertTrue(isinstance(b, bytes))
self.assertEqual(bytes2guid(b), ex)
class TASFInfo(TestCase):
def setUp(self):
# WMA 9.1 64kbps CBR 48khz
self.wma1 = ASF(os.path.join(DATA_DIR, "silence-1.wma"))
# WMA 9.1 Professional 192kbps VBR 44khz
self.wma2 = ASF(os.path.join(DATA_DIR, "silence-2.wma"))
# WMA 9.1 Lossless 44khz
self.wma3 = ASF(os.path.join(DATA_DIR, "silence-3.wma"))
def test_length(self):
self.failUnlessAlmostEqual(self.wma1.info.length, 3.7, 1)
self.failUnlessAlmostEqual(self.wma2.info.length, 3.7, 1)
self.failUnlessAlmostEqual(self.wma3.info.length, 3.7, 1)
def test_bitrate(self):
self.failUnlessEqual(self.wma1.info.bitrate // 1000, 64)
self.failUnlessEqual(self.wma2.info.bitrate // 1000, 38)
self.failUnlessEqual(self.wma3.info.bitrate // 1000, 58)
def test_sample_rate(self):
self.failUnlessEqual(self.wma1.info.sample_rate, 48000)
self.failUnlessEqual(self.wma2.info.sample_rate, 44100)
self.failUnlessEqual(self.wma3.info.sample_rate, 44100)
def test_channels(self):
self.failUnlessEqual(self.wma1.info.channels, 2)
self.failUnlessEqual(self.wma2.info.channels, 2)
self.failUnlessEqual(self.wma3.info.channels, 2)
def test_codec_type(self):
self.assertEqual(self.wma1.info.codec_type,
"Windows Media Audio 9 Standard")
self.assertEqual(self.wma2.info.codec_type,
"Windows Media Audio 9 Professional")
self.assertEqual(self.wma3.info.codec_type,
"Windows Media Audio 9 Lossless")
def test_codec_name(self):
self.assertEqual(self.wma1.info.codec_name,
"Windows Media Audio 9.1")
self.assertEqual(self.wma2.info.codec_name,
"Windows Media Audio 9.1 Professional")
self.assertEqual(self.wma3.info.codec_name,
"Windows Media Audio 9.1 Lossless")
def test_codec_description(self):
self.assertEqual(self.wma1.info.codec_description,
"64 kbps, 48 kHz, stereo 2-pass CBR")
self.assertEqual(self.wma2.info.codec_description,
"192 kbps, 44 kHz, 2 channel 24 bit 2-pass VBR")
self.assertEqual(self.wma3.info.codec_description,
"VBR Quality 100, 44 kHz, 2 channel 16 bit 1-pass VBR")
def test_pprint(self):
self.assertTrue(self.wma1.info.pprint())
self.assertTrue(isinstance(self.wma1.info.pprint(), str))
class TASF(TestCase):
def setUp(self):
self.filename = get_temp_copy(self.original)
self.audio = ASF(self.filename)
def tearDown(self):
os.unlink(self.filename)
class TASFMixin(object):
def test_header_object_misc(self):
header = self.audio._header
header.pprint()
repr(header)
def test_delete(self):
self.audio["QL/Bla"] = u"Foooooooooooooooooo"
self.audio.save(padding=lambda x: 0)
filesize = os.path.getsize(self.audio.filename)
self.audio.delete()
self.assertTrue(os.path.getsize(self.audio.filename) < filesize)
def test_pprint(self):
self.failUnless(self.audio.pprint())
def set_key(self, key, value, result=None, expected=True):
self.audio[key] = value
self.audio.save()
self.audio = ASF(self.audio.filename)
self.failUnless(key in self.audio)
self.failUnless(key in self.audio.tags)
self.failUnless(key in self.audio.tags.keys())
self.failUnless(key in self.audio.tags.as_dict().keys())
newvalue = self.audio[key]
if isinstance(newvalue, list):
for a, b in zip(sorted(newvalue), sorted(result or value)):
self.failUnlessEqual(a, b)
else:
self.failUnlessEqual(self.audio[key], result or value)
def test_slice(self):
tags = self.audio.tags
tags.clear()
tags["Author"] = [u"Foo", u"Bar"]
self.assertEqual(tags[:], [("Author", "Foo"), ("Author", "Bar")])
del tags[:]
self.assertEqual(tags[:], [])
tags[:] = [("Author", "Baz")]
self.assertEqual(tags.items(), [("Author", ["Baz"])])
def test_iter(self):
self.assertEqual(next(iter(self.audio.tags)), ("Title", "test"))
self.assertEqual(list(self.audio.tags)[0], ("Title", "test"))
def test_contains(self):
self.failUnlessEqual("notatag" in self.audio.tags, False)
def test_inval_type(self):
self.failUnlessRaises(ValueError, ASFValue, "", 4242)
def test_repr(self):
repr(ASFValue(u"foo", UNICODE, stream=1, language=2))
def test_auto_guuid(self):
value = ASFValue(b'\x9eZl}\x89\xa2\xb5D\xb8\xa30\xfe', GUID)
self.set_key(u"WM/WMCollectionGroupID", value, [value])
def test_py3_bytes(self):
value = ASFValue(b'\xff\x00', BYTEARRAY)
self.set_key(u"QL/Something", [b'\xff\x00'], [value])
def test_set_invalid(self):
setitem = self.audio.__setitem__
self.assertRaises(TypeError, setitem, u"QL/Something", [object()])
# don't delete on error
setitem(u"QL/Foobar", [u"ok"])
self.assertRaises(TypeError, setitem, u"QL/Foobar", [object()])
self.assertEqual(self.audio[u"QL/Foobar"], [u"ok"])
def test_auto_unicode(self):
self.set_key(u"WM/AlbumTitle", u"foo",
[ASFValue(u"foo", UNICODE)])
def test_auto_unicode_list(self):
self.set_key(u"WM/AlbumTitle", [u"foo", u"bar"],
[ASFValue(u"foo", UNICODE), ASFValue(u"bar", UNICODE)])
def test_word(self):
self.set_key(u"WM/Track", ASFValue(24, WORD), [ASFValue(24, WORD)])
def test_auto_word(self):
self.set_key(u"WM/Track", 12,
[ASFValue(12, DWORD)])
def test_auto_word_list(self):
self.set_key(u"WM/Track", [12, 13],
[ASFValue(12, WORD), ASFValue(13, WORD)])
def test_auto_dword(self):
self.set_key(u"WM/Track", 12,
[ASFValue(12, DWORD)])
def test_auto_dword_list(self):
self.set_key(u"WM/Track", [12, 13],
[ASFValue(12, DWORD), ASFValue(13, DWORD)])
def test_auto_qword(self):
self.set_key(u"WM/Track", 12,
[ASFValue(12, QWORD)])
def test_auto_qword_list(self):
self.set_key(u"WM/Track", [12, 13],
[ASFValue(12, QWORD), ASFValue(13, QWORD)])
def test_auto_bool(self):
self.set_key(u"IsVBR", True,
[ASFValue(True, BOOL)])
def test_auto_bool_list(self):
self.set_key(u"IsVBR", [True, False],
[ASFValue(True, BOOL), ASFValue(False, BOOL)])
def test_basic_tags(self):
self.set_key("Title", "Wheeee", ["Wheeee"])
self.set_key("Author", "Whoooo", ["Whoooo"])
self.set_key("Copyright", "Whaaaa", ["Whaaaa"])
self.set_key("Description", "Wii", ["Wii"])
self.set_key("Rating", "5", ["5"])
def test_stream(self):
self.audio["QL/OneHasStream"] = [
ASFValue("Whee", UNICODE, stream=2),
ASFValue("Whee", UNICODE),
]
self.audio["QL/AllHaveStream"] = [
ASFValue("Whee", UNICODE, stream=1),
ASFValue("Whee", UNICODE, stream=2),
]
self.audio["QL/NoStream"] = ASFValue("Whee", UNICODE)
self.audio.save()
self.audio = ASF(self.audio.filename)
self.failUnlessEqual(self.audio["QL/NoStream"][0].stream, None)
self.failUnlessEqual(self.audio["QL/OneHasStream"][1].stream, 2)
self.failUnlessEqual(self.audio["QL/OneHasStream"][0].stream, None)
self.failUnlessEqual(self.audio["QL/AllHaveStream"][0].stream, 1)
self.failUnlessEqual(self.audio["QL/AllHaveStream"][1].stream, 2)
def test_language(self):
self.failIf("QL/OneHasLang" in self.audio)
self.failIf("QL/AllHaveLang" in self.audio)
self.audio["QL/OneHasLang"] = [
ASFValue("Whee", UNICODE, language=2),
ASFValue("Whee", UNICODE),
]
self.audio["QL/AllHaveLang"] = [
ASFValue("Whee", UNICODE, language=1),
ASFValue("Whee", UNICODE, language=2),
]
self.audio["QL/NoLang"] = ASFValue("Whee", UNICODE)
self.audio.save()
self.audio = ASF(self.audio.filename)
self.failUnlessEqual(self.audio["QL/NoLang"][0].language, None)
self.failUnlessEqual(self.audio["QL/OneHasLang"][1].language, 2)
self.failUnlessEqual(self.audio["QL/OneHasLang"][0].language, None)
self.failUnlessEqual(self.audio["QL/AllHaveLang"][0].language, 1)
self.failUnlessEqual(self.audio["QL/AllHaveLang"][1].language, 2)
def test_lang_and_stream_mix(self):
self.audio["QL/Mix"] = [
ASFValue("Whee", UNICODE, stream=1),
ASFValue("Whee", UNICODE, language=2),
ASFValue("Whee", UNICODE, stream=3, language=4),
ASFValue("Whee", UNICODE),
]
self.audio.save()
self.audio = ASF(self.audio.filename)
# order not preserved here because they end up in different objects.
self.failUnlessEqual(self.audio["QL/Mix"][1].language, None)
self.failUnlessEqual(self.audio["QL/Mix"][1].stream, 1)
self.failUnlessEqual(self.audio["QL/Mix"][2].language, 2)
self.failUnlessEqual(self.audio["QL/Mix"][2].stream, 0)
self.failUnlessEqual(self.audio["QL/Mix"][3].language, 4)
self.failUnlessEqual(self.audio["QL/Mix"][3].stream, 3)
self.failUnlessEqual(self.audio["QL/Mix"][0].language, None)
self.failUnlessEqual(self.audio["QL/Mix"][0].stream, None)
def test_data_size(self):
v = ASFValue("", UNICODE, data=b'4\xd8\x1e\xdd\x00\x00')
self.failUnlessEqual(v.data_size(), len(v._render()))
class TASFAttributes(TestCase):
def test_ASFUnicodeAttribute(self):
self.assertRaises(TypeError, ASFUnicodeAttribute, b"\xff")
self.assertRaises(ASFError, ASFUnicodeAttribute, data=b"\x00")
self.assertEqual(ASFUnicodeAttribute(u"foo").value, u"foo")
assert ASFUnicodeAttribute(data=b"") == u""
def test_ASFUnicodeAttribute_dunder(self):
attr = ASFUnicodeAttribute(u"foo")
self.assertEqual(bytes(attr), b"f\x00o\x00o\x00")
self.assertEqual(str(attr), u"foo")
self.assertEqual(repr(attr), "ASFUnicodeAttribute('foo')")
self.assertRaises(TypeError, int, attr)
def test_ASFByteArrayAttribute(self):
self.assertRaises(TypeError, ASFByteArrayAttribute, u"foo")
self.assertEqual(ASFByteArrayAttribute(data=b"\xff").value, b"\xff")
def test_ASFByteArrayAttribute_dunder(self):
attr = ASFByteArrayAttribute(data=b"\xff")
self.assertEqual(bytes(attr), b"\xff")
self.assertEqual(str(attr), u"[binary data (1 bytes)]")
self.assertEqual(repr(attr), r"ASFByteArrayAttribute(b'\xff')")
self.assertRaises(TypeError, int, attr)
def test_ASFByteArrayAttribute_compat(self):
ba = ASFByteArrayAttribute()
ba.value = b"\xff"
self.assertEqual(ba._render(), b"\xff")
def test_ASFGUIDAttribute(self):
self.assertEqual(ASFGUIDAttribute(data=b"\xff").value, b"\xff")
self.assertRaises(TypeError, ASFGUIDAttribute, u"foo")
def test_ASFGUIDAttribute_dunder(self):
attr = ASFGUIDAttribute(data=b"\xff")
self.assertEqual(bytes(attr), b"\xff")
self.assertEqual(str(attr), u"b'\\xff'")
self.assertEqual(repr(attr), "ASFGUIDAttribute(b'\\xff')")
self.assertRaises(TypeError, int, attr)
def test_ASFBoolAttribute(self):
self.assertEqual(
ASFBoolAttribute(data=b"\x01\x00\x00\x00").value, True)
self.assertEqual(
ASFBoolAttribute(data=b"\x00\x00\x00\x00").value, False)
self.assertEqual(ASFBoolAttribute(False).value, False)
def test_ASFBoolAttribute_dunder(self):
attr = ASFBoolAttribute(False)
self.assertEqual(bytes(attr), b"False")
self.assertEqual(str(attr), u"False")
self.assertEqual(repr(attr), "ASFBoolAttribute(False)")
self.assertRaises(TypeError, int, attr)
def test_ASFWordAttribute(self):
self.assertEqual(
ASFWordAttribute(data=b"\x00" * 2).value, 0)
self.assertEqual(
ASFWordAttribute(data=b"\xff" * 2).value, 2 ** 16 - 1)
self.assertRaises(ValueError, ASFWordAttribute, -1)
self.assertRaises(ValueError, ASFWordAttribute, 2 ** 16)
def test_ASFWordAttribute_dunder(self):
attr = ASFWordAttribute(data=b"\x00" * 2)
self.assertEqual(bytes(attr), b"0")
self.assertEqual(str(attr), u"0")
self.assertEqual(repr(attr), "ASFWordAttribute(0)")
self.assertEqual(int(attr), 0)
def test_ASFDWordAttribute(self):
self.assertEqual(
ASFDWordAttribute(data=b"\x00" * 4).value, 0)
self.assertEqual(
ASFDWordAttribute(data=b"\xff" * 4).value, 2 ** 32 - 1)
self.assertRaises(ValueError, ASFDWordAttribute, -1)
self.assertRaises(ValueError, ASFDWordAttribute, 2 ** 32)
def test_ASFDWordAttribute_dunder(self):
attr = ASFDWordAttribute(data=b"\x00" * 4)
self.assertEqual(bytes(attr), b"0")
self.assertEqual(str(attr), u"0")
self.assertEqual(repr(attr).replace("0L", "0"), "ASFDWordAttribute(0)")
self.assertEqual(int(attr), 0)
def test_ASFQWordAttribute(self):
self.assertEqual(
ASFQWordAttribute(data=b"\x00" * 8).value, 0)
self.assertEqual(
ASFQWordAttribute(data=b"\xff" * 8).value, 2 ** 64 - 1)
self.assertRaises(ValueError, ASFQWordAttribute, -1)
self.assertRaises(ValueError, ASFQWordAttribute, 2 ** 64)
def test_ASFQWordAttribute_dunder(self):
attr = ASFQWordAttribute(data=b"\x00" * 8)
self.assertEqual(bytes(attr), b"0")
self.assertEqual(str(attr), u"0")
self.assertEqual(repr(attr).replace("0L", "0"), "ASFQWordAttribute(0)")
self.assertEqual(int(attr), 0)
class TASFTags1(TASF, TASFMixin):
original = os.path.join(DATA_DIR, "silence-1.wma")
class TASFTags2(TASF, TASFMixin):
original = os.path.join(DATA_DIR, "silence-2.wma")
class TASFTags3(TASF, TASFMixin):
original = os.path.join(DATA_DIR, "silence-3.wma")
class TASFIssue29(TestCase):
original = os.path.join(DATA_DIR, "issue_29.wma")
def setUp(self):
self.filename = get_temp_copy(self.original)
self.audio = ASF(self.filename)
def tearDown(self):
os.unlink(self.filename)
def test_pprint(self):
self.audio.pprint()
def test_issue_29_description(self):
self.audio["Description"] = "Hello"
self.audio.save()
audio = ASF(self.filename)
self.failUnless("Description" in audio)
self.failUnlessEqual(audio["Description"], ["Hello"])
del(audio["Description"])
self.failIf("Description" in audio)
audio.save()
audio = ASF(self.filename)
self.failIf("Description" in audio)
class TASFObjects(TestCase):
filename = os.path.join(DATA_DIR, "silence-1.wma")
def test_invalid_header(self):
with warnings.catch_warnings():
warnings.simplefilter("ignore")
asf = ASF()
fileobj = BytesIO(
b"0&\xb2u\x8ef\xcf\x11\xa6\xd9\x00\xaa\x00b\xcel\x19\xbf\x01\x00"
b"\x00\x00\x00\x00\x07\x00\x00\x00\x01\x02")
self.assertRaises(
ASFHeaderError, HeaderObject.parse_full, asf, fileobj)
class TASFAttrDest(TestCase):
original = os.path.join(DATA_DIR, "silence-1.wma")
def setUp(self):
self.filename = get_temp_copy(self.original)
audio = ASF(self.filename)
audio.clear()
audio.save()
def tearDown(self):
os.unlink(self.filename)
def test_author(self):
audio = ASF(self.filename)
values = [u"Foo", u"Bar", u"Baz"]
audio["Author"] = values
audio.save()
self.assertEqual(
list(audio.to_content_description.items()), [(u"Author", u"Foo")])
self.assertEqual(
audio.to_metadata_library,
[(u"Author", u"Bar"), (u"Author", u"Baz")])
new = ASF(self.filename)
self.assertEqual(new["Author"], values)
def test_author_long(self):
audio = ASF(self.filename)
# 2 ** 16 - 2 bytes encoded text + 2 bytes termination
just_small_enough = u"a" * (((2 ** 16) // 2) - 2)
audio["Author"] = [just_small_enough]
audio.save()
self.assertTrue(audio.to_content_description)
self.assertFalse(audio.to_metadata_library)
audio["Author"] = [just_small_enough + u"a"]
audio.save()
self.assertFalse(audio.to_content_description)
self.assertTrue(audio.to_metadata_library)
def test_multi_order(self):
audio = ASF(self.filename)
audio["Author"] = [u"a", u"b", u"c"]
audio.save()
audio = ASF(self.filename)
self.assertEqual(audio["Author"], [u"a", u"b", u"c"])
def test_multi_order_extended(self):
audio = ASF(self.filename)
audio["WM/Composer"] = [u"a", u"b", u"c"]
audio.save()
audio = ASF(self.filename)
self.assertEqual(audio["WM/Composer"], [u"a", u"b", u"c"])
def test_non_str(self):
audio = ASF(self.filename)
audio["Author"] = [42]
audio.save()
self.assertFalse(audio.to_content_description)
new = ASF(self.filename)
self.assertEqual(new["Author"], [42])
def test_empty(self):
audio = ASF(self.filename)
audio["Author"] = [u"", u""]
audio["Title"] = [u""]
audio["Copyright"] = []
audio.save()
new = ASF(self.filename)
self.assertEqual(new["Author"], [u"", u""])
self.assertEqual(new["Title"], [u""])
self.assertFalse("Copyright" in new)
class TASFLargeValue(TestCase):
original = os.path.join(DATA_DIR, "silence-1.wma")
def setUp(self):
self.filename = get_temp_copy(self.original)
def tearDown(self):
os.unlink(self.filename)
def test_save_small_bytearray(self):
audio = ASF(self.filename)
audio["QL/LargeObject"] = [ASFValue(b"." * 0xFFFF, BYTEARRAY)]
audio.save()
self.failIf(
"QL/LargeObject" not in audio.to_extended_content_description)
self.failIf("QL/LargeObject" in audio.to_metadata)
self.failIf("QL/LargeObject" in dict(audio.to_metadata_library))
def test_save_large_bytearray(self):
audio = ASF(self.filename)
audio["QL/LargeObject"] = [ASFValue(b"." * (0xFFFF + 1), BYTEARRAY)]
audio.save()
self.failIf("QL/LargeObject" in audio.to_extended_content_description)
self.failIf("QL/LargeObject" in audio.to_metadata)
self.failIf("QL/LargeObject" not in dict(audio.to_metadata_library))
def test_save_small_string(self):
audio = ASF(self.filename)
audio["QL/LargeObject"] = [ASFValue("." * (0x7FFF - 1), UNICODE)]
audio.save()
self.failIf(
"QL/LargeObject" not in audio.to_extended_content_description)
self.failIf("QL/LargeObject" in audio.to_metadata)
self.failIf("QL/LargeObject" in dict(audio.to_metadata_library))
def test_save_large_string(self):
audio = ASF(self.filename)
audio["QL/LargeObject"] = [ASFValue("." * 0x7FFF, UNICODE)]
audio.save()
self.failIf("QL/LargeObject" in audio.to_extended_content_description)
self.failIf("QL/LargeObject" in audio.to_metadata)
self.failIf("QL/LargeObject" not in dict(audio.to_metadata_library))
def test_save_guid(self):
# https://github.com/quodlibet/mutagen/issues/81
audio = ASF(self.filename)
audio["QL/GuidObject"] = [ASFValue(b" " * 16, GUID)]
audio.save()
self.failIf("QL/GuidObject" in audio.to_extended_content_description)
self.failIf("QL/GuidObject" in audio.to_metadata)
self.failIf("QL/GuidObject" not in dict(audio.to_metadata_library))
class TASFSave(TestCase):
# https://github.com/quodlibet/mutagen/issues/81#issuecomment-207014936
original = os.path.join(DATA_DIR, "silence-1.wma")
def setUp(self):
self.filename = get_temp_copy(self.original)
self.audio = ASF(self.filename)
def tearDown(self):
os.unlink(self.filename)
def test_save_filename(self):
self.audio.save(self.audio.filename)
def test_multiple_delete(self):
self.audio["large_value1"] = "#" * 50000
self.audio.save()
audio = ASF(self.filename)
for tag in audio.keys():
            del audio[tag]
audio.save()
def test_readd_objects(self):
header = self.audio._header
del header.objects[:]
self.audio.save()
self.assertTrue(header.get_child(ContentDescriptionObject.GUID))
self.assertTrue(
header.get_child(ExtendedContentDescriptionObject.GUID))
self.assertTrue(header.get_child(HeaderExtensionObject.GUID))
ext = header.get_child(HeaderExtensionObject.GUID)
self.assertTrue(ext.get_child(MetadataObject.GUID))
self.assertTrue(ext.get_child(MetadataLibraryObject.GUID))
def test_keep_others(self):
self.audio.save()
new = ASF(self.filename)
self.assertTrue(new._header.get_child(CodecListObject.GUID))
def test_padding(self):
old_tags = sorted(self.audio.items())
def get_padding(fn):
header = ASF(fn)._header
return len(header.get_child(PaddingObject.GUID).data)
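        # save(padding=...) hands the callback a PaddingInfo and uses its
        # return value as the new padding size in bytes, which is what the
        # loop below asserts for a range of sizes.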
for i in [0, 1, 2, 3, 42, 100, 5000, 30432, 1]:
def padding_cb(info):
self.assertEqual(info.size, 30432)
return i
self.audio.save(padding=padding_cb)
self.assertEqual(get_padding(self.filename), i)
last = ASF(self.filename)
self.assertEqual(sorted(last.items()), old_tags)
| gpl-2.0 |
transientskp/tkp | tkp/utility/containers.py | 3 | 2605 | """
Container classes for the TKP pipeline.
These provide convenient means of marshalling the various types of data --
lightcurves, detections, sources, etc -- that the pipeline must handle.
"""
import logging
logger = logging.getLogger(__name__)
class ObjectContainer(list):
"""A container class for objects.
What sort of objects? Well, anything that has a position and we
want to keep lists of, really. So detections (ie, an individual
source measurement on an image), sources (ie all the detections of
a given object in a given image stack) and lightcurves (ie, all
the sources associated with a given object through time).
You probably don't want to use this on it's own: see ExtractionResults,
TargetList or source for more useful derived classes.
"""
def closest_to(self, pix_x, pix_y):
        distance, target = None, None
logger.debug("Beginning a search for objects near %.1f, %.1f: ",
pix_x, pix_y)
logger.debug("%s contains %d objects", str(self), len(self))
for obj in self:
tmpdist = (pix_x - obj.x)**2 + (pix_y - obj.y)**2
logger.debug("Object at %f, %f", obj.x, obj.y)
logger.debug("Has distance %f", tmpdist)
            # ``None`` is the sentinel: a squared distance of 0 (an exact
            # positional match) is falsy, so testing ``not distance`` would
            # wrongly restart the search on later objects.
            if distance is None or tmpdist < distance:
                distance = tmpdist
                target = obj
logger.debug("Best distance is now %f", distance)
logger.debug("From object %s", str(target))
        if distance is None:
return (target, distance)
else:
return (target, distance**0.5)
def __setslice__(self, section, items):
"""
Not implemented.
"""
raise NotImplementedError
def __iadd__(self, y):
"""
Not implemented.
"""
raise NotImplementedError
def __imul__(self, y):
"""
Not implemented.
"""
raise NotImplementedError
def __mul__(self, y):
"""
Not implemented.
"""
raise NotImplementedError
def __rmul__(self, y):
"""
Not implemented.
"""
raise NotImplementedError
def __str__(self):
return 'Container: ' + str(len(self)) + ' object(s).'
class ExtractionResults(ObjectContainer):
"""Container for the results of running source extraction on an
ImageData object"""
def __str__(self):
return 'ExtractionResults: ' + str(len(self)) + ' detection(s).'
| bsd-2-clause |
shail2810/nova | nova/tests/unit/api/openstack/compute/test_simple_tenant_usage.py | 17 | 13935 | # Copyright 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import mock
from oslo_utils import timeutils
from six.moves import range
import webob
from nova.api.openstack.compute.legacy_v2.contrib import simple_tenant_usage as \
simple_tenant_usage_v2
from nova.api.openstack.compute import simple_tenant_usage as \
simple_tenant_usage_v21
from nova.compute import vm_states
from nova import context
from nova import db
from nova import exception
from nova import objects
from nova.openstack.common import policy as common_policy
from nova import policy
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_flavor
SERVERS = 5
TENANTS = 2
HOURS = 24
ROOT_GB = 10
EPHEMERAL_GB = 20
MEMORY_MB = 1024
VCPUS = 2
NOW = timeutils.utcnow()
START = NOW - datetime.timedelta(hours=HOURS)
STOP = NOW
FAKE_INST_TYPE = {'id': 1,
'vcpus': VCPUS,
'root_gb': ROOT_GB,
'ephemeral_gb': EPHEMERAL_GB,
'memory_mb': MEMORY_MB,
'name': 'fakeflavor',
'flavorid': 'foo',
'rxtx_factor': 1.0,
'vcpu_weight': 1,
'swap': 0,
'created_at': None,
'updated_at': None,
'deleted_at': None,
'deleted': 0,
'disabled': False,
'is_public': True,
'extra_specs': {'foo': 'bar'}}
def get_fake_db_instance(start, end, instance_id, tenant_id,
vm_state=vm_states.ACTIVE):
inst = fakes.stub_instance(
id=instance_id,
uuid='00000000-0000-0000-0000-00000000000000%02d' % instance_id,
image_ref='1',
project_id=tenant_id,
user_id='fakeuser',
display_name='name',
flavor_id=FAKE_INST_TYPE['id'],
launched_at=start,
terminated_at=end,
vm_state=vm_state,
memory_mb=MEMORY_MB,
vcpus=VCPUS,
root_gb=ROOT_GB,
ephemeral_gb=EPHEMERAL_GB,)
return inst
def fake_instance_get_active_by_window_joined(context, begin, end,
project_id, host, columns_to_join):
return [get_fake_db_instance(START,
STOP,
x,
project_id if project_id else
"faketenant_%s" % (x / SERVERS))
for x in range(TENANTS * SERVERS)]
@mock.patch.object(db, 'instance_get_active_by_window_joined',
fake_instance_get_active_by_window_joined)
class SimpleTenantUsageTestV21(test.TestCase):
policy_rule_prefix = "os_compute_api:os-simple-tenant-usage"
controller = simple_tenant_usage_v21.SimpleTenantUsageController()
def setUp(self):
super(SimpleTenantUsageTestV21, self).setUp()
self.admin_context = context.RequestContext('fakeadmin_0',
'faketenant_0',
is_admin=True)
self.user_context = context.RequestContext('fakeadmin_0',
'faketenant_0',
is_admin=False)
self.alt_user_context = context.RequestContext('fakeadmin_0',
'faketenant_1',
is_admin=False)
def _test_verify_index(self, start, stop):
req = fakes.HTTPRequest.blank('?start=%s&end=%s' %
(start.isoformat(), stop.isoformat()))
req.environ['nova.context'] = self.admin_context
res_dict = self.controller.index(req)
usages = res_dict['tenant_usages']
for i in range(TENANTS):
self.assertEqual(int(usages[i]['total_hours']),
SERVERS * HOURS)
self.assertEqual(int(usages[i]['total_local_gb_usage']),
SERVERS * (ROOT_GB + EPHEMERAL_GB) * HOURS)
self.assertEqual(int(usages[i]['total_memory_mb_usage']),
SERVERS * MEMORY_MB * HOURS)
self.assertEqual(int(usages[i]['total_vcpus_usage']),
SERVERS * VCPUS * HOURS)
self.assertFalse(usages[i].get('server_usages'))
def test_verify_index(self):
self._test_verify_index(START, STOP)
def test_verify_index_future_end_time(self):
future = NOW + datetime.timedelta(hours=HOURS)
self._test_verify_index(START, future)
def test_verify_show(self):
self._test_verify_show(START, STOP)
def test_verify_show_future_end_time(self):
future = NOW + datetime.timedelta(hours=HOURS)
self._test_verify_show(START, future)
def _get_tenant_usages(self, detailed=''):
req = fakes.HTTPRequest.blank('?detailed=%s&start=%s&end=%s' %
(detailed, START.isoformat(), STOP.isoformat()))
req.environ['nova.context'] = self.admin_context
# Make sure that get_active_by_window_joined is only called with
# expected_attrs=['flavor'].
orig_get_active_by_window_joined = (
objects.InstanceList.get_active_by_window_joined)
def fake_get_active_by_window_joined(context, begin, end=None,
project_id=None, host=None,
expected_attrs=None,
use_slave=False):
self.assertEqual(['flavor'], expected_attrs)
return orig_get_active_by_window_joined(context, begin, end,
project_id, host,
expected_attrs, use_slave)
with mock.patch.object(objects.InstanceList,
'get_active_by_window_joined',
side_effect=fake_get_active_by_window_joined):
res_dict = self.controller.index(req)
return res_dict['tenant_usages']
def test_verify_detailed_index(self):
usages = self._get_tenant_usages('1')
for i in range(TENANTS):
servers = usages[i]['server_usages']
for j in range(SERVERS):
self.assertEqual(int(servers[j]['hours']), HOURS)
def test_verify_simple_index(self):
usages = self._get_tenant_usages(detailed='0')
for i in range(TENANTS):
self.assertIsNone(usages[i].get('server_usages'))
def test_verify_simple_index_empty_param(self):
# NOTE(lzyeval): 'detailed=&start=..&end=..'
usages = self._get_tenant_usages()
for i in range(TENANTS):
self.assertIsNone(usages[i].get('server_usages'))
def _test_verify_show(self, start, stop):
tenant_id = 1
req = fakes.HTTPRequest.blank('?start=%s&end=%s' %
(start.isoformat(), stop.isoformat()))
req.environ['nova.context'] = self.user_context
res_dict = self.controller.show(req, tenant_id)
usage = res_dict['tenant_usage']
servers = usage['server_usages']
self.assertEqual(len(usage['server_usages']), TENANTS * SERVERS)
uuids = ['00000000-0000-0000-0000-00000000000000%02d' %
x for x in range(SERVERS)]
for j in range(SERVERS):
delta = STOP - START
uptime = delta.days * 24 * 3600 + delta.seconds
self.assertEqual(int(servers[j]['uptime']), uptime)
self.assertEqual(int(servers[j]['hours']), HOURS)
self.assertIn(servers[j]['instance_id'], uuids)
def test_verify_show_cannot_view_other_tenant(self):
req = fakes.HTTPRequest.blank('?start=%s&end=%s' %
(START.isoformat(), STOP.isoformat()))
req.environ['nova.context'] = self.alt_user_context
rules = {
self.policy_rule_prefix + ":show":
common_policy.parse_rule([
["role:admin"], ["project_id:%(project_id)s"]
])
}
policy.set_rules(rules)
try:
self.assertRaises(exception.PolicyNotAuthorized,
self.controller.show, req, 'faketenant_0')
finally:
policy.reset()
def test_get_tenants_usage_with_bad_start_date(self):
future = NOW + datetime.timedelta(hours=HOURS)
req = fakes.HTTPRequest.blank('?start=%s&end=%s' %
(future.isoformat(), NOW.isoformat()))
req.environ['nova.context'] = self.user_context
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.show, req, 'faketenant_0')
def test_get_tenants_usage_with_invalid_start_date(self):
req = fakes.HTTPRequest.blank('?start=%s&end=%s' %
("xxxx", NOW.isoformat()))
req.environ['nova.context'] = self.user_context
self.assertRaises(webob.exc.HTTPBadRequest,
self.controller.show, req, 'faketenant_0')
def _test_get_tenants_usage_with_one_date(self, date_url_param):
req = fakes.HTTPRequest.blank('?%s' % date_url_param)
req.environ['nova.context'] = self.user_context
res = self.controller.show(req, 'faketenant_0')
self.assertIn('tenant_usage', res)
def test_get_tenants_usage_with_no_start_date(self):
self._test_get_tenants_usage_with_one_date(
'end=%s' % (NOW + datetime.timedelta(5)).isoformat())
def test_get_tenants_usage_with_no_end_date(self):
self._test_get_tenants_usage_with_one_date(
'start=%s' % (NOW - datetime.timedelta(5)).isoformat())
class SimpleTenantUsageTestV2(SimpleTenantUsageTestV21):
policy_rule_prefix = "compute_extension:simple_tenant_usage"
controller = simple_tenant_usage_v2.SimpleTenantUsageController()
class SimpleTenantUsageControllerTestV21(test.TestCase):
controller = simple_tenant_usage_v21.SimpleTenantUsageController()
def setUp(self):
super(SimpleTenantUsageControllerTestV21, self).setUp()
self.context = context.RequestContext('fakeuser', 'fake-project')
self.baseinst = get_fake_db_instance(START, STOP, instance_id=1,
tenant_id=self.context.project_id,
vm_state=vm_states.DELETED)
# convert the fake instance dict to an object
flavor = fake_flavor.fake_flavor_obj(self.context, **FAKE_INST_TYPE)
self.inst_obj = objects.Instance._from_db_object(
self.context, objects.Instance(), self.baseinst)
self.inst_obj.flavor = flavor
@mock.patch('nova.objects.Instance.get_flavor',
side_effect=exception.NotFound())
def test_get_flavor_from_non_deleted_with_id_fails(self, fake_get_flavor):
# If an instance is not deleted and missing type information from
# instance.flavor, then that's a bug
self.assertRaises(exception.NotFound,
self.controller._get_flavor, self.context,
self.inst_obj, {})
@mock.patch('nova.objects.Instance.get_flavor',
side_effect=exception.NotFound())
def test_get_flavor_from_deleted_with_notfound(self, fake_get_flavor):
# If the flavor is not found from the instance and the instance is
# deleted, attempt to look it up from the DB and if found we're OK.
self.inst_obj.deleted = 1
flavor = self.controller._get_flavor(self.context, self.inst_obj, {})
self.assertEqual(objects.Flavor, type(flavor))
self.assertEqual(FAKE_INST_TYPE['id'], flavor.id)
@mock.patch('nova.objects.Instance.get_flavor',
side_effect=exception.NotFound())
def test_get_flavor_from_deleted_with_id_of_deleted(self, fake_get_flavor):
# Verify the legacy behavior of instance_type_id pointing to a
# missing type being non-fatal
self.inst_obj.deleted = 1
self.inst_obj.instance_type_id = 99
flavor = self.controller._get_flavor(self.context, self.inst_obj, {})
self.assertIsNone(flavor)
class SimpleTenantUsageControllerTestV2(SimpleTenantUsageControllerTestV21):
controller = simple_tenant_usage_v2.SimpleTenantUsageController()
class SimpleTenantUsageUtilsV21(test.NoDBTestCase):
simple_tenant_usage = simple_tenant_usage_v21
def test_valid_string(self):
dt = self.simple_tenant_usage.parse_strtime(
"2014-02-21T13:47:20.824060", "%Y-%m-%dT%H:%M:%S.%f")
self.assertEqual(datetime.datetime(
microsecond=824060, second=20, minute=47, hour=13,
day=21, month=2, year=2014), dt)
def test_invalid_string(self):
self.assertRaises(exception.InvalidStrTime,
self.simple_tenant_usage.parse_strtime,
"2014-02-21 13:47:20.824060",
"%Y-%m-%dT%H:%M:%S.%f")
class SimpleTenantUsageUtilsV2(SimpleTenantUsageUtilsV21):
simple_tenant_usage = simple_tenant_usage_v2
| apache-2.0 |
jmacmahon/invenio | modules/bibfield/lib/functions/is_type_num.py | 3 | 1214 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2004, 2005, 2006, 2007, 2008, 2010, 2011 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
def is_type_num(field):
if isinstance(field, list):
for value in field:
if not is_type_num(value):
return False
elif isinstance(field, dict):
for value in field.itervalues():
if not is_type_num(value):
return False
elif field:
try:
int(field)
except:
return False
return True
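# Illustrative behaviour of is_type_num (example values):
#
#     is_type_num('123')          # True
#     is_type_num(['1', 2, '3'])  # True
#     is_type_num({'a': '1x'})    # False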
| gpl-2.0 |
jeanlinux/calibre | src/calibre/ebooks/markdown/extensions/sane_lists.py | 46 | 1327 | """
Sane List Extension for Python-Markdown
=======================================
Modify the behavior of Lists in Python-Markdown to act in a sane manner.
In standard Markdown syntax, the following would constitute a single
ordered list. However, with this extension, the output would include
two lists, the first an ordered list and the second an unordered list.
1. ordered
2. list
* unordered
* list
Copyright 2011 - [Waylan Limberg](http://achinghead.com)
"""
from __future__ import absolute_import
from __future__ import unicode_literals
from . import Extension
from ..blockprocessors import OListProcessor, UListProcessor
import re
class SaneOListProcessor(OListProcessor):
CHILD_RE = re.compile(r'^[ ]{0,3}((\d+\.))[ ]+(.*)')
SIBLING_TAGS = ['ol']
class SaneUListProcessor(UListProcessor):
CHILD_RE = re.compile(r'^[ ]{0,3}(([*+-]))[ ]+(.*)')
SIBLING_TAGS = ['ul']
class SaneListExtension(Extension):
""" Add sane lists to Markdown. """
def extendMarkdown(self, md, md_globals):
""" Override existing Processors. """
md.parser.blockprocessors['olist'] = SaneOListProcessor(md.parser)
md.parser.blockprocessors['ulist'] = SaneUListProcessor(md.parser)
def makeExtension(configs={}):
return SaneListExtension(configs=configs)
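# Illustrative usage sketch (assuming the standalone Python-Markdown package;
# inside calibre the extension is loaded through calibre's bundled copy):
#
#     import markdown
#     md = markdown.Markdown(extensions=[SaneListExtension()])
#     html = md.convert('1. ordered\n2. list\n\n* unordered\n* list')
#     # yields an <ol> followed by a separate <ul> instead of one merged list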
| gpl-3.0 |
igorlimasan/pythonbirds | diagramas/testes/atores_testes.py | 10 | 47143 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from os import path
import sys
project_dir = path.dirname(__file__)
project_dir = path.join(project_dir, '..')
sys.path.append(project_dir)
import unittest
from unittest.case import TestCase
from atores import Ator, DESTRUIDO, ATIVO, Obstaculo, Porco, PassaroAmarelo, PassaroVermelho
class AtorTestes(TestCase):
def teste_valores_padrao(self):
        'Tests the default initial values of an Ator'
ator = Ator()
self.assertEqual(0, ator.x)
self.assertEqual(0, ator.y)
self.assertEqual(ATIVO, ator.status)
self.assertEqual('A', ator.caracter())
def teste_valores_passados_por_parametro(self):
        'Tests that values passed to the initializer are stored on the object'
ator = Ator(1, 2)
self.assertEqual(1, ator.x)
self.assertEqual(2, ator.y)
self.assertEqual(ATIVO, ator.status)
self.assertEqual('A', ator.caracter())
def teste_ator_posicao(self):
        'Verifies that a plain actor does not move, regardless of the game time'
ator = Ator()
x, y = ator.calcular_posicao(0)
self.assertEqual(0, x)
self.assertEqual(0, y)
ator = Ator(0.3, 0.5)
x, y = ator.calcular_posicao(10)
self.assertEqual(0.3, x)
self.assertEqual(0.5, y)
def teste_colisao_entre_atores_ativos(self):
"""
Teste de colisão entre dois atores
Inicialmente atores possuem status ATIVO. Ao se chocarem, ele muda para DESTRUIDO
A função assert_colisao_atores_ativos testa justamente se dois atore ativos se chocam quando estão em posições
vizinhas.
"""
        ator = Ator(2, 2)  # A freshly created Ator must have active status
ator2 = Ator(2, 2)
self.assert_colisao_atores_ativos(ator, ator2)
self.assert_colisao_atores_ativos(Ator(2, 2), Ator(2, 3))
self.assert_colisao_atores_ativos(Ator(2, 2), Ator(3, 3))
self.assert_colisao_atores_ativos(Ator(2, 2), Ator(3, 2))
self.assert_colisao_atores_ativos(Ator(2, 2), Ator(3, 1))
self.assert_colisao_atores_ativos(Ator(2, 2), Ator(2, 1))
self.assert_colisao_atores_ativos(Ator(2, 2), Ator(1, 1))
self.assert_colisao_atores_ativos(Ator(2, 2), Ator(1, 2))
self.assert_colisao_atores_ativos(Ator(2, 2), Ator(1, 3))
def teste_colisao_entre_atores_ativos_com_intervalo(self):
        # With interval 2, unlike the default 1, this collision should happen
self.assert_colisao_atores_ativos(Ator(2, 2), Ator(2, 4), 2)
def teste_nao_colisao_entre_atores_distantes(self):
        'Tests that there is no collision between distant actors'
self.assert_nao_colisao(Ator(2, 2), Ator(2, 4))
self.assert_nao_colisao(Ator(2, 2), Ator(3, 4))
self.assert_nao_colisao(Ator(2, 2), Ator(4, 2))
self.assert_nao_colisao(Ator(2, 2), Ator(3, 0))
self.assert_nao_colisao(Ator(2, 2), Ator(2, 0))
self.assert_nao_colisao(Ator(2, 2), Ator(0, 1))
self.assert_nao_colisao(Ator(2, 2), Ator(0, 2))
self.assert_nao_colisao(Ator(2, 2), Ator(0, 4))
def teste_colisao_somente_um_ator_destruido(self):
        'Tests that a destroyed actor cannot collide with any other actor, even when they are close'
ator = Ator(2, 2)
        ator.colidir(ator, 0)  # colliding the actor with itself to change its status to destroyed
ator2 = Ator(2, 2)
self.assert_nao_colisao(ator, ator2)
self.assert_nao_colisao(Ator(2, 3), ator)
self.assert_nao_colisao(Ator(3, 3), ator)
self.assert_nao_colisao(Ator(3, 2), ator)
self.assert_nao_colisao(Ator(3, 1), ator)
self.assert_nao_colisao(Ator(2, 1), ator)
self.assert_nao_colisao(Ator(1, 1), ator)
self.assert_nao_colisao(Ator(1, 2), ator)
self.assert_nao_colisao(Ator(1, 3), ator)
self.assert_nao_colisao(ator2, ator)
self.assert_nao_colisao(Ator(2, 3), ator)
self.assert_nao_colisao(Ator(3, 3), ator)
self.assert_nao_colisao(Ator(3, 2), ator)
self.assert_nao_colisao(Ator(3, 1), ator)
self.assert_nao_colisao(Ator(2, 1), ator)
self.assert_nao_colisao(Ator(1, 1), ator)
self.assert_nao_colisao(Ator(1, 2), ator)
self.assert_nao_colisao(Ator(1, 3), ator)
def test_caracter(self):
        'Tests the caracter output for the ATIVO and DESTRUIDO statuses'
ator = Ator()
self.assertEqual('A', ator.caracter())
outro_ator_na_mesma_posicao = Ator()
ator.colidir(outro_ator_na_mesma_posicao)
self.assertEqual(' ', ator.caracter())
def assert_colisao_atores_ativos(self, ator, ator2, intervalo=1):
"""
Se certifica que há colisão entre atores ativos
Atenção: Esse não é método de teste porque nao se inicia com prefixo "text".
Ele serve apenas para encapsular toda lógica de teste de colisão entre dois atores ativos
"""
        # Checking the status of both actors before the collision
        self.assertEqual(ator.status, ATIVO, 'Status should be active before the collision')
        self.assertEqual(ator2.status, ATIVO, 'Status should be active before the collision')
        ator.colidir(ator2, intervalo)
        # Checking the status of both actors after the collision
        self.assertEqual(ator2.status, DESTRUIDO, 'Status should be destroyed after the collision')
        self.assertEqual(ator.status, DESTRUIDO, 'Status should be destroyed after the collision')
def assert_nao_colisao(self, ator, ator2):
"""
Se certifica que não colisão entre dois atores
Atenção: Esse não é método de teste porque nao se inicia com prefixo "text".
Ele apenas encapsula a lógica de não colisão entre dois atores.
So seja, eles deve manter seus respectivos status mesmo depois da chamada do metodo colidir
"""
        # Storing statuses before the collision
        status_inicial_ator = ator.status
        status_inicial_ator_2 = ator2.status
        ator.colidir(ator2)
        # Checking that the statuses remained unchanged
        self.assertEqual(status_inicial_ator, ator.status, "ator's status should not change")
        self.assertEqual(status_inicial_ator_2, ator2.status, "ator2's status should not change")
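# A minimal sketch of the Ator contract these tests exercise (hypothetical --
# the real implementation lives in atores.py, which is not shown here):
#
#     ATIVO = 'ATIVO'
#     DESTRUIDO = 'DESTRUIDO'
#
#     class Ator(object):
#         _caracter_ativo = 'A'
#         _caracter_destruido = ' '
#
#         def __init__(self, x=0, y=0):
#             self.x, self.y, self.status = x, y, ATIVO
#
#         def caracter(self):
#             return self._caracter_ativo if self.status == ATIVO else self._caracter_destruido
#
#         def calcular_posicao(self, tempo):
#             return self.x, self.y
#
#         def colidir(self, outro, intervalo=1):
#             if self.status == ATIVO and outro.status == ATIVO:
#                 if abs(self.x - outro.x) <= intervalo and abs(self.y - outro.y) <= intervalo:
#                     self.status = outro.status = DESTRUIDO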
class ObstaculoTestes(TestCase):
"""
Esperado '0' como caracter de obstáculo ativo e ' ' como caracter de obstáculo destruído
"""
def teste_status(self):
obstaculo = Obstaculo()
self.assertEqual('O', obstaculo.caracter())
outro_ator_na_mesma_posicao = Ator()
obstaculo.colidir(outro_ator_na_mesma_posicao)
self.assertEqual(' ', obstaculo.caracter())
class PorcoTestes(TestCase):
"""
Esperado '@' como caracter de porco ativo e '+' como caracter de porco destruido
"""
def teste_status(self):
porco = Porco()
self.assertEqual('@', porco.caracter())
outro_ator_na_mesma_posicao = Ator()
porco.colidir(outro_ator_na_mesma_posicao)
self.assertEqual('+', porco.caracter())
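# Sketch of the subclasses under test (hypothetical; only the caracter pair
# differs from Ator):
#
#     class Obstaculo(Ator):
#         _caracter_ativo = 'O'
#
#     class Porco(Ator):
#         _caracter_ativo = '@'
#         _caracter_destruido = '+'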
class PassaroBaseTests(TestCase):
"""
Classe base para teste de passaros.
Essa classe não contèm nenhum teste, serve apenas para encapsular a lógica de asserção de posição de passaros
vermelhos e também dos amarelos.
"""
def assert_passaro_posicao(self, x_esperado, y_esperado, status_esperado, passaro, tempo):
"""
Método que se testa posição do pássaro.
Atenção: Esse não é um método de teste porque não se inicia com prefixo "test".
:param x_esperado: posição x esperada do passaro
:param y_esperado: posição y esperada do passaro
:param status_esperado: status esperado do passaro
:param passaro: passaro alvo do teste
:param tempo: tempo do jogo
"""
x_calculado, y_calculado = passaro.calcular_posicao(tempo)
        self.assertEqual(x_esperado, round(x_calculado), 'actual value of x = %s' % x_calculado)
        self.assertEqual(y_esperado, round(y_calculado), 'actual value of y = %s' % y_calculado)
self.assertEqual(status_esperado, passaro.status, '(x = %s, y = %s)' % (x_calculado, y_calculado))
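# The expected positions in the subclasses below follow plain projectile
# motion. A sketch of the formula the passaro classes presumably implement
# (g = 10 is an assumption, but it is consistent with the expected values
# asserted in these tests):
#
#     import math
#
#     def posicao_projetil(x0, y0, angulo_graus, velocidade, delta_t, g=10):
#         angulo = math.radians(angulo_graus)
#         x = x0 + velocidade * math.cos(angulo) * delta_t
#         y = y0 + velocidade * math.sin(angulo) * delta_t - g * delta_t ** 2 / 2.0
#         return x, y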
class PassaroVermelhoTests(PassaroBaseTests):
"""
Classe de teste e Passaro Vermelho
"""
def teste_status(self):
passaro_vermelho = PassaroVermelho(1, 1)
self.assertEqual('V', passaro_vermelho.caracter())
outro_ator_na_mesma_posicao = Ator()
passaro_vermelho.colidir(outro_ator_na_mesma_posicao)
self.assertEqual('v', passaro_vermelho.caracter())
def teste_velocidade_escalar(self):
self.assertEqual(20, PassaroVermelho.velocidade_escalar)
def teste_foi_lancado(self):
"""
        Launch test. While the bird's lancar method has not been called, the foi_lancado method must return
        False
:return:
"""
passaro_vermelho = PassaroVermelho(1, 1)
        self.assertFalse(passaro_vermelho.foi_lancado(),
                         'If the lancar method has not been executed yet, foi_lancado must return False')
passaro_vermelho.lancar(0, 0)
        self.assertTrue(passaro_vermelho.foi_lancado(),
                        'If the lancar method has been executed, foi_lancado must return True')
def teste_colisao_com_chao(self):
"""
        Tests that the bird collides whenever its y position is less than or equal to 0
:return:
"""
        passaro = PassaroVermelho(0, 0)
        passaro.colidir_com_chao()
        self.assertEqual(DESTRUIDO, passaro.status, 'Must collide with the ground whenever y<=0')
        passaro = PassaroVermelho(1, 0)
        passaro.colidir_com_chao()
        self.assertEqual(DESTRUIDO, passaro.status, 'Must collide with the ground whenever y<=0')
        passaro = PassaroVermelho(2, 0)
        passaro.colidir_com_chao()
        self.assertEqual(DESTRUIDO, passaro.status, 'Must collide with the ground whenever y<=0')
        passaro = PassaroVermelho(2, -0.1)
        passaro.colidir_com_chao()
        self.assertEqual(DESTRUIDO, passaro.status, 'Must collide with the ground whenever y<=0')
        passaro = PassaroVermelho(2, -5)
        passaro.colidir_com_chao()
        self.assertEqual(DESTRUIDO, passaro.status, 'Must collide with the ground whenever y<=0')
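# The five cases above all reduce to one rule; a plausible implementation
# (hypothetical) is simply:
#
#     def colidir_com_chao(self):
#         if self.y <= 0:
#             self.status = DESTRUIDO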
class PassaroAmareloTests(PassaroBaseTests):
"""
Classe de Tests para passaros amarelos
"""
def teste_status(self):
passaro_amarelo = PassaroAmarelo(1, 1)
self.assertEqual('A', passaro_amarelo.caracter())
outro_ator_na_mesma_posicao = Ator()
passaro_amarelo.colidir(outro_ator_na_mesma_posicao)
self.assertEqual('a', passaro_amarelo.caracter())
def teste_velocidade_escalar(self):
self.assertEqual(30, PassaroAmarelo.velocidade_escalar)
def teste_lacamento_vertical(self):
"""
        Vertical launch test: the bird moves only vertically, while its x position remains constant
:return:
"""
passaro_amarelo = PassaroAmarelo(1, 1)
        passaro_amarelo.lancar(90, 2)  # bird launched at 90 degrees at time 2 seconds
        # rising
self.assert_posicao_vertical(1, 2.0, passaro_amarelo)
self.assert_posicao_vertical(1, 2.01, passaro_amarelo)
self.assert_posicao_vertical(2, 2.02, passaro_amarelo)
self.assert_posicao_vertical(2, 2.03, passaro_amarelo)
self.assert_posicao_vertical(2, 2.04, passaro_amarelo)
self.assert_posicao_vertical(2, 2.05, passaro_amarelo)
        # falling
self.assert_posicao_vertical(46, 5.26, passaro_amarelo)
self.assert_posicao_vertical(46, 5.27, passaro_amarelo)
self.assert_posicao_vertical(46, 5.279999999999999, passaro_amarelo)
self.assert_posicao_vertical(46, 5.29, passaro_amarelo)
self.assert_posicao_vertical(46, 5.3, passaro_amarelo)
self.assert_posicao_vertical(46, 5.3100000000000005, passaro_amarelo)
self.assert_posicao_vertical(45, 5.32, passaro_amarelo)
self.assert_posicao_vertical(45, 5.33, passaro_amarelo)
self.assert_posicao_vertical(45, 5.34, passaro_amarelo)
self.assert_posicao_vertical(45, 5.35, passaro_amarelo)
self.assert_posicao_vertical(45, 5.359999999999999, passaro_amarelo)
self.assert_posicao_vertical(45, 5.37, passaro_amarelo)
self.assert_posicao_vertical(45, 5.38, passaro_amarelo)
self.assert_posicao_vertical(45, 5.390000000000001, passaro_amarelo)
self.assert_posicao_vertical(45, 5.4, passaro_amarelo)
self.assert_posicao_vertical(45, 5.41, passaro_amarelo)
self.assert_posicao_vertical(45, 5.42, passaro_amarelo)
self.assert_posicao_vertical(45, 5.43, passaro_amarelo)
self.assert_posicao_vertical(45, 5.4399999999999995, passaro_amarelo)
self.assert_posicao_vertical(45, 5.45, passaro_amarelo)
self.assert_posicao_vertical(45, 5.46, passaro_amarelo)
self.assert_posicao_vertical(45, 5.470000000000001, passaro_amarelo)
self.assert_posicao_vertical(45, 5.48, passaro_amarelo)
        # about to hit the ground
self.assert_posicao_vertical(1, 8.0, passaro_amarelo)
self.assert_posicao_vertical(1, 8.01, passaro_amarelo)
        # collision
self.assert_posicao_vertical(0, 8.04, passaro_amarelo)
passaro_amarelo.colidir_com_chao()
self.assertEqual(DESTRUIDO, passaro_amarelo.status)
def test_lancamento_45_graus(self):
passaro_amarelo = PassaroAmarelo(1, 1)
        passaro_amarelo.lancar(45, 2)  # bird launched at 45 degrees at time 2 seconds
self.assert_passaro_posicao(1, 1, ATIVO, passaro_amarelo, 2.0)
self.assert_passaro_posicao(1, 1, ATIVO, passaro_amarelo, 2.01)
self.assert_passaro_posicao(1, 1, ATIVO, passaro_amarelo, 2.02)
self.assert_passaro_posicao(2, 2, ATIVO, passaro_amarelo, 2.03)
self.assert_passaro_posicao(2, 2, ATIVO, passaro_amarelo, 2.04)
self.assert_passaro_posicao(2, 2, ATIVO, passaro_amarelo, 2.05)
self.assert_passaro_posicao(2, 2, ATIVO, passaro_amarelo, 2.06)
self.assert_passaro_posicao(2, 2, ATIVO, passaro_amarelo, 2.07)
self.assert_passaro_posicao(3, 3, ATIVO, passaro_amarelo, 2.08)
self.assert_passaro_posicao(3, 3, ATIVO, passaro_amarelo, 2.09)
self.assert_passaro_posicao(3, 3, ATIVO, passaro_amarelo, 2.1)
self.assert_passaro_posicao(3, 3, ATIVO, passaro_amarelo, 2.11)
self.assert_passaro_posicao(4, 3, ATIVO, passaro_amarelo, 2.12)
self.assert_passaro_posicao(4, 4, ATIVO, passaro_amarelo, 2.13)
self.assert_passaro_posicao(4, 4, ATIVO, passaro_amarelo, 2.14)
self.assert_passaro_posicao(4, 4, ATIVO, passaro_amarelo, 2.15)
self.assert_passaro_posicao(4, 4, ATIVO, passaro_amarelo, 2.16)
self.assert_passaro_posicao(5, 4, ATIVO, passaro_amarelo, 2.17)
self.assert_passaro_posicao(5, 5, ATIVO, passaro_amarelo, 2.18)
self.assert_passaro_posicao(5, 5, ATIVO, passaro_amarelo, 2.19)
self.assert_passaro_posicao(5, 5, ATIVO, passaro_amarelo, 2.2)
self.assert_passaro_posicao(5, 5, ATIVO, passaro_amarelo, 2.21)
self.assert_passaro_posicao(6, 5, ATIVO, passaro_amarelo, 2.22)
self.assert_passaro_posicao(6, 6, ATIVO, passaro_amarelo, 2.23)
self.assert_passaro_posicao(6, 6, ATIVO, passaro_amarelo, 2.24)
self.assert_passaro_posicao(6, 6, ATIVO, passaro_amarelo, 2.25)
self.assert_passaro_posicao(7, 6, ATIVO, passaro_amarelo, 2.26)
self.assert_passaro_posicao(7, 6, ATIVO, passaro_amarelo, 2.27)
self.assert_passaro_posicao(7, 7, ATIVO, passaro_amarelo, 2.2800000000000002)
self.assert_passaro_posicao(7, 7, ATIVO, passaro_amarelo, 2.29)
self.assert_passaro_posicao(7, 7, ATIVO, passaro_amarelo, 2.3)
self.assert_passaro_posicao(8, 7, ATIVO, passaro_amarelo, 2.31)
self.assert_passaro_posicao(8, 7, ATIVO, passaro_amarelo, 2.32)
self.assert_passaro_posicao(8, 7, ATIVO, passaro_amarelo, 2.33)
self.assert_passaro_posicao(8, 8, ATIVO, passaro_amarelo, 2.34)
self.assert_passaro_posicao(8, 8, ATIVO, passaro_amarelo, 2.35)
self.assert_passaro_posicao(9, 8, ATIVO, passaro_amarelo, 2.36)
self.assert_passaro_posicao(9, 8, ATIVO, passaro_amarelo, 2.37)
self.assert_passaro_posicao(9, 8, ATIVO, passaro_amarelo, 2.38)
self.assert_passaro_posicao(9, 9, ATIVO, passaro_amarelo, 2.39)
self.assert_passaro_posicao(9, 9, ATIVO, passaro_amarelo, 2.4)
self.assert_passaro_posicao(10, 9, ATIVO, passaro_amarelo, 2.41)
self.assert_passaro_posicao(10, 9, ATIVO, passaro_amarelo, 2.42)
self.assert_passaro_posicao(10, 9, ATIVO, passaro_amarelo, 2.43)
self.assert_passaro_posicao(10, 9, ATIVO, passaro_amarelo, 2.44)
self.assert_passaro_posicao(11, 10, ATIVO, passaro_amarelo, 2.45)
self.assert_passaro_posicao(11, 10, ATIVO, passaro_amarelo, 2.46)
self.assert_passaro_posicao(11, 10, ATIVO, passaro_amarelo, 2.4699999999999998)
self.assert_passaro_posicao(11, 10, ATIVO, passaro_amarelo, 2.48)
self.assert_passaro_posicao(11, 10, ATIVO, passaro_amarelo, 2.49)
self.assert_passaro_posicao(12, 10, ATIVO, passaro_amarelo, 2.5)
self.assert_passaro_posicao(12, 11, ATIVO, passaro_amarelo, 2.51)
self.assert_passaro_posicao(12, 11, ATIVO, passaro_amarelo, 2.52)
self.assert_passaro_posicao(12, 11, ATIVO, passaro_amarelo, 2.5300000000000002)
self.assert_passaro_posicao(12, 11, ATIVO, passaro_amarelo, 2.54)
self.assert_passaro_posicao(13, 11, ATIVO, passaro_amarelo, 2.55)
self.assert_passaro_posicao(13, 11, ATIVO, passaro_amarelo, 2.56)
self.assert_passaro_posicao(13, 11, ATIVO, passaro_amarelo, 2.57)
self.assert_passaro_posicao(13, 12, ATIVO, passaro_amarelo, 2.58)
self.assert_passaro_posicao(14, 12, ATIVO, passaro_amarelo, 2.59)
self.assert_passaro_posicao(14, 12, ATIVO, passaro_amarelo, 2.6)
self.assert_passaro_posicao(14, 12, ATIVO, passaro_amarelo, 2.61)
self.assert_passaro_posicao(14, 12, ATIVO, passaro_amarelo, 2.62)
self.assert_passaro_posicao(14, 12, ATIVO, passaro_amarelo, 2.63)
self.assert_passaro_posicao(15, 13, ATIVO, passaro_amarelo, 2.64)
self.assert_passaro_posicao(15, 13, ATIVO, passaro_amarelo, 2.65)
self.assert_passaro_posicao(15, 13, ATIVO, passaro_amarelo, 2.66)
self.assert_passaro_posicao(15, 13, ATIVO, passaro_amarelo, 2.67)
self.assert_passaro_posicao(15, 13, ATIVO, passaro_amarelo, 2.68)
self.assert_passaro_posicao(16, 13, ATIVO, passaro_amarelo, 2.69)
self.assert_passaro_posicao(16, 13, ATIVO, passaro_amarelo, 2.7)
self.assert_passaro_posicao(16, 14, ATIVO, passaro_amarelo, 2.71)
self.assert_passaro_posicao(16, 14, ATIVO, passaro_amarelo, 2.7199999999999998)
self.assert_passaro_posicao(16, 14, ATIVO, passaro_amarelo, 2.73)
self.assert_passaro_posicao(17, 14, ATIVO, passaro_amarelo, 2.74)
self.assert_passaro_posicao(17, 14, ATIVO, passaro_amarelo, 2.75)
self.assert_passaro_posicao(17, 14, ATIVO, passaro_amarelo, 2.76)
self.assert_passaro_posicao(17, 14, ATIVO, passaro_amarelo, 2.77)
self.assert_passaro_posicao(18, 15, ATIVO, passaro_amarelo, 2.7800000000000002)
self.assert_passaro_posicao(18, 15, ATIVO, passaro_amarelo, 2.79)
self.assert_passaro_posicao(18, 15, ATIVO, passaro_amarelo, 2.8)
self.assert_passaro_posicao(18, 15, ATIVO, passaro_amarelo, 2.81)
self.assert_passaro_posicao(18, 15, ATIVO, passaro_amarelo, 2.82)
self.assert_passaro_posicao(19, 15, ATIVO, passaro_amarelo, 2.83)
self.assert_passaro_posicao(19, 15, ATIVO, passaro_amarelo, 2.84)
self.assert_passaro_posicao(19, 15, ATIVO, passaro_amarelo, 2.85)
self.assert_passaro_posicao(19, 16, ATIVO, passaro_amarelo, 2.86)
self.assert_passaro_posicao(19, 16, ATIVO, passaro_amarelo, 2.87)
self.assert_passaro_posicao(20, 16, ATIVO, passaro_amarelo, 2.88)
self.assert_passaro_posicao(20, 16, ATIVO, passaro_amarelo, 2.89)
self.assert_passaro_posicao(20, 16, ATIVO, passaro_amarelo, 2.9)
self.assert_passaro_posicao(20, 16, ATIVO, passaro_amarelo, 2.91)
self.assert_passaro_posicao(21, 16, ATIVO, passaro_amarelo, 2.92)
self.assert_passaro_posicao(21, 16, ATIVO, passaro_amarelo, 2.93)
self.assert_passaro_posicao(21, 17, ATIVO, passaro_amarelo, 2.94)
self.assert_passaro_posicao(21, 17, ATIVO, passaro_amarelo, 2.95)
self.assert_passaro_posicao(21, 17, ATIVO, passaro_amarelo, 2.96)
self.assert_passaro_posicao(22, 17, ATIVO, passaro_amarelo, 2.9699999999999998)
self.assert_passaro_posicao(22, 17, ATIVO, passaro_amarelo, 2.98)
self.assert_passaro_posicao(22, 17, ATIVO, passaro_amarelo, 2.99)
self.assert_passaro_posicao(22, 17, ATIVO, passaro_amarelo, 3.0)
self.assert_passaro_posicao(22, 17, ATIVO, passaro_amarelo, 3.01)
self.assert_passaro_posicao(23, 17, ATIVO, passaro_amarelo, 3.02)
self.assert_passaro_posicao(23, 18, ATIVO, passaro_amarelo, 3.0300000000000002)
self.assert_passaro_posicao(23, 18, ATIVO, passaro_amarelo, 3.04)
self.assert_passaro_posicao(23, 18, ATIVO, passaro_amarelo, 3.05)
self.assert_passaro_posicao(23, 18, ATIVO, passaro_amarelo, 3.06)
self.assert_passaro_posicao(24, 18, ATIVO, passaro_amarelo, 3.0700000000000003)
self.assert_passaro_posicao(24, 18, ATIVO, passaro_amarelo, 3.08)
self.assert_passaro_posicao(24, 18, ATIVO, passaro_amarelo, 3.09)
self.assert_passaro_posicao(24, 18, ATIVO, passaro_amarelo, 3.1)
self.assert_passaro_posicao(25, 18, ATIVO, passaro_amarelo, 3.1100000000000003)
self.assert_passaro_posicao(25, 18, ATIVO, passaro_amarelo, 3.12)
self.assert_passaro_posicao(25, 19, ATIVO, passaro_amarelo, 3.13)
self.assert_passaro_posicao(25, 19, ATIVO, passaro_amarelo, 3.1399999999999997)
self.assert_passaro_posicao(25, 19, ATIVO, passaro_amarelo, 3.15)
self.assert_passaro_posicao(26, 19, ATIVO, passaro_amarelo, 3.16)
self.assert_passaro_posicao(26, 19, ATIVO, passaro_amarelo, 3.17)
self.assert_passaro_posicao(26, 19, ATIVO, passaro_amarelo, 3.1799999999999997)
self.assert_passaro_posicao(26, 19, ATIVO, passaro_amarelo, 3.19)
self.assert_passaro_posicao(26, 19, ATIVO, passaro_amarelo, 3.2)
self.assert_passaro_posicao(27, 19, ATIVO, passaro_amarelo, 3.21)
self.assert_passaro_posicao(27, 19, ATIVO, passaro_amarelo, 3.2199999999999998)
self.assert_passaro_posicao(27, 20, ATIVO, passaro_amarelo, 3.23)
self.assert_passaro_posicao(27, 20, ATIVO, passaro_amarelo, 3.24)
self.assert_passaro_posicao(28, 20, ATIVO, passaro_amarelo, 3.25)
self.assert_passaro_posicao(28, 20, ATIVO, passaro_amarelo, 3.26)
self.assert_passaro_posicao(28, 20, ATIVO, passaro_amarelo, 3.27)
self.assert_passaro_posicao(28, 20, ATIVO, passaro_amarelo, 3.2800000000000002)
self.assert_passaro_posicao(28, 20, ATIVO, passaro_amarelo, 3.29)
self.assert_passaro_posicao(29, 20, ATIVO, passaro_amarelo, 3.3)
self.assert_passaro_posicao(29, 20, ATIVO, passaro_amarelo, 3.31)
self.assert_passaro_posicao(29, 20, ATIVO, passaro_amarelo, 3.3200000000000003)
self.assert_passaro_posicao(29, 20, ATIVO, passaro_amarelo, 3.33)
self.assert_passaro_posicao(29, 20, ATIVO, passaro_amarelo, 3.34)
self.assert_passaro_posicao(30, 21, ATIVO, passaro_amarelo, 3.35)
self.assert_passaro_posicao(30, 21, ATIVO, passaro_amarelo, 3.3600000000000003)
self.assert_passaro_posicao(30, 21, ATIVO, passaro_amarelo, 3.37)
self.assert_passaro_posicao(30, 21, ATIVO, passaro_amarelo, 3.38)
self.assert_passaro_posicao(30, 21, ATIVO, passaro_amarelo, 3.3899999999999997)
self.assert_passaro_posicao(31, 21, ATIVO, passaro_amarelo, 3.4)
self.assert_passaro_posicao(31, 21, ATIVO, passaro_amarelo, 3.41)
self.assert_passaro_posicao(31, 21, ATIVO, passaro_amarelo, 3.42)
self.assert_passaro_posicao(31, 21, ATIVO, passaro_amarelo, 3.4299999999999997)
self.assert_passaro_posicao(32, 21, ATIVO, passaro_amarelo, 3.44)
self.assert_passaro_posicao(32, 21, ATIVO, passaro_amarelo, 3.45)
self.assert_passaro_posicao(32, 21, ATIVO, passaro_amarelo, 3.46)
self.assert_passaro_posicao(32, 21, ATIVO, passaro_amarelo, 3.4699999999999998)
self.assert_passaro_posicao(32, 21, ATIVO, passaro_amarelo, 3.48)
self.assert_passaro_posicao(33, 22, ATIVO, passaro_amarelo, 3.49)
self.assert_passaro_posicao(33, 22, ATIVO, passaro_amarelo, 3.5)
self.assert_passaro_posicao(33, 22, ATIVO, passaro_amarelo, 3.51)
self.assert_passaro_posicao(33, 22, ATIVO, passaro_amarelo, 3.52)
self.assert_passaro_posicao(33, 22, ATIVO, passaro_amarelo, 3.5300000000000002)
self.assert_passaro_posicao(34, 22, ATIVO, passaro_amarelo, 3.54)
self.assert_passaro_posicao(34, 22, ATIVO, passaro_amarelo, 3.55)
self.assert_passaro_posicao(34, 22, ATIVO, passaro_amarelo, 3.56)
self.assert_passaro_posicao(34, 22, ATIVO, passaro_amarelo, 3.5700000000000003)
self.assert_passaro_posicao(35, 22, ATIVO, passaro_amarelo, 3.58)
self.assert_passaro_posicao(35, 22, ATIVO, passaro_amarelo, 3.59)
self.assert_passaro_posicao(35, 22, ATIVO, passaro_amarelo, 3.6)
self.assert_passaro_posicao(35, 22, ATIVO, passaro_amarelo, 3.6100000000000003)
self.assert_passaro_posicao(35, 22, ATIVO, passaro_amarelo, 3.62)
self.assert_passaro_posicao(36, 22, ATIVO, passaro_amarelo, 3.63)
self.assert_passaro_posicao(36, 22, ATIVO, passaro_amarelo, 3.6399999999999997)
self.assert_passaro_posicao(36, 22, ATIVO, passaro_amarelo, 3.65)
self.assert_passaro_posicao(36, 22, ATIVO, passaro_amarelo, 3.66)
self.assert_passaro_posicao(36, 22, ATIVO, passaro_amarelo, 3.67)
self.assert_passaro_posicao(37, 23, ATIVO, passaro_amarelo, 3.6799999999999997)
self.assert_passaro_posicao(37, 23, ATIVO, passaro_amarelo, 3.69)
self.assert_passaro_posicao(37, 23, ATIVO, passaro_amarelo, 3.7)
self.assert_passaro_posicao(37, 23, ATIVO, passaro_amarelo, 3.71)
self.assert_passaro_posicao(37, 23, ATIVO, passaro_amarelo, 3.7199999999999998)
self.assert_passaro_posicao(38, 23, ATIVO, passaro_amarelo, 3.73)
self.assert_passaro_posicao(38, 23, ATIVO, passaro_amarelo, 3.74)
self.assert_passaro_posicao(38, 23, ATIVO, passaro_amarelo, 3.75)
self.assert_passaro_posicao(38, 23, ATIVO, passaro_amarelo, 3.76)
self.assert_passaro_posicao(39, 23, ATIVO, passaro_amarelo, 3.77)
self.assert_passaro_posicao(39, 23, ATIVO, passaro_amarelo, 3.7800000000000002)
self.assert_passaro_posicao(39, 23, ATIVO, passaro_amarelo, 3.79)
self.assert_passaro_posicao(39, 23, ATIVO, passaro_amarelo, 3.8)
self.assert_passaro_posicao(39, 23, ATIVO, passaro_amarelo, 3.81)
self.assert_passaro_posicao(40, 23, ATIVO, passaro_amarelo, 3.8200000000000003)
self.assert_passaro_posicao(40, 23, ATIVO, passaro_amarelo, 3.83)
self.assert_passaro_posicao(40, 23, ATIVO, passaro_amarelo, 3.84)
self.assert_passaro_posicao(40, 23, ATIVO, passaro_amarelo, 3.85)
self.assert_passaro_posicao(40, 23, ATIVO, passaro_amarelo, 3.8600000000000003)
self.assert_passaro_posicao(41, 23, ATIVO, passaro_amarelo, 3.87)
self.assert_passaro_posicao(41, 23, ATIVO, passaro_amarelo, 3.88)
self.assert_passaro_posicao(41, 23, ATIVO, passaro_amarelo, 3.8899999999999997)
self.assert_passaro_posicao(41, 23, ATIVO, passaro_amarelo, 3.9)
self.assert_passaro_posicao(42, 23, ATIVO, passaro_amarelo, 3.91)
self.assert_passaro_posicao(42, 23, ATIVO, passaro_amarelo, 3.92)
self.assert_passaro_posicao(42, 23, ATIVO, passaro_amarelo, 3.9299999999999997)
self.assert_passaro_posicao(42, 23, ATIVO, passaro_amarelo, 3.94)
self.assert_passaro_posicao(42, 23, ATIVO, passaro_amarelo, 3.95)
self.assert_passaro_posicao(43, 23, ATIVO, passaro_amarelo, 3.96)
self.assert_passaro_posicao(43, 23, ATIVO, passaro_amarelo, 3.9699999999999998)
self.assert_passaro_posicao(43, 23, ATIVO, passaro_amarelo, 3.98)
self.assert_passaro_posicao(43, 23, ATIVO, passaro_amarelo, 3.99)
self.assert_passaro_posicao(43, 23, ATIVO, passaro_amarelo, 4.0)
self.assert_passaro_posicao(44, 23, ATIVO, passaro_amarelo, 4.01)
self.assert_passaro_posicao(44, 23, ATIVO, passaro_amarelo, 4.02)
self.assert_passaro_posicao(44, 23, ATIVO, passaro_amarelo, 4.029999999999999)
self.assert_passaro_posicao(44, 23, ATIVO, passaro_amarelo, 4.04)
self.assert_passaro_posicao(44, 23, ATIVO, passaro_amarelo, 4.05)
self.assert_passaro_posicao(45, 23, ATIVO, passaro_amarelo, 4.0600000000000005)
self.assert_passaro_posicao(45, 23, ATIVO, passaro_amarelo, 4.07)
self.assert_passaro_posicao(45, 23, ATIVO, passaro_amarelo, 4.08)
self.assert_passaro_posicao(45, 23, ATIVO, passaro_amarelo, 4.09)
self.assert_passaro_posicao(46, 23, ATIVO, passaro_amarelo, 4.1)
self.assert_passaro_posicao(46, 23, ATIVO, passaro_amarelo, 4.109999999999999)
self.assert_passaro_posicao(46, 23, ATIVO, passaro_amarelo, 4.12)
self.assert_passaro_posicao(46, 23, ATIVO, passaro_amarelo, 4.13)
self.assert_passaro_posicao(46, 23, ATIVO, passaro_amarelo, 4.140000000000001)
self.assert_passaro_posicao(47, 23, ATIVO, passaro_amarelo, 4.15)
self.assert_passaro_posicao(47, 23, ATIVO, passaro_amarelo, 4.16)
self.assert_passaro_posicao(47, 23, ATIVO, passaro_amarelo, 4.17)
self.assert_passaro_posicao(47, 23, ATIVO, passaro_amarelo, 4.18)
self.assert_passaro_posicao(47, 23, ATIVO, passaro_amarelo, 4.1899999999999995)
self.assert_passaro_posicao(48, 23, ATIVO, passaro_amarelo, 4.2)
self.assert_passaro_posicao(48, 23, ATIVO, passaro_amarelo, 4.21)
self.assert_passaro_posicao(48, 23, ATIVO, passaro_amarelo, 4.220000000000001)
self.assert_passaro_posicao(48, 23, ATIVO, passaro_amarelo, 4.23)
self.assert_passaro_posicao(49, 23, ATIVO, passaro_amarelo, 4.24)
self.assert_passaro_posicao(49, 23, ATIVO, passaro_amarelo, 4.25)
self.assert_passaro_posicao(49, 23, ATIVO, passaro_amarelo, 4.26)
self.assert_passaro_posicao(49, 23, ATIVO, passaro_amarelo, 4.27)
self.assert_passaro_posicao(49, 23, ATIVO, passaro_amarelo, 4.279999999999999)
self.assert_passaro_posicao(50, 23, ATIVO, passaro_amarelo, 4.29)
self.assert_passaro_posicao(50, 23, ATIVO, passaro_amarelo, 4.3)
self.assert_passaro_posicao(50, 23, ATIVO, passaro_amarelo, 4.3100000000000005)
self.assert_passaro_posicao(50, 23, ATIVO, passaro_amarelo, 4.32)
self.assert_passaro_posicao(50, 23, ATIVO, passaro_amarelo, 4.33)
self.assert_passaro_posicao(51, 23, ATIVO, passaro_amarelo, 4.34)
self.assert_passaro_posicao(51, 23, ATIVO, passaro_amarelo, 4.35)
self.assert_passaro_posicao(51, 23, ATIVO, passaro_amarelo, 4.359999999999999)
self.assert_passaro_posicao(51, 23, ATIVO, passaro_amarelo, 4.37)
self.assert_passaro_posicao(51, 23, ATIVO, passaro_amarelo, 4.38)
self.assert_passaro_posicao(52, 23, ATIVO, passaro_amarelo, 4.390000000000001)
self.assert_passaro_posicao(52, 23, ATIVO, passaro_amarelo, 4.4)
self.assert_passaro_posicao(52, 23, ATIVO, passaro_amarelo, 4.41)
self.assert_passaro_posicao(52, 23, ATIVO, passaro_amarelo, 4.42)
self.assert_passaro_posicao(53, 23, ATIVO, passaro_amarelo, 4.43)
self.assert_passaro_posicao(53, 23, ATIVO, passaro_amarelo, 4.4399999999999995)
self.assert_passaro_posicao(53, 23, ATIVO, passaro_amarelo, 4.45)
self.assert_passaro_posicao(53, 23, ATIVO, passaro_amarelo, 4.46)
self.assert_passaro_posicao(53, 23, ATIVO, passaro_amarelo, 4.470000000000001)
self.assert_passaro_posicao(54, 23, ATIVO, passaro_amarelo, 4.48)
self.assert_passaro_posicao(54, 23, ATIVO, passaro_amarelo, 4.49)
self.assert_passaro_posicao(54, 23, ATIVO, passaro_amarelo, 4.5)
self.assert_passaro_posicao(54, 23, ATIVO, passaro_amarelo, 4.51)
self.assert_passaro_posicao(54, 23, ATIVO, passaro_amarelo, 4.52)
self.assert_passaro_posicao(55, 23, ATIVO, passaro_amarelo, 4.529999999999999)
self.assert_passaro_posicao(55, 23, ATIVO, passaro_amarelo, 4.54)
self.assert_passaro_posicao(55, 23, ATIVO, passaro_amarelo, 4.55)
self.assert_passaro_posicao(55, 23, ATIVO, passaro_amarelo, 4.5600000000000005)
self.assert_passaro_posicao(56, 22, ATIVO, passaro_amarelo, 4.57)
self.assert_passaro_posicao(56, 22, ATIVO, passaro_amarelo, 4.58)
self.assert_passaro_posicao(56, 22, ATIVO, passaro_amarelo, 4.59)
self.assert_passaro_posicao(56, 22, ATIVO, passaro_amarelo, 4.6)
self.assert_passaro_posicao(56, 22, ATIVO, passaro_amarelo, 4.609999999999999)
self.assert_passaro_posicao(57, 22, ATIVO, passaro_amarelo, 4.62)
self.assert_passaro_posicao(57, 22, ATIVO, passaro_amarelo, 4.63)
self.assert_passaro_posicao(57, 22, ATIVO, passaro_amarelo, 4.640000000000001)
self.assert_passaro_posicao(57, 22, ATIVO, passaro_amarelo, 4.65)
self.assert_passaro_posicao(57, 22, ATIVO, passaro_amarelo, 4.66)
self.assert_passaro_posicao(58, 22, ATIVO, passaro_amarelo, 4.67)
self.assert_passaro_posicao(58, 22, ATIVO, passaro_amarelo, 4.68)
self.assert_passaro_posicao(58, 22, ATIVO, passaro_amarelo, 4.6899999999999995)
self.assert_passaro_posicao(58, 22, ATIVO, passaro_amarelo, 4.7)
self.assert_passaro_posicao(58, 22, ATIVO, passaro_amarelo, 4.71)
self.assert_passaro_posicao(59, 22, ATIVO, passaro_amarelo, 4.720000000000001)
self.assert_passaro_posicao(59, 22, ATIVO, passaro_amarelo, 4.73)
self.assert_passaro_posicao(59, 22, ATIVO, passaro_amarelo, 4.74)
self.assert_passaro_posicao(59, 22, ATIVO, passaro_amarelo, 4.75)
self.assert_passaro_posicao(60, 21, ATIVO, passaro_amarelo, 4.76)
self.assert_passaro_posicao(60, 21, ATIVO, passaro_amarelo, 4.77)
self.assert_passaro_posicao(60, 21, ATIVO, passaro_amarelo, 4.779999999999999)
self.assert_passaro_posicao(60, 21, ATIVO, passaro_amarelo, 4.79)
self.assert_passaro_posicao(60, 21, ATIVO, passaro_amarelo, 4.8)
self.assert_passaro_posicao(61, 21, ATIVO, passaro_amarelo, 4.8100000000000005)
self.assert_passaro_posicao(61, 21, ATIVO, passaro_amarelo, 4.82)
self.assert_passaro_posicao(61, 21, ATIVO, passaro_amarelo, 4.83)
self.assert_passaro_posicao(61, 21, ATIVO, passaro_amarelo, 4.84)
self.assert_passaro_posicao(61, 21, ATIVO, passaro_amarelo, 4.85)
self.assert_passaro_posicao(62, 21, ATIVO, passaro_amarelo, 4.859999999999999)
self.assert_passaro_posicao(62, 21, ATIVO, passaro_amarelo, 4.87)
self.assert_passaro_posicao(62, 21, ATIVO, passaro_amarelo, 4.88)
self.assert_passaro_posicao(62, 21, ATIVO, passaro_amarelo, 4.890000000000001)
self.assert_passaro_posicao(63, 20, ATIVO, passaro_amarelo, 4.9)
self.assert_passaro_posicao(63, 20, ATIVO, passaro_amarelo, 4.91)
self.assert_passaro_posicao(63, 20, ATIVO, passaro_amarelo, 4.92)
self.assert_passaro_posicao(63, 20, ATIVO, passaro_amarelo, 4.93)
self.assert_passaro_posicao(63, 20, ATIVO, passaro_amarelo, 4.9399999999999995)
self.assert_passaro_posicao(64, 20, ATIVO, passaro_amarelo, 4.95)
self.assert_passaro_posicao(64, 20, ATIVO, passaro_amarelo, 4.96)
self.assert_passaro_posicao(64, 20, ATIVO, passaro_amarelo, 4.970000000000001)
self.assert_passaro_posicao(64, 20, ATIVO, passaro_amarelo, 4.98)
self.assert_passaro_posicao(64, 20, ATIVO, passaro_amarelo, 4.99)
self.assert_passaro_posicao(65, 20, ATIVO, passaro_amarelo, 5.0)
self.assert_passaro_posicao(65, 20, ATIVO, passaro_amarelo, 5.01)
self.assert_passaro_posicao(65, 19, ATIVO, passaro_amarelo, 5.02)
self.assert_passaro_posicao(65, 19, ATIVO, passaro_amarelo, 5.029999999999999)
self.assert_passaro_posicao(65, 19, ATIVO, passaro_amarelo, 5.04)
self.assert_passaro_posicao(66, 19, ATIVO, passaro_amarelo, 5.05)
self.assert_passaro_posicao(66, 19, ATIVO, passaro_amarelo, 5.0600000000000005)
self.assert_passaro_posicao(66, 19, ATIVO, passaro_amarelo, 5.07)
self.assert_passaro_posicao(66, 19, ATIVO, passaro_amarelo, 5.08)
self.assert_passaro_posicao(67, 19, ATIVO, passaro_amarelo, 5.09)
self.assert_passaro_posicao(67, 19, ATIVO, passaro_amarelo, 5.1)
self.assert_passaro_posicao(67, 19, ATIVO, passaro_amarelo, 5.109999999999999)
self.assert_passaro_posicao(67, 19, ATIVO, passaro_amarelo, 5.12)
self.assert_passaro_posicao(67, 18, ATIVO, passaro_amarelo, 5.13)
self.assert_passaro_posicao(68, 18, ATIVO, passaro_amarelo, 5.140000000000001)
self.assert_passaro_posicao(68, 18, ATIVO, passaro_amarelo, 5.15)
self.assert_passaro_posicao(68, 18, ATIVO, passaro_amarelo, 5.16)
self.assert_passaro_posicao(68, 18, ATIVO, passaro_amarelo, 5.17)
self.assert_passaro_posicao(68, 18, ATIVO, passaro_amarelo, 5.18)
self.assert_passaro_posicao(69, 18, ATIVO, passaro_amarelo, 5.1899999999999995)
self.assert_passaro_posicao(69, 18, ATIVO, passaro_amarelo, 5.2)
self.assert_passaro_posicao(69, 18, ATIVO, passaro_amarelo, 5.21)
self.assert_passaro_posicao(69, 17, ATIVO, passaro_amarelo, 5.220000000000001)
self.assert_passaro_posicao(70, 17, ATIVO, passaro_amarelo, 5.23)
self.assert_passaro_posicao(70, 17, ATIVO, passaro_amarelo, 5.24)
self.assert_passaro_posicao(70, 17, ATIVO, passaro_amarelo, 5.25)
self.assert_passaro_posicao(70, 17, ATIVO, passaro_amarelo, 5.26)
self.assert_passaro_posicao(70, 17, ATIVO, passaro_amarelo, 5.27)
self.assert_passaro_posicao(71, 17, ATIVO, passaro_amarelo, 5.279999999999999)
self.assert_passaro_posicao(71, 17, ATIVO, passaro_amarelo, 5.29)
self.assert_passaro_posicao(71, 17, ATIVO, passaro_amarelo, 5.3)
self.assert_passaro_posicao(71, 16, ATIVO, passaro_amarelo, 5.3100000000000005)
self.assert_passaro_posicao(71, 16, ATIVO, passaro_amarelo, 5.32)
self.assert_passaro_posicao(72, 16, ATIVO, passaro_amarelo, 5.33)
self.assert_passaro_posicao(72, 16, ATIVO, passaro_amarelo, 5.34)
self.assert_passaro_posicao(72, 16, ATIVO, passaro_amarelo, 5.35)
self.assert_passaro_posicao(72, 16, ATIVO, passaro_amarelo, 5.359999999999999)
self.assert_passaro_posicao(72, 16, ATIVO, passaro_amarelo, 5.37)
self.assert_passaro_posicao(73, 16, ATIVO, passaro_amarelo, 5.38)
self.assert_passaro_posicao(73, 15, ATIVO, passaro_amarelo, 5.390000000000001)
self.assert_passaro_posicao(73, 15, ATIVO, passaro_amarelo, 5.4)
self.assert_passaro_posicao(73, 15, ATIVO, passaro_amarelo, 5.41)
self.assert_passaro_posicao(74, 15, ATIVO, passaro_amarelo, 5.42)
self.assert_passaro_posicao(74, 15, ATIVO, passaro_amarelo, 5.43)
self.assert_passaro_posicao(74, 15, ATIVO, passaro_amarelo, 5.4399999999999995)
self.assert_passaro_posicao(74, 15, ATIVO, passaro_amarelo, 5.45)
self.assert_passaro_posicao(74, 15, ATIVO, passaro_amarelo, 5.46)
self.assert_passaro_posicao(75, 14, ATIVO, passaro_amarelo, 5.470000000000001)
self.assert_passaro_posicao(75, 14, ATIVO, passaro_amarelo, 5.48)
self.assert_passaro_posicao(75, 14, ATIVO, passaro_amarelo, 5.49)
self.assert_passaro_posicao(75, 14, ATIVO, passaro_amarelo, 5.5)
self.assert_passaro_posicao(75, 14, ATIVO, passaro_amarelo, 5.51)
self.assert_passaro_posicao(76, 14, ATIVO, passaro_amarelo, 5.52)
self.assert_passaro_posicao(76, 14, ATIVO, passaro_amarelo, 5.529999999999999)
self.assert_passaro_posicao(76, 13, ATIVO, passaro_amarelo, 5.54)
self.assert_passaro_posicao(76, 13, ATIVO, passaro_amarelo, 5.55)
self.assert_passaro_posicao(77, 13, ATIVO, passaro_amarelo, 5.5600000000000005)
self.assert_passaro_posicao(77, 13, ATIVO, passaro_amarelo, 5.57)
self.assert_passaro_posicao(77, 13, ATIVO, passaro_amarelo, 5.58)
self.assert_passaro_posicao(77, 13, ATIVO, passaro_amarelo, 5.59)
self.assert_passaro_posicao(77, 13, ATIVO, passaro_amarelo, 5.6)
self.assert_passaro_posicao(78, 12, ATIVO, passaro_amarelo, 5.609999999999999)
self.assert_passaro_posicao(78, 12, ATIVO, passaro_amarelo, 5.62)
self.assert_passaro_posicao(78, 12, ATIVO, passaro_amarelo, 5.63)
self.assert_passaro_posicao(78, 12, ATIVO, passaro_amarelo, 5.640000000000001)
self.assert_passaro_posicao(78, 12, ATIVO, passaro_amarelo, 5.65)
self.assert_passaro_posicao(79, 12, ATIVO, passaro_amarelo, 5.66)
self.assert_passaro_posicao(79, 12, ATIVO, passaro_amarelo, 5.67)
self.assert_passaro_posicao(79, 11, ATIVO, passaro_amarelo, 5.68)
self.assert_passaro_posicao(79, 11, ATIVO, passaro_amarelo, 5.6899999999999995)
self.assert_passaro_posicao(79, 11, ATIVO, passaro_amarelo, 5.7)
self.assert_passaro_posicao(80, 11, ATIVO, passaro_amarelo, 5.71)
self.assert_passaro_posicao(80, 11, ATIVO, passaro_amarelo, 5.720000000000001)
self.assert_passaro_posicao(80, 11, ATIVO, passaro_amarelo, 5.73)
self.assert_passaro_posicao(80, 10, ATIVO, passaro_amarelo, 5.74)
self.assert_passaro_posicao(81, 10, ATIVO, passaro_amarelo, 5.75)
self.assert_passaro_posicao(81, 10, ATIVO, passaro_amarelo, 5.76)
self.assert_passaro_posicao(81, 10, ATIVO, passaro_amarelo, 5.77)
self.assert_passaro_posicao(81, 10, ATIVO, passaro_amarelo, 5.779999999999999)
self.assert_passaro_posicao(81, 10, ATIVO, passaro_amarelo, 5.79)
self.assert_passaro_posicao(82, 9, ATIVO, passaro_amarelo, 5.8)
self.assert_passaro_posicao(82, 9, ATIVO, passaro_amarelo, 5.8100000000000005)
self.assert_passaro_posicao(82, 9, ATIVO, passaro_amarelo, 5.82)
self.assert_passaro_posicao(82, 9, ATIVO, passaro_amarelo, 5.83)
self.assert_passaro_posicao(82, 9, ATIVO, passaro_amarelo, 5.84)
self.assert_passaro_posicao(83, 9, ATIVO, passaro_amarelo, 5.85)
self.assert_passaro_posicao(83, 8, ATIVO, passaro_amarelo, 5.859999999999999)
self.assert_passaro_posicao(83, 8, ATIVO, passaro_amarelo, 5.87)
self.assert_passaro_posicao(83, 8, ATIVO, passaro_amarelo, 5.88)
self.assert_passaro_posicao(84, 8, ATIVO, passaro_amarelo, 5.890000000000001)
self.assert_passaro_posicao(84, 8, ATIVO, passaro_amarelo, 5.9)
self.assert_passaro_posicao(84, 8, ATIVO, passaro_amarelo, 5.91)
self.assert_passaro_posicao(84, 7, ATIVO, passaro_amarelo, 5.92)
self.assert_passaro_posicao(84, 7, ATIVO, passaro_amarelo, 5.93)
self.assert_passaro_posicao(85, 7, ATIVO, passaro_amarelo, 5.9399999999999995)
self.assert_passaro_posicao(85, 7, ATIVO, passaro_amarelo, 5.95)
self.assert_passaro_posicao(85, 7, ATIVO, passaro_amarelo, 5.96)
self.assert_passaro_posicao(85, 6, ATIVO, passaro_amarelo, 5.970000000000001)
self.assert_passaro_posicao(85, 6, ATIVO, passaro_amarelo, 5.98)
self.assert_passaro_posicao(86, 6, ATIVO, passaro_amarelo, 5.99)
self.assert_passaro_posicao(86, 6, ATIVO, passaro_amarelo, 6.0)
self.assert_passaro_posicao(86, 6, ATIVO, passaro_amarelo, 6.01)
self.assert_passaro_posicao(86, 5, ATIVO, passaro_amarelo, 6.02)
self.assert_passaro_posicao(86, 5, ATIVO, passaro_amarelo, 6.03)
self.assert_passaro_posicao(87, 5, ATIVO, passaro_amarelo, 6.04)
self.assert_passaro_posicao(87, 5, ATIVO, passaro_amarelo, 6.05)
self.assert_passaro_posicao(87, 5, ATIVO, passaro_amarelo, 6.06)
self.assert_passaro_posicao(87, 5, ATIVO, passaro_amarelo, 6.07)
self.assert_passaro_posicao(88, 4, ATIVO, passaro_amarelo, 6.08)
self.assert_passaro_posicao(88, 4, ATIVO, passaro_amarelo, 6.09)
self.assert_passaro_posicao(88, 4, ATIVO, passaro_amarelo, 6.1)
self.assert_passaro_posicao(88, 4, ATIVO, passaro_amarelo, 6.11)
self.assert_passaro_posicao(88, 4, ATIVO, passaro_amarelo, 6.12)
self.assert_passaro_posicao(89, 3, ATIVO, passaro_amarelo, 6.13)
self.assert_passaro_posicao(89, 3, ATIVO, passaro_amarelo, 6.14)
self.assert_passaro_posicao(89, 3, ATIVO, passaro_amarelo, 6.15)
self.assert_passaro_posicao(89, 3, ATIVO, passaro_amarelo, 6.16)
self.assert_passaro_posicao(89, 3, ATIVO, passaro_amarelo, 6.17)
self.assert_passaro_posicao(90, 2, ATIVO, passaro_amarelo, 6.18)
self.assert_passaro_posicao(90, 2, ATIVO, passaro_amarelo, 6.19)
self.assert_passaro_posicao(90, 2, ATIVO, passaro_amarelo, 6.2)
self.assert_passaro_posicao(90, 2, ATIVO, passaro_amarelo, 6.21)
self.assert_passaro_posicao(91, 1, ATIVO, passaro_amarelo, 6.22)
self.assert_passaro_posicao(91, 1, ATIVO, passaro_amarelo, 6.23)
self.assert_passaro_posicao(91, 1, ATIVO, passaro_amarelo, 6.24)
self.assert_passaro_posicao(91, 1, ATIVO, passaro_amarelo, 6.25)
self.assert_passaro_posicao(91, 1, ATIVO, passaro_amarelo, 6.26)
self.assert_passaro_posicao(92, 0, ATIVO, passaro_amarelo, 6.29)
passaro_amarelo.colidir_com_chao()
self.assertEqual(DESTRUIDO, passaro_amarelo.status)
        # Test-generation code
# for delta_t in range(0, 550):
# t = 2 + (delta_t / 100)
# x, y = passaro_amarelo.calcular_posicao(t)
# print(' self.assert_passaro_posicao(%s, %s, ATIVO, passaro_amarelo, %s)' % (x, y, t))
def assert_posicao_vertical(self, y, tempo, passaro):
"""
Método auxiliar que mantém x fixo com valor 1, status Ativo, variando apenas o tempo e a posição y
Atenção, esse não é um teste porque não começa com prefixo "test"
"""
self.assert_passaro_posicao(1, y, ATIVO, passaro, tempo)
if __name__ == '__main__':
    teste = AtorTestes()
teste.teste_colisao_entre_atores_ativos()
| mit |
twiest/openshift-tools | ansible/roles/lib_statuspageio/library/statuspage_incident.py | 6 | 24082 | #!/usr/bin/env python # pylint: disable=too-many-lines
''' Ansible module '''
# ___ ___ _ _ ___ ___ _ _____ ___ ___
# / __| __| \| | __| _ \ /_\_ _| __| \
# | (_ | _|| .` | _|| / / _ \| | | _|| |) |
# \___|___|_|\_|___|_|_\/_/_\_\_|_|___|___/_ _____
# | \ / _ \ | \| |/ _ \_ _| | __| \_ _|_ _|
# | |) | (_) | | .` | (_) || | | _|| |) | | | |
# |___/ \___/ |_|\_|\___/ |_| |___|___/___| |_|
# vim: expandtab:tabstop=4:shiftwidth=4
# Copyright 2016 Red Hat Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Purpose: An ansible module to communicate with statuspageio.
DOCUMENTATION = '''
module: statuspage_incident
short_description: Create, modify, and idempotently manage statuspage incidents
description:
- Manage statuspage incidents
options:
api_key:
description:
- statuspage api key
required: True
default: os.environ.get('STATUSPAGE_API_KEY', '')
aliases: []
page_id:
description:
- The statuspage page
required: True
default: None
aliases: []
org_id:
description:
- Organization id for the user. Required when modifying users.
required: false
default: None
aliases: []
state:
description:
- Whether to create, update, delete, or list the desired object
required: True
default: present
aliases: []
name:
description:
- Name of the incident
required: false
default: None
aliases: []
unresolved_only:
description:
    - Return only unresolved incidents
required: false
default: None
aliases: []
scheduled_only:
description:
    - Return only scheduled incidents
required: false
default: None
aliases: []
incident_type:
description:
- The type of incident to create.
choices: ['realtime', 'scheduled', 'historical']
required: false
default: None
aliases: []
status:
description:
- The status of the incident.
choices: ['investigating', 'identified', 'monitoring', 'resolved', 'scheduled', 'in_progress', 'verifying', 'completed']
required: false
default: None
aliases: []
update_twitter:
description:
    - Whether to post the incident update to Twitter
required: false
default: False
aliases: []
msg:
description:
- The incident message that gets posted
required: false
default: None
aliases: []
impact_override:
description:
    - Override the computed impact of the incident
choices: ['none', 'minor', 'major', 'critical']
required: false
default: None
aliases: []
components:
description:
- An array of the components
required: false
default: None
aliases: []
scheduled_for:
description:
- The date when the maintenance will start
required: false
default: None
aliases: []
scheduled_until:
description:
- The date when the maintenance will end
required: false
default: None
aliases: []
scheduled_remind_prior:
description:
- Whether to remind the subscribers that the maintenance will begin
required: false
default: None
aliases: []
scheduled_auto_in_progress:
description:
- Whether to auto start the maintenance period and transition the status to in_progress
required: false
default: None
aliases: []
scheduled_auto_completed:
description:
- Whether to auto complete the maintenance period and transition the status to completed
required: false
default: None
aliases: []
'''
EXAMPLES = '''
# list incidents
- name: list incidents
statuspage_incident:
state: list
api_key: "{{ api_key }}"
org_id: "{{ org_id }}"
page_id: "{{ page_id }}"
register: incout
# create an incident
- name: create an incident
statuspage_incident:
api_key: "{{ api_key }}"
org_id: "{{ org_id }}"
page_id: "{{ page_id }}"
name: API Outage
message: Investigating an issue with the API
components:
- group: opstest
component:
- name: Master API
status: partial_outage
register: incout
- debug: var=incout
# create a scheduled maintenance incident
- name: create a scheduled incident
statuspage_incident:
api_key: "{{ api_key }}"
org_id: "{{ org_id }}"
page_id: "{{ page_id }}"
incident_type: scheduled
status: scheduled
name: Cluster upgrade
message: "Upgrading from 3.2 to 3.3."
components:
- group: opstest
component:
- name: Etcd Service
status: partial_outage
- name: Master API
status: partial_outage
scheduled_for: '2016-10-14T13:21:00-0400'
scheduled_until: '2016-10-14T13:25:00-0400'
scheduled_auto_in_progress: True
scheduled_remind_prior: True
register: incout
- debug: var=incout
#resolve an incident
- name: resolve an incident
statuspage_incident:
api_key: "{{ api_key }}"
org_id: "{{ org_id }}"
page_id: "{{ page_id }}"
status: resolved
name: API Outage
message: "Fixed and ready to go."
components:
- group: opstest
component:
- name: Master API
status: operational
register: incout
- debug: var=incout
'''
'''
Wrapper classes around the statuspageio client library
'''
# pylint: disable=too-many-lines
import os
# pylint: disable=import-error
import statuspageio
class StatusPageIOAPIError(Exception):
    '''Exception class for the statuspageio API'''
pass
# pylint: disable=too-few-public-methods
class StatusPageIOAPI(object):
    ''' Thin wrapper around the statuspageio client '''
def __init__(self,
api_key,
page_id,
org_id=None):
        ''' Constructor for StatusPageIOAPI '''
self.api_key = api_key
self.page_id = page_id
self.org_id = org_id
self.client = statuspageio.Client(api_key=self.api_key, page_id=self.page_id, organization_id=self.org_id)
def _get_incidents(self, scheduled=False, unresolved_only=False):
'''return a list of incidents'''
if unresolved_only:
return self.client.incidents.list_unresolved()
if scheduled:
return self.client.incidents.list_scheduled()
return self.client.incidents.list()
def _delete_component(self, compid):
'''delete a component'''
return self.client.components.delete(compid)
def _delete_incident(self, incid):
'''delete a incident'''
return self.client.incidents.delete(incid)
def _create_component(self, kwargs):
'''create a component'''
return self.client.components.create(**kwargs)
def _create_incident(self, kwargs, scheduled=False):
        '''create an incident'''
if scheduled:
return self.client.incidents.create_scheduled(**kwargs)
return self.client.incidents.create(**kwargs)
def _update_incident(self, incid, kwargs):
        '''update an incident'''
return self.client.incidents.update(incid, **kwargs)
def _get_components_by_name(self, names):
        '''return the components matching the given names'''
        components = self._get_components()
        # collect the components whose names match
tmp_comps = []
for comp in components:
if comp.name in names:
tmp_comps.append(comp)
return tmp_comps
def _get_components_by_group(self, group):
'''return the components in a specific group'''
components = self._get_components()
# first, find the parent component
tmp_comps = []
parent = None
for comp in components:
if group == comp.name:
parent = comp
tmp_comps.append(comp)
# now, find all subcomponents
for comp in components:
if comp.group_id == parent.id:
tmp_comps.append(comp)
return tmp_comps
def _get_components(self):
'''return components'''
return self.client.components.list()
def _update_component(self, cid, name=None, desc=None, status=None):
'''update a component'''
kwargs = {}
if name:
kwargs['name'] = name
if desc:
kwargs['desc'] = desc
if status:
kwargs['status'] = status
return self.client.components.update(cid, **kwargs)
# pylint: disable=too-many-instance-attributes
class StatusPageIncident(StatusPageIOAPI):
    ''' Idempotently manage a statuspage incident '''
kind = 'sa'
# pylint allows 5
# pylint: disable=too-many-arguments
def __init__(self,
api_key,
page_id,
name=None,
scheduled=None,
unresolved=None,
org_id=None,
incident_type='realtime',
status='investigating',
update_twitter=False,
message=None,
components=None,
scheduled_for=None,
scheduled_until=None,
scheduled_remind_prior=False,
scheduled_auto_in_progress=False,
scheduled_auto_completed=False,
verbose=False):
        ''' Constructor for StatusPageIncident '''
super(StatusPageIncident, self).__init__(api_key, page_id, org_id)
self.name = name
self.api_key = api_key
self.page_id = page_id
self.org_id = org_id
self.scheduled = scheduled
self.unresolved = unresolved
self.verbose = verbose
self.incidents = None
self.incident_type = incident_type
self.status = status
self.update_twitter = update_twitter
self.message = message
self.components = components
self.scheduled_for = scheduled_for
self.scheduled_until = scheduled_until
self.scheduled_remind_prior = scheduled_remind_prior
self.scheduled_auto_in_progress = scheduled_auto_in_progress
self.scheduled_auto_completed = scheduled_auto_completed
        if self.components is None:
self.components = {}
self._params = None
self._incidents = None
@property
def incidents(self):
        ''' property for incidents '''
if not self._incidents:
self._incidents = self.get()
return self._incidents
@incidents.setter
def incidents(self, data):
''' setter function for incidents var '''
self._incidents = data
@property
def params(self):
        ''' property for params '''
        if self._params is None:
self._params = self.build_params()
return self._params
@params.setter
def params(self, data):
''' setter function for params'''
self._params = data
def get(self):
'''return incidents'''
# unresolved? unscheduled?
incs = self._get_incidents(scheduled=self.scheduled, unresolved_only=self.unresolved)
if self.name:
r_incs = []
for inc in incs:
if self.name.lower() in inc.name.lower():
r_incs.append(inc)
else:
r_incs = incs
return r_incs
def delete(self):
'''delete the incident'''
found, _, _ = self.find_incident()
if len(found) == 1:
results = self._delete_incident(found[0].id)
for comp in found[0].incident_updates[-1].affected_components:
self.set_component_status(comp.keys()[0], name=None, desc=None, status='operational')
return results
else:
return False
def build_params(self):
'''build parameters for update or create'''
ids = []
r_comps = {}
for inc_comp in self.components:
            if inc_comp.get('group'):
comps = self._get_components_by_group(inc_comp['group'])
else:
comps = self._get_components_by_name([_comp['name'] for _comp in inc_comp['component']])
for comp in comps:
# only include the components in my passed in component list
if comp.name in [tmp_comp['name'] for tmp_comp in inc_comp['component']]:
ids.append(comp.id)
r_comps[comp.id] = comp
if self.components and not ids:
raise StatusPageIOAPIError('No components found.')
args = {'name': self.name,
'component_ids': ids,
'message': self.message,
'wants_twitter_update': self.update_twitter,
}
if self.status:
args['status'] = self.status
if self.incident_type == 'scheduled':
args['scheduled_for'] = self.scheduled_for
args['scheduled_until'] = self.scheduled_until
args['scheduled_remind_prior'] = self.scheduled_remind_prior
args['scheduled_auto_in_progress'] = self.scheduled_auto_in_progress
args['scheduled_auto_completed'] = self.scheduled_auto_completed
return {'params': args, 'comps': r_comps}
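    # Example of the components structure build_params consumes, mirroring the
    # EXAMPLES block above:
    #
    #     components = [{'group': 'opstest',
    #                    'component': [{'name': 'Master API',
    #                                   'status': 'partial_outage'}]}]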
def set_component_status(self, cid, name=None, desc=None, status=None):
'''update a component's status'''
return self._update_component(cid, name=name, desc=desc, status=status)
def create(self):
'''create the object'''
params = self.params['params']
comps = self.prepare_component_status(self.params['comps'])
scheduled = self.incident_type == 'scheduled'
results = self._create_incident(params, scheduled=scheduled)
for cid, comp in comps.items():
self.set_component_status(cid, name=None, desc=None, status=comp.status)
return results
def prepare_component_status(self, comps):
'''prepare the component status for update'''
# for each group
for inc_comp in self.components:
# for each component in this group
for tmp_comp in inc_comp['component']:
for ex_comp in comps.values():
if tmp_comp['name'] == ex_comp.name and tmp_comp.get('status', 'operational') != ex_comp.status:
ex_comp.status = tmp_comp.get('status', 'operational')
return comps
def update(self):
'''update the object'''
# need to update the tls information and the service name
found, params, comps = self.find_incident()
results = self._update_incident(found[0].id, kwargs=params)
comps = self.prepare_component_status(comps)
for cid, comp in comps.items():
self.set_component_status(cid, name=None, desc=None, status=comp.status)
return results
@staticmethod
def get_affected_components(aff_comps):
'''return a list of affected component ids'''
ids = []
if aff_comps and aff_comps.has_key('affected_components'):
for comp in aff_comps['affected_components']:
            # data structure appears to have changed recently (2017-12):
            # - if comp.code exists, use it as the component code
            # - if not, fall back to comp.keys()[0] for backwards compatibility
if 'code' in comp.keys():
ids.append(comp.code)
else:
ids.append(comp.keys()[0])
return ids
# skip false positive on "for incident in self.incidents:"
# pylint: disable=not-an-iterable
def find_incident(self):
'''attempt to match the incoming incident with existing incidents'''
params = self.params['params']
comps = self.params['comps']
found = []
for incident in self.incidents:
if incident.name == params['name'] and \
               incident.resolved_at is None and \
set(StatusPageIncident.get_affected_components(incident.incident_updates[-1])) == \
set(params['component_ids']):
# This could be the one!
found.append(incident)
return found, params, comps
def exists(self):
        ''' verify whether the incoming incident exists
        As per some discussion, this is a difficult task without
        a unique identifier on the incident.
        Decision: if an incident exists with the same components, and the components
        are in the same state as before, then we can say with some degree of
        confidence that this is the incident referred to by the caller.
'''
found, _, _ = self.find_incident()
if len(found) == 1:
return True
if len(found) == 0:
return False
raise StatusPageIOAPIError('Found %s instances matching your search. Please resolve this issue ids=[%s].' \
% (len(found), ', '.join([inc.id for inc in found])))
def needs_update(self):
''' verify an update is needed '''
# cannot update historical
if self.incident_type == 'historical':
return False
        # we need to check whether the current status matches what we are about to update
found, params, comps = self.find_incident()
# check incoming components status against existing
curr_incident = found[0]
# for each group
for comp in self.components:
if comp['component']:
# for each component in a group
for inc_comp in comp['component']:
# for each comp in the current existing incident
for ex_comp in comps.values():
if ex_comp.name == inc_comp['name']:
if ex_comp.status == inc_comp.get('status', 'operational'):
break
return True
                    # for/else: runs only if the inner loop never breaks, i.e. the
                    # component name was not found in the existing components
else:
return True
        # check that the message and status are unchanged
if params['message'] != curr_incident.incident_updates[-1].body or \
params['status'] != curr_incident.incident_updates[-1].status:
return True
if self.incident_type == 'scheduled':
if self.scheduled_for != params['scheduled_for'] or \
self.scheduled_until != params['scheduled_until'] or \
self.scheduled_remind_prior != params['scheduled_remind_prior'] or \
self.scheduled_auto_in_progress != params['scheduled_auto_in_progress'] or \
self.scheduled_auto_completed != params['scheduled_auto_completed']:
return True
return False
@staticmethod
def run_ansible(params):
'''run the idempotent actions'''
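        # state handling below: 'list' only reports, 'absent' deletes when the
        # incident exists, 'present' creates when missing or updates on drift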
spio = StatusPageIncident(params['api_key'],
params['page_id'],
params['name'],
params['scheduled_only'],
params['unresolved_only'],
params['org_id'],
params['incident_type'],
params['status'],
params['update_twitter'],
params['msg'],
params['components'],
params['scheduled_for'],
params['scheduled_until'],
params['scheduled_remind_prior'],
params['scheduled_auto_in_progress'],
params['scheduled_auto_completed'],
params['verbose'])
results = spio.get()
if params['state'] == 'list':
return {'changed': False, 'result': results}
elif params['state'] == 'absent':
if spio.exists():
results = spio.delete()
return {'changed': True, 'result': results}
else:
return {'changed': False, 'result': {}}
elif params['state'] == 'present':
if not spio.exists():
results = spio.create()
return {'changed': True, 'result': results}
elif spio.needs_update():
results = spio.update()
return {'changed': True, 'result': results}
return {'changed': False, 'result': results}
raise StatusPageIOAPIError('Unsupported state: %s' % params['state'])
def main():
'''
    ansible module for managing statuspage.io incidents
'''
module = AnsibleModule(
argument_spec=dict(
api_key=dict(default=os.environ.get('STATUSPAGE_API_KEY', ''), type='str'),
page_id=dict(default=None, type='str', required=True, ),
org_id=dict(default=None, type='str'),
state=dict(default='present', type='str',
choices=['present', 'absent', 'list']),
name=dict(default=None, type='str'),
unresolved_only=dict(default=False, type='bool'),
scheduled_only=dict(default=False, type='bool'),
incident_type=dict(default='realtime', choices=['scheduled', 'realtime', 'historical'], type='str'),
status=dict(default='investigating',
choices=['investigating', 'identified', 'monitoring', 'resolved',
'scheduled', 'in_progress', 'verifying', 'completed'],
type='str'),
update_twitter=dict(default=False, type='bool'),
msg=dict(default=None, type='str'),
impact_override=dict(default=None, choices=['none', 'minor', 'major', 'critical'], type='str'),
components=dict(default=None, type='list'),
scheduled_for=dict(default=None, type='str'),
scheduled_until=dict(default=None, type='str'),
scheduled_remind_prior=dict(default=False, type='bool'),
scheduled_auto_in_progress=dict(default=False, type='bool'),
scheduled_auto_completed=dict(default=False, type='bool'),
verbose=dict(default=False, type='bool'),
),
supports_check_mode=True,
required_if=[['incident_type', 'scheduled', ['scheduled_for', 'scheduled_until']]],
)
if module.params['incident_type'] == 'scheduled':
        if module.params['status'] not in ['scheduled', 'in_progress', 'verifying', 'completed']:
            module.fail_json(msg='If incident type is scheduled, then status must be one of ' +
                             'scheduled|in_progress|verifying|completed')
    elif module.params['incident_type'] == 'realtime':
        if module.params['status'] not in ['investigating', 'identified', 'monitoring', 'resolved']:
            module.fail_json(msg='If incident type is realtime, then status must be one of' +
                             ' investigating|identified|monitoring|resolved')
results = StatusPageIncident.run_ansible(module.params)
module.exit_json(**results)
# pylint: disable=redefined-builtin, unused-wildcard-import, wildcard-import, locally-disabled
# import module snippets. These are required
if __name__ == '__main__':
from ansible.module_utils.basic import *
main()
| apache-2.0 |
olivierdalang/QGIS | tests/src/python/test_qgsmaplayerstore.py | 30 | 20567 | # -*- coding: utf-8 -*-
"""QGIS Unit tests for QgsMapLayerStore.
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
__author__ = 'Nyall Dawson'
__date__ = '2017-05'
__copyright__ = 'Copyright 2017, The QGIS Project'
import os
from qgis.core import (
QgsMapLayerStore,
QgsVectorLayer,
QgsMapLayer,
QgsDataProvider,
QgsProject,
QgsReadWriteContext,
)
from qgis.testing import start_app, unittest
from qgis.PyQt.QtCore import QT_VERSION_STR
from qgis.PyQt import sip
from qgis.PyQt.QtTest import QSignalSpy
from qgis.PyQt.QtXml import QDomDocument, QDomNode
from time import sleep
from utilities import unitTestDataPath
start_app()
TEST_DATA_DIR = unitTestDataPath()
def createLayer(name):
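    # helper: builds an in-memory point layer with a single string field "x"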
return QgsVectorLayer("Point?field=x:string", name, "memory")
class TestQgsMapLayerStore(unittest.TestCase):
def setUp(self):
pass
def test_addMapLayer(self):
""" test adding individual map layers to store"""
store = QgsMapLayerStore()
l1 = createLayer('test')
self.assertEqual(store.addMapLayer(l1), l1)
self.assertEqual(len(store.mapLayersByName('test')), 1)
self.assertEqual(store.count(), 1)
self.assertEqual(len(store), 1)
# adding a second layer should leave existing layers intact
l2 = createLayer('test2')
self.assertEqual(store.addMapLayer(l2), l2)
self.assertEqual(len(store.mapLayersByName('test')), 1)
self.assertEqual(len(store.mapLayersByName('test2')), 1)
self.assertEqual(store.count(), 2)
self.assertEqual(len(store), 2)
def test_addMapLayerAlreadyAdded(self):
""" test that already added layers can't be readded to store """
store = QgsMapLayerStore()
l1 = createLayer('test')
store.addMapLayer(l1)
self.assertEqual(len(store.mapLayersByName('test')), 1)
self.assertEqual(store.count(), 1)
self.assertEqual(store.addMapLayer(l1), None)
self.assertEqual(len(store.mapLayersByName('test')), 1)
self.assertEqual(store.count(), 1)
self.assertEqual(len(store), 1)
def test_addMapLayerInvalid(self):
""" test that invalid map layers can't be added to store """
store = QgsMapLayerStore()
vl = QgsVectorLayer("Point?field=x:string", 'test', "xxx")
self.assertEqual(store.addMapLayer(vl), vl)
self.assertEqual(len(store.mapLayersByName('test')), 1)
self.assertEqual(store.count(), 1)
self.assertEqual(store.validCount(), 0)
def test_addMapLayerSignals(self):
""" test that signals are correctly emitted when adding map layer"""
store = QgsMapLayerStore()
layer_was_added_spy = QSignalSpy(store.layerWasAdded)
layers_added_spy = QSignalSpy(store.layersAdded)
l1 = createLayer('test')
store.addMapLayer(l1)
# can't seem to actually test the data which was emitted, so best we can do is test
# the signal count
self.assertEqual(len(layer_was_added_spy), 1)
self.assertEqual(len(layers_added_spy), 1)
store.addMapLayer(createLayer('test2'))
self.assertEqual(len(layer_was_added_spy), 2)
self.assertEqual(len(layers_added_spy), 2)
        # try re-adding a layer already in the store
store.addMapLayer(l1)
# should be no extra signals emitted
self.assertEqual(len(layer_was_added_spy), 2)
self.assertEqual(len(layers_added_spy), 2)
def test_addMapLayers(self):
""" test adding multiple map layers to store """
store = QgsMapLayerStore()
l1 = createLayer('test')
l2 = createLayer('test2')
self.assertEqual(set(store.addMapLayers([l1, l2])), {l1, l2})
self.assertEqual(len(store.mapLayersByName('test')), 1)
self.assertEqual(len(store.mapLayersByName('test2')), 1)
self.assertEqual(store.count(), 2)
# adding more layers should leave existing layers intact
l3 = createLayer('test3')
l4 = createLayer('test4')
self.assertEqual(set(store.addMapLayers([l3, l4])), {l3, l4})
self.assertEqual(len(store.mapLayersByName('test')), 1)
self.assertEqual(len(store.mapLayersByName('test2')), 1)
self.assertEqual(len(store.mapLayersByName('test3')), 1)
self.assertEqual(len(store.mapLayersByName('test4')), 1)
self.assertEqual(store.count(), 4)
store.removeAllMapLayers()
def test_addMapLayersInvalid(self):
""" test that invalid map layers can be added to store """
store = QgsMapLayerStore()
vl = QgsVectorLayer("Point?field=x:string", 'test', "xxx")
self.assertEqual(store.addMapLayers([vl]), [vl])
self.assertEqual(len(store.mapLayersByName('test')), 1)
self.assertEqual(store.count(), 1)
self.assertEqual(store.validCount(), 0)
def test_addMapLayersAlreadyAdded(self):
""" test that already added layers can't be readded to store """
store = QgsMapLayerStore()
l1 = createLayer('test')
self.assertEqual(store.addMapLayers([l1]), [l1])
self.assertEqual(len(store.mapLayersByName('test')), 1)
self.assertEqual(store.count(), 1)
self.assertEqual(store.addMapLayers([l1]), [])
self.assertEqual(len(store.mapLayersByName('test')), 1)
self.assertEqual(store.count(), 1)
def test_addMapLayersSignals(self):
""" test that signals are correctly emitted when adding map layers"""
store = QgsMapLayerStore()
layer_was_added_spy = QSignalSpy(store.layerWasAdded)
layers_added_spy = QSignalSpy(store.layersAdded)
l1 = createLayer('test')
l2 = createLayer('test2')
store.addMapLayers([l1, l2])
# can't seem to actually test the data which was emitted, so best we can do is test
# the signal count
self.assertEqual(len(layer_was_added_spy), 2)
self.assertEqual(len(layers_added_spy), 1)
store.addMapLayers([createLayer('test3'), createLayer('test4')])
self.assertEqual(len(layer_was_added_spy), 4)
self.assertEqual(len(layers_added_spy), 2)
        # try re-adding a layer already in the store
store.addMapLayers([l1, l2])
# should be no extra signals emitted
self.assertEqual(len(layer_was_added_spy), 4)
self.assertEqual(len(layers_added_spy), 2)
def test_mapLayerById(self):
""" test retrieving map layer by ID """
store = QgsMapLayerStore()
# test no crash with empty store
self.assertEqual(store.mapLayer('bad'), None)
self.assertEqual(store.mapLayer(None), None)
l1 = createLayer('test')
l2 = createLayer('test2')
store.addMapLayers([l1, l2])
self.assertEqual(store.mapLayer('bad'), None)
self.assertEqual(store.mapLayer(None), None)
self.assertEqual(store.mapLayer(l1.id()), l1)
self.assertEqual(store.mapLayer(l2.id()), l2)
def test_mapLayersByName(self):
""" test retrieving map layer by name """
store = QgsMapLayerStore()
# test no crash with empty store
self.assertEqual(store.mapLayersByName('bad'), [])
self.assertEqual(store.mapLayersByName(None), [])
l1 = createLayer('test')
l2 = createLayer('test2')
store.addMapLayers([l1, l2])
self.assertEqual(store.mapLayersByName('bad'), [])
self.assertEqual(store.mapLayersByName(None), [])
self.assertEqual(store.mapLayersByName('test'), [l1])
self.assertEqual(store.mapLayersByName('test2'), [l2])
# duplicate name
        # a bit of a hack: layer IDs are currently time-based, so wait briefly to avoid a duplicate ID
sleep(0.1)
l3 = createLayer('test')
store.addMapLayer(l3)
self.assertEqual(set(store.mapLayersByName('test')), {l1, l3})
def test_mapLayers(self):
""" test retrieving map layers list """
store = QgsMapLayerStore()
# test no crash with empty store
self.assertEqual(store.mapLayers(), {})
l1 = createLayer('test')
l2 = createLayer('test2')
store.addMapLayers([l1, l2])
self.assertEqual(store.mapLayers(), {l1.id(): l1, l2.id(): l2})
def test_removeMapLayersById(self):
""" test removing map layers by ID """
store = QgsMapLayerStore()
# test no crash with empty store
store.removeMapLayersById(['bad'])
store.removeMapLayersById([None])
l1 = createLayer('test')
l2 = createLayer('test2')
l3 = createLayer('test3')
store.addMapLayers([l1, l2, l3])
self.assertEqual(store.count(), 3)
# remove bad layers
store.removeMapLayersById(['bad'])
self.assertEqual(store.count(), 3)
store.removeMapLayersById([None])
self.assertEqual(store.count(), 3)
# remove valid layers
l1_id = l1.id()
store.removeMapLayersById([l1_id])
self.assertEqual(store.count(), 2)
# double remove
store.removeMapLayersById([l1_id])
self.assertEqual(store.count(), 2)
# test that layer has been deleted
self.assertTrue(sip.isdeleted(l1))
# remove multiple
store.removeMapLayersById([l2.id(), l3.id()])
self.assertEqual(store.count(), 0)
self.assertTrue(sip.isdeleted(l2))
# try removing a layer not in the store
l4 = createLayer('test4')
store.removeMapLayersById([l4.id()])
self.assertFalse(sip.isdeleted(l4))
def test_removeMapLayersByLayer(self):
""" test removing map layers by layer"""
store = QgsMapLayerStore()
# test no crash with empty store
store.removeMapLayers([None])
l1 = createLayer('test')
l2 = createLayer('test2')
l3 = createLayer('test3')
store.addMapLayers([l1, l2, l3])
self.assertEqual(store.count(), 3)
# remove bad layers
store.removeMapLayers([None])
self.assertEqual(store.count(), 3)
# remove valid layers
store.removeMapLayers([l1])
self.assertEqual(store.count(), 2)
# test that layer has been deleted
self.assertTrue(sip.isdeleted(l1))
# remove multiple
store.removeMapLayers([l2, l3])
self.assertEqual(store.count(), 0)
self.assertTrue(sip.isdeleted(l2))
self.assertTrue(sip.isdeleted(l3))
def test_removeMapLayerById(self):
""" test removing a map layer by ID """
store = QgsMapLayerStore()
# test no crash with empty store
store.removeMapLayer('bad')
store.removeMapLayer(None)
l1 = createLayer('test')
l2 = createLayer('test2')
store.addMapLayers([l1, l2])
self.assertEqual(store.count(), 2)
# remove bad layers
store.removeMapLayer('bad')
self.assertEqual(store.count(), 2)
store.removeMapLayer(None)
self.assertEqual(store.count(), 2)
# remove valid layers
l1_id = l1.id()
store.removeMapLayer(l1_id)
self.assertEqual(store.count(), 1)
# double remove
store.removeMapLayer(l1_id)
self.assertEqual(store.count(), 1)
# test that layer has been deleted
self.assertTrue(sip.isdeleted(l1))
# remove second layer
store.removeMapLayer(l2.id())
self.assertEqual(store.count(), 0)
self.assertTrue(sip.isdeleted(l2))
# try removing a layer not in the store
l3 = createLayer('test3')
store.removeMapLayer(l3.id())
self.assertFalse(sip.isdeleted(l3))
def test_removeMapLayerByLayer(self):
""" test removing a map layer by layer """
store = QgsMapLayerStore()
# test no crash with empty store
store.removeMapLayer('bad')
store.removeMapLayer(None)
l1 = createLayer('test')
l2 = createLayer('test2')
store.addMapLayers([l1, l2])
self.assertEqual(store.count(), 2)
# remove bad layers
store.removeMapLayer(None)
self.assertEqual(store.count(), 2)
l3 = createLayer('test3')
store.removeMapLayer(l3)
self.assertEqual(store.count(), 2)
# remove valid layers
store.removeMapLayer(l1)
self.assertEqual(store.count(), 1)
# test that layer has been deleted
self.assertTrue(sip.isdeleted(l1))
# remove second layer
store.removeMapLayer(l2)
self.assertEqual(store.count(), 0)
self.assertTrue(sip.isdeleted(l2))
# try removing a layer not in the store
l3 = createLayer('test3')
store.removeMapLayer(l3)
self.assertFalse(sip.isdeleted(l3))
def test_removeAllMapLayers(self):
""" test removing all map layers from store """
store = QgsMapLayerStore()
l1 = createLayer('test')
l2 = createLayer('test2')
store.addMapLayers([l1, l2])
self.assertEqual(store.count(), 2)
store.removeAllMapLayers()
self.assertEqual(store.count(), 0)
self.assertEqual(store.mapLayersByName('test'), [])
self.assertEqual(store.mapLayersByName('test2'), [])
def test_addRemoveLayersSignals(self):
""" test that signals are correctly emitted when removing map layers"""
store = QgsMapLayerStore()
layers_will_be_removed_spy = QSignalSpy(store.layersWillBeRemoved)
layer_will_be_removed_spy_str = QSignalSpy(store.layerWillBeRemoved[str])
layer_will_be_removed_spy_layer = QSignalSpy(store.layerWillBeRemoved[QgsMapLayer])
layers_removed_spy = QSignalSpy(store.layersRemoved)
layer_removed_spy = QSignalSpy(store.layerRemoved)
remove_all_spy = QSignalSpy(store.allLayersRemoved)
l1 = createLayer('l1')
l2 = createLayer('l2')
l3 = createLayer('l3')
l4 = createLayer('l4')
store.addMapLayers([l1, l2, l3, l4])
# remove 1 layer
store.removeMapLayer(l1)
# can't seem to actually test the data which was emitted, so best we can do is test
# the signal count
self.assertEqual(len(layers_will_be_removed_spy), 1)
self.assertEqual(len(layer_will_be_removed_spy_str), 1)
self.assertEqual(len(layer_will_be_removed_spy_layer), 1)
self.assertEqual(len(layers_removed_spy), 1)
self.assertEqual(len(layer_removed_spy), 1)
self.assertEqual(len(remove_all_spy), 0)
self.assertEqual(store.count(), 3)
# remove 2 layers at once
store.removeMapLayersById([l2.id(), l3.id()])
self.assertEqual(len(layers_will_be_removed_spy), 2)
self.assertEqual(len(layer_will_be_removed_spy_str), 3)
self.assertEqual(len(layer_will_be_removed_spy_layer), 3)
self.assertEqual(len(layers_removed_spy), 2)
self.assertEqual(len(layer_removed_spy), 3)
self.assertEqual(len(remove_all_spy), 0)
self.assertEqual(store.count(), 1)
# remove all
store.removeAllMapLayers()
self.assertEqual(len(layers_will_be_removed_spy), 3)
self.assertEqual(len(layer_will_be_removed_spy_str), 4)
self.assertEqual(len(layer_will_be_removed_spy_layer), 4)
self.assertEqual(len(layers_removed_spy), 3)
self.assertEqual(len(layer_removed_spy), 4)
self.assertEqual(len(remove_all_spy), 1)
# remove some layers which aren't in the store
store.removeMapLayersById(['asdasd'])
self.assertEqual(len(layers_will_be_removed_spy), 3)
self.assertEqual(len(layer_will_be_removed_spy_str), 4)
self.assertEqual(len(layer_will_be_removed_spy_layer), 4)
self.assertEqual(len(layers_removed_spy), 3)
self.assertEqual(len(layer_removed_spy), 4)
self.assertEqual(len(remove_all_spy), 1)
l5 = createLayer('test5')
store.removeMapLayer(l5)
self.assertEqual(len(layers_will_be_removed_spy), 3)
self.assertEqual(len(layer_will_be_removed_spy_str), 4)
self.assertEqual(len(layer_will_be_removed_spy_layer), 4)
self.assertEqual(len(layers_removed_spy), 3)
self.assertEqual(len(layer_removed_spy), 4)
self.assertEqual(len(remove_all_spy), 1)
def test_RemoveLayerShouldNotSegFault(self):
store = QgsMapLayerStore()
# Should not segfault
store.removeMapLayersById(['not_exists'])
store.removeMapLayer('not_exists2')
        # check also that the removal of a nonexistent layer does not insert a null layer
for k, layer in list(store.mapLayers().items()):
assert (layer is not None)
def testTakeLayer(self):
# test taking ownership of a layer from the store
l1 = createLayer('l1')
l2 = createLayer('l2')
store = QgsMapLayerStore()
# add one layer to store
store.addMapLayer(l1)
self.assertEqual(store.mapLayers(), {l1.id(): l1})
self.assertEqual(l1.parent(), store)
# try taking some layers which don't exist in store
self.assertFalse(store.takeMapLayer(None))
self.assertFalse(store.takeMapLayer(l2))
# but l2 should still exist..
self.assertTrue(l2.isValid())
# take layer from store
self.assertEqual(store.takeMapLayer(l1), l1)
self.assertFalse(store.mapLayers()) # no layers left
# but l1 should still exist
self.assertTrue(l1.isValid())
# layer should have no parent now
self.assertFalse(l1.parent())
# destroy store
store = None
self.assertTrue(l1.isValid())
def testTransferLayers(self):
# test transferring all layers from another store
store1 = QgsMapLayerStore()
store2 = QgsMapLayerStore()
# empty stores
store1.transferLayersFromStore(store2)
# silly behavior checks
store1.transferLayersFromStore(None)
store1.transferLayersFromStore(store1)
l1 = createLayer('l1')
l2 = createLayer('l2')
store1.addMapLayer(l1)
store1.addMapLayer(l2)
l3 = createLayer('l3')
store2.addMapLayer(l3)
store2.transferLayersFromStore(store1)
self.assertFalse(store1.mapLayers()) # no layers left
self.assertEqual(len(store2.mapLayers()), 3)
self.assertEqual(store2.mapLayers(), {l1.id(): l1, l2.id(): l2, l3.id(): l3})
store1.transferLayersFromStore(store2)
self.assertFalse(store2.mapLayers()) # no layers left
self.assertEqual(len(store1.mapLayers()), 3)
self.assertEqual(store1.mapLayers(), {l1.id(): l1, l2.id(): l2, l3.id(): l3})
def testLayerDataSourceReset(self):
"""When adding a layer with the same id to the store make sure
the data source is also updated in case the layer validity has
changed from False to True"""
p = QgsProject()
store = p.layerStore()
vl1 = createLayer('valid')
vl2 = QgsVectorLayer('/not_a_valid_path.shp', 'invalid', 'ogr')
self.assertTrue(vl1.isValid())
self.assertFalse(vl2.isValid())
store.addMapLayers([vl1, vl2])
self.assertEqual(store.validCount(), 1)
self.assertEqual(len(store.mapLayers()), 2)
# Re-add the bad layer
store.addMapLayers([vl2])
self.assertEqual(store.validCount(), 1)
self.assertEqual(len(store.mapLayers()), 2)
doc = QDomDocument()
doc.setContent(
'<maplayer><provider encoding="UTF-8">ogr</provider><layername>fixed</layername><id>%s</id></maplayer>' % vl2.id())
layer_node = QDomNode(doc.firstChild())
self.assertTrue(vl2.writeXml(layer_node, doc, QgsReadWriteContext()))
datasource_node = doc.createElement("datasource")
datasource_node.appendChild(doc.createTextNode(os.path.join(TEST_DATA_DIR, 'points.shp')))
layer_node.appendChild(datasource_node)
p.readLayer(layer_node)
self.assertEqual(store.validCount(), 2)
self.assertEqual(len(store.mapLayers()), 2)
self.assertEqual(store.mapLayers()[vl2.id()].name(), 'fixed')
if __name__ == '__main__':
unittest.main()
| gpl-2.0 |
stewartsmith/bzr | bzrlib/tests/blackbox/test_view.py | 2 | 5051 | # Copyright (C) 2008 Canonical Ltd
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
"""Tests for the view command"""
from bzrlib import bzrdir
from bzrlib.tests import TestCaseWithTransport
from bzrlib.workingtree import WorkingTree
class TestViewUI(TestCaseWithTransport):
def test_view_command_help(self):
out, err = self.run_bzr('help view')
self.assertContainsRe(out, 'Manage filtered views')
def test_define_view(self):
wt = self.make_branch_and_tree('.')
# Check definition of a new view
out, err = self.run_bzr('view a b c')
self.assertEquals(out, "Using 'my' view: a, b, c\n")
out, err = self.run_bzr('view e f --name foo')
self.assertEquals(out, "Using 'foo' view: e, f\n")
# Check re-definition of an existing view
out, err = self.run_bzr('view p q')
self.assertEquals(out, "Using 'foo' view: p, q\n")
out, err = self.run_bzr('view r s --name my')
self.assertEquals(out, "Using 'my' view: r, s\n")
# Check attempts to define the 'off' view are prevented
out, err = self.run_bzr('view a --name off', retcode=3)
self.assertContainsRe(err, "Cannot change the 'off' pseudo view")
def test_list_view(self):
wt = self.make_branch_and_tree('.')
# Check list of the current view
out, err = self.run_bzr('view')
self.assertEquals(out, "No current view.\n")
self.run_bzr('view a b c')
out, err = self.run_bzr('view')
self.assertEquals(out, "'my' view is: a, b, c\n")
# Check list of a named view
self.run_bzr('view e f --name foo')
out, err = self.run_bzr('view --name my')
self.assertEquals(out, "'my' view is: a, b, c\n")
out, err = self.run_bzr('view --name foo')
self.assertEquals(out, "'foo' view is: e, f\n")
# Check list of all views
out, err = self.run_bzr('view --all')
self.assertEquals(out.splitlines(), [
"Views defined:",
"=> foo e, f",
" my a, b, c",
])
# Check list of an unknown view
out, err = self.run_bzr('view --name bar', retcode=3)
self.assertContainsRe(err, "No such view")
def test_delete_view(self):
wt = self.make_branch_and_tree('.')
# Check delete of the current view
out, err = self.run_bzr('view --delete', retcode=3)
self.assertContainsRe(err, "No current view to delete")
self.run_bzr('view a b c')
out, err = self.run_bzr('view --delete')
self.assertEquals(out, "Deleted 'my' view.\n")
# Check delete of a named view
self.run_bzr('view e f --name foo')
out, err = self.run_bzr('view --name foo --delete')
self.assertEquals(out, "Deleted 'foo' view.\n")
# Check delete of all views
out, err = self.run_bzr('view --delete --all')
self.assertEquals(out, "Deleted all views.\n")
# Check delete of an unknown view
out, err = self.run_bzr('view --delete --name bar', retcode=3)
self.assertContainsRe(err, "No such view")
# Check bad usage is reported to the user
out, err = self.run_bzr('view --delete --switch x', retcode=3)
self.assertContainsRe(err,
"Both --delete and --switch specified")
out, err = self.run_bzr('view --delete a b c', retcode=3)
self.assertContainsRe(err, "Both --delete and a file list specified")
def test_switch_view(self):
wt = self.make_branch_and_tree('.')
# Check switching to a named view
self.run_bzr('view a b c')
self.run_bzr('view e f --name foo')
out, err = self.run_bzr('view --switch my')
self.assertEquals(out, "Using 'my' view: a, b, c\n")
# Check switching off the current view does not delete it
out, err = self.run_bzr('view --switch off')
self.assertEquals(out, "Disabled 'my' view.\n")
# Check error reporting when attempt to switch off again
out, err = self.run_bzr('view --switch off', retcode=3)
self.assertContainsRe(err, "No current view to disable")
# Check bad usage is reported to the user
out, err = self.run_bzr('view --switch x --all', retcode=3)
self.assertContainsRe(err, "Both --switch and --all specified")
| gpl-2.0 |
GenericStudent/home-assistant | tests/components/plex/const.py | 8 | 1384 | """Constants used by Plex tests."""
from homeassistant.components.media_player import DOMAIN as MP_DOMAIN
from homeassistant.components.plex import const
from homeassistant.const import (
CONF_CLIENT_ID,
CONF_HOST,
CONF_PORT,
CONF_TOKEN,
CONF_URL,
CONF_VERIFY_SSL,
)
MOCK_SERVERS = [
{
CONF_HOST: "1.2.3.4",
CONF_PORT: 32400,
const.CONF_SERVER: "Plex Server 1",
const.CONF_SERVER_IDENTIFIER: "unique_id_123",
},
{
CONF_HOST: "4.3.2.1",
CONF_PORT: 32400,
const.CONF_SERVER: "Plex Server 2",
const.CONF_SERVER_IDENTIFIER: "unique_id_456",
},
]
MOCK_USERS = {
"Owner": {"enabled": True},
"b": {"enabled": True},
"c": {"enabled": True},
}
MOCK_TOKEN = "secret_token"
DEFAULT_DATA = {
const.CONF_SERVER: MOCK_SERVERS[0][const.CONF_SERVER],
const.PLEX_SERVER_CONFIG: {
CONF_CLIENT_ID: "00000000-0000-0000-0000-000000000000",
CONF_TOKEN: MOCK_TOKEN,
CONF_URL: f"https://{MOCK_SERVERS[0][CONF_HOST]}:{MOCK_SERVERS[0][CONF_PORT]}",
CONF_VERIFY_SSL: True,
},
const.CONF_SERVER_IDENTIFIER: MOCK_SERVERS[0][const.CONF_SERVER_IDENTIFIER],
}
DEFAULT_OPTIONS = {
MP_DOMAIN: {
const.CONF_IGNORE_NEW_SHARED_USERS: False,
const.CONF_MONITORED_USERS: MOCK_USERS,
const.CONF_USE_EPISODE_ART: False,
}
}
| apache-2.0 |
bala4901/odoo | addons/report_webkit/company.py | 431 | 2562 | # -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (c) 2010 Camptocamp SA (http://www.camptocamp.com)
# All Right Reserved
#
# Author : Nicolas Bessi (Camptocamp)
#
# WARNING: This program as such is intended to be used by professional
# programmers who take the whole responsability of assessing all potential
# consequences resulting from its eventual inadequacies and bugs
# End users who are looking for a ready-to-use solution with commercial
# garantees and support are strongly adviced to contract a Free Software
# Service Company
#
# This program is Free Software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
#
##############################################################################
from openerp.osv import fields, osv
class res_company(osv.osv):
"""Override company to add Header object link a company can have many header and logos"""
_inherit = "res.company"
_columns = {
'header_image' : fields.many2many(
'ir.header_img',
'company_img_rel',
'company_id',
'img_id',
'Available Images',
),
'header_webkit' : fields.many2many(
'ir.header_webkit',
'company_html_rel',
'company_id',
'html_id',
'Available html',
),
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
rajegannathan/grasp-lift-eeg-cat-dog-solution-updated | python-packages/hyperopt-0.0.2/hyperopt/tests/test_pchoice.py | 6 | 4524 | from functools import partial
import numpy as np
import unittest
from sklearn import datasets
from hyperopt import hp, Trials, fmin, tpe, rand
import hyperopt.pyll.stochastic
class TestPChoice(unittest.TestCase):
def test_basic(self):
space = hp.pchoice('naive_type',
[(.14, 'gaussian'),
(.02, 'multinomial'),
(.84, 'bernoulli')])
a, b, c = 0, 0, 0
rng = np.random.RandomState(123)
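        # expected counts for 1000 draws are roughly 140/20/840 (weights .14/.02/.84);
        # the assertions below leave room for sampling noise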
for i in range(0, 1000):
nesto = hyperopt.pyll.stochastic.sample(space, rng=rng)
if nesto == 'gaussian':
a += 1
elif nesto == 'multinomial':
b += 1
elif nesto == 'bernoulli':
c += 1
print(a, b, c)
assert a + b + c == 1000
assert 120 < a < 160
assert 0 < b < 40
assert 800 < c < 900
def test_basic2(self):
space = hp.choice('normal_choice', [
hp.pchoice('fsd',
[(.1, 'first'),
(.8, 'second'),
(.1, 2)]),
hp.choice('something_else', [10, 20])
])
a, b, c = 0, 0, 0
        rng = np.random.RandomState(123)
for i in range(0, 1000):
nesto = hyperopt.pyll.stochastic.sample(space, rng=rng)
if nesto == 'first':
a += 1
elif nesto == 'second':
b += 1
elif nesto == 2:
c += 1
elif nesto in (10, 20):
pass
else:
assert 0, nesto
print(a, b, c)
assert b > 2 * a
assert b > 2 * c
def test_basic3(self):
space = hp.pchoice('something', [
(.2, hp.pchoice('number', [(.8, 2), (.2, 1)])),
(.8, hp.pchoice('number1', [(.7, 5), (.3, 6)]))
])
a, b, c, d = 0, 0, 0, 0
rng = np.random.RandomState(123)
for i in range(0, 2000):
nesto = hyperopt.pyll.stochastic.sample(space, rng=rng)
if nesto == 2:
a += 1
elif nesto == 1:
b += 1
elif nesto == 5:
c += 1
elif nesto == 6:
d += 1
else:
assert 0, nesto
print(a, b, c, d)
assert a + b + c + d == 2000
assert 300 < a + b < 500
assert 1500 < c + d < 1700
assert a * .3 > b # a * 1.2 > 4 * b
assert c * 3 * 1.2 > d * 7
class TestSimpleFMin(unittest.TestCase):
    # test that a space with a pchoice in it is
# (a) accepted by various algos and
# (b) handled correctly.
#
def setUp(self):
self.space = hp.pchoice('a', [
(.1, 0),
(.2, 1),
(.3, 2),
(.4, 3)])
self.trials = Trials()
def objective(self, a):
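        # loss is 0 only for a == 3, so a good optimizer should favour that arm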
        return [1, 1, 1, 0][a]
def test_random(self):
        # test that a space with a pchoice in it is
        # (a) accepted by rand.suggest and
# (b) handled correctly.
N = 50
fmin(self.objective,
space=self.space,
trials=self.trials,
algo=rand.suggest,
max_evals=N)
a_vals = [t['misc']['vals']['a'][0] for t in self.trials.trials]
counts = np.bincount(a_vals)
        print(counts)
assert counts[3] > N * .35
assert counts[3] < N * .60
def test_tpe(self):
N = 100
fmin(self.objective,
space=self.space,
trials=self.trials,
algo=partial(tpe.suggest, n_startup_jobs=10),
max_evals=N)
a_vals = [t['misc']['vals']['a'][0] for t in self.trials.trials]
counts = np.bincount(a_vals)
        print(counts)
assert counts[3] > N * .6
def test_bug1_rand():
space = hp.choice('preprocess_choice', [
{'pwhiten': hp.pchoice('whiten_randomPCA',
[(.3, False), (.7, True)])},
{'palgo': False},
{'pthree': 7}])
best = fmin(fn=lambda x: 1,
space=space,
algo=rand.suggest,
max_evals=50)
def test_bug1_tpe():
space = hp.choice('preprocess_choice', [
{'pwhiten': hp.pchoice('whiten_randomPCA',
[(.3, False), (.7, True)])},
{'palgo': False},
{'pthree': 7}])
best = fmin(fn=lambda x: 1,
space=space,
algo=tpe.suggest,
max_evals=50)
| bsd-3-clause |
noba3/KoTos | addons/plugin.video.mega/resources/lib/platform_libraries/Linux/32bit/Crypto/SelfTest/Hash/test_SHA256.py | 11 | 3224 | # -*- coding: utf-8 -*-
#
# SelfTest/Hash/SHA256.py: Self-test for the SHA-256 hash function
#
# Written in 2008 by Dwayne C. Litzenberger <[email protected]>
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
"""Self-test suite for Crypto.Hash.SHA256"""
__revision__ = "$Id$"
import unittest
class LargeSHA256Test(unittest.TestCase):
def runTest(self):
"""SHA256: 512/520 MiB test"""
from Crypto.Hash import SHA256
zeros = '\0' * (1024*1024)
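        # 512 MiB == 2**32 bits, i.e. the point where a 32-bit message-length
        # counter would overflow, so this run crosses that boundary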
h = SHA256.new(zeros)
for i in xrange(511):
h.update(zeros)
# This test vector is from PyCrypto's old testdata.py file.
self.assertEqual('9acca8e8c22201155389f65abbf6bc9723edc7384ead80503839f49dcc56d767', h.hexdigest()) # 512 MiB
for i in xrange(8):
h.update(zeros)
# This test vector is from PyCrypto's old testdata.py file.
self.assertEqual('abf51ad954b246009dfe5a50ecd582fd5b8f1b8b27f30393853c3ef721e7fa6e', h.hexdigest()) # 520 MiB
def get_tests(config={}):
# Test vectors from FIPS PUB 180-2
# This is a list of (expected_result, input[, description]) tuples.
test_data = [
# FIPS PUB 180-2, B.1 - "One-Block Message"
('ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad',
'abc'),
# FIPS PUB 180-2, B.2 - "Multi-Block Message"
('248d6a61d20638b8e5c026930c3e6039a33ce45964ff2167f6ecedd419db06c1',
'abcdbcdecdefdefgefghfghighijhijkijkljklmklmnlmnomnopnopq'),
# FIPS PUB 180-2, B.3 - "Long Message"
('cdc76e5c9914fb9281a1c7e284d73e67f1809a48a497200e046d39ccc7112cd0',
'a' * 10**6,
'"a" * 10**6'),
# Test for an old PyCrypto bug.
('f7fd017a3c721ce7ff03f3552c0813adcc48b7f33f07e5e2ba71e23ea393d103',
'This message is precisely 55 bytes long, to test a bug.',
'Length = 55 (mod 64)'),
]
from Crypto.Hash import SHA256
from common import make_hash_tests
tests = make_hash_tests(SHA256, "SHA256", test_data)
if config.get('slow_tests'):
tests += [LargeSHA256Test()]
return tests
if __name__ == '__main__':
import unittest
suite = lambda: unittest.TestSuite(get_tests())
unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
| gpl-2.0 |
ros2/ci | tools/get_coverage_ros2_pkg.py | 1 | 4862 | #!/usr/bin/env python3
import argparse
import os
import requests
import subprocess
import sys
import tempfile
# Compare if a list is inside other list in order
# From here: https://stackoverflow.com/a/20789669
def is_slice_in_list(s, l):
len_s = len(s)
return any(s == l[i:len_s + i] for i in range(len(l) - len_s + 1))
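# e.g. is_slice_in_list([2, 3], [1, 2, 3, 4]) -> True, but is_slice_in_list([3, 2], [1, 2, 3, 4]) -> False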
def get_src_path(package_name, colcon_ws):
cmd = ['colcon', 'list', '--paths-only', '--base-paths', colcon_ws, '--packages-select', package_name]
try:
path = subprocess.check_output(cmd).decode('ascii').strip()
except subprocess.CalledProcessError as e:
print(e.output, file=sys.stderr)
sys.exit(-1)
if not path:
        print('Package not found: ' + package_name, file=sys.stderr)
sys.exit(-1)
assert len(path.splitlines()) == 1, 'Found more than one line returned by colcon list'
return path
if __name__ == '__main__':
parser = argparse.ArgumentParser()
    parser.add_argument('jenkins_coverage_build', help='URL of a ci.ros2.org build using coverage (e.g. https://ci.ros2.org/job/ci_linux_coverage/182)')
    parser.add_argument('ros_package', help='ROS package name to get the coverage rate for (e.g. rcutils)')
args = parser.parse_args()
input_url = args.jenkins_coverage_build
input_pkg = args.ros_package
r = requests.get(url=input_url + '/cobertura/api/json?depth=3')
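    # the Jenkins Cobertura plugin serves per-package, per-metric stats as JSON;
    # depth=3 pulls in the nested 'elements' metric lists used further down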
if r.status_code != 200:
print('Wrong input URL ' + input_url, file=sys.stderr)
sys.exit(-1)
# Create colcon workspace, checkout sources
with tempfile.TemporaryDirectory() as ros2_ws_path:
ros2_repos_path = os.path.join(ros2_ws_path, 'ros2.repos')
ros2_repos = requests.get('https://raw.githubusercontent.com/ros2/ros2/master/ros2.repos')
if ros2_repos.status_code != requests.codes.ok:
print('Failed to download ros2.repos file', file=sys.stderr)
sys.exit(-1)
with open(ros2_repos_path, 'wb') as file:
file.write(ros2_repos.content)
cmd = ['vcs', 'import', ros2_ws_path, '--shallow', '--retry', '5', '--input', ros2_repos_path]
try:
print('Getting ros2.repos sources to get packages source paths. Please wait')
subprocess.check_output(cmd)
except subprocess.CalledProcessError as e:
print(e.output, file=sys.stderr)
sys.exit(-1)
        # Get the path of the package source code, relative to the ROS workspace
ros_package_path_in_ws = get_src_path(input_pkg, ros2_ws_path)
input_pkg_rel_path = os.path.relpath(ros_package_path_in_ws, ros2_ws_path).split(os.path.sep)
coverage_entries = r.json()['results']['children']
total_lines_under_testing = 0
total_lines_tested = 0
for e in coverage_entries:
if e['name'] == '.':
continue
# e has children, elements or name
entry_name = e['name'].replace("'", '')
# search for the Lines coverage method
lines_coverage = None
for coverage_method in e['elements']:
if coverage_method['name'] == 'Lines':
lines_coverage = coverage_method
assert lines_coverage, 'Did not find Lines metric in coverage'
name_parts = entry_name.split('.')
if len(name_parts) == 1:
package_under_cov = name_parts[0]
elif name_parts[0].startswith('test'):
# no interest in test code
continue
elif name_parts[0].startswith('install'):
# integration/system testing, out by now
continue
elif name_parts[0].startswith('build'):
# in build the first part is always the ROS package name
package_under_cov = name_parts[1]
elif is_slice_in_list(input_pkg_rel_path, name_parts):
# source code: check if coverage entry contains exactly the source path
package_under_cov = input_pkg
else:
package_under_cov = 'NOT-FOUND'
if package_under_cov == input_pkg:
total_lines_under_testing += lines_coverage['denominator']
total_lines_tested += lines_coverage['numerator']
print(f' * {entry_name}'
f" [{lines_coverage['ratio']:.2f}] --"
f" {lines_coverage['numerator']:.0f}/{lines_coverage['denominator']:.0f}")
if total_lines_under_testing == 0:
print(f'No coverage found for package {input_pkg}', file=sys.stderr)
sys.exit(-1)
print(f'\nCombined unit testing for {input_pkg}:'
f' {total_lines_tested / total_lines_under_testing * 100:.2f}%'
f' {total_lines_tested:.0f}/{total_lines_under_testing:.0f}')
| apache-2.0 |
menzenski/tagger-tester | razmetka/tag/tag.py | 2 | 3178 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import codecs
import os
import subprocess32
from nltk.tag.stanford import StanfordPOSTagger
from .config import DATA_DIR_NAME, PATH_TO_DATA_DIR
from .files import write_to_directory
class FilePair(object):
"""Pair of files: one for training and one for testing."""
def __init__(self, idx, testfile, trainfile, separator='_', props=''):
"""Initialize the TrainingSuite object.
Parameters
----------
idx (int) : index number to keep files straight
testfile (str) : filename containing the file to tag/test
trainfile (str) : filename containing the training file
separator (basestring) : the character used to separate words
from their POS tags in the training file and the output.
Default is underscore '_'; slash '/' is also common.
props (str) : prefix for naming/saving the properties file
"""
# one-digit numbers should be prefaced with a leading zero
self.idx = str(idx).rjust(2, '0')
self.testfile = testfile
self.trainfile = trainfile
if props == '':
self.props_name = 'props_{}.props'.format(self.idx)
else:
self.props_name = '{}{}.props'.format(props, self.idx)
# self.all_files = file_dict
self.sep = separator
self.prop_template = (
"model = {p_model}\n"
"trainFile = {p_train_file}\n"
"tagSeparator = {p_tag_separator}\n"
"encoding = {p_encoding}\n"
"verbose = {p_verbose}\n"
"verboseResults = {p_verbose_results}\n"
"tokenize = {p_tokenize}\n"
"arch = {p_arch}\n"
"learnClosedClassTags = {p_learn_closed_class_tags}\n"
"closedClassTagThreshold = {p_closed_class_tag_threshold}\n"
)
def write_props(self, props_name=None, model=None, train_file=None,
tag_separator=None, encoding="UTF-8", verbose="true",
verbose_results="true", tokenize="false", arch="generic",
learn_closed_class_tags='', closed_class_tag_threshold=5):
"""Write a props file to disk."""
        if props_name is None:
            props_name = self.props_name
        if model is None:
            model_name = 'model_{}.model'.format(self.idx)
            model = os.path.join(PATH_TO_DATA_DIR, model_name)
        if train_file is None:
            train_file = os.path.join(PATH_TO_DATA_DIR, self.trainfile)
        if tag_separator is None:
            tag_separator = self.sep
output_string = self.prop_template.format(
p_model=model, p_train_file=train_file,
p_tag_separator=tag_separator, p_encoding=encoding,
p_verbose=verbose, p_verbose_results=verbose_results,
p_tokenize=tokenize, p_arch=arch,
p_learn_closed_class_tags=learn_closed_class_tags,
p_closed_class_tag_threshold=closed_class_tag_threshold
)
write_to_directory(dir_name=DATA_DIR_NAME, file_name=props_name,
a_string=output_string)
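# Hypothetical usage sketch (file names assumed):
# FilePair(1, 'test_01.txt', 'train_01.txt').write_props() renders the template
# above into props_01.props inside the configured data directory.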
| mit |
jesseditson/rethinkdb | test/rql_test/connections/http_support/flask/testsuite/ext.py | 563 | 5156 | # -*- coding: utf-8 -*-
"""
flask.testsuite.ext
~~~~~~~~~~~~~~~~~~~
Tests the extension import thing.
:copyright: (c) 2011 by Armin Ronacher.
:license: BSD, see LICENSE for more details.
"""
import sys
import unittest
try:
from imp import reload as reload_module
except ImportError:
reload_module = reload
from flask.testsuite import FlaskTestCase
from flask._compat import PY2
class ExtImportHookTestCase(FlaskTestCase):
def setup(self):
# we clear this out for various reasons. The most important one is
# that a real flaskext could be in there which would disable our
# fake package. Secondly we want to make sure that the flaskext
# import hook does not break on reloading.
for entry, value in list(sys.modules.items()):
if (entry.startswith('flask.ext.') or
entry.startswith('flask_') or
entry.startswith('flaskext.') or
entry == 'flaskext') and value is not None:
sys.modules.pop(entry, None)
from flask import ext
reload_module(ext)
# reloading must not add more hooks
import_hooks = 0
for item in sys.meta_path:
cls = type(item)
if cls.__module__ == 'flask.exthook' and \
cls.__name__ == 'ExtensionImporter':
import_hooks += 1
self.assert_equal(import_hooks, 1)
def teardown(self):
from flask import ext
for key in ext.__dict__:
self.assert_not_in('.', key)
def test_flaskext_new_simple_import_normal(self):
from flask.ext.newext_simple import ext_id
self.assert_equal(ext_id, 'newext_simple')
def test_flaskext_new_simple_import_module(self):
from flask.ext import newext_simple
self.assert_equal(newext_simple.ext_id, 'newext_simple')
self.assert_equal(newext_simple.__name__, 'flask_newext_simple')
def test_flaskext_new_package_import_normal(self):
from flask.ext.newext_package import ext_id
self.assert_equal(ext_id, 'newext_package')
def test_flaskext_new_package_import_module(self):
from flask.ext import newext_package
self.assert_equal(newext_package.ext_id, 'newext_package')
self.assert_equal(newext_package.__name__, 'flask_newext_package')
def test_flaskext_new_package_import_submodule_function(self):
from flask.ext.newext_package.submodule import test_function
self.assert_equal(test_function(), 42)
def test_flaskext_new_package_import_submodule(self):
from flask.ext.newext_package import submodule
self.assert_equal(submodule.__name__, 'flask_newext_package.submodule')
self.assert_equal(submodule.test_function(), 42)
def test_flaskext_old_simple_import_normal(self):
from flask.ext.oldext_simple import ext_id
self.assert_equal(ext_id, 'oldext_simple')
def test_flaskext_old_simple_import_module(self):
from flask.ext import oldext_simple
self.assert_equal(oldext_simple.ext_id, 'oldext_simple')
self.assert_equal(oldext_simple.__name__, 'flaskext.oldext_simple')
def test_flaskext_old_package_import_normal(self):
from flask.ext.oldext_package import ext_id
self.assert_equal(ext_id, 'oldext_package')
def test_flaskext_old_package_import_module(self):
from flask.ext import oldext_package
self.assert_equal(oldext_package.ext_id, 'oldext_package')
self.assert_equal(oldext_package.__name__, 'flaskext.oldext_package')
def test_flaskext_old_package_import_submodule(self):
from flask.ext.oldext_package import submodule
self.assert_equal(submodule.__name__, 'flaskext.oldext_package.submodule')
self.assert_equal(submodule.test_function(), 42)
def test_flaskext_old_package_import_submodule_function(self):
from flask.ext.oldext_package.submodule import test_function
self.assert_equal(test_function(), 42)
def test_flaskext_broken_package_no_module_caching(self):
for x in range(2):
with self.assert_raises(ImportError):
import flask.ext.broken
def test_no_error_swallowing(self):
try:
import flask.ext.broken
except ImportError:
exc_type, exc_value, tb = sys.exc_info()
self.assert_true(exc_type is ImportError)
if PY2:
message = 'No module named missing_module'
else:
message = 'No module named \'missing_module\''
self.assert_equal(str(exc_value), message)
self.assert_true(tb.tb_frame.f_globals is globals())
# reraise() adds a second frame so we need to skip that one too.
# On PY3 we even have another one :(
next = tb.tb_next.tb_next
if not PY2:
next = next.tb_next
self.assert_in('flask_broken/__init__.py', next.tb_frame.f_code.co_filename)
def suite():
suite = unittest.TestSuite()
suite.addTest(unittest.makeSuite(ExtImportHookTestCase))
return suite
| agpl-3.0 |
saurabh6790/omnitech-lib | website/doctype/blog_post/blog_post.py | 34 | 3425 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import webnotes
import webnotes.webutils
from webnotes.webutils import WebsiteGenerator, cleanup_page_name
from webnotes import _
from webnotes.utils import today
class DocType(WebsiteGenerator):
def __init__(self, d, dl):
self.doc, self.doclist = d, dl
def autoname(self):
self.doc.name = cleanup_page_name(self.doc.title)
def validate(self):
if self.doc.blog_intro:
self.doc.blog_intro = self.doc.blog_intro[:140]
if self.doc.published and not self.doc.published_on:
self.doc.published_on = today()
# update posts
webnotes.conn.sql("""update tabBlogger set posts=(select count(*) from `tabBlog Post`
where ifnull(blogger,'')=tabBlogger.name)
where name=%s""", self.doc.blogger)
def on_update(self):
WebsiteGenerator.on_update(self)
webnotes.webutils.delete_page_cache("writers")
def get_context(self):
import webnotes.utils
import markdown2
		# extra precaution: normally this code won't be reached if not published
if not webnotes.utils.cint(self.doc.published):
raise Exception, "This blog has not been published yet!"
# temp fields
from webnotes.utils import global_date_format, get_fullname
self.doc.full_name = get_fullname(self.doc.owner)
self.doc.updated = global_date_format(self.doc.published_on)
if self.doc.blogger:
self.doc.blogger_info = webnotes.doc("Blogger", self.doc.blogger).fields
self.doc.description = self.doc.blog_intro or self.doc.content[:140]
self.doc.meta_description = self.doc.description
self.doc.categories = webnotes.conn.sql_list("select name from `tabBlog Category` order by name")
self.doc.comment_list = webnotes.conn.sql("""\
select comment, comment_by_fullname, creation
from `tabComment` where comment_doctype="Blog Post"
and comment_docname=%s order by creation""", self.doc.name, as_dict=1) or []
def clear_blog_cache():
for blog in webnotes.conn.sql_list("""select page_name from
`tabBlog Post` where ifnull(published,0)=1"""):
webnotes.webutils.delete_page_cache(blog)
webnotes.webutils.delete_page_cache("writers")
@webnotes.whitelist(allow_guest=True)
def get_blog_list(start=0, by=None, category=None):
import webnotes
condition = ""
if by:
condition = " and t1.blogger='%s'" % by.replace("'", "\'")
if category:
condition += " and t1.blog_category='%s'" % category.replace("'", "\'")
query = """\
select
t1.title, t1.name, t1.page_name, t1.published_on as creation,
ifnull(t1.blog_intro, t1.content) as content,
t2.full_name, t2.avatar, t1.blogger,
(select count(name) from `tabComment` where
comment_doctype='Blog Post' and comment_docname=t1.name) as comments
from `tabBlog Post` t1, `tabBlogger` t2
where ifnull(t1.published,0)=1
and t1.blogger = t2.name
%(condition)s
order by published_on desc, name asc
limit %(start)s, 20""" % {"start": start, "condition": condition}
result = webnotes.conn.sql(query, as_dict=1)
# strip html tags from content
import webnotes.utils
for res in result:
from webnotes.utils import global_date_format
res['published'] = global_date_format(res['creation'])
if not res['content']:
res['content'] = webnotes.webutils.get_html(res['page_name'])
res['content'] = res['content'][:140]
return result
| mit |
arju88nair/projectCulminate | venv/lib/python3.5/site-packages/mccabe.py | 15 | 10693 | """ Meager code path measurement tool.
Ned Batchelder
http://nedbatchelder.com/blog/200803/python_code_complexity_microtool.html
MIT License.
"""
from __future__ import with_statement
import optparse
import sys
import tokenize
from collections import defaultdict
try:
import ast
from ast import iter_child_nodes
except ImportError: # Python 2.5
from flake8.util import ast, iter_child_nodes
__version__ = '0.6.1'
class ASTVisitor(object):
"""Performs a depth-first walk of the AST."""
def __init__(self):
self.node = None
self._cache = {}
def default(self, node, *args):
for child in iter_child_nodes(node):
self.dispatch(child, *args)
def dispatch(self, node, *args):
self.node = node
klass = node.__class__
meth = self._cache.get(klass)
if meth is None:
className = klass.__name__
meth = getattr(self.visitor, 'visit' + className, self.default)
self._cache[klass] = meth
return meth(node, *args)
def preorder(self, tree, visitor, *args):
"""Do preorder walk of tree using visitor"""
self.visitor = visitor
visitor.visit = self.dispatch
self.dispatch(tree, *args) # XXX *args make sense?
class PathNode(object):
def __init__(self, name, look="circle"):
self.name = name
self.look = look
def to_dot(self):
print('node [shape=%s,label="%s"] %d;' % (
self.look, self.name, self.dot_id()))
def dot_id(self):
return id(self)
class PathGraph(object):
def __init__(self, name, entity, lineno, column=0):
self.name = name
self.entity = entity
self.lineno = lineno
self.column = column
self.nodes = defaultdict(list)
def connect(self, n1, n2):
self.nodes[n1].append(n2)
# Ensure that the destination node is always counted.
self.nodes[n2] = []
def to_dot(self):
print('subgraph {')
for node in self.nodes:
node.to_dot()
for node, nexts in self.nodes.items():
for next in nexts:
print('%s -- %s;' % (node.dot_id(), next.dot_id()))
print('}')
def complexity(self):
""" Return the McCabe complexity for the graph.
        E - N + 2
"""
num_edges = sum([len(n) for n in self.nodes.values()])
num_nodes = len(self.nodes)
return num_edges - num_nodes + 2
class PathGraphingAstVisitor(ASTVisitor):
""" A visitor for a parsed Abstract Syntax Tree which finds executable
statements.
"""
def __init__(self):
super(PathGraphingAstVisitor, self).__init__()
self.classname = ""
self.graphs = {}
self.reset()
def reset(self):
self.graph = None
self.tail = None
def dispatch_list(self, node_list):
for node in node_list:
self.dispatch(node)
def visitFunctionDef(self, node):
if self.classname:
entity = '%s%s' % (self.classname, node.name)
else:
entity = node.name
name = '%d:%d: %r' % (node.lineno, node.col_offset, entity)
if self.graph is not None:
# closure
pathnode = self.appendPathNode(name)
self.tail = pathnode
self.dispatch_list(node.body)
bottom = PathNode("", look='point')
self.graph.connect(self.tail, bottom)
self.graph.connect(pathnode, bottom)
self.tail = bottom
else:
self.graph = PathGraph(name, entity, node.lineno, node.col_offset)
pathnode = PathNode(name)
self.tail = pathnode
self.dispatch_list(node.body)
self.graphs["%s%s" % (self.classname, node.name)] = self.graph
self.reset()
visitAsyncFunctionDef = visitFunctionDef
def visitClassDef(self, node):
old_classname = self.classname
self.classname += node.name + "."
self.dispatch_list(node.body)
self.classname = old_classname
def appendPathNode(self, name):
if not self.tail:
return
pathnode = PathNode(name)
self.graph.connect(self.tail, pathnode)
self.tail = pathnode
return pathnode
def visitSimpleStatement(self, node):
if node.lineno is None:
lineno = 0
else:
lineno = node.lineno
name = "Stmt %d" % lineno
self.appendPathNode(name)
def default(self, node, *args):
if isinstance(node, ast.stmt):
self.visitSimpleStatement(node)
else:
super(PathGraphingAstVisitor, self).default(node, *args)
def visitLoop(self, node):
name = "Loop %d" % node.lineno
self._subgraph(node, name)
visitAsyncFor = visitFor = visitWhile = visitLoop
def visitIf(self, node):
name = "If %d" % node.lineno
self._subgraph(node, name)
def _subgraph(self, node, name, extra_blocks=()):
"""create the subgraphs representing any `if` and `for` statements"""
if self.graph is None:
# global loop
self.graph = PathGraph(name, name, node.lineno, node.col_offset)
pathnode = PathNode(name)
self._subgraph_parse(node, pathnode, extra_blocks)
self.graphs["%s%s" % (self.classname, name)] = self.graph
self.reset()
else:
pathnode = self.appendPathNode(name)
self._subgraph_parse(node, pathnode, extra_blocks)
def _subgraph_parse(self, node, pathnode, extra_blocks):
"""parse the body and any `else` block of `if` and `for` statements"""
loose_ends = []
self.tail = pathnode
self.dispatch_list(node.body)
loose_ends.append(self.tail)
for extra in extra_blocks:
self.tail = pathnode
self.dispatch_list(extra.body)
loose_ends.append(self.tail)
if node.orelse:
self.tail = pathnode
self.dispatch_list(node.orelse)
loose_ends.append(self.tail)
else:
loose_ends.append(pathnode)
if pathnode:
bottom = PathNode("", look='point')
for le in loose_ends:
self.graph.connect(le, bottom)
self.tail = bottom
def visitTryExcept(self, node):
name = "TryExcept %d" % node.lineno
self._subgraph(node, name, extra_blocks=node.handlers)
visitTry = visitTryExcept
def visitWith(self, node):
name = "With %d" % node.lineno
self.appendPathNode(name)
self.dispatch_list(node.body)
visitAsyncWith = visitWith
class McCabeChecker(object):
"""McCabe cyclomatic complexity checker."""
name = 'mccabe'
version = __version__
_code = 'C901'
_error_tmpl = "C901 %r is too complex (%d)"
max_complexity = -1
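    # flake8 plugin hooks: add_options/parse_options register --max-complexity,
    # and run() yields (lineno, col, text, type) tuples for each violation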
def __init__(self, tree, filename):
self.tree = tree
@classmethod
def add_options(cls, parser):
flag = '--max-complexity'
kwargs = {
'default': -1,
'action': 'store',
'type': 'int',
'help': 'McCabe complexity threshold',
'parse_from_config': 'True',
}
config_opts = getattr(parser, 'config_options', None)
if isinstance(config_opts, list):
# Flake8 2.x
kwargs.pop('parse_from_config')
parser.add_option(flag, **kwargs)
parser.config_options.append('max-complexity')
else:
parser.add_option(flag, **kwargs)
@classmethod
def parse_options(cls, options):
cls.max_complexity = int(options.max_complexity)
def run(self):
if self.max_complexity < 0:
return
visitor = PathGraphingAstVisitor()
visitor.preorder(self.tree, visitor)
for graph in visitor.graphs.values():
if graph.complexity() > self.max_complexity:
text = self._error_tmpl % (graph.entity, graph.complexity())
yield graph.lineno, graph.column, text, type(self)
def get_code_complexity(code, threshold=7, filename='stdin'):
try:
tree = compile(code, filename, "exec", ast.PyCF_ONLY_AST)
except SyntaxError:
e = sys.exc_info()[1]
sys.stderr.write("Unable to parse %s: %s\n" % (filename, e))
return 0
complx = []
McCabeChecker.max_complexity = threshold
for lineno, offset, text, check in McCabeChecker(tree, filename).run():
complx.append('%s:%d:1: %s' % (filename, lineno, text))
if len(complx) == 0:
return 0
print('\n'.join(complx))
return len(complx)
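# Hedged sketch: get_code_complexity() prints each violation and returns the
# violation count, so it can double as an exit status, e.g.:
#
#     with open('app.py') as f:
#         sys.exit(get_code_complexity(f.read(), threshold=10, filename='app.py'))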
def get_module_complexity(module_path, threshold=7):
"""Returns the complexity of a module"""
with open(module_path, "rU") as mod:
code = mod.read()
return get_code_complexity(code, threshold, filename=module_path)
def _read(filename):
    """Read the source code, detecting the declared encoding on Python 3."""
    if (2, 5) < sys.version_info < (3, 0):
        with open(filename, 'rU') as f:
            return f.read()
    elif (3, 0) <= sys.version_info < (4, 0):
        try:
            with open(filename, 'rb') as f:
                (encoding, _) = tokenize.detect_encoding(f.readline)
        except (LookupError, SyntaxError, UnicodeError):
            # Fall back to latin-1 if the file encoding is improperly declared
            with open(filename, encoding='latin-1') as f:
                return f.read()
        with open(filename, 'r', encoding=encoding) as f:
            return f.read()
def main(argv=None):
if argv is None:
argv = sys.argv[1:]
opar = optparse.OptionParser()
opar.add_option("-d", "--dot", dest="dot",
help="output a graphviz dot file", action="store_true")
opar.add_option("-m", "--min", dest="threshold",
help="minimum complexity for output", type="int",
default=1)
options, args = opar.parse_args(argv)
code = _read(args[0])
tree = compile(code, args[0], "exec", ast.PyCF_ONLY_AST)
visitor = PathGraphingAstVisitor()
visitor.preorder(tree, visitor)
if options.dot:
print('graph {')
for graph in visitor.graphs.values():
if (not options.threshold or
graph.complexity() >= options.threshold):
graph.to_dot()
print('}')
else:
for graph in visitor.graphs.values():
if graph.complexity() >= options.threshold:
print(graph.name, graph.complexity())
if __name__ == '__main__':
main(sys.argv[1:])
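# Hedged examples, assuming this module is saved as mccabe.py and Graphviz is
# installed for the second form:
#     python mccabe.py --min 5 target.py            # prints "<name> <complexity>"
#     python mccabe.py --dot target.py | dot -Tpng -o cfg.png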
| apache-2.0 |
technologiescollege/s2a_fr | s2a/Python/Lib/test/inspect_fodder2.py | 213 | 1279 | # line 1
def wrap(foo=None):
def wrapper(func):
return func
return wrapper
# line 7
def replace(func):
def insteadfunc():
print 'hello'
return insteadfunc
# line 13
@wrap()
@wrap(wrap)
def wrapped():
pass
# line 19
@replace
def gone():
pass
# line 24
oll = lambda m: m
# line 27
tll = lambda g: g and \
g and \
g
# line 32
tlli = lambda d: d and \
d
# line 36
def onelinefunc(): pass
# line 39
def manyargs(arg1, arg2,
arg3, arg4): pass
# line 43
def twolinefunc(m): return m and \
m
# line 47
a = [None,
lambda x: x,
None]
# line 52
def setfunc(func):
globals()["anonymous"] = func
setfunc(lambda x, y: x*y)
# line 57
def with_comment(): # hello
world
# line 61
multiline_sig = [
lambda (x,
y): x+y,
None,
]
# line 68
def func69():
class cls70:
def func71():
pass
return cls70
extra74 = 74
# line 76
def func77(): pass
(extra78, stuff78) = 'xy'
extra79 = 'stop'
# line 81
class cls82:
def func83(): pass
(extra84, stuff84) = 'xy'
extra85 = 'stop'
# line 87
def func88():
# comment
return 90
# line 92
def f():
class X:
def g():
"doc"
return 42
return X
method_in_dynamic_class = f().g.im_func
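# NOTE: this file is inspect-module test fodder: it is intentionally Python 2
# (print statement, tuple-parameter lambda, im_func), and its "# line N"
# comments are meant to track physical line numbers, so its oddities should
# not be "fixed".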
| gpl-3.0 |
zasdfgbnm/tensorflow | tensorflow/python/eager/function_test.py | 1 | 23337 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import function
from tensorflow.python.eager import tape
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import errors
from tensorflow.python.framework import function as tf_function
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
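# The tests below cover function.defun, which (roughly) traces the wrapped
# Python function into a graph function on its first call and reuses that
# trace on later calls, and function.make_defun_op, which traces eagerly for
# one set of concrete example inputs and exposes output_shapes/output_dtypes.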
class FunctionTest(test.TestCase):
def testBasic(self):
matmul = function.defun(math_ops.matmul)
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
sq = matmul(t, t, transpose_a=True)
sq2 = matmul(sq, t, transpose_a=True)
self.assertAllEqual(sq.numpy().reshape(-1), [10, 14, 14, 20])
self.assertAllEqual(sq2.numpy().reshape(-1), [52, 76, 74, 108])
def testBasicGraphMode(self):
matmul = function.defun(math_ops.matmul)
@function.defun
def sq(a):
return matmul(a, a)
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
out = sq(t)
self.assertAllEqual(out, math_ops.matmul(t, t).numpy())
def testNestedInputsGraphMode(self):
matmul = function.defun(math_ops.matmul)
pair = collections.namedtuple('pair', ['a', 'b'])
@function.defun
def a_times_b(inputs):
return matmul(inputs.a['a'], inputs.b['b'])
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
out = a_times_b(pair({'a': t}, {'b': t}))
self.assertAllEqual(out, math_ops.matmul(t, t).numpy())
def testGraphModeWithGradients(self):
v = resource_variable_ops.ResourceVariable(1.0, name='v')
@function.defun
def step():
def inner():
return v * v
return backprop.implicit_grad(inner)()[0][0]
self.assertAllEqual(step(), 2.0)
def testBasicDefunOpGraphMode(self):
matmul = function.defun(math_ops.matmul)
def sq(a):
return matmul(a, a)
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
sq_op = function.make_defun_op(sq, t)
self.assertEqual(sq_op.output_shapes, tensor_shape.TensorShape([2, 2]))
out = sq_op(t)
self.assertAllEqual(out, math_ops.matmul(t, t).numpy())
def testNestedInputsDefunOpGraphMode(self):
matmul = function.defun(math_ops.matmul)
pair = collections.namedtuple('pair', ['a', 'b'])
def a_times_b(inputs):
return matmul(inputs.a['a'], inputs.b['b'])
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
inputs = pair({'a': t}, {'b': t})
sq_op = function.make_defun_op(a_times_b, inputs)
self.assertEqual(sq_op.output_shapes, tensor_shape.TensorShape([2, 2]))
out = sq_op(inputs)
self.assertAllEqual(out, math_ops.matmul(t, t).numpy())
def testNestedOutputDefunOpGraphMode(self):
matmul = function.defun(math_ops.matmul)
def sq(a):
return (matmul(a, a), {'b': constant_op.constant(1.0)})
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
sq_op = function.make_defun_op(sq, t)
self.assertEqual(sq_op.output_shapes,
(tensor_shape.TensorShape([2, 2]),
{'b': tensor_shape.TensorShape([])}))
self.assertEqual(sq_op.output_dtypes,
(dtypes.float32, {'b': dtypes.float32}))
(a, b) = sq_op(t)
self.assertAllEqual(a, math_ops.matmul(t, t).numpy())
self.assertAllEqual(b['b'].numpy(), 1.0)
def testDefunOpGraphModeWithGradients(self):
v = resource_variable_ops.ResourceVariable(1.0, name='v')
def step():
def inner():
return v * v
return backprop.implicit_grad(inner)()[0][0]
step_op = function.make_defun_op(step)
self.assertEqual(step_op.output_dtypes, dtypes.float32)
self.assertEqual(step_op.output_shapes, tensor_shape.TensorShape([]))
self.assertAllEqual(step_op(), 2.0)
def testDefunOpGraphModeNoneOutput(self):
def fn(unused_a, unused_b):
return None
x = constant_op.constant(1)
fn_op = function.make_defun_op(fn, x, x)
self.assertEqual(fn_op.output_dtypes, None)
self.assertEqual(fn_op.output_shapes, None)
self.assertAllEqual(fn_op(x, x), None)
def testDefunReadVariable(self):
v = resource_variable_ops.ResourceVariable(1.0)
@function.defun
def f():
return v.read_value()
self.assertEqual(1.0, float(f()))
def testDefunAssignAddVariable(self):
v = resource_variable_ops.ResourceVariable(1.0)
@function.defun
def f():
v.assign_add(2.0)
return v.read_value()
self.assertEqual(3.0, float(f()))
def testDefunShapeInferenceWithCapturedResourceVariable(self):
v = resource_variable_ops.ResourceVariable([[1, 2], [3, 4]])
def f():
x = constant_op.constant([[1, 2], [3, 4]])
out = math_ops.matmul(v, x)
self.assertEqual(out.get_shape(), tensor_shape.TensorShape([2, 2]))
compiled = function.defun(f)
compiled()
def testDefunShapeInferenceWithCapturedResourceVariableInGraphMode(self):
with context.graph_mode():
v = resource_variable_ops.ResourceVariable([[1, 2], [3, 4]])
def f():
x = constant_op.constant([[1, 2], [3, 4]])
out = math_ops.matmul(v, x)
self.assertEqual(out.get_shape(), tensor_shape.TensorShape([2, 2]))
compiled = function.defun(f)
compiled()
def testDefunShapeInferenceWithCapturedVariableInGraphMode(self):
with context.graph_mode():
v = variables.Variable([[1, 2], [3, 4]])
def f():
x = constant_op.constant([[1, 2], [3, 4]])
out = math_ops.matmul(v, x)
self.assertEqual(out.get_shape(), tensor_shape.TensorShape([2, 2]))
# Check that shape inference works while creating the defun
compiled = function.defun(f)
compiled()
def testDefunDifferentiable(self):
v = resource_variable_ops.ResourceVariable(1.0)
@function.defun
def f():
return v * v
self.assertAllEqual(backprop.implicit_grad(f)()[0][0], 2.0)
def testDefunCanBeDifferentiatedTwice(self):
v = resource_variable_ops.ResourceVariable(1.0)
@function.defun
def f():
return v * v
self.assertAllEqual(backprop.implicit_grad(f)()[0][0], 2.0)
# Ensure that v is watched again.
self.assertAllEqual(backprop.implicit_grad(f)()[0][0], 2.0)
def testGraphModeCaptureVariable(self):
with context.graph_mode(), self.test_session() as sess:
class HasAVar(object):
def __init__(self):
self.v = resource_variable_ops.ResourceVariable(1.0)
def call(self):
return self.v * 2
o = HasAVar()
variables.global_variables_initializer().run()
call = function.defun(o.call)
op = call()
self.assertAllEqual(sess.run(op), 2.0)
def testGraphModeManyFunctions(self):
with context.graph_mode(), self.test_session():
@function.defun
def f(x):
return x * x
@function.defun
def g(x):
return f(x) + 1
self.assertAllEqual(g(constant_op.constant(2.0)).eval(), 5.0)
def testDict(self):
@function.defun
def f(x):
return {'name': x + 1}
self.assertAllEqual(f(constant_op.constant(1.0))['name'], 2.0)
def testTensorConversionWithDefun(self):
@function.defun
def f(x):
return math_ops.add(x, constant_op.constant(3))
self.assertAllEqual(5, f(constant_op.constant(2)))
def testTensorConversionCall(self):
@function.defun
def f(x):
return math_ops.add(x, constant_op.constant(3))
@function.defun
def g(x):
return f(f(x))
self.assertAllEqual(8, g(constant_op.constant(2)))
def testDefunCallBackprop(self):
@function.defun
def f(x):
return math_ops.add(x, x)
@function.defun
def g(x):
return backprop.gradients_function(f, [0])(x)[0]
self.assertAllEqual(2, g(constant_op.constant(2)))
def testGraphModeEagerGradError(self):
with context.graph_mode():
def f():
x = variable_scope.get_variable(
'v', initializer=constant_op.constant(1.0))
return x * constant_op.constant(2.0)
with self.assertRaisesRegexp(ValueError,
'No trainable variables were accessed'):
backprop.implicit_val_and_grad(f)()
def testDefunCallBackpropUsingSameObjectForMultipleArguments(self):
@function.defun
def g(x):
return backprop.gradients_function(math_ops.multiply, [0, 1])(x, x)
def np_g(x):
return [d.numpy() for d in g(x)]
x = constant_op.constant(1.)
self.assertAllEqual([1., 1.], np_g(x))
self.assertAllEqual([1., 1.], np_g(1.))
def testCallShape(self):
@function.defun
def f(x):
return x + 1
@function.defun
def g(x):
x = f(x)
self.assertEqual(x.shape.as_list(), [])
return None
g(constant_op.constant(1.0))
def testGradientTensorConversionWithDefun(self):
three = resource_variable_ops.ResourceVariable(3.0, name='v')
@function.defun
def f(x):
return math_ops.add(x, three)
def g(x):
tape.watch_variable(three)
return f(x)
g = backprop.implicit_grad(g)(constant_op.constant(1.0))[0][0]
self.assertAllEqual(g, 1.0)
def testGradient(self):
matmul = function.defun(math_ops.matmul)
def sq(x):
return matmul(x, x, transpose_a=True)
t = constant_op.constant([[1.0, 2.0], [3.0, 4.0]])
grad_t, = backprop.gradients_function(sq, [0])(t)
self.assertAllEqual(grad_t, [[6, 6], [14, 14]])
def testGradientInFunction(self):
@function.defun
def f(x):
return backprop.gradients_function(lambda y: y * y, [0])(x)[0]
self.assertAllEqual(f(constant_op.constant(1.0)), 2.0)
def testGradientOfGatherWithDefun(self):
v = resource_variable_ops.ResourceVariable([0.0, 1.0, 2.0])
def sum_gather():
return math_ops.reduce_sum(array_ops.gather(v, [1, 2]))
grad_fn = backprop.implicit_grad(sum_gather)
gradient = grad_fn()
defun_grad_fn = backprop.implicit_grad(function.defun(sum_gather))
defun_gradient = defun_grad_fn()
self.assertEqual(len(gradient), len(defun_gradient))
gradient = gradient[0][0]
defun_gradient = defun_gradient[0][0]
self.assertAllEqual(gradient.values, defun_gradient.values)
self.assertAllEqual(gradient.indices, defun_gradient.indices)
self.assertAllEqual(gradient.dense_shape, defun_gradient.dense_shape)
def testReturningIndexedSlicesWithDefun(self):
def validate(indexed_slice):
def f():
return indexed_slice
output = function.defun(f)()
self.assertTrue(isinstance(output, ops.IndexedSlices))
self.assertAllEqual(indexed_slice.values, output.values)
self.assertAllEqual(indexed_slice.indices, output.indices)
self.assertAllEqual(indexed_slice.dense_shape, output.dense_shape)
self.assertEqual(
function.make_defun_op(f).output_shapes, indexed_slice.values.shape)
arg = ops.IndexedSlices(
values=constant_op.constant([1, 2]),
indices=constant_op.constant([0, 1]),
dense_shape=constant_op.constant([2]))
validate(arg)
arg = ops.IndexedSlices(
values=constant_op.constant([1, 2]),
indices=constant_op.constant([0, 1]),
dense_shape=None)
validate(arg)
def testIndexedSliceAsArgumentWithDefun(self):
@function.defun
def f(indexed_slice):
return indexed_slice
def validate(arg):
output = f(arg)
self.assertTrue(isinstance(output, ops.IndexedSlices))
self.assertAllEqual(arg.values, output.values)
self.assertAllEqual(arg.indices, output.indices)
self.assertAllEqual(arg.dense_shape, output.dense_shape)
indexed_slice = ops.IndexedSlices(
values=constant_op.constant([1]),
indices=constant_op.constant([0]),
dense_shape=constant_op.constant([1]))
validate(indexed_slice)
# Test that `f` works even when `dense_shape` is None.
indexed_slice = ops.IndexedSlices(
values=constant_op.constant([1]),
indices=constant_op.constant([0]),
dense_shape=None)
validate(indexed_slice)
def testFunctionOnDevice(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
x = constant_op.constant([1.]).gpu()
f = function.defun(math_ops.add)
y = f(x, x).cpu()
self.assertAllEqual(y, [2.])
def testFunctionHandlesInputsOnDifferentDevices(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
# The Reshape op requires the shape tensor to be placed in host memory.
reshape = function.defun(array_ops.reshape)
value = constant_op.constant([1., 2.]).gpu()
shape = constant_op.constant([2, 1])
reshaped = reshape(value, shape).cpu()
self.assertAllEqual(reshaped, [[1], [2]])
def testFunctionHandlesInputsPlacedOnTheWrongDeviceGracefully(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
# The Reshape op requires the shape tensor to be placed in host memory.
reshape = function.defun(array_ops.reshape)
value = constant_op.constant([1., 2.])
shape = constant_op.constant([2, 1]).gpu()
with self.assertRaises(errors.InvalidArgumentError):
with ops.device('gpu:0'):
reshape(value, shape)
def testDifferentiableFunctionNoneOutputs(self):
@function.defun
def my_function(x):
return x, None
def wrapper(x):
return my_function(x)[0]
g = backprop.gradients_function(wrapper, [0])(constant_op.constant(0.0))
self.assertAllEqual(g[0], 1.)
def testNoneOutput(self):
@function.defun
def my_function(_):
return None
self.assertAllEqual(my_function(1), None)
def testNestedFunctions(self):
# TensorFlow function (which is what would be used in TensorFlow graph
# construction).
@tf_function.Defun(dtypes.int32, dtypes.int32)
def add(a, b):
return math_ops.add(a, b)
@function.defun
def add_one(x):
return add(x, 1)
self.assertAllEqual(3, add_one(constant_op.constant(2)))
def testVariableCaptureInNestedFunctions(self):
v = resource_variable_ops.ResourceVariable(1)
@function.defun
def read():
return v.read_value()
@function.defun
def outer():
return read()
self.assertEqual(1, int(outer()))
def testReturnCapturedEagerTensor(self):
t = constant_op.constant(1)
@function.defun
def read():
return t
self.assertEqual(1, int(read()))
def testReturnCapturedGraphTensor(self):
with context.graph_mode(), self.test_session():
t = constant_op.constant(1)
@function.defun
def read():
return t
self.assertEqual(1, int(self.evaluate(read())))
def testSequenceInputs(self):
clip_by_global_norm = function.defun(clip_ops.clip_by_global_norm)
t_list = [constant_op.constant(1.0), constant_op.constant(2.0)]
clipped_list, global_norm = clip_by_global_norm(t_list,
constant_op.constant(.2))
for t in clipped_list:
self.assertTrue(isinstance(t, ops.Tensor))
self.assertTrue(isinstance(global_norm, ops.Tensor))
def testNestedSequenceInputs(self):
def my_op(inputs):
a, b, c = inputs
e, f = b
g, h = e
return [a + a, [tuple([f + f, g + g]), h + h], c + c], a + f + g + h + c
my_eager_op = function.defun(my_op)
ret = my_eager_op([
constant_op.constant(1), [(constant_op.constant(2),
constant_op.constant(3)),
constant_op.constant(4)],
constant_op.constant(5)
])
self.assertEqual(len(ret), 2)
self.assertAllEqual(ret[0][0], 2)
self.assertAllEqual(ret[0][1][0][0], 8)
self.assertAllEqual(ret[0][1][0][1], 4)
self.assertTrue(isinstance(ret[0][1][0], tuple))
self.assertAllEqual(ret[0][1][1], 6)
self.assertAllEqual(ret[0][2], 10)
self.assertAllEqual(ret[1], 15)
def testVariableNamesRespectNameScopesWithDefun(self):
@function.defun
def create_variable():
with ops.name_scope('foo'):
v = resource_variable_ops.ResourceVariable(0.0, name='bar')
self.assertEqual(v.name, 'foo/bar:0')
create_variable()
def testVariableNamesRespectNameScopesWithDefunInGraph(self):
with context.graph_mode():
@function.defun
def create_variable():
with ops.name_scope('foo'):
v = resource_variable_ops.ResourceVariable([1.0, 2.0], name='bar')
self.assertEqual(v.name, 'foo/bar:0')
with ops.get_default_graph().as_default():
create_variable()
class AutomaticControlDependenciesTest(test.TestCase):
def testBasic(self):
with context.graph_mode(), self.test_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
with function.AutomaticControlDependencies() as c:
v.assign(v + 1)
v.assign(2 * v)
val = v.read_value()
c.mark_as_return(val)
self.assertAllEqual(val.eval(), 4.0)
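      # Without the AutomaticControlDependencies context the two assigns and
      # the read would carry no ordering edges in graph mode; mark_as_return()
      # threads them in program order, hence (1 + 1) * 2 == 4.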
def testCondMustRun(self):
with context.graph_mode(), self.test_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
p = array_ops.placeholder(dtype=dtypes.bool)
with function.AutomaticControlDependencies() as c:
def true_fn():
v.assign(v + 1)
return 0.0
def false_fn():
v.assign(v + 4)
return 1.0
control_flow_ops.cond(p, true_fn, false_fn)
val = v.read_value()
c.mark_as_return(val)
self.assertAllEqual(val.eval(feed_dict={p: False}), 5.0)
self.assertAllEqual(val.eval(feed_dict={p: True}), 6.0)
def testCondMustRunSeparateRead(self):
with context.graph_mode(), self.test_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
p = array_ops.placeholder(dtype=dtypes.bool)
with function.AutomaticControlDependencies() as c:
def true_fn():
v.assign(v + 1)
return 0.0
def false_fn():
v.assign(v + 4)
return 1.0
control_flow_ops.cond(p, true_fn, false_fn)
one = constant_op.constant(1.0)
c.mark_as_return(one)
one.eval(feed_dict={p: False})
self.assertAllEqual(v.read_value().eval(), 5.0)
one.eval(feed_dict={p: True})
self.assertAllEqual(v.read_value().eval(), 6.0)
def testCondNested(self):
with context.graph_mode(), self.test_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
p = array_ops.placeholder(dtype=dtypes.bool)
q = array_ops.placeholder(dtype=dtypes.bool)
with function.AutomaticControlDependencies() as c:
def true_fn():
v.assign(v + 1, name='true')
return 1.0
def false_fn():
def inner_true_fn():
v.assign(v * 2, name='false_true')
return 2.0
def inner_false_fn():
v.assign(v * 3, name='false_false')
return 3.0
control_flow_ops.cond(q, inner_true_fn, inner_false_fn)
return 1.0
control_flow_ops.cond(p, true_fn, false_fn)
with ops.name_scope('final'):
val = v.read_value()
c.mark_as_return(val)
self.assertAllEqual(val.eval(feed_dict={p: False, q: False}), 3.0)
self.assertAllEqual(val.eval(feed_dict={p: False, q: True}), 6.0)
self.assertAllEqual(val.eval(feed_dict={p: True, q: True}), 7.0)
self.assertAllEqual(val.eval(feed_dict={p: True, q: False}), 8.0)
def testCondOneBranch(self):
with context.graph_mode(), self.test_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
p = array_ops.placeholder(dtype=dtypes.bool)
with function.AutomaticControlDependencies() as c:
def true_fn():
return 0.0
def false_fn():
v.assign(v + 4)
return 1.0
control_flow_ops.cond(p, true_fn, false_fn)
val = v.read_value()
c.mark_as_return(val)
self.assertAllEqual(val.eval(feed_dict={p: False}), 5.0)
self.assertAllEqual(val.eval(feed_dict={p: True}), 5.0)
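      # Both feeds see 5.0: the first eval takes the false branch (v += 4),
      # the second takes the no-op true branch, and the read is sequenced
      # after the cond either way.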
def testCondOneBranchUpdateBefore(self):
with context.graph_mode(), self.test_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
p = array_ops.placeholder(dtype=dtypes.bool)
with function.AutomaticControlDependencies() as c:
v.assign(v * 2)
def true_fn():
return 0.0
def false_fn():
v.assign(v + 4)
return 1.0
control_flow_ops.cond(p, true_fn, false_fn)
val = v.read_value()
c.mark_as_return(val)
self.assertAllEqual(val.eval(feed_dict={p: False}), 6.0)
self.assertAllEqual(val.eval(feed_dict={p: True}), 12.0)
def testCondOneBranchUpdateAfter(self):
with context.graph_mode(), self.test_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
p = array_ops.placeholder(dtype=dtypes.bool)
with function.AutomaticControlDependencies() as c:
def true_fn():
return 0.0
def false_fn():
v.assign(v + 4)
return 1.0
control_flow_ops.cond(p, true_fn, false_fn)
v.assign(v * 2)
val = v.read_value()
c.mark_as_return(val)
self.assertAllEqual(val.eval(feed_dict={p: False}), 10.0)
self.assertAllEqual(val.eval(feed_dict={p: True}), 20.0)
def testDecorator(self):
with context.graph_mode(), self.test_session():
v = resource_variable_ops.ResourceVariable(1.0)
variables.global_variables_initializer().run()
@function.automatic_control_dependencies
def f():
v.assign(v + 1)
v.assign(2 * v)
return v.read_value()
self.assertAllEqual(f().eval(), 4.0)
if __name__ == '__main__':
test.main()
| apache-2.0 |
DUCSS/ducss-site-old | project/local_settings.py | 1 | 1631 | DEBUG = True
TEMPLATE_DEBUG = DEBUG
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'database.sqlite3', # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
DEBUG_TOOLBAR_PANELS = (
'debug_toolbar.panels.version.VersionDebugPanel',
'debug_toolbar.panels.timer.TimerDebugPanel',
'debug_toolbar.panels.settings_vars.SettingsVarsDebugPanel',
'debug_toolbar.panels.headers.HeaderDebugPanel',
'debug_toolbar.panels.request_vars.RequestVarsDebugPanel',
'debug_toolbar.panels.template.TemplateDebugPanel',
'debug_toolbar.panels.sql.SQLDebugPanel',
'debug_toolbar.panels.signals.SignalDebugPanel',
'debug_toolbar.panels.logger.LoggingPanel',
)
DEBUG_TOOLBAR_CONFIG = {
'INTERCEPT_REDIRECTS': False,
'HIDE_DJANGO_SQL': False,
'TAG': 'div',
}
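# The import below splices the debug toolbar into the shared settings module
# by mutating its sequences in place after import, a common local-settings
# pattern for enabling developer-only apps without editing the main settings.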
import settings
settings.MIDDLEWARE_CLASSES += ('debug_toolbar.middleware.DebugToolbarMiddleware',)
settings.INSTALLED_APPS += ('debug_toolbar',)
# Nice to have a simple cache for testing
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
#'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
'LOCATION': 'ducss'
}
} | mit |